I would like to optimise 2 optimisers/ functions using the tensorflow v1 using the training set, hence would like to run the 2 optimisers in parallel.
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
# ...
loss_i1 = - tf.reduce_mean(s1+s2+s3)
optimizer_i1 = tf.train.AdamOptimizer(learning_rate).minimize(loss_i1)
loss_i2 = - tf.reduce_mean(s1*s2*s3)
optimizer_i2 = tf.train.AdamOptimizer(learning_rate).minimize(loss_i2)
for epoch in range(epochs):
for batch_index in range(n_batches):
sys.stdout.flush()
_y, _s1, _s2, _s3 = fetch_batch(epoch, batch_index, batch_size, y_, s1_, s2_, s3_)
sess.run(optimizer_i1, feed_dict={y: _y, s1: _s1, s2: _s2, s3: _s3})
sess.run(optimizer_i2, feed_dict={y: _y, s1: _s1, s2: _s2, s3: _s3})
loss_train_i1 = loss_i1.eval(feed_dict={y: _y, s1: _s1, s2: _s2, s3: _s3})
loss_train_i2 = loss_i2.eval(feed_dict={y: _y, s1: _s1, s2: _s2, s3: _s3})