GatzZ
6/6/2018 - 6:32 AM

TensorFlow: gradient clipping with `tf.clip_by_norm`

"""Prepare to Train"""
global_step = tf.Variable(0, name="global_step", trainable=False)

variable_to_train = []
for variable in tf.trainable_variables():
  if not(variable.name.startswith(FLAGS.loss_model)):
      variable_to_train.append(variable)
# train_op = tf.train.AdamOptimizer(1e-3).minimize(loss, global_step=global_step, var_list=variable_to_train)
#...................
optimizer = tf.train.AdamOptimizer(1e-3)
gvs = optimizer.compute_gradients(loss, var_list=variable_to_train)
capped_gvs = [(tf.clip_by_norm(grad, 10), var) for grad, var in gvs]
train_op = optimizer.apply_gradients(capped_gvs, global_step=global_step)
#....................