Skip to content

Commit

Permalink
Adds a gradients name scope for a nicer graph visualization
Browse files Browse the repository at this point in the history
  • Loading branch information
vierja committed Oct 9, 2017
1 parent c1f20a9 commit 669b158
Showing 1 changed file with 7 additions and 6 deletions.
13 changes: 7 additions & 6 deletions luminoth/train.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,13 +68,14 @@ def run(model_type, config_file, override_params, target='', cluster_spec=None,

trainable_vars = model.get_trainable_vars()

# Compute, clip and apply gradients
grads_and_vars = optimizer.compute_gradients(
total_loss, trainable_vars
)
with tf.name_scope('gradients'):
# Compute, clip and apply gradients
grads_and_vars = optimizer.compute_gradients(
total_loss, trainable_vars
)

# Clip by norm. TODO: Configurable
grads_and_vars = clip_gradients_by_norm(grads_and_vars)
# Clip by norm. TODO: Configurable
grads_and_vars = clip_gradients_by_norm(grads_and_vars)

train_op = optimizer.apply_gradients(
grads_and_vars, global_step=global_step
Expand Down

0 comments on commit 669b158

Please sign in to comment.