Remove adam
John Griffith committed Jan 12, 2018
1 parent b5dcbb1 commit 159f216
Showing 1 changed file with 2 additions and 3 deletions.
main.py (2 additions, 3 deletions)
@@ -46,20 +46,19 @@
 tf.app.flags.DEFINE_bool('use-augmented-data', False, 'Whether to use pre-generated augmented data on this run')
 tf.app.flags.DEFINE_bool('normalise-data', True, 'Whether to normalise the training and test data on a per-image basis')
 tf.app.flags.DEFINE_bool('whiten-data', True, 'Whether to \'whiten\' the training and test data on a whole-set basis')
-tf.app.flags.DEFINE_bool('adam-optimser' ,False, 'Use AdamOptimiser, else use MGD. %(default)d')
 tf.app.flags.DEFINE_bool('norm_layer' ,True, 'Use normalisation layer. %(default)d')
 tf.app.flags.DEFINE_bool('lr_decay' ,True, 'Learning rate decay. %(default)d')
 tf.app.flags.DEFINE_float('dropout-keep-rate', 1, 'Fraction of connections to keep. (default: %(default)d')
 tf.app.flags.DEFINE_bool('max-pools', False, 'Use 3 max pooling layers')

 run_log_dir = os.path.join(FLAGS.log_dir, 'exp_bs={bs}_lr={lr}_aug={aug}_'
                                           'normd={nd}_wd={wd}_crelu={crelu}_'
-                                          'ms={ms}_adam={adam}_normlayer={norm}_'
+                                          'ms={ms}_normlayer={norm}_'
                                           'lr_decay={lr_d}_dropoutkeep={do_keep}'

                                           .format(bs=FLAGS.batch_size, lr=FLAGS.learning_rate, aug=FLAGS.use_augmented_data,
                                                   nd=FLAGS.normalise_data, wd=FLAGS.whiten_data, crelu=FLAGS.crelu,
-                                                  ms=FLAGS.multi_scale, adam=FLAGS.adam_optimiser, norm=FLAGS.norm_layer,
+                                                  ms=FLAGS.multi_scale, norm=FLAGS.norm_layer,
                                                   lr_d=FLAGS.lr_decay, do_keep=FLAGS.dropout_keep_rate))

 checkpoint_path = os.path.join(run_log_dir, 'model.ckpt')
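Not part of the commit, but as a quick sanity check of the new format string: with illustrative flag values (the defaults for batch-size, learning-rate, crelu, and multi-scale are not visible in this hunk, so the numbers below are placeholders), the directory name would now expand along these lines:

    # Hypothetical expansion of the post-commit run_log_dir name; all flag
    # values below are placeholders, not the project's actual defaults.
    name = ('exp_bs={bs}_lr={lr}_aug={aug}_normd={nd}_wd={wd}_crelu={crelu}_'
            'ms={ms}_normlayer={norm}_lr_decay={lr_d}_dropoutkeep={do_keep}'
            .format(bs=128, lr=0.001, aug=False, nd=True, wd=True, crelu=False,
                    ms=False, norm=True, lr_d=True, do_keep=1))
    print(name)
    # exp_bs=128_lr=0.001_aug=False_normd=True_wd=True_crelu=False_ms=False_normlayer=True_lr_decay=True_dropoutkeep=1

Note that the 'adam={adam}' segment is gone, so runs made before and after this commit will log to differently named directories even with identical settings.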
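For context on what was removed: the deleted flag's help text reads 'Use AdamOptimiser, else use MGD', so elsewhere in main.py it presumably gated the optimizer choice, roughly as in the sketch below. This is not taken from the commit: the names train_step and cross_entropy are assumptions, and 'MGD' is read here as momentum gradient descent, though it could equally mean plain tf.train.GradientDescentOptimizer.

    # Hypothetical optimizer switch the removed flag would have controlled
    # (TF 1.x API; illustrative only, not the author's actual code).
    if FLAGS.adam_optimiser:
        optimiser = tf.train.AdamOptimizer(learning_rate=FLAGS.learning_rate)
    else:
        optimiser = tf.train.MomentumOptimizer(learning_rate=FLAGS.learning_rate,
                                               momentum=0.9)
    train_step = optimiser.minimize(cross_entropy)

With the flag gone, whichever branch corresponds to the non-Adam optimizer is presumably what remains in the code.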
