
Commit

Use default attention size if unspecified
lvapeab committed Jun 1, 2017
1 parent e5ada24 · commit cea5d86
Showing 1 changed file with 1 addition and 1 deletion.
model_zoo.py (1 addition & 1 deletion)

@@ -351,7 +351,7 @@ def GroundHogModel(self, params):
             initial_memory = None
         # 3.3. Attentional decoder
         sharedAttRNNCond = eval('Att' + params['RNN_TYPE'] + 'Cond')(params['DECODER_HIDDEN_SIZE'],
-                                                                      att_dim=params['ATTENTION_SIZE'],
+                                                                      att_dim=params.get('ATTENTION_SIZE', 0),
                                                                       W_regularizer=l2(params['RECURRENT_WEIGHT_DECAY']),
                                                                       U_regularizer=l2(params['RECURRENT_WEIGHT_DECAY']),
                                                                       V_regularizer=l2(params['RECURRENT_WEIGHT_DECAY']),
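
For context, a minimal sketch (not from this repository) of the behavioural difference the one-line change introduces: params['ATTENTION_SIZE'] raises a KeyError when the key is missing from the config, while params.get('ATTENTION_SIZE', 0) returns 0, a value the attention layer can interpret as "no explicit size given, pick a default". The build_decoder_attention helper and the fallback to DECODER_HIDDEN_SIZE below are hypothetical illustrations; only the params dict and the key names come from the diff above.

    # Hypothetical helper illustrating the before/after behaviour.
    def build_decoder_attention(params):
        # Before the commit: params['ATTENTION_SIZE'] would raise KeyError
        # if the user never set ATTENTION_SIZE in their config.
        # After the commit: .get() returns the sentinel 0 instead.
        att_dim = params.get('ATTENTION_SIZE', 0)
        if att_dim == 0:
            # Hypothetical fallback: reuse the decoder hidden size as the
            # attention dimension when none was specified.
            att_dim = params['DECODER_HIDDEN_SIZE']
        return att_dim

    # Works with or without ATTENTION_SIZE in the config:
    print(build_decoder_attention({'DECODER_HIDDEN_SIZE': 512}))                         # -> 512
    print(build_decoder_attention({'DECODER_HIDDEN_SIZE': 512, 'ATTENTION_SIZE': 256}))  # -> 256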
