Fix test for recent keras 3 change
When the dropout rate is zero, dropout will now create no variables (no
RNG seeds). We should set a non-zero dropout rate so the tests still
exercise the presence of these seed variables.
mattdangerw committed Jan 10, 2024
1 parent fde5879 commit d7db88d
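
For context on the behavior the commit message describes, here is a minimal illustrative sketch (not part of the commit); it assumes a Keras 3 install, where layer.variables includes RNG seed state:

import keras

# With a zero rate, Keras 3's Dropout no longer creates an RNG seed
# generator, so the layer holds no variables at all.
no_dropout = keras.layers.Dropout(rate=0.0)
print(len(no_dropout.variables))  # expected: 0

# With a non-zero rate, the seed generator (and its state variable) is
# created, which is the state the updated tests now check for.
with_dropout = keras.layers.Dropout(rate=0.1)
print(len(with_dropout.variables))  # expected: 1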
Showing 3 changed files with 4 additions and 1 deletion.
@@ -28,6 +28,7 @@ def test_layer_behaviors(self):
             init_kwargs={
                 "num_heads": 2,
                 "key_dim": 4,
+                "dropout": 0.1,
             },
             input_data={
                 "query": random.uniform(shape=(2, 4, 6)),
2 changes: 1 addition & 1 deletion keras_nlp/layers/modeling/f_net_encoder_test.py
@@ -23,11 +23,11 @@ def test_layer_behaviors(self):
             cls=FNetEncoder,
             init_kwargs={
                 "intermediate_dim": 4,
-                "dropout": 0,
                 "activation": "relu",
                 "layer_norm_epsilon": 1e-5,
                 "kernel_initializer": "HeNormal",
                 "bias_initializer": "Zeros",
+                "dropout": 0.1,
             },
             input_data=random.uniform(shape=(2, 4, 6)),
             expected_output_shape=(2, 4, 6),
2 changes: 2 additions & 0 deletions keras_nlp/layers/modeling/transformer_decoder_test.py
@@ -36,6 +36,7 @@ def test_layer_behaviors(self, normalize_first):
                 "layer_norm_epsilon": 1e-05,
                 "kernel_initializer": "HeNormal",
                 "bias_initializer": "Zeros",
+                "dropout": 0.1,
             },
             input_data=random.uniform(shape=(2, 4, 6)),
             expected_output_shape=(2, 4, 6),
@@ -59,6 +60,7 @@ def test_layer_behaviors_with_cross_attention(self, normalize_first):
                 "layer_norm_epsilon": 1e-05,
                 "kernel_initializer": "HeNormal",
                 "bias_initializer": "Zeros",
+                "dropout": 0.1,
             },
             input_data={
                 "decoder_sequence": random.uniform(shape=(2, 4, 6)),
