diff --git a/thinc/tests/backends/test_ops.py b/thinc/tests/backends/test_ops.py index e08d3f319..57a3a706f 100644 --- a/thinc/tests/backends/test_ops.py +++ b/thinc/tests/backends/test_ops.py @@ -1383,8 +1383,7 @@ def test_lstm_forward_training(ops, depth, dirs, nO, batch_size, nI): assert_allclose(Y, reference[0], atol=1e-4, rtol=1e-3) -@pytest.mark.skipif(platform.machine() == "aarch64", reason="Flaky, skip temporarily") -@pytest.mark.skipif(platform.machine() == "win_amd64", reason="Flaky, skip temporarily") +@pytest.mark.skip(reason="Flaky, skip temporarily") @pytest.mark.parametrize("ops", XP_OPS) @settings(max_examples=MAX_EXAMPLES, deadline=None) @given(args=draw_lstm_args()) diff --git a/thinc/tests/layers/test_linear.py b/thinc/tests/layers/test_linear.py index cab517560..b1752fba2 100644 --- a/thinc/tests/layers/test_linear.py +++ b/thinc/tests/layers/test_linear.py @@ -1,5 +1,3 @@ -import platform - import numpy import pytest from hypothesis import given, settings @@ -36,7 +34,7 @@ def test_linear_dimensions_on_data(): y.max.assert_called_with() -@pytest.mark.skipif(platform.machine() == "win_amd64", reason="Flaky, skip temporarily") +@pytest.mark.skip(reason="Flaky, skip temporarily") @given(arrays_OI_O_BI(max_batch=8, max_out=8, max_in=8)) def test_begin_update_matches_predict(W_b_input): model = get_model(W_b_input) @@ -47,7 +45,7 @@ def test_begin_update_matches_predict(W_b_input): assert_allclose(fwd_via_begin_update, fwd_via_predict_batch) -@pytest.mark.skipif(platform.machine() == "win_amd64", reason="Flaky, skip temporarily") +@pytest.mark.skip(reason="Flaky, skip temporarily") @given(arrays_OI_O_BI(max_batch=8, max_out=8, max_in=8)) def test_finish_update_calls_optimizer_with_weights(W_b_input): model = get_model(W_b_input) @@ -69,7 +67,7 @@ def sgd(key, data, gradient, **kwargs): assert (model.id, name) in seen_keys -@pytest.mark.skipif(platform.machine() == "win_amd64", reason="Flaky, skip temporarily") +@pytest.mark.skip(reason="Flaky, skip temporarily") @settings(max_examples=100) @given(arrays_OI_O_BI(max_batch=8, max_out=8, max_in=8)) def test_predict_small(W_b_input): @@ -92,7 +90,7 @@ def test_predict_small(W_b_input): assert_allclose(predicted_output, expected_output, rtol=0.01, atol=0.01) -@pytest.mark.skipif(platform.machine() == "win_amd64", reason="Flaky, skip temporarily") +@pytest.mark.skip(reason="Flaky, skip temporarily") @given(arrays_OI_O_BI(max_batch=20, max_out=30, max_in=30)) @settings(deadline=None) def test_predict_extensive(W_b_input): @@ -115,7 +113,7 @@ def test_predict_extensive(W_b_input): assert_allclose(predicted_output, expected_output, rtol=1e-04, atol=0.0001) -@pytest.mark.skipif(platform.machine() == "win_amd64", reason="Flaky, skip temporarily") +@pytest.mark.skip(reason="Flaky, skip temporarily") @given(arrays_OI_O_BI(max_batch=8, max_out=8, max_in=8)) def test_dropout_gives_zero_activations(W_b_input): model = chain(get_model(W_b_input), Dropout(1.0)) @@ -125,7 +123,7 @@ def test_dropout_gives_zero_activations(W_b_input): assert all(val == 0.0 for val in fwd_dropped.flatten()) -@pytest.mark.skipif(platform.machine() == "win_amd64", reason="Flaky, skip temporarily") +@pytest.mark.skip(reason="Flaky, skip temporarily") @given(arrays_OI_O_BI(max_batch=8, max_out=8, max_in=8)) def test_dropout_gives_zero_gradients(W_b_input): model = chain(get_model(W_b_input), Dropout(1.0))