From bf2f649a50e95d8fb5fd2069d92b418f10034f3e Mon Sep 17 00:00:00 2001
From: Yu Shi
Date: Mon, 5 Feb 2024 13:45:17 +0000
Subject: [PATCH] skip tests for cuda version

---
 tests/python_package_test/test_engine.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/python_package_test/test_engine.py b/tests/python_package_test/test_engine.py
index 3047565555e8..e90a9e44ff36 100644
--- a/tests/python_package_test/test_engine.py
+++ b/tests/python_package_test/test_engine.py
@@ -309,7 +309,7 @@ def test_missing_value_handle_none():
     assert evals_result['valid_0']['auc'][-1] == pytest.approx(ret)
 
 
-@pytest.mark.parametrize('use_quantized_grad', [True, False])
+@pytest.mark.parametrize('use_quantized_grad', [pytest.param(True, marks=pytest.mark.skipif(getenv('TASK', '') == 'cuda', reason='Skip because quantized training with categorical features is not supported for cuda version')), False])
 def test_categorical_handle(use_quantized_grad):
     x = [0, 1, 2, 3, 4, 5, 6, 7]
     y = [0, 1, 0, 1, 0, 1, 0, 1]
@@ -351,7 +351,7 @@ def test_categorical_handle(use_quantized_grad):
     assert evals_result['valid_0']['auc'][-1] == pytest.approx(ret)
 
 
-@pytest.mark.parametrize('use_quantized_grad', [True, False])
+@pytest.mark.parametrize('use_quantized_grad', [pytest.param(True, marks=pytest.mark.skipif(getenv('TASK', '') == 'cuda', reason='Skip because quantized training with categorical features is not supported for cuda version')), False])
 def test_categorical_handle_na(use_quantized_grad):
     x = [0, np.nan, 0, np.nan, 0, np.nan]
     y = [0, 1, 0, 1, 0, 1]
@@ -393,7 +393,7 @@ def test_categorical_handle_na(use_quantized_grad):
     assert evals_result['valid_0']['auc'][-1] == pytest.approx(ret)
 
 
-@pytest.mark.parametrize('use_quantized_grad', [True, False])
+@pytest.mark.parametrize('use_quantized_grad', [pytest.param(True, marks=pytest.mark.skipif(getenv('TASK', '') == 'cuda', reason='Skip because quantized training with categorical features is not supported for cuda version')), False])
 def test_categorical_non_zero_inputs(use_quantized_grad):
     x = [1, 1, 1, 1, 1, 1, 2, 2]
     y = [1, 1, 1, 1, 1, 1, 0, 0]
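
Note (not part of the patch): below is a minimal, self-contained sketch of the pytest.param + skipif pattern the patch applies, in case the one-line parametrize is hard to read. The getenv('TASK', '') == 'cuda' check and the skip reason mirror the patch; the test name and its body are hypothetical placeholders, not code from test_engine.py.

# Sketch only: skip a single parametrized case when running the CUDA CI task.
from os import getenv

import pytest


@pytest.mark.parametrize(
    'use_quantized_grad',
    [
        # The True case is skipped when TASK == 'cuda', matching the patch.
        pytest.param(
            True,
            marks=pytest.mark.skipif(
                getenv('TASK', '') == 'cuda',
                reason='Skip because quantized training with categorical features is not supported for cuda version'
            )
        ),
        # The False case always runs.
        False
    ]
)
def test_example(use_quantized_grad):
    # Hypothetical body so the sketch runs standalone; the real tests train a model.
    assert isinstance(use_quantized_grad, bool)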