diff --git a/python/cuml/cuml/tests/experimental/test_filex.py b/python/cuml/cuml/tests/experimental/test_filex.py
index 59633c583e..43e71a4c6c 100644
--- a/python/cuml/cuml/tests/experimental/test_filex.py
+++ b/python/cuml/cuml/tests/experimental/test_filex.py
@@ -135,6 +135,7 @@ def _build_and_save_xgboost(
 )
 @pytest.mark.parametrize("n_classes", [2, 5, 25])
 @pytest.mark.skipif(not has_xgboost(), reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 def test_fil_classification(
     train_device,
     infer_device,
@@ -229,6 +230,7 @@ def test_fil_classification(
     "max_depth", [unit_param(3), unit_param(7), stress_param(11)]
 )
 @pytest.mark.skipif(not has_xgboost(), reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 def test_fil_regression(
     train_device,
     infer_device,
@@ -511,6 +513,7 @@ def small_classifier_and_preds(tmpdir_factory, request):
 @pytest.mark.parametrize("infer_device", ("cpu", "gpu"))
 @pytest.mark.skipif(not has_xgboost(), reason="need to install xgboost")
 @pytest.mark.parametrize("precision", ["native", "float32", "float64"])
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 def test_precision_xgboost(
     train_device, infer_device, precision, small_classifier_and_preds
 ):
@@ -536,6 +539,7 @@ def test_precision_xgboost(
 @pytest.mark.skipif(has_xgboost() is False, reason="need to install xgboost")
 @pytest.mark.parametrize("layout", ["depth_first", "breadth_first", "layered"])
 @pytest.mark.parametrize("chunk_size", [2, 4, 8, 16, 32])
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 def test_performance_hyperparameters(
     train_device, infer_device, layout, chunk_size, small_classifier_and_preds
 ):
@@ -562,6 +566,7 @@ def test_performance_hyperparameters(
 @pytest.mark.parametrize("train_device", ("cpu", "gpu"))
 @pytest.mark.parametrize("infer_device", ("cpu", "gpu"))
 @pytest.mark.skipif(not has_xgboost(), reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 def test_output_args(train_device, infer_device, small_classifier_and_preds):
     with using_device_type(train_device):
         model_path, model_type, X, xgb_preds = small_classifier_and_preds
@@ -722,6 +727,7 @@ def test_lightgbm(
 @pytest.mark.parametrize("n_classes", [2, 5, 25])
 @pytest.mark.parametrize("num_boost_round", [10, 100])
 @pytest.mark.skipif(not has_xgboost(), reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 def test_predict_per_tree(
     train_device, infer_device, n_classes, num_boost_round, tmp_path
 ):
@@ -783,6 +789,7 @@ def test_predict_per_tree(
 @pytest.mark.parametrize("infer_device", ("cpu", "gpu"))
 @pytest.mark.parametrize("n_classes", [5, 25])
 @pytest.mark.skipif(not has_xgboost(), reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 def test_predict_per_tree_with_vector_leaf(
     train_device, infer_device, n_classes, tmp_path
 ):
@@ -829,6 +836,7 @@ def test_predict_per_tree_with_vector_leaf(
 @pytest.mark.parametrize("infer_device", ("cpu", "gpu"))
 @pytest.mark.parametrize("n_classes", [2, 5, 25])
 @pytest.mark.skipif(not has_xgboost(), reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 def test_apply(train_device, infer_device, n_classes, tmp_path):
     n_rows = 1000
     n_columns = 30
diff --git a/python/cuml/cuml/tests/explainer/test_gpu_treeshap.py b/python/cuml/cuml/tests/explainer/test_gpu_treeshap.py
index 8354cacff7..b6e448129f 100644
--- a/python/cuml/cuml/tests/explainer/test_gpu_treeshap.py
+++ b/python/cuml/cuml/tests/explainer/test_gpu_treeshap.py
@@ -149,6 +149,7 @@ def count_categorical_split(tl_model):
     ],
 )
 @pytest.mark.skipif(not has_xgboost(), reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 @pytest.mark.skipif(not has_shap(), reason="need to install shap")
 @pytest.mark.skipif(not has_sklearn(), reason="need to install scikit-learn")
 def test_xgb_regressor(objective):
@@ -223,6 +224,7 @@ def test_xgb_regressor(objective):
     ],
 )
 @pytest.mark.skipif(not has_xgboost(), reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 @pytest.mark.skipif(not has_shap(), reason="need to install shap")
 @pytest.mark.skipif(not has_sklearn(), reason="need to install scikit-learn")
 def test_xgb_classifier(objective, n_classes):
@@ -478,6 +480,7 @@ def test_sklearn_rf_classifier(n_classes):
 
 
 @pytest.mark.skipif(not has_xgboost(), reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 def test_xgb_toy_categorical():
     X = pd.DataFrame(
         {
@@ -511,6 +514,7 @@ def test_xgb_toy_categorical():
 
 @pytest.mark.parametrize("n_classes", [2, 3])
 @pytest.mark.skipif(not has_xgboost(), reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 @pytest.mark.skipif(not has_sklearn(), reason="need to install scikit-learn")
 def test_xgb_classifier_with_categorical(n_classes):
     n_samples = 100
@@ -572,6 +576,7 @@ def test_xgb_classifier_with_categorical(n_classes):
 
 
 @pytest.mark.skipif(not has_xgboost(), reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 @pytest.mark.skipif(not has_sklearn(), reason="need to install scikit-learn")
 def test_xgb_regressor_with_categorical():
     n_samples = 100
diff --git a/python/cuml/cuml/tests/test_benchmark.py b/python/cuml/cuml/tests/test_benchmark.py
index 61157a2c73..cae0bc167a 100644
--- a/python/cuml/cuml/tests/test_benchmark.py
+++ b/python/cuml/cuml/tests/test_benchmark.py
@@ -195,6 +195,10 @@ def test_real_algos_runner(algo_name):
     ):
         pytest.xfail()
 
+    # TODO(hcho3): Remove once XGBoost works with older drivers
+    if algo_name == "FIL":
+        pytest.xfail(reason="xgboost's allocator doesn't work with older drivers")
+
     runner = AccuracyComparisonRunner(
         [50], [5], dataset_name="classification", test_fraction=0.20
     )
diff --git a/python/cuml/cuml/tests/test_fil.py b/python/cuml/cuml/tests/test_fil.py
index 8070fa5a3c..4e71c709fd 100644
--- a/python/cuml/cuml/tests/test_fil.py
+++ b/python/cuml/cuml/tests/test_fil.py
@@ -133,6 +133,7 @@ def _build_and_save_xgboost(
 )
 @pytest.mark.parametrize("n_classes", [2, 5, 25])
 @pytest.mark.skipif(has_xgboost() is False, reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 @pytest.mark.filterwarnings("ignore::FutureWarning")
 def test_fil_classification(
     n_rows, n_columns, num_rounds, n_classes, tmp_path
@@ -205,6 +206,7 @@ def test_fil_classification(
     "max_depth", [unit_param(3), unit_param(7), stress_param(11)]
 )
 @pytest.mark.skipif(has_xgboost() is False, reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 @pytest.mark.filterwarnings("ignore::FutureWarning")
 def test_fil_regression(n_rows, n_columns, num_rounds, tmp_path, max_depth):
     # settings
@@ -469,6 +471,7 @@ def small_classifier_and_preds(tmpdir_factory, request):
 
 
 @pytest.mark.skipif(has_xgboost() is False, reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 @pytest.mark.parametrize(
     "algo",
     [
@@ -501,6 +504,7 @@ def test_output_algos(algo, small_classifier_and_preds):
 
 
 @pytest.mark.skipif(has_xgboost() is False, reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 @pytest.mark.parametrize("precision", ["native", "float32", "float64"])
 @pytest.mark.filterwarnings("ignore::FutureWarning")
 def test_precision_xgboost(precision, small_classifier_and_preds):
@@ -521,6 +525,7 @@ def test_precision_xgboost(precision, small_classifier_and_preds):
 
 
 @pytest.mark.skipif(has_xgboost() is False, reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 @pytest.mark.parametrize(
     "storage_type", [False, True, "auto", "dense", "sparse", "sparse8"]
 )
@@ -543,6 +548,7 @@ def test_output_storage_type(storage_type, small_classifier_and_preds):
 
 
 @pytest.mark.skipif(has_xgboost() is False, reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 @pytest.mark.parametrize("storage_type", ["dense", "sparse"])
 @pytest.mark.parametrize("blocks_per_sm", [1, 2, 3, 4])
 @pytest.mark.filterwarnings("ignore::FutureWarning")
@@ -567,6 +573,7 @@ def test_output_blocks_per_sm(
 
 
 @pytest.mark.skipif(has_xgboost() is False, reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 @pytest.mark.parametrize("threads_per_tree", [2, 4, 8, 16, 32, 64, 128, 256])
 @pytest.mark.filterwarnings("ignore::FutureWarning")
 def test_threads_per_tree(threads_per_tree, small_classifier_and_preds):
@@ -594,6 +601,7 @@ def test_threads_per_tree(threads_per_tree, small_classifier_and_preds):
 
 @pytest.mark.parametrize("output_class", [True, False])
 @pytest.mark.skipif(has_xgboost() is False, reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 @pytest.mark.filterwarnings("ignore::FutureWarning")
 def test_thresholding(output_class, small_classifier_and_preds):
     model_path, model_type, X, xgb_preds = small_classifier_and_preds
@@ -612,6 +620,7 @@ def test_thresholding(output_class, small_classifier_and_preds):
 
 
 @pytest.mark.skipif(has_xgboost() is False, reason="need to install xgboost")
+@pytest.mark.xfail(reason="xgboost's allocator doesn't work with older drivers")
 @pytest.mark.filterwarnings("ignore::FutureWarning")
 def test_output_args(small_classifier_and_preds):
     model_path, model_type, X, xgb_preds = small_classifier_and_preds