Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions skpro/model_selection/_tuning.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import pandas as pd
from sklearn.model_selection import ParameterGrid, ParameterSampler, check_cv

from skpro.metrics import CRPS
from skpro.benchmarking.evaluate import evaluate
from skpro.regression.base._delegate import _DelegatedProbaRegressor
from skpro.utils.parallel import parallelize
Expand Down Expand Up @@ -108,6 +109,8 @@ def _fit(self, X, y, C=None):

# scoring = check_scoring(self.scoring, obj=self)
scoring = self.scoring
if scoring is None:
scoring = CRPS()
scoring_name = f"test_{scoring.name}"
Comment on lines 110 to 114
Copy link

Copilot AI Mar 30, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Add a regression test to cover the scoring=None path (issue #1009). Without a test, this bug is likely to reappear since BaseGridSearch._fit relies on scoring.name/get_tag and will crash if scoring is ever left as None again. A minimal test can instantiate GridSearchCV with scoring=None, run fit, and assert that cv_results_ contains the expected CRPS-derived score column (e.g., mean_test_CRPS) and that no AttributeError is raised.

Copilot uses AI. Check for mistakes.

backend = self.backend
Expand Down
1 change: 1 addition & 0 deletions skpro/model_selection/tests/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
"""Tests for model selection utilities."""
53 changes: 53 additions & 0 deletions skpro/model_selection/tests/test_tuning.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
"""Tests for model selection tuning utilities."""

import pandas as pd
import pytest
from sklearn.model_selection import KFold

from skpro.model_selection import GridSearchCV, RandomizedSearchCV
from skpro.regression.dummy import DummyProbaRegressor
from skpro.tests.test_switch import run_test_module_changed


def _get_test_data():
X = pd.DataFrame({"x": [1, 2, 3, 4]})
y = pd.DataFrame({"y": [1.0, 2.0, 3.0, 4.0]})
return X, y


@pytest.mark.skipif(
    not run_test_module_changed("skpro.model_selection"),
    reason="Test only if skpro.model_selection has been changed",
)
def test_gridsearch_scoring_none_defaults_to_crps():
    """GridSearchCV should use CRPS when scoring is None."""
    X, y = _get_test_data()

    # scoring=None exercises the default-metric fallback in BaseGridSearch._fit
    searcher = GridSearchCV(
        estimator=DummyProbaRegressor(),
        param_grid={"strategy": ["empirical"]},
        cv=KFold(n_splits=2),
        scoring=None,
    )
    searcher.fit(X, y)

    # the CRPS-derived results column must be present, proving the fallback ran
    assert "mean_test_CRPS" in searcher.cv_results_.columns


@pytest.mark.skipif(
    not run_test_module_changed("skpro.model_selection"),
    reason="Test only if skpro.model_selection has been changed",
)
def test_randomizedsearch_scoring_none_defaults_to_crps():
    """RandomizedSearchCV should use CRPS when scoring is None.

    Regression test for the ``scoring=None`` path: fitting must not raise
    (previously ``scoring.name`` was accessed on ``None``) and the
    CRPS-derived score column must appear in ``cv_results_``.
    """
    X, y = _get_test_data()

    rscv = RandomizedSearchCV(
        estimator=DummyProbaRegressor(),
        cv=KFold(n_splits=2),
        param_distributions={"strategy": ["empirical"]},
        n_iter=1,
        scoring=None,
        backend_params={"n_jobs": 1},
    )
    rscv.fit(X, y)

    assert "mean_test_CRPS" in rscv.cv_results_.columns