Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions skpro/distributions/base/tests/test_multiindex.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,12 @@
import pytest

from skpro.distributions.normal import Normal
from skpro.tests.test_switch import run_test_module_changed

pytestmark = pytest.mark.skipif(
not run_test_module_changed("skpro.distributions"),
reason="run only if skpro.distributions has been changed",
)


@pytest.fixture
Expand Down
26 changes: 26 additions & 0 deletions skpro/distributions/tests/test_normal_mixture.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,16 @@

import numpy as np
import pandas as pd
import pytest

from skpro.distributions.normal_mixture import NormalMixture
from skpro.tests.test_switch import run_test_for_class


@pytest.mark.skipif(
not run_test_for_class(NormalMixture),
reason="run test only if tested object has changed",
)
def test_pi_is_normalized_per_row():
"""Mixture weights should be normalized row-wise at construction."""
pi = np.array([[0.3, 0.7], [1.0, 2.0]])
Expand All @@ -17,6 +23,10 @@ def test_pi_is_normalized_per_row():
assert np.allclose(d._pi.sum(axis=1), 1.0)


@pytest.mark.skipif(
not run_test_for_class(NormalMixture),
reason="run test only if tested object has changed",
)
def test_mean_and_var_match_closed_form_scalar():
"""Mean/variance should match closed-form mixture formulas in scalar case."""
pi = np.array([0.5, 0.5])
Expand All @@ -34,6 +44,10 @@ def test_mean_and_var_match_closed_form_scalar():
assert np.isclose(d.var(), expected_var)


@pytest.mark.skipif(
not run_test_for_class(NormalMixture),
reason="run test only if tested object has changed",
)
def test_single_component_reduces_to_normal_pdf_and_cdf():
"""With one active component, pdf/cdf should match that Normal component."""
pi = np.array([[1.0, 0.0]])
Expand All @@ -49,6 +63,10 @@ def test_single_component_reduces_to_normal_pdf_and_cdf():
assert np.isclose(cdf, 0.5)


@pytest.mark.skipif(
not run_test_for_class(NormalMixture),
reason="run test only if tested object has changed",
)
def test_rowwise_weights_change_rowwise_mean():
"""Per-sample weights should produce different means per row."""
pi = np.array([[0.9, 0.1], [0.1, 0.9]])
Expand All @@ -62,6 +80,10 @@ def test_rowwise_weights_change_rowwise_mean():
assert np.isclose(means[1], 9.0)


@pytest.mark.skipif(
not run_test_for_class(NormalMixture),
reason="run test only if tested object has changed",
)
def test_sampling_mean_matches_theoretical_mean():
"""Large-sample mean should approximate theoretical mixture mean."""
pi = np.array([[0.5, 0.5]])
Expand All @@ -78,6 +100,10 @@ def test_sampling_mean_matches_theoretical_mean():
assert abs(sample_mean - theoretical_mean) < 0.1


@pytest.mark.skipif(
not run_test_for_class(NormalMixture),
reason="run test only if tested object has changed",
)
def test_energy_returns_non_negative_dataframe():
"""Energy outputs should be non-negative and keep expected tabular shape."""
pi = np.array([[1.0, 0.0]])
Expand Down
24 changes: 24 additions & 0 deletions skpro/distributions/tests/test_proba_basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,10 @@ def test_proba_example():
assert one_row.shape == (1, 2)


@pytest.mark.skipif(
not run_test_module_changed("skpro.distributions"),
reason="run only if skpro.distributions has been changed",
)
@pytest.mark.parametrize("subsetter", ["loc", "iloc"])
def test_proba_subsetters_loc_iloc(subsetter):
"""Test one subsetting case for BaseDistribution."""
Expand Down Expand Up @@ -57,6 +61,10 @@ def test_proba_subsetters_loc_iloc(subsetter):
assert nss.shape == ()


@pytest.mark.skipif(
not run_test_module_changed("skpro.distributions"),
reason="run only if skpro.distributions has been changed",
)
def test_proba_subsetters_at_iat():
"""Test one subsetting case for BaseDistribution."""
from skpro.distributions.normal import Normal
Expand All @@ -75,6 +83,10 @@ def test_proba_subsetters_at_iat():
assert nss == n.loc[1, 1]


@pytest.mark.skipif(
not run_test_module_changed("skpro.distributions"),
reason="run only if skpro.distributions has been changed",
)
def test_proba_index_coercion():
"""Test index coercion for BaseDistribution."""
from skpro.distributions.normal import Normal
Expand Down Expand Up @@ -105,6 +117,10 @@ def test_proba_index_coercion():
assert n.columns.equals(pd.Index([1, 2, 3]))


@pytest.mark.skipif(
not run_test_module_changed("skpro.distributions"),
reason="run only if skpro.distributions has been changed",
)
@pytest.mark.skipif(
not _check_soft_dependencies("matplotlib", severity="none"),
reason="skip if matplotlib is not available",
Expand Down Expand Up @@ -170,6 +186,10 @@ def test_discrete_pmf_plotting():
), "Should plot at multiple support points"


@pytest.mark.skipif(
not run_test_module_changed("skpro.distributions"),
reason="run only if skpro.distributions has been changed",
)
def test_to_df_parametric():
"""Tests coercion to DataFrame via get_params_df and to_df."""
from skpro.distributions.normal import Normal
Expand Down Expand Up @@ -222,6 +242,10 @@ def test_to_df_parametric():
assert ix not in ["index", "columns"]


@pytest.mark.skipif(
not run_test_module_changed("skpro.distributions"),
reason="run only if skpro.distributions has been changed",
)
def test_head_tail():
"""Test head and tail utility functions."""
from skpro.distributions.normal import Normal
Expand Down
6 changes: 6 additions & 0 deletions skpro/regression/tests/test_bandwidth.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,12 @@
bw_scott_1d,
bw_silverman_1d,
)
from skpro.tests.test_switch import run_test_module_changed

pytestmark = pytest.mark.skipif(
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I would make this more specific to the containing modules

not run_test_module_changed("skpro.regression._bandwidth"),
reason="run only if skpro.regression._bandwidth has been changed",
)


def test_bandwidth_1d_methods_return_finite_positive_values():
Expand Down
27 changes: 27 additions & 0 deletions skpro/regression/tests/test_mapie_v1.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,26 @@
from sklearn.datasets import make_regression
from sklearn.linear_model import LinearRegression

from skpro.regression.conformal import (
MapieConformalizedQuantileRegressor,
MapieCrossConformalRegressor,
MapieSplitConformalRegressor,
)
from skpro.regression.jackknife import MapieJackknifeAfterBootstrapRegressor
from skpro.tests.test_switch import run_test_for_class

MAPIE_CLASSES = [
MapieSplitConformalRegressor,
MapieCrossConformalRegressor,
MapieJackknifeAfterBootstrapRegressor,
MapieConformalizedQuantileRegressor,
]


@pytest.mark.skipif(
not run_test_for_class(MAPIE_CLASSES),
reason="run test only if tested object has changed",
)
@pytest.mark.skipif(
not _check_soft_dependencies("mapie>=1.0", severity="none"),
reason="mapie>=1.0 not installed",
Expand All @@ -24,6 +43,10 @@ def test_mapie_v1_imports():
assert MapieConformalizedQuantileRegressor is not None


@pytest.mark.skipif(
not run_test_for_class(MAPIE_CLASSES),
reason="run test only if tested object has changed",
)
@pytest.mark.skipif(
not _check_soft_dependencies("mapie>=1.0", severity="none"),
reason="mapie>=1.0 not installed",
Expand All @@ -43,6 +66,10 @@ def test_mapie_v1_imports_from_top_level():
assert MapieConformalizedQuantileRegressor is not None


@pytest.mark.skipif(
not run_test_for_class(MAPIE_CLASSES),
reason="run test only if tested object has changed",
)
@pytest.mark.skipif(
not _check_soft_dependencies("mapie>=1.0", severity="none"),
reason="mapie>=1.0 not installed",
Expand Down
9 changes: 9 additions & 0 deletions skpro/regression/tests/test_ondil.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,13 @@
from skbase.utils.dependencies import _check_soft_dependencies

from skpro.regression.ondil import OndilOnlineGamlss
from skpro.tests.test_switch import run_test_for_class


@pytest.mark.skipif(
not run_test_for_class(OndilOnlineGamlss),
reason="run test only if tested object has changed",
)
@pytest.mark.skipif(
not _check_soft_dependencies(["ondil"], severity="none"),
reason="skip test if ondil is not installed in environment",
Expand All @@ -30,6 +35,10 @@ def test_ondil_instantiation_and_get_test_params():
assert isinstance(est, OndilOnlineGamlss)


@pytest.mark.skipif(
not run_test_for_class(OndilOnlineGamlss),
reason="run test only if tested object has changed",
)
@pytest.mark.skipif(
not _check_soft_dependencies(["ondil"], severity="none"),
reason="skip test if ondil is not installed in environment",
Expand Down
78 changes: 42 additions & 36 deletions skpro/regression/tests/test_pipeline_transform_chain.py
Original file line number Diff line number Diff line change
@@ -1,36 +1,42 @@
import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import FunctionTransformer

from skpro.regression.compose import Pipeline
from skpro.regression.residual import ResidualDouble


def test_transformer_chaining_in_predict():
    """Ensure transformers are applied sequentially in pipeline."""
    # Small deterministic fixture: 5 rows, single feature column "x".

    X = pd.DataFrame({"x": np.arange(5)})
    y = pd.Series(np.arange(5))

    # Stateless sklearn transformer applying np.exp elementwise;
    # reused twice so the pipeline should compose it with itself.
    exp = FunctionTransformer(np.exp)

    pipe = Pipeline(
        [
            ("exp1", exp),
            ("exp2", exp),
            ("reg", ResidualDouble(LinearRegression())),
        ]
    )

    pipe.fit(X, y)

    # run predict to ensure the pipeline works end-to-end
    y_pred = pipe.predict(X)

    # check that transformations were applied sequentially
    # NOTE(review): uses the private _transform hook — assumes Pipeline
    # exposes the chained transform this way; confirm against skpro API.
    Xt = pipe._transform(X)
    expected = np.exp(np.exp(X))

    assert np.allclose(Xt.values, expected.values)
    assert len(y_pred) == len(y)
import numpy as np
import pandas as pd
import pytest
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import FunctionTransformer

from skpro.regression.compose import Pipeline
from skpro.regression.residual import ResidualDouble
from skpro.tests.test_switch import run_test_for_class


@pytest.mark.skipif(
    not run_test_for_class([Pipeline, ResidualDouble]),
    reason="run test only if tested object has changed",
)
def test_transformer_chaining_in_predict():
    """Ensure transformers are applied sequentially in pipeline."""
    # 5-row fixture with a single feature column "x"
    features = pd.DataFrame({"x": np.arange(5)})
    target = pd.Series(np.arange(5))

    # the same elementwise exp transformer is chained twice,
    # so the composed transform should be exp(exp(x))
    exp_step = FunctionTransformer(np.exp)
    steps = [
        ("exp1", exp_step),
        ("exp2", exp_step),
        ("reg", ResidualDouble(LinearRegression())),
    ]
    pipe = Pipeline(steps)

    pipe.fit(features, target)

    # run predict to ensure the pipeline works end-to-end
    preds = pipe.predict(features)

    # check that transformations were applied sequentially
    transformed = pipe._transform(features)
    twice_exp = np.exp(np.exp(features))

    assert np.allclose(transformed.values, twice_exp.values)
    assert len(preds) == len(target)
Loading