-
Notifications
You must be signed in to change notification settings - Fork 179
Expand file tree
/
Copy pathbenchmark_baseline_regressors.py
More file actions
40 lines (33 loc) · 1.22 KB
/
benchmark_baseline_regressors.py
File metadata and controls
40 lines (33 loc) · 1.22 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
"""Benchmark script for baseline probabilistic regressors."""
import logging
import numpy as np
from sklearn.linear_model import LinearRegression
from skpro.metrics import PinballLoss
from skpro.regression.deterministic_reduction import DeterministicReductionRegressor
from skpro.regression.unconditional_distfit import UnconditionalDistfitRegressor
# --- Benchmark configuration -------------------------------------------------
# Configure logging explicitly: without this, the root logger defaults to
# WARNING and every logging.info() call below would be silently dropped,
# leaving the benchmark with no output at all.
logging.basicConfig(level=logging.INFO)

# Seed the generator so benchmark numbers are reproducible across runs —
# otherwise the reported losses are not comparable between invocations.
rng = np.random.default_rng(42)

# Generate synthetic regression data: 200 samples, 5 features; the target
# depends linearly on the first two features plus unit Gaussian noise.
X = rng.standard_normal((200, 5))
y = 3 * X[:, 0] - 2 * X[:, 1] + rng.standard_normal(200)

# Train/test split: first 150 rows for training, remaining 50 for testing.
X_train, X_test = X[:150], X[150:]
y_train, y_test = y[:150], y[150:]

# Baseline 1: unconditional distribution fit (name suggests it fits a single
# distribution to y, ignoring X — see skpro docs to confirm).
reg1 = UnconditionalDistfitRegressor()
reg1.fit(X_train, y_train)
dist1 = reg1.predict_proba(X_test)

# Baseline 2: a deterministic point predictor (linear regression) wrapped
# into a Gaussian predictive distribution.
reg2 = DeterministicReductionRegressor(LinearRegression(), distr_type="gaussian")
reg2.fit(X_train, y_train)
dist2 = reg2.predict_proba(X_test)

# Evaluate both baselines with the pinball (quantile) loss at three
# quantile levels, covering the tails and the median.
alphas = [0.1, 0.5, 0.9]
for alpha in alphas:
    loss1 = PinballLoss(alpha=alpha)(y_test, dist1)
    loss2 = PinballLoss(alpha=alpha)(y_test, dist2)
    logging.info(
        "Alpha=%s: UnconditionalDistfitRegressor pinball loss=%.4f, "
        "DeterministicReductionRegressor pinball loss=%.4f",
        alpha,
        loss1,
        loss2,
    )