Skip to content

Commit a0f6f44

Browse files
committed
rm: rm legacy CategoricalNet
1 parent 70b287f commit a0f6f44

File tree

4 files changed

+13
-183
lines changed

4 files changed

+13
-183
lines changed

sbi/neural_nets/estimators/__init__.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22
from sbi.neural_nets.estimators.categorical_net import (
33
CategoricalMADE,
44
CategoricalMassEstimator,
5-
CategoricalNet,
65
)
76
from sbi.neural_nets.estimators.flowmatching_estimator import FlowMatchingEstimator
87
from sbi.neural_nets.estimators.mixed_density_estimator import MixedDensityEstimator

sbi/neural_nets/estimators/categorical_net.py

Lines changed: 3 additions & 104 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@
88
from nflows.utils import torchutils
99
from torch import Tensor, nn
1010
from torch.distributions import Categorical
11-
from torch.nn import Sigmoid, Softmax
1211
from torch.nn import functional as F
1312

1413
from sbi.neural_nets.estimators.base import ConditionalDensityEstimator
@@ -87,7 +86,7 @@ def forward(self, inputs: Tensor, context: Optional[Tensor] = None) -> Tensor:
8786
condition: Conditioning variable. (batch_size, *condition_shape)
8887
8988
Returns:
90-
Predicted categorical probabilities. (batch_size, *input_shape,
89+
Predicted categorical logits. (batch_size, *input_shape,
9190
num_categories)
9291
"""
9392
embedded_context = self.embedding_net.forward(context)
@@ -149,119 +148,19 @@ def _initialize(self):
149148
pass
150149

151150

152-
class CategoricalNet(nn.Module):
153-
"""Conditional density (mass) estimation for a categorical random variable.
154-
155-
Takes as input parameters theta and learns the parameters p of a Categorical.
156-
157-
Defines log prob and sample functions.
158-
"""
159-
160-
def __init__(
161-
self,
162-
num_input: int,
163-
num_categories: int,
164-
num_hidden: int = 20,
165-
num_layers: int = 2,
166-
embedding_net: Optional[nn.Module] = None,
167-
):
168-
"""Initialize the neural net.
169-
170-
Args:
171-
num_input: number of input units, i.e., dimensionality of the features.
172-
num_categories: number of output units, i.e., number of categories.
173-
num_hidden: number of hidden units per layer.
174-
num_layers: number of hidden layers.
175-
embedding_net: embedding net for input.
176-
"""
177-
super().__init__()
178-
179-
self.num_hidden = num_hidden
180-
self.num_input = num_input
181-
self.activation = Sigmoid()
182-
self.softmax = Softmax(dim=1)
183-
self.num_categories = num_categories
184-
self.num_variables = 1
185-
186-
# Maybe add embedding net in front.
187-
if embedding_net is not None:
188-
self.input_layer = nn.Sequential(
189-
embedding_net, nn.Linear(num_input, num_hidden)
190-
)
191-
else:
192-
self.input_layer = nn.Linear(num_input, num_hidden)
193-
194-
# Repeat hidden units hidden layers times.
195-
self.hidden_layers = nn.ModuleList()
196-
for _ in range(num_layers):
197-
self.hidden_layers.append(nn.Linear(num_hidden, num_hidden))
198-
199-
self.output_layer = nn.Linear(num_hidden, num_categories)
200-
201-
def forward(self, condition: Tensor) -> Tensor:
202-
"""Return categorical probability predicted from a batch of inputs.
203-
204-
Args:
205-
condition: batch of context parameters for the net.
206-
207-
Returns:
208-
Tensor: batch of predicted categorical probabilities.
209-
"""
210-
# forward path
211-
condition = self.activation(self.input_layer(condition))
212-
213-
# iterate n hidden layers, input condition and calculate tanh activation
214-
for layer in self.hidden_layers:
215-
condition = self.activation(layer(condition))
216-
217-
return self.softmax(self.output_layer(condition))
218-
219-
def log_prob(self, input: Tensor, condition: Tensor) -> Tensor:
220-
"""Return categorical log probability of categories input, given condition.
221-
222-
Args:
223-
input: categories to evaluate.
224-
condition: parameters.
225-
226-
Returns:
227-
Tensor: log probs with shape (input.shape[0],)
228-
"""
229-
# Predict categorical ps and evaluate.
230-
ps = self.forward(condition)
231-
# Squeeze the last dimension (event dim) because `Categorical` has
232-
# `event_shape=()` but our data usually has an event_shape of `(1,)`.
233-
return Categorical(probs=ps).log_prob(input.squeeze(dim=-1))
234-
235-
def sample(self, sample_shape: torch.Size, condition: Tensor) -> Tensor:
236-
"""Returns samples from categorical random variable with probs predicted from
237-
the neural net.
238-
239-
Args:
240-
sample_shape: number of samples to obtain.
241-
condition: batch of parameters for prediction.
242-
243-
Returns:
244-
Tensor: Samples with shape (num_samples, 1)
245-
"""
246-
247-
# Predict Categorical ps and sample.
248-
ps = self.forward(condition)
249-
return Categorical(probs=ps).sample(sample_shape=sample_shape)
250-
251-
252151
class CategoricalMassEstimator(ConditionalDensityEstimator):
253152
"""Conditional density (mass) estimation for a categorical random variable.
254153
255154
The event_shape of this class is `()`.
256155
"""
257156

258157
def __init__(
259-
self, net: CategoricalNet, input_shape: torch.Size, condition_shape: torch.Size
158+
self, net: CategoricalMADE, input_shape: torch.Size, condition_shape: torch.Size
260159
) -> None:
261160
"""Initialize the mass estimator.
262161
263162
Args:
264-
net: CategoricalNet.
163+
net: CategoricalMADE.
265164
input_shape: Shape of the input data.
266165
condition_shape: Shape of the condition data
267166
"""

sbi/neural_nets/net_builders/categorial.py

Lines changed: 0 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99
from sbi.neural_nets.estimators import (
1010
CategoricalMADE,
1111
CategoricalMassEstimator,
12-
CategoricalNet,
1312
)
1413
from sbi.neural_nets.estimators.mixed_density_estimator import _is_discrete
1514
from sbi.utils.nn_utils import get_numel
@@ -18,55 +17,6 @@
1817

1918

2019
def build_categoricalmassestimator(
21-
batch_x: Tensor,
22-
batch_y: Tensor,
23-
z_score_x: Optional[str] = "none",
24-
z_score_y: Optional[str] = "independent",
25-
num_hidden: int = 20,
26-
num_layers: int = 2,
27-
embedding_net: nn.Module = nn.Identity(),
28-
):
29-
"""Returns a density estimator for a categorical random variable.
30-
31-
Args:
32-
batch_x: A batch of input data.
33-
batch_y: A batch of condition data.
34-
z_score_x: Whether to z-score the input data.
35-
z_score_y: Whether to z-score the condition data.
36-
num_hidden: Number of hidden units per layer.
37-
num_layers: Number of hidden layers.
38-
embedding_net: Embedding net for y.
39-
"""
40-
41-
if z_score_x != "none":
42-
raise ValueError("Categorical input should not be z-scored.")
43-
44-
check_data_device(batch_x, batch_y)
45-
if batch_x.shape[1] > 1:
46-
raise NotImplementedError("CategoricalMassEstimator only supports 1D input.")
47-
num_categories = unique(batch_x).numel()
48-
dim_condition = get_numel(batch_y, embedding_net=embedding_net)
49-
50-
z_score_y_bool, structured_y = z_score_parser(z_score_y)
51-
if z_score_y_bool:
52-
embedding_net = nn.Sequential(
53-
standardizing_net(batch_y, structured_y), embedding_net
54-
)
55-
56-
categorical_net = CategoricalNet(
57-
num_input=dim_condition,
58-
num_categories=num_categories,
59-
num_hidden=num_hidden,
60-
num_layers=num_layers,
61-
embedding_net=embedding_net,
62-
)
63-
64-
return CategoricalMassEstimator(
65-
categorical_net, input_shape=batch_x[0].shape, condition_shape=batch_y[0].shape
66-
)
67-
68-
69-
def build_autoregressive_categoricalmassestimator(
7020
batch_x: Tensor,
7121
batch_y: Tensor,
7222
z_score_x: Optional[str] = "none",

sbi/neural_nets/net_builders/mnle.py

Lines changed: 10 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,6 @@
1313
_separate_input,
1414
)
1515
from sbi.neural_nets.net_builders.categorial import (
16-
build_autoregressive_categoricalmassestimator,
1716
build_categoricalmassestimator,
1817
)
1918
from sbi.neural_nets.net_builders.flow import (
@@ -59,7 +58,6 @@ def build_mnle(
5958
z_score_x: Optional[str] = "independent",
6059
z_score_y: Optional[str] = "independent",
6160
flow_model: str = "nsf",
62-
categorical_model: str = "mlp",
6361
num_categorical_columns: Optional[Tensor] = None,
6462
embedding_net: nn.Module = nn.Identity(),
6563
combined_embedding_net: Optional[nn.Module] = None,
@@ -157,32 +155,16 @@ def build_mnle(
157155
combined_condition = torch.cat([disc_x, embedded_batch_y], dim=-1)
158156

159157
# Set up a categorical RV neural net for modelling the discrete data.
160-
if categorical_model == "made":
161-
discrete_net = build_autoregressive_categoricalmassestimator(
162-
disc_x,
163-
batch_y,
164-
z_score_x="none", # discrete data should not be z-scored.
165-
z_score_y="none", # y-embedding net already z-scores.
166-
num_hidden=hidden_features,
167-
num_layers=hidden_layers,
168-
embedding_net=embedding_net,
169-
num_categories=num_categorical_columns,
170-
)
171-
elif categorical_model == "mlp":
172-
assert num_disc == 1, "MLP only supports 1D input."
173-
discrete_net = build_categoricalmassestimator(
174-
disc_x,
175-
batch_y,
176-
z_score_x="none", # discrete data should not be z-scored.
177-
z_score_y="none", # y-embedding net already z-scores.
178-
num_hidden=hidden_features,
179-
num_layers=hidden_layers,
180-
embedding_net=embedding_net,
181-
)
182-
else:
183-
raise ValueError(
184-
f"Unknown categorical net {categorical_model}. Must be 'made' or 'mlp'."
185-
)
158+
discrete_net = build_categoricalmassestimator(
159+
disc_x,
160+
batch_y,
161+
z_score_x="none", # discrete data should not be z-scored.
162+
z_score_y="none", # y-embedding net already z-scores.
163+
num_hidden=hidden_features,
164+
num_layers=hidden_layers,
165+
embedding_net=embedding_net,
166+
num_categories=num_categorical_columns,
167+
)
186168

187169
if combined_embedding_net is None:
188170
# set up linear embedding net for combining discrete and continuous

0 commit comments

Comments
 (0)