Skip to content

Commit 537e0ad

Browse files
committed
Revert the MobileNetV5 changes to maintain backward compatibility with the MobileNetV5 presets
1 parent 729f0af commit 537e0ad

File tree

3 files changed

+4
-8
lines changed

3 files changed

+4
-8
lines changed

keras_hub/src/models/gemma3n/gemma3n_backbone_test.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,7 @@ def test_saved_model(self, backbone_type):
155155
)
156156

157157
@parameterized.named_parameters(
158-
("multimodal", "multimodal", 5426, 7),
158+
("multimodal", "multimodal", 5450, 7),
159159
("text_only", "text_only", 350, 4),
160160
)
161161
def test_architecture_characteristics(

keras_hub/src/models/mobilenetv5/mobilenetv5_blocks.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -85,7 +85,7 @@ def build(self, input_shape):
8585
self.has_skip = (
8686
in_chs == self.filters and self.stride == 1
8787
) and not self.noskip
88-
use_bias = False
88+
use_bias = self.norm_layer == "rms_norm"
8989

9090
if self.dw_kernel_size_start:
9191
self.dw_start = ConvNormAct(
@@ -353,7 +353,7 @@ def build(self, input_shape):
353353
else:
354354
mid_chs = adjust_channels(in_chs * self.exp_ratio)
355355
groups = num_groups(self.group_size, mid_chs)
356-
use_bias = False
356+
use_bias = self.norm_layer == "rms_norm"
357357
self.conv_exp = ConvNormAct(
358358
mid_chs,
359359
self.exp_kernel_size,

keras_hub/src/models/mobilenetv5/mobilenetv5_layers.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -268,18 +268,14 @@ def __init__(
268268
gamma_initializer="ones",
269269
dtype=self.dtype_policy,
270270
)
271-
elif norm_layer == "layer_norm":
271+
else:
272272
ln_axis = [1, 2, 3]
273273
if self.data_format == "channels_first":
274274
ln_axis = [2, 3, 1]
275275
self.norm = keras.layers.LayerNormalization(
276276
axis=ln_axis,
277277
dtype=self.dtype_policy,
278278
)
279-
else:
280-
raise ValueError(
281-
f"Unsupported norm_layer in ConvNormAct: {norm_layer}"
282-
)
283279

284280
if self.apply_act:
285281
if act_layer == "gelu":

0 commit comments

Comments
 (0)