@@ -27,12 +27,11 @@ function basicblock(inplanes::Integer, planes::Integer; stride::Integer = 1,
                     drop_block = identity, drop_path = identity,
                     attn_fn = planes -> identity)
     first_planes = planes ÷ reduction_factor
-    outplanes = planes
     conv_bn1 = conv_norm((3, 3), inplanes => first_planes, identity; norm_layer, revnorm,
                          stride, pad = 1)
-    conv_bn2 = conv_norm((3, 3), first_planes => outplanes, identity; norm_layer, revnorm,
+    conv_bn2 = conv_norm((3, 3), first_planes => planes, identity; norm_layer, revnorm,
                          pad = 1)
-    layers = [conv_bn1..., drop_block, activation, conv_bn2..., attn_fn(outplanes),
+    layers = [conv_bn1..., drop_block, activation, conv_bn2..., attn_fn(planes),
         drop_path]
     return Chain(filter!(!=(identity), layers)...)
 end
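Since `basicblock` has expansion 1, the block's output width is simply `planes`, which is why the `outplanes` alias can be dropped. A small illustrative sketch, assuming the block builder is reachable as a Metalhead.jl internal via the module prefix:

using Metalhead

# Residual branch mapping 64 input channels to `planes` = 128 output channels;
# the 3×3 convolutions keep the spatial size (pad = 1, default stride = 1).
branch = Metalhead.basicblock(64, 128)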
@@ -201,7 +200,7 @@ function basicblock_builder(block_repeats::AbstractVector{<:Integer};
                             expansion::Integer = 1, norm_layer = BatchNorm,
                             revnorm::Bool = false, activation = relu,
                             attn_fn = planes -> identity,
-                            drop_block_rate = 0.0, drop_path_rate = 0.0,
+                            drop_block_rate = nothing, drop_path_rate = nothing,
                             stride_fn = resnet_stride, planes_fn = resnet_planes,
                             downsample_tuple = (downsample_conv, downsample_identity))
     pathschedule = linear_scheduler(drop_path_rate; depth = sum(block_repeats))
@@ -236,7 +235,7 @@ function bottleneck_builder(block_repeats::AbstractVector{<:Integer};
                             expansion::Integer = 4, norm_layer = BatchNorm,
                             revnorm::Bool = false, activation = relu,
                             attn_fn = planes -> identity,
-                            drop_block_rate = 0.0, drop_path_rate = 0.0,
+                            drop_block_rate = nothing, drop_path_rate = nothing,
                             stride_fn = resnet_stride, planes_fn = resnet_planes,
                             downsample_tuple = (downsample_conv, downsample_identity))
     pathschedule = linear_scheduler(drop_path_rate; depth = sum(block_repeats))
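Both builders now hand the rate, which may be `nothing`, straight to `linear_scheduler`, so the scheduler needs a `nothing` method that yields a disabled per-block schedule. A minimal sketch of such a scheduler, written as an assumption about the surrounding Metalhead.jl helpers rather than a copy of them:

# Sketch only: assumes a linear ramp of the rate across the block depth, with
# `nothing` meaning "regularisation disabled" for every block.
linear_scheduler(drop_rate; depth::Integer, start_value = 0.0) =
    LinRange(start_value, drop_rate, depth)
linear_scheduler(drop_rate::Nothing; depth::Integer) = fill(nothing, depth)

linear_scheduler(0.1; depth = 4)      # 0.0, 0.0333…, 0.0667…, 0.1
linear_scheduler(nothing; depth = 4)  # nothing for every block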
@@ -295,8 +294,8 @@ function resnet(block_type, block_repeats::AbstractVector{<:Integer},
                 inchannels::Integer = 3, stem_fn = resnet_stem, connection = addact,
                 activation = relu, norm_layer = BatchNorm, revnorm::Bool = false,
                 attn_fn = planes -> identity, pool_layer = AdaptiveMeanPool((1, 1)),
-                use_conv::Bool = false, drop_block_rate = 0.0, drop_path_rate = 0.0,
-                dropout_rate = 0.0, nclasses::Integer = 1000, kwargs...)
+                use_conv::Bool = false, drop_block_rate = nothing, drop_path_rate = nothing,
+                dropout_rate = nothing, nclasses::Integer = 1000, kwargs...)
     # Build stem
     stem = stem_fn(; inchannels)
     # Block builder
@@ -319,8 +318,8 @@ function resnet(block_type, block_repeats::AbstractVector{<:Integer},
                                         downsample_tuple = downsample_opt,
                                         kwargs...)
     elseif block_type == bottle2neck
-        @assert drop_block_rate==0.0 "DropBlock not supported for `bottle2neck`. Set `drop_block_rate` to 0.0"
-        @assert drop_path_rate==0.0 "DropPath not supported for `bottle2neck`. Set `drop_path_rate` to 0.0"
+        @assert isnothing(drop_block_rate) "DropBlock not supported for `bottle2neck`. Set `drop_block_rate` to nothing"
+        @assert isnothing(drop_path_rate) "DropPath not supported for `bottle2neck`. Set `drop_path_rate` to nothing"
         @assert reduction_factor==1 "Reduction factor not supported for `bottle2neck`. Set `reduction_factor` to 1"
         get_layers = bottle2neck_builder(block_repeats; inplanes, cardinality, base_width,
                                          activation, norm_layer, revnorm, attn_fn,
@@ -347,7 +346,7 @@ const RESNET_CONFIGS = Dict(18 => (basicblock, [2, 2, 2, 2]),
                             50 => (bottleneck, [3, 4, 6, 3]),
                             101 => (bottleneck, [3, 4, 23, 3]),
                             152 => (bottleneck, [3, 8, 36, 3]))
-
+# larger ResNet-like models
 const LRESNET_CONFIGS = Dict(50 => (bottleneck, [3, 4, 6, 3]),
                              101 => (bottleneck, [3, 4, 23, 3]),
                              152 => (bottleneck, [3, 8, 36, 3]))
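A hypothetical usage sketch, not part of this commit, showing how these configs and the new `nothing` defaults fit together; the names below are Metalhead.jl internals and are assumed to be reachable only via the module prefix:

using Metalhead

block, repeats = Metalhead.RESNET_CONFIGS[18]   # (basicblock, [2, 2, 2, 2])

# DropBlock and stochastic depth now default to `nothing`, i.e. disabled:
model = Metalhead.resnet(block, repeats; nclasses = 1000)

# Opting in still takes a numeric rate, scheduled linearly across the blocks:
model_sd = Metalhead.resnet(block, repeats; drop_path_rate = 0.1)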