11"""
22 conv_norm(kernel_size::Dims{2}, inplanes::Integer, outplanes::Integer,
33 activation = relu; norm_layer = BatchNorm, revnorm::Bool = false,
4- eps::Float32 = 1.0f-5, preact::Bool = false, use_norm::Bool = true,
5- stride::Integer = 1, pad::Integer = 0, dilation::Integer = 1,
6- groups::Integer = 1, [bias, weight, init])
4+ preact::Bool = false, stride::Integer = 1, pad::Integer = 0,
5+ dilation::Integer = 1, groups::Integer = 1, [bias, weight, init])
76
87Create a convolution + normalisation layer pair with activation.
98
@@ -14,33 +13,27 @@ Create a convolution + normalisation layer pair with activation.
   - `outplanes`: number of output feature maps
   - `activation`: the activation function for the final layer
   - `norm_layer`: the normalisation layer used. Note that using `identity` as the normalisation
-    layer will result in no normalisation being applied i.e. this will be the same as
-    setting `use_norm = false`.
+    layer will result in no normalisation being applied. (This is only compatible with `preact`
+    and `revnorm` both set to `false`.)
   - `revnorm`: set to `true` to place the normalisation layer before the convolution
   - `preact`: set to `true` to place the activation function before the normalisation layer
     (only compatible with `revnorm = false`)
-  - `use_norm`: set to `false` to disable normalisation
-    (only compatible with `revnorm = false` and `preact = false`)
+  - `bias`: bias for the convolution kernel. This is set to `false` by default if
+    `norm_layer` is not `identity` and `true` otherwise.
   - `stride`: stride of the convolution kernel
   - `pad`: padding of the convolution kernel
   - `dilation`: dilation of the convolution kernel
   - `groups`: groups for the convolution kernel
-  - `bias`: bias for the convolution kernel. This is set to `false` by default if
-    `use_norm = true`.
   - `weight`, `init`: initialization for the convolution kernel (see [`Flux.Conv`](@ref))
 """
 function conv_norm(kernel_size::Dims{2}, inplanes::Integer, outplanes::Integer,
                    activation = relu; norm_layer = BatchNorm, revnorm::Bool = false,
-                   eps::Float32 = 1.0f-5, preact::Bool = false, use_norm::Bool = true,
-                   bias = !use_norm, kwargs...)
-    # no normalization layer (including case where normalization layer is identity)
-    use_norm = use_norm && norm_layer !== identity
-    if !use_norm
+                   preact::Bool = false, bias = !(norm_layer !== identity), kwargs...)
+    # no normalization layer
+    if !(norm_layer !== identity)
         if preact || revnorm
-            throw(ArgumentError("`preact` only supported with `use_norm = true`. Check if
-                `use_norm = false` is intended. Note that it is also possible to trigger this
-                error if you set `norm_layer` to `identity` since that returns the same
-                behaviour as `use_norm`."))
+            throw(ArgumentError("`preact` only supported with `norm_layer !== identity`.
+                Check if a non-`identity` norm layer is intended."))
         else
             # early return if no norm layer is required
             return [Conv(kernel_size, inplanes => outplanes, activation; kwargs...)]
@@ -64,7 +57,7 @@ function conv_norm(kernel_size::Dims{2}, inplanes::Integer, outplanes::Integer,
     end
     # layers
     layers = [Conv(kernel_size, inplanes => outplanes, activations.conv; bias, kwargs...),
-              norm_layer(normplanes, activations.norm; ϵ = eps)]
+              norm_layer(normplanes, activations.norm)]
     return revnorm ? reverse(layers) : layers
 end
 
@@ -86,6 +79,7 @@ TensorFlow implementation.
8679"""
8780function basic_conv_bn (kernel_size:: Dims{2} , inplanes, outplanes, activation = relu;
8881 kwargs... )
89- return conv_norm (kernel_size, inplanes, outplanes, activation; norm_layer = BatchNorm,
90- eps = 1.0f-3 , kwargs... )
82+ # TensorFlow uses a default epsilon of 1e-3 for BatchNorm
83+ norm_layer = (args... ; kwargs... ) -> BatchNorm (args... ; ϵ = 1.0f-3 , kwargs... )
84+ return conv_norm (kernel_size, inplanes, outplanes, activation; norm_layer, kwargs... )
9185end
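
For reference, a minimal usage sketch of the new signature. This assumes Metalhead.jl's internal `conv_norm` as defined in this diff; `Chain`, `Conv`, and `relu` come from Flux, and the channel sizes here are arbitrary:

```julia
using Flux

# 3×3 convolution from 16 to 32 channels, followed by a BatchNorm that carries
# the relu activation. `conv_norm` returns a vector of layers, so splat it into a Chain:
model = Chain(conv_norm((3, 3), 16, 32, relu; pad = 1)...)

# With `norm_layer = identity`, only the Conv is returned and `bias` now
# defaults to `true` (there is no norm layer to absorb the shift):
plain = conv_norm((3, 3), 16, 32, relu; norm_layer = identity, pad = 1)
```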
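The `basic_conv_bn` change replaces the old `eps` keyword with a closure that pins the epsilon while preserving the `norm_layer(planes, activation)` calling convention expected by `conv_norm`. A standalone sketch of that pattern, assuming Flux's `BatchNorm` with its `ϵ` keyword and field:

```julia
using Flux

# Wrap BatchNorm so every call gets ϵ = 1.0f-3 without changing its signature:
norm_layer = (args...; kwargs...) -> BatchNorm(args...; ϵ = 1.0f-3, kwargs...)

bn = norm_layer(32, relu)   # BatchNorm over 32 channels with relu activation
@assert bn.ϵ == 1.0f-3      # the pinned, TensorFlow-style epsilon
```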