@@ -12,6 +12,7 @@ BINARY_ACTIVATIONS = filter(f -> hasmethod(f, Tuple{Float64, Float64}), ACTIVATIONS)
 @test rrelu(0.0) == 0.0
 @test elu(0.0) == 0.0
 @test gelu(0.0) == 0.0
+@test gelu_fast(0.0) == 0.0
 @test swish(0.0) == 0.0
 @test hardswish(0.0) == 0.0
 @test lisht(0.0) == 0.0
@@ -35,7 +36,8 @@ BINARY_ACTIVATIONS = filter(f -> hasmethod(f, Tuple{Float64, Float64}), ACTIVATIONS)
 @test relu6(1.0) == 1.0
 @test rrelu(1.0) == 1.0
 @test elu(1.0) == 1.0
-@test gelu(1.0) == 0.8411919906082768
+@test gelu(1.0) == 0.8413447460685429
+@test gelu_fast(1.0) == 0.8411919906082768
 @test swish(1.0) == sigmoid(1.0)
 @test hardswish(1.0) == hardsigmoid(1.0)
 @test lisht(1.0) ≈ 1.0 * tanh(1.0)
@@ -57,7 +59,8 @@ BINARY_ACTIVATIONS = filter(f -> hasmethod(f, Tuple{Float64, Float64}), ACTIVATIONS)
 @test relu6(-1.0) == 0.0
 @test -1/3.0 <= rrelu(-1.0) <= -1/8.0
 @test elu(-1.0) == exp(-1.0) - 1.0
-@test gelu(-1.0) ≈ -0.15880800939172324
+@test gelu(-1.0) == -0.15865525393145707
+@test gelu_fast(-1.0) ≈ -0.15880800939172324
 @test swish(-1.0) == -sigmoid(-1.0)
 @test hardswish(-1.0) == -hardsigmoid(-1.0)
 @test lisht(-1.0) ≈ -1.0 * tanh(-1.0)
         a == softsign && continue
         @test !isnan(a(Inf32))

-        a in [gelu, swish, hardswish, logcosh, mish] && continue
+        a in [gelu, gelu_fast, swish, hardswish, logcosh, mish] && continue
         @test !isnan(a(-Inf32))
     end
 end
0 commit comments