From 9367b9577168caa1e766b096db204c754d3128b3 Mon Sep 17 00:00:00 2001
From: Carlo Lucibello
Date: Sat, 12 Oct 2024 19:27:06 +0200
Subject: [PATCH] fixes

---
 test/ext_cuda/layers.jl | 4 +---
 test/ext_cuda/losses.jl | 5 +++--
 test/test_utils.jl      | 3 ++-
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/test/ext_cuda/layers.jl b/test/ext_cuda/layers.jl
index c148334dcf..0d985e59d1 100644
--- a/test/ext_cuda/layers.jl
+++ b/test/ext_cuda/layers.jl
@@ -11,9 +11,7 @@ end
 
-const ACTIVATIONS = [identity, relu, tanh,
-                     sigmoid, exp, softplus,
-                     elu, selu]
+const ACTIVATIONS = [identity, tanh, softplus, elu]
 
 function gpu_gradtest(name::String, layers::Vector, x_cpu, args...; test_cpu = true, test_mode = false)
   @testset "$name GPU grad tests" begin
diff --git a/test/ext_cuda/losses.jl b/test/ext_cuda/losses.jl
index b5ba4d2f4d..e787e64881 100644
--- a/test/ext_cuda/losses.jl
+++ b/test/ext_cuda/losses.jl
@@ -27,8 +27,9 @@ y = [1 0 0 0 1
 @test focal_loss(x, y) ≈ focal_loss(gpu(x), gpu(y))
 
 @testset "GPU: $loss" for loss in ALL_LOSSES
-    x = rand(Float32, 3,4)
-    y = rand(Float32, 3,4)
+    # stay away from the boundaries to avoid problems with finite-difference gradients
+    x = 0.1f0 .+ 0.8f0 .* rand(Float32, 3, 4)
+    y = 0.1f0 .+ 0.8f0 .* rand(Float32, 3, 4)
 
     @test loss(x, y) ≈ loss(gpu(x), gpu(y))
     test_gradients(loss, x, y, test_gpu=true, test_grad_f=false)
diff --git a/test/test_utils.jl b/test/test_utils.jl
index 522def9f96..a12d5fed66 100644
--- a/test/test_utils.jl
+++ b/test/test_utils.jl
@@ -14,7 +14,8 @@ const ALL_LOSSES = [Flux.Losses.mse, Flux.Losses.mae, Flux.Losses.msle,
 
 function finitediff_withgradient(f, x...)
     y = f(x...)
-    fdm = FiniteDifferences.central_fdm(5, 1)
+    # limit how far the stencil evaluates from the input to avoid domain errors
+    fdm = FiniteDifferences.central_fdm(5, 1, max_range=1e-2)
     return y, FiniteDifferences.grad(fdm, f, x...)
 end
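
A note on the losses.jl change: the losses in ALL_LOSSES are checked against finite differences, and several of them (crossentropy, for instance) take the log of their inputs. If a value drawn by rand lands at or very close to 0, a central-difference stencil perturbs it in both directions and can evaluate the loss outside its valid domain, or produce a badly inaccurate estimate. Sampling in [0.1, 0.9] keeps every stencil point strictly inside (0, 1). A minimal sketch of the failure mode (illustrative only, not part of the patch; the choice of loss and values is arbitrary):

    using Flux, FiniteDifferences

    fdm = FiniteDifferences.central_fdm(5, 1)

    # Near the boundary: the stencil around 1f-8 can evaluate crossentropy at
    # negative arguments, where log is undefined, so the estimate is
    # unreliable or may throw.
    x_bad = Float32[1f-8 0.3; 0.7 0.5]
    y_bad = Float32[0.5 0.5; 0.5 0.5]
    # FiniteDifferences.grad(fdm, Flux.Losses.crossentropy, x_bad, y_bad)

    # Away from the boundary: every stencil point stays inside (0, 1).
    x_ok = 0.1f0 .+ 0.8f0 .* rand(Float32, 3, 4)
    y_ok = 0.1f0 .+ 0.8f0 .* rand(Float32, 3, 4)
    gx, gy = FiniteDifferences.grad(fdm, Flux.Losses.crossentropy, x_ok, y_ok)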
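
A note on the test_utils.jl change: central_fdm(5, 1) adapts its step size by default, and the adaptation can evaluate the function noticeably far from the input point. The max_range keyword caps the distance of every evaluation from the input, which, combined with inputs kept in [0.1, 0.9], confines all evaluations to the losses' domains. A minimal sketch of the effect (illustrative, with an arbitrary function and point, not part of the patch):

    using FiniteDifferences

    f(x) = log(x)   # defined only for x > 0

    fdm_default = FiniteDifferences.central_fdm(5, 1)
    fdm_bounded = FiniteDifferences.central_fdm(5, 1, max_range=1e-2)

    x0 = 2e-2
    # fdm_default(f, x0)   # the adaptive step may probe x <= 0 and hit a DomainError
    fdm_bounded(f, x0)     # all evaluations stay within x0 ± 1e-2, i.e. inside (0, 3e-2]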