
Commit

fixes
CarloLucibello committed Oct 12, 2024
1 parent 18ab9b1 commit 9367b95
Showing 3 changed files with 6 additions and 6 deletions.
test/ext_cuda/layers.jl: 4 changes (1 addition, 3 deletions)
@@ -11,9 +11,7 @@
end


-const ACTIVATIONS = [identity, relu, tanh,
-                     sigmoid, exp, softplus,
-                     elu, selu]
+const ACTIVATIONS = [identity, tanh, softplus, elu]

function gpu_gradtest(name::String, layers::Vector, x_cpu, args...; test_cpu = true, test_mode = false)
@testset "$name GPU grad tests" begin
test/ext_cuda/losses.jl: 5 changes (3 additions, 2 deletions)
@@ -27,8 +27,9 @@ y = [1 0 0 0 1
@test focal_loss(x, y) ≈ focal_loss(gpu(x), gpu(y))

@testset "GPU: $loss" for loss in ALL_LOSSES
-x = rand(Float32, 3,4)
-y = rand(Float32, 3,4)
+# let's stay far from the boundaries to avoid problems with finite-difference gradients
+x = 0.1f0 .+ 0.8f0 .* rand(Float32, 3, 4)
+y = 0.1f0 .+ 0.8f0 .* rand(Float32, 3, 4)
@test loss(x, y) ≈ loss(gpu(x), gpu(y))

test_gradients(loss, x, y, test_gpu=true, test_grad_f = false)
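
The comment about staying away from the boundaries deserves a brief illustration: central finite differences evaluate the loss at several probe points around the input, so values very close to 0 or 1 can push those probes outside the domain of losses built on log. The sketch below is illustrative only and not part of the commit; the function f, the step h, and the values are assumptions chosen for the example.

    # Sketch: a 5-point central stencil queries f at x ± k*h. If x is near 0,
    # some probe points become negative and log throws a DomainError.
    f(x) = -log(x)          # stand-in for a per-element crossentropy-style term
    x_bad = 1f-4            # input right next to the domain boundary
    h = 1f-3                # a typical finite-difference step size
    x_bad - 2h < 0          # true: the stencil would ask for log of a negative number

    # Sampling in [0.1, 0.9], as the updated test does, keeps every probe in-domain:
    x_safe = 0.1f0 .+ 0.8f0 .* rand(Float32, 3, 4)
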
test/test_utils.jl: 3 changes (2 additions, 1 deletion)
@@ -14,7 +14,8 @@ const ALL_LOSSES = [Flux.Losses.mse, Flux.Losses.mae, Flux.Losses.msle,

function finitediff_withgradient(f, x...)
y = f(x...)
-fdm = FiniteDifferences.central_fdm(5, 1)
+# We limit the stencil range to avoid domain errors
+fdm = FiniteDifferences.central_fdm(5, 1, max_range=1e-2)
return y, FiniteDifferences.grad(fdm, f, x...)
end

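For context, max_range in FiniteDifferences.jl caps how far the stencil points may move away from the evaluation point. A minimal usage sketch, with an assumed loss-like function and input values that are not taken from the test suite:

    using FiniteDifferences

    # Capping the stencil range keeps probe points inside the domain of log
    # even when a coordinate sits close to 0.
    fdm = FiniteDifferences.central_fdm(5, 1, max_range=1e-2)
    f(x) = sum(-log.(x))                       # stand-in for a crossentropy-style loss
    x = [0.05, 0.2, 0.9]
    grads = FiniteDifferences.grad(fdm, f, x)  # returns a tuple with the gradient w.r.t. x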
