diff --git a/Project.toml b/Project.toml index 7bc0f8b..3ea51c4 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "ManifoldDiff" uuid = "af67fdf4-a580-4b9f-bbec-742ef357defd" authors = ["Seth Axen ", "Mateusz Baran ", "Ronny Bergmann "] -version = "0.3.4" +version = "0.3.5" [deps] LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" diff --git a/src/subgradients.jl b/src/subgradients.jl index a09628c..61fd718 100644 --- a/src/subgradients.jl +++ b/src/subgradients.jl @@ -1,7 +1,7 @@ @doc raw""" - subgrad_distance(M, q, p; c = 1, atol=eps(eltype(p))]) - subgrad_distance!(M, X, q, p; c = 1, atol=eps(eltype(p))]) + subgrad_distance(M, q, p[, c = 2; atol = 0]) + subgrad_distance!(M, X, q, p[, c = 2; atol = 0]) compute the subgradient of the distance (in place of `X`) @@ -22,28 +22,41 @@ for ``c\neq 1`` or ``p\neq q``. Note that for the remaining case ``c=1``, # Optional -* `c` – (`1`) the exponent of the distance, i.e. the default is the distance +* `c` – (`2`) the exponent of the distance, i.e. the default is the squared distance -* `atol` – (`eps(eltype(p))`) the tolerance to use when evaluating the distance between `p` and `q`. +* `atol` – (`0`) the tolerance to use when evaluating the distance between `p` and `q`. 
""" -function subgrad_distance(M, q, p; c::Int = 1, atol = 0) +function subgrad_distance(M, q, p, c::Int = 2; atol = 0) if c == 2 return -log(M, p, q) elseif c == 1 && distance(M, q, p) ≤ atol - X = rand(M; vector_at = p) - (norm(M, p, X) > 1.0) && (X ./= norm(M, p, X)) - return X + return normal_cone_vector(M, p) else return -distance(M, p, q)^(c - 2) * log(M, p, q) end end -function subgrad_distance!(M, X, q, p; c::Int = 1, atol = 0) +function subgrad_distance!(M, X, q, p, c::Int = 2; atol = 0) log!(M, X, p, q) if c == 2 X .*= -one(eltype(X)) elseif c == 1 && distance(M, q, p) ≤ atol - ManifoldsBase.rand!(M, X; vector_at = p) - (norm(M, p, X) > 1.0) && (X ./= norm(M, p, X)) + normal_cone_vector!(M, X, p) else X .*= -distance(M, p, q)^(c - 2) end return X end +function normal_cone_vector(M, p) + Y = rand(M; vector_at = p) + if norm(M, p, Y) > 1.0 + Y ./= norm(M, p, Y) + Y .*= rand() + end + return Y +end +function normal_cone_vector!(M, Y, p) + ManifoldsBase.rand!(M, Y; vector_at = p) + if norm(M, p, Y) > 1.0 + Y ./= norm(M, p, Y) + Y .*= rand() + end + return Y +end diff --git a/test/test_subgradients.jl b/test/test_subgradients.jl index 9ea9e89..f8bcdc1 100644 --- a/test/test_subgradients.jl +++ b/test/test_subgradients.jl @@ -11,25 +11,31 @@ using Random w = p X = zero_vector(M, w) Random.seed!(42) - ManifoldDiff.subgrad_distance!(M, X, w, p) - Y = ManifoldDiff.subgrad_distance(M, w, p) + ManifoldDiff.subgrad_distance!(M, X, w, p, 1) + Random.seed!(42) + Y = ManifoldDiff.subgrad_distance(M, w, p, 1) + @test Y == X @test is_vector(M, p, X) @test norm(M, p, X) ≤ 1.0 + ManifoldDiff.subgrad_distance!(M, Y, w, p, 1) @test is_vector(M, p, Y) @test norm(M, p, Y) ≤ 1.0 Z = ManifoldDiff.grad_distance(M, p, r) - W = ManifoldDiff.subgrad_distance(M, p, r; c = 2) + W = ManifoldDiff.subgrad_distance(M, p, r) + @test Z == W + Z = ManifoldDiff.grad_distance(M, p, r, 2) + W = ManifoldDiff.subgrad_distance(M, p, r, 2) @test Z == W X = zero_vector(M, p) - 
ManifoldDiff.subgrad_distance!(M, X, p, q; c = 2) - Y = ManifoldDiff.subgrad_distance(M, p, q; c = 2) + ManifoldDiff.subgrad_distance!(M, X, p, q) + Y = ManifoldDiff.subgrad_distance(M, p, q) Z = [0.0, 0.0, -π / 2] # known solution @test X == Y @test X == Z - U = zero_vector(M, p) - ManifoldDiff.subgrad_distance!(M, U, p, q) - V = ManifoldDiff.subgrad_distance(M, p, q) + U = zero_vector(M, q) + ManifoldDiff.subgrad_distance!(M, U, p, q, 1) + V = ManifoldDiff.subgrad_distance(M, p, q, 1) W = -distance(M, q, p)^(-1) * log(M, q, p) # solution @test U == V @test U == W