Skip to content

Commit

Permalink
commented code
Browse files Browse the repository at this point in the history
  • Loading branch information
ngiann committed Aug 19, 2024
1 parent 7ef536f commit ce49649
Showing 1 changed file with 10 additions and 10 deletions.
20 changes: 10 additions & 10 deletions src/gplvmplus/unpack_gplvmplus.jl
Original file line number Diff line number Diff line change
Expand Up @@ -6,26 +6,26 @@ function unpack_gplvmplus(p, D, N, net, Q)

MARK = 0

Z = reshape(p[MARK+1:MARK+Q*N], Q, N); MARK += Q*N
Z = reshape(p[MARK+1:MARK+Q*N], Q, N); MARK += Q*N # latent coordinates

θ = log1pexp(p[MARK+1]); MARK += 1
θ = log1pexp(p[MARK+1]); MARK += 1 # GP lengthscale

β = log1pexp(p[MARK+1]) + 1; MARK += 1
β = log1pexp(p[MARK+1]) + 1; MARK += 1 # inverse noise

w = p[MARK+1:MARK+nwts]; MARK += nwts
w = p[MARK+1:MARK+nwts]; MARK += nwts # neural network weights

Λroot = Diagonal((p[MARK+1:MARK+N])); MARK += N
Λroot = Diagonal((p[MARK+1:MARK+N])); MARK += N # diagonal for parametrising covariance of posterior function values

α = log1pexp(p[MARK+1]); MARK += 1
α = log1pexp(p[MARK+1]); MARK += 1 # scaling coefficient inside exp(⋅) non-linearity

b = p[MARK+1]; MARK += 1
b = p[MARK+1]; MARK += 1 # shift coefficient inside exp(⋅) non-linearity

c = log1pexp.(p[MARK+1:MARK+N]); MARK += N
c = log1pexp.(p[MARK+1:MARK+N]); MARK += N # individual scaling coefficients

@assert(MARK == length(p))

μ = net(w, Z)
μ = net(w, Z) # posterior mean of latent function values parametrised by neural network

return Z, [1.0;θ], β, μ, Λroot, w, α, b, c
return Z, [1.0;θ], β, μ, Λroot, w, α, b, c # global amplitude fixed to 1.0 without loss of generality

end

0 comments on commit ce49649

Please sign in to comment.