Bump compat to 0.6.0. Add eltype spec to onehot
JoeyT1994 committed Apr 9, 2024
1 parent ae165fb commit e92bc88
Showing 7 changed files with 16 additions and 19 deletions.
2 changes: 1 addition & 1 deletion Project.toml
@@ -18,4 +18,4 @@ SplitApplyCombine = "03a91e81-4c3e-53e1-a0a4-9c0c8f19dd66"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[compat]
ITensorNetworks = "0.4"
ITensorNetworks = "0.6.0"
6 changes: 1 addition & 5 deletions examples/2d_laplace_solver.jl
@@ -11,11 +11,7 @@ using NamedGraphs:
random_bfs_tree,
undirected_graph
using ITensors: ITensors, Index, siteinds, dim, tags, replaceprime!, MPO, MPS, inner
-using ITensorNetworks:
-  ITensorNetwork,
-  dmrg,
-  TTN,
-  maxlinkdim
+using ITensorNetworks: ITensorNetwork, dmrg, TTN, maxlinkdim
using Dictionaries: Dictionary
using SplitApplyCombine: group
using Random: seed!
3 changes: 2 additions & 1 deletion src/itensornetworkfunction.jl
@@ -53,7 +53,8 @@ function project(fitn::ITensorNetworkFunction, vertex_to_bit_value_map)
fitn = copy(fitn)
s = siteinds(fitn)
for v in keys(vertex_to_bit_value_map)
-    fitn[v] = fitn[v] * onehot(only(s[v]) => vertex_to_bit_value_map[v] + 1)
+    fitn[v] =
+      fitn[v] * onehot(eltype(fitn[v]), only(s[v]) => vertex_to_bit_value_map[v] + 1)
end
return fitn
end
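
This hunk is the "eltype spec" from the commit title: onehot otherwise defaults to Float64-valued entries, so passing eltype(fitn[v]) keeps the projector's element type aligned with the tensor it multiplies. A self-contained sketch of the same pattern; the index, tensor values, and bit value below are illustrative rather than taken from the package:

using ITensors: Index, ITensor, onehot

# A toy complex-valued tensor on a single two-dimensional "bit" index.
i = Index(2, "bit")
T = ITensor([1.0 + 2.0im, 3.0 - 1.0im], i)

# Passing eltype(T) makes the one-hot projector complex as well, mirroring
# the onehot(eltype(fitn[v]), ...) call in project above.
P = onehot(eltype(T), i => 2)
projected = T * P  # zero-index ITensor holding the i => 2 component of T
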
2 changes: 1 addition & 1 deletion src/itensornetworks_elementary_functions.jl
@@ -249,7 +249,7 @@ function polynomial_itensornetwork(
end

function random_itensornetwork(s::IndsNetwork, bit_map; kwargs...)
-  return ITensorNetworkFunction(randomITensorNetwork(s; kwargs...), bit_map)
+  return ITensorNetworkFunction(random_tensornetwork(s; kwargs...), bit_map)
end

const const_itn = const_itensornetwork
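
The call site changes because ITensorNetworks 0.6 renames randomITensorNetwork to random_tensornetwork. A minimal usage sketch mirroring the grid setup in test_itensorfunction.jl further down; the graph size and bond dimension here are illustrative:

using ITensors: siteinds
using ITensorNetworks: random_tensornetwork
using NamedGraphs: named_grid

# A 4x1 grid of spin-1/2 sites.
g = named_grid((4, 1))
s = siteinds("S=1/2", g)

# ITensorNetworks 0.6 spelling; link_space sets the internal bond dimension.
ψ = random_tensornetwork(s; link_space=2)
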
14 changes: 7 additions & 7 deletions src/itensornetworks_elementary_operators.jl
@@ -12,7 +12,7 @@ using ITensors:
prime,
noprime!,
contract
-using ITensorNetworks: IndsNetwork, ITensorNetwork, TTN, TreeTensorNetwork, combine_linkinds
+using ITensorNetworks: IndsNetwork, ITensorNetwork, TreeTensorNetwork, combine_linkinds, ttn

function plus_shift_ttn(
s::IndsNetwork, bit_map; dimension=default_dimension(), boundary_value=[0.0]
@@ -31,7 +31,7 @@ function plus_shift_ttn(
add!(ttn_op, 1.0, (string_site...)...)
end

-  return TTN(ttn_op, s; algorithm="svd")
+  return ttn(ttn_op, s; algorithm="svd")
end

function minus_shift_ttn(s::IndsNetwork, bit_map; dimension=default_dimension())
@@ -49,14 +49,14 @@ function minus_shift_ttn(s::IndsNetwork, bit_map; dimension=default_dimension())
add!(ttn_op, 1.0, (string_site...)...)
end

-  return TTN(ttn_op, s; algorithm="svd")
+  return ttn(ttn_op, s; algorithm="svd")
end

function no_shift_ttn(s::IndsNetwork)
ttn_op = OpSum()
string_site_full = [("I", v) for v in vertices(s)]
add!(ttn_op, 1.0, (string_site_full...)...)
-  return TTN(ttn_op, s; algorithm="svd")
+  return ttn(ttn_op, s; algorithm="svd")
end

function stencil(
@@ -145,15 +145,15 @@ Base.:*(fs::ITensorNetworkFunction...) = multiply(fs...)
function operate(
operator::TreeTensorNetwork, ψ::ITensorNetworkFunction; truncate_kwargs=(;), kwargs...
)
-  ψ_tn = TTN(itensornetwork(ψ))
+  ψ_tn = ttn(itensornetwork(ψ))
ψO_tn = noprime(contract(operator, ψ_tn; init=prime(copy(ψ_tn)), kwargs...))
ψO_tn = truncate(ψO_tn; truncate_kwargs...)

return ITensorNetworkFunction(ITensorNetwork(ψO_tn), bit_map(ψ))
end

function operate(operator::ITensorNetwork, ψ::ITensorNetworkFunction; kwargs...)
-  return operate(TTN(operator), ψ; kwargs...)
+  return operate(ttn(operator), ψ; kwargs...)
end

function operate(
@@ -169,5 +169,5 @@ end
function operate(
operators::Vector{ITensorNetwork{V}}, ψ::ITensorNetworkFunction; kwargs...
) where {V}
-  return operate(TTN.(operators), ψ; kwargs...)
+  return operate(ttn.(operators), ψ; kwargs...)
end
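
Throughout this file the upper-case TTN constructor is swapped for the lower-case ttn constructor used with ITensorNetworks 0.6. A minimal sketch of the OpSum-to-tree-tensor-network pattern the shift operators above rely on; the comb-tree geometry, site type, and identity operator are illustrative, and the imports assume OpSum and add! are reachable from ITensors as in this file:

using ITensors: OpSum, add!, siteinds
using ITensorNetworks: ttn
using NamedGraphs: named_comb_tree, vertices

# A small tree of spin-1/2 sites.
g = named_comb_tree((2, 3))
s = siteinds("S=1/2", g)

# Identity OpSum over every vertex, as in no_shift_ttn, converted via an SVD-based build.
os = OpSum()
string_site_full = [("I", v) for v in vertices(g)]
add!(os, 1.0, (string_site_full...)...)
identity_ttn = ttn(os, s; algorithm="svd")
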
4 changes: 2 additions & 2 deletions src/itensornetworksutils.jl
@@ -1,5 +1,5 @@
using ITensors: Index, dim, inds
-using ITensorNetworks: randomITensorNetwork, IndsNetwork
+using ITensorNetworks: random_tensornetwork, IndsNetwork

"""Build the order L tensor corresponding to fx(x): x ∈ [0,1]."""
function build_full_rank_tensor(L::Int64, fx::Function; base::Int64=2)
@@ -31,7 +31,7 @@ function c_tensor(phys_ind::Index, virt_inds::Vector)
end

function copy_tensor_network(s::IndsNetwork; linkdim::Int64=1)
-  tn = randomITensorNetwork(s; link_space=linkdim)
+  tn = random_tensornetwork(s; link_space=linkdim)
for v in vertices(tn)
virt_inds = setdiff(inds(tn[v]), Index[only(s[v])])
tn[v] = c_tensor(only(s[v]), virt_inds)
4 changes: 2 additions & 2 deletions test/test_itensorfunction.jl
@@ -4,7 +4,7 @@ using TensorNetworkFunctionals
using Graphs: SimpleGraph, uniform_tree
using NamedGraphs: NamedGraph, named_grid, vertices, named_comb_tree, rename_vertices
using ITensors: siteinds
-using ITensorNetworks: randomITensorNetwork
+using ITensorNetworks: random_tensornetwork
using Dictionaries: Dictionary
using SplitApplyCombine: group
using Random: seed!
@@ -16,7 +16,7 @@ using Distributions: Uniform
g = named_grid((L, 1))
s = siteinds("S=1/2", g)

-  ψ = randomITensorNetwork(s; link_space=2)
+  ψ = random_tensornetwork(s; link_space=2)

= ITensorNetworkFunction(ψ)
