Skip to content

Commit 818b6e4

Browse files
authored
Remove support for MPS/MPO, Sweeps, AbstractObserver (#50)
1 parent 2391ee4 commit 818b6e4

40 files changed

+510
-402
lines changed

Project.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ authors = ["Matthew Fishman <[email protected]> and contributors"]
44
version = "0.2.2"
55

66
[deps]
7+
AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
78
Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
89
DataGraphs = "b5a273c3-7e6c-41f6-98bd-8d7f1525a36a"
910
Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4"

README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -273,8 +273,8 @@ julia> @visualize Z;
273273

274274
julia> contraction_sequence(Z)
275275
2-element Vector{Vector}:
276-
[(1, 1), (1, 2)]
277-
Any[(2, 1), Any[(2, 2), [(3, 1), (3, 2)]]]
276+
Key{Tuple{Int64, Int64}}[Key((1, 1)), Key((1, 2))]
277+
Any[Key((2, 1)), Any[Key((2, 2)), Key{Tuple{Int64, Int64}}[Key((3, 1)), Key((3, 2))]]]
278278

279279
julia> Z̃ = contract(Z, (1, 1) => (2, 1));
280280

examples/treetensornetworks/solvers/04_tdvp_observers.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ end
3636

3737
function measure_sz(; psi, bond, half_sweep)
3838
if bond == 1 && half_sweep == 2
39-
return expect(psi, "Sz"; sites=N ÷ 2)
39+
return expect(psi, "Sz"; vertices=[N ÷ 2])
4040
end
4141
return nothing
4242
end

examples/treetensornetworks/solvers/05_tdvp_nonuniform_timesteps.jl

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -37,11 +37,11 @@ println("\nResults")
3737
println("=======")
3838
print("step = ", 0)
3939
print(", time = ", zero(ComplexF64))
40-
print(", ⟨Sᶻ⟩ = ", round(expect(psi0, "Sz"; sites=N ÷ 2); digits=3))
40+
print(", ⟨Sᶻ⟩ = ", round(expect(psi0, "Sz"; vertices=[N ÷ 2]); digits=3))
4141
println()
4242
for n in 1:length(times)
4343
print("step = ", n)
4444
print(", time = ", round(times[n]; digits=3))
45-
print(", ⟨Sᶻ⟩ = ", round(expect(psis[n], "Sz"; sites=N ÷ 2); digits=3))
45+
print(", ⟨Sᶻ⟩ = ", round(expect(psis[n], "Sz"; vertices=[N ÷ 2]); digits=3))
4646
println()
4747
end

src/ITensorNetworks.jl

Lines changed: 5 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
module ITensorNetworks
22

3+
using AbstractTrees
34
using Compat
45
using DataGraphs
56
using Dictionaries
@@ -10,6 +11,7 @@ using IsApprox
1011
using ITensors
1112
using ITensors.ContractionSequenceOptimization
1213
using ITensors.ITensorVisualizationCore
14+
using ITensors.LazyApply
1315
using IterTools
1416
using KrylovKit: KrylovKit
1517
using NamedGraphs
@@ -56,6 +58,7 @@ function iterate(::AbstractDataGraph)
5658
)
5759
end
5860

61+
include("observers.jl")
5962
include("utils.jl")
6063
include("visualize.jl")
6164
include("graphs.jl")
@@ -65,7 +68,7 @@ include("lattices.jl")
6568
include("abstractindsnetwork.jl")
6669
include("indextags.jl")
6770
include("indsnetwork.jl")
68-
include("opsum.jl") # Requires IndsNetwork
71+
include("opsum.jl")
6972
include("sitetype.jl")
7073
include("abstractitensornetwork.jl")
7174
include("contraction_sequences.jl")
@@ -86,14 +89,6 @@ include(joinpath("treetensornetworks", "projttns", "abstractprojttn.jl"))
8689
include(joinpath("treetensornetworks", "projttns", "projttn.jl"))
8790
include(joinpath("treetensornetworks", "projttns", "projttnsum.jl"))
8891
include(joinpath("treetensornetworks", "projttns", "projttn_apply.jl"))
89-
# Compatibility of ITensors.MPS/MPO with tree sweeping routines
90-
include(joinpath("treetensornetworks", "solvers", "tree_patch.jl"))
91-
# Compatibility of ITensor observer and Observers
92-
# TODO: Delete this
93-
include(joinpath("treetensornetworks", "solvers", "update_observer.jl"))
94-
# Utilities for making it easier
95-
# to define solvers (like ODE solvers)
96-
# for TDVP
9792
include(joinpath("treetensornetworks", "solvers", "solver_utils.jl"))
9893
include(joinpath("treetensornetworks", "solvers", "applyexp.jl"))
9994
include(joinpath("treetensornetworks", "solvers", "tdvporder.jl"))
@@ -103,10 +98,7 @@ include(joinpath("treetensornetworks", "solvers", "tdvp_generic.jl"))
10398
include(joinpath("treetensornetworks", "solvers", "tdvp.jl"))
10499
include(joinpath("treetensornetworks", "solvers", "dmrg.jl"))
105100
include(joinpath("treetensornetworks", "solvers", "dmrg_x.jl"))
106-
include(joinpath("treetensornetworks", "solvers", "projmpo_apply.jl"))
107-
include(joinpath("treetensornetworks", "solvers", "contract_operator_state.jl"))
108-
include(joinpath("treetensornetworks", "solvers", "projmps2.jl"))
109-
include(joinpath("treetensornetworks", "solvers", "projmpo_mps2.jl"))
101+
include(joinpath("treetensornetworks", "solvers", "contract.jl"))
110102
include(joinpath("treetensornetworks", "solvers", "linsolve.jl"))
111103
include(joinpath("treetensornetworks", "solvers", "tree_sweeping.jl"))
112104

src/abstractitensornetwork.jl

Lines changed: 71 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -118,6 +118,10 @@ function LinearAlgebra.promote_leaf_eltypes(tn::AbstractITensorNetwork)
118118
return LinearAlgebra.promote_leaf_eltypes(itensors(tn))
119119
end
120120

121+
function trivial_space(tn::AbstractITensorNetwork)
122+
return trivial_space(tn[first(vertices(tn))])
123+
end
124+
121125
function ITensors.promote_itensor_eltype(tn::AbstractITensorNetwork)
122126
return LinearAlgebra.promote_leaf_eltypes(tn)
123127
end
@@ -133,13 +137,18 @@ function ITensors.convert_leaf_eltype(eltype::Type, tn::AbstractITensorNetwork)
133137
return tn
134138
end
135139

136-
# TODO: mimic ITensors.AbstractMPS implementation using map
140+
# TODO: Mimic ITensors.AbstractMPS implementation using map
141+
# TODO: Implement using `adapt`
137142
function NDTensors.convert_scalartype(eltype::Type{<:Number}, tn::AbstractITensorNetwork)
138143
tn = copy(tn)
139144
vertex_data(tn) .= ITensors.adapt.(Ref(eltype), vertex_data(tn))
140145
return tn
141146
end
142147

148+
function Base.complex(tn::AbstractITensorNetwork)
149+
return NDTensors.convert_scalartype(complex(LinearAlgebra.promote_leaf_eltypes(tn)), tn)
150+
end
151+
143152
#
144153
# Conversion to Graphs
145154
#
@@ -168,22 +177,48 @@ function IndsNetwork(tn::AbstractITensorNetwork)
168177
return is
169178
end
170179

171-
function siteinds(tn::AbstractITensorNetwork)
180+
# Alias
181+
indsnetwork(tn::AbstractITensorNetwork) = IndsNetwork(tn)
182+
183+
function external_indsnetwork(tn::AbstractITensorNetwork)
172184
is = IndsNetwork(underlying_graph(tn))
173185
for v in vertices(tn)
174186
is[v] = uniqueinds(tn, v)
175187
end
176188
return is
177189
end
178190

179-
function linkinds(tn::AbstractITensorNetwork)
191+
# For backwards compatibility
192+
# TODO: Delete this
193+
siteinds(tn::AbstractITensorNetwork) = external_indsnetwork(tn)
194+
195+
# External indsnetwork of the flattened network, with vertices
196+
# mapped back to `tn1`.
197+
function flatten_external_indsnetwork(
198+
tn1::AbstractITensorNetwork,
199+
tn2::AbstractITensorNetwork,
200+
)
201+
is = external_indsnetwork(sim(tn1; sites=[]) ⊗ tn2)
202+
flattened_is = IndsNetwork(underlying_graph(tn1))
203+
for v in vertices(flattened_is)
204+
# setindex_preserve_graph!(flattened_is, unioninds(is[v, 1], is[v, 2]), v)
205+
flattened_is[v] = unioninds(is[v, 1], is[v, 2])
206+
end
207+
return flattened_is
208+
end
209+
210+
function internal_indsnetwork(tn::AbstractITensorNetwork)
180211
is = IndsNetwork(underlying_graph(tn))
181212
for e in edges(tn)
182213
is[e] = commoninds(tn, e)
183214
end
184215
return is
185216
end
186217

218+
# For backwards compatibility
219+
# TODO: Delete this
220+
linkinds(tn::AbstractITensorNetwork) = internal_indsnetwork(tn)
221+
187222
#
188223
# Index access
189224
#
@@ -313,6 +348,22 @@ end
313348
# TODO: how to define this lazily?
314349
#norm(tn::AbstractITensorNetwork) = sqrt(inner(tn, tn))
315350

351+
function isapprox(
352+
x::AbstractITensorNetwork,
353+
y::AbstractITensorNetwork;
354+
atol::Real=0,
355+
rtol::Real=Base.rtoldefault(
356+
LinearAlgebra.promote_leaf_eltypes(x), LinearAlgebra.promote_leaf_eltypes(y), atol
357+
),
358+
)
359+
error("Not implemented")
360+
d = norm(x - y)
361+
if !isfinite(d)
362+
error("In `isapprox(x::AbstractITensorNetwork, y::AbstractITensorNetwork)`, `norm(x - y)` is not finite")
363+
end
364+
return d <= max(atol, rtol * max(norm(x), norm(y)))
365+
end
366+
316367
function contract(tn::AbstractITensorNetwork; sequence=vertices(tn), kwargs...)
317368
sequence_linear_index = deepmap(v -> vertex_to_parent_vertex(tn, v), sequence)
318369
return contract(Vector{ITensor}(tn); sequence=sequence_linear_index, kwargs...)
@@ -739,6 +790,23 @@ function site_combiners(tn::AbstractITensorNetwork{V}) where {V}
739790
return Cs
740791
end
741792

793+
function insert_missing_internal_inds(tn::AbstractITensorNetwork, edges; internal_inds_space=trivial_space(tn))
794+
tn = copy(tn)
795+
for e in edges
796+
if !hascommoninds(tn[src(e)], tn[dst(e)])
797+
iₑ = Index(internal_inds_space, edge_tag(e))
798+
X = onehot(iₑ => 1)
799+
tn[src(e)] *= X
800+
tn[dst(e)] *= dag(X)
801+
end
802+
end
803+
return tn
804+
end
805+
806+
function insert_missing_internal_inds(tn::AbstractITensorNetwork; internal_inds_space=trivial_space(tn))
807+
return insert_missing_internal_inds(tn, edges(tn); internal_inds_space)
808+
end
809+
742810
## # TODO: should this make sure that internal indices
743811
## # don't clash?
744812
## function hvncat(

src/apply.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ function ITensors.apply(
4747
o⃗::Vector{ITensor},
4848
ψ::AbstractITensorNetwork;
4949
cutoff,
50-
maxdim,
50+
maxdim=typemax(Int),
5151
normalize=false,
5252
ortho=false,
5353
)

src/contraction_sequences.jl

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,8 @@ end
44

55
function contraction_sequence(tn::AbstractITensorNetwork; kwargs...)
66
seq_linear_index = contraction_sequence(Vector{ITensor}(tn); kwargs...)
7-
# TODO: use Functors.fmap
8-
return deepmap(n -> vertices(tn)[n], seq_linear_index)
7+
# TODO: Use Functors.fmap?
8+
return deepmap(n -> Key(vertices(tn)[n]), seq_linear_index)
99
end
1010

1111
function contraction_sequence(::Algorithm"optimal", tn::Vector{ITensor})

src/expect.jl

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,17 +5,17 @@ function expect(
55
maxdim=nothing,
66
ortho=false,
77
sequence=nothing,
8-
sites=vertices(ψ),
8+
vertices=vertices(ψ),
99
)
1010
s = siteinds(ψ)
1111
ElT = promote_itensor_eltype(ψ)
12-
# ElT = ishermitian(ITensors.op(op, s[sites[1]])) ? real(ElT) : ElT
13-
res = Dictionary(sites, Vector{ElT}(undef, length(sites)))
12+
# ElT = ishermitian(ITensors.op(op, s[vertices[1]])) ? real(ElT) : ElT
13+
res = Dictionary(vertices, Vector{ElT}(undef, length(vertices)))
1414
if isnothing(sequence)
1515
sequence = contraction_sequence(inner_network(ψ, ψ; flatten=true))
1616
end
1717
normψ² = norm_sqr(ψ; sequence)
18-
for v in sites
18+
for v in vertices
1919
O = ITensor(Op(op, v), s)
2020
Oψ = apply(O, ψ; cutoff, maxdim, ortho)
2121
res[v] = contract_inner(ψ, Oψ; sequence) / normψ²

src/exports.jl

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,8 @@ export grid,
2323
vertex_path
2424

2525
# NamedGraphs
26-
export named_binary_tree,
26+
export Key,
27+
named_binary_tree,
2728
named_grid,
2829
is_tree,
2930
parent_vertex,
@@ -45,7 +46,6 @@ export IndsNetwork, union_all_inds
4546
# ITensorNetworks: itensornetwork.jl
4647
export AbstractITensorNetwork,
4748
ITensorNetwork,
48-
randomITensorNetwork,
4949
⊗,
5050
itensors,
5151
reverse_bfs_edges,
@@ -59,13 +59,17 @@ export AbstractITensorNetwork,
5959
combine_linkinds,
6060
subgraphs,
6161
reverse_bfs_edges,
62+
randomITensorNetwork,
63+
random_mps,
6264
# treetensornetwork
6365
default_root_vertex,
66+
mpo,
67+
mps,
6468
ortho_center,
6569
set_ortho_center,
6670
TreeTensorNetwork,
6771
TTN,
68-
randomTTN,
72+
random_ttn,
6973
ProjTTN,
7074
ProjTTNSum,
7175
ProjTTNApply,

0 commit comments

Comments
 (0)