Skip to content
This repository has been archived by the owner on Jul 7, 2024. It is now read-only.

Implement evolve of an MPS with an MPO #35

Open
wants to merge 18 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 4 additions & 5 deletions Project.toml
Original file line number Diff line number Diff line change
@@ -1,14 +1,13 @@
name = "Qrochet"
uuid = "881a8f22-b5d0-48b0-96e5-a244b33f36d4"
authors = ["Sergio Sánchez Ramírez <[email protected]>"]
version = "0.1.1"
version = "0.1.2"

[deps]
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
Muscle = "21fe5c4b-a943-414d-bf3e-516f24900631"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Tenet = "85d41934-b9cd-44e1-8730-56d86f15f3ec"
ValSplit = "0625e100-946b-11ec-09cd-6328dd093154"

[weakdeps]
ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
Expand All @@ -24,9 +23,9 @@ QrochetYaoExt = "Yao"

[compat]
ChainRulesCore = "1.0"
ChainRulesTestUtils = "1"
Muscle = "0.1"
Quac = "0.3"
Tenet = "0.5"
ValSplit = "0.1"
Yao = "0.8"
Tenet = "0.6"
Yao = "0.8, 0.9"
julia = "1.9"
4 changes: 2 additions & 2 deletions docs/src/quantum.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@ nsites
## Queries

```@docs
Tenet.select(::Quantum, ::Val{:index}, ::Site)
Tenet.select(::Quantum, ::Val{:tensor}, ::Site)
Tenet.inds(::Quantum; kwargs...)
Tenet.tensors(::Quantum; kwargs...)
```

## Connecting `Quantum` Tensor Networks
Expand Down
16 changes: 16 additions & 0 deletions examples/Project.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
[deps]
AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
ClusterManagers = "34f1f09b-3a8b-5176-ab39-66d58a4d544e"
Dagger = "d58978e5-989f-55fb-8d15-ea34adc7bf54"
Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b"
EinExprs = "b1794770-133b-4de1-afb4-526377e9f4c5"
IterTools = "c8e1da08-722c-5040-9ed9-7db0dc04731e"
KaHyPar = "2a6221f6-aa48-11e9-3542-2d9e0ef01880"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
ProgressMeter = "92933f4c-e287-5a05-a399-4b506db050ca"
Qrochet = "881a8f22-b5d0-48b0-96e5-a244b33f36d4"
Reactant = "3c362404-f566-11ee-1572-e11a4b42c853"
Revise = "295af30f-e4ad-537b-8983-00126c2a3abe"
Tenet = "85d41934-b9cd-44e1-8730-56d86f15f3ec"
TimespanLogging = "a526e669-04d3-4846-9525-c66122c55f63"
Yao = "5872b779-8223-5990-8dd0-5abbb0748c8c"
71 changes: 71 additions & 0 deletions examples/dagger.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
using Tenet
using Qrochet
using Yao: Yao
using EinExprs
using AbstractTrees
using Distributed
using Dagger
using TimespanLogging
using KaHyPar

# Example: contract a random-circuit amplitude with Dagger-distributed tensors.

# Parameter forwarded to Yao's random "Google 53"-style circuit builder
# (presumably the circuit depth — TODO confirm against Yao.EasyBuild docs).
m = 10
circuit = Yao.EasyBuild.rand_google53(m);
H = Quantum(circuit)
# |0...0> product state: one [1, 0] local vector per qubit of the circuit.
ψ = Product(fill([1, 0], Yao.nqubits(circuit)))
# Sandwich the circuit between the state and its adjoint, i.e. <ψ| H |ψ>.
qtn = merge(Quantum(ψ), H, Quantum(ψ)')
tn = Tenet.TensorNetwork(qtn)

# Subtrees whose intermediate tensors all have fewer dims than this threshold
# are later collapsed into a single leaf (see the grouppath pass below).
contract_smaller_dims = 20
# Exponent for the target slice size: slices are searched at size 2^target_size.
target_size = 24

Tenet.transform!(tn, Tenet.ContractSimplification())
# Contraction-path search via hypergraph partitioning (KaHyPar backend).
path = einexpr(
    tn,
    optimizer = HyPar(
        parts = 2,
        imbalance = 0.41,
        edge_scaler = (ind_size) -> 10 * Int(round(log2(ind_size))),
        vertex_scaler = (prod_size) -> 100 * Int(round(exp2(prod_size))),
    ),
);

# Cost diagnostics for the unmodified contraction path.
max_dims_path = @show maximum(ndims, Branches(path))
flops_path = @show mapreduce(flops, +, Branches(path))
@show log10(flops_path)

# Work on a copy so the original path stays available for `EinExprs.select` below.
grouppath = deepcopy(path);
"""
    recursiveforeach!(f, expr)

Apply `f` to `expr` and then, depth-first, to every descendant returned by
`args`. The `!` marks that `f` is expected to mutate the visited expressions.
"""
function recursiveforeach!(f, expr)
    f(expr)
    for child in args(expr)
        recursiveforeach!(f, child)
    end
    return nothing
end
# Collect every index size from the leaves into a single lookup table.
sizedict = merge(Iterators.map(i -> i.size, Leaves(path))...);
# Coarsen the path: wherever all children of a node are small (fewer than
# `contract_smaller_dims` dims), drop the children so the node becomes a leaf.
recursiveforeach!(grouppath) do expr
    merge!(expr.size, sizedict)
    if all(<(contract_smaller_dims) ∘ ndims, expr.args)
        empty!(expr.args)
    end
end

# Cost diagnostics for the coarsened path, and the indices chosen for slicing
# so that each slice stays at (roughly) 2^target_size elements.
max_dims_grouppath = maximum(ndims, Branches(grouppath))
flops_grouppath = mapreduce(flops, +, Branches(grouppath))
targetinds = findslices(SizeScorer(), grouppath, size = 2^(target_size));

# For each coarsened leaf, recover the matching subexpression of the original
# (fine-grained) path; `only` asserts the match is unique.
subexprs = map(Leaves(grouppath)) do expr
    EinExprs.select(path, tuple(head(expr)...)) |> only
end

addprocs(3)
@everywhere using Dagger, Tenet

# Contract each subexpression locally, then wrap the result in a Dagger
# distributed array, partitioned along the slicing indices (`targetinds`).
disttn = Tenet.TensorNetwork(
    map(subexprs) do subexpr
        Tensor(
            distribute( # data
                parent(Tenet.contract(tn; path = subexpr)),
                Blocks([i ∈ targetinds ? 1 : 2 for i in head(subexpr)]...),
            ),
            head(subexpr), # inds
        )
    end,
)
# Final contraction over the distributed tensors, following the coarse path.
@show Tenet.contract(disttn; path = grouppath)
103 changes: 103 additions & 0 deletions examples/distributed.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
using Yao: Yao
using Qrochet
using Tenet
using EinExprs
using KaHyPar
using Random
using Distributed
using ClusterManagers
using AbstractTrees

# Example: slice a circuit-amplitude contraction and evaluate the slices in
# parallel across a Slurm allocation, compiling the slice kernel with Reactant.

n = 64     # number of qubits
depth = 6  # number of two-qubit gate layers

circuit = Yao.chain(n)

# Each layer applies FSim gates across a fresh random pairing of the qubits.
for _ in 1:depth
    perm = randperm(n)

    for (i, j) in Iterators.partition(perm, 2)
        push!(circuit, Yao.put((i, j) => Yao.EasyBuild.FSimGate(2π * rand(), 2π * rand())))
        # push!(circuit, Yao.control(n, i, j => Yao.phase(2π * rand())))
    end
end

H = Quantum(circuit)
ψ = zeros(Product, n)

# Closed tensor network for the amplitude <ψ| H |ψ>.
tn = TensorNetwork(merge(Quantum(ψ), H, Quantum(ψ)'))
transform!(tn, Tenet.ContractSimplification())

# Contraction-path search via hypergraph partitioning (KaHyPar backend).
path = einexpr(
    tn,
    optimizer = HyPar(
        parts = 2,
        imbalance = 0.41,
        edge_scaler = (ind_size) -> 10 * Int(round(log2(ind_size))),
        vertex_scaler = (prod_size) -> 100 * Int(round(exp2(prod_size))),
    ),
)

# Diagnostics: largest intermediate rank, its size in GiB, and total flops.
@show maximum(ndims, Branches(path))
@show maximum(length, Branches(path)) * sizeof(eltype(tn)) / 1024^3

@show log10(mapreduce(flops, +, Branches(path)))

# Indices to slice over so each slice stays under 2^24 elements; `cuttings`
# enumerates every projection (index => value) along those indices.
cutinds = findslices(SizeScorer(), path, size = 2^24)
cuttings = [[i => dim for dim in 1:size(tn, i)] for i in cutinds]

# mock sliced path - valid for all slices
proj_inds = first.(cuttings)
slice_path = view(path.path, proj_inds...)

expr = Tenet.codegen(Val(:outplace), slice_path)

manager = SlurmManager(2 * 112 - 1)
addprocs(manager, cpus_per_task = 1, exeflags = "--project=$(Base.active_project())")
# @everywhere using LinearAlgebra
# @everywhere LinearAlgebra.BLAS.set_num_threads(2)

@everywhere using Tenet, EinExprs, IterTools, LinearAlgebra, Reactant, AbstractTrees
@everywhere tn = $tn
@everywhere slice_path = $slice_path
@everywhere cuttings = $cuttings
@everywhere expr = $expr

partial_results = map(enumerate(workers())) do (i, worker)
    Distributed.@spawnat worker begin
        # interleaved chunking without instantiation
        it = takenth(Iterators.drop(Iterators.product(cuttings...), i - 1), nworkers())

        f = @eval $expr
        # Compile the kernel once against a representative ("mock") slice; all
        # slices share the same shapes, so the compiled kernel is reusable.
        mock_slice = view(tn, first(it)...)
        tensors′ = [
            Tensor(Reactant.ConcreteRArray(copy(parent(mock_slice[head(leaf)...]))), inds(mock_slice[head(leaf)...]))
            for leaf in Leaves(slice_path)
        ]
        g = Reactant.compile(f, Tuple(tensors′))

        # local reduction of chunk
        accumulator = zero(eltype(tn))

        for proj_inds in it
            slice = view(tn, proj_inds...)
            # BUGFIX: build the kernel inputs from the current `slice`, not from
            # `mock_slice` — the original re-used the mock slice's data, so every
            # iteration recomputed the same (first) slice of the sum.
            tensors′ = [
                Tensor(
                    Reactant.ConcreteRArray(copy(parent(slice[head(leaf)...]))),
                    inds(slice[head(leaf)...]),
                ) for leaf in Leaves(slice_path)
            ]
            res = only(g(tensors′...))

            # avoid OOM due to garbage accumulation
            GC.gc()

            accumulator += res
        end

        return accumulator
    end
end

# Total amplitude = sum of the per-worker partial sums.
@show result = sum(Distributed.fetch.(partial_results))

rmprocs(workers())
46 changes: 31 additions & 15 deletions src/Ansatz.jl
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
using Tenet
using ValSplit
using LinearAlgebra

"""
Expand All @@ -25,11 +24,9 @@ for f in [
:noutputs,
:inputs,
:outputs,
:sites,
:nsites,
:nlanes,
:socket,
:(Tenet.tensors),
:(Tenet.arrays),
:(Base.collect),
]
Expand All @@ -46,33 +43,49 @@ alias(::A) where {A} = string(A)
function Base.summary(io::IO, tn::A) where {A<:Ansatz}
print(io, "$(alias(tn)) (inputs=$(ninputs(tn)), outputs=$(noutputs(tn)))")
end
Base.show(io::IO, tn::A) where {A<:Ansatz} = Base.summary(io, tn)
Base.show(io::IO, tn::A) where {A<:Ansatz} = summary(io, tn)

@valsplit 2 Tenet.select(tn::Ansatz, query::Symbol, args...) = select(Quantum(tn), query, args...)
# Forward `sites` queries to the wrapped `Quantum` tensor network.
sites(tn::Ansatz; kwargs...) = sites(Quantum(tn); kwargs...)

function Tenet.select(tn::Ansatz, ::Val{:between}, site1::Site, site2::Site)
"""
    Tenet.inds(tn::Ansatz; kwargs...)

Query indices of the ansatz. When `bond` is the *only* keyword given, dispatch
to the `Val(:bond)` method (the index shared between two sites); otherwise
forward the query to the wrapped `Quantum` network.
"""
function Tenet.inds(tn::Ansatz; kwargs...)
    # NamedTuple key tuples are egal, so `===` here tests "exactly the kwarg `bond`".
    if keys(kwargs) === (:bond,)
        inds(tn, Val(:bond), kwargs[:bond]...)
    else
        inds(Quantum(tn); kwargs...)
    end
end

function Tenet.inds(tn::Ansatz, ::Val{:bond}, site1::Site, site2::Site)
@assert site1 ∈ sites(tn) "Site $site1 not found"
@assert site2 ∈ sites(tn) "Site $site2 not found"
@assert site1 != site2 "Sites must be different"

tensor1 = select(Quantum(tn), :tensor, site1)
tensor2 = select(Quantum(tn), :tensor, site2)
tensor1 = tensors(tn; at = site1)
tensor2 = tensors(tn; at = site2)

isdisjoint(inds(tensor1), inds(tensor2)) && return nothing
return only(inds(tensor1) ∩ inds(tensor2))
end

TensorNetwork(tn)[only(inds(tensor1) ∩ inds(tensor2))]
"""
    Tenet.tensors(tn::Ansatz; kwargs...)

Query tensors of the ansatz. When `between` is the *only* keyword given,
dispatch to the `Val(:between)` method; otherwise forward the query to the
wrapped `Quantum` network.
"""
function Tenet.tensors(tn::Ansatz; kwargs...)
    # NamedTuple key tuples are egal, so `===` tests "exactly the kwarg `between`".
    if keys(kwargs) === (:between,)
        tensors(tn, Val(:between), kwargs[:between]...)
    else
        tensors(Quantum(tn); kwargs...)
    end
end

function Tenet.select(tn::Ansatz, ::Val{:bond}, site1::Site, site2::Site)
function Tenet.tensors(tn::Ansatz, ::Val{:between}, site1::Site, site2::Site)
@assert site1 ∈ sites(tn) "Site $site1 not found"
@assert site2 ∈ sites(tn) "Site $site2 not found"
@assert site1 != site2 "Sites must be different"

tensor1 = select(Quantum(tn), :tensor, site1)
tensor2 = select(Quantum(tn), :tensor, site2)
tensor1 = tensors(tn; at = site1)
tensor2 = tensors(tn; at = site2)

isdisjoint(inds(tensor1), inds(tensor2)) && return nothing
return only(inds(tensor1) ∩ inds(tensor2))

TensorNetwork(tn)[only(inds(tensor1) ∩ inds(tensor2))]
end

struct MissingSchmidtCoefficientsException <: Base.Exception
Expand All @@ -86,8 +99,11 @@ function Base.showerror(io::IO, e::MissingSchmidtCoefficientsException)
end

function LinearAlgebra.norm(ψ::Ansatz, p::Real = 2; kwargs...)
p != 2 && throw(ArgumentError("p=$p is not implemented yet"))
p == 2 || throw(ArgumentError("only L2-norm is implemented yet"))

return LinearAlgebra.norm2(ψ; kwargs...)
end

# TODO: Replace with contract(hcat(ψ, ψ')...) when implemented
# L2-norm of the state: contract <ψ|ψ> by merging the network with its adjoint,
# then take |sqrt| of the resulting scalar.
function LinearAlgebra.norm2(ψ::Ansatz; kwargs...)
    return contract(merge(TensorNetwork(ψ), TensorNetwork(ψ')); kwargs...) |> only |> sqrt |> abs
end
Loading
Loading