Import code from Tensors package (#81)
* Format code

* Import code from `Tensors`

* Fix OMEinsum import in tests

* Update Julia minimum version to v1.9

* Remove `NetworkLayout` dependency

* Fix `BlockArray` test dependency

* Clean code

* Stop testing on Julia 1.8

* Bump `EinExprs` minimum version to v0.4

* Rename `labels` to `EinExprs.inds`

* Fix namespace clash in `expand`

* Fix `einexpr` call

* Implement conversion to `EinExprs.Tensor`

* Bump `EinExprs` to v0.5

Temporarily break `contract` function

* Fix name shadowing of `inds` method

* Refactor `RankSimplification` to new `EinExprs` structure

* Skip `contract` in `Transformation` tests

* Bump `EinExprs` to v0.5.1
mofeing authored Aug 29, 2023
1 parent bae5c97 commit 6a67594
Showing 26 changed files with 1,227 additions and 214 deletions.
1 change: 0 additions & 1 deletion .github/workflows/CI.yml
@@ -29,7 +29,6 @@ jobs:
fail-fast: false
matrix:
version:
- '1.8'
- '1.9'
os:
- ubuntu-latest
1 change: 0 additions & 1 deletion .github/workflows/generate-sysimage.yml
@@ -8,7 +8,6 @@ jobs:
strategy:
matrix:
version:
- '1.8'
- '1.9'
os:
- ubuntu-latest
12 changes: 2 additions & 10 deletions Project.toml
@@ -11,15 +11,10 @@ EinExprs = "b1794770-133b-4de1-afb4-526377e9f4c5"
GraphMakie = "1ecd5474-83a3-4783-bb4f-06765db800d2"
Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
Makie = "ee78f7c6-11fb-53f2-987a-cfe4a2b5a57a"
Muscle = "21fe5c4b-a943-414d-bf3e-516f24900631"
NetworkLayout = "46757867-2c16-5918-afeb-47bfcb05e46a"
OMEinsum = "ebe7aa44-baf0-506c-a96f-8464559b3922"
Permutations = "2ae35dd2-176d-5d53-8349-f30d82d94d4f"
Quac = "b9105292-1415-45cf-bff1-d6ccf71e6143"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Requires = "ae029012-a4dd-5104-9daa-d747884805df"
Tensors = "a57d67a0-4683-47ff-be60-6114e830558b"
UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
ValSplit = "0625e100-946b-11ec-09cd-6328dd093154"

@@ -35,16 +30,13 @@ TenetQuacExt = "Quac"
Bijections = "0.1"
Combinatorics = "1.0"
DeltaArrays = "0.1.1"
EinExprs = "0.2, 0.3"
EinExprs = "0.5.1"
GraphMakie = "0.4,0.5"
Graphs = "1.7"
Makie = "0.18, 0.19"
Muscle = "0.1"
NetworkLayout = "0.4"
OMEinsum = "0.7"
Permutations = "0.4"
Quac = "0.2"
Requires = "1.3"
Tensors = "0.1.11"
ValSplit = "0.1"
julia = "1.8"
julia = "1.9"
2 changes: 1 addition & 1 deletion docs/src/tensor-network.md
@@ -28,7 +28,7 @@ Information about a `TensorNetwork` can be queried with the following functions.
## Query information

```@docs
labels(::TensorNetwork)
inds(::TensorNetwork)
size(::TensorNetwork)
tensors(::TensorNetwork)
length(::TensorNetwork)
24 changes: 11 additions & 13 deletions ext/TenetMakieExt.jl
@@ -1,15 +1,10 @@
module TenetMakieExt

if isdefined(Base, :get_extension)
using Tenet
else
using ..Tenet
end

using Tenet
using Combinatorics: combinations
using Graphs
using Makie
using NetworkLayout: dim

using GraphMakie

"""
@@ -21,7 +16,7 @@ Plot a [`TensorNetwork`](@ref) as a graph.
# Keyword Arguments
- `labels` Whether to show the index labels. Defaults to `false`.
- `inds` Whether to show the index labels. Defaults to `false`.
- `layout` Algorithm used to map graph vertices to a (2D or 3D) coordinate system.
The algorithms implemented in the `NetworkLayout` package are recommended.
"""
@@ -31,8 +26,11 @@ function Makie.plot(tn::TensorNetwork; kwargs...)
return Makie.FigureAxisPlot(f, ax, p)
end

# NOTE this is a hack! it avoids a hard dependency on NetworkLayout, but it may be unstable
__networklayout_dim(x) = typeof(x).super.parameters |> first

function Makie.plot!(f::Union{Figure,GridPosition}, tn::TensorNetwork; kwargs...)
ax = if haskey(kwargs, :layout) && dim(kwargs[:layout]) == 3
ax = if haskey(kwargs, :layout) && __networklayout_dim(kwargs[:layout]) == 3
Axis3(f[1, 1])
else
ax = Axis(f[1, 1])
@@ -56,7 +54,7 @@ function Makie.plot!(ax::Union{Axis,Axis3}, tn::TensorNetwork; labels = false, k

# TODO recognise `copytensors` by using `DeltaArray` or `Diagonal` representations
copytensors = findall(t -> haskey(t.meta, :dual), tensors(tn))
ghostnodes = map(Tenet.labels(tn, :open)) do ind
ghostnodes = map(inds(tn, :open)) do ind
# create new ghost node
add_vertex!(graph)
node = nv(graph)
@@ -92,14 +90,14 @@

# configure labels
labels == true && get!(kwargs, :elabels) do
opentensors = findall(t -> !isdisjoint(Tenet.labels(t), Tenet.labels(tn, :open)), tensors(tn))
opentensors = findall(t -> !isdisjoint(inds(t), inds(tn, :open)), tensors(tn))
opencounter = IdDict(tensor => 0 for tensor in opentensors)

map(edges(graph)) do edge
# case: open edge
if any(∈(ghostnodes), [src(edge), dst(edge)])
notghost = src(edge) ∈ ghostnodes ? dst(edge) : src(edge)
inds = Tenet.labels(tn, :open) ∩ Tenet.labels(tensors(tn)[notghost])
inds = Tenet.inds(tn, :open) ∩ Tenet.inds(tensors(tn)[notghost])
opencounter[notghost] += 1
return inds[opencounter[notghost]] |> string
end
@@ -110,7 +108,7 @@ function Makie.plot!(ax::Union{Axis,Axis3}, tn::TensorNetwork; labels = false, k
return tensors(tn)[i].meta[:dual] |> string
end

return join(Tenet.labels(tensors(tn)[src(edge)]) ∩ Tenet.labels(tensors(tn)[dst(edge)]), ',')
return join(Tenet.inds(tensors(tn)[src(edge)]) ∩ Tenet.inds(tensors(tn)[dst(edge)]), ',')
end
end
get!(() -> repeat([:black], ne(graph)), kwargs, :elabels_color)
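The `__networklayout_dim` trick above recovers the embedding dimension from the layout's supertype, `AbstractLayout{Dim,Ptype}`, without importing `NetworkLayout.dim`. A quick sanity check of the trick (this snippet assumes `NetworkLayout` is installed, which Tenet itself no longer requires):

```julia
using NetworkLayout: Spring, Stress

# layout algorithms subtype AbstractLayout{Dim,Ptype}, so the embedding
# dimension is the first type parameter of the immediate supertype
__networklayout_dim(x) = typeof(x).super.parameters |> first

__networklayout_dim(Spring(dim = 2))  # 2 → plotted on an Axis
__networklayout_dim(Stress(dim = 3))  # 3 → plotted on an Axis3
```

As the `NOTE` warns, `typeof(x).super` is not public API, so this can break if `NetworkLayout` reshuffles its type hierarchy.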
7 changes: 1 addition & 6 deletions ext/TenetQuacExt.jl
@@ -1,11 +1,6 @@
module TenetQuacExt

if isdefined(Base, :get_extension)
using Tenet
else
using ..Tenet
end

using Tenet
using Quac: Circuit, lanes, arraytype, Swap
using Bijections

7 changes: 7 additions & 0 deletions src/Helpers.jl
@@ -76,3 +76,10 @@ function superansatzes(T)
S = supertype(T)
return T === Ansatz ? (T,) : (T, superansatzes(S)...)
end

# NOTE from https://stackoverflow.com/q/54652787
function nonunique(x)
uniqueindexes = indexin(unique(x), x)
nonuniqueindexes = setdiff(1:length(x), uniqueindexes)
unique(x[nonuniqueindexes])
end
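This internal helper returns, in order of first appearance, the values that occur more than once; `contract` uses it below to detect repeated (traced) indices. For example:

```julia
nonunique([:i, :j, :i, :k])  # == [:i]
nonunique([1, 2, 3])         # == Int64[] — no repeats
```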
146 changes: 146 additions & 0 deletions src/Numerics.jl
@@ -0,0 +1,146 @@
using OMEinsum
using LinearAlgebra
using UUIDs: uuid4
using EinExprs: inds

# TODO test array container typevar on output
for op in [
:+,
:-,
:*,
:/,
:\,
:^,
:÷,
:fld,
:cld,
:mod,
:%,
:fldmod,
:fld1,
:mod1,
:fldmod1,
://,
:gcd,
:lcm,
:gcdx,
:widemul,
]
@eval Base.$op(a::Tensor{A,0}, b::Tensor{B,0}) where {A,B} = broadcast($op, a, b)
end
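The loop above lifts scalar arithmetic to 0-dimensional tensors by broadcasting each operator. A minimal sketch (constructing a 0-dimensional `Tensor` from `fill` and an empty index tuple is an assumption here):

```julia
using Tenet

a = Tensor(fill(2.0), ())  # 0-dimensional tensor wrapping the scalar 2.0
b = Tensor(fill(3.0), ())

c = a + b  # dispatches to broadcast(+, a, b)
only(c)    # 5.0 — the result is again a 0-dimensional Tensor
```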

"""
contract(a::Tensor[, b::Tensor, dims=nonunique([inds(a)..., inds(b)...])])
Perform tensor contraction operation.
"""
function contract(a::Tensor, b::Tensor; dims = (∩(inds(a), inds(b))))
ia = inds(a)
ib = inds(b)
i = ∩(dims, ia, ib)

ic = tuple(setdiff(ia ∪ ib, i isa Base.AbstractVecOrTuple ? i : (i,))...)

data = EinCode((String.(ia), String.(ib)), String.(ic))(parent(a), parent(b))

# TODO merge metadata?
return Tensor(data, ic)
end

function contract(a::Tensor; dims = nonunique(inds(a)))
ia = inds(a)
i = (dims, ia)

ic = tuple(setdiff(ia, i isa Base.AbstractVecOrTuple ? i : (i,))...)

data = EinCode((String.(ia),), String.(ic))(parent(a))

# TODO merge metadata
return Tensor(data, ic)
end

contract(a::Union{T,AbstractArray{T,0}}, b::Tensor{T}) where {T} = contract(Tensor(a), b)
contract(a::Tensor{T}, b::Union{T,AbstractArray{T,0}}) where {T} = contract(a, Tensor(b))
contract(a::AbstractArray{<:Any,0}, b::AbstractArray{<:Any,0}) = contract(Tensor(a), Tensor(b)) |> only
contract(a::Number, b::Number) = contract(fill(a), fill(b))
contract(tensors::Tensor...; kwargs...) = reduce((x, y) -> contract(x, y; kwargs...), tensors)

"""
*(::Tensor, ::Tensor)
Alias for [`contract`](@ref).
"""
Base.:*(a::Tensor, b::Tensor) = contract(a, b)
Base.:*(a::Tensor, b) = contract(a, b)
Base.:*(a, b::Tensor) = contract(a, b)
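In words: binary `contract` sums over `dims` (by default, every index shared by both tensors) and keeps the remaining indices, while the unary form traces the repeated indices of a single tensor. A sketch of both behaviors (assuming `Tensor` accepts a repeated index for the trace case):

```julia
using Tenet

A = Tensor(rand(2, 3), (:i, :j))
B = Tensor(rand(3, 4), (:j, :k))

C = A * B                          # same as contract(A, B): sums over the shared index :j
inds(C)                            # (:i, :k)
parent(C) ≈ parent(A) * parent(B)  # contraction over one shared index is matrix multiplication

T = Tensor(rand(3, 3), (:i, :i))
only(contract(T)) ≈ sum(parent(T)[i, i] for i in 1:3)  # unary contract computes the trace
```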

LinearAlgebra.svd(t::Tensor{<:Any,2}; kwargs...) = Base.@invoke svd(t::Tensor; left_inds = (first(inds(t)),), kwargs...)

function LinearAlgebra.svd(t::Tensor; left_inds, kwargs...)
if isempty(left_inds)
throw(ErrorException("no left-indices in SVD factorization"))
elseif any(∉(inds(t)), left_inds)
# TODO better error exception and checks
throw(ErrorException("all left-indices must be in $(inds(t))"))
end

right_inds = setdiff(inds(t), left_inds)
if isempty(right_inds)
# TODO better error exception and checks
throw(ErrorException("no right-indices in SVD factorization"))
end

# permute array
tensor = permutedims(t, (left_inds..., right_inds...))
data = reshape(parent(tensor), prod(i -> size(t, i), left_inds), prod(i -> size(t, i), right_inds))

# compute SVD
U, s, V = svd(data; kwargs...)

# tensorify results
U = reshape(U, ([size(t, ind) for ind in left_inds]..., size(U, 2)))
s = Diagonal(s)
Vt = reshape(V', (size(V', 1), [size(t, ind) for ind in right_inds]...))

vlind = Symbol(uuid4())
vrind = Symbol(uuid4())

U = Tensor(U, (left_inds..., vlind))
s = Tensor(s, (vlind, vrind))
Vt = Tensor(Vt, (vrind, right_inds...))

return U, s, Vt
end
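So `svd` reshapes the tensor into a (left indices) × (right indices) matrix, factorizes it, and splices a fresh virtual index between the two factors. A sketch of the resulting index structure:

```julia
using Tenet
using LinearAlgebra

t = Tensor(rand(2, 3, 4), (:i, :j, :k))
U, s, Vt = svd(t; left_inds = (:i, :j))

size(U)   # (2, 3, 4): (:i, :j) plus a virtual index of size min(2·3, 4) = 4
size(s)   # (4, 4): diagonal matrix of singular values
size(Vt)  # (4, 4): virtual index plus (:k,)

# the factors contract back to the original tensor, up to floating point
# (s wraps a Diagonal; densify it first if the einsum backend rejects it)
parent(contract(contract(U, s), Vt)) ≈ parent(t)
```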

LinearAlgebra.qr(t::Tensor{<:Any,2}; kwargs...) = Base.@invoke qr(t::Tensor; left_inds = (first(inds(t)),), kwargs...)

function LinearAlgebra.qr(t::Tensor; left_inds = (), right_inds = (), virtualind::Symbol = Symbol(uuid4()), kwargs...)
isdisjoint(left_inds, right_inds) ||
throw(ArgumentError("left ($left_inds) and right $(right_inds) indices must be disjoint"))

left_inds, right_inds =
isempty(left_inds) ? (setdiff(inds(t), right_inds), right_inds) :
isempty(right_inds) ? (left_inds, setdiff(inds(t), left_inds)) :
throw(ArgumentError("cannot set both left and right indices"))

all(!isempty, (left_inds, right_inds)) || throw(ArgumentError("no right-indices left in QR factorization"))
all(∈(inds(t)), left_inds ∪ right_inds) || throw(ArgumentError("indices must be in $(inds(t))"))

virtualind ∉ inds(t) || throw(ArgumentError("new virtual bond name ($virtualind) cannot already be present"))

# permute array
tensor = permutedims(t, (left_inds..., right_inds...))
data = reshape(parent(tensor), prod(i -> size(t, i), left_inds), prod(i -> size(t, i), right_inds))

# compute QR
Q, R = qr(data; kwargs...)

# tensorify results
Q = reshape(Q, ([size(t, ind) for ind in left_inds]..., size(Q, 2)))
R = reshape(R, (size(R, 1), [size(t, ind) for ind in right_inds]...))

Q = Tensor(Q, (left_inds..., virtualind))
R = Tensor(R, (virtualind, right_inds...))

return Q, R
end
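`qr` follows the same reshape-factorize-reshape pattern; exactly one of `left_inds` or `right_inds` fixes the split, and `virtualind` optionally names the new bond. A hedged sketch:

```julia
using Tenet
using LinearAlgebra

t = Tensor(rand(2, 3, 4), (:i, :j, :k))
Q, R = qr(t; left_inds = (:i, :j))  # equivalently: right_inds = (:k,)

inds(Q)  # (:i, :j, <virtual>) — the bond is a fresh Symbol(uuid4()) unless virtualind is given
inds(R)  # (<virtual>, :k)

Q2, R2 = qr(t; right_inds = (:k,), virtualind = :v)  # name the bond explicitly
```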
11 changes: 6 additions & 5 deletions src/Quantum/MP.jl
@@ -3,6 +3,7 @@ using Base.Iterators: flatten
using Random
using Bijections
using Muscle: gramschmidt!
using EinExprs: inds

"""
MatrixProduct{P<:Plug,B<:Boundary} <: Quantum
@@ -32,7 +33,7 @@ function checkmeta(::Type{MatrixProduct{P,B}}, tn::TensorNetwork) where {P,B}
isnothing(tn.χ) || tn.χ > 0 || return false

# no virtual index has dimensionality bigger than χ
all(i -> isnothing(tn.χ) || size(tn, i) <= tn.χ, labels(tn, :virtual)) || return false
all(i -> isnothing(tn.χ) || size(tn, i) <= tn.χ, inds(tn, :virtual)) || return false

return true
end
@@ -89,7 +90,7 @@ function MatrixProduct{P,B}(
tensors = map(enumerate(arrays)) do (i, array)
dirs = _sitealias(MatrixProduct{P,B}, order, n, i)

labels = map(dirs) do dir
inds = map(dirs) do dir
if dir === :l
vinds[(mod1(i - 1, n), i)]
elseif dir === :r
@@ -100,9 +101,9 @@
iinds[i]
end
end
alias = Dict(dir => label for (dir, label) in zip(dirs, labels))
alias = Dict(dir => label for (dir, label) in zip(dirs, inds))

Tensor(array, labels; alias = alias)
Tensor(array, inds; alias = alias)
end

return TensorNetwork{MatrixProduct{P,B}}(tensors; χ, plug = P, interlayer, metadata...)
@@ -122,7 +123,7 @@ Base.length(ψ::TensorNetwork{MatrixProduct{P,Infinite}}) where {P<:Plug} = Inf

# b = replace(b, [nameof(outsiteind(b, s)) => nameof(outsiteind(a, s)) for s in sites(a)]...)
# path = nameof.(flatten([physicalinds(a), flatten(zip(virtualinds(a), virtualinds(b)))]) |> collect)
# inputs = flatten([tensors(a), tensors(b)]) .|> labels
# inputs = flatten([tensors(a), tensors(b)]) .|> inds
# output = Symbol[]
# size_dict = merge(size(a), size(b))

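For context, the constructor above wires each site's array to its left/right virtual indices (`vinds`) plus a physical index, recording a `dir => index` alias on every `Tensor`. A hedged construction sketch — the array shapes assume the default `(:l, :r, :o)` site order and Tenet's `State` plug and `Open` boundary types, so check `_sitealias` for the exact convention:

```julia
using Tenet

# open-boundary MPS on 3 sites: physical dimension 2, bond dimension 2;
# boundary sites drop the missing :l/:r leg
ψ = MatrixProduct{State,Open}([rand(2, 2), rand(2, 2, 2), rand(2, 2)])

inds(ψ, :virtual)  # the two bonds shared by neighbouring sites, each of size ≤ χ
```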
9 changes: 5 additions & 4 deletions src/Quantum/PEP.jl
@@ -1,4 +1,5 @@
using UUIDs: uuid4
using EinExprs: inds

"""
ProjectedEntangledPair{P<:Plug,B<:Boundary} <: Quantum
@@ -28,7 +29,7 @@ function checkmeta(::Type{ProjectedEntangledPair{P,B}}, tn::TensorNetwork) where
isnothing(tn.χ) || tn.χ > 0 || return false

# no virtual index has dimensionality bigger than χ
all(i -> isnothing(tn.χ) || size(tn, i) <= tn.χ, labels(tn, :virtual)) || return false
all(i -> isnothing(tn.χ) || size(tn, i) <= tn.χ, inds(tn, :virtual)) || return false

return true
end
@@ -91,7 +92,7 @@ function ProjectedEntangledPair{P,B}(
tensors = map(zip(Iterators.map(Tuple, eachindex(IndexCartesian(), arrays)), arrays)) do ((i, j), array)
dirs = _sitealias(ProjectedEntangledPair{P,B}, order, (m, n), (i, j))

labels = map(dirs) do dir
inds = map(dirs) do dir
if dir === :l
hinds[(i, (mod1(j - 1, n), j))]
elseif dir === :r
@@ -106,9 +107,9 @@
oinds[(i, j)]
end
end
alias = Dict(dir => label for (dir, label) in zip(dirs, labels))
alias = Dict(dir => label for (dir, label) in zip(dirs, inds))

Tensor(array, labels; alias = alias)
Tensor(array, inds; alias = alias)
end |> vec

return TensorNetwork{ProjectedEntangledPair{P,B}}(tensors; χ, plug = P, interlayer, metadata...)
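`ProjectedEntangledPair` repeats the same pattern in two dimensions, with `:l`/`:r`/`:u`/`:d` virtual directions plus the physical index. A hedged sketch, again assuming the default direction order from `_sitealias`:

```julia
using Tenet

# 2×2 open-boundary PEPS: every corner site has two virtual legs (size 3)
# and one physical leg (size 2)
arrays = [rand(3, 3, 2) for _ in 1:2, _ in 1:2]
ψ = ProjectedEntangledPair{State,Open}(arrays)

length(inds(ψ, :virtual))  # 4 bonds: 2 horizontal + 2 vertical
```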