
Commit

[ITensors] [ENHANCEMENT] Fix svd and qr for empty input left or right indices (#917)
mtfishman authored May 16, 2022
1 parent ece285d commit 1ca54fb
Showing 13 changed files with 212 additions and 11 deletions.
9 changes: 9 additions & 0 deletions NDTensors/NEWS.md
@@ -6,6 +6,15 @@ Note that as of Julia v1.5, in order to see deprecation warnings you will need t

After we release v1 of the package, we will start following [semantic versioning](https://semver.org).

NDTensors v0.1.39 Release Notes
===============================

Bugs:

Enhancements:

- Fix `svd` and `qr` for empty input left or right indices (#917)

NDTensors v0.1.38 Release Notes
===============================

2 changes: 1 addition & 1 deletion NDTensors/Project.toml
@@ -1,7 +1,7 @@
name = "NDTensors"
uuid = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
authors = ["Matthew Fishman <mfishman@flatironinstitute.org>"]
version = "0.1.38"
version = "0.1.39"

[deps]
Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
2 changes: 1 addition & 1 deletion NDTensors/src/linearalgebra.jl
@@ -119,7 +119,7 @@ function LinearAlgebra.svd(T::DenseTensor{ElT,2,IndsT}; kwargs...) where {ElT,In

maxdim::Int = get(kwargs, :maxdim, minimum(dims(T)))
mindim::Int = get(kwargs, :mindim, 1)
cutoff::Float64 = get(kwargs, :cutoff, 0.0)
cutoff = get(kwargs, :cutoff, 0.0)
use_absolute_cutoff::Bool = get(kwargs, :use_absolute_cutoff, use_absolute_cutoff)
use_relative_cutoff::Bool = get(kwargs, :use_relative_cutoff, use_relative_cutoff)
alg::String = get(kwargs, :alg, "divide_and_conquer")
7 changes: 6 additions & 1 deletion NDTensors/src/truncate.jl
@@ -1,6 +1,11 @@
export truncate!

function truncate!(P::Vector{Float64}; kwargs...)::Tuple{Float64,Float64}
cutoff = get(kwargs, :cutoff, 0.0)
if isnothing(cutoff)
return 0.0, 0.0
end

# Keyword argument deprecations
use_absolute_cutoff = false
if haskey(kwargs, :absoluteCutoff)
@@ -15,7 +20,7 @@ function truncate!(P::Vector{Float64}; kwargs...)::Tuple{Float64,Float64}

maxdim::Int = min(get(kwargs, :maxdim, length(P)), length(P))
mindim::Int = max(get(kwargs, :mindim, 1), 1)
cutoff::Float64 = max(get(kwargs, :cutoff, 0.0), 0.0)
cutoff = max(cutoff, 0.0)
use_absolute_cutoff::Bool = get(kwargs, :use_absolute_cutoff, use_absolute_cutoff)
use_relative_cutoff::Bool = get(kwargs, :use_relative_cutoff, use_relative_cutoff)

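Taken together, the `linearalgebra.jl` and `truncate.jl` changes above let `cutoff = nothing` flow through the decompositions as a "do not truncate" sentinel instead of failing the `::Float64` annotation. A minimal usage sketch at the ITensors level (the index names and dimensions are illustrative, not taken from the diff):

```julia
using ITensors

i = Index(2, "i")
j = Index(3, "j")
A = randomITensor(i, j)

# `cutoff = nothing` now short-circuits `truncate!`, so the full rank is kept.
U, S, V = svd(A, i; cutoff=nothing)
@assert dim(commonind(U, S)) == 2

# A numeric cutoff behaves as before and may discard small singular values.
U, S, V = svd(A, i; cutoff=1e-12)
```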
9 changes: 9 additions & 0 deletions NEWS.md
@@ -6,6 +6,15 @@ Note that as of Julia v1.5, in order to see deprecation warnings you will need t

After we release v1 of the package, we will start following [semantic versioning](https://semver.org).

ITensors v0.3.12 Release Notes
==============================

Bugs:

Enhancements:

- Fix `svd` and `qr` for empty input left or right indices (#917)

ITensors v0.3.11 Release Notes
==============================

4 changes: 2 additions & 2 deletions Project.toml
@@ -1,7 +1,7 @@
name = "ITensors"
uuid = "9136182c-28ba-11e9-034c-db9fb085ebd5"
authors = ["Matthew Fishman <mfishman@flatironinstitute.org>", "Miles Stoudenmire <mstoudenmire@flatironinstitute.org>"]
version = "0.3.11"
version = "0.3.12"

[deps]
BitIntegers = "c3b6d118-76ef-56ca-8cc7-ebb389d030a1"
@@ -36,7 +36,7 @@ HDF5 = "0.14, 0.15, 0.16"
IsApprox = "0.1"
KrylovKit = "0.4.2, 0.5"
LinearMaps = "3"
NDTensors = "0.1.38"
NDTensors = "0.1.39"
PackageCompiler = "1.0.0, 2"
Requires = "1.1"
SerializedElementArrays = "0.1"
54 changes: 50 additions & 4 deletions src/decomp.jl
@@ -37,6 +37,10 @@ The first three return arguments are `U`, `S`, and `V`, such that
Whether or not the SVD performs a truncation depends on the keyword
arguments provided.
If the left or right set of indices are empty, all input indices are
put on `V` or `U` respectively. To specify an empty set of left indices,
you must explicitly use `svd(A, ())` (`svd(A)` is currently undefined).
# Examples
```julia
@@ -89,10 +93,19 @@ function svd(A::ITensor, Linds...; kwargs...)
Lis = commoninds(A, indices(Linds))
Ris = uniqueinds(A, Lis)

if length(Lis) == 0 || length(Ris) == 0
error(
"In `svd`, the left or right indices are empty (the indices of `A` are ($(inds(A))), but the input indices are ($Lis)). For now, this is not supported. You may have accidentally input the wrong indices.",
)
Lis_original = Lis
Ris_original = Ris
if isempty(Lis_original)
α = trivial_index(Ris)
vLα = onehot(α => 1)
A *= vLα
Lis = [α]
end
if isempty(Ris_original)
α = trivial_index(Lis)
vRα = onehot(α => 1)
A *= vRα
Ris = [α]
end

CL = combiner(Lis...)
@@ -127,9 +140,18 @@ function svd(A::ITensor, Linds...; kwargs...)
u = settags(u, utags)
v = settags(v, vtags)

if isempty(Lis_original)
U *= dag(vLα)
end
if isempty(Ris_original)
V *= dag(vRα)
end

return TruncSVD(U, S, V, spec, u, v)
end

svd(A::ITensor; kwargs...) = error("Must specify indices in `svd`")

"""
TruncEigen
@@ -312,13 +334,37 @@ function qr(A::ITensor, Linds...; kwargs...)
tags::TagSet = get(kwargs, :tags, "Link,qr")
Lis = commoninds(A, indices(Linds))
Ris = uniqueinds(A, Lis)

Lis_original = Lis
Ris_original = Ris
if isempty(Lis_original)
α = trivial_index(Ris)
vLα = onehot(α => 1)
A *= vLα
Lis = [α]
end
if isempty(Ris_original)
α = trivial_index(Lis)
vRα = onehot(α => 1)
A *= vRα
Ris = [α]
end

Lpos, Rpos = NDTensors.getperms(inds(A), Lis, Ris)
QT, RT = qr(tensor(A), Lpos, Rpos; kwargs...)
Q, R = itensor(QT), itensor(RT)
q = commonind(Q, R)
settags!(Q, tags, q)
settags!(R, tags, q)
q = settags(q, tags)

if isempty(Lis_original)
Q *= dag(vLα)
end
if isempty(Ris_original)
R *= dag(vRα)
end

return Q, R, q
end

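The `Lis_original`/`Ris_original` branches implement the behavior described in the docstring above: when one side of the factorization has no indices, a trivial dimension-1 index is temporarily attached, the decomposition runs as usual, and the trivial index is contracted away again. A short usage sketch based on the tests added in this commit:

```julia
using ITensors

i = Index(2)
j = Index(2)
A = randomITensor(i, j)

# All indices on U (the right index set is empty):
U, S, V = svd(A, i, j)
@assert isempty(uniqueinds(V, S)) && dim(V) == 1

# All indices on V (an empty left index set must be requested explicitly with `()`):
U, S, V = svd(A, ())
@assert isempty(uniqueinds(U, S)) && hassameinds(uniqueinds(V, S), A)

# `factorize` follows the same convention:
X, Y = factorize(A, ())
@assert isempty(uniqueinds(X, Y))

# `svd(A)` with no index argument still throws an error.
```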
3 changes: 3 additions & 0 deletions src/index.jl
@@ -231,6 +231,9 @@ function sim(i::Index; tags=copy(tags(i)), plev=plev(i), dir=dir(i))
return Index(rand(index_id_rng(), IDType), copy(space(i)), dir, tags, plev)
end

trivial_space(i::Index) = 1
trivial_index(i::Index) = Index(trivial_space(i))

"""
dag(i::Index)
10 changes: 10 additions & 0 deletions src/indexset.jl
@@ -34,6 +34,9 @@ tuple_to_vector(t::Tuple) = collect(t)
tuple_to_vector(t) = t

function _narrow_eltype(v::Vector{T}) where {T}
if isempty(v)
return v
end
return convert(Vector{mapreduce(typeof, promote_type, v)}, v)
end
narrow_eltype(v::Vector{T}) where {T} = isconcretetype(T) ? v : _narrow_eltype(v)
@@ -160,6 +163,13 @@ You can also use the broadcast version `sim.(is)`.
"""
sim(is::Indices) = map(i -> sim(i), is)

function trivial_index(is::Indices)
if isempty(is)
return Index(1)
end
return trivial_index(first(is))
end

"""
mindim(is::Indices)
2 changes: 2 additions & 0 deletions src/qn/qnindex.jl
@@ -470,6 +470,8 @@ hassameflux(::Index, ::QNIndex) = false
# Split the blocks into blocks of size 1 with the same QNs
splitblocks(i::Index) = setspace(i, splitblocks(space(i)))

trivial_space(i::QNIndex) = [QN() => 1]

function mutable_storage(::Type{Order{N}}, ::Type{IndexT}) where {N,IndexT<:QNIndex}
return SizedVector{N,IndexT}(undef)
end
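The `trivial_space`/`trivial_index` helpers added in `index.jl`, `indexset.jl`, and `qnindex.jl` supply the dimension-1 placeholder index used by `svd` and `qr` above. They are internal (unexported); a hypothetical illustration of what they return, assuming they are reached through the `ITensors` module:

```julia
using ITensors

i = Index(3, "i")
ITensors.trivial_space(i)      # 1
α = ITensors.trivial_index(i)  # a new dimension-1 Index

# For a QN-conserving index, the trivial space is a single zero-flux block:
iq = Index(QN(0) => 1, QN(1) => 1)
ITensors.trivial_space(iq)     # [QN() => 1]
```

For a collection of indices, `trivial_index` takes its flavor from the first index, and an empty collection falls back to a plain `Index(1)`.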
3 changes: 1 addition & 2 deletions test/itensor.jl
@@ -1420,9 +1420,8 @@ end

@testset "Test error for bad decomposition inputs" begin
@test_throws ErrorException svd(A)
@test_throws ErrorException svd(A, inds(A))
@test_throws ErrorException factorize(A)
@test_throws ErrorException eigen(A, inds(A), inds(A))
#@test_throws ErrorException factorize(A)
end
end
end # End Dense storage test
23 changes: 23 additions & 0 deletions test/mpo.jl
@@ -695,6 +695,29 @@ end
@test A(psi) ≈ noprime(Apsi)
@test inner(noprime(Apsi), Apply(A, psi)) ≈ inner(Apsi, Apsi)
end

@testset "MPO with no link indices" for conserve_qns in [false, true]
s = siteinds("S=1/2", 4; conserve_qns)
H = MPO([op("Id", sn) for sn in s])
@test linkinds(H) == fill(nothing, length(s) - 1)
@test norm(H) == √(2^length(s))

Hortho = orthogonalize(H, 1)
@test Hortho ≈ H
@test linkdims(Hortho) == fill(1, length(s) - 1)

Htrunc = truncate(H; cutoff=1e-8)
@test Htrunc ≈ H
@test linkdims(Htrunc) == fill(1, length(s) - 1)

H² = apply(H, H; cutoff=1e-8)
H̃² = MPO([apply(H[n], H[n]) for n in 1:length(s)])
@test linkdims(H²) == fill(1, length(s) - 1)
@test H̃² ≈ H²

e, ψ = dmrg(H, randomMPS(s, n -> isodd(n) ? "↑" : "↓"); nsweeps=2, outputlevel=0)
@test e ≈ 1
end
end

nothing
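The new MPO test exercises the fix end to end: an MPO built directly from single-site operators has no link indices, so orthogonalizing or truncating it triggers `svd`/`qr` calls in which one side has an empty index set. A condensed sketch mirroring the test above:

```julia
using ITensors

s = siteinds("S=1/2", 4)
H = MPO([op("Id", sn) for sn in s])  # order-2 site tensors, no link indices

# Before this fix these calls errored inside `svd`/`qr`; now they succeed by
# inserting trivial dimension-1 link indices.
Hortho = orthogonalize(H, 1)
Htrunc = truncate(H; cutoff=1e-8)
@assert linkdims(Hortho) == fill(1, length(s) - 1)
```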
95 changes: 95 additions & 0 deletions test/svd.jl
@@ -72,6 +72,101 @@ include("util.jl")
@test norm(U * S * V - T) / norm(T) < 1E-10
end

@testset "svd with empty left or right indices" for space in
(2, [QN(0, 2) => 1, QN(1, 2) => 1]),
cutoff in (nothing, 1e-15)

i = Index(space)
j = Index(space)
A = randomITensor(i, j)

U, S, V = svd(A, i, j; cutoff)
@test U * S * V ≈ A
@test hassameinds(uniqueinds(U, S), A)
@test isempty(uniqueinds(V, S))
@test dim(U) == dim(A)
@test dim(S) == 1
@test dim(V) == 1
@test order(U) == order(A) + 1
@test order(S) == 2
@test order(V) == 1

U, S, V = svd(A, (); cutoff)
@test U * S * V ≈ A
@test hassameinds(uniqueinds(V, S), A)
@test isempty(uniqueinds(U, S))
@test dim(U) == 1
@test dim(S) == 1
@test dim(V) == dim(A)
@test order(U) == 1
@test order(S) == 2
@test order(V) == order(A) + 1

@test_throws ErrorException svd(A)
end

@testset "factorize with empty left or right indices" for space in (
2, [QN(0, 2) => 1, QN(1, 2) => 1]
),
cutoff in (nothing, 1e-15)

i = Index(space)
j = Index(space)
A = randomITensor(i, j)

X, Y = factorize(A, i, j; cutoff)
@test X * Y ≈ A
@test hassameinds(uniqueinds(X, Y), A)
@test isempty(uniqueinds(Y, X))
@test dim(X) == dim(A)
@test dim(Y) == 1
@test order(X) == order(A) + 1
@test order(Y) == 1

X, Y = factorize(A, (); cutoff)
@test X * Y ≈ A
@test hassameinds(uniqueinds(Y, X), A)
@test isempty(uniqueinds(X, Y))
@test dim(X) == 1
@test dim(Y) == dim(A)
@test order(X) == 1
@test order(Y) == order(A) + 1

@test_throws ErrorException factorize(A)
end

@testset "svd with empty left and right indices" for cutoff in (nothing, 1e-15)
A = ITensor(3.4)

U, S, V = svd(A, (); cutoff)
@test U * S * V ≈ A
@test isempty(uniqueinds(U, S))
@test isempty(uniqueinds(V, S))
@test dim(U) == 1
@test dim(S) == 1
@test dim(V) == 1
@test order(U) == 1
@test order(S) == 2
@test order(V) == 1

@test_throws ErrorException svd(A)
end

@testset "factorize with empty left and right indices" for cutoff in (nothing, 1e-15)
A = ITensor(3.4)

X, Y = factorize(A, (); cutoff)
@test X * Y ≈ A
@test isempty(uniqueinds(X, Y))
@test isempty(uniqueinds(Y, X))
@test dim(X) == 1
@test dim(Y) == 1
@test order(X) == 1
@test order(Y) == 1

@test_throws ErrorException factorize(A)
end

# TODO: remove this test, it takes a long time
## @testset "Ill-conditioned matrix" begin
## d = 5000

2 comments on commit 1ca54fb

@mtfishman
Member Author


@JuliaRegistrator register subdir=NDTensors

@JuliaRegistrator


Registration pull request created: JuliaRegistries/General/60375

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the github interface, or via:

git tag -a NDTensors-v0.1.39 -m "<description of version>" 1ca54fbad99750d7b02d4a97e4b58cbce0237a11
git push origin NDTensors-v0.1.39
