Skip to content

Keep track of S#98

Open
dartsushi wants to merge 5 commits into VictorVanthilt:master from
dartsushi:sloop
Open

Keep track of S#98
dartsushi wants to merge 5 commits into VictorVanthilt:master from
dartsushi:sloop

Conversation

@dartsushi
Copy link
Contributor

Save the three leg tensor S as well to access interesting information

gradalg = LBFGS(10; verbosity = 0, gradtol = 6.0e-7, maxiter = 40000),
finalize = (finalize!),
)
return new(T, T, gradalg, finalize)
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If S is a three-leg tensor, why is it instantiated as a four-leg one?

end

function cost_looptnr(S, T)
function to_const_TT(T)
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What does this function do?

@github-actions
Copy link

github-actions bot commented Oct 13, 2025

Your PR requires formatting changes to meet the project's style guidelines.
Please consider running Runic (git runic master) to apply these changes.

Click here to view the suggested changes.
diff --git a/src/schemes/symmetric_looptnr.jl b/src/schemes/symmetric_looptnr.jl
index 9b4d9bb..1948dea 100644
--- a/src/schemes/symmetric_looptnr.jl
+++ b/src/schemes/symmetric_looptnr.jl
@@ -28,10 +28,10 @@ mutable struct SLoopTNR <: TNRScheme
     gradalg::OptimKit.LBFGS
     finalize!::Function
     function SLoopTNR(
-        T::TensorMap;
-        gradalg = LBFGS(10; verbosity = 0, gradtol = 6.0e-7, maxiter = 40000),
-        finalize = (finalize!),
-    )
+            T::TensorMap;
+            gradalg = LBFGS(10; verbosity = 0, gradtol = 6.0e-7, maxiter = 40000),
+            finalize = (finalize!),
+        )
         return new(T, T, gradalg, finalize)
     end
 end
@@ -43,8 +43,8 @@ function classical_ising_inv(β)
 
     S = ℤ₂Space(0 => 1, 1 => 1)
     T = zeros(Float64, S ⊗ S ← S' ⊗ S')
-    block(T, Irrep[ℤ₂](0)) .= [2x^2 2x*y; 2x*y 2y^2]
-    block(T, Irrep[ℤ₂](1)) .= [2x*y 2x*y; 2x*y 2x*y]
+    block(T, Irrep[ℤ₂](0)) .= [2x^2 2x * y; 2x * y 2y^2]
+    block(T, Irrep[ℤ₂](1)) .= [2x * y 2x * y; 2x * y 2x * y]
 
     return permute(T, (1, 2, 3, 4))
 end
@@ -171,7 +171,7 @@ function ef_oneloop(T, trunc::TensorKit.TruncationScheme)
     ΨA = Ψ_center(T)
     ΨB = []
 
-    for i = 1:4
+    for i in 1:4
         s1, s2 = SVD12(ΨA[i], truncdim(trunc.dim * 2))
         push!(ΨB, s1)
         push!(ΨB, s2)
@@ -188,8 +188,8 @@ function ef_oneloop(T, trunc::TensorKit.TruncationScheme)
     )
 
     ΨB_disentangled = []
-    for i = 1:1
-        @tensor B1[-2 -1; -3] := ΨB[i][-1; -2 2] * PR_list[mod(i, 8)+1][2; -3]
+    for i in 1:1
+        @tensor B1[-2 -1; -3] := ΨB[i][-1; -2 2] * PR_list[mod(i, 8) + 1][2; -3]
         push!(ΨB_disentangled, B1)
     end
     S = ΨB_disentangled[1]
@@ -218,14 +218,14 @@ function step!(scheme, trunc, oneloop; ef_trunc = truncbelow(1.0e-14))
 end
 
 function run!(
-    scheme::SLoopTNR,
-    trscheme::TensorKit.TruncationScheme,
-    criterion::TNRKit.stopcrit;
-    finalize_beginning = true,
-    oneloop = true,
-    verbosity = 1,
-    ef_trunc = truncbelow(1.0e-14),
-)
+        scheme::SLoopTNR,
+        trscheme::TensorKit.TruncationScheme,
+        criterion::TNRKit.stopcrit;
+        finalize_beginning = true,
+        oneloop = true,
+        verbosity = 1,
+        ef_trunc = truncbelow(1.0e-14),
+    )
     data = []
 
     LoggingExtras.withlevel(; verbosity) do

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment

Labels

None yet

Projects

None yet

Development

Successfully merging this pull request may close these issues.

2 participants