
tensor_network_distance: add normalized option
jcmgray committed Jan 31, 2024
1 parent 8c4f425 commit 7a7c95d
Showing 4 changed files with 31 additions and 14 deletions.
4 changes: 2 additions & 2 deletions docs/changelog.md
@@ -8,8 +8,8 @@ Release notes for `quimb`.

**Enhancements:**

- add [`TensorNetwork.cut_bond`](quimb.tensor.tensor_core.TensorNetwork.cut_bond)
for cutting a bond index
- add `normalized=True` option to [`tensor_network_distance`](quimb.tensor.tensor_core.tensor_network_distance) for computing the normalized distance between tensor networks: $2 |A - B| / (|A| + |B|)$, which is useful for convergence checks. [`Tensor.distance_normalized`](quimb.tensor.tensor_core.Tensor.distance_normalized) and [`TensorNetwork.distance_normalized`](quimb.tensor.tensor_core.TensorNetwork.distance_normalized) added as aliases.
- add [`TensorNetwork.cut_bond`](quimb.tensor.tensor_core.TensorNetwork.cut_bond) for cutting a bond index
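
A minimal usage sketch of the new option (an editorial illustration, not part of the changelog), using only the API exercised by this commit's tests — `TN_rand_reg`, `distance`, and the new `distance_normalized` alias:

```python
import numpy as np
import quimb.tensor as qtn

# two random 3-regular tensor networks sharing physical indices k0..k5
A = qtn.TN_rand_reg(n=6, reg=3, D=2, phys_dim=2, dtype=complex)
B = qtn.TN_rand_reg(n=6, reg=3, D=2, phys_dim=2, dtype=complex)

d = A.distance(B)                    # plain Frobenius distance |A - B|
dn = A.distance(B, normalized=True)  # 2 |A - B| / (|A| + |B|), lies in [0, 2]
assert 0 <= dn <= 2
assert np.isclose(dn, A.distance_normalized(B))  # the alias pre-binds normalized=True
```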


(whats-new-1-7-1)=
23 changes: 18 additions & 5 deletions quimb/tensor/tensor_core.py
@@ -1251,6 +1251,7 @@ def tensor_network_distance(
xAB=None,
xBB=None,
method="auto",
normalized=False,
**contract_opts,
):
r"""Compute the Frobenius norm distance between two tensor networks:
@@ -1284,6 +1285,10 @@
directly formed and the norm computed, which can be quicker when the
exterior dimensions are small. If ``'auto'``, the dense method will
be used if the total operator (outer) size is ``<= 2**16``.
normalized : bool, optional
If ``True``, then normalize the distance by the norm of the two
operators, i.e. ``2 * D(A, B) / (|A| + |B|)``. The resulting distance
lies between 0 and 2 and is more useful for assessing convergence.
contract_opts
Supplied to :meth:`~quimb.tensor.tensor_core.TensorNetwork.contract`.
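
As a quick sanity sketch of the two strategies described above (editorial, mirroring the parametrized test further down rather than asserting documented behaviour), both methods compute the same exact quantity and should agree to floating point precision:

```python
import numpy as np
import quimb.tensor as qtn

A = qtn.TN_rand_reg(n=6, reg=3, D=2, phys_dim=2, dtype=complex)
B = qtn.TN_rand_reg(n=6, reg=3, D=2, phys_dim=2, dtype=complex)

d_dense = A.distance(B, method="dense")      # explicitly form A and B, then take the norm
d_overlap = A.distance(B, method="overlap")  # via the <A|A>, <A|B>, <B|B> overlaps
assert np.isclose(d_dense, d_overlap)
```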
@@ -1310,11 +1315,10 @@
else:
method = "overlap"

# directly form vectorizations of both
# directly from vectorizations of both
if method == "dense":
A = tnA.to_dense(oix)
B = tnB.to_dense(oix)
return do("linalg.norm", A - B)
tnA = tnA.contract(..., output_inds=oix, preserve_tensor=True)
tnB = tnB.contract(..., output_inds=oix, preserve_tensor=True)

# overlap method
if xAA is None:
@@ -1324,7 +1328,12 @@
if xBB is None:
xBB = (tnB | tnB.H).contract(..., **contract_opts)

return do("abs", xAA - 2 * do("real", xAB) + xBB) ** 0.5
dAB = do("abs", xAA - 2 * do("real", xAB) + xBB) ** 0.5

if normalized:
dAB *= 2 / (do("abs", xAA)**0.5 + do("abs", xBB)**0.5)

return dAB
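
A standalone numpy sketch (editorial, not quimb code) of the identity the overlap branch relies on, $D(A, B)^2 = \langle A|A\rangle - 2\,\mathrm{Re}\langle A|B\rangle + \langle B|B\rangle$, and of the normalization applied just above:

```python
import numpy as np

rng = np.random.default_rng(0)
A = rng.normal(size=64) + 1j * rng.normal(size=64)  # stand-ins for the vectorized networks
B = rng.normal(size=64) + 1j * rng.normal(size=64)

xAA = np.vdot(A, A)  # <A|A>
xAB = np.vdot(A, B)  # <A|B>
xBB = np.vdot(B, B)  # <B|B>

d_overlap = abs(xAA - 2 * xAB.real + xBB) ** 0.5
assert np.isclose(d_overlap, np.linalg.norm(A - B))  # matches the dense distance

d_norm = 2 * d_overlap / (abs(xAA) ** 0.5 + abs(xBB) ** 0.5)
assert 0 <= d_norm <= 2  # the normalized distance is bounded by the triangle inequality
```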


def tensor_network_fit_autodiff(
@@ -2495,6 +2504,8 @@ def compute_reduced_factor(
def distance(self, other, **contract_opts):
return tensor_network_distance(self, other, **contract_opts)

distance_normalized = functools.partialmethod(distance, normalized=True)
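
The alias above is built with `functools.partialmethod`; here is a self-contained plain-Python sketch of the same pattern (the `Interval` class and numbers are hypothetical, only the aliasing pattern mirrors the line above):

```python
import functools

class Interval:
    def __init__(self, x):
        self.x = x

    def distance(self, other, normalized=False):
        d = abs(self.x - other.x)
        if normalized:
            d *= 2 / (abs(self.x) + abs(other.x))
        return d

    # same pattern as TensorNetwork.distance_normalized above
    distance_normalized = functools.partialmethod(distance, normalized=True)

print(Interval(3.0).distance(Interval(1.0)))             # 2.0
print(Interval(3.0).distance_normalized(Interval(1.0)))  # 2 * 2 / (3 + 1) = 1.0
```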

def gate(
self,
G,
@@ -8837,6 +8848,8 @@ def insert_compressor_between_regions(
def distance(self, *args, **kwargs):
return tensor_network_distance(self, *args, **kwargs)

distance_normalized = functools.partialmethod(distance, normalized=True)

def fit(
self,
tn_target,
4 changes: 2 additions & 2 deletions quimb/utils.py
@@ -163,12 +163,12 @@ def progbar(*args, **kwargs):
def deprecated(fn, old_name, new_name):
"""Mark a function as deprecated, and indicate the new name."""

@functools.wraps(fn)
def new_fn(*args, **kwargs):
import warnings

warnings.warn(
f"The {old_name} function is deprecated in favor "
f"of {new_name}",
f"The {old_name} function is deprecated in favor of {new_name}",
Warning,
)
return fn(*args, **kwargs)
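
A small usage sketch of the helper touched above (the wrapped function and old name are hypothetical; only `deprecated` itself comes from `quimb/utils.py`):

```python
from quimb.utils import deprecated

def frobenius_distance(a, b):  # hypothetical replacement function
    return abs(a - b)

# keep the old name callable, but warn whenever it is used
frob_dist = deprecated(frobenius_distance, "frob_dist", "frobenius_distance")

frob_dist(3.0, 1.0)  # warns that frob_dist is deprecated in favor of frobenius_distance
```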
14 changes: 9 additions & 5 deletions tests/test_tensor/test_tensor_core.py
@@ -1027,15 +1027,19 @@ def test_contract_to_dense_reduced_factor(self):
assert_allclose(Ur @ Ur.T, np.eye(4), atol=1e-10)

@pytest.mark.parametrize("method", ("auto", "dense", "overlap"))
def test_tensor_network_distance(self, method):
@pytest.mark.parametrize("normalized", (True, False))
def test_tensor_network_distance(self, method, normalized):
n = 6
A = qtn.TN_rand_reg(n=n, reg=3, D=2, phys_dim=2, dtype=complex)
Ad = A.to_dense([f"k{i}" for i in range(n)])
B = qtn.TN_rand_reg(n=6, reg=3, D=2, phys_dim=2, dtype=complex)
Bd = B.to_dense([f"k{i}" for i in range(n)])
d1 = np.linalg.norm(Ad - Bd)
d2 = A.distance(B, method=method)
assert d1 == pytest.approx(d2)
d2 = A.distance(B, method=method, normalized=normalized)
if normalized:
assert 0 <= d2 <= 2
else:
assert d1 == pytest.approx(d2)

@pytest.mark.parametrize(
"method,opts",
@@ -1057,9 +1061,9 @@ def test_tensor_network_distance(self, method):
def test_fit_mps(self, method, opts):
k1 = qtn.MPS_rand_state(5, 3, seed=666)
k2 = qtn.MPS_rand_state(5, 3, seed=667)
assert k1.distance(k2) > 1e-3
assert k1.distance_normalized(k2) > 1e-3
k1.fit_(k2, method=method, progbar=True, **dict(opts))
assert k1.distance(k2) < 1e-3
assert k1.distance_normalized(k2) < 1e-3
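
A hedged sketch of the convergence-check use case this test exercises, assuming the `MPS_rand_state` / `fit_` API used above and that `"als"` is one of the accepted fit methods:

```python
import quimb.tensor as qtn

target = qtn.MPS_rand_state(5, 3, seed=667)
guess = qtn.MPS_rand_state(5, 3, seed=666)

print(guess.distance_normalized(target))  # expected O(1): two unrelated random states
guess.fit_(target, method="als")          # optimize guess towards target, in place
print(guess.distance_normalized(target))  # should now be small; the test asserts < 1e-3
```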

@pytest.mark.parametrize(
"method,opts",
