% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
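%
% For example, the entry below can be cited via biblatex with the biber
% backend (a minimal sketch: the file name "references.bib" is an
% assumption, and the document is compiled with pdflatex, biber, pdflatex):
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib}
%   \begin{document}
%   \cite{Krmer:789753}
%   \printbibliography
%   \end{document}
%
% Alternatively, “bibtex8” can process this 8-bit input with classic
% BibTeX styles.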
@PHDTHESIS{Krmer:789753,
author = {Krämer, Sebastian},
othercontributors = {Grasedyck, Lars and Schneider, Reinhold and Bachmayr,
Markus},
title = {{T}ree tensor networks, associated singular values and
high-dimensional approximation},
school = {RWTH Aachen University},
type = {Dissertation},
address = {Aachen},
reportid = {RWTH-2020-05412},
pages = {1 online resource (xvi, 205 pages) : illustrations,
diagrams},
year = {2020},
note = {Published on the publication server of RWTH Aachen
University; Dissertation, RWTH Aachen University, 2020},
abstract = {In this thesis, we develop an algebraic and
graph-theoretical reinterpretation of tensor networks and
formats. We investigate properties of associated singular
values and demonstrate their importance for high-dimensional
approximation, in particular for model complexity
adaptation. This leads us to a concept of stability for
iterative optimization methods, which we discuss at length
for discrete matrix and tensor completion. We further generalize
these ideas to the approximate interpolation of scattered
data, and demonstrate the potential of the introduced
algorithms on a data set that describes rolling press
experiments. These largely algorithmic considerations are
supplemented and supported by a theoretical examination of
the interrelation between tensor singular values and its
connection to the quantum marginal problem. Tensor networks
are essentially multilinear maps which reflect the
connections between collections of tensors. In the first
part, we discuss how two familiar concepts in mathematics
yield an arithmetic that naturally describes such networks
and formalizes the underlying, simple graph structures
through universal assertions. The practicability of this
calculus is reflected in the straightforward implementations
that we provide, including those of well-known algorithms.
The central theorem of this thesis is the generalizing tree
singular value decomposition, which, while not novel in its
basic idea, incorporates various gauge conditions that stem
from the different, corresponding tensor formats. In the
second part, we discuss details of
high-dimensional, alternating least squares optimization in
tree tensor networks, which are those families of tensors
that form tree graphs. Due to the special properties of this
class of formats, even high-dimensional problems can be
handled effectively, in particular when the arising linear
subproblems are solved via a conjugate gradient method.
Following this introductory segment, we
investigate the meaning of singular values in this context.
As the model complexity is determined by the tensor ranks of
the iterate, the proper calibration of these ranks becomes
essential in order to obtain reasonable solutions to
recovery problems. Based on a specific definition of
stability, we introduce and discuss modifications to
standard alternating least squares as well as the relation
to reweighted l1-minimization. In particular, we demonstrate
the use of these concepts for rank-adaptive algorithms.
These algorithms are further generalized from the discrete
to the continuous setting and applied to the approximate
interpolation of rolling press simulations. As the singular
values associated with tensor networks stem from different
matricizations of the same tensor, the question of their
interrelation arises. In the third part, we first show that
the tensor
feasibility problem is equivalent to a version of the
quantum marginal problem. While the latter has been well
known in physics for decades, the tensor version,
originating in mathematics, has only recently been
considered. We transfer several results into our setting and
subsequently utilize the tree singular value decomposition
in order to decouple high-dimensional feasibility problems
into much simpler, smaller ones. Finally, we
specifically consider this situation for the tensor train
format, which leads us to cone theory, so-called honeycombs
and the application of linear programming algorithms.},
cin = {111410 / 110000},
ddc = {510},
cid = {$I:(DE-82)111410_20170801$ / $I:(DE-82)110000_20140620$},
typ = {PUB:(DE-HGF)11},
doi = {10.18154/RWTH-2020-05412},
url = {https://publications.rwth-aachen.de/record/789753},
}