h1

h2

h3

h4

h5
h6
% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@phdthesis{Tiberi:972001,
      author       = {Tiberi, Lorenzo},
      othercontributors = {Helias, Moritz and Honerkamp, Carsten},
      title        = {The role of nonlinear interactions and connectivity in
                      shaping critical and collective network dynamics},
      school       = {RWTH Aachen University},
      type         = {Dissertation},
      address      = {Aachen},
      publisher    = {RWTH Aachen University},
      reportid     = {RWTH-2023-09969},
      pages        = {1 Online-Ressource : Illustrationen},
      year         = {2023},
      note         = {Veröffentlicht auf dem Publikationsserver der RWTH Aachen
                      University; Dissertation, RWTH Aachen University, 2023},
      abstract     = {Neural computation is a collective phenomenon emerging from
                      the complex interaction of a large number of neurons.
                      Through evolution and learning, neural networks organize
                      their structure, giving rise to rich collective dynamics
                      that can support complex computational tasks. Some
                      observables of these dynamics indeed appear to be optimally
                      tuned for computation. Examples include the dimensionality
                      and principal components' spectrum of neuronal activity, or
                      the ubiquitous presence of observables following a power-law
                      scaling, which suggests that the brain might tune itself
                      into a critical state. Understanding which structural
                      properties of neural networks allow the brain to optimally
                      tune these dynamical observables is still an open question.
                      Furthermore, it is not fully clear, in the very first place,
                      what is the computational benefit of some of the brain's
                      observed behaviors, such as criticality. In this thesis, we
                      address these questions by characterizing the dynamics of
                      two biologically inspired neural network models, the
                      stochastic Wilson-Cowan model and the stochastic
                      Sompolinsky-Crisanti-Sommers (SCS) model. To this end, we
                      adapt tools from a field that has already seen great success
                      in understanding collective phenomena - statistical physics.
                      Seeking analytical understanding and interpretability, we
                      focus on the two minimal and essential ingredients of neural
                      computation: nonlinearities and connectivity. First, we
                      focus on nonlinearities in the Wilson-Cowan model. We
                      perform the first renormalization group (RG) analysis of a
                      neural network model. By deriving the so-called flow of
                      couplings, we are able to explore the computational
                      implications of a fundamental property of critical systems:
                      the presence of nonlinear interactions across all
                      length-scales. This property has been so far inaccessible by
                      previous studies, due to the use of mean-field
                      approximations. We find nonlinearities to be in a
                      Gell-Mann-Low regime: despite vanishing at very large length
                      scales, they do so logarithmically slowly, thus remaining
                      present on practically all intermediate scales. We argue
                      this regime to be optimal for computation, striking a
                      balance between linearity, optimal for memory, and
                      nonlinearity, required for computation. Second, we focus on
                      connectivity in the linearized SCS model. We ask which
                      connectivity structures can directly control the network's
                      dynamical observables, tuning them into the computationally
                      optimal values observed experimentally. We develop a novel
                      theory for random connectivity matrices, which shows that
                      these structures are encoded in the shape of the
                      connectivity's eigenvalue distribution. In particular, the
                      density of nearly critical eigenvalues controls the
                      power-law scaling of many dynamical observables, such as the
                      autocorrelation, autoresponse, dimensionality, and principal
                      components spectrum of neuronal activity. Differently than
                      more traditional connectivity structures, such as motifs,
                      these novel structures can account for phenomena such as a
                      fine-tuned power-law scaling of the principal components
                      spectrum, as observed in V1 of mice. Third, we focus on both
                      nonlinearities and connectivity in the SCS model. We work on
                      importing the RG analysis of nonlinear interactions to
                      networks, especially in the general case of asymmetric
                      heterogeneous networks, which is of most interest to
                      neuroscience. This is - and still remains - an open problem,
                      for which, however, we provide some novel steps forward. In
                      the very first place, we make it formally possible to apply
                      the RG methods to generic networks, by noticing an analogy
                      with classical critical phenomena. Then, we identify new
                      technical challenges specific to the asymmetric,
                      heterogeneous case and propose formal methods to solve them,
                      which rely on our newly developed random matrix theory. We
                      also identify a novel mechanism, which causes a breakdown of
                      the typical power-law scaling observed in classical critical
                      phenomena.},
      cin          = {136930 ; 136920 / 130000},
      ddc          = {530},
      cid          = {$I:(DE-82)136930_20160614$ / $I:(DE-82)130000_20140620$},
      pnm          = {HBP SGA3 - Human Brain Project Specific Grant Agreement 3
                      (945539) / ACA - Advanced Computing Architectures (SO-092) /
                      Transparent Deep Learning with Renormalized Flows
                      (BMBF-01IS19077A) / Impuls- und Vernetzungsfonds
                      (IVF-20140101)},
      pid          = {G:(EU-Grant)945539 / G:(DE-HGF)SO-092 /
                      G:(DE-Juel-1)BMBF-01IS19077A / G:(DE-HGF)IVF-20140101},
      typ          = {PUB:(DE-HGF)11},
      doi          = {10.18154/RWTH-2023-09969},
      url          = {https://publications.rwth-aachen.de/record/972001},
}