% NOTE: The tokens below (h1..h6) are residue from an HTML export of the
% repository page and carry no bibliographic data. They are kept here, outside
% any entry, where BibTeX ignores them.
% h1 h2 h3 h4 h5 h6
% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like "bibtex8" or
% "biber".

@phdthesis{Nabbefeld:964006,
      author       = {Nabbefeld, Gerion},
      othercontributors = {Kampa, Björn M. and Musall, Simon Fritjof},
      title        = {{Signatures} of cortical multisensory integration in mice
                      performing a novel visuotactile evidence accumulation task},
      school       = {RWTH Aachen University},
      type         = {Dissertation},
      address      = {Aachen},
      publisher    = {RWTH Aachen University},
      reportid     = {RWTH-2023-08096},
      pages        = {1 Online-Ressource : Illustrationen, Diagramme},
      year         = {2022},
      note         = {Veröffentlicht auf dem Publikationsserver der RWTH Aachen
                      University 2023; Dissertation, RWTH Aachen University, 2022},
      abstract     = {Much effort has been focused on studying how the brain
                      processes information from our individual senses. However,
                      the neural mechanisms, that allow the effortless integration
                      of unisensory inputs into multisensory percepts, are largely
                      unknown. To study how neural circuits integrate visual and
                      tactile information, we developed a multisensory
                      discrimination task for head-fixed mice. Here, two sequences
                      of visual, tactile or combined visuotactile stimuli are
                      presented on both sides of the mouse, which has to indicate
                      the higher-rate target-side to obtain a water reward. To
                      ensure integration of sensory information over the entire
                      stimulus period, a short delay was added before the
                      response. Mice achieved high accuracy in all conditions,
                      with improved performance in the multisensory condition.
                      This behavioral task gave us the opportunity to investigate
                      the neural circuits that allow mice to synergistically use
                      both the visual and tactile sensory information to solve the
                      behavioral task. We then used widefield imaging to measure
                      cortex-wide activity in transgenic mice expressing the
                      Ca2+-indicator GCaMP6s in all cortical excitatory neurons.
                      Here, we found that multisensory stimuli evoked higher
                      neuronal activity compared to unisensory stimulation. This
                      was most evident in the rostrolateral association area RL
                      and parts of medial frontal cortex (mFC), which reliably
                      responded to both visual and tactile stimuli. To better
                      isolate sensory responses from co-occurring task- or
                      behavior-related activity, we used a linear encoding model.
                      Including a multisensory interaction-term significantly
                      improved the predictions of cortical activity. With this
                      approach we identified two key features of sensory evoked
                      responses, depending on the stimulus condition. First, in
                      unisensory trials mice display cross-modal inhibition. Here,
                      in addition to the main sensory responses in the
                      corresponding sensory cortex, robust inhibition of activity
                      in the non-matching sensory cortex was found. Second, we
                      found additional superadditive responses in multisensory
                      trials, likely representing the absence of cross-modal
                      inhibition as well as increased activity in areas RL and
                      mFC. To understand how sensory information is used to guide
                      behavioral decisions, we first investigated which brain
                      areas displayed activity that reliably reflected the target
                      stimulus side. Here, the medial motor cortex more faithfully
                      reflected the target-side in tactile trials, while secondary
                      visual areas were more reliable in visual trials. In
                      multisensory trials, both regions accurately reflected the
                      target-side, likely resulting in higher certainty and
                      improved performance in multisensory trials. Finally, using
                      a choice-decoder we identified choice-related neural
                      activity in the anterolateral motor cortex (ALM), as well as
                      in licking-related regions of the primary motor and
                      somatosensory cortex. With this approach, we found no clear
                      modality-specific differences, suggesting that the same
                      neural circuits form decisions in all stimulus conditions.
                      Our results demonstrate that multisensory stimulation cause
                      widespread cortical activation in mice, which leads to
                      improved task performance. Here, cross-modal inhibition in
                      unisensory trials and superadditive multisensory integration
                      especially in RL and mFC were found in multisensory trials,
                      likely aiding mice in performing the individual task
                      condition. Sensory information is then accumulated over the
                      stimulus period in secondary visual areas and medial motor
                      cortex and this information converges in the secondary motor
                      cortex to form modality-unspecific decisions. These findings
                      give us a much deeper understanding of how the brain
                      processes and generalizes sensory information in order to
                      guide behavioral decisions.},
      cin          = {162320 / 160000},
      ddc          = {570},
      cid          = {$I:(DE-82)162320_20140620$ / $I:(DE-82)160000_20140620$},
      pnm          = {GRK 2416 - GRK 2416: MultiSenses-MultiScales: Neue Ansätze
                      zur Aufklärung neuronaler multisensorischer Integration
                      (368482240)},
      pid          = {G:(GEPRIS)368482240},
      typ          = {PUB:(DE-HGF)11},
      doi          = {10.18154/RWTH-2023-08096},
      url          = {https://publications.rwth-aachen.de/record/964006},
      internal-note = {Title casing: whole-word brace protection ({Signatures})
                      replaces fragile single-letter {S}ignatures; entry contains
                      UTF-8 (requires bibtex8/biber). Fields cin/ddc/cid/pnm/pid/
                      typ/reportid are repository-specific and ignored by BibTeX.},
}