% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@phdthesis{Tnshoff:1009723,
  author            = {Tönshoff, Jan Martin},
  othercontributors = {Grohe, Martin and Günnemann, Stephan},
  title             = {Deep Learning on Graphs: Developing and Understanding
                       Neural Architectures for Structured Data},
  school            = {RWTH Aachen University},
  type              = {Dissertation},
  address           = {Aachen},
  publisher         = {RWTH Aachen University},
  reportid          = {RWTH-2025-03641},
  pages             = {1 Online-Ressource : Illustrationen},
  year              = {2024},
  note              = {Veröffentlicht auf dem Publikationsserver der RWTH Aachen
                       University 2025; Dissertation, RWTH Aachen University, 2024},
  abstract          = {Graph Neural Networks (GNNs) have recently emerged as a
                       powerful class of deep learning architectures that can
                       directly learn from graph-structured data. Such data
                       naturally occurs in a wide range of structurally rich
                       learning domains, such as computational chemistry or social
                       network analysis. This thesis aims to study both practical
                       and theoretical aspects of GNNs, with a focus on
                       understanding their capabilities for solving hard learning
                       tasks, exploring new directions for GNN architecture design,
                       and understanding the relative strengths and weaknesses of
                       commonly used models. In this context, we present our main
                       research contributions: Firstly, we propose a GNN-based
                       approach for learning heuristics for Constraint Satisfaction
                       Problems (CSPs) through a general graph representation and
                       neural architecture. Using reinforcement learning, we show
                       that this approach can learn powerful search algorithms,
                       achieving superior performance compared to prior neural
                       methods and being competitive with classical solvers on a
                       range of complex combinatorial problems. Secondly, we
                       examine a novel GNN architecture based on 1D convolutions on
                       sequential features constructed from random walks. We
                       analyze the expressivity of this approach and prove it to be
                       incomparable to the Weisfeiler-Leman hierarchy and many
                       common GNN architectures. The model is further shown to
                       achieve strong empirical performance across various tasks,
                       suggesting a new direction for GNN architecture design
                       beyond traditional message passing. Thirdly, we compare
                       popular GNNs in the context of capturing long-range
                       dependencies in graphs. Through an empirical re-evaluation
                       of commonly used benchmark datasets, we show that standard
                       GNNs based on message passing perform better than previously
                       reported. A theoretical comparison between Graph
                       Transformers and GNNs with Virtual Nodes further shows that
                       neither architecture subsumes the other in terms of uniform
                       expressivity, highlighting the need to consider a wide range
                       of architectures for specific learning tasks.},
  cin               = {122910 / 120000},
  ddc               = {004},
  cid               = {$I:(DE-82)122910_20140620$ / $I:(DE-82)120000_20140620$},
  pnm               = {DFG project G:(GEPRIS)412400621 - Quantitative Analyse von
                       Datenbankanfragen (412400621) / 22. Runde der
                       Deutsch-Israelischen Projektkooperation (2019-2023)},
  pid               = {G:(GEPRIS)412400621 / G:(GEPRIS)412112094},
  typ               = {PUB:(DE-HGF)11},
  doi               = {10.18154/RWTH-2025-03641},
  url               = {https://publications.rwth-aachen.de/record/1009723},
}