h1

h2

h3

h4

h5
h6
% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@PHDTHESIS{SalmaniBarzoki:1002228,
      author       = {Salmani Barzoki, Bahare},
      othercontributors = {Katoen, Joost-Pieter and de Raedt, Luc},
      title        = {{P}robabilistic model checking and parameter tuning for
                      {B}ayesian networks},
      school       = {RWTH Aachen University},
      type         = {Dissertation},
      address      = {Aachen},
      publisher    = {RWTH Aachen University},
      reportid     = {RWTH-2025-00411},
      pages        = {1 Online-Ressource : Illustrationen},
      year         = {2024},
      note         = {Veröffentlicht auf dem Publikationsserver der RWTH Aachen
                      University 2025; Dissertation, RWTH Aachen University, 2024},
      abstract     = {Probabilistic reasoning is a key to handling uncertainties
                      and making decisions based on partial observations. Bayesian
                      networks (BNs) are popular probabilistic graphical models in
                      probabilistic decision-making and AI and have a wide range
                      of applications, including machine learning, medicine, gene
                      regulatory networks, and finance. The pivotal task in
                      Bayesian networks is probabilistic inference that amounts to
                      computing conditional probabilities. While there is a range
                      of efficient algorithms for inference in BNs with certain
                      topologies, such as those with bounded tree-width,
                      probabilistic inference remains computationally hard in
                      general; the theoretical complexity is PP-complete.
                      Probabilistic model checking (PMC) is a field in formal
                      verification that focuses on analyzing stochastic systems
                      with respect to a set of formally defined properties. The
                      stochastic systems are typically modelled as Markov models,
                      and the properties as probabilistic extensions of LTL or
                      CTL. The properties are mostly reduced to a pivotal task,
                      computing reachability probabilities. Recent advancements in
                      the field consider parametric extensions of Markov models,
                      where a subset of the probabilities in the model is unknown.
                      In this dissertation, we introduce a novel approach based on
                      probabilistic model checking for inference and parameter
                      tuning in Bayesian networks. The dissertation is categorized
                      into two main settings: non-parameteric and parameteric. In
                      the non-parameteric setting, we focus on classical Bayesian
                      networks, where the model is fully specified. We address
                      conditional inference on classical BNs: we present mappings
                      from Bayesian networks into discrete-time Markov chains and
                      mathematically reduce performing conditional inference in
                      the BN to computing reachability probabilities in MCs. This
                      enables applying state-of-the-art algorithms from PMC and
                      optimization techniques such as bisimulation minimization
                      for Bayesian inference. We exploit explicit and symbolic
                      methods and empirically evaluate our framework against the
                      state-of-the-art weighted model counting. In the parametric
                      setting, we consider parametric Bayesian networks (pBNs),
                      where a subset of local probability distributions are based
                      polynomials over variables rather than concrete
                      probabilities. We specifically address sensitivity analysis
                      and ratio/difference parameter tuning problems from the BN
                      literature, and the parameter space partitioning problem
                      from PMC literature. We propose an algorithm for the problem
                      of minimal distance parameter tuning, where the objective is
                      to find an instantiation for the unknown parameters that
                      satisfy the constraint of interest while minimally deviating
                      from the original instantiation in the given reference
                      model. Our detailed experimental evaluations indicate that
                      our parameter synthesis techniques can treat parameter
                      synthesis for Bayesian networks (with hundreds of unknown
                      parameters), which goes beyond the capabilities of existing
                      techniques for Bayesian networks with unknown probabilities.
                      We lift the severe restrictions in the literature on the
                      number of unknown parameters, the global dependencies
                      between the parameters, and the form of obtained solutions.},
      cin          = {121310 / 120000},
      ddc          = {004},
      cid          = {$I:(DE-82)121310_20140620$ / $I:(DE-82)120000_20140620$},
      pnm          = {FRAPPANT - Formal Reasoning About Probabilistic Programs:
                      Breaking New Ground for Automation (787914)},
      pid          = {G:(EU-Grant)787914},
      typ          = {PUB:(DE-HGF)11},
      doi          = {10.18154/RWTH-2025-00411},
      url          = {https://publications.rwth-aachen.de/record/1002228},
}