@phdthesis{f0a13485bf5e42dfa67f60aeaa660054,
  title     = {Simulation-based Inference: From Approximate {Bayesian} Computation and Particle Methods to Neural Density Estimation},
  abstract  = {This doctoral thesis in computational statistics utilizes both Monte Carlo methods (approximate Bayesian computation and sequential Monte Carlo) and machine-learning methods (deep learning and normalizing flows) to develop novel algorithms for inference in implicit Bayesian models. Implicit models are those for which calculating the likelihood function is very challenging (and often impossible), but model simulation is feasible. The inference methods developed in the thesis are simulation-based inference methods since they leverage the possibility to simulate data from the implicit models. Several approaches are considered in the thesis: Paper II and IV focus on classical methods (sequential Monte Carlo-based methods), while paper I and III focus on more recent machine learning methods (deep learning and normalizing flows, respectively). Paper I constructs novel deep learning methods for learning summary statistics for approximate Bayesian computation (ABC). To achieve this paper I introduces the partially exchangeable network (PEN), a deep learning architecture specifically designed for Markovian data (i.e., partially exchangeable data). Paper II considers Bayesian inference in stochastic differential equation mixed-effects models (SDEMEM). Bayesian inference for SDEMEMs is challenging due to the intractable likelihood function of SDEMEMs. Paper II addresses this problem by designing a novel Gibbs-blocking strategy in combination with correlated pseudo marginal methods. The paper also discusses how custom particle filters can be adapted to the inference procedure. Paper III introduces the novel inference method sequential neural posterior and likelihood approximation (SNPLA). SNPLA is a simulation-based inference algorithm that utilizes normalizing flows for learning both the posterior distribution and the likelihood function of an implicit model via a sequential scheme. By learning both the likelihood and the posterior, and by leveraging the reverse Kullback--Leibler (KL) divergence, SNPLA avoids ad-hoc correction steps and Markov chain Monte Carlo (MCMC) sampling. Paper IV introduces the accelerated-delayed acceptance (ADA) algorithm. ADA can be viewed as an extension of the delayed-acceptance (DA) MCMC algorithm that leverages connections between the two likelihood ratios of DA to further accelerate MCMC sampling from the posterior distribution of interest, although our approach introduces an approximation. The main case study of paper IV is a double-well potential stochastic differential equation (DWPSDE) model for protein-folding data (reaction coordinate data).},
  keywords  = {Bayesian statistics, computational statistics, deep learning, mixed-effects, sequential Monte Carlo, stochastic differential equations},
  author    = {Wiqvist, Samuel},
  note      = {Defence details Date: 2021-09-24 Time: 13:00 Place: Centre for Mathematical Sciences, Lund. Join via zoom: https://lu-se.zoom.us/j/65559057133 External reviewer(s) Name: Wilkinson, Darren J Title: Professor Affiliation: Newcastle University ---},
  year      = {2021},
  month     = aug,
  day       = {16},
  language  = {English},
  isbn      = {9789178959679},
  series    = {Doctoral Theses in Mathematical Sciences},
  publisher = {Lund University (Media-Tryck)},
  number    = {09},
  type      = {Doctoral Thesis (compilation)},
  school    = {Mathematical Statistics},
}