@inbook{4a4dbd67b7834f5986a12a840e2cfd38,
  title     = {Large-Scale and Distributed Optimization: An Introduction},
  abstract  = {The recent explosion in size and complexity of datasets and the increased availability of computational resources has led us to what is sometimes called the big data era. In many big data fields, mathematical optimization has over the last decade emerged as a vital tool in extracting information from the data sets and creating predictors for unseen data. The large dimension of these data sets and the often parallel, distributed, or decentralized computational structures used for storing and handling the data, set new requirements on the optimization algorithms that solve these problems. This has led to a dramatic shift in focus in the optimization community over this period. Much effort has gone into developing algorithms that scale favorably with problem dimension and that can exploit structure in the problem as well as the computational environment. This is also the main focus of this book, which is comprised of individual chapters that further contribute to this development in different ways. In this introductory chapter, we describe the individual contributions, relate them to each other, and put them into a wider context.},
  keywords  = {Big data problems, Convex optimization, Monotone inclusions, Nonconvex methods, Operator splitting methods, Scalable methods, Stochastic methods},
  author    = {Giselsson, Pontus and Rantzer, Anders},
  year      = {2018},
  doi       = {10.1007/978-3-319-97478-1_1},
  language  = {English},
  booktitle = {Large-Scale and Distributed Optimization},
  series    = {Lecture Notes in Mathematics},
  volume    = {2227},
  publisher = {Springer},
  pages     = {1--10},
  address   = {Cham},
}