additions to paper
KonstantinRiedl committed Oct 8, 2023
1 parent 068a169 commit 2c151d2
Showing 2 changed files with 176 additions and 15 deletions.
142 changes: 141 additions & 1 deletion paper.bib
@@ -124,6 +124,12 @@ @misc{pagmo2017
doi = {10.5281/zenodo.1054110},
url = {https://doi.org/10.5281/zenodo.1054110}
}
@software{Igor_CBOinPython,
author = {Tukh, Igor and Riedl, Konstantin},
title = {{cbo-in-python}},
url = {https://github.com/Igor-Tukh/cbo-in-python},
version = {1.0}
}
@software{Roith_polarcbo,
author = {Roith, Tim and Bungert, Leon and Wacker, Philipp},
title = {{polarcbo}},
@@ -172,6 +178,22 @@ @software{Bailo_consensus
url = {https://github.com/rafaelbailo/Consensus.jl},
version = {0.0.1}
}
@article{carrillo2018analytical,
title={An analytical framework for consensus-based global optimization method},
author={Carrillo, Jos{\'e} A. and Choi, Young-Pil and Totzeck, Claudia and Tse, Oliver},
journal={Mathematical Models and Methods in Applied Sciences},
volume={28},
number={6},
pages={1037--1066},
year={2018},
doi={10.1142/S0218202518500276}
}
@article{carrillo2021consensus,
title={A consensus-based global optimization method for high dimensional machine learning problems},
author={Carrillo, Jos{\'e} A and Jin, Shi and Li, Lei and Zhu, Yuhua},
@@ -181,5 +203,123 @@ @article{carrillo2021consensus
year={2021},
publisher={EDP Sciences}
}

@inproceedings{fornasier2021convergence,
title={Convergence of Anisotropic Consensus-Based Optimization in Mean-Field Law},
author={Fornasier, Massimo and Klock, Timo and Riedl, Konstantin},
editor={Jim{\'e}nez Laredo, Juan Luis and Hidalgo, J. Ignacio and Babaagba, Kehinde Oluwatoyin},
booktitle={Applications of Evolutionary Computation},
pages={738--754},
year={2022},
publisher={Springer},
address={Cham},
isbn={978-3-031-02462-7}
}
@article{riedl2022leveraging,
title={Leveraging Memory Effects and Gradient Information in Consensus-Based Optimization: On Global Convergence in Mean-Field Law},
author={Riedl, Konstantin},
journal={arXiv preprint arXiv:2211.12184},
year={2022}
}
@article{qiu2022PSOconvergence,
title={On the global convergence of particle swarm optimization methods},
author={Huang, Hui and Qiu, Jinniao and Riedl, Konstantin},
journal={Applied Mathematics \& Optimization},
volume={88},
number={2},
pages={30},
year={2023},
publisher={Springer}
}
@article{grassi2020particle,
title={From particle swarm optimization to consensus based optimization: stochastic modeling and mean-field limit},
author={Grassi, Sara and Pareschi, Lorenzo},
journal={Mathematical Models and Methods in Applied Sciences},
volume={31},
number={8},
pages={1625--1657},
year={2021},
doi={10.1142/S0218202521500342}
}
@article{riedl2023gradient,
title={Gradient is All You Need?},
author={Riedl, Konstantin and Klock, Timo and Geldhauser, Carina and Fornasier, Massimo},
journal={arXiv preprint arXiv:2306.09778},
year={2023}
}
@article{carrillo2023fedcbo,
title={FedCBO: Reaching Group Consensus in Clustered Federated Learning through Consensus-based Optimization},
author={Carrillo, Jose A and Trillos, Nicolas Garcia and Li, Sixu and Zhu, Yuhua},
journal={arXiv preprint arXiv:2305.02894},
year={2023}
}
@article{klamroth2022consensus,
title={Consensus-Based Optimization for Multi-Objective Problems: A Multi-Swarm Approach},
author={Klamroth, Kathrin and Stiglmayr, Michael and Totzeck, Claudia},
journal={arXiv preprint arXiv:2211.15737},
year={2022}
}
@article{borghi2022adaptive,
title={An adaptive consensus based method for multi-objective optimization with uniform {P}areto front approximation},
author={Borghi, Giacomo and Herty, Michael and Pareschi, Lorenzo},
journal={Applied Mathematics \& Optimization},
volume={88},
number={2},
pages={1--43},
year={2023},
publisher={Springer}
}
@article{fornasier2020consensus_sphere_convergence,
title={Consensus-based optimization on the sphere: convergence to global minimizers and machine learning},
author={Fornasier, Massimo and Huang, Hui and Pareschi, Lorenzo and S{\"u}nnen, Philippe},
journal={Journal of Machine Learning Research},
volume={22},
number={237},
pages={1--55},
year={2021}
}
@article{borghi2021constrained,
title={Constrained consensus-based optimization},
author={Borghi, Giacomo and Herty, Michael and Pareschi, Lorenzo},
journal={SIAM Journal on Optimization},
volume={33},
number={1},
pages={211--236},
year={2023},
publisher={SIAM}
}
@article{huang2021MFLCBO,
title={On the mean-field limit for the consensus-based optimization},
author={Huang, Hui and Qiu, Jinniao},
journal={Mathematical Methods in the Applied Sciences},
volume={45},
number={12},
pages={7814--7831},
year={2022}
}
@article{huang2022consensus,
title={Consensus-based optimization for saddle point problems},
author={Huang, Hui and Qiu, Jinniao and Riedl, Konstantin},
journal={arXiv preprint arXiv:2212.12334},
year={2022}
}

49 changes: 35 additions & 14 deletions paper.md
@@ -1,51 +1,72 @@
---
title: 'CBX: Python and Julia packages for consensus-based interacting particle methods'
tags:
  - Python
  - Julia
  - Optimization
  - Sampling
authors:
  - name: Tim Roith
    orcid: 0000-0001-8440-2928
    affiliation: 1
  - name: Konstantin Riedl
    orcid: 0000-0002-2206-4334
    affiliation: "2, 3"
affiliations:
  - name: Friedrich-Alexander-Universität Erlangen-Nürnberg
    index: 1
  - name: Technical University of Munich
    index: 2
  - name: Munich Center for Machine Learning
    index: 3
date: 08 October 2023
bibliography: paper.bib
---

# Summary

We present CBXpy and CBX.jl, which provide Python and Julia implementations, respectively, of consensus-based interacting particle methods. In detail, the packages focus on consensus-based optimization (CBO) [@pinnau2017consensus] and consensus-based sampling (CBS) [@carrillo2022consensus], which coined the acronym CBX. The Python and Julia implementations were developed in parallel in order to provide a framework for researchers more familiar with either language. While we focused on having a similar API and core functionality in both packages, we took advantage of the strengths of each language and wrote idiomatic code.

![Visualization of a CBO run for the Ackley function [@ackley2012connectionist].](JOSS.png){ width=50% }

# Statement of need

Consensus-based optimization (CBO) was proposed in [@pinnau2017consensus] as a zero-order (derivative-free) particle-based scheme to solve problems of the form
$$
x^* = \mathrm{argmin}_{x\in\mathcal{X}} f(x),
$$
for some input space $\mathcal{X}$ and a possibly nonconvex and nonsmooth objective function $f:\mathcal{X}\to\mathbb{R}$. As an agent-based method, CBO is conceptually comparable to biologically and physically inspired methods such as particle-swarm optimization (PSO) [@kennedy1995particle], simulated annealing (SA) [@henderson2003theory] or several other heuristics [@mohan2012survey;@karaboga2014comprehensive;@yang2009firefly;@bayraktar2013wind]. However, compared to these methods, CBO was designed to be amenable to a rigorous theoretical convergence analysis [@carrillo2018analytical;@carrillo2021consensus;@fornasier2021consensus;@fornasier2021convergence]. From a computational perspective, the method is attractive as the particle interactions scale linearly with the number of particles.

For Python, PSO and SA implementations are already available [@miranda2018pyswarms;@scikitopt;@deapJMLR2012;@pagmo2017], which are widely used in the community and provide a rich framework for the respective methods. However, adjusting these implementations to CBO is not straightforward. Furthermore, in this project we want to provide a lightweight and direct implementation of CBO methods that is easy to understand and to modify. The first publicly available Python packages implementing CBO-type algorithms were given by some of the authors together with collaborators in [@Igor_CBOinPython], where CBO as in [@pinnau2017consensus] is implemented, as well as in [@Roith_polarcbo], where so-called polarized CBO [@bungert2022polarized] is implemented. The current Python package is a complete rewrite of the latter implementation.

For Julia, PSO and SA methods are, among others, implemented in [@mogensen2018optim;@mejia2022metaheuristics;@Bergmann2022]. Similarly, one of the authors provided the first specific Julia implementation of CBO [@Bailo_consensus]. However, the current version of the package deviates from the previous implementation and is more closely oriented toward the Python implementation.
We summarize the motivation and main features of the packages in what follows.

- Provide a lightweight, easy-to-understand, -use and -extend implementation of CBO together with several of its variants. These include CBO with mini-batching [@carrillo2021consensus], polarized CBO [@bungert2022polarized], CBO with memory effects [@grassi2020particle;@riedl2022leveraging], and CBS [@carrillo2022consensus]. The implementation relies on ...
- Provide a torch- and tensorflow-like usage style, in which the optimization is advanced explicitly via a `step` method (see the code sketch after this list).
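The intended step-based usage pattern could look as follows. This is a sketch only: the module path `cbx.dynamics`, the class name `CBO`, and the constructor signature are illustrative assumptions and may differ from the released API.

```python
import numpy as np
from cbx.dynamics import CBO  # assumed module path and class name

# objective, evaluated row-wise on an array of particle positions
def f(x):
    return np.linalg.norm(x, axis=-1) ** 2

dyn = CBO(f, d=2, N=50)  # assumed signature: dimension d, ensemble size N
for _ in range(100):     # explicit optimization loop, as in torch training code
    dyn.step()           # one CBO update of the whole particle ensemble
```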

# Mathematical background

CBO methods use a finite number of agents $X^1,\dots,X^N$ to explore the domain and to form a global consensus about the location of the minimizer $x^*$ as time passes. They are described through a system of stochastic differential equations (SDEs), expressed in Itô's form as
$$
dX^i_t = -\lambda (X^i_t-x_\alpha(\widehat\rho_t^N)) dt + \sigma D(X^i_t-x_\alpha(\widehat\rho_t^N)) dB^i_t,
$$
where $\alpha$, $\lambda$ and $\sigma$ are user-specified parameters, $D(\cdot)$ encodes the diffusion type, typically either $\|\cdot\|_2\,\mathrm{Id}$ (isotropic diffusion) or $\mathrm{diag}(\cdot)$ (anisotropic diffusion), $((B^i_t)_{t\geq0})_{i=1,\dots,N}$ denote independent standard Brownian motions, and where $x_\alpha(\widehat\rho_t^N)$ denotes the consensus point, a suitably weighted average of the positions of the particles, which is computed as
$$
x_\alpha(\widehat\rho_t^N) = \frac{1}{\sum_{i=1}^N \omega_\alpha(X^i_t)} \sum_{i=1}^N X^i_t\omega_\alpha(X^i_t), \quad\text{ with }\quad \omega_\alpha(x) = \mathrm{exp}(-\alpha f(x)).
$$
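In a numerical implementation, the weights $\omega_\alpha$ should be computed with a shifted exponential (a log-sum-exp trick), since $\mathrm{exp}(-\alpha f(x))$ under- or overflows already for moderate values of $\alpha$. A minimal NumPy sketch of this computation (our own illustrative code, not the packages' internal implementation):

```python
import numpy as np

def consensus_point(x, f, alpha):
    """Consensus point x_alpha for particle positions x of shape (N, d).

    Subtracting min(alpha * f) cancels in the quotient, so x_alpha is
    unchanged, but all exponents stay <= 0, avoiding overflow for large alpha.
    """
    energies = alpha * np.asarray(f(x))             # shape (N,)
    weights = np.exp(-(energies - energies.min()))  # omega_alpha up to a constant
    return weights @ x / weights.sum()              # shape (d,)
```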
Due to the highly complex behavior of the above SDE system, a theoretical convergence analysis is not conducted directly on it, but on its macroscopic mean-field limit (infinite-particle limit) [@huang2021MFLCBO], which can be described by a nonlinear nonlocal Fokker-Planck equation [@pinnau2017consensus;@carrillo2018analytical;@carrillo2021consensus;@fornasier2021consensus;@fornasier2021convergence]. The implemented CBO code originates from a simple Euler-Maruyama time discretization of the above SDE system, for which a convergence statement is available in [@fornasier2021consensus].
Similar analysis techniques have further yielded theoretical convergence guarantees for PSO [@qiu2022PSOconvergence].
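For concreteness, one Euler-Maruyama step of the above SDE system with isotropic diffusion $D(v) = \|v\|_2\,\mathrm{Id}$ could be sketched as follows (again our own illustrative code, reusing the `consensus_point` helper from above, not the packages' implementation):

```python
import numpy as np

def cbo_step(x, f, lamda=1.0, sigma=1.0, alpha=30.0, dt=0.01, rng=None):
    """One Euler-Maruyama step of isotropic CBO for particles x of shape (N, d)."""
    rng = np.random.default_rng() if rng is None else rng
    drift = x - consensus_point(x, f, alpha)  # X^i - x_alpha for all particles
    noise = rng.standard_normal(x.shape)      # standard normal increments
    # isotropic diffusion D(v) = ||v||_2 * Id: each particle's noise is scaled
    # by its distance to the consensus point
    scale = np.linalg.norm(drift, axis=-1, keepdims=True)
    return x - lamda * drift * dt + sigma * scale * np.sqrt(dt) * noise

# usage: 50 particles in 2d concentrate near the global minimizer (1, 1)
f = lambda z: np.linalg.norm(z - 1.0, axis=-1) ** 2
x = np.random.default_rng(seed=0).standard_normal((50, 2))
for _ in range(500):
    x = cbo_step(x, f)
```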

# Application areas of CBX


As of now, CBX methods have been deployed in a variety of different settings and for different purposes, such as for solving constrained optimization problems [@fornasier2020consensus_sphere_convergence;@borghi2021constrained], multi-objective optimization problems [@borghi2022adaptive;@klamroth2022consensus], saddle point problems [@huang2022consensus] and federated learning tasks [@carrillo2023fedcbo], for adversarial training [], or for sampling [@carrillo2022consensus].
In addition, recent work [@riedl2023gradient] establishes a connection between CBO and stochastic gradient descent-type methods, suggesting a fundamental link of theoretical interest between derivative-free and gradient-based methods.

# Acknowledgements

mention Lorentz Centre in Leiden

# References
