diff --git a/paper/nested-sampling-diagram.pdf b/paper/nested-sampling-diagram.pdf new file mode 100644 index 00000000..4f2802fa Binary files /dev/null and b/paper/nested-sampling-diagram.pdf differ diff --git a/paper/paper.bib b/paper/paper.bib new file mode 100644 index 00000000..97e3a95a --- /dev/null +++ b/paper/paper.bib @@ -0,0 +1,223 @@ +@article{skillingNestedSampling2004, + title = {Nested {Sampling}}, + volume = {735}, + url = {https://ui.adsabs.harvard.edu/abs/2004AIPC..735..395S}, + doi = {10.1063/1.1835238}, + abstract = {"The evidence Z is often the single most important number in the [Bayesian] problem and I think every effort should be devoted to calculating it" (MacKay 2003). Nested sampling does this by giving a direct estimate of the density of states. Posterior samples are an optional by-product.}, + urldate = {2021-11-25}, + author = {Skilling, John}, + month = nov, + year = {2004}, + note = {Conference Name: Bayesian Inference and Maximum Entropy Methods in Science and Engineering: 24th International Workshop on Bayesian Inference and Maximum Entropy Methods in Science and Engineering +ADS Bibcode: 2004AIPC..735..395S}, + keywords = {02.50.Tt, Inference methods}, + pages = {395--405}, +} +@article{speagleDYNESTYDynamicNested2020, + title = {{DYNESTY}: a dynamic nested sampling package for estimating {Bayesian} posteriors and evidences}, + volume = {493}, + issn = {0035-8711}, + shorttitle = {{DYNESTY}}, + url = {https://ui.adsabs.harvard.edu/abs/2020MNRAS.493.3132S}, + doi = {10.1093/mnras/staa278}, + abstract = {We present DYNESTY, a public, open-source, PYTHON package to estimate Bayesian posteriors and evidences (marginal likelihoods) using the dynamic nested sampling methods developed by Higson et al. By adaptively allocating samples based on posterior structure, dynamic nested sampling has the benefits of Markov chain Monte Carlo (MCMC) algorithms that focus exclusively on posterior estimation while retaining nested sampling's ability to estimate evidences and sample from complex, multimodal distributions. We provide an overview of nested sampling, its extension to dynamic nested sampling, the algorithmic challenges involved, and the various approaches taken to solve them in this and previous work. We then examine DYNESTY's performance on a variety of toy problems along with several astronomical applications. We find in particular problems DYNESTY can provide substantial improvements in sampling efficiency compared to popular MCMC approaches in the astronomical literature. More detailed statistical results related to nested sampling are also included in the appendix.}, + urldate = {2021-11-25}, + journal = {Monthly Notices of the Royal Astronomical Society}, + author = {Speagle, Joshua S.}, + month = apr, + year = {2020}, + note = {ADS Bibcode: 2020MNRAS.493.3132S}, + keywords = {Astrophysics - Instrumentation and Methods for Astrophysics, methods: data analysis, methods: statistical, Statistics - Computation}, + pages = {3132--3158}, +} +@techreport{chopinPropertiesNestedSampling2008, + title = {Properties of {Nested} {Sampling}}, + url = {https://ui.adsabs.harvard.edu/abs/2008arXiv0801.3887C}, + abstract = {Nested sampling is a simulation method for approximating marginal likelihoods proposed by Skilling (2006). We establish that nested sampling has an approximation error that vanishes at the standard Monte Carlo rate and that this error is asymptotically Gaussian. 
We show that the asymptotic variance of the nested sampling approximation typically grows linearly with the dimension of the parameter. We discuss the applicability and efficiency of nested sampling in realistic problems, and we compare it with two current methods for computing marginal likelihood. We propose an extension that avoids resorting to Markov chain Monte Carlo to obtain the simulated points.},
+ urldate = {2022-01-08},
+ author = {Chopin, Nicolas and Robert, Christian},
+ month = jan,
+ year = {2008},
+ note = {Publication Title: arXiv e-prints
+ADS Bibcode: 2008arXiv0801.3887C
+Type: article},
+ keywords = {Mathematics - Statistics Theory, Statistics - Computation},
+}
+@article{skillingNestedSamplingGeneral2006a,
+ title = {Nested sampling for general {Bayesian} computation},
+ volume = {1},
+ issn = {1936-0975, 1931-6690},
+ url = {https://projecteuclid.org/journals/bayesian-analysis/volume-1/issue-4/Nested-sampling-for-general-Bayesian-computation/10.1214/06-BA127.full},
+ doi = {10.1214/06-BA127},
+ abstract = {Nested sampling estimates directly how the likelihood function relates to prior mass. The evidence (alternatively the marginal likelihood, marginal density of the data, or the prior predictive) is immediately obtained by summation. It is the prime result of the computation, and is accompanied by an estimate of numerical uncertainty. Samples from the posterior distribution are an optional by-product, obtainable for any temperature. The method relies on sampling within a hard constraint on likelihood value, as opposed to the softened likelihood of annealing methods. Progress depends only on the shape of the "nested" contours of likelihood, and not on the likelihood values. This invariance (over monotonic re-labelling) allows the method to deal with a class of phase-change problems which effectively defeat thermal annealing.},
+ number = {4},
+ urldate = {2022-01-08},
+ journal = {Bayesian Analysis},
+ author = {Skilling, John},
+ month = dec,
+ year = {2006},
+ note = {Publisher: International Society for Bayesian Analysis},
+ keywords = {algorithm, annealing, Bayesian computation, evidence, marginal likelihood, Model selection, nest, phase change},
+ pages = {833--859},
+}
+@article{nealSliceSampling2003,
+ title = {Slice sampling},
+ volume = {31},
+ issn = {0090-5364, 2168-8966},
+ url = {https://projecteuclid.org/journals/annals-of-statistics/volume-31/issue-3/Slice-sampling/10.1214/aos/1056562461.full},
+ doi = {10.1214/aos/1056562461},
+ abstract = {Markov chain sampling methods that adapt to characteristics of the distribution being sampled can be constructed using the principle that one can sample from a distribution by sampling uniformly from the region under the plot of its density function. A Markov chain that converges to this uniform distribution can be constructed by alternating uniform sampling in the vertical direction with uniform sampling from the horizontal "slice" defined by the current vertical position, or more generally, with some update that leaves the uniform distribution over this slice invariant. Such "slice sampling" methods are easily implemented for univariate distributions, and can be used to sample from a multivariate distribution by updating each variable in turn. This approach is often easier to implement than Gibbs sampling and more efficient than simple Metropolis updates, due to the ability of slice sampling to adaptively choose the magnitude of changes made. It is therefore attractive for routine and automated use. 
Slice sampling methods that update all variables simultaneously are also possible. These methods can adaptively choose the magnitudes of changes made to each variable, based on the local properties of the density function. More ambitiously, such methods could potentially adapt to the dependencies between variables by constructing local quadratic approximations. Another approach is to improve sampling efficiency by suppressing random walks. This can be done for univariate slice sampling by "overrelaxation," and for multivariate slice sampling by "reflection" from the edges of the slice.}, + number = {3}, + urldate = {2022-01-08}, + journal = {The Annals of Statistics}, + author = {Neal, Radford M.}, + month = jun, + year = {2003}, + note = {Publisher: Institute of Mathematical Statistics}, + keywords = {65C05, 65C60, Adaptive methods, auxiliary variables, dynamical methods, Gibbs sampling, Markov chain Monte Carlo, Metropolis algorithm, overrelaxation}, + pages = {705--767}, +} +@article{handleyPolychordNestedSampling2015, + title = {polychord: nested sampling for cosmology.}, + volume = {450}, + issn = {0035-8711}, + shorttitle = {polychord}, + url = {https://ui.adsabs.harvard.edu/abs/2015MNRAS.450L..61H}, + doi = {10.1093/mnrasl/slv047}, + abstract = {POLYCHORD is a novel nested sampling algorithm tailored for high-dimensional parameter spaces. In addition, it can fully exploit a hierarchy of parameter speeds such as is found in COSMOMC and CAMB. It utilizes slice sampling at each iteration to sample within the hard likelihood constraint of nested sampling. It can identify and evolve separate modes of a posterior semi-independently and is parallelized using OPENMPI. POLYCHORD is available for download at http://ccpforge.cse.rl.ac.uk/gf/project/polychord/.}, + urldate = {2022-01-08}, + journal = {Monthly Notices of the Royal Astronomical Society}, + author = {Handley, W. J. and Hobson, M. P. and Lasenby, A. N.}, + month = jun, + year = {2015}, + note = {ADS Bibcode: 2015MNRAS.450L..61H}, + keywords = {Astrophysics - Cosmology and Nongalactic Astrophysics, Astrophysics - Instrumentation and Methods for Astrophysics, methods: data analysis, methods: statistical}, + pages = {L61--L65}, +} +@article{handleyPOLYCHORDNextgenerationNested2015, + title = {{POLYCHORD}: next-generation nested sampling}, + volume = {453}, + issn = {0035-8711}, + shorttitle = {{POLYCHORD}}, + url = {https://ui.adsabs.harvard.edu/abs/2015MNRAS.453.4384H}, + doi = {10.1093/mnras/stv1911}, + abstract = {POLYCHORD is a novel nested sampling algorithm tailored for high-dimensional parameter spaces. This paper coincides with the release of POLYCHORD v1.6, and provides an extensive account of the algorithm. POLYCHORD utilizes slice sampling at each iteration to sample within the hard likelihood constraint of nested sampling. It can identify and evolve separate modes of a posterior semi-independently, and is parallelized using OPENMPI. It is capable of exploiting a hierarchy of parameter speeds such as those present in COSMOMC and CAMB, and is now in use in the COSMOCHORD and MODECHORD codes. POLYCHORD is available for download from http://ccpforge.cse.rl.ac.uk/gf/project/polychord/.}, + urldate = {2022-01-08}, + journal = {Monthly Notices of the Royal Astronomical Society}, + author = {Handley, W. J. and Hobson, M. P. and Lasenby, A. 
N.}, + month = nov, + year = {2015}, + note = {ADS Bibcode: 2015MNRAS.453.4384H}, + keywords = {Astrophysics - Instrumentation and Methods for Astrophysics, methods: data analysis, methods: statistical}, + pages = {4384--4398}, +} +@article{mukherjeeNestedSamplingAlgorithm2006, + title = {A {Nested} {Sampling} {Algorithm} for {Cosmological} {Model} {Selection}}, + volume = {638}, + issn = {0004-637X}, + url = {https://ui.adsabs.harvard.edu/abs/2006ApJ...638L..51M}, + doi = {10.1086/501068}, + abstract = {The abundance of cosmological data becoming available means that a wider range of cosmological models are testable than ever before. However, an important distinction must be made between parameter fitting and model selection. While parameter fitting simply determines how well a model fits the data, model selection statistics, such as the Bayesian evidence, are now necessary to choose between these different models, and in particular to assess the need for new parameters. We implement a new evidence algorithm known as nested sampling, which combines accuracy, generality of application, and computational feasibility, and we apply it to some cosmological data sets and models. We find that a five-parameter model with a Harrison-Zel'dovich initial spectrum is currently preferred.}, + urldate = {2022-01-08}, + journal = {The Astrophysical Journal}, + author = {Mukherjee, Pia and Parkinson, David and Liddle, Andrew R.}, + month = feb, + year = {2006}, + note = {ADS Bibcode: 2006ApJ...638L..51M}, + keywords = {Astrophysics, Cosmology: Theory}, + pages = {L51--L54}, +} +@article{ferozMultimodalNestedSampling2008, + title = {Multimodal nested sampling: an efficient and robust alternative to {Markov} {Chain} {Monte} {Carlo} methods for astronomical data analyses}, + volume = {384}, + issn = {0035-8711}, + shorttitle = {Multimodal nested sampling}, + url = {https://ui.adsabs.harvard.edu/abs/2008MNRAS.384..449F}, + doi = {10.1111/j.1365-2966.2007.12353.x}, + abstract = {In performing a Bayesian analysis of astronomical data, two difficult problems often emerge. First, in estimating the parameters of some model for the data, the resulting posterior distribution may be multimodal or exhibit pronounced (curving) degeneracies, which can cause problems for traditional Markov Chain Monte Carlo (MCMC) sampling methods. Secondly, in selecting between a set of competing models, calculation of the Bayesian evidence for each model is computationally expensive using existing methods such as thermodynamic integration. The nested sampling method introduced by Skilling, has greatly reduced the computational expense of calculating evidence and also produces posterior inferences as a by-product. This method has been applied successfully in cosmological applications by Mukherjee, Parkinson \& Liddle, but their implementation was efficient only for unimodal distributions without pronounced degeneracies. Shaw, Bridges \& Hobson recently introduced a clustered nested sampling method which is significantly more efficient in sampling from multimodal posteriors and also determines the expectation and variance of the final evidence from a single run of the algorithm, hence providing a further increase in efficiency. In this paper, we build on the work of Shaw et al. 
and present three new methods for sampling and evidence evaluation from distributions that may contain multiple modes and significant degeneracies in very high dimensions; we also present an even more efficient technique for estimating the uncertainty on the evaluated evidence. These methods lead to a further substantial improvement in sampling efficiency and robustness, and are applied to two toy problems to demonstrate the accuracy and economy of the evidence calculation and parameter estimation. Finally, we discuss the use of these methods in performing Bayesian object detection in astronomical data sets, and show that they significantly outperform existing MCMC techniques. An implementation of our methods will be publicly released shortly.}, + urldate = {2021-11-25}, + journal = {Monthly Notices of the Royal Astronomical Society}, + author = {Feroz, F. and Hobson, M. P.}, + month = feb, + year = {2008}, + note = {ADS Bibcode: 2008MNRAS.384..449F}, + keywords = {Astrophysics, methods: data analysis, methods: statistical}, + pages = {449--463}, +} +@article{ferozMULTINESTEfficientRobust2009, + title = {{MULTINEST}: an efficient and robust {Bayesian} inference tool for cosmology and particle physics}, + volume = {398}, + issn = {0035-8711}, + shorttitle = {{MULTINEST}}, + url = {https://ui.adsabs.harvard.edu/abs/2009MNRAS.398.1601F}, + doi = {10.1111/j.1365-2966.2009.14548.x}, + abstract = {We present further development and the first public release of our multimodal nested sampling algorithm, called MULTINEST. This Bayesian inference tool calculates the evidence, with an associated error estimate, and produces posterior samples from distributions that may contain multiple modes and pronounced (curving) degeneracies in high dimensions. The developments presented here lead to further substantial improvements in sampling efficiency and robustness, as compared to the original algorithm presented in Feroz \& Hobson, which itself significantly outperformed existing Markov chain Monte Carlo techniques in a wide range of astrophysical inference problems. The accuracy and economy of the MULTINEST algorithm are demonstrated by application to two toy problems and to a cosmological inference problem focusing on the extension of the vanilla Λ cold dark matter model to include spatial curvature and a varying equation of state for dark energy. The MULTINEST software, which is fully parallelized using MPI and includes an interface to COSMOMC, is available at http://www.mrao.cam.ac.uk/software/multinest/. It will also be released as part of the SUPERBAYES package, for the analysis of supersymmetric theories of particle physics, at http://www.superbayes.org.}, + urldate = {2022-01-08}, + journal = {Monthly Notices of the Royal Astronomical Society}, + author = {Feroz, F. and Hobson, M. P. and Bridges, M.}, + month = oct, + year = {2009}, + note = {ADS Bibcode: 2009MNRAS.398.1601F}, + keywords = {Astrophysics, methods: data analysis, methods: statistical}, + pages = {1601--1614}, +} +@article{bezansonJuliaFreshApproach2017, + title = {Julia: {A} {Fresh} {Approach} to {Numerical} {Computing}}, + volume = {59}, + issn = {0036-1445}, + shorttitle = {Julia}, + url = {https://epubs.siam.org/doi/10.1137/141000671}, + doi = {10.1137/141000671}, + abstract = {Bridging cultures that have often been distant, Julia combines expertise from the diverse fields of computer science and computational science to create a new approach to numerical computing. 
Julia is designed to be easy and fast and questions notions generally held to be "laws of nature" by practitioners of numerical computing: High-level dynamic programs have to be slow. One must prototype in one language and then rewrite in another language for speed or deployment. There are parts of a system appropriate for the programmer, and other parts that are best left untouched as they have been built by the experts. We introduce the Julia programming language and its design---a dance between specialization and abstraction. Specialization allows for custom treatment. Multiple dispatch, a technique from computer science, picks the right algorithm for the right circumstance. Abstraction, which is what good computation is really about, recognizes what remains the same after differences are stripped away. Abstractions in mathematics are captured as code through another technique from computer science, generic programming. Julia shows that one can achieve machine performance without sacrificing human convenience.},
+ number = {1},
+ urldate = {2021-07-20},
+ journal = {SIAM Rev.},
+ author = {Bezanson, Jeff and Edelman, Alan and Karpinski, Stefan and Shah, Viral B.},
+ month = jan,
+ year = {2017},
+ note = {Publisher: Society for Industrial and Applied Mathematics},
+ keywords = {65Y05, 68N15, 97P40, Julia, numerical, parallel, scientific computing},
+ pages = {65--98},
+}
+@techreport{buchnerStatisticalTestNested2014,
+ title = {A statistical test for {Nested} {Sampling} algorithms},
+ url = {https://ui.adsabs.harvard.edu/abs/2014arXiv1407.5459B},
+ abstract = {Nested sampling is an iterative integration procedure that shrinks the prior volume towards higher likelihoods by removing a "live" point at a time. A replacement point is drawn uniformly from the prior above an ever-increasing likelihood threshold. Thus, the problem of drawing from a space above a certain likelihood value arises naturally in nested sampling, making algorithms that solve this problem a key ingredient to the nested sampling framework. If the drawn points are distributed uniformly, the removal of a point shrinks the volume in a well-understood way, and the integration of nested sampling is unbiased. In this work, I develop a statistical test to check whether this is the case. This "Shrinkage Test" is useful to verify nested sampling algorithms in a controlled environment. I apply the shrinkage test to a test-problem, and show that some existing algorithms fail to pass it due to over-optimisation. I then demonstrate that a simple algorithm can be constructed which is robust against this type of problem. This RADFRIENDS algorithm is, however, inefficient in comparison to MULTINEST.},
+ urldate = {2022-01-08},
+ author = {Buchner, Johannes},
+ month = jul,
+ year = {2014},
+ note = {Publication Title: arXiv e-prints
+ADS Bibcode: 2014arXiv1407.5459B
+Type: article},
+ keywords = {Statistics - Computation},
+}
+@techreport{buchnerCollaborativeNestedSampling2017,
+ title = {Collaborative {Nested} {Sampling}: {Big} {Data} vs. complex physical models},
+ shorttitle = {Collaborative {Nested} {Sampling}},
+ url = {https://ui.adsabs.harvard.edu/abs/2017arXiv170704476B},
+ abstract = {The data torrent unleashed by current and upcoming astronomical surveys demands scalable analysis methods. 
Many machine learning approaches scale well, but separating the instrument measurement from the physical effects of interest, dealing with variable errors, and deriving parameter uncertainties is often an after-thought. Classic forward-folding analyses with Markov Chain Monte Carlo or Nested Sampling enable parameter estimation and model comparison, even for complex and slow-to-evaluate physical models. However, these approaches require independent runs for each data set, implying an unfeasible number of model evaluations in the Big Data regime. Here I present a new algorithm, collaborative nested sampling, for deriving parameter probability distributions for each observation. Importantly, the number of physical model evaluations scales sub-linearly with the number of data sets, and no assumptions about homogeneous errors, Gaussianity, the form of the model or heterogeneity/completeness of the observations need to be made. Collaborative nested sampling has immediate application in speeding up analyses of large surveys, integral-field-unit observations, and Monte Carlo simulations.}, + urldate = {2022-01-08}, + author = {Buchner, Johannes}, + month = jul, + year = {2017}, + note = {Publication Title: arXiv e-prints +ADS Bibcode: 2017arXiv170704476B +Type: article}, + keywords = {Astrophysics - Instrumentation and Methods for Astrophysics, Physics - Data Analysis, Statistics - Computation, Statistics - Machine Learning, Statistics and Probability}, +} +@article{buchnerNestedSamplingMethods2021, + title = {Nested {Sampling} {Methods}}, + volume = {2101}, + url = {http://adsabs.harvard.edu/abs/2021arXiv210109675B}, + abstract = {Nested sampling (NS) computes parameter posterior distributions and makes Bayesian model comparison computationally feasible. Its strengths are the unsupervised navigation of complex, potentially multi-modal posteriors until a well-defined termination point. A systematic +literature review of nested sampling algorithms and variants is +presented. We focus on complete algorithms, including solutions to likelihood-restricted prior sampling. A new formulation of NS is presented, which casts the parameter space exploration as a search on a tree. Previously published ways of obtaining robust error estimates and dynamic variations of the number of live points are presented as special cases of this formulation.}, + urldate = {2021-02-10}, + journal = {arXiv e-prints}, + author = {Buchner, Johannes}, + month = jan, + year = {2021}, + keywords = {Astrophysics - Instrumentation and Methods for Astrophysics, Statistics - Computation}, + pages = {arXiv:2101.09675}, +} diff --git a/paper/paper.md b/paper/paper.md new file mode 100644 index 00000000..2338bafa --- /dev/null +++ b/paper/paper.md @@ -0,0 +1,53 @@ +--- +title: 'NestedSamplers.jl: Composable Nested Sampling in Julia' +tags: + - Julia + - statistics + - bayesian-statistics + - mcmc +authors: + - name: Miles Lucas + orcid: 0000-0001-6341-310X + affiliation: 1 + - name: Saranjeet Kaur Bhogal + orcid: 0000-0002-7038-1457 + affiliation: 2 + - name: Hong Ge + orcid: 0000-0001-9421-2677 + affiliation: 3 +affiliations: + - name: Institute for Astronomy, University of Hawaii, Honolulu, HI, USA + index: 1 + - name: Department of Statistics, Savitribai Phule Pune University, Pune, India + index: 2 + - name: University of Cambridge, Cambridge, UK + index: 3 +date: 1/8/2021 +bibliography: paper.bib +--- + +# Summary + +Nested sampling is a method for estimating the Bayesian evidence [@skillingNestedSampling2004]. 
The core of the algorithm integrates iso-likelihood "shells" in the prior space, which simultaneously produces the Bayesian evidence and weighted posterior samples. In contrast, Markov chain Monte Carlo (MCMC) methods only generate samples proportional to the posterior. Nested sampling also has a variety of appealing statistical properties, including well-defined stopping criteria, independently generated samples, the flexibility to model complex, multi-modal distributions, and direct measurements of the statistical and sampling uncertainties from a single run.
+
+The basic algorithm can be described as a quadrature estimate built from a dynamically evolving set of points in the prior space. The Bayesian evidence is the integral $Z = \int \mathcal{L}(\theta) \, \pi(\theta) \, \mathrm{d}\theta$, which nested sampling recasts as the one-dimensional integral $Z = \int_0^1 \mathcal{L}(X) \, \mathrm{d}X$ over the enclosed prior volume $X$ and estimates with a sum over discrete measurements, represented by an array of points. The nested sampling algorithm successively removes the point with the lowest likelihood and replaces it with a point of equal or higher likelihood. At each iteration, the full set of "live" points describes a volume, which has shrunk by the removal of the iso-likelihood shell around it. The volumes of these shells provide the weights in the quadrature estimate of the Bayesian evidence, $Z \approx \sum_i \mathcal{L}_i \Delta X_i$, described in @skillingNestedSampling2004.
+
+# Statement of need
+
+Nested sampling has grown immensely in popularity, due in part to its "black-box" nature, as well as the popularity of codes like MultiNest [@ferozMultimodalNestedSampling2008; @ferozMULTINESTEfficientRobust2009] in the astronomical community, where high-dimensional, multi-modal problems are commonplace. Recently, dynesty [@speagleDYNESTYDynamicNested2020] introduced an API that fully separates the two independent steps of the nested sampling algorithm: first, describing the statistical distribution of the live points, and second, likelihood-constrained sampling to replace live points. This process is shown schematically in \autoref{fig:diagram}.
+
+![A diagram showing the two sampling steps in the nested sampling algorithm. The axes represent the parameters; in this example there are two dimensions. The blue contours show the likelihood and the black points are live points. In the proposal algorithms, the dead point is gray with a dashed circle, proposed points outside the likelihood constraint are shown as gray crosses, and the accepted point is black.\label{fig:diagram}](nested-sampling-diagram.pdf)
+
+NestedSamplers.jl mimics the API of dynesty by separating these independent steps of the nested sampling algorithm. Our library makes heavy use of the multiple dispatch of the Julia programming language [@bezansonJuliaFreshApproach2017] to build a highly expressive, composable, and efficient nested sampling library. Multiple dispatch fully encapsulates the independent components of the algorithm: the code specific to each bounding distribution and proposal algorithm is never repeated, and this comes with no performance loss thanks to the just-in-time compilation of Julia code. In addition, NestedSamplers.jl implements the AbstractMCMC.jl interface, an extensible interface for statistical sampling that provides entry points for using our nested samplers in various programming contexts.
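+
+To illustrate this composability, the following is a minimal usage sketch for estimating the evidence of a two-dimensional Gaussian. The exact names shown here (the `NestedModel` and `Nested` constructors, the `Bounds` and `Proposals` submodules, and the `dlogz` convergence keyword) are indicative assumptions about the interface rather than a normative reference; the bounding distributions and proposal algorithms they select are detailed below.
+
+```julia
+using NestedSamplers
+using StatsBase: sample
+
+# Unnormalized log-likelihood of a 2D standard Gaussian
+loglike(θ) = -sum(abs2, θ) / 2
+
+# Transform from the unit cube to a uniform prior over [-5, 5]²
+prior_transform(u) = 10 .* u .- 5
+
+# `NestedModel` bundles the likelihood and prior transform (assumed name)
+model = NestedModel(loglike, prior_transform)
+
+# Compose the sampler: 2 dimensions and 500 live points, with a
+# multi-ellipsoid bounding distribution and slice-sampling proposals
+sampler = Nested(2, 500; bounds=Bounds.MultiEllipsoid, proposal=Proposals.Slice())
+
+# Run through the AbstractMCMC.jl interface until the remaining
+# evidence contribution falls below the `dlogz` tolerance
+chain, state = sample(model, sampler; dlogz=0.2)
+```
+
+Swapping the bounding distribution or proposal algorithm only requires changing the corresponding keyword argument; the rest of the inference code is unchanged.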
+
+NestedSamplers.jl currently has three bounding distributions: `NoBounds`, which represents the entire prior space [@skillingNestedSampling2004]; `Ellipsoid`, which bounds the live points in a single ellipsoid (equivalent to an iso-density contour of a multivariate Gaussian) [@mukherjeeNestedSamplingAlgorithm2006]; and `MultiEllipsoid`, which uses an optimal clustering of ellipsoids, first demonstrated by MultiNest [@ferozMultimodalNestedSampling2008; @ferozMULTINESTEfficientRobust2009]. In the future, we plan to implement the ball and cube distributions derived in @buchnerStatisticalTestNested2014 and @buchnerCollaborativeNestedSampling2017.
+
+NestedSamplers.jl has five likelihood-restricted sampling algorithms (proposal algorithms), drawing on a variety of MCMC techniques. The first is the `Rejection` algorithm, which simply generates samples from the prior and rejects those outside the likelihood constraint [@skillingNestedSampling2004]. `RWalk` and `RStagger` use a Metropolis-Hastings-like walk to evolve a sample [@skillingNestedSamplingGeneral2006a]. `RSlice` and `Slice` use slice sampling in random [@handleyPolychordNestedSampling2015; @handleyPOLYCHORDNextgenerationNested2015] or principal directions [@nealSliceSampling2003], respectively, to evolve points. In the future, we plan to support Hamiltonian slice sampling [@speagleDYNESTYDynamicNested2020], which requires gradients and Jacobians.
+
+NestedSamplers.jl currently uses a static nested sampler (integrator), where the number of live points is fixed throughout the sampling. Dynamic nested samplers allow tuning the integrator to avoid (or prefer) regions of high likelihood, which is useful in cases where posterior samples are more relevant than the Bayesian evidence estimate [@speagleDYNESTYDynamicNested2020]. In the future, we plan to implement a dynamic nested sampler which can make use of the existing bounding distributions and proposal algorithms.
+
+# Comparisons to existing software
+
+NestedSamplers.jl has many features similar to dynesty and its predecessor code, nestle. dynesty currently has more features, including a dynamic nested sampler, Hamiltonian slice sampling, and ball and cube distributions. The recent ultranest code [@buchnerNestedSamplingMethods2021] has a subset of the features of dynesty. MultiNest can be reproduced by using our `MultiEllipsoid` bounding distribution, and its exact algorithm can be matched with an appropriate `Rejection` proposal algorithm. The PolyChord algorithm can be reproduced by using the `RSlice` proposal, typically with an `Ellipsoid` bounding distribution.
+
+# References
\ No newline at end of file