% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/mlsl.R
\name{mlsl}
\alias{mlsl}
\title{Multi-level Single-linkage}
\usage{
mlsl(x0, fn, gr = NULL, lower, upper, local.method = "LBFGS",
  low.discrepancy = TRUE, nl.info = FALSE, control = list(), ...)
}
\arguments{
\item{x0}{initial point for searching the optimum.}

\item{fn}{objective function that is to be minimized.}

\item{gr}{gradient of function \code{fn}; will be calculated numerically if
not specified.}

\item{lower, upper}{lower and upper bound constraints.}

\item{local.method}{only \code{LBFGS} is available for the moment.}

\item{low.discrepancy}{logical; shall a low-discrepancy variation be used.}

\item{nl.info}{logical; shall the original NLopt info be shown.}

\item{control}{list of options, see \code{nl.opts} for help.}

\item{...}{additional arguments passed to the function.}
}
\value{
List with components:
  \item{par}{the optimal solution found so far.}
  \item{value}{the function value corresponding to \code{par}.}
  \item{iter}{number of (outer) iterations, see \code{maxeval}.}
  \item{convergence}{integer code indicating successful completion (> 0)
    or a possible error number (< 0).}
  \item{message}{character string produced by NLopt and giving additional
    information.}
}
\description{
The ``Multi-Level Single-Linkage'' (MLSL) algorithm for global optimization
searches by a sequence of local optimizations from random starting points.
A modification of MLSL is included using a low-discrepancy sequence (LDS)
instead of pseudorandom numbers.
}
\details{
MLSL is a `multistart' algorithm: it works by doing a sequence of local
optimizations (using some other local optimization algorithm) from random or
low-discrepancy starting points.  MLSL is distinguished, however, by a
`clustering' heuristic that helps it to avoid repeated searches of the same
local optima, and it has some theoretical guarantees of finding all local
optima in a finite number of local minimizations.

The local-search portion of MLSL can use any of the other algorithms in
NLopt, and in particular can use either gradient-based or derivative-free
algorithms.  For this wrapper, only the gradient-based \code{L-BFGS}
algorithm is available as the local method.
}
\note{
If you don't set a stopping tolerance for your local-optimization
algorithm, MLSL defaults to \code{ftol_rel=1e-15} and \code{xtol_rel=1e-7}
for the local searches.
}
\examples{

### Minimize the Hartmann6 function
hartmann6 <- function(x) {
    n <- length(x)
    a <- c(1.0, 1.2, 3.0, 3.2)
    A <- matrix(c(10.0,  0.05, 3.0, 17.0,
                   3.0, 10.0,  3.5,  8.0,
                  17.0, 17.0,  1.7,  0.05,
                   3.5,  0.1, 10.0, 10.0,
                   1.7,  8.0, 17.0,  0.1,
                   8.0, 14.0,  8.0, 14.0), nrow=4, ncol=6)
    B  <- matrix(c(.1312,.2329,.2348,.4047,
                   .1696,.4135,.1451,.8828,
                   .5569,.8307,.3522,.8732,
                   .0124,.3736,.2883,.5743,
                   .8283,.1004,.3047,.1091,
                   .5886,.9991,.6650,.0381), nrow=4, ncol=6)
    fun <- 0.0
    for (i in 1:4) {
        fun <- fun - a[i] * exp(-sum(A[i,]*(x-B[i,])^2))
    }
    return(fun)
}
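
## Not part of the original example: a sketch of how the control list is
## completed.  nl.opts() (see ?nl.opts) merges user-supplied options with
## the package defaults; printing the result shows the full set of
## stopping criteria that will be applied.
nl.opts(list(xtol_rel = 1e-8, maxeval = 1000))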
S <- mlsl(x0 = rep(0, 6), hartmann6, lower = rep(0,6), upper = rep(1,6),
            nl.info = TRUE, control=list(xtol_rel=1e-8, maxeval=1000))
## Number of Iterations....: 1000
## Termination conditions:
##   stopval: -Inf, xtol_rel: 1e-08, maxeval: 1000, ftol_rel: 0, ftol_abs: 0
## Number of inequality constraints:  0
## Number of equality constraints:    0
## Current value of objective function:  -3.32236801141552
## Current value of controls:
##   0.2016895 0.1500107 0.476874 0.2753324 0.3116516 0.6573005
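
## Components of the returned list, as described under `Value' (an
## illustrative addition, not part of the original example):
S$par          # best parameters found
S$value        # objective function value at S$par
S$convergence  # > 0 indicates successful completion

## The same problem with the low-discrepancy variation switched off, i.e.
## pseudorandom starting points (illustrative; the result should be close
## to the run above but may vary between runs):
S2 <- mlsl(x0 = rep(0, 6), hartmann6, lower = rep(0,6), upper = rep(1,6),
           low.discrepancy = FALSE,
           control = list(xtol_rel = 1e-8, maxeval = 1000))
S2$value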

}
\references{
A. H. G. Rinnooy Kan and G. T. Timmer, ``Stochastic global optimization
methods,'' Mathematical Programming, vol. 39, p. 27-78 (1987).

Sergei Kucherenko and Yury Sytsko, ``Application of deterministic
low-discrepancy sequences in global optimization,'' Computational
Optimization and Applications, vol. 30, p. 297-318 (2005).
}
\seealso{
\code{\link{direct}}
}
\author{
Hans W. Borchers
}