1"""
2Kriging: metamodel of the Branin-Hoo function
3==============================================
4"""
5# %%
6# As a popular use case in optimization we briefly present the construction of a metamodel
7# of the Branin (also refered to as Branin-Hoo) function.
8#
9
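# %%
# For reference, the classical Branin-Hoo function is usually written as
#
# .. math::
#    f(x_1, x_2) = \left(x_2 - \frac{5.1}{4\pi^2} x_1^2 + \frac{5}{\pi} x_1 - 6\right)^2
#                  + 10 \left(1 - \frac{1}{8\pi}\right) \cos(x_1) + 10
#
# on :math:`[-5, 10] \times [0, 15]`. The `BraninModel` use case considered below works with a
# rescaled version of this function defined on the unit square :math:`[0, 1]^2`.
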
# %%
from numpy import sqrt
import openturns as ot
import openturns.viewer as otv
from openturns.usecases import branin_function
from matplotlib import pylab as plt


# %%
# We load the Branin-Hoo model from the usecases module and use the model stored in its
# `objectiveFunction` attribute:
bm = branin_function.BraninModel()
model = bm.objectiveFunction

# %%
# We represent this 2D function with isolines. We set the number of isolines to a maximum of 10
# thanks to the following `ResourceMap` key:
ot.ResourceMap.SetAsUnsignedInteger("Contour-DefaultLevelsNumber", 10)
graphBasic = model.draw([0.0, 0.0], [1.0, 1.0], [100]*2)


# %%
# We get the values of all isolines:
drawables = graphBasic.getDrawables()
levels = []
for contours in drawables:
    levels.append(contours.getLevels()[0])

# %%
# We now build fancy isolines:

# Take the first contour as the only one with multiple levels
contour = graphBasic.getDrawable(0)
# Build a range of colors
ot.ResourceMap.SetAsUnsignedInteger(
    'Drawable-DefaultPalettePhase', len(levels))
palette = ot.Drawable.BuildDefaultPalette(len(levels))
# Create the drawables list, appending each contour with its own color
drawables = list()
for i in range(len(levels)):
    contour.setLevels([levels[i]])
    # Inline the level values
    contour.setDrawLabels(True)
    # We have to copy the drawable because a Python list stores only pointers
    drawables.append(ot.Drawable(contour))

graphFineTune = ot.Graph("The exact Branin model",
                         r"$x_1$", r"$x_2$", True, '')
graphFineTune.setDrawables(drawables)  # Replace the drawables
graphFineTune.setLegendPosition("")  # Remove the legend
graphFineTune.setColors(palette)  # Add colors


# %%
# We also represent the three minima of the Branin function with orange diamonds:
sample1 = ot.Sample([bm.xexact1, bm.xexact2, bm.xexact3])
cloud1 = ot.Cloud(sample1, 'orange', 'diamond', 'First Cloud')
graphFineTune.add(cloud1)
view = otv.View(graphFineTune)

# %%
# The values of the exact model at these points are:
print(bm.objectiveFunction(sample1))

# %%
# The Branin function has a global minimum attained at three different points. We shall build a
# metamodel of this function that presents the same behaviour.


# %%
# Definition of the Kriging metamodel
# -----------------------------------
#
# We use the :class:`~openturns.KrigingAlgorithm` class to perform the kriging analysis.
# We first generate a design of experiments with LHS and store the input training points in `xdata`:
experiment = ot.LHSExperiment(ot.ComposedDistribution(
    [ot.Uniform(0.0, 1.0), ot.Uniform(0.0, 1.0)]), 28, False, True)
xdata = experiment.generate()

# %%
# We also get the output training values:
ydata = bm.objectiveFunction(xdata)


# %%
# This use case is defined in dimension 2 and we use a constant basis for the trend estimation:
dimension = bm.dim
basis = ot.ConstantBasisFactory(dimension).build()

# %%
# We choose a squared exponential covariance model in dimension 2:
covarianceModel = ot.SquaredExponential([0.1]*dimension, [1.0])

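# %%
# As a reminder (up to the exact parameterization used by OpenTURNS), the squared exponential
# kernel with scale :math:`\theta = (\theta_1, \theta_2)` and amplitude :math:`\sigma` reads
#
# .. math::
#    C(\mathbf{s}, \mathbf{t}) = \sigma^2 \exp \left( -\frac{1}{2} \sum_{i=1}^{2} \left( \frac{s_i - t_i}{\theta_i} \right)^2 \right).
#
# The values given above, :math:`\theta = (0.1, 0.1)` and :math:`\sigma = 1`, are only the starting
# point of the hyperparameter optimization performed by the kriging algorithm.
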
# %%
# We have all the components to build a kriging algorithm and run it:
algo = ot.KrigingAlgorithm(xdata, ydata, covarianceModel, basis)
algo.run()

# %%
# We get the result of the kriging analysis with:
result = algo.getResult()

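# %%
# As an optional check, the result object also exposes the optimized covariance model and the
# estimated trend coefficients; the printed values depend on the generated design:
print(result.getCovarianceModel())
print(result.getTrendCoefficients())
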
# %%
# Metamodel visualization
# -----------------------
#
# We draw the kriging metamodel of the Branin function. It is the mean of the underlying
# Gaussian process, conditioned on the training data.
metamodel = result.getMetaModel()


graphBasic = metamodel.draw([0.0, 0.0], [1.0, 1.0], [100]*2)
drawables = graphBasic.getDrawables()
levels = []
for contours in drawables:
    levels.append(contours.getLevels()[0])

# Take the first contour as the only one with multiple levels
contour = graphBasic.getDrawable(0)
# Build a range of colors
ot.ResourceMap.SetAsUnsignedInteger(
    'Drawable-DefaultPalettePhase', len(levels))
palette = ot.Drawable.BuildDefaultPalette(len(levels))
# Create the drawables list, appending each contour with its own color
drawables = list()
for i in range(len(levels)):
    contour.setLevels([levels[i]])
    # Inline the level values
    contour.setDrawLabels(True)
    # We have to copy the drawable because a Python list stores only pointers
    drawables.append(ot.Drawable(contour))

graphFineTune = ot.Graph("Branin metamodel (mean)",
                         r"$x_1$", r"$x_2$", True, '')
graphFineTune.setDrawables(drawables)
graphFineTune.setLegendPosition("")
graphFineTune.setColors(palette)

# %%
# We also represent the location of the minima of the Branin function:
sample1 = ot.Sample([bm.xexact1, bm.xexact2, bm.xexact3])
cloud1 = ot.Cloud(sample1, 'orange', 'diamond', 'First Cloud')
graphFineTune.add(cloud1)
view = otv.View(graphFineTune)

# %%
# We evaluate the metamodel at the minima locations:
print(metamodel(sample1))

# %%
# Graphically, the metamodel and the exact function look very similar. The metamodel also has three
# basins around the minima, and the value of the metamodel at each minimum location is comparable to
# the exact value of -0.979476. We have roughly two correct digits for each isoline.
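
# %%
# As a quick complementary check, we can compare the metamodel and the exact model at the three
# minima explicitly:
for i in range(sample1.getSize()):
    exactValue = bm.objectiveFunction(sample1[i])[0]
    approxValue = metamodel(sample1[i])[0]
    print("minimum %d: exact = %.6f, metamodel = %.6f, abs. error = %.2e"
          % (i, exactValue, approxValue, abs(approxValue - exactValue)))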


# %%
# Standard deviation
# ------------------
#
# We finally take a look at the standard deviation in the :math:`[0,1] \times [0,1]` domain. It may be
# seen as a measure of the error of the metamodel.

# %%
# We discretize the domain with 22 points in each dimension (N inside points and the 2 endpoints):
N = 20
inputData = ot.Box([N, N]).generate()
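
# %%
# A quick sanity check on the grid size: the `Box` design has :math:`N+2` nodes per dimension,
# hence :math:`(N+2)^2 = 484` points in dimension 2.
print(inputData.getSize())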

# %%
# We compute the conditional variance of the model and take the square root to get the standard deviation:
condCov = result.getConditionalMarginalVariance(inputData, 0)
condCovSd = sqrt(condCov)

# %%
# As we have previously done, we build contours with the following levels and labels:
levels = [0.01, 0.025, 0.050, 0.075, 0.1, 0.125, 0.150, 0.175]
labels = ['0.01', '0.025', '0.050', '0.075', '0.1', '0.125', '0.150', '0.175']
contour = ot.Contour(N+2, N+2, condCovSd)
graph = ot.Graph('', 'x', 'y', True, '')
graph.add(contour)


# %%
# We use fancy colored isolines for the contour plot:
contour = graph.getDrawable(0)
ot.ResourceMap.SetAsUnsignedInteger(
    'Drawable-DefaultPalettePhase', len(levels))
palette = ot.Drawable.BuildDefaultPalette(len(levels))
drawables = list()
for i in range(len(levels)):
    contour.setLevels([levels[i]])
    contour.setDrawLabels(True)
    drawables.append(ot.Drawable(contour))

graphFineTune = ot.Graph("Standard deviation", r"$x_1$", r"$x_2$", True, '')
graphFineTune.setDrawables(drawables)
graphFineTune.setLegendPosition("")
graphFineTune.setColors(palette)

# %%
# We superimpose the training sample:
cloud = ot.Cloud(xdata)
cloud.setPointStyle("fcircle")
cloud.setColor("red")
graphFineTune.add(cloud)
view = otv.View(graphFineTune)

# %%
# We observe that the standard deviation is small in the center of the domain, where we have enough
# data to learn the model.
# We can print the value of the variance at the first 5 training points (they all behave similarly):
print(result.getConditionalMarginalVariance(xdata, 0)[0:5])

# %%
# These values are nearly zero, which is expected since kriging interpolates the data: at a training
# point the value is known, hence no longer random, and the variance ought to be zero.
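
# %%
# A minimal sketch of this interpolation property: we evaluate the metamodel at each training input
# and compare with the corresponding training output; the residuals should be close to machine
# precision.
residuals = [abs(metamodel(xdata[i])[0] - ydata[i][0]) for i in range(xdata.getSize())]
print("max absolute residual on the training set: %.2e" % max(residuals))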

# %%
# Display all figures
plt.show()