# Copyright (c) 2016, the GPyOpt Authors
# Licensed under the BSD 3-clause license (see LICENSE.txt)

from ..core.task.cost import constant_cost_withGradients

class AcquisitionBase(object):
    """
    Base class for acquisition functions in Bayesian Optimization.

    :param model: GPyOpt class of model
    :param space: GPyOpt class of domain
    :param optimizer: optimizer of the acquisition. Should be a GPyOpt optimizer
    :param cost_withGradients: callable returning the evaluation cost of x and
        its gradient; defaults to a constant (unit) cost when None.
    """

    # Subclasses that can provide analytical acquisition gradients set this to True.
    analytical_gradient_prediction = False

    def __init__(self, model, space, optimizer, cost_withGradients=None):
        self.model = model
        self.space = space
        self.optimizer = optimizer
        # Gradients are usable only when BOTH the acquisition and the model support them.
        self.analytical_gradient_acq = (self.analytical_gradient_prediction
                                        and self.model.analytical_gradient_prediction)

        if cost_withGradients is None:
            self.cost_withGradients = constant_cost_withGradients
        else:
            self.cost_withGradients = cost_withGradients

    @staticmethod
    def fromDict(model, space, optimizer, cost_withGradients, config):
        """Build an acquisition from a config dict. Must be implemented by subclasses."""
        raise NotImplementedError()

    def _constraint_input(self, x):
        """Map x to the representation expected by the constraint indicator."""
        if self.space.model_dimensionality == self.space.objective_dimensionality:
            return x
        return self.space.zip_inputs(x)

    def acquisition_function(self, x):
        """
        Takes an acquisition and weights it so the domain and cost are taken into account.
        """
        f_acqu = self._compute_acq(x)
        cost_x, _ = self.cost_withGradients(x)
        x_z = self._constraint_input(x)
        # Negated because the optimizer minimizes; infeasible points are zeroed by the indicator.
        return -(f_acqu * self.space.indicator_constraints(x_z)) / cost_x


    def acquisition_function_withGradients(self, x):
        """
        Takes an acquisition and its gradient and weights it so the domain and cost are taken into account.
        """
        f_acqu, df_acqu = self._compute_acq_withGradients(x)
        cost_x, cost_grad_x = self.cost_withGradients(x)
        f_acq_cost = f_acqu / cost_x
        # Quotient rule for d/dx (f/c).
        df_acq_cost = (df_acqu * cost_x - f_acqu * cost_grad_x) / (cost_x**2)
        x_z = self._constraint_input(x)
        indicator = self.space.indicator_constraints(x_z)
        return -f_acq_cost * indicator, -df_acq_cost * indicator

    def optimize(self, duplicate_manager=None):
        """
        Optimizes the acquisition function (uses a flag from the model to use gradients or not).
        """
        if not self.analytical_gradient_acq:
            out = self.optimizer.optimize(f=self.acquisition_function,
                                          duplicate_manager=duplicate_manager)
        else:
            out = self.optimizer.optimize(f=self.acquisition_function,
                                          f_df=self.acquisition_function_withGradients,
                                          duplicate_manager=duplicate_manager)
        return out

    def _compute_acq(self, x):
        """Evaluate the raw acquisition at x. Must be implemented by subclasses."""
        raise NotImplementedError('')

    def _compute_acq_withGradients(self, x):
        """Evaluate the raw acquisition and its gradient at x. Must be implemented by subclasses."""
        raise NotImplementedError('')
71