from . import IOHprofiler as IOH
from .IOH_Utils import runParallelFunction
from .IOH_logger import IOH_logger

import pkg_resources, operator
import numpy as np

from pathlib import Path
from functools import partial
from itertools import product
from collections.abc import Iterable

# Mapping from BBOB function ID (1-24) to the corresponding problem class.
bbob_fid_dict = {
    1: IOH.Sphere, 2: IOH.Ellipsoid, 3: IOH.Rastrigin, 4: IOH.Bueche_Rastrigin,
    5: IOH.Linear_Slope, 6: IOH.Attractive_Sector, 7: IOH.Step_Ellipsoid,
    8: IOH.Rosenbrock, 9: IOH.Rosenbrock_Rotated, 10: IOH.Ellipsoid_Rotated,
    11: IOH.Discus, 12: IOH.Bent_Cigar, 13: IOH.Sharp_Ridge, 14: IOH.Different_Powers,
    15: IOH.Rastrigin_Rotated, 16: IOH.Weierstrass, 17: IOH.Schaffers10,
    18: IOH.Schaffers1000, 19: IOH.Griewank_RosenBrock, 20: IOH.Schwefel,
    21: IOH.Gallagher101, 22: IOH.Gallagher21, 23: IOH.Katsuura, 24: IOH.Lunacek_Bi_Rastrigin
}

# Mapping from PBO function ID (1-23) to the corresponding problem class.
pbo_fid_dict = {
    1: IOH.OneMax, 2: IOH.LeadingOnes, 3: IOH.Linear, 4: IOH.OneMax_Dummy1,
    5: IOH.OneMax_Dummy2, 6: IOH.OneMax_Neutrality, 7: IOH.OneMax_Epistasis,
    8: IOH.OneMax_Ruggedness1, 9: IOH.OneMax_Ruggedness2, 10: IOH.OneMax_Ruggedness3,
    11: IOH.LeadingOnes_Dummy1, 12: IOH.LeadingOnes_Dummy2, 13: IOH.LeadingOnes_Neutrality,
    14: IOH.LeadingOnes_Epistasis, 15: IOH.LeadingOnes_Ruggedness1,
    16: IOH.LeadingOnes_Ruggedness2, 17: IOH.LeadingOnes_Ruggedness3, 18: IOH.LABS,
    19: IOH.MIS, 20: IOH.Ising_Ring, 21: IOH.Ising_Torus, 22: IOH.Ising_Triangular,
    23: IOH.NQueens
}


class IOH_function():
    '''A wrapper around the functions from the IOHexperimenter
    '''

    def __init__(self, fid, dim, iid, target_precision=0, suite="BBOB"):
        '''Instantiate a problem based on its function ID, dimension, instance and suite

        Parameters
        ----------
        fid:
            The function ID of the problem in the suite, or the name of the function as string
        dim:
            The dimension (number of variables) of the problem
        iid:
            The instance ID of the problem
        target_precision:
            Optional, how close to the optimum the problem is considered 'solved'
        suite:
            Which suite the problem is from. Either 'BBOB' or 'PBO'. Only used if fid is an integer

        '''
        if isinstance(fid, int):
            if suite == "BBOB":
                self.f = bbob_fid_dict[fid](iid, dim)
            elif suite == "PBO":
                # fids 21 (Ising_Torus) and 23 (NQueens) are defined on square grids only
                if fid in [21, 23]:
                    if not np.sqrt(dim).is_integer():
                        raise Exception("For this function, the dimension needs to be a perfect square!")
                self.f = pbo_fid_dict[fid](iid, dim)
            else:
                raise Exception("This suite is not yet supported")
        else:
            if fid in ["Ising_2D", "NQueens"]:
                if not np.sqrt(dim).is_integer():
                    raise Exception("For this function, the dimension needs to be a perfect square!")
            # Look the class up by name instead of building code with exec():
            # safer (no code injection via fid) and raises a clear AttributeError
            # for unknown function names.
            self.f = getattr(IOH, fid)(iid, dim)

        self.f.reset_problem()
        self.precision = target_precision
        self.logger = None
        self.track_oob = False
        self.suite = suite
        self.maximization = (self.suite == "PBO")
        self.y_comparison = operator.gt if self.maximization else operator.lt
        self.xopt = None
        # (maximization - 0.5) * -inf: -inf when maximizing, +inf when minimizing
        self.yopt = (self.maximization - 0.5) * -np.inf

    def __call__(self, x):
        '''Evaluates the function in point x and deals with logging if needed

        Parameters
        ----------
        x:
            The point to evaluate

        Returns
        ------
        The value f(x)
        '''
        y = self.f.evaluate(x)
        if self.y_comparison(y, self.yopt):
            self.yopt = y
            self.xopt = x
        if self.track_oob:
            self.oob += any(x < self.lowerbound) or any(x > self.upperbound)
        if self.logger is not None:
            self.logger.process_parameters()
            if self.f.IOHprofiler_get_problem_type() == "bbob":
                self.logger.process_evaluation(self.f.loggerCOCOInfo())
            else:
                self.logger.process_evaluation(self.f.loggerInfo())
        return y

    @property
    def final_target_hit(self):
        '''Returns whether the target has been reached

        Returns
        ------
        A boolean indicating whether the optimal value has been reached within the
        pre-defined precision (default 0)
        '''
        if self.f.IOHprofiler_get_problem_type() == "bbob":
            return self.best_so_far_precision <= self.precision
        else:
            return self.f.IOHprofiler_get_best_so_far_raw_objectives()[0] >= self.f.IOHprofiler_get_optimal()[
                0] - self.precision

    @property
    def evaluations(self):
        '''Returns number of evaluations performed so far

        Returns
        ------
        Number of evaluations performed so far
        '''
        return self.f.IOHprofiler_get_evaluations()

    @property
    def best_so_far_precision(self):
        '''Gets the best-so-far raw function value. Do NOT use directly in
        the optimization algorithm, since that would break the black-box assumption!
        instead, use best_so_far_fvalue.

        Returns
        ------
        The raw (precision in case of bbob, untransformed objective in case of PBO) objective value
        '''
        # NOTE(review): the problem type is compared against "PBO" here but
        # against lowercase "bbob" elsewhere in this class — confirm the
        # casing the underlying library actually returns.
        if self.f.IOHprofiler_get_problem_type() == "PBO":
            return self.f.IOHprofiler_get_best_so_far_raw_objectives()[0]
        return self.yopt - self.get_target(True)

    @property
    def best_so_far_variables(self):
        '''Gets the best variables evaluated so far

        Returns
        ------
        The best-so-far variables
        '''
        return self.xopt

    @property
    def best_so_far_fvalue(self):
        '''Gets the best-so-far function value as seen by the algorithm.

        Returns
        ------
        The best-so-far objective value
        '''
        return self.yopt

    @property
    def number_of_variables(self):
        '''Returns the number of variables (dimension) of the problem

        Returns
        ------
        The number of variables (dimension) of the problem
        '''
        return self.f.IOHprofiler_get_number_of_variables()

    @property
    def upperbound(self):
        '''Returns the upper bound of the search space

        Returns
        ------
        A vector containing the upper bound per coordinate
        '''
        return np.array(self.f.IOHprofiler_get_upperbound())

    @property
    def lowerbound(self):
        '''Returns the lower bound of the search space

        Returns
        ------
        A vector containing the lower bound per coordinate
        '''
        return np.array(self.f.IOHprofiler_get_lowerbound())

    @property
    def number_out_of_bounds(self):
        '''Returns the number of times an out-of-bounds solution was evaluated
        This is only tracked if the parameter 'track_oob' is set to True on initialization of this object

        Returns
        ------
        The number of out-of-bounds solutions evaluated
        '''
        if self.track_oob:
            return self.oob
        else:
            return np.nan

    def add_logger(self, logger):
        '''Adds an observer to the problem

        Parameters
        ----------
        logger:
            A logger object (of class IOH_logger) with which to track this problem.
        '''
        if not isinstance(logger, IOH_logger):
            raise TypeError

        logger.track_problem(
            self.f.IOHprofiler_get_problem_id(), self.f.IOHprofiler_get_number_of_variables(),
            self.f.IOHprofiler_get_instance_id(), self.f.IOHprofiler_get_problem_name(),
            self.f.IOHprofiler_get_optimization_type(), self.suite
        )
        self.logger = logger

    def clear_logger(self):
        '''Clears the logger: finishes writing info-files and closes file connections
        '''
        if self.logger is not None:
            self.logger.clear_logger()

    def get_target(self, raw=False):
        '''Returns the target value to reach

        Parameters
        ----------
        raw:
            Whether or not to add the defined precision to the target
        Returns
        ------
        Returns the target value of the function
        '''
        target_raw = self.f.IOHprofiler_get_optimal()[0]
        if raw:
            return target_raw
        return self.precision + target_raw

    def reset(self):
        '''Resets the internal problem. This clears all information about previous evaluations.
        '''
        self.f.reset_problem()
        self.yopt = (self.maximization - 0.5) * -np.inf
        self.xopt = None
        if self.logger is not None:
            self.logger.track_problem(self.f.IOHprofiler_get_problem_id(), self.f.IOHprofiler_get_number_of_variables(),
                                      self.f.IOHprofiler_get_instance_id(), self.f.IOHprofiler_get_problem_name(),
                                      self.f.IOHprofiler_get_optimization_type(), self.suite)

    def enable_oob_tracking(self):
        '''Enables the tracking of number of out-of-bounds points evaluated.
        Can be accessed through property 'number_out_of_bounds'
        '''
        self.track_oob = True
        self.oob = 0


class custom_IOH_function(IOH_function):
    '''A wrapper to turn any python function into an IOH_function
    '''

    def __init__(self, internal_eval, fname, dim, fid=0, iid=0, maximization=False, suite="No Suite", upperbound=5,
                 lowerbound=-5):
        '''Convert a regular function into an IOH_function, allowing for easy use with other IOHexperimenter functions, such as IOH_logger

        Parameters
        ----------
        internal_eval:
            The function which will be evaluated. Should take only a vector as its arguments
        fname:
            The name of the problem
        dim:
            The dimension (number of variables) of the problem
        fid:
            A numerical identifier for the problem
        iid:
            The instance ID of the problem
        maximization:
            Boolean indicating whether to maximize (True) or minimize (False). Defaults to False
        suite:
            Optional, indicating which suite of functions this problem is from
        upperbound:
            The upperbound of the problem. Either one value (same for each dimension) or a vector of length dim
        lowerbound:
            The lowerbound of the problem. Either one value (same for each dimension) or a vector of length dim

        '''
        self.internal_eval = internal_eval
        self.maximization = maximization
        self.yopt = (self.maximization - 0.5) * -np.inf
        # Initialize xopt so best_so_far_variables is safe to query before the
        # first evaluation (it previously raised AttributeError).
        self.xopt = None
        self.evals = 0
        self.fname = fname
        self.dim = dim
        self.fid = fid
        self.iid = iid
        self.suite = suite
        if isinstance(upperbound, Iterable) and len(upperbound) == dim:
            self.ub = np.array(upperbound)
        else:
            self.ub = np.array(dim * [upperbound])
        if isinstance(lowerbound, Iterable) and len(lowerbound) == dim:
            self.lb = np.array(lowerbound)
        else:
            self.lb = np.array(dim * [lowerbound])
        self.logger = None
        self.y_comparison = operator.gt if self.maximization else operator.lt
        self.track_oob = False

    def __call__(self, x):
        '''Evaluates the wrapped function in point x and deals with logging if needed

        Parameters
        ----------
        x:
            The point to evaluate

        Returns
        ------
        The value f(x)
        '''
        y = self.internal_eval(x)
        self.evals += 1
        if self.y_comparison(y, self.yopt):
            self.xopt = x
            self.yopt = y
        if self.track_oob:
            self.oob += any(x < self.lb) or any(x > self.ub)
        if self.logger is not None:
            self.logger.process_parameters()
            self.logger.process_evaluation((self.evaluations, y, self.yopt, y, self.yopt))
        return y

    @property
    def final_target_hit(self):
        # No known optimum for arbitrary user functions, so the target is
        # never considered reached.
        return False

    @property
    def evaluations(self):
        return self.evals

    @property
    def best_so_far_precision(self):
        return self.yopt

    @property
    def number_of_variables(self):
        return self.dim

    @property
    def upperbound(self):
        return self.ub

    @property
    def lowerbound(self):
        return self.lb

    def add_logger(self, logger):
        if not isinstance(logger, IOH_logger):
            raise TypeError
        logger.track_problem(self.fid, self.dim,
                             self.iid, self.fname,
                             self.maximization, self.suite)
        self.logger = logger

    def get_target(self, raw=False):
        # Unknown optimum: +inf when maximizing, -inf when minimizing.
        return (self.maximization - 0.5) * np.inf

    def reset(self):
        self.yopt = (self.maximization - 0.5) * -np.inf
        self.evals = 0
        self.xopt = None
        if self.logger is not None:
            self.logger.track_problem(self.fid, self.dim, self.iid, self.fname,
                                      self.maximization, self.suite)


class W_model_function(IOH_function):
    '''A wrapper around the W-model functions from the IOHexperimenter.
    '''

    def __init__(self, base_function="OneMax", iid=0, dim=16, dummy=0, epistasis=0,
                 neutrality=0, ruggedness=0):
        '''Instantiate a problem based on its function ID, dimension, instance and suite

        Parameters
        ----------
        base_function:
            The base function on which the W-model should be applied. Currently only OneMax and LeadingOnes are supported
        dim:
            The dimension (number of variables) of the problem
        iid:
            The instance ID of the problem
        dummy:
            Float between 0 and 1, fraction of valid bits.
        epistasis:
            size of sub-string for epistasis
        neutrality:
            size of sub-string for neutrality
        ruggedness:
            gamma for ruggedness layer
        '''
        if base_function == "OneMax":
            self.f = IOH.W_Model_OneMax(iid, dim)
        elif base_function == "LeadingOnes":
            self.f = IOH.W_Model_LeadingOnes(iid, dim)
        else:
            # Fail fast with a clear message instead of an AttributeError on
            # the undefined self.f below.
            raise Exception("Only 'OneMax' and 'LeadingOnes' are supported as base functions")
        assert epistasis <= dim, "Epistasis has to be less than or equal to dimension"
        assert neutrality <= dim, "Neutrality has to be less than or equal to dimension"
        assert ruggedness <= dim ** 2, "Ruggedness has to be less than or equal to dimension squared"
        assert dummy <= 1 and dummy >= 0, "Dummy variable fraction has to be in [0,1]"
        self.f.set_w_setting(dummy, epistasis, neutrality, ruggedness)
        self.f.reset_problem()
        self.precision = 0
        self.logger = None
        self.track_oob = False
        self.suite = "W_model"
        self.maximization = True
        self.y_comparison = operator.gt
        self.xopt = None
        self.yopt = -np.inf