/*
 * Copyright (c) 2007 - 2015 Joseph Gaeddert
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "autotest/autotest.h"
#include "liquid.h"

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include <getopt.h>

#include "liquid.h"

//
// AUTOTEST: Find minimum of Rosenbrock function, should be [1 1 1 ...]
//
autotest_gradsearch_rosenbrock()37 void autotest_gradsearch_rosenbrock()
38 {
39 float tol = 1e-2f; // error tolerance
40 unsigned int num_parameters = 6; // dimensionality of search (minimum 2)
41 unsigned int num_iterations = 4000; // number of iterations to run
42
43 // initialize vector for optimization
44 float v_opt[num_parameters];
45 unsigned int i;
46 for (i=0; i<num_parameters; i++)
47 v_opt[i] = 0.0f;
48
49 // create gradsearch object
50 gradsearch gs = gradsearch_create(NULL,
51 v_opt,
52 num_parameters,
53 liquid_rosenbrock,
54 LIQUID_OPTIM_MINIMIZE);
55
56 #if 0
57 // execute search
58 float u_opt = gradsearch_execute(gs, num_iterations, -1e-6f);
59 #else
60 // execute search one iteration at a time
61 unsigned int d=1;
62 for (i=0; i<num_iterations; i++) {
63 gradsearch_step(gs);
64
65 // periodically print updates
66 if (liquid_autotest_verbose) {
67 if (((i+1)%d)==0 || i==0 || i == num_iterations-1) {
68 printf("%5u: ", i+1);
69 gradsearch_print(gs);
70
71 if ((i+1)==10*d) d*=10;
72 }
73 }
74 }
75 #endif
76
77 // destroy gradient descent search object
78 gradsearch_destroy(gs);
79
80 // test results, optimum at [1, 1, 1, ... 1];
81 for (i=0; i<num_parameters; i++)
82 CONTEND_DELTA(v_opt[i], 1.0f, tol);
83
84 // test value of utility (should be nearly 0)
85 CONTEND_DELTA( liquid_rosenbrock(NULL, v_opt, num_parameters), 0.0f, tol );
86 }

//
// AUTOTEST: Find maximum of: exp{ -sum{ (v[i]-1)^2/sigma_i^2 } }, should be [1 1 1 ...]
//

// test utility function
utility_max_autotest(void * _userdata,float * _v,unsigned int _n)93 float utility_max_autotest(void * _userdata,
94 float * _v,
95 unsigned int _n)
96 {
97 if (_n == 0) {
98 fprintf(stderr,"error: liquid_invgauss(), input vector length cannot be zero\n");
99 exit(1);
100 }
101
102 float t = 0.0f;
103 float sigma = 1.0f;
104 unsigned int i;
105 for (i=0; i<_n; i++) {
106 t += (_v[i]-1.0f)*(_v[i]-1.0f) / (sigma*sigma);
107
108 // increase variance along this dimension
109 sigma *= 1.5f;
110 }
111
112 return expf(-t);
113 }
autotest_gradsearch_maxutility()115 void autotest_gradsearch_maxutility()
116 {
117 float tol = 1e-2f; // error tolerance
118 unsigned int num_parameters = 6; // dimensionality of search (minimum 2)
119 unsigned int num_iterations = 4000; // number of iterations to run
120
121 // initialize vector for optimization
122 float v_opt[num_parameters];
123 unsigned int i;
124 for (i=0; i<num_parameters; i++)
125 v_opt[i] = 0.0f;
126
127 // create gradsearch object
128 gradsearch gs = gradsearch_create(NULL,
129 v_opt,
130 num_parameters,
131 utility_max_autotest,
132 LIQUID_OPTIM_MAXIMIZE);
133
134 // execute search one iteration at a time
135 unsigned int d=1;
136 for (i=0; i<num_iterations; i++) {
137 gradsearch_step(gs);
138
139 // periodically print updates
140 if (liquid_autotest_verbose) {
141 if (((i+1)%d)==0 || i==0 || i == num_iterations-1) {
142 printf("%5u: ", i+1);
143 gradsearch_print(gs);
144
145 if ((i+1)==10*d) d*=10;
146 }
147 }
148 }
149
150 // destroy gradient descent search object
151 gradsearch_destroy(gs);
152
153 // test results, optimum at [1, 1, 1, ... 1];
154 for (i=0; i<num_parameters; i++)
155 CONTEND_DELTA(v_opt[i], 1.0f, tol);
156
157 // test value of utility (should be nearly 1)
158 CONTEND_DELTA( utility_max_autotest(NULL, v_opt, num_parameters), 1.0f, tol );
159 }

