-
Notifications
You must be signed in to change notification settings - Fork 32
Expand file tree
/
Copy pathcontinmax.cc
More file actions
142 lines (127 loc) · 5.83 KB
/
continmax.cc
File metadata and controls
142 lines (127 loc) · 5.83 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
/*
* moses/moses/example-progs/continmax.cc
*
* Copyright (C) 2002-2008 Novamente LLC
* All Rights Reserved
*
* Written by Moshe Looks
* Documented by Linas Vepstas, 2011
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License v3 as
* published by the Free Software Foundation and including the exceptions
* at http://opencog.org/wiki/Licenses
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program; if not, write to:
* Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "headers.h"
using std::string;
using std::vector;
using boost::lexical_cast;
// Demonstration program for the "max" optimization problem, for
// continuous variables. This is a variation of the standard
// onemax/nmax demonstration problems. Here, a scoring function is
// used that sums the squares of the values of a set of discrete
// variables. This is the "sphere" scoring function. The optimizer is
// supposed to be able to find the best solution to this function:
// namely, all of the variables should be zero. Although finding a
// solution is trivial if one applies a bit of calculus, it becomes
// a bit trickier for evolutionary algorithms, primarily because the
// need to handle continuously-valued variables makes the size of the
// search space uncountable (cardinality of the continuum), and because
// evolutionary algorithms typically do not track derivatives (which
// is how the problem becomes easy when applying calculus).
//
// XXX Currently, this doesn't really work well, or maybe at all, in
// part because the contin implementation in the field set is incomplete
// or broken or maybe both; there is a confusion between depth and arity
// in that code. (i.e. confusion between depth and breadth, between arity
// and log_2(arity), etc.) See
// https://bugs.launchpad.net/moses/+bug/908247
//
// NOTE: This is NOT a demonstration of program learning, which is what
// MOSES is designed for; rather, this a demonstration of the use of a
// certain component within MOSES, the so-called "optimizer". MOSES itself
// relies heavily on this optimizer to implement its meta-optimization
// algorithm.
//
// This program requires five arguments:
// -- an initial seed value for the random number generator
// -- the number of contin variables
// -- the population size
// -- the maximum number of generations to run.
// -- the number that is -log_2(epsilon) where epsilon is the smallest
// distinction between continuous variables that will be drawn.
//
// XXX todo -- finish documentation to make it look more like the
// onemax/nmax example programs.
// Demonstration driver: optimize the "sphere" scoring function over a
// set of contin (continuous) variables.  See the file-level comments
// above for background, the argument list, and known limitations.
//
// Returns 0 on normal completion (implicit for main).
int main(int argc, char** argv)
{
    // Tell the system logger to print detailed debugging messages to
    // stdout.  This will let us watch what the optimizer is doing.
    // Set to Logger::WARN to show only warnings and errors.
    logger() = Logger("demo.log");
    logger().set_level(Logger::FINE);
    logger().set_print_to_stdout_flag(true);

    // We also need to declare a specific logger for the algo.
    // This one uses the same system logger() above, and writes all
    // messages at the "debug" level.  This allows the main loop of the
    // algo to be traced.
    cout_log_best_and_gen mlogger;

    // Parse program arguments.  optargs handles the common first four
    // (seed, length, popsize, max-gens); the fifth, <depth>, is
    // specific to this demo and is read directly from argv.
    vector<string> add_args{"<depth>"};
    optargs args(argc, argv, add_args);
    int depth = lexical_cast<int>(argv[5]);

    // Initialize random number generator (from the first argument
    // given to the program).
    randGen().seed(args.rand_seed);

    // Create a set of "fields".  Each field is a contin variable.
    /*field_set fs(field_set::spec(field_set::contin_spec(2.0,2.5,0.5,depth),
                                   args.length));*/
    field_set fs(field_set::contin_spec(0.0, 0.5, 0.5, depth), args.length);

    // Create a random initial population, printing each instance in
    // both human-readable and raw form so the starting point is visible.
    instance_set<contin_t> population(args.popsize, fs);
    for (instance& inst : population) {
        occam_randomize_contin(fs, inst);
        cout << fs.to_string(inst) << endl;
        cout << fs.to_string_raw(inst) << endl;
    }

    // Smallest distinction between contin values for this field set;
    // used below to decide when a solution is "close enough" to stop.
    contin_t epsilon = fs.contin().front().epsilon();

    // Run the optimizer.
    int num_score_evals =
        optimize(population,      // population of instances, from above.
                 args.popsize,             // num to select
                 args.popsize / 2,         // num to generate
                 args.max_gens,            // max number of generations to run
                 sphere(fs),               // ScoringPolicy
                 terminate_if_gte<contin_t>(-args.length*epsilon), // TerminationPolicy
                 //terminate_if_gte(args.length*(7-2*epsilon)*(7-2*epsilon)),
                 tournament_selection(2),  // SelectionPolicy
                 univariate(),             // StructureLearningPolicy
                 local_structure_probs_learning(), // ProbsLearningPolicy
                 replace_the_worst(),      // ReplacementPolicy
                 mlogger);

    // The logger is asynchronous, so flush its output before
    // writing to cout, else output will be garbled.
    logger().flush();

    cout << "A total of " << num_score_evals
         << " scoring function evaluations were done." << endl;

#if 0
    // Show the final population.
    // cout << "Final population:\n" << population << endl;
    cout << "The final population was:" << endl;
    // Note: the population is scored with contin_t (not int), matching
    // the instance_set<contin_t> declared above.
    instance_set<contin_t>::const_iterator it = population.begin();
    for (; it != population.end(); ++it) {
        cout << "Score: " << it->second
             << "\tindividual: " << population.fields().to_string(it->first)
             << endl;
    }
#endif
}