/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 * SimulatedAnnealing.java
 * Copyright (C) 2004 University of Waikato, Hamilton, New Zealand
 */
23
package weka.classifiers.bayes.net.search.global;
25
import weka.classifiers.bayes.BayesNet;
26
import weka.core.Instances;
27
import weka.core.Option;
28
import weka.core.TechnicalInformation;
29
import weka.core.TechnicalInformation.Type;
30
import weka.core.TechnicalInformation.Field;
31
import weka.core.TechnicalInformationHandler;
32
import weka.core.Utils;
34
import java.util.Enumeration;
35
import java.util.Random;
36
import java.util.Vector;
39
<!-- globalinfo-start -->
40
* This Bayes Network learning algorithm uses the general purpose search method of simulated annealing to find a well scoring network structure.<br/>
42
* For more information see:<br/>
44
* R.R. Bouckaert (1995). Bayesian Belief Networks: from Construction to Inference. Utrecht, Netherlands.
46
<!-- globalinfo-end -->
48
<!-- technical-bibtex-start -->
51
* @phdthesis{Bouckaert1995,
52
* address = {Utrecht, Netherlands},
53
* author = {R.R. Bouckaert},
54
* institution = {University of Utrecht},
55
* title = {Bayesian Belief Networks: from Construction to Inference},
60
<!-- technical-bibtex-end -->
62
<!-- options-start -->
63
* Valid options are: <p/>
65
* <pre> -A <float>
66
* Start temperature</pre>
68
* <pre> -U <integer>
69
* Number of runs</pre>
71
* <pre> -D <float>
72
* Delta temperature</pre>
74
* <pre> -R <seed>
75
* Random number seed</pre>
78
* Applies a Markov Blanket correction to the network structure,
79
* after a network structure is learned. This ensures that all
80
* nodes in the network are part of the Markov blanket of the
81
* classifier node.</pre>
83
* <pre> -S [LOO-CV|k-Fold-CV|Cumulative-CV]
84
* Score type (LOO-CV,k-Fold-CV,Cumulative-CV)</pre>
87
* Use probabilistic or 0/1 scoring.
88
* (default probabilistic scoring)</pre>
92
* @author Remco Bouckaert (rrb@xm.co.nz)
93
* @version $Revision: 1.5 $
95
public class SimulatedAnnealing
96
extends GlobalScoreSearchAlgorithm
97
implements TechnicalInformationHandler {
99
/** for serialization */
100
static final long serialVersionUID = -5482721887881010916L;
102
/** start temperature **/
103
double m_fTStart = 10;
105
/** change in temperature at every run **/
106
double m_fDelta = 0.999;
108
/** number of runs **/
111
/** use the arc reversal operator **/
112
boolean m_bUseArcReversal = false;
114
/** random number seed **/
117
/** random number generator **/
121
* Returns an instance of a TechnicalInformation object, containing
122
* detailed information about the technical background of this class,
123
* e.g., paper reference or book this class is based on.
125
* @return the technical information about this class
127
public TechnicalInformation getTechnicalInformation() {
128
TechnicalInformation result;
130
result = new TechnicalInformation(Type.PHDTHESIS);
131
result.setValue(Field.AUTHOR, "R.R. Bouckaert");
132
result.setValue(Field.YEAR, "1995");
133
result.setValue(Field.TITLE, "Bayesian Belief Networks: from Construction to Inference");
134
result.setValue(Field.INSTITUTION, "University of Utrecht");
135
result.setValue(Field.ADDRESS, "Utrecht, Netherlands");
142
* @param bayesNet the bayes net to use
143
* @param instances the data to use
144
* @throws Exception if something goes wrong
146
public void search (BayesNet bayesNet, Instances instances) throws Exception {
147
m_random = new Random(m_nSeed);
149
// determine base scores
150
double fCurrentScore = calcScore(bayesNet);
152
// keep track of best scoring network
153
double fBestScore = fCurrentScore;
154
BayesNet bestBayesNet = new BayesNet();
155
bestBayesNet.m_Instances = instances;
156
bestBayesNet.initStructure();
157
copyParentSets(bestBayesNet, bayesNet);
159
double fTemp = m_fTStart;
160
for (int iRun = 0; iRun < m_nRuns; iRun++) {
161
boolean bRunSucces = false;
162
double fDeltaScore = 0.0;
163
while (!bRunSucces) {
164
// pick two nodes at random
165
int iTailNode = Math.abs(m_random.nextInt()) % instances.numAttributes();
166
int iHeadNode = Math.abs(m_random.nextInt()) % instances.numAttributes();
167
while (iTailNode == iHeadNode) {
168
iHeadNode = Math.abs(m_random.nextInt()) % instances.numAttributes();
170
if (isArc(bayesNet, iHeadNode, iTailNode)) {
172
// either try a delete
173
bayesNet.getParentSet(iHeadNode).deleteParent(iTailNode, instances);
174
double fScore = calcScore(bayesNet);
175
fDeltaScore = fScore - fCurrentScore;
176
//System.out.println("Try delete " + iTailNode + "->" + iHeadNode + " dScore = " + fDeltaScore);
177
if (fTemp * Math.log((Math.abs(m_random.nextInt()) % 10000)/10000.0 + 1e-100) < fDeltaScore) {
178
//System.out.println("success!!!");
179
fCurrentScore = fScore;
182
bayesNet.getParentSet(iHeadNode).addParent(iTailNode, instances);
186
if (addArcMakesSense(bayesNet, instances, iHeadNode, iTailNode)) {
188
double fScore = calcScoreWithExtraParent(iHeadNode, iTailNode);
189
fDeltaScore = fScore - fCurrentScore;
190
//System.out.println("Try add " + iTailNode + "->" + iHeadNode + " dScore = " + fDeltaScore);
191
if (fTemp * Math.log((Math.abs(m_random.nextInt()) % 10000)/10000.0 + 1e-100) < fDeltaScore) {
192
//System.out.println("success!!!");
193
bayesNet.getParentSet(iHeadNode).addParent(iTailNode, instances);
194
fCurrentScore = fScore;
199
if (fCurrentScore > fBestScore) {
200
copyParentSets(bestBayesNet, bayesNet);
202
fTemp = fTemp * m_fDelta;
205
copyParentSets(bayesNet, bestBayesNet);
208
/** CopyParentSets copies parent sets of source to dest BayesNet
209
* @param dest destination network
210
* @param source source network
212
void copyParentSets(BayesNet dest, BayesNet source) {
213
int nNodes = source.getNrOfNodes();
214
// clear parent set first
215
for (int iNode = 0; iNode < nNodes; iNode++) {
216
dest.getParentSet(iNode).copy(source.getParentSet(iNode));
223
public double getDelta() {
230
public double getTStart() {
237
public int getRuns() {
243
* @param fDelta The m_fDelta to set
245
public void setDelta(double fDelta) {
250
* Sets the m_fTStart.
251
* @param fTStart The m_fTStart to set
253
public void setTStart(double fTStart) {
259
* @param nRuns The m_nRuns to set
261
public void setRuns(int nRuns) {
266
* @return random number seed
268
public int getSeed() {
273
* Sets the random number seed
274
* @param nSeed The number of the seed to set
276
public void setSeed(int nSeed) {
281
* Returns an enumeration describing the available options.
283
* @return an enumeration of all the available options.
285
public Enumeration listOptions() {
286
Vector newVector = new Vector(3);
288
newVector.addElement(new Option("\tStart temperature", "A", 1, "-A <float>"));
289
newVector.addElement(new Option("\tNumber of runs", "U", 1, "-U <integer>"));
290
newVector.addElement(new Option("\tDelta temperature", "D", 1, "-D <float>"));
291
newVector.addElement(new Option("\tRandom number seed", "R", 1, "-R <seed>"));
293
Enumeration enu = super.listOptions();
294
while (enu.hasMoreElements()) {
295
newVector.addElement(enu.nextElement());
297
return newVector.elements();
301
* Parses a given list of options. <p/>
303
<!-- options-start -->
304
* Valid options are: <p/>
306
* <pre> -A <float>
307
* Start temperature</pre>
309
* <pre> -U <integer>
310
* Number of runs</pre>
312
* <pre> -D <float>
313
* Delta temperature</pre>
315
* <pre> -R <seed>
316
* Random number seed</pre>
319
* Applies a Markov Blanket correction to the network structure,
320
* after a network structure is learned. This ensures that all
321
* nodes in the network are part of the Markov blanket of the
322
* classifier node.</pre>
324
* <pre> -S [LOO-CV|k-Fold-CV|Cumulative-CV]
325
* Score type (LOO-CV,k-Fold-CV,Cumulative-CV)</pre>
328
* Use probabilistic or 0/1 scoring.
329
* (default probabilistic scoring)</pre>
333
* @param options the list of options as an array of strings
334
* @throws Exception if an option is not supported
336
public void setOptions(String[] options) throws Exception {
337
String sTStart = Utils.getOption('A', options);
338
if (sTStart.length() != 0) {
339
setTStart(Double.parseDouble(sTStart));
341
String sRuns = Utils.getOption('U', options);
342
if (sRuns.length() != 0) {
343
setRuns(Integer.parseInt(sRuns));
345
String sDelta = Utils.getOption('D', options);
346
if (sDelta.length() != 0) {
347
setDelta(Double.parseDouble(sDelta));
349
String sSeed = Utils.getOption('R', options);
350
if (sSeed.length() != 0) {
351
setSeed(Integer.parseInt(sSeed));
353
super.setOptions(options);
357
* Gets the current settings of the search algorithm.
359
* @return an array of strings suitable for passing to setOptions
361
public String[] getOptions() {
362
String[] superOptions = super.getOptions();
363
String[] options = new String[8 + superOptions.length];
365
options[current++] = "-A";
366
options[current++] = "" + getTStart();
368
options[current++] = "-U";
369
options[current++] = "" + getRuns();
371
options[current++] = "-D";
372
options[current++] = "" + getDelta();
374
options[current++] = "-R";
375
options[current++] = "" + getSeed();
377
// insert options from parent class
378
for (int iOption = 0; iOption < superOptions.length; iOption++) {
379
options[current++] = superOptions[iOption];
382
// Fill up rest with empty strings, not nulls!
383
while (current < options.length) {
384
options[current++] = "";
390
* This will return a string describing the classifier.
391
* @return The string.
393
public String globalInfo() {
395
"This Bayes Network learning algorithm uses the general purpose search method "
396
+ "of simulated annealing to find a well scoring network structure.\n\n"
397
+ "For more information see:\n\n"
398
+ getTechnicalInformation().toString();
402
* @return a string to describe the TStart option.
404
public String TStartTipText() {
405
return "Sets the start temperature of the simulated annealing search. "+
406
"The start temperature determines the probability that a step in the 'wrong' direction in the " +
407
"search space is accepted. The higher the temperature, the higher the probability of acceptance.";
411
* @return a string to describe the Runs option.
413
public String runsTipText() {
414
return "Sets the number of iterations to be performed by the simulated annealing search.";
418
* @return a string to describe the Delta option.
420
public String deltaTipText() {
421
return "Sets the factor with which the temperature (and thus the acceptance probability of " +
422
"steps in the wrong direction in the search space) is decreased in each iteration.";
426
* @return a string to describe the Seed option.
428
public String seedTipText() {
429
return "Initialization value for random number generator." +
430
" Setting the seed allows replicability of experiments.";
433
} // SimulatedAnnealing