-
Notifications
You must be signed in to change notification settings - Fork 0
/
MOSBO.m
150 lines (118 loc) · 5 KB
/
MOSBO.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
%> @file "MOSBO.m"
%> @authors: SUMO Lab Team
%> @version x.x.x ($Revision: 7155 $)
%> @date $LastChangedDate: 2011-06-02 10:46:47 +0200 (Thu, 02 Jun 2011) $
%> @date Copyright 200x-20xx
%>
%> This file is part of the Surrogate Modeling Toolbox ("SUMO Toolbox")
%> and you can redistribute it and/or modify it under the terms of the
%> GNU Affero General Public License version 3 as published by the
%> Free Software Foundation. With the additional provision that a commercial
%> license must be purchased if the SUMO Toolbox is used, modified, or extended
%> in a commercial setting. For details see the included LICENSE.txt file.
%> When referring to the SUMO Toolbox please make reference to the corresponding
%> publication:
%> - A Surrogate Modeling and Adaptive Sampling Toolbox for Computer Based Design
%> D. Gorissen, K. Crombecq, I. Couckuyt, T. Dhaene, P. Demeester,
%> Journal of Machine Learning Research,
%> Vol. 11, pp. 2051-2055, July 2010.
%>
%> Contact : [email protected] - http://sumo.intec.ugent.be
%> Signature
%> MOSBO(samplesValuesPath, outDimIdx)
%
% ======================================================================
%> @brief Multi Objective Surrogate Based Optimizer (MOSBO)
%>
%> Reads a samples/values data file, fits one Kriging model per objective,
%> asks each candidate ranker in turn (hypervolume-based expected
%> improvement first, plain model variance as fallback) for the best new
%> candidate, and prints the selected sample, its ranking score and the
%> model-predicted objective values to stdout.
%>
%> @param samplesValuesPath  path to a file readable by importdata(); its
%>        first text line must be a MATLAB expression evaluating to a
%>        3-row bounds matrix [lower; upper; step] (one column per input
%>        variable), and its numeric part holds one evaluated sample per
%>        row (input columns followed by output columns).
%> @param outDimIdx  column indices (into the numeric data) that hold the
%>        objective values.
% ======================================================================
% Modified by : Michael Mehari
% Email: [email protected]
function MOSBO(samplesValuesPath, outDimIdx)
% use full precision for everything printed below
% (BUGFIX: this used to be the function's last statement, after all
%  output had already been produced, so it had no effect)
format longG;
% import samples and values data
samplesValueData = importdata(samplesValuesPath);
samplesValues = samplesValueData.data;
% bounds of the input variables: [lower; upper; step] per column
% NOTE(review): eval() on file content executes arbitrary code — only
% feed this function trusted data files
bounds = eval(samplesValueData.textdata{1});
inDimIdx = (1:size(bounds,2));
nLengths = (bounds(2,:) - bounds(1,:))./bounds(3,:) + 1; % number of discrete levels per input variable
% affine map between simulator space and the model space the optimizer
% works in (bounds become [-1,1] per dimension — see setBounds below)
transl = (bounds(2,:) + bounds(1,:))/2.0;
scale = (bounds(2,:) - bounds(1,:))/2.0;
[inFunc, outFunc] = calculateTransformationFunctions( [transl; scale] );
samples = inFunc(samplesValues(:,inDimIdx)); % convert samples into model space
values = samplesValues(:,outDimIdx);
inDim = size(samples,2); % number of input variables
outDim = size(values,2); % number of objectives
% candidates closer than this to an existing sample count as duplicates
distanceThreshold = 2.*eps;
% setup kriging model options
type = 'Kriging';
opts = feval([type '.getDefaultOptions'] );
opts.type = type;
theta0 = repmat(0.25,1,inDim); % initial hyperparameter guess
lb = repmat(-2,1,inDim); % hyperparameter lower bound (log scale)
ub = repmat(2,1,inDim); % hyperparameter upper bound (log scale)
% CHANGEME: correlation function to use
%bf = BasisFunction( 'corrgauss', inDim, lb, ub, {'log'});
%bf = BasisFunction( 'correxp', inDim, lb, ub, {'log'});
bf = BasisFunction( 'corrmatern32', inDim, lb, ub, {'log'});
opts.hpOptimizer = SQPLabOptimizer( inDim, 1 );
%% select optimizer to use
optimizer = DiscreteOptimizer(inDim, 1, 'levels', nLengths);
optimizer = optimizer.setBounds(-ones(1,inDim), ones(1,inDim));
%% candidateRankers to use (tried in order until one yields a new point)
rankers = {expectedImprovementHypervolume(ParetoFront(), inDim, 1, 'scaling', 'none') modelVariance(inDim, 1, 'scaling', 'none') };
%% main loop
% build and fit one Kriging model per objective
state.lastModels = cell(outDim,1);
for i=1:outDim
state.lastModels{i}{1} = KrigingModel( opts, theta0, 'regpoly0', bf, 'useLikelihood' );
state.lastModels{i}{1} = state.lastModels{i}{1}.constructInModelSpace( samples, values(:,i) );
end
% optimize it
state.samples = samples;
state.values = values;
for i=1:length(rankers)
rankers{i} = rankers{i}.initNewSamples(state);
%% optimize best candidate
% give the state to the optimizer - might contain useful info such as # samples
optimizer = optimizer.setState(state);
optimFunc = @(x) rankers{i}.scoreMinimize(x, state);
[~, xmin, fmin] = optimizer.optimize(optimFunc);
% Predict objectives for each candidate row from the per-objective models
pred_obj = zeros(size(xmin,1), outDim);
for j=1:outDim
pred_obj(:,j) = state.lastModels{j}{1}.evaluateInModelSpace( xmin );
end
% drop candidates that (nearly) coincide with an already-evaluated sample
dups = buildDistanceMatrix( xmin, samples, 1 );
index = find(all(dups > distanceThreshold, 2));
xmin = xmin(index,:);
fmin = fmin(index,:);
% BUGFIX: keep the prediction rows aligned with the surviving candidates
% (previously pred_obj was left unfiltered, so the printed predictions
% could belong to a discarded duplicate candidate)
pred_obj = pred_obj(index,:);
if ~isempty( xmin )
break;
end
fprintf(1, 'No unique point found. Maxvar.\n');
end
% every ranker failed: fall back to a uniform random point in [-1,1]^inDim
if isempty( xmin )
xmin = 2.*(rand(1,inDim) - 0.5);
% BUGFIX: fmin is empty on this path (that is how we got here), so
% fmin(1) below would throw an index error; there is no ranking score
% for a random point, so report NaN
fmin = NaN;
% BUGFIX: recompute the predictions for the random point instead of
% reusing stale (and row-mismatched) values from the last ranker pass
pred_obj = zeros(1, outDim);
for j=1:outDim
pred_obj(1,j) = state.lastModels{j}{1}.evaluateInModelSpace( xmin );
end
fprintf(1, 'No unique point found. Random.\n');
end
%% evaluate new samples and add to set
newSample = round(outFunc(xmin(1,:))); % map the chosen candidate back to simulator space (integer grid)
HV_PoI = abs(fmin(1)); % Hyper Volume Probability of Improvement
% Display new sample
newSample_str = sprintf('newSample =\n');
for i = 1:inDim
newSample_str = sprintf('%s%u,', newSample_str, newSample(i));
end
newSample_str = sprintf('%s\n', newSample_str(1:end-1));
disp(newSample_str);
% Display HV_PoI
HV_PoI_str = sprintf('HV_PoI =\n%f\n', HV_PoI);
disp(HV_PoI_str);
% Display predicted objectives of the chosen (first) candidate
pred_obj_str = sprintf('pred_obj =\n');
for i = 1:outDim
% BUGFIX: index row 1 explicitly — linear indexing pred_obj(i) walked
% DOWN column 1 and printed other candidates' first objective instead
% of the first candidate's i-th objective
pred_obj_str = sprintf('%s%f\n', pred_obj_str, pred_obj(1,i));
end
disp(pred_obj_str);
end