function minF = CCEI(problemIndex)
%
% INPUT:
% - problemIndex: index of the test problem to be solved (see problem.m)
%
% OUTPUT:
% - minF: the best feasible objective value found
%
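% Example call (a minimal sketch; valid problemIndex values depend on the
% definitions in problem.m):
%   minF = CCEI(1);
%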
% *** General settings here ****
maxFEs = 100; % Maximum number of infill function evaluations (after the initial design)
nTest = 30; % Population size of DE to optimize the acquisition function
maxGen = 200; % Maximum number of generations to optimize the acquisition function
% ******************************
format long
format compact
addpath(genpath('scripts'));
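% Problem definition: nO objectives, nC constraints, nD decision variables,
% and bounds lu = [lower; upper] (a 2-by-nD matrix)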
[nO, nC, nD, lu] = problem(problemIndex);
% Latin hypercube design
rng('shuffle'); % Seed the RNG (modern replacement for the deprecated rand('seed',...))
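% Initial design size: the common 11*nD - 1 rule of thumb for Kriging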
iniSize = 11*nD - 1;
x_train = lhsdesign(iniSize, nD, 'criterion','maximin', 'iteration',100);
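% Scale the unit-hypercube samples to the decision-space bounds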
P = repmat(lu(1,:), iniSize, 1) + x_train.*repmat((lu(2,:) - lu(1,:)), iniSize, 1);
[objF, conV] = fitness(P, problemIndex);
y_train = [objF, conV];
% Normalization (data pre-processing)
maxY = max(abs(y_train));
trainY = y_train./maxY;
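% FEs counts infill evaluations performed after the initial design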
FEs = 1;
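% One surrogate task per objective and per constraint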
nTask = nO + nC;
minF = inf; minNorG = inf; minNorF = inf;
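% A point is feasible when every constraint violation is non-positive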
boolFeasible = max(max(0, conV), [], 2) == 0;
if ~any(boolFeasible)
% Infeasible case
feasibleFlag = 0;
G = trainY(:, 2:end);
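% minNorG: smallest normalized overall constraint violation in the database;
% with no feasible points yet, the surrogates model the constraints only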
minNorG = min(max(max(0, G), [], 2), [], 1);
[model, data] = Training(x_train, G(:), nTask-1);
else
% Feasible case
feasibleFlag = 1;
minF = min(objF(boolFeasible), [], 1);
F = trainY(:,1);
minNorF = min(F(boolFeasible), [], 1);
[model,data] = Training(x_train, trainY(:), nTask);
end
while FEs <= maxFEs
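% Evolve a DE population in the unit hypercube to maximize the EI acquisition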
x_test = rand(nTest, nD);
EI_parent = calEI(nTask, x_test, feasibleFlag, nC, minNorF, minNorG, model, data);
for g = 1:maxGen
x_child = DEgenerator(x_test, [zeros(1,nD); ones(1,nD)]);
EI_child = calEI(nTask, x_child, feasibleFlag, nC, minNorF, minNorG, model,data);
better = EI_child >= EI_parent; % Children that match or improve the parents' EI
EI_parent(better) = EI_child(better);
x_test(better,:) = x_child(better,:);
end
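% Pick the candidate with the largest EI, map it back to the original bounds,
% and evaluate it on the true problem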
[~, max_index] = max(EI_parent);
ind = lu(1,:) + x_test(max_index,:).*(lu(2,:) - lu(1,:));
[indf, indv] = fitness(ind, problemIndex);
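% Augment the database with the new sample and retrain the surrogates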
x_train = [x_train; x_test(max_index,:)];
objF = [objF; indf];
conV = [conV; indv];
FEs = FEs + 1;
y_train = [objF, conV];
% Normalization
maxY = max(abs(y_train));
trainY = y_train./maxY;
% Check whether the database now contains any feasible solutions
boolFeasible = max(max(0, conV), [], 2) == 0;
if ~any(boolFeasible)
% Infeasible case
feasibleFlag = 0;
G = trainY(:, 2:end);
minNorG = min(max(max(0, G), [], 2), [], 1);
[model,data] = Training(x_train, G(:), nTask-1);
else
% Feasible case
feasibleFlag = 1;
minF = min(objF(boolFeasible), [], 1);
F = trainY(:,1);
minNorF = min(F(boolFeasible), [], 1);
[model,data] = Training(x_train, trainY(:), nTask);
end
end
end