package geniusweb.bagga.dicehaggler;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.stream.Collectors;

import geniusweb.issuevalue.Bid;
import geniusweb.issuevalue.DiscreteValueSet;
import geniusweb.issuevalue.Domain;
import geniusweb.issuevalue.Value;

public class DH_UserModel {

    private Domain domain;
    private static List<String> issues;
    private Dictionary<String, DH_Issue> utilSpace = new Hashtable<String, DH_Issue>();
    private double beta = 1.5;
    private double p_a = 0.25;
    private List<Double> fitnessList = new ArrayList<>();
    private List<Dictionary<String, DH_Issue>> nests = new ArrayList<>();
    private List<Bid> orderedBids;
    private List<Dictionary<String, DH_Issue>> sortedNestsToCheckForVariance;

    public DH_UserModel(Domain domain, List<Bid> orderedBids) {
        issues = domain.getIssues().stream().collect(Collectors.toList());
        this.domain = domain;
        this.orderedBids = orderedBids;
        sortedNestsToCheckForVariance = new ArrayList<>();
    }

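    /**
     * Builds the initial (flat) utility space: every issue gets weight 1/#issues
     * and every discrete value gets evaluation 1. This is the neutral starting
     * point before the cuckoo-search estimation refines it.
     */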
    public void initializeUtilitySpace() {

        for (String issue : issues) {
            DH_Issue dh_i = new DH_Issue();
            dh_i.setIssueWeight(1.0 / issues.size());

            Dictionary<String, Double> issueValueDict = new Hashtable<String, Double>();
            DiscreteValueSet availableVals = (DiscreteValueSet) domain.getValues(issue);
            for (Value v : availableVals)
                issueValueDict.put(v.toString(), 1D);

            dh_i.setIssueValueDict(issueValueDict);

            utilSpace.put(issue, dh_i);
        }
    }

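    /**
     * Estimates the user's utility space with a cuckoo-search loop: start from a
     * random population of nests (candidate utility spaces), repeatedly generate a
     * new cuckoo from the fittest nest via a Levy flight, re-initialize the worst
     * fraction p_a of nests, and finally cluster the surviving nests and pick the
     * fittest member of the best cluster.
     */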
    public Dictionary<String, DH_Issue> estimateutilityspace() {

        long startTime = System.currentTimeMillis();
        System.out.println("I come here only once to estimate the user utility space");
        initializeUtilitySpace();
        int numNests = 100;
        int maxGeneration = 200;

        // initial random population of cuckoo nests/eggs (we assume that each nest holds exactly one egg)
        for (int i = 0; i < numNests; i++) {
            nests.add(generateRandomUtilitySpace());
        }

        for (int gen = 0; gen < maxGeneration; gen++) {

            // recompute the fitness of every nest for this generation
            // (cleared first so the indices stay aligned with the nests)
            fitnessList.clear();
            for (Dictionary<String, DH_Issue> individual : nests) {
                fitnessList.add(getFitness(individual));
            }
            // find the nest with the highest fitness
            int best_index = 0;
            for (int i = 0; i < nests.size(); i++) {
                if (fitnessList.get(best_index) < fitnessList.get(i)) {
                    best_index = i;
                }
            }

            Dictionary<String, DH_Issue> best_cuckoo = nests.get(best_index);

            // generate a cuckoo by Levy flight from the current best nest and evaluate its fitness
            Dictionary<String, DH_Issue> newcuckoo = generateCuckooByLevyFlight(best_cuckoo);
            double newCuckooFitness = getFitness(newcuckoo);
            if (newCuckooFitness > fitnessList.get(best_index)) {
                // replace the cuckoo
                nests.set(best_index, newcuckoo);
            }
            // sort the nests in increasing order of fitness
            sortNests();
            // re-initialize the worst nests
            nests = reInitializeWorstNests();
        }
        sortNests();

        for (int i = nests.size() - 1; i >= nests.size() - 20; i--) {
            sortedNestsToCheckForVariance.add(nests.get(i));
        }

        checkVariance();

        List<Dictionary<String, DH_Issue>> bestFinalNests = chooseBestClusterOfFinalNests(numNests);

        // int n = 5; //can be the size of best cluster to choose a random index from.
        // List<Dictionary<String, DH_Issue>> bestNests = chooseNBestNests(n);
        // choose a random one out of the n best nests
        // int rIndex = new Random().nextInt(n-1); //https://mkyong.com/java/java-generate-random-integers-in-a-range/

        // int rIndex = new Random().nextInt(bestFinalNests.size()-1);

        // choose the fittest nest of the best cluster
        double f = 0D;
        for (Dictionary<String, DH_Issue> n : bestFinalNests) {
            if (f < getFitness(n)) {
                f = getFitness(n);
                utilSpace = n;
            }
        }

        // utilSpace = bestFinalNests.get(index);
        // choose the best nest from the sorted list
        // utilSpace = nests.get(nests.size()-1);
        System.out.println("My estimated user model:\n" + utilSpace);
        long endTime = System.currentTimeMillis();
        System.out.println("Total user model estimation time = " + (endTime - startTime) + " ms");
        return utilSpace;
    }

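    /**
     * Builds a numNests x numNests similarity matrix whose entry (i, j) is the
     * Spearman rank correlation between nests i and j over the given bid ranking,
     * then clusters the nests on these feature rows and returns the best cluster.
     */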
    private List<Dictionary<String, DH_Issue>> chooseBestClusterOfFinalNests(int numNests) {

        int numOfClusters = 5;
        List<Dictionary<String, DH_Issue>> bestClusterOfNests = new ArrayList<Dictionary<String, DH_Issue>>();

        double[][] featureMatrix = new double[numNests][numNests];

        int i = 0;
        for (Dictionary<String, DH_Issue> nest1 : nests) {
            int j = 0;
            for (Dictionary<String, DH_Issue> nest2 : nests) {
                double spearmanValue = calculateSpearmanRankCorrelationCoefficient(nest1, nest2);
                featureMatrix[i][j] = spearmanValue;
                j++;
            }
            i++;
        }
        // System.out.println("Matrix is: \n" + featureMatrix);
        bestClusterOfNests = performClustering(featureMatrix, numOfClusters);

        return bestClusterOfNests;
    }

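    /**
     * Plain k-means over the rows of the feature matrix: pick random rows as the
     * initial centroids, assign every row (nest) to the centroid with the smallest
     * Euclidean distance, recompute the centroids as the mean of their members,
     * and repeat for a fixed number of iterations. Returns the nests belonging to
     * the cluster selected by findTheBestCluster.
     */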
    private List<Dictionary<String, DH_Issue>> performClustering(double[][] featureMatrix, int numOfClusters) {

        // returns the best cluster out of n clusters; one cluster consists of many solutions
        int maxIterations = 50;
        int numClusters = numOfClusters;

        // calculate initial means ... randomly chosen data points serve as the initial centroids
        double[][] means = new double[numClusters][featureMatrix.length]; // numOfRows == numOfColumns, so featureMatrix[0].length could also have been used
        for (int i = 0; i < means.length; i++) {
            // choose random data points/rows, one per required cluster
            int rnd = new Random().nextInt(featureMatrix.length);
            means[i] = featureMatrix[rnd];
        }

        Map<Integer, List<Integer>> clusters = new HashMap<>(); // stores the row numbers (nest indices) belonging to each cluster

        for (int i = 0; i < maxIterations; i++) {
            clusters.clear();
            for (int row = 0; row < featureMatrix.length; row++) {
                // for each nest, find the cluster index with the minimum distance
                // and store the nest index in that cluster's list
                double minDist = Double.POSITIVE_INFINITY;
                int minClusterIndex = 0;
                for (int clusterNum = 0; clusterNum < numClusters; clusterNum++) {
                    double dist = euclideanDistance(means[clusterNum], featureMatrix[row]);
                    if (minDist > dist) {
                        minDist = dist;
                        minClusterIndex = clusterNum;
                    }
                }
                List<Integer> list;
                if (!clusters.containsKey(minClusterIndex)) {
                    list = new ArrayList<Integer>();
                } else {
                    list = clusters.get(minClusterIndex);
                }
                list.add(row);
                clusters.put(minClusterIndex, list);
            }
            // update the centroids/means of each cluster
            means = updateCentroids(means, clusters, featureMatrix);
        }

        List<Integer> bestCluster = findTheBestCluster(clusters, featureMatrix);

        // collect the nests of the best cluster
        List<Dictionary<String, DH_Issue>> bestChosenNests = new ArrayList<Dictionary<String, DH_Issue>>();

        for (Integer i : bestCluster) {
            bestChosenNests.add(nests.get(i));
        }

        return bestChosenNests;
    }

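    /**
     * Scores every cluster and returns the member indices of the best one.
     * For each data point i the silhouette value s(i) = (b(i) - a(i)) / max(a(i), b(i))
     * is computed, where a(i) is the mean distance to the other points in its own
     * cluster and b(i) is the mean distance to the nearest other cluster; values
     * close to 1 mean the point sits tightly inside its cluster. The per-cluster
     * mean silhouette width, the cluster size and the average fitness are then
     * combined by TOPSIS (DH_Topsis) to pick the best cluster key.
     */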
    private List<Integer> findTheBestCluster(Map<Integer, List<Integer>> clusters, double[][] featureMatrix) {

        // Silhouette of one data point: the value ranges between -1 and 1.
        // A high value means the data point is tightly packed inside its cluster.

        Map<Integer, Double> silhouetteWidthMap = new HashMap<>(); // for each cluster, the mean silhouette width of the data points belonging to it

        for (Entry<Integer, List<Integer>> cluster : clusters.entrySet()) {
            // calculate the mean distance of each data point to the other points in the cluster
            // (the average dissimilarity between data point i and the other data points)

            Map<Integer, Double> distanceMap = new HashMap<>(); // for each point in the cluster, the mean distance to every other point in the same cluster
            List<Integer> dataPoints = cluster.getValue();
            for (Integer each1 : dataPoints) {
                double sum = 0D; // reset per data point so distances are not accumulated across points
                for (Integer each2 : dataPoints) {
                    if (!each1.equals(each2)) {
                        sum += euclideanDistance(featureMatrix[each1], featureMatrix[each2]);
                    }
                }
                double avgDistance = sum / (dataPoints.size() - 1); // -1 because we don't compare each1 with itself
                distanceMap.put(each1, avgDistance);
            }

            Map<Integer, Double> dissimilarityMap = new HashMap<>(); // for each data point, the mean distance to the points of the nearest other cluster
            // we find the neighbouring cluster (minimum mean distance) of each data point:
            // the distance from the point to every point in every other (non-owning) cluster

            for (Integer each1 : dataPoints) {
                double minDistance = Double.POSITIVE_INFINITY;
                for (Entry<Integer, List<Integer>> cluster2 : clusters.entrySet()) {
                    if (!cluster.equals(cluster2)) {
                        double sum1 = 0D; // distances to this candidate cluster only
                        for (Integer eachPoint : cluster2.getValue()) {
                            sum1 += euclideanDistance(featureMatrix[each1], featureMatrix[eachPoint]);
                        }
                        double avgdist = sum1 / cluster2.getValue().size();
                        if (minDistance > avgdist) minDistance = avgdist;
                    }
                }
                dissimilarityMap.put(each1, minDistance);
            }

            // calculate the silhouette value of each data point
            Map<Integer, Double> silhouetteValueMap = new HashMap<Integer, Double>();
            for (Integer each1 : dataPoints) {
                double silVal = (dissimilarityMap.get(each1) - distanceMap.get(each1)) / Math.max(dissimilarityMap.get(each1), distanceMap.get(each1));
                silhouetteValueMap.put(each1, silVal);
            }

            double widthSum = 0D;
            for (Integer each1 : dataPoints) {
                widthSum += silhouetteValueMap.get(each1);
            }
            double meanSilhouetteWidth = widthSum / dataPoints.size();
            silhouetteWidthMap.put(cluster.getKey(), meanSilhouetteWidth);
        }

        /*
         * To choose the best cluster key, TOPSIS is used to maximize the following objectives:
         *  - number of solutions in each cluster (w = 0.4)
         *  - average fitness value (w = 0.4)
         *  - mean silhouette width (w = 0.2)
         */

        Map<Integer, Integer> numOfSolutionsInClusterMap = new HashMap<Integer, Integer>();
        Map<Integer, Double> avgFitnessInClusterMap = new HashMap<>();

        for (Entry<Integer, List<Integer>> cluster : clusters.entrySet()) {
            numOfSolutionsInClusterMap.put(cluster.getKey(), cluster.getValue().size());
        }

        for (Entry<Integer, List<Integer>> cluster : clusters.entrySet()) {
            double sum = 0D;
            List<Integer> indices = cluster.getValue();
            for (Integer i : indices) {
                sum += getFitness(nests.get(i));
            }
            double avg = sum / indices.size();
            avgFitnessInClusterMap.put(cluster.getKey(), avg);
        }

        DH_Topsis topsis = new DH_Topsis();
        int bestClusterKey = topsis.performTopsis(clusters, avgFitnessInClusterMap, numOfSolutionsInClusterMap, silhouetteWidthMap);

        // alternative: return the cluster key with the highest mean silhouette width
        // int bestClusterKey = silhouetteWidthMap.entrySet().stream().max((entry1, entry2) -> entry1.getValue() > entry2.getValue() ? 1 : -1).get().getKey();
        // Integer bestClusterKey = Collections.max(silhouetteWidthMap.keySet());
        // return clusters.get(bestClusterKey.intValue());
        return clusters.get(bestClusterKey);
    }

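    /**
     * Recomputes each cluster's centroid as the column-wise mean of the feature
     * rows of its member nests.
     */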
    private double[][] updateCentroids(double[][] means, Map<Integer, List<Integer>> clusters, double[][] featureMatrix) {

        for (Entry<Integer, List<Integer>> mapEntry : clusters.entrySet()) {
            int i = mapEntry.getKey();
            List<Integer> nestIndices = mapEntry.getValue();
            for (int col = 0; col < means[0].length; col++) {
                double sum = 0D; // reset per column so each centroid coordinate is an independent mean
                for (Integer index : nestIndices) {
                    sum += featureMatrix[index][col];
                }
                means[i][col] = sum / nestIndices.size();
            }
        }
        return means;
    }

    private double euclideanDistance(double[] ds1, double[] ds2) {
        double sum = 0D;
        for (int i = 0; i < ds1.length; i++) {
            sum += Math.pow(ds1[i] - ds2[i], 2);
        }
        return Math.sqrt(sum);
    }

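    /**
     * Alternative nest selection (currently unused, see the commented-out call in
     * estimateutilityspace): ranks each nest by its average Spearman correlation
     * with all other nests, inspired by AHP-style pairwise comparison, and returns
     * the n best.
     */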
    private List<Dictionary<String, DH_Issue>> chooseNBestNests(int n) {
        List<Dictionary<String, DH_Issue>> bestNests = new ArrayList<>();

        Map<Dictionary<String, DH_Issue>, Double> averageValueMap = new HashMap<>();

        for (Dictionary<String, DH_Issue> nest1 : nests) {
            double spearmanValue = 0;
            for (Dictionary<String, DH_Issue> nest2 : nests) {
                if (!nest1.equals(nest2)) {
                    spearmanValue += calculateSpearmanRankCorrelationCoefficient(nest1, nest2);
                }
            }
            // average over the other nests; inspired by AHP (deriving importance from pairwise comparison)
            averageValueMap.put(nest1, spearmanValue / (nests.size() - 1));
        }

        // sort the nests by their average correlation and take the n highest
        List<Dictionary<String, DH_Issue>> sortedList = averageValueMap.entrySet().stream()
                .sorted(Map.Entry.comparingByValue())
                .map(Map.Entry::getKey)
                .collect(Collectors.toList());
        for (int i = sortedList.size() - 1; i >= sortedList.size() - n; i--) {
            bestNests.add(sortedList.get(i));
        }
        return bestNests;
    }

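    /**
     * Spearman's rank correlation between two candidate utility spaces over the
     * given bid ordering: rank every bid under both nests' estimated utilities and
     * compute rho = 1 - 6 * sum(d_i^2) / (n^3 - n), where d_i is the rank
     * difference of bid i and n is the number of bids.
     */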
    private double calculateSpearmanRankCorrelationCoefficient(Dictionary<String, DH_Issue> nest1, Dictionary<String, DH_Issue> nest2) {

        List<Bid> givenOrderedBids = orderedBids;
        RankableMap<String, Double> nest1Map = new RankableMap<>();
        RankableMap<String, Double> nest2Map = new RankableMap<>();

        for (Bid bid : givenOrderedBids) {
            nest1Map.put(bid.toString(), getUtility(bid, nest1));
            nest2Map.put(bid.toString(), getUtility(bid, nest2));
        }

        // rank the bids under the two utility spaces
        Map<Bid, Integer> nest1Rank = new HashMap<>();
        Map<Bid, Integer> nest2Rank = new HashMap<>();
        for (Bid bid : givenOrderedBids) {
            nest1Rank.put(bid, nest1Map.rank(bid.toString()));
            nest2Rank.put(bid, nest2Map.rank(bid.toString()));
        }

        double errors = 0D;
        for (Bid b : givenOrderedBids) {
            errors += Math.pow(nest1Rank.get(b) - nest2Rank.get(b), 2);
        }
        double spearman = 1.0D - 6.0D * errors / (Math.pow(givenOrderedBids.size(), 3) - givenOrderedBids.size());
        return spearman;
    }

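    /**
     * Abandons a fraction p_a of the nests: since the list is sorted in increasing
     * order of fitness, the first ceil(p_a * size) nests are the worst ones, and
     * each of them is replaced by a new Levy-flight solution.
     */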
    private List<Dictionary<String, DH_Issue>> reInitializeWorstNests() {
        // Ignore the worst cuckoo eggs (or nests) from the sorted nests; the host bird finds them with probability p_a
        List<Dictionary<String, DH_Issue>> sortedNests = nests;
        int numAbandonedNests = (int) Math.ceil(p_a * sortedNests.size()); // a fraction p_a of the nests is abandoned
        Dictionary<String, DH_Issue> newNest;
        for (int i = 0; i < numAbandonedNests; i++) {
            Dictionary<String, DH_Issue> oldNest = sortedNests.get(i);
            newNest = generateCuckooByLevyFlight(oldNest);
            sortedNests.set(i, newNest);
        }
        return sortedNests;
    }

    private void sortNests() {
        // sort the nests in increasing order of their fitness

        Map<Dictionary<String, DH_Issue>, Double> fitnessMap = new HashMap<>();
        for (Dictionary<String, DH_Issue> nest : nests) {
            fitnessMap.put(nest, getFitness(nest));
        }

        Map<Dictionary<String, DH_Issue>, Double> sortedMap = fitnessMap.entrySet().stream()
                .sorted(Map.Entry.comparingByValue(Comparator.naturalOrder()))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue,
                        (oldValue, newValue) -> oldValue, LinkedHashMap::new));

        nests.clear();
        for (Entry<Dictionary<String, DH_Issue>, Double> nest : sortedMap.entrySet()) {
            nests.add(nest.getKey());
            // System.out.println(nest.getValue());
        }
    }

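    /**
     * Generates a new candidate utility space by a Levy flight around the best
     * nest: starting from a fresh random utility space, every issue weight and
     * value evaluation is perturbed by a step proportional to its distance from
     * the best nest, scaled by a heavy-tailed Levy step length (see getStep). The
     * result is normalized before it is returned.
     */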
    private Dictionary<String, DH_Issue> generateCuckooByLevyFlight(Dictionary<String, DH_Issue> best_cuckoo) {
        Dictionary<String, DH_Issue> chosenNest = generateRandomUtilitySpace();
        double evaluationIssueValueLb = 0.0; // evaluation values have to be >= 0
        // double evaluationIssueValueUb = 10.0; //Can be changed
        double sigmaU = getSigmaU();
        double sigmaV = getSigmaV();
        double step = getStep(sigmaU, sigmaV);
        double levy_distribution = 0.0;
        // for (String i : issues) {
        //     double stepsize = 0.01 * step * (chosenNest.get(i).getIssueWeight()-(best_cuckoo.get(i).getIssueWeight()));
        //     levy_distribution = chosenNest.get(i).getIssueWeight() + stepsize * new Random().nextGaussian();
        //     chosenNest.get(i).setIssueWeight(levy_distribution);
        //
        //     DiscreteValueSet availableVals = (DiscreteValueSet) domain.getValues(i);
        //     for (Value v : availableVals) {
        //         double stepsizeV = 0.01 * step * (chosenNest.get(i).getIssueValueDict().get(v.toString()) - best_cuckoo.get(i).getIssueValueDict().get(v.toString()));
        //         double levy_distributionV = chosenNest.get(i).getIssueValueDict().get(v.toString()) + stepsizeV * new Random().nextGaussian();
        //         if (levy_distributionV > evaluationIssueValueLb) {
        //             chosenNest.get(i).getIssueValueDict().put(v.toString(), levy_distributionV);
        //         }
        //     }
        // }
        // inspired from another agent
        for (String i : issues) {
            double stepsize = 0.01 * step * (chosenNest.get(i).getIssueWeight() - best_cuckoo.get(i).getIssueWeight());
            Random rand = new Random();
            if (rand.nextBoolean())
                levy_distribution = chosenNest.get(i).getIssueWeight() + stepsize * new Random().nextGaussian() / issues.size();
            else
                levy_distribution = chosenNest.get(i).getIssueWeight() - stepsize * new Random().nextGaussian() / issues.size();

            chosenNest.get(i).setIssueWeight(levy_distribution);

            DiscreteValueSet availableVals = (DiscreteValueSet) domain.getValues(i);
            double averageEval = 0D;
            for (Value v : availableVals) {
                averageEval += chosenNest.get(i).getIssueValueDict().get(v.toString());
            }
            averageEval /= availableVals.getValues().size();

            for (Value v : availableVals) {
                double stepsizeV = 0.01 * step * (chosenNest.get(i).getIssueValueDict().get(v.toString()) - best_cuckoo.get(i).getIssueValueDict().get(v.toString()));
                double levy_distributionV = 0D;
                if (rand.nextBoolean()) {
                    levy_distributionV = chosenNest.get(i).getIssueValueDict().get(v.toString()) + stepsizeV * averageEval * new Random().nextGaussian();
                } else {
                    levy_distributionV = chosenNest.get(i).getIssueValueDict().get(v.toString()) - stepsizeV * averageEval * new Random().nextGaussian();
                }

                if (levy_distributionV > evaluationIssueValueLb) {
                    chosenNest.get(i).getIssueValueDict().put(v.toString(), levy_distributionV);
                }
            }
        }
        return normalizeMap(chosenNest);
    }

    private double getStep(double sigmaU, double sigmaV) {
        double u = new Random().nextGaussian() * sigmaU;
        double v = new Random().nextGaussian() * sigmaV;
        return u / Math.pow(Math.abs(v), 1 / this.beta);
    }

    private double getSigmaV() {
        return 1.0;
    }

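    /**
     * Mantegna's algorithm for Levy-stable step lengths: with u ~ N(0, sigmaU^2)
     * and v ~ N(0, 1), step = u / |v|^(1/beta) follows a Levy distribution, where
     * sigmaU = [ Gamma(1+beta) * sin(pi*beta/2) / ( Gamma((1+beta)/2) * beta * 2^((beta-1)/2) ) ]^(1/beta).
     */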
    private double getSigmaU() {
        double numerator = gamma(1 + beta) * Math.sin((Math.PI * beta) / 2);
        double denominator = gamma((1 + beta) / 2) * beta * Math.pow(2, (beta - 1) / 2);
        return Math.pow(numerator / denominator, 1 / beta);
    }

    private double gamma(double x) {
        return Math.exp(logGamma(x));
    }

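    /**
     * Lanczos-style series approximation of log(Gamma(x)), accurate enough for the
     * small positive arguments used by getSigmaU.
     */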
    private double logGamma(double x) {
        double tmp = (x - 0.5) * Math.log(x + 4.5) - (x + 4.5);
        double ser = 1.0 + 76.18009173 / (x + 0) - 86.50532033 / (x + 1)
                + 24.01409822 / (x + 2) - 1.231739516 / (x + 3)
                + 0.00120858003 / (x + 4) - 0.00000536382 / (x + 5);
        return tmp + Math.log(ser * Math.sqrt(2 * Math.PI));
    }

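    /**
     * Fitness of a candidate utility space against the given bid ranking:
     * 10 times the Spearman correlation between the given ranks and the ranks
     * induced by the candidate's utilities, plus two correction terms comparing
     * the estimated utilities of the lowest- and highest-ranked bids with their
     * given ranks.
     */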
    private Double getFitness(Dictionary<String, DH_Issue> individual) {

        double fitnessValue = 0D;

        List<Bid> givenOrderedBids = orderedBids;

        Map<Bid, Integer> givenRankMap = new HashMap<Bid, Integer>(); // contains the bids ranked from minimal to maximal utility

        List<Double> estimatedUtilList = new ArrayList<>();

        int counter = 0;
        for (Bid b : givenOrderedBids) {
            counter++;
            givenRankMap.put(b, counter); // the rank is the bid's 1-based position in the given ordering

            estimatedUtilList.add(getUtility(b, individual)); // also compute the utilities used to build the estimated rank map
        }

        Collections.sort(estimatedUtilList);

        Map<Bid, Integer> estimatedRankMap = new HashMap<Bid, Integer>();
        for (Bid b : givenOrderedBids) {
            // +1 so the estimated ranks are 1-based like the given ranks
            estimatedRankMap.put(b, estimatedUtilList.indexOf(getUtility(b, individual)) + 1);
        }

        double errors = 0D;
        for (Bid b : givenOrderedBids) {
            errors += Math.pow(givenRankMap.get(b) - estimatedRankMap.get(b), 2);
        }

        double spearman = 1.0D - 6.0D * errors / (Math.pow(givenRankMap.size(), 3) - givenRankMap.size());
        double lowDiff = Math.abs(givenRankMap.get(givenOrderedBids.get(0)) - getUtility(givenOrderedBids.get(0), individual));
        double highDiff = Math.abs(givenRankMap.get(givenOrderedBids.get(givenOrderedBids.size() - 1)) - getUtility(givenOrderedBids.get(givenOrderedBids.size() - 1), individual));

        fitnessValue = spearman * 10.0D + (1.0D - lowDiff) + (1.0D - highDiff);

        return fitnessValue;
    }

    private Dictionary<String, DH_Issue> generateRandomUtilitySpace() {

        Dictionary<String, DH_Issue> randomUtilSpace = new Hashtable<String, DH_Issue>();

        for (String issue : issues) {
            DH_Issue dh_i = new DH_Issue();
            dh_i.setIssueWeight(Math.random());

            Dictionary<String, Double> issueValueDict = new Hashtable<String, Double>();
            DiscreteValueSet availableVals = (DiscreteValueSet) domain.getValues(issue);
            for (Value v : availableVals)
                issueValueDict.put(v.toString(), Math.random());

            dh_i.setIssueValueDict(issueValueDict);

            randomUtilSpace.put(issue, dh_i);
        }

        // Normalize the weights, since the values were picked randomly in [0, 1]
        return normalizeMap(randomUtilSpace);
    }

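    /**
     * Normalizes a candidate utility space in place: issue weights are rescaled so
     * they sum to 1, and within every issue the value evaluations are divided by
     * the maximum evaluation so the best value gets 1.
     */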
    private Dictionary<String, DH_Issue> normalizeMap(Dictionary<String, DH_Issue> randomUtilSpace2) {

        // normalize the issue weights: the sum of the issue weights should equal 1

        double issueSum = 0D;

        for (Enumeration<String> keys = randomUtilSpace2.keys(); keys.hasMoreElements();) { // iterate over all keys
            String issueKey = keys.nextElement();
            issueSum += randomUtilSpace2.get(issueKey).getIssueWeight();
        }

        for (Enumeration<String> keys = randomUtilSpace2.keys(); keys.hasMoreElements();) { // iterate over all keys
            String issueKey = keys.nextElement();
            double newWeight = randomUtilSpace2.get(issueKey).getIssueWeight() / issueSum;
            randomUtilSpace2.get(issueKey).setIssueWeight(newWeight);
        }

        // normalize the issue values

        for (Enumeration<String> keys = randomUtilSpace2.keys(); keys.hasMoreElements();) { // iterate over all keys
            String issueKey = keys.nextElement();
            double maxVal = 0D;
            Dictionary<String, Double> valDict = randomUtilSpace2.get(issueKey).getIssueValueDict();
            List<String> valKeysList = Collections.list(valDict.keys());
            for (String valKey : valKeysList) {
                if (valDict.get(valKey) > maxVal) maxVal = valDict.get(valKey);
            }
            for (String valKey : valKeysList) {
                valDict.put(valKey, valDict.get(valKey) / maxVal);
            }
        }
        return randomUtilSpace2;
    }

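    /**
     * Linear additive utility of a bid under the given utility space:
     * u(b) = sum over all issues of issueWeight * valueEvaluation(bid's value for that issue).
     */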
    public static Double getUtility(Bid b, Dictionary<String, DH_Issue> givenUtilitySpace) {
        double util = 0D;
        // util = w1*v1 + w2*v2 + ...
        for (String issue : issues) {
            double w = givenUtilitySpace.get(issue).getIssueWeight();
            String val = b.getIssueValues().get(issue).toString();
            double v = givenUtilitySpace.get(issue).getIssueValueDict().get(val);
            util += w * v;
        }
        return util;
    }

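    /**
     * Computes the variance of the pairwise Spearman correlations among the top
     * nests kept in sortedNestsToCheckForVariance and reports whether it exceeds
     * 0.5, i.e. whether the best nests still disagree strongly with each other.
     */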
    public boolean checkVariance() {

        int counter = 0;
        double sum = 0D;
        for (Dictionary<String, DH_Issue> nest1 : sortedNestsToCheckForVariance) {
            for (Dictionary<String, DH_Issue> nest2 : sortedNestsToCheckForVariance) {
                if (!nest1.equals(nest2)) { // only count distinct pairs
                    counter++;
                    sum += calculateSpearmanRankCorrelationCoefficient(nest1, nest2);
                }
            }
        }
        int n = counter;
        double mean = sum / n;

        double sum1 = 0D;
        for (Dictionary<String, DH_Issue> nest1 : sortedNestsToCheckForVariance) {
            for (Dictionary<String, DH_Issue> nest2 : sortedNestsToCheckForVariance) {
                if (!nest1.equals(nest2))
                    sum1 += Math.pow(calculateSpearmanRankCorrelationCoefficient(nest1, nest2) - mean, 2);
            }
        }
        double variance = sum1 / (n - 1);
        System.out.println("Variance = " + variance);
        return (variance > 0.5);
    }
}