source: src/main/java/parties/in4010/q12015/group11/Group11.java@127

Last change on this file since 127 was 127, checked in by Wouter Pasman, 6 years ago

#41 ROLL BACK of rev. 126. So this version is equal to rev. 125.

package parties.in4010.q12015.group11;

import java.util.HashMap;
import java.util.List;
import java.util.Map.Entry;

import genius.core.AgentID;
import genius.core.Bid;
import genius.core.BidHistory;
import genius.core.BidIterator;
import genius.core.actions.Accept;
import genius.core.actions.Action;
import genius.core.actions.DefaultAction;
import genius.core.actions.Offer;
import genius.core.bidding.BidDetails;
import genius.core.issue.Issue;
import genius.core.issue.IssueDiscrete;
import genius.core.issue.Objective;
import genius.core.issue.ValueDiscrete;
import genius.core.parties.AbstractNegotiationParty;
import genius.core.parties.NegotiationInfo;
import genius.core.utility.AdditiveUtilitySpace;
import genius.core.utility.Evaluator;
import genius.core.utility.EvaluatorDiscrete;

/************************************************
 * Assignment AI Technique - Negotiation Agent
 * By: J.K. van Schoubroeck (4329996), H.H. Choiri (4468457), T. Smit (4242785)
 ************************************************/

public class Group11 extends AbstractNegotiationParty {

    // opponent model & bidding history for each opponent agent
    private HashMap<Object, AdditiveUtilitySpace> opponentUtilitySpace = new HashMap<Object, AdditiveUtilitySpace>();
    private HashMap<Object, BidHistory> bidHistory = new HashMap<Object, BidHistory>();

    // last bid received from any opponent
    private Bid lastBid;

    private int amountOfIssues;
    // learning rate used when updating the opponent model's issue weights
    private double learningRate = 0.2;
    // time-dependent increment for the opponent model's value evaluations
    private int learnValueAddition;

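    /**
     * Initializes the party; lastBid starts as an empty bid on the negotiation domain.
     */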
    @Override
    public void init(NegotiationInfo info) {
        super.init(info);
        lastBid = new Bid(getUtilitySpace().getDomain());
    }

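    /**
     * Decides the next action. The opening move offers the maximum-utility bid.
     * After that, the agent uses the AC_next acceptance condition: it accepts the
     * opponent's last bid when that bid is at least as good for this agent as its
     * own upcoming bid and no opponent is estimated to gain more from it;
     * otherwise it counter-offers.
     */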
    @Override
    public Action chooseAction(List<Class<? extends Action>> validActions) {

        if (bidHistory.isEmpty()) {
            // opening bid: offer the bid with the maximum utility for this agent
            try {
                Bid maxBid = utilitySpace.getMaxUtilityBid();
                System.out.println("opening bid util:" + getUtility(maxBid));
                return new Offer(getPartyId(), maxBid);
            } catch (Exception e) {
                System.out.println("Failed to send opening offer");
                e.printStackTrace();
            }
        }
        Bid nextBid = determineNextBid();
        double nextMyBidUtil = getUtility(nextBid);

        // AC_next: accept if the opponent's last bid is at least as good as our
        // next bid and this agent gets the highest utility from it
        if ((getUtility(lastBid) >= nextMyBidUtil) && isGetHighest(lastBid)) {
            return new Accept(getPartyId(), lastBid);
        }

        return new Offer(getPartyId(), nextBid);
    }

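    /**
     * Processes an incoming action. Every bid received from an opponent is stored
     * in that opponent's bidding history and used to update its opponent model.
     */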
    @Override
    public void receiveMessage(AgentID sender, Action action) {
        super.receiveMessage(sender, action);
        Bid currentOpBid = DefaultAction.getBidFromAction(action);
        if (sender != null && getUtility(currentOpBid) > 0) {
            lastBid = currentOpBid;
            try {
                // add the bid to this opponent's bidding history
                BidHistory bidH = bidHistory.get(sender.toString());
                if (bidH == null) {
                    bidH = new BidHistory();
                }
                bidH.add(new BidDetails(currentOpBid, getUtility(currentOpBid)));
                bidHistory.put(sender.toString(), bidH);
            } catch (Exception e) {
                System.out.println("Failed to add bid to the bidding history of " + sender.toString());
                e.printStackTrace();
            }
            updateModel(sender, currentOpBid, getTimeLine().getTime());
        }
    }

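    /**
     * Frequency-based opponent modelling. The opponent model starts as a copy of
     * this agent's utility space with uniform issue weights and all value
     * evaluations set to 10. On every new bid, issues whose value did not change
     * since the opponent's previous bid get a slightly higher (re-normalized)
     * weight, and the evaluations of the values in the latest bid are incremented
     * by a time-dependent amount.
     */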
    public void updateModel(Object sender, Bid opponentBid, double time) {

        // time-dependent learning value: 10 * (1 - t)
        learnValueAddition = (int) Math.round((1 - time) * 10);

        // this method handles the opponent modelling
        BidHistory bidHist = bidHistory.get(sender.toString());
        if (!opponentUtilitySpace.containsKey(sender.toString())) {
            // initialize the opponent model's weights
            AdditiveUtilitySpace oppUSpace = (AdditiveUtilitySpace) getUtilitySpace().copy();
            amountOfIssues = oppUSpace.getDomain().getIssues().size();
            for (Entry<Objective, Evaluator> e : oppUSpace.getEvaluators()) {
                // set the issue weights equally to 1/#OfIssues
                oppUSpace.unlock(e.getKey());
                e.getValue().setWeight(1D / (double) amountOfIssues);
                try {
                    // initialize all value evaluations to 10
                    for (ValueDiscrete vd : ((IssueDiscrete) e.getKey()).getValues())
                        ((EvaluatorDiscrete) e.getValue()).setEvaluation(vd, 10);
                } catch (Exception ex) {
                    System.out.println("Failed to initialize opponent model");
                    ex.printStackTrace();
                }
            }
            opponentUtilitySpace.put(sender.toString(), oppUSpace);
            System.out.println("Opponent model successfully created");
            return;
        } else if (bidHist.size() < 2) {
            return;
        }

        // update the existing opponent model
        int numberOfUnchanged = 0;
        BidDetails oppBid = bidHist.getHistory().get(bidHist.size() - 1);
        BidDetails prevOppBid = bidHist.getHistory().get(bidHist.size() - 2);
        HashMap<Integer, Integer> lastDiffSet = determineDifference(sender, prevOppBid, oppBid);

        // count the number of unchanged issue values
        for (Integer i : lastDiffSet.keySet()) {
            if (lastDiffSet.get(i) == 0)
                numberOfUnchanged++;
        }

        double goldenValue = learningRate / (double) amountOfIssues;
        double totalSum = 1D + goldenValue * (double) numberOfUnchanged;
        double maximumWeight = 1D - ((double) amountOfIssues) * goldenValue / totalSum;

        AdditiveUtilitySpace oppUSpace = opponentUtilitySpace.get(sender.toString());

        // update and normalize the issue weights
        for (Integer i : lastDiffSet.keySet()) {
            if (lastDiffSet.get(i) == 0 && oppUSpace.getWeight(i) < maximumWeight)
                oppUSpace.setWeight(oppUSpace.getDomain().getObjectivesRoot().getObjective(i),
                        (oppUSpace.getWeight(i) + goldenValue) / totalSum);
            else
                oppUSpace.setWeight(oppUSpace.getDomain().getObjectivesRoot().getObjective(i),
                        oppUSpace.getWeight(i) / totalSum);
        }

        // update the evaluations of the values used in the opponent's latest bid
        try {
            for (Entry<Objective, Evaluator> e : oppUSpace.getEvaluators()) {
                EvaluatorDiscrete evaluator = (EvaluatorDiscrete) e.getValue();
                ValueDiscrete value = (ValueDiscrete) oppBid.getBid()
                        .getValue(((IssueDiscrete) e.getKey()).getNumber());
                evaluator.setEvaluation(value, learnValueAddition + evaluator.getEvaluationNotNormalized(value));
            }
            opponentUtilitySpace.put(sender.toString(), oppUSpace);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

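    /**
     * Compares two consecutive bids of an opponent and returns, per issue number,
     * 0 if the chosen value is unchanged and 1 if it differs.
     */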
    private HashMap<Integer, Integer> determineDifference(Object sender, BidDetails first, BidDetails second) {
        // get the value differences between 2 bids
        HashMap<Integer, Integer> diff = new HashMap<Integer, Integer>();
        try {
            for (Issue i : opponentUtilitySpace.get(sender.toString()).getDomain().getIssues()) {
                diff.put(i.getNumber(), (((ValueDiscrete) first.getBid().getValue(i.getNumber()))
                        .equals((ValueDiscrete) second.getBid().getValue(i.getNumber()))) ? 0 : 1);
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }

        return diff;
    }

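    /**
     * Bidding strategy: computes a time-dependent target utility and looks for a
     * bid close to that target that is also attractive for the opponents.
     */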
    public Bid determineNextBid() {
        // this method handles the bidding strategy
        double time = getTimeLine().getTime();
        double utilityGoal;

        // time-dependent target utility: concede quadratically from 1.0 while
        // t < 0.6, then hold a fixed target of 0.85
        if (time < 0.6) {
            utilityGoal = (1 - Math.pow(time, 2)) * 0.5 + 0.5;
        } else {
            utilityGoal = 0.85;
        }
        try {
            return getBidNearUtility(utilityGoal, 0.05);
        } catch (Exception e) {
            System.out.println("Failed to get bid near utility");
            e.printStackTrace();
        }

        return null;
    }

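    /**
     * Enumerates all bids in the domain and, among the bids whose own utility lies
     * within delta of the target, selects the one that maximizes the summed
     * estimated opponent utility while keeping this agent's utility the highest.
     * If no bid qualifies, the search is retried with a larger tolerance.
     */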
    private Bid getBidNearUtility(double target, double delta) throws Exception {
        // search for the best bid given the target utility and tolerance
        BidIterator iter = new BidIterator(utilitySpace.getDomain());
        Bid bestBid = null;
        double maxOpUtil = -1;
        while (iter.hasNext()) {
            Bid nBid = iter.next();
            // check all bids
            try {
                double currMyU = getUtility(nBid);
                if (Math.abs(currMyU - target) < delta) {
                    // the bid's utility is in range, check the opponents' utilities
                    double oppUtil = 0;
                    boolean iWin = true;
                    for (Entry<Object, AdditiveUtilitySpace> opU : opponentUtilitySpace.entrySet()) {
                        // sum all opponents' utilities
                        double currOpU = opU.getValue().getUtility(nBid);
                        // make sure this agent's utility is the highest
                        if (iWin && (currMyU >= currOpU)) {
                            oppUtil += currOpU;
                        } else {
                            iWin = false;
                            break;
                        }
                    }

                    if (iWin && (oppUtil > maxOpUtil)) {
                        // prefer the bid with the maximum total opponent utility
                        bestBid = nBid;
                        maxOpUtil = oppUtil;
                    }
                }
            } catch (Exception e) {
                System.out.println("Failed to get opponent utility space 2");
                e.printStackTrace();
            }
        }
        if (maxOpUtil == -1) {
            // search failed; widen the tolerance and retry
            return getBidNearUtility(target, delta + 0.05);
        }
        return bestBid;
    }

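    /**
     * Checks whether this agent's utility for the given bid is at least as high as
     * every opponent's estimated utility.
     */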
    public boolean isGetHighest(Bid bid) {
        double myUtil = getUtility(bid);
        // compare this agent's utility with each opponent's estimated utility and
        // check whether this agent gets the highest utility
        for (Entry<Object, AdditiveUtilitySpace> opU : opponentUtilitySpace.entrySet()) {
            try {
                if (myUtil < opU.getValue().getUtility(bid)) {
                    return false;
                }
            } catch (Exception e) {
                System.out.println("Failed to get opponent utility space 3");
                e.printStackTrace();
            }
        }

        return true;
    }

    @Override
    public String getDescription() {
        return "Agent11 - Multi party negotiation agent";
    }

}