source: src/main/java/agents/anac/y2019/agentlarry/AgentLarry.java

Last change on this file was 201, checked in by Katsuhide Fujita, 5 years ago

Add ANAC 2019 agents (2)

package agents.anac.y2019.agentlarry;

import genius.core.AgentID;
import genius.core.Bid;
import genius.core.BidIterator;
import genius.core.actions.Accept;
import genius.core.actions.Action;
import genius.core.actions.EndNegotiation;
import genius.core.actions.Offer;
import genius.core.list.Tuple;
import genius.core.parties.AbstractNegotiationParty;
import genius.core.parties.NegotiationInfo;
import genius.core.persistent.PersistentDataType;
import genius.core.persistent.StandardInfo;
import genius.core.persistent.StandardInfoList;
import genius.core.uncertainty.AdditiveUtilitySpaceFactory;
import genius.core.utility.AbstractUtilitySpace;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class AgentLarry extends AbstractNegotiationParty {
    private final Map<AgentID, BidHistory> agentsBidHistories = new HashMap<>();
    private Bid lastOfferedBid = null;
    private BidHistory initialHistory = new BidHistory();
    private final VectorConverter vectorConverter = new VectorConverter();

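    /** A bid history: (bid, accepted) pairs, where true means the agent offered or accepted the bid and false means it received the bid without accepting it. */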
    private class BidHistory extends ArrayList<Tuple<Bid, Boolean>> {}

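    /**
     * If standard persistent data from previous sessions is available, seed the initial bid
     * history with the agreements that were reached, recorded as accepted bids.
     */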
    @Override
    public void init(NegotiationInfo info) {
        super.init(info);
        if (this.getData().getPersistentDataType() == PersistentDataType.STANDARD) {
            StandardInfoList infoList = (StandardInfoList) info.getPersistentData().get();
            for (StandardInfo sessionInfo : infoList) {
                Bid initialBid = sessionInfo.getAgreement().get1();
                if (initialBid != null) {
                    System.out.println(String.format("initial bid: %s", initialBid.toString()));
                    initialHistory.add(new Tuple<>(initialBid, true));
                }
            }
        }
    }

    /**
     * Receive a message and record it in the sender's bid history.
     * For an Accept, the accepted bid is stored with true.
     * For an Offer, the previously offered bid is stored with false (since the agent did not
     * accept it), and the bid contained in the offer is stored with true, on the assumption
     * that an agent would accept a bid it offered itself.
     *
     * @param sender The id of the agent who sent the message
     * @param act    The action that was sent
     */
    @Override
    public void receiveMessage(AgentID sender, Action act) {
        super.receiveMessage(sender, act);
        if (act instanceof Offer || act instanceof Accept) {
            if (!agentsBidHistories.containsKey(sender)) {
                agentsBidHistories.put(sender, (BidHistory) initialHistory.clone());
            }
        }

        if (act instanceof Offer) {
            Bid bid = ((Offer) act).getBid();
            this.agentsBidHistories.get(sender).add(new Tuple<>(bid, true));
            if (this.lastOfferedBid != null) {
                this.agentsBidHistories.get(sender).add(new Tuple<>(this.lastOfferedBid, false));
            }
            this.lastOfferedBid = bid;
        } else if (act instanceof Accept) {
            Bid bid = ((Accept) act).getBid();
            this.agentsBidHistories.get(sender).add(new Tuple<>(bid, true));
        }
    }

    /**
     * Choose whether to accept the last offer or make a new offer.
     *
     * First we initialize a logistic regression model for each other agent from its bid history,
     * using whether it accepted or rejected each bid.
     * We train a fresh logistic regression on every call rather than reusing one for the whole
     * session, because retraining each time gives better results
     * (probably because of the random weights the training starts from).
     *
     * Then for each possible bid we estimate the chance that each agent will accept it using the
     * logistic regression models, evaluate the bid as our estimated utility plus the product of
     * those acceptance probabilities, and pick the bid with the highest evaluation.
     *
     * Finally, if the last offered bid ranks at least as well in our bid ranking as the bid we
     * picked (scaled by the discount factor), we accept it; otherwise we offer the picked bid.
     *
     * @param list The available actions
     * @return The chosen action
     */
    @Override
    public Action chooseAction(List<Class<? extends Action>> list) {
        try {
            System.out.println(getPartyId());
            System.out.println("choosing action");

            System.out.println("initializing models");
            List<LogisticRegression> logisticRegressionsModels = this.initializeModels();
            LogisticRegression larryModel = this.initializeLarryModel();

            System.out.println("searching for best bid");
            Bid nextBid = this.findNextBid(logisticRegressionsModels, larryModel);

            System.out.println("choosing between accepting and offering");

            if (list.contains(Accept.class) && shouldAccept(nextBid)) {
                System.out.println("Accepting");
                return new Accept(getPartyId(), lastOfferedBid);
            } else {
                System.out.println("offering");
                this.lastOfferedBid = nextBid;
                return new Offer(this.getPartyId(), nextBid);
            }

        } catch (Exception e) {
            e.printStackTrace();
            return new EndNegotiation(getPartyId());
        }
    }

    /**
     * @param nextBid The next bid to offer
     * @return Whether to accept the last bid or offer the nextBid
     */
    private boolean shouldAccept(Bid nextBid) {
        if (lastOfferedBid != null) {
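            // Accept when the index of the opponent's last offer in our bid ranking is at least
            // the index of the bid we plan to offer, scaled by the discount factor.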
            if (userModel.getBidRanking().getBidOrder().indexOf(lastOfferedBid) >=
                    userModel.getBidRanking().getBidOrder().indexOf(nextBid) * this.utilitySpace.getDiscountFactor()) {
                return true;
            }
        }
        return false;
    }

    /**
     * @param logisticRegressionsModels The acceptance models of the other agents
     * @param larryModel                The model of our own utility, trained from the bid ranking
     * @return The next bid to offer
     */
    private Bid findNextBid(List<LogisticRegression> logisticRegressionsModels, LogisticRegression larryModel) {
        double bestBidEvaluation = 0;
        Bid nextBid = null;

        BidIterator bidIterator = new BidIterator(this.utilitySpace.getDomain());

        while (bidIterator.hasNext()) {
            Bid bid = bidIterator.next();
            Vector vector = this.vectorConverter.convert(bid);
            double chancesForAcceptance = 1;
            for (LogisticRegression model : logisticRegressionsModels) {
                chancesForAcceptance *= model.classify(vector);
            }
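            // Evaluation: our estimated utility of the bid plus the joint probability
            // that all the other agents accept it.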
            double bidUtility = larryModel.classify(vector);
            double bidEvaluation = bidUtility + chancesForAcceptance;

            if (bidEvaluation >= bestBidEvaluation) {
                nextBid = bid;
                bestBidEvaluation = bidEvaluation;
            }
        }
        System.out.println(String.format("next bid evaluation %f", bestBidEvaluation));
        return nextBid;
    }

    /**
     * Initialize the models of the other agents.
     * Each model takes a bid and returns the probability that the agent will accept it.
     *
     * @return The logistic regression models of the agents
     */
    private List<LogisticRegression> initializeModels() {
        List<LogisticRegression> logisticRegressionsModels = new ArrayList<>();
        for (BidHistory bidHistory : this.agentsBidHistories.values()) {
            LogisticRegression logisticRegression = new LogisticRegression(
                    this.vectorConverter.getVectorSize(this.utilitySpace.getDomain()));
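            // Train on the agent's history: label 1 for bids it offered or accepted,
            // label 0 for bids it received but did not accept.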
            for (Tuple<Bid, Boolean> bidToDidAccept : bidHistory) {
                Vector vector = this.vectorConverter.convert(bidToDidAccept.get1());
                double label = bidToDidAccept.get2() ? 1 : 0;
                logisticRegression.train(vector, label);
            }
            logisticRegressionsModels.add(logisticRegression);
        }
        return logisticRegressionsModels;
    }

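    /**
     * Train a logistic regression model of our own preferences from the bid ranking:
     * each bid is labelled proportionally to its position in the ranking, scaled by the spread
     * between the ranking's low and high utility, and the ranking is replayed for several passes.
     *
     * @return The model estimating our own utility of a bid
     */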
    private LogisticRegression initializeLarryModel() {
        LogisticRegression model = new LogisticRegression(this.vectorConverter.getVectorSize(this.utilitySpace.getDomain()));
        List<Bid> bids = userModel.getBidRanking().getBidOrder();
        for (int j = 0; j < 5; j++) {
            for (int i = 0; i < bids.size(); i++) {
                Vector vector = this.vectorConverter.convert(bids.get(i));
                double label = (userModel.getBidRanking().getHighUtility() - userModel.getBidRanking().getLowUtility()) * i / bids.size();
                model.train(vector, label);
            }
        }
        return model;
    }

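    /**
     * Under preference uncertainty, return the additive utility space produced by the factory
     * for this domain without further estimation from the bid ranking; the preference estimate
     * actually used for bidding comes from the ranking-trained model in initializeLarryModel.
     */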
    @Override
    public AbstractUtilitySpace estimateUtilitySpace() {
        return new AdditiveUtilitySpaceFactory(getDomain()).getUtilitySpace();
    }

    @Override
    public String getDescription() {
        return "ANAC2019 AgentLarry";
    }
}