package geniusweb.blingbling; // old one, not used.

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;

import org.neuroph.core.Layer;
import org.neuroph.core.NeuralNetwork;
import org.neuroph.core.Neuron;
import org.neuroph.core.data.DataSet;
import org.neuroph.core.data.DataSetRow;
import org.neuroph.core.learning.LearningRule;
import org.neuroph.nnet.learning.BackPropagation;
import org.neuroph.util.ConnectionFactory;

import geniusweb.profile.DefaultPartialOrdering;
import geniusweb.profile.Profile;
import geniusweb.profile.utilityspace.UtilitySpace;
import geniusweb.issuevalue.Bid;
import geniusweb.issuevalue.Domain;
import geniusweb.issuevalue.Value;
//ranknet
import geniusweb.blingbling.Ranknet.NeuralRankNet;
import geniusweb.blingbling.Ranknet.*;
import tudelft.utilities.logging.Reporter;

public class MyUtilitySpace { // my estimated utility space class.
    private Domain domain;
    private List<Bid> bidlist = new ArrayList<>();
    private List<Bid> sortedbidlist = new LinkedList<>();
    private Bid reservationbid;
    private Bid maxBid;
    private Bid minBid;
    private HashMap<String, HashMap<Value, Integer>> valuefrequency = new HashMap<>(); // new? or null?
    private NeuralNetwork ann = new NeuralNetwork(); // the network
    private HashMap<String, Integer> issueToNeuron = new HashMap<>(); // tracks the neuron position of each issue
    private HashMap<String, HashMap<Value, Integer>> valueToNeuron = new HashMap<>(); // tracks the neuron position of each value
    int maxIter = 500; // Integer.parseInt(args[0]);
    double maxError = 0.01; // Double.parseDouble(args[1]);
    double learningRate = 0.0005; // Double.parseDouble(args[2]);
    DefaultPartialOrdering prof;
    List<List<Integer>> betterlist;

    public MyUtilitySpace(Profile profile, Reporter reporter) {
        this.prof = (DefaultPartialOrdering) profile;
        this.domain = prof.getDomain();
        this.bidlist = new ArrayList<>(prof.getBids()); // copy, so elicited bids can be added later
        this.reservationbid = prof.getReservationBid();
        this.sortedbidlist = setSortedBids(prof);
        this.maxBid = sortedbidlist.get(sortedbidlist.size() - 1);
        this.minBid = sortedbidlist.get(0);
        this.betterlist = prof.getBetter(); // for the learning-to-rank method.
        initValueFrequency();
        setvaluefrequency(prof.getBids()); // for elicit compare.
        buildNetwork(prof.getDomain());
        initweight();
        DataSet ds = setDataset(this.sortedbidlist);
        trainNetwork(ds);
    }

    public void buildNetwork(Domain domain) { // neural network model.
        Set<String> issueset = domain.getIssues(); // get all the issues.
        Layer inputlayer = new Layer(); // set the input layer.
        Layer weightlayer = new Layer(); // set the weight layer.
        int weightind = 0; // tracks the issue position in the weight layer
        int valueind = 0; // tracks the value position in the input layer
        for (String issue : issueset) { // iterate the issues
            weightlayer.addNeuron(weightind, new Neuron()); // add one neuron to the weight layer for this issue
            issueToNeuron.put(issue, weightind); // remember the neuron index of this issue
            weightind++; // update the weight index
            HashMap<Value, Integer> temp = new HashMap<>();
            for (Value value : domain.getValues(issue)) {
                inputlayer.addNeuron(valueind, new Neuron()); // add a neuron for every value.
                temp.put(value, valueind);
                valueToNeuron.put(issue, temp);
                valueind++; // update the value index (position)
            }
        }
        ann.addLayer(0, inputlayer); // set layer position
        ann.addLayer(1, weightlayer);
        Layer outputlayer = new Layer();
        outputlayer.addNeuron(0, new Neuron()); // add one neuron to the output layer.
        ann.addLayer(2, outputlayer);
        ann.setInputNeurons(inputlayer.getNeurons());
        ann.setOutputNeurons(outputlayer.getNeurons());
    }

    public void initweight() { // init the NN weights and create the connections.
        Set<String> issueset = domain.getIssues(); // get all the issues
        for (String issue : issueset) {
            for (Value value : domain.getValues(issue)) {
                // create connections from value neurons to issue neurons
                if (valuefrequency.get(issue).get(value) == 0) {
                    ann.createConnection(ann.getLayerAt(0).getNeuronAt(valueToNeuron.get(issue).get(value)),
                            ann.getLayerAt(1).getNeuronAt(issueToNeuron.get(issue)),
                            0.0); // if a value never shows up in the partial data, we assign 0 to its weight.
                    continue;
                }
                ann.createConnection(ann.getLayerAt(0).getNeuronAt(valueToNeuron.get(issue).get(value)),
                        ann.getLayerAt(1).getNeuronAt(issueToNeuron.get(issue)),
                        0.5); // otherwise we assign 0.5 to the weight. Is there a more proper way to init this weight?
            }
            ann.createConnection(ann.getLayerAt(1).getNeuronAt(issueToNeuron.get(issue)),
                    ann.getLayerAt(2).getNeuronAt(0),
                    1.0 / issueset.size()); // equal issue weights according to the number of issues. Is there a way to keep the sum of the weights at 1.0?
        }
    }

    public void initValueFrequency() { // create an all-0 hashmap
        for (String issue : domain.getIssues()) {
            HashMap<Value, Integer> vmap = new HashMap<>();
            for (Value value : domain.getValues(issue)) {
                vmap.put(value, 0);
            }
            this.valuefrequency.put(issue, vmap);
        }
    }

    public void setvaluefrequency(List<Bid> inbidlist) { // update the value frequencies.
        for (Bid bid : inbidlist) {
            for (String issue : bid.getIssues()) {
                Value v = bid.getValue(issue);
                HashMap<Value, Integer> temp = valuefrequency.get(issue);
                int cnt = temp.get(v);
                temp.put(v, cnt + 1);
                valuefrequency.put(issue, temp);
            }
        }
    }

    public HashMap<String, List<Value>> getmostinformative() {
        // for every issue, collect the values seen least often; eliciting those is most informative.
        HashMap<String, List<Value>> infovalue = new HashMap<>();
        for (String issue : domain.getIssues()) {
            List<Value> elicitvalueset = new ArrayList<>();
            int minfreq = 0;
            for (Value value : domain.getValues(issue)) {
                int freq = valuefrequency.get(issue).get(value);
                if (elicitvalueset.isEmpty()) {
                    elicitvalueset.add(value);
                    minfreq = freq;
                } else {
                    // assumption: keep all values tied for the lowest observed frequency
                    if (freq < minfreq) {
                        elicitvalueset.clear();
                        elicitvalueset.add(value);
                        minfreq = freq;
                    } else if (freq == minfreq) {
                        elicitvalueset.add(value);
                    }
                }
            }
            infovalue.put(issue, elicitvalueset);
        }
        return infovalue;
    }

    public DataSet setDataset(List<Bid> inbidlist) { // construct the training dataset.
        // input: the sorted bid list, utility from low to high. Trained using the assigned utility.
        int inputsize = ann.getInputsCount();
        int outputsize = ann.getOutputsCount();
        DataSet ds = new DataSet(inputsize, outputsize);
        int datasize = inbidlist.size(); // the size
        double cnt = 0; // running rank counter
        for (Bid bid : inbidlist) {
            double[] input = new double[inputsize];
            double[] output = new double[outputsize];
            Set<Integer> indset = new HashSet<>();
            for (String issue : domain.getIssues()) {
                Value v = bid.getValue(issue); // get the bid's value of this issue.
                indset.add(valueToNeuron.get(issue).get(v)); // keep the input positions of the bid's values.
                // valueToNeuron maps every possible value to its position in the input layer.
            }
            for (int ind = 0; ind < inputsize; ind++) {
                // one-hot encode the bid: 1 for the chosen values, 0 otherwise
                input[ind] = indset.contains(ind) ? 1.0 : 0.0;
            }
            // assumption: the target utility is the bid's normalised rank in the sorted list
            output[0] = cnt / Math.max(1, datasize - 1);
            cnt++;
            ds.addRow(new DataSetRow(input, output));
        }
        return ds;
    }

    public void trainNetwork(DataSet ds) {
        // assumption: plain backpropagation with the learning parameters defined above
        BackPropagation learningRule = new BackPropagation();
        learningRule.setMaxIterations(maxIter);
        learningRule.setMaxError(maxError);
        learningRule.setLearningRate(learningRate);
        ann.setLearningRule(learningRule);
        ann.learn(ds);
    }

    public List<Bid> setSortedBids(Profile profile) {
        // returns a sorted list here, from low utility to high utility.
        DefaultPartialOrdering prof = (DefaultPartialOrdering) profile;
        List<Bid> bidslist = new ArrayList<>(prof.getBids()); // get all the bids in the partial information.
        // NOTE sort defaults to ascending order.
        Collections.sort(bidslist, new Comparator<Bid>() {
            @Override
            public int compare(Bid b1, Bid b2) {
                return prof.isPreferredOrEqual(b1, b2) ? 1 : -1;
            }
        });
        return bidslist;
    }

    // get and update methods
    public void update(Bid bid, List<Bid> worseBids) { // note: the value frequencies also need to be updated
        // re-insert the new bid into the sorted list, just above the bids it is known to beat
        int n = 0;
        while (n < sortedbidlist.size() && worseBids.contains(sortedbidlist.get(n)))
            n++;
        LinkedList<Bid> newbids = new LinkedList<>(sortedbidlist);
        newbids.add(n, bid);
        sortedbidlist = newbids;
        DataSet ds = setDataset(sortedbidlist);
        trainNetwork(ds);
        bidlist.add(bid); // this is the unsorted bid list.
    }

    public double getUtility(Bid bid) {
        int inputsize = ann.getInputsCount();
        double[] input = new double[inputsize];
        Set<Integer> indset = new HashSet<>();
        for (String issue : domain.getIssues()) {
            Value v = bid.getValue(issue);
            indset.add(valueToNeuron.get(issue).get(v));
        }
        for (int ind = 0; ind < inputsize; ind++) {
            input[ind] = indset.contains(ind) ? 1.0 : 0.0;
        }
        // assumption: feed the one-hot encoded bid through the network and read its single output
        ann.setInput(input);
        ann.calculate();
        return ann.getOutput()[0];
    }

    public List<Bid> getSortedBids() {
        return this.sortedbidlist;
    }

    public Bid getBestBid() {
        return this.maxBid;
    }

    public Bid getWorstBid() {
        return this.minBid;
    }

    public NeuralNetwork getann() {
        return this.ann;
    }

    // The following is for the learning-to-rank method, using deeplearning4j.
    public DataSet setPairwiseDataset() {
        betterlist = prof.getBetter();
        return null;
    }

    public void trainPairwise(DataSet ds) {
        // use the pairwise information
    }
}
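
// Rough usage sketch of the class above: it assumes the party has already received a
// DefaultPartialOrdering profile (e.g. in a SHAOP-style setting) and a Reporter.
// The class and method names below (UtilitySpaceSketch, logEstimates) are illustrative
// only and are not part of the blingbling agent.
class UtilitySpaceSketch {
    static void logEstimates(Profile profile, Reporter reporter) {
        MyUtilitySpace space = new MyUtilitySpace(profile, reporter);
        Bid best = space.getBestBid();   // highest-ranked bid in the partial ordering
        Bid worst = space.getWorstBid(); // lowest-ranked bid
        // the trained network estimates utilities roughly in [0, 1]
        double uBest = space.getUtility(best);
        double uWorst = space.getUtility(worst);
        // values seen least often in the ranked bids are candidates for elicitation
        HashMap<String, List<Value>> toElicit = space.getmostinformative();
        reporter.log(Level.INFO, "estimated u(best)=" + uBest + ", u(worst)=" + uWorst
                + ", elicitation candidates: " + toElicit);
    }
}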