List of usage examples for weka.core Instances instance
public Instance instance(int index)
From source file:Classifiers.BRkNN.java
License:Open Source License
/** * Calculates the confidences of the labels, based on the neighboring * instances//from w w w .j a v a 2 s . c om * * @param neighbours * the list of nearest neighboring instances * @param distances * the distances of the neighbors * @return the confidences of the labels */ private double[] getConfidences(Instances neighbours, double[] distances) { double total, weight; double neighborLabels = 0; double[] confidences = new double[numLabels]; // Set up a correction to the estimator for (int i = 0; i < numLabels; i++) { confidences[i] = 1.0 / Math.max(1, train.numInstances()); } total = (double) numLabels / Math.max(1, train.numInstances()); for (int i = 0; i < neighbours.numInstances(); i++) { // Collect class counts Instance current = neighbours.instance(i); distances[i] = distances[i] * distances[i]; distances[i] = Math.sqrt(distances[i] / (train.numAttributes() - numLabels)); weight = 1.0; weight *= current.weight(); for (int j = 0; j < numLabels; j++) { double value = Double.parseDouble( current.attribute(labelIndices[j]).value((int) current.value(labelIndices[j]))); if (Utils.eq(value, 1.0)) { confidences[j] += weight; neighborLabels += weight; } } total += weight; } avgPredictedLabels = (int) Math.round(neighborLabels / total); // Normalise distribution if (total > 0) { Utils.normalize(confidences, total); } return confidences; }
From source file:classifiers.ComplexClassifier.java
/**
 * Draws a bootstrap sample from {@code inst}: the (de-duplicated) sampled
 * indices become the model set ({@code Modelmenge}); every instance whose
 * index was NOT sampled goes into the validation set
 * ({@code validierungsmenge}).
 *
 * @param inst the data set to split; ignored when it has no attributes
 */
@Override
public void bootstrapvalidierungsmenge(Instances inst) {
    if (inst.numAttributes() != 0) {
        // Sample numInstances() indices uniformly at random, with replacement.
        int[] hilf = new int[inst.numInstances()];
        for (int i = 0; i < inst.numInstances(); i++) {
            hilf[i] = (int) (Math.random() * inst.numInstances());
        }
        Modelsindexen = EliminiereDopelt(hilf);

        Modelmenge = new Instances(inst, Modelsindexen.length);
        for (int i = 0; i < Modelsindexen.length; i++) {
            Modelmenge.add(new Instance(inst.instance(Modelsindexen[i])));
        }

        // Exactly (numInstances - |Modelsindexen|) indices were not sampled.
        validierungsindexen = new int[inst.numInstances() - Modelsindexen.length];
        validierungsmenge = new Instances(Modelmenge, validierungsindexen.length);

        // BUG FIX: the original for-loop incremented j on every iteration,
        // even when index i belonged to the model set. That left gaps in
        // validierungsindexen and ended the loop before the validation set
        // was filled. j must only advance when an index is actually stored.
        int j = 0;
        for (int i = 0; i < inst.numInstances() && j < validierungsindexen.length; i++) {
            if (!HasSet(Modelsindexen, i)) {
                validierungsindexen[j] = i;
                validierungsmenge.add(inst.instance(validierungsindexen[j]));
                j++;
            }
        }
    }
}
From source file:classifiers.ComplexClassifier.java
/**
 * Draws a bootstrap sample over the model set {@code Modelmenge}: sampled
 * (de-duplicated) indices become the training data ({@code traindaten});
 * the remaining indices become the test data ({@code testdaten}).
 *
 * NOTE(review): instances are copied from the {@code inst} parameter while
 * the indices are drawn against {@code Modelmenge} — this only lines up if
 * callers pass {@code Modelmenge} (or an identically ordered set) as
 * {@code inst}; confirm against call sites.
 *
 * @param inst the data set the sampled indices are resolved against
 */
@Override
public void Bootstrap(Instances inst) {
    if (Modelmenge.numAttributes() != 0) {
        // Sample indices uniformly at random, with replacement.
        int[] hilf = new int[Modelmenge.numInstances()];
        for (int i = 0; i < Modelmenge.numInstances(); i++) {
            hilf[i] = (int) (Math.random() * Modelmenge.numInstances());
        }
        trainingsetindexen = EliminiereDopelt(hilf);

        traindaten = new Instances(Modelmenge, trainingsetindexen.length);
        for (int i = 0; i < trainingsetindexen.length; i++) {
            traindaten.add(new Instance(inst.instance(trainingsetindexen[i])));
        }

        testsetindexen = new int[Modelsindexen.length - trainingsetindexen.length];
        testdaten = new Instances(traindaten, testsetindexen.length);

        // BUG FIX: the original for-loop incremented j on every iteration,
        // even when index i was part of the training set, leaving gaps in
        // testsetindexen and ending the loop before the test set was filled.
        // j must only advance when an index is actually stored.
        int j = 0;
        for (int i = 0; i < Modelmenge.numInstances() && j < testsetindexen.length; i++) {
            if (!HasSet(trainingsetindexen, i)) {
                testsetindexen[j] = i;
                testdaten.add(inst.instance(testsetindexen[j]));
                j++;
            }
        }
    }
}
From source file:classifiers.ComplexClassifier.java
/**
 * Trains the classifier: for every (non-class) attribute, builds a
 * distribution object matching the attribute's type and — when the
 * corresponding network node has parents — the parent attribute's type,
 * inserting it into {@code list} at the attribute's position. Finally
 * estimates the class prior {@code Classparam} as relative class
 * frequencies over {@code inst}.
 *
 * @param inst the training data
 * @throws Exception if an attribute type is neither NUMERIC, NOMINAL nor STRING
 */
@Override
public void train(Instances inst) throws Exception {
    // Network nodes; node at position attindex corresponds to attribute attindex.
    Knoten[] k = Model.getDieknoten();
    Enumeration<Attribute> enu = inst.enumerateAttributes();
    int attindex = 0;
    while (enu.hasMoreElements()) {
        Attribute att = enu.nextElement();
        if (k[attindex].hatEltern()) {
            // Node has parents: pick the conditional distribution by the
            // (child type, parent type) pair, one entry per parent.
            switch (att.type()) {
            case Attribute.NUMERIC: {
                for (int i = 0; i < k[attindex].anzahlEltern(); i++) {
                    Attribute a = inst.attribute(k[attindex].getEltern(i).getID());
                    int c = a.index();
                    switch (a.type()) {
                    case Attribute.NUMERIC:
                        list.add(attindex, (new NumericNumericDistribution(inst, attindex, c)));
                        break;
                    case (Attribute.NOMINAL):
                        list.add(attindex, new NumericNominalDistribution(inst, attindex, c));
                        break;
                    case (Attribute.STRING):
                        // STRING parents are treated the same as NOMINAL ones.
                        list.add(attindex, new NumericNominalDistribution(inst, attindex, c));
                        break;
                    default:
                        throw new Exception("Attributetype unbekannt");
                    }
                }
            }
                break;
            case Attribute.NOMINAL: {
                for (int i = 0; i < k[attindex].anzahlEltern(); i++) {
                    Attribute a = inst.attribute(k[attindex].getEltern(i).getID());
                    int c = a.index();
                    switch (a.type()) {
                    case Attribute.NUMERIC:
                        list.add(attindex, new NumericNominalDistribution(inst, attindex, c));
                        break;
                    case (Attribute.NOMINAL):
                        list.add(attindex, new NominalNominalDistribution(inst, attindex, c));
                        break;
                    case (Attribute.STRING):
                        // STRING parents are treated the same as NOMINAL ones.
                        list.add(attindex, new NominalNominalDistribution(inst, attindex, c));
                        break;
                    default: {
                        throw new Exception("Attributetype unbekannt");
                    }
                    }
                }
            }
                break;
            }
        } else {
            // No parents: unconditional distribution for the attribute itself.
            switch (att.type()) {
            case Attribute.NUMERIC:
                list.add(attindex, new NumericDistribution(inst, attindex));
                break;
            case Attribute.NOMINAL:
                list.add(attindex, new NominalDistribution(inst, attindex));
                break;
            case Attribute.STRING:
                // STRING attributes are treated the same as NOMINAL ones.
                list.add(attindex, new NominalDistribution(inst, attindex));
                break;
            default:
                throw new Exception("Attributetype unbekannt");
            }
        }
        attindex++;
    }
    // Count how many instances fall into each class ...
    for (int i = 0; i < inst.numClasses(); i++) {
        for (int j = 0; j < inst.numInstances(); j++) {
            if (inst.instance(j).classValue() == i) {
                Classparam[i]++;
            }
        }
    }
    // ... and normalise the counts into the class prior.
    for (int i = 0; i < inst.numClasses(); i++) {
        Classparam[i] /= inst.numInstances();
    }
}
From source file:classifiers.ComplexClassifier.java
@Override public double[][] test(Instances testinst) { double count = 0; long anfangszeit = System.currentTimeMillis(); ;/*ww w . j a va 2 s.com*/ long endzeit; double[][] ausgabe = new double[1][2]; if (testinst.numAttributes() != 0) { testinst.setClass(testinst.attribute(testinst.numAttributes() - 1)); for (int i = 0; i < testinst.numInstances(); i++) { if (!Classify(testinst.instance(i))) { count++; } else { } } endzeit = System.currentTimeMillis(); ausgabe[0][0] = (count / testinst.numInstances()) * 100; ausgabe[0][1] = ((endzeit - anfangszeit)); // System.out.println(testinst); return ausgabe; } else { // System.out.println(testinst); return ausgabe; } }
From source file:classifiers.ComplexClassifierZufall.java
/**
 * Trains the classifier: for every (non-class) attribute, builds a
 * distribution object matching the attribute's type and — when the
 * corresponding network node has parents — the parent attribute's type,
 * inserting it into {@code list} at the attribute's position. Finally
 * estimates the class prior {@code Classparam} as relative class
 * frequencies over {@code inst}.
 *
 * @param inst the training data
 * @throws Exception if an attribute type is neither NUMERIC, NOMINAL nor STRING
 */
@Override
public void train(Instances inst) throws Exception {
    // Network nodes; node at position attindex corresponds to attribute attindex.
    Knoten[] k = Model.getDieknoten();
    Enumeration<Attribute> enu = inst.enumerateAttributes();
    int attindex = 0;
    while (enu.hasMoreElements()) {
        Attribute att = enu.nextElement();
        if (k[attindex].hatEltern()) {
            // Node has parents: pick the conditional distribution by the
            // (child type, parent type) pair, one entry per parent.
            switch (att.type()) {
            case Attribute.NUMERIC: {
                for (int i = 0; i < k[attindex].anzahlEltern(); i++) {
                    Attribute a = inst.attribute(k[attindex].getEltern(i).getID());
                    int c = a.index();
                    switch (a.type()) {
                    case Attribute.NUMERIC:
                        list.add(attindex, (new NumericNumericDistribution(inst, attindex, c)));
                        break;
                    case (Attribute.NOMINAL):
                        list.add(attindex, new NumericNominalDistribution(inst, attindex, c));
                        break;
                    case (Attribute.STRING):
                        // STRING parents are treated the same as NOMINAL ones.
                        list.add(attindex, new NumericNominalDistribution(inst, attindex, c));
                        break;
                    default:
                        throw new Exception("Attributetype unbekannt");
                    }
                }
            }
                break;
            case Attribute.NOMINAL: {
                for (int i = 0; i < k[attindex].anzahlEltern(); i++) {
                    Attribute a = inst.attribute(k[attindex].getEltern(i).getID());
                    int c = a.index();
                    switch (a.type()) {
                    case Attribute.NUMERIC:
                        list.add(attindex, new NumericNominalDistribution(inst, attindex, c));
                        break;
                    case (Attribute.NOMINAL):
                        list.add(attindex, new NominalNominalDistribution(inst, attindex, c));
                        break;
                    case (Attribute.STRING):
                        // STRING parents are treated the same as NOMINAL ones.
                        list.add(attindex, new NominalNominalDistribution(inst, attindex, c));
                        break;
                    default: {
                        throw new Exception("Attributetype unbekannt");
                    }
                    }
                }
            }
                break;
            }
        } else {
            // No parents: unconditional distribution for the attribute itself.
            switch (att.type()) {
            case Attribute.NUMERIC:
                list.add(attindex, new NumericDistribution(inst, attindex));
                break;
            case Attribute.NOMINAL:
                list.add(attindex, new NominalDistribution(inst, attindex));
                break;
            case Attribute.STRING:
                // STRING attributes are treated the same as NOMINAL ones.
                list.add(attindex, new NominalDistribution(inst, attindex));
                break;
            default:
                throw new Exception("Attributetype unbekannt");
            }
        }
        attindex++;
    }
    // Count how many instances fall into each class ...
    for (int i = 0; i < inst.numClasses(); i++) {
        for (int j = 0; j < inst.numInstances(); j++) {
            if (inst.instance(j).classValue() == i) {
                Classparam[i]++;
            }
        }
    }
    // ... and normalise the counts into the class prior.
    for (int i = 0; i < inst.numClasses(); i++) {
        Classparam[i] /= inst.numInstances();
    }
}
From source file:classifiers.ComplexClassifierZufall.java
@Override @SuppressWarnings("empty-statement") public double[][] test(Instances testinst) { double count = 0; long anfangszeit = System.currentTimeMillis(); ;/*w ww. j a v a 2s. c om*/ long endzeit; double[][] ausgabe = new double[1][2]; if (testinst.numAttributes() != 0) { testinst.setClass(testinst.attribute(testinst.numAttributes() - 1)); for (int i = 0; i < testinst.numInstances(); i++) { if (!Classify(testinst.instance(i))) { count++; } else { } } endzeit = System.currentTimeMillis(); ausgabe[0][0] = (count / testinst.numInstances()) * 100; ausgabe[0][1] = ((endzeit - anfangszeit)); // System.out.println(testinst); return ausgabe; } else { // System.out.println(testinst); return ausgabe; } }
From source file:Classifiers.MLkNN.java
License:Open Source License
protected MultiLabelOutput makePredictionInternal(Instance instance) throws Exception { double[] confidences = new double[numLabels]; boolean[] predictions = new boolean[numLabels]; Instances knn = null; try {/* www . j a v a2s .c om*/ knn = new Instances(lnn.kNearestNeighbours(instance, numOfNeighbors)); } catch (Exception ex) { Logger.getLogger(MLkNN.class.getName()).log(Level.SEVERE, null, ex); } for (int i = 0; i < numLabels; i++) { // compute sum of aces in KNN int aces = 0; // num of aces in Knn for i for (int k = 0; k < numOfNeighbors; k++) { double value = Double.parseDouble( train.attribute(labelIndices[i]).value((int) knn.instance(k).value(labelIndices[i]))); if (Utils.eq(value, 1.0)) { aces++; } } double Prob_in = PriorProbabilities[i] * CondProbabilities[i][aces]; double Prob_out = PriorNProbabilities[i] * CondNProbabilities[i][aces]; if (Prob_in > Prob_out) { predictions[i] = true; } else if (Prob_in < Prob_out) { predictions[i] = false; } else { Random rnd = new Random(); predictions[i] = (rnd.nextInt(2) == 1) ? true : false; } // ranking function confidences[i] = Prob_in / (Prob_in + Prob_out); } MultiLabelOutput mlo = new MultiLabelOutput(predictions, confidences); return mlo; }
From source file:classifiers.mlp.MultilayerPerceptronCustom.java
License:Open Source License
/**
 * Builds and trains a neural network for the provided training data.
 * Splits off an optional validation set, optionally shows a Swing GUI for
 * interactive training, runs epoch-wise backpropagation, and — custom to
 * this class — serialises the model to "mlp/temp/&lt;epoch&gt;.model" after
 * every epoch.
 *
 * @param i the training data
 * @throws Exception if classification can't be built properly
 */
public void buildClassifier(Instances i) throws Exception {

    // can classifier handle the data?
    getCapabilities().testWithFail(i);

    // remove instances with missing class
    i = new Instances(i);
    i.deleteWithMissingClass();

    m_ZeroR = new weka.classifiers.rules.ZeroR();
    m_ZeroR.buildClassifier(i);
    // only class? -> use ZeroR model
    if (i.numAttributes() == 1) {
        System.err.println(
                "Cannot build model (only class attribute present in data!), " + "using ZeroR model instead!");
        m_useDefaultModel = true;
        return;
    } else {
        m_useDefaultModel = false;
    }

    // Reset all mutable training state before (re)building the network.
    m_epoch = 0;
    m_error = 0;
    m_instances = null;
    m_currentInstance = null;
    m_controlPanel = null;
    m_nodePanel = null;
    m_outputs = new NeuralEnd[0];
    m_inputs = new NeuralEnd[0];
    m_numAttributes = 0;
    m_numClasses = 0;
    m_neuralNodes = new NeuralConnection[0];
    m_selected = new FastVector(4);
    m_graphers = new FastVector(2);
    m_nextId = 0;
    m_stopIt = true;
    m_stopped = true;
    m_accepted = false;
    m_instances = new Instances(i);
    m_random = new Random(m_randomSeed);
    m_instances.randomize(m_random);

    // Optionally convert nominal attributes to binary ones.
    if (m_useNomToBin) {
        m_nominalToBinaryFilter = new NominalToBinary();
        m_nominalToBinaryFilter.setInputFormat(m_instances);
        m_instances = Filter.useFilter(m_instances, m_nominalToBinaryFilter);
    }
    m_numAttributes = m_instances.numAttributes() - 1;
    m_numClasses = m_instances.numClasses();

    setClassType(m_instances);

    // this sets up the validation set: the first numInVal instances
    // (m_valSize percent) of the shuffled data.
    Instances valSet = null;
    // numinval is needed later
    int numInVal = (int) (m_valSize / 100.0 * m_instances.numInstances());
    if (m_valSize > 0) {
        if (numInVal == 0) {
            numInVal = 1;
        }
        valSet = new Instances(m_instances, 0, numInVal);
    }
    ///////////

    setupInputs();
    setupOutputs();
    if (m_autoBuild) {
        setupHiddenLayer();
    }

    /////////////////////////////
    // this sets up the gui for usage
    if (m_gui) {
        m_win = new JFrame();

        m_win.addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent e) {
                boolean k = m_stopIt;
                m_stopIt = true;
                int well = JOptionPane.showConfirmDialog(m_win,
                        "Are You Sure...\n" + "Click Yes To Accept" + " The Neural Network"
                                + "\n Click No To Return",
                        "Accept Neural Network", JOptionPane.YES_NO_OPTION);
                if (well == 0) {
                    m_win.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
                    m_accepted = true;
                    blocker(false);
                } else {
                    m_win.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
                }
                m_stopIt = k;
            }
        });

        m_win.getContentPane().setLayout(new BorderLayout());
        m_win.setTitle("Neural Network");
        m_nodePanel = new NodePanel();
        // without the following two lines, the NodePanel.paintComponents(Graphics)
        // method will go berserk if the network doesn't fit completely: it will
        // get called on a constant basis, using 100% of the CPU
        // see the following forum thread:
        // http://forum.java.sun.com/thread.jspa?threadID=580929&messageID=2945011
        m_nodePanel.setPreferredSize(new Dimension(640, 480));
        m_nodePanel.revalidate();

        JScrollPane sp = new JScrollPane(m_nodePanel, JScrollPane.VERTICAL_SCROLLBAR_ALWAYS,
                JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
        m_controlPanel = new ControlPanel();

        m_win.getContentPane().add(sp, BorderLayout.CENTER);
        m_win.getContentPane().add(m_controlPanel, BorderLayout.SOUTH);
        m_win.setSize(640, 480);
        m_win.setVisible(true);
    }

    // This sets up the initial state of the gui
    if (m_gui) {
        blocker(true);
        m_controlPanel.m_changeEpochs.setEnabled(false);
        m_controlPanel.m_changeLearning.setEnabled(false);
        m_controlPanel.m_changeMomentum.setEnabled(false);
    }

    // For silly situations in which the network gets accepted before training
    // commenses
    if (m_numeric) {
        setEndsToLinear();
    }
    if (m_accepted) {
        m_win.dispose();
        m_controlPanel = null;
        m_nodePanel = null;
        m_instances = new Instances(m_instances, 0);
        m_currentInstance = null;
        return;
    }

    // connections done.
    double right = 0;
    double driftOff = 0;
    double lastRight = Double.POSITIVE_INFINITY;
    double bestError = Double.POSITIVE_INFINITY;
    double tempRate;
    double totalWeight = 0;
    double totalValWeight = 0;
    double origRate = m_learningRate; // only used for when reset

    // ensure that at least 1 instance is trained through.
    if (numInVal == m_instances.numInstances()) {
        numInVal--;
    }
    if (numInVal < 0) {
        numInVal = 0;
    }
    // Total weight of the training portion (instances after the validation
    // prefix) with a known class.
    for (int noa = numInVal; noa < m_instances.numInstances(); noa++) {
        if (!m_instances.instance(noa).classIsMissing()) {
            totalWeight += m_instances.instance(noa).weight();
        }
    }
    if (m_valSize != 0) {
        for (int noa = 0; noa < valSet.numInstances(); noa++) {
            if (!valSet.instance(noa).classIsMissing()) {
                totalValWeight += valSet.instance(noa).weight();
            }
        }
    }
    m_stopped = false;

    // Main epoch loop.
    for (int noa = 1; noa < m_numEpochs + 1; noa++) {
        right = 0;
        for (int nob = numInVal; nob < m_instances.numInstances(); nob++) {
            m_currentInstance = m_instances.instance(nob);

            if (!m_currentInstance.classIsMissing()) {

                // this is where the network updating (and training occurs, for the
                // training set
                resetNetwork();
                calculateOutputs();
                tempRate = m_learningRate * m_currentInstance.weight();
                if (m_decay) {
                    tempRate /= noa;
                }

                right += (calculateErrors() / m_instances.numClasses()) * m_currentInstance.weight();
                updateNetworkWeights(tempRate, m_momentum);
            }
        }
        right /= totalWeight;
        if (Double.isInfinite(right) || Double.isNaN(right)) {
            if (!m_reset) {
                m_instances = null;
                throw new Exception("Network cannot train. Try restarting with a" + " smaller learning rate.");
            } else {
                // reset the network if possible
                if (m_learningRate <= Utils.SMALL)
                    throw new IllegalStateException(
                            "Learning rate got too small (" + m_learningRate + " <= " + Utils.SMALL + ")!");
                m_learningRate /= 2;
                buildClassifier(i);
                m_learningRate = origRate;
                m_instances = new Instances(m_instances, 0);
                m_currentInstance = null;
                return;
            }
        }

        //////////////////////// do validation testing if applicable
        if (m_valSize != 0) {
            right = 0;
            for (int nob = 0; nob < valSet.numInstances(); nob++) {
                m_currentInstance = valSet.instance(nob);
                if (!m_currentInstance.classIsMissing()) {
                    // this is where the network updating occurs, for the validation set
                    resetNetwork();
                    calculateOutputs();
                    right += (calculateErrors() / valSet.numClasses()) * m_currentInstance.weight();
                    // note 'right' could be calculated here just using
                    // the calculate output values. This would be faster.
                    // be less modular
                }
            }

            if (right < lastRight) {
                if (right < bestError) {
                    bestError = right;
                    // save the network weights at this point
                    for (int noc = 0; noc < m_numClasses; noc++) {
                        m_outputs[noc].saveWeights();
                    }
                    driftOff = 0;
                }
            } else {
                driftOff++;
            }
            lastRight = right;
            // Early stopping: restore the best weights once validation error
            // has drifted for too long or the epoch budget is exhausted.
            if (driftOff > m_driftThreshold || noa + 1 >= m_numEpochs) {
                for (int noc = 0; noc < m_numClasses; noc++) {
                    m_outputs[noc].restoreWeights();
                }
                m_accepted = true;
            }
            right /= totalValWeight;
        }
        m_epoch = noa;
        m_error = right;
        // shows what the neuralnet is upto if a gui exists.
        updateDisplay();
        // This junction controls what state the gui is in at the end of each
        // epoch, Such as if it is paused, if it is resumable etc...
        if (m_gui) {
            while ((m_stopIt || (m_epoch >= m_numEpochs && m_valSize == 0)) && !m_accepted) {
                m_stopIt = true;
                m_stopped = true;
                if (m_epoch >= m_numEpochs && m_valSize == 0) {
                    m_controlPanel.m_startStop.setEnabled(false);
                } else {
                    m_controlPanel.m_startStop.setEnabled(true);
                }
                m_controlPanel.m_startStop.setText("Start");
                m_controlPanel.m_startStop.setActionCommand("Start");
                m_controlPanel.m_changeEpochs.setEnabled(true);
                m_controlPanel.m_changeLearning.setEnabled(true);
                m_controlPanel.m_changeMomentum.setEnabled(true);

                blocker(true);

                if (m_numeric) {
                    setEndsToLinear();
                }
            }
            m_controlPanel.m_changeEpochs.setEnabled(false);
            m_controlPanel.m_changeLearning.setEnabled(false);
            m_controlPanel.m_changeMomentum.setEnabled(false);

            m_stopped = false;
            // if the network has been accepted stop the training loop
            if (m_accepted) {
                m_win.dispose();
                m_controlPanel = null;
                m_nodePanel = null;
                m_instances = new Instances(m_instances, 0);
                m_currentInstance = null;
                return;
            }
        }
        if (m_accepted) {
            m_instances = new Instances(m_instances, 0);
            m_currentInstance = null;
            return;
        }
        // TODO:
        // Customization: store the model created after this epoch
        // NOTE(review): stream is not closed on write failure and the
        // "mlp/temp/" directory must already exist — confirm intent.
        ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream("mlp/temp/" + noa + ".model"));
        oos.writeObject(this);
        oos.flush();
        oos.close();
    }
    if (m_gui) {
        m_win.dispose();
        m_controlPanel = null;
        m_nodePanel = null;
    }
    m_instances = new Instances(m_instances, 0);
    m_currentInstance = null;
}
From source file:classify.Classifier.java
public static void missingValuesRows(Instances data) { int[] missingValues = new int[data.numInstances()]; for (int i = 0; i < data.numInstances(); i++) { missingValues[i] = 0;/*ww w .ja v a2s . com*/ } Instance example; String value = ""; //get number of missing attributes per row int missValues = 0; for (int i = 0; i < data.numInstances(); i++) { example = data.instance(i); for (int j = 0; j < 15; j++) { if (example.attribute(j).isNominal()) { value = example.stringValue(j); } else if (example.attribute(j).isNumeric()) { value = Double.toString(example.value(j)); } if (value.equals("?") || value.equals("NaN")) { missingValues[i]++; missValues++; } } } System.out.println("Number of Missing Values: " + missValues); //get how many times i attributes are missing int[] frequency = new int[15]; for (int i = 0; i < data.numInstances(); i++) { frequency[missingValues[i]]++; } int numRows = 0; for (int i = 0; i < data.numInstances(); i++) { if (missingValues[i] > 0) { numRows++; } } System.out.println("Number of rows with missing values: " + numRows); System.out.println("Number of missing attributes per row:"); for (int i = 0; i < 15; i++) { System.out.println(i + ": " + frequency[i]); } }