Example usage for weka.core Instance classIndex

List of usage examples for weka.core Instance classIndex

Introduction

On this page you can find example usage for weka.core Instance classIndex.

Prototype

public int classIndex();

Source Link

Document

Returns the class attribute's index.

Usage

From source file:classif.Prototyper.java

License:Open Source License

/**
 * Builds the prototype-based classifier from the training data.
 *
 * <p>Converts every training instance into a {@code Sequence} of its
 * non-class attribute values, groups the sequences by class label, then
 * delegates prototype construction to {@code buildSpecificClassifier}.
 *
 * @param data training instances; the class attribute must be set
 * @throws Exception if the underlying classifier construction fails
 */
@Override
public void buildClassifier(Instances data) throws Exception {
    trainingData = data;
    Attribute classAttribute = data.classAttribute();
    prototypes = new ArrayList<>();

    // One bucket of sequences (and their indices in the full data) per class value.
    classedData = new HashMap<String, ArrayList<Sequence>>();
    indexClassedDataInFullData = new HashMap<String, ArrayList<Integer>>();
    for (int c = 0; c < data.numClasses(); c++) {
        classedData.put(classAttribute.value(c), new ArrayList<Sequence>());
        indexClassedDataInFullData.put(classAttribute.value(c), new ArrayList<Integer>());
    }

    sequences = new Sequence[data.numInstances()];
    classMap = new String[sequences.length];
    for (int i = 0; i < sequences.length; i++) {
        Instance sample = data.instance(i);
        // Copy every attribute except the class attribute, wherever it sits.
        // Bug fix: the previous shift-based code only handled a class attribute
        // at position 0 or at the end; a class attribute in the middle was
        // copied into the sequence by mistake. Output is unchanged for the
        // first/last positions.
        MonoDoubleItemSet[] sequence = new MonoDoubleItemSet[sample.numAttributes() - 1];
        int t = 0;
        for (int a = 0; a < sample.numAttributes(); a++) {
            if (a == sample.classIndex()) {
                continue;
            }
            sequence[t++] = new MonoDoubleItemSet(sample.value(a));
        }
        sequences[i] = new Sequence(sequence);
        String clas = sample.stringValue(classAttribute);
        classMap[i] = clas;
        classedData.get(clas).add(sequences[i]);
        indexClassedDataInFullData.get(clas).add(i);
    }

    buildSpecificClassifier(data);

    if (fillPrototypes)
        addMissingPrototypesRandom();
}

From source file:classif.Prototyper.java

License:Open Source License

/**
 * Classifies {@code sample} as the class of its nearest prototype (1-NN
 * under {@code Sequence.distance}).
 *
 * @param sample the instance to classify; the class attribute must be set
 * @return the index of the predicted class value
 * @throws Exception if classification fails
 */
public double classifyInstance(Instance sample) throws Exception {
    // Transform the instance to a sequence, skipping the class attribute
    // wherever it is located. Bug fix: the old shift trick only supported a
    // class attribute at index 0 or at the last position; behavior for those
    // positions is unchanged.
    MonoDoubleItemSet[] sequence = new MonoDoubleItemSet[sample.numAttributes() - 1];
    int t = 0;
    for (int a = 0; a < sample.numAttributes(); a++) {
        if (a != sample.classIndex()) {
            sequence[t++] = new MonoDoubleItemSet(sample.value(a));
        }
    }
    Sequence seq = new Sequence(sequence);

    // Linear scan for the nearest prototype.
    double minD = Double.MAX_VALUE;
    String classValue = null;
    for (ClassedSequence s : prototypes) {
        double tmpD = seq.distance(s.sequence);
        if (tmpD < minD) {
            minD = tmpD;
            classValue = s.classValue;
        }
    }
    // NOTE(review): if prototypes is empty, classValue stays null here —
    // confirm Attribute.indexOfValue(null) is acceptable to callers.
    return sample.classAttribute().indexOfValue(classValue);
}

From source file:classif.Prototyper.java

License:Open Source License

/**
 * Converts a Weka dataset into an array of labelled sequences, one per
 * instance, containing all non-class attribute values.
 *
 * @param test dataset to convert; the class attribute must be set
 * @return one {@code ClassedSequence} per instance, in dataset order
 */
public static ClassedSequence[] convertWekaSetToClassedSequence(Instances test) {

    Attribute classAttribute = test.classAttribute();
    ClassedSequence[] testSequences = new ClassedSequence[test.numInstances()];
    for (int i = 0; i < testSequences.length; i++) {
        Instance sample = test.instance(i);
        // Copy every attribute except the class attribute, wherever it sits.
        // Bug fix: the previous shift-based code only handled a class
        // attribute at position 0 or at the end.
        MonoDoubleItemSet[] sequence = new MonoDoubleItemSet[sample.numAttributes() - 1];
        int t = 0;
        for (int a = 0; a < sample.numAttributes(); a++) {
            if (a != sample.classIndex()) {
                sequence[t++] = new MonoDoubleItemSet(sample.value(a));
            }
        }
        String clas = sample.stringValue(classAttribute);
        testSequences[i] = new ClassedSequence(new Sequence(sequence), clas);
    }

    return testSequences;

}

From source file:classif.PrototyperEUC.java

License:Open Source License

/**
 * Classifies {@code sample} as the class of its nearest prototype under the
 * Euclidean distance ({@code Sequence.distanceEuc}).
 *
 * @param sample the instance to classify; the class attribute must be set
 * @return the index of the predicted class value
 * @throws Exception if classification fails
 */
@Override
public double classifyInstance(Instance sample) throws Exception {
    // Transform the instance to a sequence, skipping the class attribute
    // wherever it is located. Bug fix: the old shift trick only supported a
    // class attribute at index 0 or at the last position; behavior for those
    // positions is unchanged.
    MonoDoubleItemSet[] sequence = new MonoDoubleItemSet[sample.numAttributes() - 1];
    int t = 0;
    for (int a = 0; a < sample.numAttributes(); a++) {
        if (a != sample.classIndex()) {
            sequence[t++] = new MonoDoubleItemSet(sample.value(a));
        }
    }
    Sequence seq = new Sequence(sequence);

    // Linear scan for the nearest prototype (Euclidean).
    double minD = Double.MAX_VALUE;
    String classValue = null;
    for (ClassedSequence s : prototypes) {
        double tmpD = seq.distanceEuc(s.sequence);
        if (tmpD < minD) {
            minD = tmpD;
            classValue = s.classValue;
        }
    }
    return sample.classAttribute().indexOfValue(classValue);
}

From source file:cluster.ABC.ClusterUtils.java

License:Open Source License

/** Normalizes a dense Instance to unit L2 norm in place, leaving the class
 *  attribute value untouched. SparseInstances are delegated to
 *  {@link #normalizeSparseInstance(Instance)}.
 *
 * @author Sugato Basu
 * @param inst Instance to be normalized
 */

public static void normalizeInstance(Instance inst) throws Exception {
    if (inst instanceof SparseInstance) {
        System.err.println("Is SparseInstance, using normalizeSparseInstance function instead");
        normalizeSparseInstance(inst);
        // Bug fix: previously fell through and normalized the instance a
        // second time with the dense code path below.
        return;
    }

    double norm = 0;
    double values[] = inst.toDoubleArray();

    for (int i = 0; i < values.length; i++) {
        if (i != inst.classIndex()) { // don't normalize the class index
            norm += values[i] * values[i];
        }
    }
    norm = Math.sqrt(norm);
    if (norm == 0) {
        return; // all-zero feature vector: avoid 0/0 producing NaN values
    }
    for (int i = 0; i < values.length; i++) {
        if (i != inst.classIndex()) { // don't normalize the class index
            values[i] /= norm;
        }
    }
    for (int i = 0; i < inst.numAttributes(); i++) {
        inst.setValue(i, values[i]);
    }
}

From source file:cluster.ABC.ClusterUtils.java

License:Open Source License

/** Normalizes the values of a SparseInstance in L2 norm
 *
 * @author Sugato Basu/*from  www.  java2  s. c o m*/
 * @param inst SparseInstance to be normalized
 */

public static void normalizeSparseInstance(Instance inst) throws Exception {
    double norm = 0;
    int length = inst.numValues();

    if (!(inst instanceof SparseInstance)) {
        System.err.println("Not SparseInstance, using normalizeInstance function instead");
        normalizeInstance(inst);
    }

    for (int i = 0; i < length; i++) {
        if (inst.index(i) != inst.classIndex()) { // don't normalize the class index
            norm += inst.valueSparse(i) * inst.valueSparse(i);
        }
    }
    norm = Math.sqrt(norm);
    for (int i = 0; i < length; i++) { // don't normalize the class index
        if (inst.index(i) != inst.classIndex()) {
            inst.setValueSparse(i, inst.valueSparse(i) / norm);
        }
    }
}

From source file:com.mycompany.id3classifier.kNNClassifier.java

/**
 * Minkowski distance between two instances, skipping the class attribute.
 * Numeric attributes contribute their absolute difference; nominal/string
 * attributes contribute 1 when the values differ, 0 otherwise.
 *
 * NOTE(review): the Minkowski order used is p = numAttributes, which is
 * unusual — Euclidean distance would be p = 2. Confirm this is intentional.
 */
private static double findDistance(Instance instance1, Instance instance2) {
    final int p = instance1.numAttributes();
    double sum = 0;

    for (int attr = 0; attr < p; attr++) {
        if (attr == instance1.classIndex()) {
            continue; // never compare class labels as features
        }

        double delta;
        if (instance1.attribute(attr).isNumeric()) {
            delta = Math.abs(instance1.value(attr) - instance2.value(attr));
        } else {
            delta = instance1.stringValue(attr).equals(instance2.stringValue(attr)) ? 0 : 1;
        }

        sum += Math.pow(delta, p);
    }

    return Math.pow(sum, 1.0 / p);
}

From source file:com.mycompany.neuralnetwork.NeuralNetworkClassifier.java

/**
 * Trains the underlying network on {@code instances}. Each of the
 * {@code layers - 1} hidden layers gets one node per input attribute; the
 * output layer gets one node per distinct class value. Missing (NaN)
 * attribute values are fed into the network as 0.0.
 *
 * @param instances training data; the class attribute must be set
 * @throws Exception if network training fails
 */
@Override
public void buildClassifier(Instances instances) throws Exception {
    final int inputCount = instances.numAttributes() - 1;

    // layers - 1 hidden layers of inputCount nodes each, then the output layer.
    List<Integer> layerSizes = new ArrayList<>();
    for (int layer = 0; layer < layers - 1; layer++) {
        layerSizes.add(inputCount);
    }
    layerSizes.add(instances.numDistinctValues(instances.classIndex()));

    network = new Network(inputCount, layerSizes);

    ArrayList<Double> errorsPerIteration = new ArrayList<>();
    for (int iter = 0; iter < iterations; iter++) {
        double iterationError = 0;

        for (int idx = 0; idx < instances.numInstances(); idx++) {
            Instance instance = instances.instance(idx);

            // Build the input vector: every non-class attribute, NaN -> 0.0.
            List<Double> input = new ArrayList<>();
            for (int a = 0; a < instance.numAttributes(); a++) {
                if (a == instance.classIndex()) {
                    continue; // the class value is the training target, not an input
                }
                double v = instance.value(a);
                input.add(Double.isNaN(v) ? 0.0 : v);
            }

            iterationError += network.train(input, instance.value(instance.classIndex()), learningFactor);
        }

        errorsPerIteration.add(iterationError);
    }

    // errorsPerIteration collects the per-iteration training error; it was
    // printed here to gather data for a learning-curve graph.
}

From source file:com.mycompany.neuralnetwork.NeuralNetworkClassifier.java

/**
 * Classifies an instance by feeding its non-class attribute values (NaN
 * treated as 0.0) through the network and returning the index of the output
 * node with the highest activation.
 *
 * @param instance the instance to classify
 * @return the 0-based index of the winning output node
 * @throws Exception if the network evaluation fails
 */
@Override
public double classifyInstance(Instance instance) throws Exception {

    List<Double> input = new ArrayList<>();
    for (int i = 0; i < instance.numAttributes(); i++) {
        if (i == instance.classIndex()) {
            continue; // the class attribute is the prediction target, not an input
        }
        double v = instance.value(i);
        input.add(Double.isNaN(v) ? 0.0 : v);
    }

    List<Double> outputs = network.getOutputs(input);

    // Arg-max over the outputs. Bug fix: seed with -infinity instead of -1
    // so the result is correct even if every activation is <= -1; behavior
    // is otherwise unchanged.
    double largeVal = Double.NEGATIVE_INFINITY;
    int index = 0;

    for (int i = 0; i < outputs.size(); i++) {
        double temp = outputs.get(i);

        if (temp > largeVal) {
            largeVal = temp;
            index = i;
        }
    }

    return index;
}

From source file:com.spread.experiment.tempuntilofficialrelease.ClassificationViaClustering108.java

License:Open Source License

/**
 * Returns the class probability distribution for the given instance.
 *
 * <p>The instance is first re-encoded against the clusterer's class-free
 * header, then either hard-assigned via the cluster-to-class mapping
 * (default) or soft-labelled from the clusterer's own cluster distribution
 * when {@code getLabelAllClusters()} is true. An all-zero array means the
 * instance could not be classified.
 *
 * @param instance the instance to be classified
 * @return the class probabilities (all zeros when unclassified)
 * @throws Exception if an error occurred during the prediction
 */
@Override
public double[] distributionForInstance(Instance instance) throws Exception {

    if (m_ZeroR != null) {
        // Fallback model was built (no usable clustering): delegate to ZeroR.
        return m_ZeroR.distributionForInstance(instance);
    } else {
        double[] result = new double[instance.numClasses()];

        if (m_ActualClusterer != null) {
            // Re-encode the instance without its class attribute so it matches
            // the header the clusterer was trained on; string and relational
            // values must be re-registered in the fresh header.
            Instances tempData = m_ClusteringHeader.stringFreeStructure();
            double[] values = new double[tempData.numAttributes()];
            int n = 0;
            for (int i = 0; i < instance.numAttributes(); i++) {
                if (i == instance.classIndex()) {
                    continue; // the clusterer never saw the class attribute
                }
                if (instance.attribute(i).isString()) {
                    values[n] = tempData.attribute(n).addStringValue(instance.stringValue(i));
                } else if (instance.attribute(i).isRelationValued()) {
                    values[n] = tempData.attribute(n).addRelation(instance.relationalValue(i));
                } else {
                    values[n] = instance.value(i);
                }
                n++;
            }
            Instance newInst = new DenseInstance(instance.weight(), values);
            newInst.setDataset(tempData);

            if (!getLabelAllClusters()) {

                // Hard assignment: map the winning cluster to its class;
                // -1 marks a cluster with no associated class.
                double r = m_ClustersToClasses[m_ActualClusterer.clusterInstance(newInst)];
                if (r == -1) {
                    return result; // Unclassified
                } else {
                    result[(int) r] = 1.0;
                    return result;
                }
            } else {
                // Soft assignment: weight each cluster's class profile by the
                // cluster membership probability, then renormalize.
                double[] classProbs = new double[instance.numClasses()];
                double[] dist = m_ActualClusterer.distributionForInstance(newInst);
                for (int i = 0; i < dist.length; i++) {
                    for (int j = 0; j < instance.numClasses(); j++) {
                        classProbs[j] += dist[i] * m_ClusterClassProbs[i][j];
                    }
                }
                Utils.normalize(classProbs);
                return classProbs;
            }
        } else {
            return result; // Unclassified
        }
    }
}