List of usage examples for weka.classifiers.AbstractClassifier.makeCopy
public static Classifier makeCopy(Classifier model) throws Exception
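makeCopy returns a deep copy of the given classifier, options included, by serializing and deserializing the model, so the copy can be configured or trained without touching the original. A minimal sketch of the basic pattern, assuming a standard WEKA classpath and a hypothetical local iris.arff:

import weka.classifiers.AbstractClassifier;
import weka.classifiers.Classifier;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class MakeCopyDemo {
    public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("iris.arff"); // hypothetical path
        data.setClassIndex(data.numAttributes() - 1);

        // a configured prototype; the copy inherits its options
        Classifier prototype = new J48();
        Classifier copy = AbstractClassifier.makeCopy(prototype);

        // training the copy leaves the prototype untrained and unchanged
        copy.buildClassifier(data);
    }
}

The examples below all follow this pattern: copy a configured template, then train the copy.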
From source file:meka.classifiers.multilabel.cc.CNode.java
License:Open Source License
/**
 * Build - Create transformation for this node, and train classifier of type H upon it.
 * The dataset should have the class at index 'j', and all indices less than L *not* in paY removed.
 */
public void build(Instances D, Classifier H) throws Exception {
    // transform data
    T = transform(D);
    // build SLC 'h'
    h = AbstractClassifier.makeCopy(H);
    h.buildClassifier(T);
    // save templates
    //t_ = new SparseInstance(T.numAttributes());
    //t_.setDataset(T);
    //t_.setClassMissing(); // [?,x,x,x]
    T.clear();
}
From source file:meka.classifiers.multilabel.incremental.meta.BaggingMLUpdateableADWIN.java
License:Open Source License
/**
 * DistributionForInstance - And check for drift by measuring a type of error.
 */
@Override
public double[] distributionForInstance(Instance x) throws Exception {

    // classification
    double y[] = new double[x.classIndex()];
    for (int i = 0; i < m_NumIterations; i++) {
        double y_i[] = m_Classifiers[i].distributionForInstance(x);
        for (int j = 0; j < y_i.length; j++) {
            y[j] += y_i[j];
        }
        accuracies[i] += error(y_i, MLUtils.toDoubleArray(x, y.length));
    }
    for (int j = 0; j < y.length; j++) {
        y[j] = y[j] / m_NumIterations;
    }

    double d = error(y, MLUtils.toDoubleArray(x, y.length));

    // ADWIN stuff
    double ErrEstim = this.adwin.getEstimation();
    if (this.adwin.setInput(1.0 - d)) {
        if (this.adwin.getEstimation() > ErrEstim) {
            // find worst classifier
            int index = Utils.minIndex(accuracies);
            if (getDebug())
                System.out.println("------- CHANGE DETECTED / Reset Model #" + index + " ------- ");
            // reset this classifier
            m_Classifiers[index] = (ProblemTransformationMethod) AbstractClassifier.makeCopy(m_Classifier);
            m_Classifiers[index].buildClassifier(new Instances(m_InstancesTemplate));
            // ... and reset ADWIN
            this.adwin = new ADWIN();
            accuracies = new double[m_NumIterations];
        }
    }

    return y;
}
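The drift handler above replaces one degraded ensemble member with a fresh copy of the template classifier. The matching initialization step is typically done with the companion method AbstractClassifier.makeCopies, which stamps out an array of identical copies in one call. A minimal sketch, with all field and variable names assumed for illustration (the usual weka.classifiers and weka.core imports apply):

// Sketch: build a bagged ensemble from a single configured template.
protected Classifier[] initEnsemble(Classifier template, int numMembers, Instances train) throws Exception {
    Classifier[] members = AbstractClassifier.makeCopies(template, numMembers);
    for (int i = 0; i < members.length; i++) {
        // each copy trains on its own bootstrap sample, as in bagging
        members[i].buildClassifier(train.resample(new java.util.Random(i)));
    }
    return members;
}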
From source file:meka.classifiers.multilabel.PMCC.java
License:Open Source License
/**
 * RebuildCC - rebuild a classifier chain 'h_old' to have a new sequence 's_new'.
 */
protected CC rebuildCC(CC h_old, int s_new[], Instances D) throws Exception {
    // make a deep copy
    CC h = (CC) AbstractClassifier.makeCopy(h_old);
    // rebuild this chain
    h.rebuildClassifier(s_new, new Instances(D));
    return h;
}
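Because makeCopy goes through serialization, the returned chain shares no state with h_old: the original stays intact and usable while the copy is retrained under the new label sequence, which is what a search over candidate chain orders requires.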
From source file:meka.experiment.DefaultExperiment.java
License:Open Source License
/**
 * Runs the experiment.
 *
 * @return null if successfully run, otherwise error message
 */
public String run() {
    String result;
    Instances dataset;
    List<EvaluationStatistics> stats;
    boolean incremental;

    debug("pre: run");

    result = null;
    m_Running = true;
    incremental = (m_StatisticsHandler instanceof IncrementalEvaluationStatisticsHandler)
            && (((IncrementalEvaluationStatisticsHandler) m_StatisticsHandler).supportsIncrementalUpdate());
    debug("Incremental statistics? " + incremental);

    notifyExecutionStageListeners(ExecutionStageEvent.Stage.RUNNING);

    while (m_DatasetProvider.hasNext()) {
        // next dataset
        debug("pre: next-dataset");
        dataset = m_DatasetProvider.next();
        debug("post: next-dataset");
        if (dataset == null) {
            result = "Failed to obtain next dataset!";
            log(result);
            m_Running = false;
            break;
        }
        log("Using dataset: " + dataset.relationName());

        // iterate classifiers
        for (MultiLabelClassifier classifier : m_Classifiers) {
            // evaluation required?
            if (incremental) {
                if (!((IncrementalEvaluationStatisticsHandler) m_StatisticsHandler).requires(classifier, dataset)) {
                    log("Already present, skipping: " + Utils.toCommandLine(classifier) + " --> "
                            + dataset.relationName());
                    List<EvaluationStatistics> priorStats = ((IncrementalEvaluationStatisticsHandler) m_StatisticsHandler)
                            .retrieve(classifier, dataset);
                    m_Statistics.addAll(priorStats);
                    notifyStatisticsNotificationListeners(priorStats);
                    continue;
                }
            }

            try {
                classifier = (MultiLabelClassifier) AbstractClassifier.makeCopy(classifier);
            } catch (Exception e) {
                result = handleException("Failed to create copy of classifier: " + classifier.getClass().getName(), e);
                log(result);
                m_Running = false;
                break;
            }

            if (m_Running && !m_Stopping) {
                // notify listeners
                notifyIterationNotificationListeners(classifier, dataset);
                log("Using classifier: " + OptionUtils.toCommandLine(classifier));

                // perform evaluation
                debug("pre: evaluator init");
                result = m_Evaluator.initialize();
                debug("post: evaluator init");
                if (result != null) {
                    m_Running = false;
                    break;
                }
                try {
                    debug("pre: evaluator evaluate");
                    stats = m_Evaluator.evaluate(classifier, dataset);
                    debug("post: evaluator evaluate");
                } catch (Exception e) {
                    result = handleException("Failed to evaluate dataset '" + dataset.relationName()
                            + "' with classifier: " + Utils.toCommandLine(classifier), e);
                    log(result);
                    m_Running = false;
                    break;
                }
                if (stats != null) {
                    m_Statistics.addAll(stats);
                    if (incremental)
                        ((IncrementalEvaluationStatisticsHandler) m_StatisticsHandler).append(stats);
                    notifyStatisticsNotificationListeners(stats);
                }
            }

            if (!m_Running || m_Stopping)
                break;
        }
        if (!m_Running || m_Stopping)
            break;
    }

    if (m_Running && !m_Stopping) {
        if (!incremental)
            m_StatisticsHandler.write(m_Statistics);
    }

    if (!m_Running) {
        if (result == null)
            result = "Experiment interrupted!";
        else
            result = "Experiment interrupted: " + result;
    }
    if (result != null)
        log(result);

    m_Running = false;
    m_Stopping = false;

    debug("post: run");

    return result;
}
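Note the makeCopy call at the heart of the loop: each classifier/dataset pairing is evaluated on a fresh, identically configured copy, so trained state from one evaluation can never leak into the next.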
From source file:mulan.classifier.meta.SubsetLearner.java
License:Open Source License
/**
 * We get the initial dataset through trainingSet. Then for each split, as specified by splitOrder,
 * we remove the unneeded labels and train the classifiers, using a different method for multi-label
 * splits and single-label splits.
 *
 * @param trainingSet The initial {@link MultiLabelInstances} dataset
 * @throws Exception
 */
@Override
protected void buildInternal(MultiLabelInstances trainingSet) throws Exception {
    int countSingle = 0, countMulti = 0;
    remove = new Remove[splitOrder.length];

    // Get values into absoluteIndicesToRemove
    int numofSplits = splitOrder.length; // number of sets the main set is going to be split into
    for (int r = 0; r < splitOrder.length; r++) { // initialization required to avoid a NullPointerException
        absoluteIndicesToRemove[r] = new int[numLabels - splitOrder[r].length];
    }

    // Initialize an array containing which labels we want
    boolean[][] Selected = new boolean[splitOrder.length][numLabels];
    for (int i = 0; i < numofSplits; i++) { // set true for the labels we need to keep
        for (int j = 0; j < splitOrder[i].length; j++) {
            Selected[i][splitOrder[i][j]] = true;
        }
    }
    for (int i = 0; i < numofSplits; i++) { // get the labels you need to KEEP
        int k = 0;
        for (int j = 0; j < numLabels; j++) {
            if (Selected[i][j] != true) {
                absoluteIndicesToRemove[i][k] = labelIndices[j];
                k++;
            }
        }
    }

    // Create the lists which will contain the learners
    multiLabelLearners = new ArrayList<MultiLabelLearner>();
    singleLabelLearners = new ArrayList<FilteredClassifier>();
    countSingle = 0; // reset the values to zero and reuse the variables
    countMulti = 0;

    // TODO: Add more comments for the procedure
    for (int totalSplitNo = 0; totalSplitNo < splitOrder.length; totalSplitNo++) {
        debug("Building set " + (totalSplitNo + 1) + "/" + splitOrder.length);
        if (splitOrder[totalSplitNo].length > 1) {
            // Remove the unneeded labels
            Instances trainSubset = trainingSet.getDataSet();
            remove[totalSplitNo] = new Remove();
            remove[totalSplitNo].setAttributeIndicesArray(absoluteIndicesToRemove[totalSplitNo]);
            remove[totalSplitNo].setInputFormat(trainSubset);
            remove[totalSplitNo].setInvertSelection(false);
            trainSubset = Filter.useFilter(trainSubset, remove[totalSplitNo]);

            // Reintegrate dataset and train learner
            multiLabelLearners.add(baseMultiLabelLearner.makeCopy());
            multiLabelLearners.get(countMulti).build(trainingSet.reintegrateModifiedDataSet(trainSubset));
            countMulti++;
        } else {
            debug("Single Label model.");
            // Initialize the FilteredClassifiers
            singleLabelLearners.add(new FilteredClassifier());
            singleLabelLearners.get(countSingle).setClassifier(AbstractClassifier.makeCopy(baseClassifier));

            Instances trainSubset = trainingSet.getDataSet();
            // Set the remove filter for the FilteredClassifiers
            remove[totalSplitNo] = new Remove();
            remove[totalSplitNo].setAttributeIndicesArray(absoluteIndicesToRemove[totalSplitNo]);
            remove[totalSplitNo].setInputFormat(trainSubset);
            remove[totalSplitNo].setInvertSelection(false);
            singleLabelLearners.get(countSingle).setFilter(remove[totalSplitNo]);

            // Set the remaining label as the class index
            trainSubset.setClassIndex(labelIndices[splitOrder[totalSplitNo][0]]);

            // Train
            singleLabelLearners.get(countSingle).buildClassifier(trainSubset);
            countSingle++;
        }
    }
}
From source file:mulan.classifier.transformation.BinaryRelevance.java
License:Open Source License
protected void buildInternal(MultiLabelInstances train) throws Exception {
    numLabels = train.getNumLabels();
    ensemble = new FilteredClassifier[numLabels];
    Instances trainingData = train.getDataSet();

    for (int i = 0; i < numLabels; i++) {
        ensemble[i] = new FilteredClassifier();
        ensemble[i].setClassifier(AbstractClassifier.makeCopy(baseClassifier));

        // Indices of attributes to remove
        int[] indicesToRemove = new int[numLabels - 1];
        int counter2 = 0;
        for (int counter1 = 0; counter1 < numLabels; counter1++) {
            if (labelIndices[counter1] != labelIndices[i]) {
                indicesToRemove[counter2] = labelIndices[counter1];
                counter2++;
            }
        }

        Remove remove = new Remove();
        remove.setAttributeIndicesArray(indicesToRemove);
        remove.setInputFormat(trainingData);
        remove.setInvertSelection(false);
        ensemble[i].setFilter(remove);

        trainingData.setClassIndex(labelIndices[i]);
        // debug("Building model " + (i + 1) + "/" + numLabels);
        System.out.println("Building model " + (i + 1) + "/" + numLabels);
        ensemble[i].buildClassifier(trainingData);
    }
}
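This is the canonical binary relevance layout: one makeCopy of the base classifier per label, each wrapped in a FilteredClassifier whose Remove filter strips out the other label attributes. The chain-based learners below reuse the same copy-per-target idea, differing only in which attributes each model is allowed to see.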
From source file:mulan.classifier.transformation.ClassifierChain.java
License:Open Source License
protected void buildInternal(MultiLabelInstances train) throws Exception {
    if (chain == null) {
        chain = new int[numLabels];
        for (int i = 0; i < numLabels; i++) {
            chain[i] = i;
        }
    }

    Instances trainDataset;
    numLabels = train.getNumLabels();
    ensemble = new FilteredClassifier[numLabels];
    trainDataset = train.getDataSet();

    for (int i = 0; i < numLabels; i++) {
        ensemble[i] = new FilteredClassifier();
        ensemble[i].setClassifier(AbstractClassifier.makeCopy(baseClassifier));

        // Indices of attributes to remove: first remove numLabels attributes,
        // then numLabels - 1 attributes, and so on. The loop starts from the last attribute.
        int[] indicesToRemove = new int[numLabels - 1 - i];
        int counter2 = 0;
        for (int counter1 = 0; counter1 < numLabels - i - 1; counter1++) {
            indicesToRemove[counter1] = labelIndices[chain[numLabels - 1 - counter2]];
            counter2++;
        }

        Remove remove = new Remove();
        remove.setAttributeIndicesArray(indicesToRemove);
        remove.setInputFormat(trainDataset);
        remove.setInvertSelection(false);
        ensemble[i].setFilter(remove);

        trainDataset.setClassIndex(labelIndices[chain[i]]);
        debug("Building model " + (i + 1) + "/" + numLabels);
        System.out.println("Building model " + (i + 1) + "/" + numLabels);
        ensemble[i].buildClassifier(trainDataset);
    }
}
From source file:mulan.regressor.transformation.RegressorChainSimple.java
License:Open Source License
protected void buildInternal(MultiLabelInstances train) throws Exception {
    // if no chain has been defined, create the default chain
    if (chain == null) {
        chain = new int[numLabels];
        for (int j = 0; j < numLabels; j++) {
            chain[j] = labelIndices[j];
        }
    }
    if (chainSeed != 0) { // a random chain will be created by shuffling the existing chain
        Random rand = new Random(chainSeed);
        ArrayList<Integer> chainAsList = new ArrayList<Integer>(numLabels);
        for (int j = 0; j < numLabels; j++) {
            chainAsList.add(chain[j]);
        }
        Collections.shuffle(chainAsList, rand);
        for (int j = 0; j < numLabels; j++) {
            chain[j] = chainAsList.get(j);
        }
    }
    debug("Using chain: " + Arrays.toString(chain));

    chainRegressors = new FilteredClassifier[numLabels];
    Instances trainDataset = train.getDataSet();

    for (int i = 0; i < numLabels; i++) {
        chainRegressors[i] = new FilteredClassifier();
        chainRegressors[i].setClassifier(AbstractClassifier.makeCopy(baseRegressor));

        // Indices of attributes to remove.
        // First removes numLabels attributes, then numLabels - 1 attributes, and so on.
        // The loop starts from the last attribute.
        int[] indicesToRemove = new int[numLabels - 1 - i];
        for (int counter1 = 0; counter1 < numLabels - i - 1; counter1++) {
            indicesToRemove[counter1] = chain[numLabels - 1 - counter1];
        }

        Remove remove = new Remove();
        remove.setAttributeIndicesArray(indicesToRemove);
        remove.setInvertSelection(false);
        remove.setInputFormat(trainDataset);
        chainRegressors[i].setFilter(remove);

        trainDataset.setClassIndex(chain[i]);
        debug("Building model " + (i + 1) + "/" + numLabels);
        chainRegressors[i].setDebug(true);
        chainRegressors[i].buildClassifier(trainDataset);
    }
}
From source file:mulan.regressor.transformation.SingleTargetRegressor.java
License:Open Source License
protected void buildInternal(MultiLabelInstances mlTrainSet) throws Exception {
    stRegressors = new FilteredClassifier[numLabels];
    // any changes are applied to a copy of the original dataset
    Instances trainSet = new Instances(mlTrainSet.getDataSet());

    for (int i = 0; i < numLabels; i++) {
        stRegressors[i] = new FilteredClassifier();
        stRegressors[i].setClassifier(AbstractClassifier.makeCopy(baseRegressor));

        // Indices of attributes to remove: all labelIndices except for the current index
        int[] indicesToRemove = new int[numLabels - 1];
        int counter2 = 0;
        for (int counter1 = 0; counter1 < numLabels; counter1++) {
            if (labelIndices[counter1] != labelIndices[i]) {
                indicesToRemove[counter2] = labelIndices[counter1];
                counter2++;
            }
        }

        Remove remove = new Remove();
        remove.setAttributeIndicesArray(indicesToRemove);
        remove.setInvertSelection(false);
        remove.setInputFormat(trainSet);
        stRegressors[i].setFilter(remove);

        trainSet.setClassIndex(labelIndices[i]);
        debug("Building model " + (i + 1) + "/" + numLabels);
        stRegressors[i].buildClassifier(trainSet);
    }
}
From source file:net.sf.jclal.classifier.BinaryRelevance.java
License:Open Source License
/**
 * {@inheritDoc}
 */
protected void buildInternal(MultiLabelInstances train) throws Exception {
    ensemble = new Classifier[numLabels];
    correspondence = new String[numLabels];
    for (int i = 0; i < numLabels; i++) {
        correspondence[i] = train.getDataSet().attribute(labelIndices[i]).name();
    }

    debug("preparing shell");
    brt = new BinaryRelevanceTransformation(train);

    for (int i = 0; i < numLabels; i++) {
        ensemble[i] = AbstractClassifier.makeCopy(baseClassifier);
        Instances shell = brt.transformInstances(i);
        debug("Building model " + (i + 1) + "/" + numLabels);
        ensemble[i].buildClassifier(shell);
    }
}