List of usage examples for weka.core Instances classAttribute
public Attribute classAttribute()
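Before the project examples below, here is a minimal, self-contained sketch of the call itself. It is not taken from any of the listed source files; the file name "iris.arff" and the class name ClassAttributeDemo are placeholders for illustration only.

import weka.core.Attribute;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ClassAttributeDemo {
    public static void main(String[] args) throws Exception {
        // Load any ARFF file; "iris.arff" is just a placeholder path.
        Instances data = DataSource.read("iris.arff");
        // classAttribute() requires the class index to be set first,
        // otherwise Weka throws an UnassignedClassException.
        data.setClassIndex(data.numAttributes() - 1);

        Attribute classAtt = data.classAttribute();
        System.out.println("Class attribute name: " + classAtt.name());
        if (classAtt.isNominal()) {
            // Nominal class: print its possible labels.
            for (int i = 0; i < classAtt.numValues(); i++) {
                System.out.println("  label " + i + ": " + classAtt.value(i));
            }
        } else {
            System.out.println("Class attribute is numeric (regression task).");
        }
    }
}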
From source file:br.com.ufu.lsi.rebfnetwork.RBFNetwork.java
License:Open Source License
/**
 * Builds the classifier.
 *
 * @param instances the training data
 * @throws Exception if the classifier could not be built successfully
 */
public void buildClassifier(Instances instances) throws Exception {

    // can classifier handle the data?
    getCapabilities().testWithFail(instances);

    // remove instances with missing class
    instances = new Instances(instances);
    instances.deleteWithMissingClass();

    // only class? -> build ZeroR model
    if (instances.numAttributes() == 1) {
        System.err.println("Cannot build model (only class attribute present in data!), "
                + "using ZeroR model instead!");
        m_ZeroR = new weka.classifiers.rules.ZeroR();
        m_ZeroR.buildClassifier(instances);
        return;
    } else {
        m_ZeroR = null;
    }

    m_standardize = new Standardize();
    m_standardize.setInputFormat(instances);
    instances = Filter.useFilter(instances, m_standardize);

    SimpleKMeans sk = new SimpleKMeans();
    sk.setNumClusters(m_numClusters);
    sk.setSeed(m_clusteringSeed);
    MakeDensityBasedClusterer dc = new MakeDensityBasedClusterer();
    dc.setClusterer(sk);
    dc.setMinStdDev(m_minStdDev);
    m_basisFilter = new ClusterMembership();
    m_basisFilter.setDensityBasedClusterer(dc);
    m_basisFilter.setInputFormat(instances);
    Instances transformed = Filter.useFilter(instances, m_basisFilter);

    if (instances.classAttribute().isNominal()) {
        m_linear = null;
        m_logistic = new Logistic();
        m_logistic.setRidge(m_ridge);
        m_logistic.setMaxIts(m_maxIts);
        m_logistic.buildClassifier(transformed);
    } else {
        m_logistic = null;
        m_linear = new LinearRegression();
        m_linear.setAttributeSelectionMethod(
                new SelectedTag(LinearRegression.SELECTION_NONE, LinearRegression.TAGS_SELECTION));
        m_linear.setRidge(m_ridge);
        m_linear.buildClassifier(transformed);
    }
}
From source file:cerebro.Id3.java
License:Open Source License
/**
 * Method for building an Id3 tree.
 *
 * @param data the training data
 * @exception Exception if decision tree can't be built successfully
 */
private void makeTree(Instances data) throws Exception {

    // Check if no instances have reached this node.
    if (data.numInstances() == 0) {
        m_Attribute = null;
        m_ClassValue = Instance.missingValue();
        m_Distribution = new double[data.numClasses()];
        return;
    }

    // Compute attribute with maximum information gain.
    double[] infoGains = new double[data.numAttributes()];
    Enumeration attEnum = data.enumerateAttributes();
    while (attEnum.hasMoreElements()) {
        Attribute att = (Attribute) attEnum.nextElement();
        infoGains[att.index()] = computeInfoGain(data, att);
    }
    m_Attribute = data.attribute(Utils.maxIndex(infoGains));

    // Make leaf if information gain is zero.
    // Otherwise create successors.
    if (Utils.eq(infoGains[m_Attribute.index()], 0)) {
        m_Attribute = null;
        m_Distribution = new double[data.numClasses()];
        Enumeration instEnum = data.enumerateInstances();
        while (instEnum.hasMoreElements()) {
            Instance inst = (Instance) instEnum.nextElement();
            m_Distribution[(int) inst.classValue()]++;
        }
        Utils.normalize(m_Distribution);
        m_ClassValue = Utils.maxIndex(m_Distribution);
        m_ClassAttribute = data.classAttribute();
    } else {
        Instances[] splitData = splitData(data, m_Attribute);
        m_Successors = new Id3[m_Attribute.numValues()];
        for (int j = 0; j < m_Attribute.numValues(); j++) {
            m_Successors[j] = new Id3();
            m_Successors[j].makeTree(splitData[j]);
        }
    }
}
From source file:cezeri.evaluater.FactoryEvaluation.java
public static Evaluation performCrossValidate(Classifier model, Instances datax, int folds, boolean show_text,
        boolean show_plot, TFigureAttribute attr) {
    Random rand = new Random(1);
    Instances randData = new Instances(datax);
    randData.randomize(rand);
    if (randData.classAttribute().isNominal()) {
        randData.stratify(folds);
    }
    Evaluation eval = null;
    try {
        // perform cross-validation
        eval = new Evaluation(randData);
        // accumulators for predicted and actual class values across all folds
        double[] simulated = new double[0];
        double[] observed = new double[0];
        // double[] sim = new double[0];
        // double[] obs = new double[0];
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n, rand);
            Instances validation = randData.testCV(folds, n);
            // build and evaluate classifier
            Classifier clsCopy = Classifier.makeCopy(model);
            clsCopy.buildClassifier(train);
            // sim = eval.evaluateModel(clsCopy, validation);
            // obs = validation.attributeToDoubleArray(validation.classIndex());
            // if (show_plot) {
            //     double[][] d = new double[2][sim.length];
            //     d[0] = obs;
            //     d[1] = sim;
            //     CMatrix f1 = CMatrix.getInstance(d);
            //     f1.transpose().plot(attr);
            // }
            // if (show_text) {
            //     // output evaluation
            //     System.out.println();
            //     System.out.println("=== Setup for each Cross Validation fold===");
            //     System.out.println("Classifier: " + model.getClass().getName() + " " + Utils.joinOptions(model.getOptions()));
            //     System.out.println("Dataset: " + randData.relationName());
            //     System.out.println("Folds: " + folds);
            //     System.out.println("Seed: " + 1);
            //     System.out.println();
            //     System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false));
            // }
            simulated = FactoryUtils.concatenate(simulated, eval.evaluateModel(clsCopy, validation));
            observed = FactoryUtils.concatenate(observed,
                    validation.attributeToDoubleArray(validation.classIndex()));
            // simulated = FactoryUtils.mean(simulated, eval.evaluateModel(clsCopy, validation));
            // observed = FactoryUtils.mean(observed, validation.attributeToDoubleArray(validation.classIndex()));
        }
        if (show_plot) {
            double[][] d = new double[2][simulated.length];
            d[0] = observed;
            d[1] = simulated;
            CMatrix f1 = CMatrix.getInstance(d);
            attr.figureCaption = "overall performance";
            f1.transpose().plot(attr);
        }
        if (show_text) {
            // output evaluation
            System.out.println();
            System.out.println("=== Setup for Overall Cross Validation===");
            System.out.println("Classifier: " + model.getClass().getName() + " "
                    + Utils.joinOptions(model.getOptions()));
            System.out.println("Dataset: " + randData.relationName());
            System.out.println("Folds: " + folds);
            System.out.println("Seed: " + 1);
            System.out.println();
            System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false));
        }
    } catch (Exception ex) {
        Logger.getLogger(FactoryEvaluation.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}
From source file:cezeri.feature.selection.FeatureSelectionRanker.java
private static TFeatureRank[] computeCombinationPairs(String[] lstComb, Instances data, Classifier model,
        int nFolds, boolean show_text, boolean show_plot) {
    TFeatureRank[] ret = new TFeatureRank[lstComb.length];
    int m = lstComb.length;
    double q = m * 1.0 / 100;
    int n = 0;
    for (int i = 0; i < m; i++) {
        if (n != (int) Math.round(i / q)) {
            n = (int) Math.round(i / q);
            System.out.println("progress:" + n + "%");
        }
        TFeatureRank obj = new TFeatureRank();
        obj.featureName = lstComb[i];
        obj.index = i + "";
        Instances subsetData = FactoryInstance.getSubsetData(data, lstComb[i].split(","));
        Evaluation eval = FactoryEvaluation.performCrossValidate(model, subsetData, nFolds, show_text,
                show_plot);
        try {
            if (data.classAttribute().isNominal()) {
                obj.value = eval.pctCorrect();
            } else {
                obj.value = eval.correlationCoefficient();
            }
        } catch (Exception ex) {
            Logger.getLogger(FeatureSelectionRanker.class.getName()).log(Level.SEVERE, null, ex);
        }
        ret[i] = obj;
    }
    ArrayList<TFeatureRank> lst = toArrayList(ret);
    Collections.sort(lst, new CustomComparatorForFeatureRank());
    ret = toArray(lst);
    return ret;
}
From source file:cezeri.feature.selection.FeatureSelectionRanker.java
private static double computeCombinationFeature(String lstComb, Instances data, int folds, Classifier model,
        boolean show_text, boolean show_plot) {
    TFeatureRank obj = new TFeatureRank();
    obj.featureName = lstComb;
    obj.index = "";
    Instances subsetData = FactoryInstance.getSubsetData(data, lstComb.split(","));
    Evaluation eval = FactoryEvaluation.performCrossValidate(model, subsetData, folds, show_text, show_plot);
    try {
        if (data.classAttribute().isNominal()) {
            obj.value = eval.pctCorrect();
        } else {
            obj.value = eval.correlationCoefficient();
        }
    } catch (Exception ex) {
        Logger.getLogger(FeatureSelectionRanker.class.getName()).log(Level.SEVERE, null, ex);
    }
    return obj.value;
}
From source file:cezeri.utils.FactoryInstance.java
public static String[] getAttributeListExceptClassAttribute(Instances data) {
    int n = data.numAttributes();
    String[] ret = new String[n - 1];
    String classAtt = data.classAttribute().name();
    int k = 0;
    for (int i = 0; i < n; i++) {
        if (!classAtt.equals(data.attribute(i).name())) {
            ret[k++] = data.attribute(i).name();
        }
    }
    return ret;
}
From source file:cezeri.utils.FactoryInstance.java
public static String[] getDefaultClasses(Instances data) {
    // String[] str = getOriginalClasses(data);
    int n = data.numDistinctValues(data.classAttribute());
    // int n = data.numClasses();
    String[] ret = new String[n];
    for (int i = 0; i < n; i++) {
        ret[i] = i + "";
    }
    return ret;
}
From source file:cezeri.utils.FactoryInstance.java
public static Instances getSubsetData(Instances data, String[] attList) {
    Instances temp = new Instances(data);
    for (int i = 0; i < data.numAttributes(); i++) {
        if (!temp.attribute(0).equals(temp.classAttribute())) {
            temp.deleteAttributeAt(0);
        }
    }
    double[][] m = new double[attList.length + 1][data.numInstances()];
    for (int i = 0; i < attList.length; i++) {
        int n = attList.length - 1 - i;
        String str = attList[n];
        Attribute t = data.attribute(str);
        double[] d = data.attributeToDoubleArray(t.index());
        m[n] = d;
        temp.insertAttributeAt(t, 0);
    }
    m[attList.length] = data.attributeToDoubleArray(data.classIndex());
    m = CMatrix.getInstance(m).transpose().get2DArrayDouble();
    FastVector att = new FastVector();
    for (int i = 0; i < temp.numAttributes(); i++) {
        att.addElement(temp.attribute(i));
    }
    Instances ret = new Instances(temp.relationName(), att, m.length);
    for (int i = 0; i < m.length; i++) {
        Instance ins = new Instance(m[0].length);
        for (int j = 0; j < m[0].length; j++) {
            ins.setValue(j, m[i][j]);
        }
        ret.add(ins);
    }
    ret.setClassIndex(temp.classIndex());
    return ret;
}
From source file:Clases.RedNeuronal.RedNeuronal.java
public void redNeuronal(int puntaje, int tiempo, int error) throws Exception {
    // if score >= 200 then "learned"
    // if time <= 240 (4 minutes) then "learned"
    // if errors <= 3 then "learned"
    String[] dato = { obtnerPuntaje(puntaje), obtenerTiempo(tiempo), obtenerErrores(error) };
    ConverterUtils.DataSource con = new ConverterUtils.DataSource(
            "C:\\Users\\USUARIO\\Documents\\SILVIIS\\10 Modulo\\2.ANTEPROYECTOS DE TESIS\\Proyecto\\Aplicacion\\redeAprendizaje.arff");
    // ConverterUtils.DataSource con = new ConverterUtils.DataSource("E:\\Unl\\10 Modulo\\2.ANTEPROYECTOS DE TESIS\\Proyecto\\Aplicacion\\redeAprendizaje.arff");
    Instances instances = con.getDataSet();
    System.out.println(instances);
    instances.setClassIndex(instances.numAttributes() - 1);

    MultilayerPerceptron mp = new MultilayerPerceptron();
    mp.buildClassifier(instances);

    Evaluation evalucion = new Evaluation(instances);
    evalucion.evaluateModel(mp, instances);
    System.out.println(evalucion.toSummaryString());
    System.out.println(evalucion.toMatrixString());

    String datosEntrada = null;
    String datosSalida = "no se puede predecir";
    for (int i = 0; i < instances.numInstances(); i++) {
        double predecido = mp.classifyInstance(instances.instance(i));
        datosEntrada = dato[0] + " " + dato[1] + " " + dato[2];
        if ((int) instances.instance(i).value(0) == Integer.parseInt(dato[0])
                && (int) instances.instance(i).value(1) == Integer.parseInt(dato[1])
                && (int) instances.instance(i).value(2) == Integer.parseInt(dato[2])) {
            datosSalida = instances.classAttribute().value((int) predecido);
        }
    }
    System.out.println("DATOS DE ENTRADA: " + datosEntrada);
    System.out.println("SALIDA PREDECIDA: " + datosSalida);
    switch (datosSalida) {
    case "0":
        resultado = "Excelente ha aprendido";
        imgResultado = "Excelente.jpg";
        imgREDneuronal = "0.png";
        System.out.println("Excelente ha aprendido");
        break;
    case "1":
        resultado = "Disminuir Errores";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "1.png";
        System.out.println("Disminuir Errores");
        break;
    case "2":
        resultado = "Disminuir Tiempo";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "2.png";
        System.out.println("Disminuir Tiempo");
        break;
    case "3":
        resultado = "Disminuir Errores y tiempo";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "3.png";
        System.out.println("Disminuir Errores y tiempo");
        break;
    case "4":
        resultado = "Subir Puntaje";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "4.png";
        System.out.println("Subir Puntaje");
        break;
    case "5":
        resultado = "Subir Puntaje y disminuir Errores";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "5.png";
        System.out.println("Subir Puntaje y disminuir Errores");
        break;
    case "6":
        resultado = "Subir Puntaje y disminuir Tiempo";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "6.png";
        System.out.println("Subir Puntaje y disminuir Tiempo");
        break;
    case "7":
        resultado = "Ponle mas Empeño";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "7.png";
        System.out.println("Ponle mas Empeño");
        break;
    default:
        resultado = "Verifique entradas, no se puede predecir";
        imgResultado = "Error.jpg";
        System.out.println("Verifique entradas, no se puede predecir");
        break;
    }
}
From source file:clasificacion.Clasificacion.java
public String clasificar(String[] testCases) throws Exception {
    String ruta = "nursery_model.model";
    InputStream classModelStream;
    classModelStream = getClass().getResourceAsStream(ruta);
    // classModel = (Classifier) SerializationHelper.read(classModelStream);
    Classifier clasify = (Classifier) SerializationHelper.read(classModelStream);

    FastVector parents = new FastVector();
    parents.addElement("usual");
    parents.addElement("pretentious");
    parents.addElement("great_pret");
    Attribute _parent = new Attribute("parents", parents);

    FastVector nurs = new FastVector();
    nurs.addElement("proper");
    nurs.addElement("less_proper");
    nurs.addElement("improper");
    nurs.addElement("critical");
    nurs.addElement("very_crit");
    Attribute _has_nurs = new Attribute("has_nurs", nurs);

    FastVector form = new FastVector();
    form.addElement("complete");
    form.addElement("completed");
    form.addElement("incomplete");
    form.addElement("foster");
    Attribute _form = new Attribute("form", form);

    FastVector children = new FastVector();
    children.addElement("1");
    children.addElement("2");
    children.addElement("3");
    children.addElement("more");
    Attribute _children = new Attribute("children", children);

    FastVector housing = new FastVector();
    housing.addElement("convenient");
    housing.addElement("less_conv");
    housing.addElement("critical");
    Attribute _housing = new Attribute("housing", housing);

    FastVector finance = new FastVector();
    finance.addElement("convenient");
    finance.addElement("inconv");
    Attribute _finance = new Attribute("finance", finance);

    FastVector social = new FastVector();
    social.addElement("nonprob");
    social.addElement("slightly_prob");
    social.addElement("problematic");
    Attribute _social = new Attribute("social", social);

    FastVector health = new FastVector();
    health.addElement("recommended");
    health.addElement("priority");
    health.addElement("not_recom");
    Attribute _health = new Attribute("health", health);

    FastVector Class = new FastVector();
    Class.addElement("not_recom");
    Class.addElement("recommend");
    Class.addElement("very_recom");
    Class.addElement("priority");
    Class.addElement("spec_prior");
    Attribute _Class = new Attribute("class", Class);

    FastVector atributos = new FastVector(9);
    atributos.addElement(_parent);
    atributos.addElement(_has_nurs);
    atributos.addElement(_form);
    atributos.addElement(_children);
    atributos.addElement(_housing);
    atributos.addElement(_finance);
    atributos.addElement(_social);
    atributos.addElement(_health);
    atributos.addElement(_Class);

    ArrayList<Attribute> atributs = new ArrayList<>();
    atributs.add(_parent);
    atributs.add(_has_nurs);
    atributs.add(_form);
    atributs.add(_children);
    atributs.add(_housing);
    atributs.add(_finance);
    atributs.add(_social);
    atributs.add(_health);
    atributs.add(_Class);

    // Build the Instances object that carries all of the model's attributes.
    Instances dataTest = new Instances("TestCases", atributos, 1);
    dataTest.setClassIndex(8);
    Instance setPrueba = new Instance(9);
    int index = -1;
    for (int i = 0; i < 8; i++) {
        index = atributs.get(i).indexOfValue(testCases[i]);
        // System.out.println(i + " " + atributs.get(i) + " " + index + " " + testCases[i]);
        setPrueba.setValue(atributs.get(i), index);
    }
    // Add the set to be evaluated.
    dataTest.add(setPrueba);

    // Run the prediction.
    // Instance 0 is used because it is the only one present.
    double valorP = clasify.classifyInstance(dataTest.instance(0));
    // get the name of the class value
    String prediccion = dataTest.classAttribute().value((int) valorP);

    return prediccion;
}