List of usage examples for the weka.core.Instances constructor
public Instances(String name, ArrayList<Attribute> attInfo, int capacity)
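The examples below come from real projects and mix two generations of the Weka API: the older one (FastVector, Instance) and the newer one (ArrayList&lt;Attribute&gt;, DenseInstance) that matches the signature above. As orientation, here is a minimal sketch of the constructor itself, assuming Weka 3.7 or later; the class name, dataset name, and attribute names are illustrative, not taken from the examples.

import java.util.ArrayList;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class InstancesSketch {
    public static void main(String[] args) {
        // One numeric attribute and one nominal class attribute.
        ArrayList<Attribute> attInfo = new ArrayList<>();
        attInfo.add(new Attribute("score")); // numeric
        ArrayList<String> classValues = new ArrayList<>();
        classValues.add("yes");
        classValues.add("no");
        attInfo.add(new Attribute("class", classValues)); // nominal

        // Instances(String name, ArrayList<Attribute> attInfo, int capacity)
        Instances dataset = new Instances("sketch", attInfo, 0);
        dataset.setClassIndex(dataset.numAttributes() - 1);

        // Add a single row.
        Instance row = new DenseInstance(dataset.numAttributes());
        row.setDataset(dataset); // needed before setting a nominal value by index
        row.setValue(0, 0.42);
        row.setValue(1, "yes");
        dataset.add(row);

        System.out.println(dataset);
    }
}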
From source file: csav2.Weka_additive.java

public void classifyTestSet1(String input) throws Exception {
    String ids = "";
    ReaderWriter rw = new ReaderWriter();

    //ATTRIBUTES
    Attribute attr[] = new Attribute[50];
    //numeric
    attr[0] = new Attribute("Autosentiment");
    //class
    FastVector classValue = new FastVector(3);
    classValue.addElement("p");
    classValue.addElement("n");
    classValue.addElement("o");
    attr[1] = new Attribute("answer", classValue);

    FastVector attrs = new FastVector();
    attrs.addElement(attr[0]);
    attrs.addElement(attr[1]);

    // Add Instances
    Instances dataset = new Instances("my_dataset", attrs, 0);
    StringTokenizer tokenizer = new StringTokenizer(input);
    while (tokenizer.hasMoreTokens()) {
        Instance example = new Instance(2);
        for (int j = 0; j < 2; j++) {
            String st = tokenizer.nextToken();
            System.out.println(j + " " + st);
            if (j == 0)
                example.setValue(attr[j], Float.parseFloat(st));
            else if (j == 1)
                example.setValue(attr[j], st);
            else
                example.setValue(attr[j], Integer.parseInt(st));
        }
        ids += tokenizer.nextToken() + "\t";
        dataset.add(example);
    }

    //Save dataset
    String file = "Classifier\\featurefile_additive_test1.arff";
    ArffSaver saver = new ArffSaver();
    saver.setInstances(dataset);
    saver.setFile(new File(file));
    saver.writeBatch();

    //Read dataset
    ArffLoader loader = new ArffLoader();
    loader.setFile(new File(file));
    dataset = loader.getDataSet();

    //Set the class index
    dataset.setClassIndex(1);

    //Read classifier back
    String file1 = "Classifier\\classifier_add_autosentiment.model";
    InputStream is = new FileInputStream(file1);
    Classifier classifier;
    ObjectInputStream objectInputStream = new ObjectInputStream(is);
    classifier = (Classifier) objectInputStream.readObject();

    //Evaluate
    Instances test = new Instances(dataset, 0, dataset.numInstances());
    test.setClassIndex(1);

    //Do eval
    Evaluation eval = new Evaluation(test); //trainset
    eval.evaluateModel(classifier, test); //testset
    System.out.println(eval.toSummaryString());
    System.out.println("WEIGHTED F-MEASURE:" + eval.weightedFMeasure());
    System.out.println("WEIGHTED PRECISION:" + eval.weightedPrecision());
    System.out.println("WEIGHTED RECALL:" + eval.weightedRecall());

    //output predictions
    String optest = "", val = "";
    StringTokenizer op = new StringTokenizer(ids);
    int count = 0;
    while (op.hasMoreTokens()) {
        double[] prediction = classifier.distributionForInstance(test.instance(count));
        count += 1;
        //optest += op.nextToken() + " " + Double.toString((double) Math.round((prediction[0]) * 1000) / 1000) + "\n";
        if (prediction[0] > prediction[1]) {
            if (prediction[0] > prediction[2]) {
                val = "p: " + Double.toString((double) Math.round(prediction[0] * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round(prediction[2] * 1000) / 1000);
            }
        } else {
            if (prediction[1] > prediction[2]) {
                val = "n: " + Double.toString((double) Math.round(prediction[1] * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round(prediction[2] * 1000) / 1000);
            }
        }
        optest += op.nextToken() + "\t" + val + "\n";
    }
    rw.writeToFile(optest, "Answers_additive_Test1", "txt");
}
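All six csav2 examples deserialize the saved model with a raw FileInputStream/ObjectInputStream pair and never close the stream. Weka ships weka.core.SerializationHelper, which wraps the same pattern in one call; a minimal sketch reusing the model path from the example above (the class name and println are illustrative):

import weka.classifiers.Classifier;
import weka.core.SerializationHelper;

public class LoadModelSketch {
    public static void main(String[] args) throws Exception {
        // One call replaces the FileInputStream/ObjectInputStream pair
        // and closes the underlying stream for you.
        Classifier classifier = (Classifier) SerializationHelper
                .read("Classifier\\classifier_add_autosentiment.model");
        System.out.println(classifier);
    }
}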
From source file: csav2.Weka_additive.java

public void classifyTestSet2(String input) throws Exception {
    String ids = "";
    ReaderWriter rw = new ReaderWriter();

    //ATTRIBUTES
    Attribute attr[] = new Attribute[50];
    //numeric
    attr[0] = new Attribute("Autosentiment");
    attr[1] = new Attribute("PositiveMatch");
    attr[2] = new Attribute("NegativeMatch");
    //class
    FastVector classValue = new FastVector(3);
    classValue.addElement("p");
    classValue.addElement("n");
    classValue.addElement("o");
    attr[3] = new Attribute("answer", classValue);

    FastVector attrs = new FastVector();
    attrs.addElement(attr[0]);
    attrs.addElement(attr[1]);
    attrs.addElement(attr[2]);
    attrs.addElement(attr[3]);

    // Add Instances
    Instances dataset = new Instances("my_dataset", attrs, 0);
    StringTokenizer tokenizer = new StringTokenizer(input);
    while (tokenizer.hasMoreTokens()) {
        Instance example = new Instance(4);
        for (int j = 0; j < 4; j++) {
            String st = tokenizer.nextToken();
            System.out.println(j + " " + st);
            if (j == 0)
                example.setValue(attr[j], Float.parseFloat(st));
            else if (j == 3)
                example.setValue(attr[j], st);
            else
                example.setValue(attr[j], Integer.parseInt(st));
        }
        ids += tokenizer.nextToken() + "\t";
        dataset.add(example);
    }

    //Save dataset
    String file = "Classifier\\featurefile_additive_test2.arff";
    ArffSaver saver = new ArffSaver();
    saver.setInstances(dataset);
    saver.setFile(new File(file));
    saver.writeBatch();

    //Read dataset
    ArffLoader loader = new ArffLoader();
    loader.setFile(new File(file));
    dataset = loader.getDataSet();

    //Set the class index
    dataset.setClassIndex(3);

    //Read classifier back
    String file1 = "Classifier\\classifier_add_asAndpolarwords.model";
    InputStream is = new FileInputStream(file1);
    Classifier classifier;
    ObjectInputStream objectInputStream = new ObjectInputStream(is);
    classifier = (Classifier) objectInputStream.readObject();

    //Evaluate
    Instances test = new Instances(dataset, 0, dataset.numInstances());
    test.setClassIndex(3);

    //Do eval
    Evaluation eval = new Evaluation(test); //trainset
    eval.evaluateModel(classifier, test); //testset
    System.out.println(eval.toSummaryString());
    System.out.println("WEIGHTED F-MEASURE:" + eval.weightedFMeasure());
    System.out.println("WEIGHTED PRECISION:" + eval.weightedPrecision());
    System.out.println("WEIGHTED RECALL:" + eval.weightedRecall());

    //output predictions
    String optest = "", val = "";
    StringTokenizer op = new StringTokenizer(ids);
    int count = 0;
    while (op.hasMoreTokens()) {
        double[] prediction = classifier.distributionForInstance(test.instance(count));
        count += 1;
        if (prediction[0] > prediction[1]) {
            if (prediction[0] > prediction[2]) {
                val = "p: " + Double.toString((double) Math.round(prediction[0] * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round(prediction[2] * 1000) / 1000);
            }
        } else {
            if (prediction[1] > prediction[2]) {
                val = "n: " + Double.toString((double) Math.round(prediction[1] * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round(prediction[2] * 1000) / 1000);
            }
        }
        optest += op.nextToken() + "\t" + val + "\n";
    }
    rw.writeToFile(optest, "Answers_additive_Test2", "txt");
}
From source file: csav2.Weka_additive.java

public void classifyTestSet3(String input) throws Exception {
    String ids = "";
    ReaderWriter rw = new ReaderWriter();

    //ATTRIBUTES
    Attribute attr[] = new Attribute[50];
    //numeric
    attr[0] = new Attribute("Autosentiment");
    attr[1] = new Attribute("PositiveMatch");
    attr[2] = new Attribute("NegativeMatch");
    attr[3] = new Attribute("FW");
    attr[4] = new Attribute("JJ");
    attr[5] = new Attribute("RB");
    attr[6] = new Attribute("RB_JJ");
    //class
    FastVector classValue = new FastVector(3);
    classValue.addElement("p");
    classValue.addElement("n");
    classValue.addElement("o");
    attr[7] = new Attribute("answer", classValue);

    FastVector attrs = new FastVector();
    attrs.addElement(attr[0]);
    attrs.addElement(attr[1]);
    attrs.addElement(attr[2]);
    attrs.addElement(attr[3]);
    attrs.addElement(attr[4]);
    attrs.addElement(attr[5]);
    attrs.addElement(attr[6]);
    attrs.addElement(attr[7]);

    // Add Instances
    Instances dataset = new Instances("my_dataset", attrs, 0);
    StringTokenizer tokenizer = new StringTokenizer(input);
    while (tokenizer.hasMoreTokens()) {
        Instance example = new Instance(8);
        for (int j = 0; j < 8; j++) {
            String st = tokenizer.nextToken();
            System.out.println(j + " " + st);
            if (j == 0)
                example.setValue(attr[j], Float.parseFloat(st));
            else if (j == 7)
                example.setValue(attr[j], st);
            else
                example.setValue(attr[j], Integer.parseInt(st));
        }
        ids += tokenizer.nextToken() + "\t";
        dataset.add(example);
    }

    //Save dataset
    String file = "Classifier\\featurefile_additive_test3.arff";
    ArffSaver saver = new ArffSaver();
    saver.setInstances(dataset);
    saver.setFile(new File(file));
    saver.writeBatch();

    //Read dataset
    ArffLoader loader = new ArffLoader();
    loader.setFile(new File(file));
    dataset = loader.getDataSet();

    //Set the class index
    dataset.setClassIndex(7);

    //Read classifier back
    String file1 = "Classifier\\classifier_add_asAndpolarwordsAndpos.model";
    InputStream is = new FileInputStream(file1);
    Classifier classifier;
    ObjectInputStream objectInputStream = new ObjectInputStream(is);
    classifier = (Classifier) objectInputStream.readObject();

    //Evaluate
    Instances test = new Instances(dataset, 0, dataset.numInstances());
    test.setClassIndex(7);

    //Do eval
    Evaluation eval = new Evaluation(test); //trainset
    eval.evaluateModel(classifier, test); //testset
    System.out.println(eval.toSummaryString());
    System.out.println("WEIGHTED F-MEASURE:" + eval.weightedFMeasure());
    System.out.println("WEIGHTED PRECISION:" + eval.weightedPrecision());
    System.out.println("WEIGHTED RECALL:" + eval.weightedRecall());

    //output predictions
    String optest = "", val = "";
    StringTokenizer op = new StringTokenizer(ids);
    int count = 0;
    while (op.hasMoreTokens()) {
        double[] prediction = classifier.distributionForInstance(test.instance(count));
        count += 1;
        if (prediction[0] > prediction[1]) {
            if (prediction[0] > prediction[2]) {
                val = "p: " + Double.toString((double) Math.round(prediction[0] * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round(prediction[2] * 1000) / 1000);
            }
        } else {
            if (prediction[1] > prediction[2]) {
                val = "n: " + Double.toString((double) Math.round(prediction[1] * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round(prediction[2] * 1000) / 1000);
            }
        }
        optest += op.nextToken() + "\t" + val + "\n";
    }
    rw.writeToFile(optest, "Answers_additive_Test3", "txt");
}
From source file: csav2.Weka_additive.java

public void classifyTestSet4(String input) throws Exception {
    String ids = "";
    ReaderWriter rw = new ReaderWriter();

    //ATTRIBUTES
    Attribute attr[] = new Attribute[50];
    //numeric
    attr[0] = new Attribute("Autosentiment");
    attr[1] = new Attribute("PositiveMatch");
    attr[2] = new Attribute("NegativeMatch");
    attr[3] = new Attribute("FW");
    attr[4] = new Attribute("JJ");
    attr[5] = new Attribute("RB");
    attr[6] = new Attribute("RB_JJ");
    attr[7] = new Attribute("amod");
    attr[8] = new Attribute("acomp");
    attr[9] = new Attribute("advmod");
    //class
    FastVector classValue = new FastVector(3);
    classValue.addElement("p");
    classValue.addElement("n");
    classValue.addElement("o");
    attr[10] = new Attribute("answer", classValue);

    FastVector attrs = new FastVector();
    attrs.addElement(attr[0]);
    attrs.addElement(attr[1]);
    attrs.addElement(attr[2]);
    attrs.addElement(attr[3]);
    attrs.addElement(attr[4]);
    attrs.addElement(attr[5]);
    attrs.addElement(attr[6]);
    attrs.addElement(attr[7]);
    attrs.addElement(attr[8]);
    attrs.addElement(attr[9]);
    attrs.addElement(attr[10]);

    // Add Instances
    Instances dataset = new Instances("my_dataset", attrs, 0);
    StringTokenizer tokenizer = new StringTokenizer(input);
    while (tokenizer.hasMoreTokens()) {
        Instance example = new Instance(11);
        for (int j = 0; j < 11; j++) {
            String st = tokenizer.nextToken();
            System.out.println(j + " " + st);
            if (j == 0)
                example.setValue(attr[j], Float.parseFloat(st));
            else if (j == 10)
                example.setValue(attr[j], st);
            else
                example.setValue(attr[j], Integer.parseInt(st));
        }
        ids += tokenizer.nextToken() + "\t";
        dataset.add(example);
    }

    //Save dataset
    String file = "Classifier\\featurefile_additive_test4.arff";
    ArffSaver saver = new ArffSaver();
    saver.setInstances(dataset);
    saver.setFile(new File(file));
    saver.writeBatch();

    //Read dataset
    ArffLoader loader = new ArffLoader();
    loader.setFile(new File(file));
    dataset = loader.getDataSet();

    //Set the class index
    dataset.setClassIndex(10);

    //Read classifier back
    String file1 = "Classifier\\classifier_add_asAndpolarwordsAndposAnddep.model";
    InputStream is = new FileInputStream(file1);
    Classifier classifier;
    ObjectInputStream objectInputStream = new ObjectInputStream(is);
    classifier = (Classifier) objectInputStream.readObject();

    //Evaluate
    Instances test = new Instances(dataset, 0, dataset.numInstances());
    test.setClassIndex(10);

    //Do eval
    Evaluation eval = new Evaluation(test); //trainset
    eval.evaluateModel(classifier, test); //testset
    System.out.println(eval.toSummaryString());
    System.out.println("WEIGHTED F-MEASURE:" + eval.weightedFMeasure());
    System.out.println("WEIGHTED PRECISION:" + eval.weightedPrecision());
    System.out.println("WEIGHTED RECALL:" + eval.weightedRecall());

    //output predictions
    String optest = "", val = "";
    StringTokenizer op = new StringTokenizer(ids);
    int count = 0;
    while (op.hasMoreTokens()) {
        double[] prediction = classifier.distributionForInstance(test.instance(count));
        count += 1;
        if (prediction[0] > prediction[1]) {
            if (prediction[0] > prediction[2]) {
                val = "p: " + Double.toString((double) Math.round(prediction[0] * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round(prediction[2] * 1000) / 1000);
            }
        } else {
            if (prediction[1] > prediction[2]) {
                val = "n: " + Double.toString((double) Math.round(prediction[1] * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round(prediction[2] * 1000) / 1000);
            }
        }
        optest += op.nextToken() + "\t" + val + "\n";
    }
    rw.writeToFile(optest, "Answers_additive_Test4", "txt");
}
From source file: csav2.Weka_additive.java

public void classifyTestSet5(String input) throws Exception {
    String ids = "";
    ReaderWriter rw = new ReaderWriter();

    //ATTRIBUTES
    Attribute attr[] = new Attribute[50];
    //numeric
    attr[0] = new Attribute("Autosentiment");
    attr[1] = new Attribute("PositiveMatch");
    attr[2] = new Attribute("NegativeMatch");
    attr[3] = new Attribute("FW");
    attr[4] = new Attribute("JJ");
    attr[5] = new Attribute("RB");
    attr[6] = new Attribute("RB_JJ");
    attr[7] = new Attribute("amod");
    attr[8] = new Attribute("acomp");
    attr[9] = new Attribute("advmod");
    attr[10] = new Attribute("BLPos");
    attr[11] = new Attribute("BLNeg");
    //class
    FastVector classValue = new FastVector(3);
    classValue.addElement("p");
    classValue.addElement("n");
    classValue.addElement("o");
    attr[12] = new Attribute("answer", classValue);

    FastVector attrs = new FastVector();
    attrs.addElement(attr[0]);
    attrs.addElement(attr[1]);
    attrs.addElement(attr[2]);
    attrs.addElement(attr[3]);
    attrs.addElement(attr[4]);
    attrs.addElement(attr[5]);
    attrs.addElement(attr[6]);
    attrs.addElement(attr[7]);
    attrs.addElement(attr[8]);
    attrs.addElement(attr[9]);
    attrs.addElement(attr[10]);
    attrs.addElement(attr[11]);
    attrs.addElement(attr[12]);

    // Add Instances
    Instances dataset = new Instances("my_dataset", attrs, 0);
    StringTokenizer tokenizer = new StringTokenizer(input);
    while (tokenizer.hasMoreTokens()) {
        Instance example = new Instance(13);
        for (int j = 0; j < 13; j++) {
            String st = tokenizer.nextToken();
            System.out.println(j + " " + st);
            if (j == 0)
                example.setValue(attr[j], Float.parseFloat(st));
            else if (j == 12)
                example.setValue(attr[j], st);
            else
                example.setValue(attr[j], Integer.parseInt(st));
        }
        ids += tokenizer.nextToken() + "\t";
        dataset.add(example);
    }

    //Save dataset
    String file = "Classifier\\featurefile_additive_test5.arff";
    ArffSaver saver = new ArffSaver();
    saver.setInstances(dataset);
    saver.setFile(new File(file));
    saver.writeBatch();

    //Read dataset
    ArffLoader loader = new ArffLoader();
    loader.setFile(new File(file));
    dataset = loader.getDataSet();

    //Set the class index
    dataset.setClassIndex(12);

    //Read classifier back
    String file1 = "Classifier\\classifier_add_asAndpolarwordsAndposAnddepAndbl.model";
    InputStream is = new FileInputStream(file1);
    Classifier classifier;
    ObjectInputStream objectInputStream = new ObjectInputStream(is);
    classifier = (Classifier) objectInputStream.readObject();

    //Evaluate
    Instances test = new Instances(dataset, 0, dataset.numInstances());
    test.setClassIndex(12);

    //Do eval
    Evaluation eval = new Evaluation(test); //trainset
    eval.evaluateModel(classifier, test); //testset
    System.out.println(eval.toSummaryString());
    System.out.println("WEIGHTED F-MEASURE:" + eval.weightedFMeasure());
    System.out.println("WEIGHTED PRECISION:" + eval.weightedPrecision());
    System.out.println("WEIGHTED RECALL:" + eval.weightedRecall());

    //output predictions
    String optest = "", val = "";
    StringTokenizer op = new StringTokenizer(ids);
    int count = 0;
    while (op.hasMoreTokens()) {
        double[] prediction = classifier.distributionForInstance(test.instance(count));
        count += 1;
        if (prediction[0] > prediction[1]) {
            if (prediction[0] > prediction[2]) {
                val = "p: " + Double.toString((double) Math.round(prediction[0] * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round(prediction[2] * 1000) / 1000);
            }
        } else {
            if (prediction[1] > prediction[2]) {
                val = "n: " + Double.toString((double) Math.round(prediction[1] * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round(prediction[2] * 1000) / 1000);
            }
        }
        optest += op.nextToken() + "\t" + val + "\n";
    }
    rw.writeToFile(optest, "Answers_additive_Test5", "txt");
}
From source file: csav2.Weka_additive.java

public void classifyTestSet6(String input) throws Exception {
    String ids = "";
    ReaderWriter rw = new ReaderWriter();

    //ATTRIBUTES
    Attribute attr[] = new Attribute[50];
    //numeric
    attr[0] = new Attribute("Autosentiment");
    attr[1] = new Attribute("PositiveMatch");
    attr[2] = new Attribute("NegativeMatch");
    attr[3] = new Attribute("FW");
    attr[4] = new Attribute("JJ");
    attr[5] = new Attribute("RB");
    attr[6] = new Attribute("RB_JJ");
    attr[7] = new Attribute("amod");
    attr[8] = new Attribute("acomp");
    attr[9] = new Attribute("advmod");
    attr[10] = new Attribute("BLPos");
    attr[11] = new Attribute("BLNeg");
    attr[12] = new Attribute("VSPos");
    attr[13] = new Attribute("VSNeg");
    //class
    FastVector classValue = new FastVector(3);
    classValue.addElement("p");
    classValue.addElement("n");
    classValue.addElement("o");
    attr[14] = new Attribute("answer", classValue);

    FastVector attrs = new FastVector();
    attrs.addElement(attr[0]);
    attrs.addElement(attr[1]);
    attrs.addElement(attr[2]);
    attrs.addElement(attr[3]);
    attrs.addElement(attr[4]);
    attrs.addElement(attr[5]);
    attrs.addElement(attr[6]);
    attrs.addElement(attr[7]);
    attrs.addElement(attr[8]);
    attrs.addElement(attr[9]);
    attrs.addElement(attr[10]);
    attrs.addElement(attr[11]);
    attrs.addElement(attr[12]);
    attrs.addElement(attr[13]);
    attrs.addElement(attr[14]);

    // Add Instances
    Instances dataset = new Instances("my_dataset", attrs, 0);
    StringTokenizer tokenizer = new StringTokenizer(input);
    while (tokenizer.hasMoreTokens()) {
        Instance example = new Instance(15);
        for (int j = 0; j < 15; j++) {
            String st = tokenizer.nextToken();
            System.out.println(j + " " + st);
            if (j == 0)
                example.setValue(attr[j], Float.parseFloat(st));
            else if (j == 14)
                example.setValue(attr[j], st);
            else
                example.setValue(attr[j], Integer.parseInt(st));
        }
        ids += tokenizer.nextToken() + "\t";
        dataset.add(example);
    }

    //Save dataset
    String file = "Classifier\\featurefile_additive_test6.arff";
    ArffSaver saver = new ArffSaver();
    saver.setInstances(dataset);
    saver.setFile(new File(file));
    saver.writeBatch();

    //Read dataset
    ArffLoader loader = new ArffLoader();
    loader.setFile(new File(file));
    dataset = loader.getDataSet();

    //Set the class index
    dataset.setClassIndex(14);

    //Read classifier back
    String file1 = "Classifier\\classifier_asAndpolarwordsAndposAnddepAndblAndvs.model";
    InputStream is = new FileInputStream(file1);
    Classifier classifier;
    ObjectInputStream objectInputStream = new ObjectInputStream(is);
    classifier = (Classifier) objectInputStream.readObject();

    //Evaluate
    Instances test = new Instances(dataset, 0, dataset.numInstances());
    test.setClassIndex(14);

    //Do eval
    Evaluation eval = new Evaluation(test); //trainset
    eval.evaluateModel(classifier, test); //testset
    System.out.println(eval.toSummaryString());
    System.out.println("WEIGHTED F-MEASURE:" + eval.weightedFMeasure());
    System.out.println("WEIGHTED PRECISION:" + eval.weightedPrecision());
    System.out.println("WEIGHTED RECALL:" + eval.weightedRecall());

    //output predictions
    String optest = "", val = "";
    StringTokenizer op = new StringTokenizer(ids);
    int count = 0;
    while (op.hasMoreTokens()) {
        double[] prediction = classifier.distributionForInstance(test.instance(count));
        count += 1;
        if (prediction[0] > prediction[1]) {
            if (prediction[0] > prediction[2]) {
                val = "p: " + Double.toString((double) Math.round(prediction[0] * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round(prediction[2] * 1000) / 1000);
            }
        } else {
            if (prediction[1] > prediction[2]) {
                val = "n: " + Double.toString((double) Math.round(prediction[1] * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round(prediction[2] * 1000) / 1000);
            }
        }
        optest += op.nextToken() + "\t" + val + "\n";
    }
    rw.writeToFile(optest, "Answers_additive_Test6", "txt");
}
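Each csav2 example picks the predicted label with a hand-written three-way comparison over prediction[0], prediction[1], and prediction[2]. weka.core.Utils.maxIndex performs the same argmax for any number of classes and reads the label text straight from the class attribute; a minimal sketch (the helper class and method name are illustrative):

import weka.core.Instances;
import weka.core.Utils;

public class ArgmaxSketch {
    // Formats a class distribution as "label: probability", e.g. "p: 0.731".
    static String formatPrediction(Instances test, double[] prediction) {
        int best = Utils.maxIndex(prediction); // index of the largest probability
        String label = test.classAttribute().value(best); // "p", "n" or "o" above
        double rounded = (double) Math.round(prediction[best] * 1000) / 1000;
        return label + ": " + rounded;
    }
}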
From source file: cyber009.main.MainSyntacticData.java

public static void main(String[] args) {
    Random r = new Random(System.currentTimeMillis());
    Variable v = new Variable();
    long timeStart = 0, timeEnd = 0;
    ANN ann = new ANN(v, 0.014013);
    for (int f = 2; f <= 2; f++) {
        v.N = f;
        v.D = 4000;
        v.threshold = 0.0;
        cyber009.function.LinearFunction func = new cyber009.function.LinearFunction(v.N);
        v.X = new double[v.D][];
        v.TARGET = new double[v.D];
        v.WEIGHT = new double[v.N + 1];
        for (int d = 0; d < v.D; d++) {
            v.X[d] = new double[v.N + 1];
            v.X[d][0] = 1.0;
            for (int n = 1; n <= v.N; n++) {
                v.X[d][n] = r.nextGaussian();
            }
            v.TARGET[d] = func.syntacticFunction(v.X[d], v.threshold);
        }
        //v.showAll();
        //Lib.Utility.writeCSVDataSet("data/syn_data_x_" + v.N + "_d_" + v.D + ".csv", v);
        List<Attribute> atts = new ArrayList<>();
        Attribute[] att = new Attribute[v.N + 2];
        for (int i = 0; i <= v.N; i++) {
            att[i] = new Attribute("X" + i);
            atts.add(att[i]);
        }
        List<String> classValus = new ArrayList<>();
        classValus.add("1.0");
        classValus.add("0.0");
        att[v.N + 1] = new Attribute("class", classValus);
        atts.add(att[v.N + 1]);
        Instances dataSet = new Instances("Syn Data", (ArrayList<Attribute>) atts, v.D);
        for (int d = 0; d < v.D; d++) {
            Instance ins = new DenseInstance(v.N + 2);
            for (int i = 0; i <= v.N; i++) {
                ins.setValue(atts.get(i), v.X[d][i]);
            }
            ins.setValue(atts.get(v.N + 1), v.TARGET[d]);
            dataSet.add(ins);
        }
        //System.out.println(dataSet);
        PlotData2D p2D = new PlotData2D(dataSet);
        p2D.setPlotName("Syn data");
        VisualizePanel vp = new VisualizePanel();
        vp.setName("Show Data");
        try {
            vp.addPlot(p2D);
            JFrame frame = new JFrame("Show Data");
            frame.setSize(600, 600);
            frame.setVisible(true);
            frame.getContentPane().setLayout(new BorderLayout());
            frame.getContentPane().add(vp, BorderLayout.CENTER);
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.setVisible(true);
            func.showCoefficients();
        } catch (Exception ex) {
            Logger.getLogger(MainSyntacticData.class.getName()).log(Level.SEVERE, null, ex);
        }
        ann.weightReset();
        timeStart = System.currentTimeMillis();
        ann.gradientDescent(10000L, 2, v.D);
        timeEnd = System.currentTimeMillis();
        //v.showTable();
        //v.showWEIGHT();
        System.out.println("feature #:" + v.N + " time:(" + (timeEnd - timeStart) + ")");
        v.showResult();
        //func.showCoefficients();
    }
}
From source file: cyber009.main.UDAL.java

public void showData() {
    List<Attribute> atts = new ArrayList<>();
    Attribute[] att = new Attribute[v.N + 2];
    for (int i = 0; i <= v.N; i++) {
        att[i] = new Attribute("X" + i);
        atts.add(att[i]);
    }
    List<String> classValus = new ArrayList<>();
    classValus.add("1.0");
    classValus.add("0.0");
    att[v.N + 1] = new Attribute("class", classValus);
    atts.add(att[v.N + 1]);
    Instances dataSet = new Instances("Syn Data", (ArrayList<Attribute>) atts, v.D);
    for (int d = 0; d < v.D; d++) {
        Instance ins = new DenseInstance(v.N + 2);
        for (int i = 0; i <= v.N; i++) {
            ins.setValue(atts.get(i), v.X[d][i]);
        }
        ins.setValue(atts.get(v.N + 1), v.TARGET[d]);
        dataSet.add(ins);
    }
    //System.out.println(dataSet);
    PlotData2D p2D = new PlotData2D(dataSet);
    p2D.setPlotName("Syn data");
    VisualizePanel vp = new VisualizePanel();
    vp.setName("Show Data");
    try {
        vp.addPlot(p2D);
        JFrame frame = new JFrame("Show Data");
        frame.setSize(600, 600);
        frame.setVisible(true);
        frame.getContentPane().setLayout(new BorderLayout());
        frame.getContentPane().add(vp, BorderLayout.CENTER);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.setVisible(true);
        func.showCoefficients();
    } catch (Exception ex) {
        Logger.getLogger(MainSyntacticData.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file: cyber009.main.UDALNeuralNetwork.java

public void arraytoInstances() {
    List<Attribute> atts = new ArrayList<>();
    Attribute[] att = new Attribute[v.N + 2];
    for (int i = 0; i <= v.N; i++) {
        att[i] = new Attribute("X" + i);
        atts.add(att[i]);
    }
    List<String> classValus = new ArrayList<>();
    classValus.add("1.0");
    classValus.add("0.0");
    att[v.N + 1] = new Attribute("class", classValus);
    atts.add(att[v.N + 1]);
    dataSet = new Instances("Syn Data", (ArrayList<Attribute>) atts, v.D);
    dataSet.setClass(att[v.N + 1]);
    for (int d = 0; d < v.D; d++) {
        Instance ins = new DenseInstance(v.N + 2);
        for (int i = 0; i <= v.N; i++) {
            ins.setValue(atts.get(i), v.X[d][i]);
        }
        ins.setValue(atts.get(v.N + 1), v.TARGET[d]);
        dataSet.add(ins);
    }
}
From source file: cyber009.udal.functions.LinearFunction.java

/**
 * <p>Used to generate a synthetic dataset.</p>
 * @param data
 */
public void generateSyntheticDataset(Variable data) {
    List<Attribute> atts = new ArrayList<>();
    for (int n = 0; n < data.numberOfFeature; n++) {
        atts.add(new Attribute("X" + n));
    }
    List<String> classValus = new ArrayList<>();
    classValus.add("1");
    classValus.add("0");
    atts.add(new Attribute("class", classValus));
    data.unLabelDataSets = new Instances("Syn Data unlabel data set:" + data.numberOfDataset,
            (ArrayList<Attribute>) atts, data.numberOfDataset);
    data.labelDataSets = new Instances("Syn Data label data set:" + data.numberOfDataset,
            (ArrayList<Attribute>) atts, data.numberOfDataset);
    Instance set = null;
    for (int d = 0; d < data.numberOfDataset; d++) {
        set = new DenseInstance(data.numberOfFeature + 1);
        for (int n = 0; n < data.numberOfFeature; n++) {
            set.setValue(n, rand.nextGaussian());
        }
        //set.setValue(data.numberOfFeature, ); // the class value is left unset, so it stays missing ("?") and the row goes into the unlabeled set
        data.unLabelDataSets.add(set);
    }
    data.unLabelDataSets.setClassIndex(data.numberOfFeature);
}
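In this last example the class value of each generated row is never set, which is what keeps the rows unlabeled: new DenseInstance(n) starts with every value missing. If you prefer to make the missing class explicit (it prints as "?" in ARFF output), Instance.setMissing does it. A minimal sketch with an illustrative feature count:

import weka.core.DenseInstance;
import weka.core.Instance;

public class UnlabeledRowSketch {
    public static void main(String[] args) {
        int numberOfFeature = 3; // illustrative, mirrors data.numberOfFeature above
        // Every value of a fresh DenseInstance is already missing.
        Instance row = new DenseInstance(numberOfFeature + 1);
        row.setMissing(numberOfFeature); // mark the class column missing explicitly
        System.out.println(row); // prints "?,?,?,?"
    }
}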