List of usage examples for weka.core.Instances.numAttributes()
public int numAttributes()
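Before the examples below, a minimal self-contained sketch of the common idiom: numAttributes() returns the number of attributes in the dataset, including the class attribute, and is typically used to iterate over attributes by index or to set the class index to the last attribute. The ARFF path here is a placeholder assumption, not taken from the examples.

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class NumAttributesSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder path; point this at any ARFF file.
        Instances data = DataSource.read("data/iris.arff");
        // numAttributes() counts all attributes, class attribute included.
        System.out.println("Number of attributes: " + data.numAttributes());
        // Common idiom: treat the last attribute as the class.
        data.setClassIndex(data.numAttributes() - 1);
        // Iterate over all attributes by index.
        for (int i = 0; i < data.numAttributes(); i++) {
            System.out.println(i + ". " + data.attribute(i).name());
        }
    }
}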
From source file:FFNN.MultiplePerceptron.java
public MultiplePerceptron(int itt, double learn, int numHLayer, Instances i) {
    listNodeHidden = new ArrayList<>(); // initialize listNodeHidden
    listNodeOutput = new ArrayList<>();
    itteration = itt;
    learningRate = learn;
    numHiddenLayer = numHLayer;
    // create the hidden-layer neurons, plus 1 for the bias neuron
    for (int hiddenLayer = 0; hiddenLayer < numHiddenLayer + 1; hiddenLayer++) {
        listNodeHidden.add(new Node(i.numAttributes()));
    }
    // create the output-layer neurons, one per class
    for (int numInstance = 0; numInstance < i.numClasses(); numInstance++) {
        listNodeOutput.add(new Node(listNodeHidden.size()));
    }
    target = new ArrayList<>();
    instancesToDouble = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        instancesToDouble[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
}
From source file:FFNN.MultiplePerceptron.java
@Override
public void buildClassifier(Instances i) {
    // training iterations
    for (int itt = 0; itt < itteration; itt++) {
        // System.out.println("Iteration " + itt);
        for (int indexInstance = 0; indexInstance < i.numInstances(); indexInstance++) {
            ArrayList<Double> listInput = new ArrayList<>();
            // fill listInput with the attribute values of the instance
            listInput.add(1.0); // bias input
            for (int index = 0; index < i.numAttributes() - 1; index++)
                listInput.add(i.get(indexInstance).value(index));
            ArrayList<Double> listOutputHidden = new ArrayList<>();
            listOutputHidden.add(1.0); // bias input
            listNodeHidden.get(0).setValue(1.0); // the bias output must not change
            // compute the hidden-layer outputs (skip the bias node at index 0)
            for (int index = 1; index < listNodeHidden.size(); index++) {
                double value = listNodeHidden.get(index).output(listInput);
                listNodeHidden.get(index).setValue(value);
                listOutputHidden.add(value);
                // System.out.println("neuron " + index + " " + value);
            }
            // compute the output-layer outputs
            for (int index = 0; index < listNodeOutput.size(); index++) {
                double value = listNodeOutput.get(index).output(listOutputHidden);
                listNodeOutput.get(index).setValue(value);
                // System.out.print(value + " ");
            }
            calculateError(indexInstance);
            updateBobot(i.instance(indexInstance));
        }
    }
    for (int idx = 0; idx < listNodeHidden.size(); idx++) {
        System.out.println("Hidden value " + listNodeHidden.get(idx).getValue());
        System.out.println("Hidden error " + listNodeHidden.get(idx).getError());
        for (int idx2 = 0; idx2 < listNodeHidden.get(idx).getWeightSize(); idx2++)
            System.out.println("Hidden weight " + listNodeHidden.get(idx).getWeightFromList(idx2));
    }
    System.out.println();
    for (int idx = 0; idx < listNodeOutput.size(); idx++) {
        System.out.println("Output value " + listNodeOutput.get(idx).getValue());
        System.out.println("Output error " + listNodeOutput.get(idx).getError());
        for (int idx2 = 0; idx2 < listNodeOutput.get(idx).getWeightSize(); idx2++)
            System.out.println("Output weight " + listNodeOutput.get(idx).getWeightFromList(idx2));
    }
}
From source file:FFNN.MultiplePerceptron.java
public static void main(String args[]) throws Exception {
    // System.out.println("enter number of layers (0/1):");
    // Scanner input = new Scanner(System.in);
    // int layer = input.nextInt();
    // System.out.println("enter learning rate");
    // double rate = input.nextDouble();
    // int hidden = 0;
    // if (layer == 1) {
    //     System.out.println("enter number of neurons in the hidden layer");
    //     hidden = input.nextInt();
    // }
    // System.out.print("Enter file name: ");
    // String filename = input.next();
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\iris.arff");
    Instances train = source.getDataSet();
    // Normalize nm = new Normalize();
    // nm.setInputFormat(train);
    // train = Filter.useFilter(train, nm);
    for (int i = 0; i < train.numAttributes(); i++)
        System.out.println(i + ". " + train.attribute(i).name());
    System.out.print("Enter class index: ");
    // int classIdx = input.nextInt();
    train.setClassIndex(train.numAttributes() - 1);
    MultiplePerceptron mlp = new MultiplePerceptron(10000, 1, 13, train);
    mlp.buildClassifier(train);
    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(mlp, train);
    System.out.println(eval.toSummaryString());
    // System.out.println(eval.toMatrixString());
}
From source file:ffnn.TucilWeka.java
public static Instances readDataSet(String filepath) {
    // Read the dataset
    Instances data = null;
    try {
        data = DataSource.read(filepath);
    } catch (Exception ex) {
        Logger.getLogger(TucilWeka.class.getName()).log(Level.SEVERE, null, ex);
    }
    data.setClassIndex(data.numAttributes() - 1);
    return data;
}
From source file:ffnn.TucilWeka.java
public static Instances createInstances(int max) {
    // List of attributes and list of class values for the header.
    // Number of attributes: 4 without the class, 5 with it.
    ArrayList<Attribute> attrs = new ArrayList<Attribute>(5);
    ArrayList<String> classVal = new ArrayList<String>();
    // Add the possible class values to the list
    classVal.add("Iris-setosa");
    classVal.add("Iris-versicolor");
    classVal.add("Iris-virginica");
    // Add the attributes to the list
    Attribute sepallength = new Attribute("sepallength");
    attrs.add(sepallength); // numeric attribute
    Attribute sepalwidth = new Attribute("sepalwidth");
    attrs.add(sepalwidth); // numeric attribute
    Attribute petallength = new Attribute("petallength");
    attrs.add(petallength); // numeric attribute
    Attribute petalwidth = new Attribute("petalwidth");
    attrs.add(petalwidth); // numeric attribute
    Attribute classValue = new Attribute("@@class@@", classVal);
    attrs.add(classValue); // nominal class attribute
    // Constructor takes a name, a list of attributes, and an initial capacity
    Instances dataRaw = new Instances("irisNew", attrs, 0); // empty Instances
    dataRaw.setClassIndex(dataRaw.numAttributes() - 1);
    Scanner scan = new Scanner(System.in);
    for (int i = 0; i < max; i++) {
        // Weka stores instance values as doubles
        double temp;
        Instance inst = new DenseInstance(dataRaw.numAttributes());
        System.out.println("Sepallength:");
        temp = scan.nextDouble();
        inst.setValue(sepallength, temp);
        System.out.println("Sepalwidth:");
        temp = scan.nextDouble();
        inst.setValue(sepalwidth, temp);
        System.out.println("Petallength:");
        temp = scan.nextDouble();
        inst.setValue(petallength, temp);
        System.out.println("Petalwidth:");
        temp = scan.nextDouble();
        inst.setValue(petalwidth, temp);
        // 0 -> setosa, 1 -> versicolor, 2 -> virginica
        // inst.setValue(classValue, temp); // not actually needed here
        // Add the instance to the Instances
        dataRaw.add(inst);
    }
    return dataRaw;
}
From source file:fr.loria.synalp.jtrans.phonetiseur.Classifieurs.java
License:Open Source License
private Instances loadInstances(String partialFilename) throws IOException {
    final String filename = repertoireFichiersARFF + partialFilename + ".arff";
    // This may be overkill, but since Weka doesn't specify what charset
    // it will use, make sure we read the file as UTF-8.
    ArffLoader loader = new ArffLoader() {
        {
            m_sourceReader = new InputStreamReader(new FileInputStream(filename), "UTF-8");
        }
    };
    Instances i = loader.getStructure();
    i.setClassIndex(i.numAttributes() - 1);
    return i;
}
From source file:fr.loria.synalp.jtrans.phonetiseur.Classifieurs.java
License:Open Source License
private void entrainerClassifieurSimpleOuDoublePhoneme(String repertoireFichiersARFF) throws Exception {
    Instances instances;
    DataSource source = new DataSource(
            repertoireFichiersARFF + Configuration.NOM_FICHIER_ARFF_SIMPLE_OU_DOUBLE_PHONEME + ".arff");
    instances = source.getDataSet();
    // Define the output (the last attribute)
    instances.setClassIndex(instances.numAttributes() - 1);
    // Keep only certain attributes
    instances = appliquerFiltre(filtreSimpleOuDoublePhoneme, instances);
    // Start training
    classifieurSimpleOuDoublePhoneme = new J48();
    classifieurSimpleOuDoublePhoneme.buildClassifier(instances);
}
From source file:fr.loria.synalp.jtrans.phonetiseur.Classifieurs.java
License:Open Source License
private void entrainerClassifieurDoublePhoneme1er(String repertoireFichiersARFF) throws Exception {
    Instances instances;
    DataSource source = new DataSource(
            repertoireFichiersARFF + Configuration.NOM_FICHIER_ARFF_1er_DOUBLE_PHONEME + ".arff");
    instances = source.getDataSet();
    // Define the output (the last attribute)
    instances.setClassIndex(instances.numAttributes() - 1);
    // Keep only certain attributes
    instances = appliquerFiltre(filtreDoublePhoneme1er, instances);
    // Start training
    classifieurDoublePhoneme1er = new J48();
    classifieurDoublePhoneme1er.buildClassifier(instances);
}
From source file:fr.loria.synalp.jtrans.phonetiseur.Classifieurs.java
License:Open Source License
private void entrainerClassifieurDoublePhoneme2eme(String repertoireFichiersARFF) throws Exception {
    Instances instances;
    DataSource source = new DataSource(
            repertoireFichiersARFF + Configuration.NOM_FICHIER_ARFF_2eme_DOUBLE_PHONEME + ".arff");
    instances = source.getDataSet();
    // Define the output (the last attribute)
    instances.setClassIndex(instances.numAttributes() - 1);
    // Keep only certain attributes
    instances = appliquerFiltre(filtreDoublePhoneme2eme, instances);
    // Start training
    classifieurDoublePhoneme2eme = new J48();
    classifieurDoublePhoneme2eme.buildClassifier(instances);
}
From source file:fr.loria.synalp.jtrans.phonetiseur.Classifieurs.java
License:Open Source License
private void entrainerClassifieurSimplesPhonemes(String repertoireFichiersARFF) throws Exception {
    Instances instances;
    DataSource source = null;
    tClassifieurSimplePhoneme = new J48[lexique.getNbGraphemes()];
    for (int i = 0; i < lexique.getNbGraphemes(); i++) {
        String graphemeCourant = lexique.getGraphemeFromIndice(i);
        try {
            source = new DataSource(repertoireFichiersARFF + Configuration.NOM_FICHIER_ARFF_SIMPLE_PHONEME
                    + "_" + graphemeCourant + ".arff");
        } catch (Exception e) {
            // File not found
            System.out.println("No file " + repertoireFichiersARFF
                    + Configuration.NOM_FICHIER_ARFF_SIMPLE_PHONEME + "_" + graphemeCourant + ".arff");
            source = null;
        }
        if (source != null) {
            System.out.println(" * " + graphemeCourant + " (" + (i + 1) + "/" + lexique.getNbGraphemes() + ")");
            instances = source.getDataSet();
            // Define the output (the last attribute)
            instances.setClassIndex(instances.numAttributes() - 1);
            // Keep only certain attributes
            instances = appliquerFiltre(filtreSimplePhoneme, instances);
            // Start training
            tClassifieurSimplePhoneme[i] = new J48();
            tClassifieurSimplePhoneme[i].buildClassifier(instances);
            System.gc();
        }
    }
}