Usage examples for the weka.core.Instance value methods:

public double value(int attIndex);
public double value(Attribute att);

Both overloads return the instance's value for the given attribute as a double (for a nominal attribute, the index of its label); the examples below mostly use the int-index form.
From source file:fantail.core.WekaLRHelper.java
License:Open Source License
public static Instances covertArff2Xarff(Instances data) {
    Instances xarffData = null;
    try {
        String userDIR = System.getProperty("user.dir");
        String randFileName = Long.toString(System.nanoTime()).substring(10)
                + ".fantail.algorithms.LRT.temp.xarff";
        String path_separator = System.getProperty("file.separator");
        String xarffPath = userDIR + path_separator + randFileName;

        int numObjects = Tools.getNumberTargets(data);

        StringBuilder sb = new StringBuilder();
        sb.append("@relation arff2xarff").append(System.getProperty("line.separator"));

        for (int i = 0; i < data.numAttributes() - 1; i++) {
            sb.append("@attribute ");
            sb.append(data.attribute(i).name());
            sb.append(" numeric").append(System.getProperty("line.separator"));
        }

        sb.append("@attribute L RANKING {");
        for (int i = 0; i < numObjects; i++) {
            String spr = ",";
            if (i == numObjects - 1) {
                spr = "";
            }
            String targetName = "T" + (i);
            sb.append(targetName).append(spr);
        }
        sb.append("}").append(System.getProperty("line.separator"));

        sb.append("@data ").append(System.getProperty("line.separator"));
        for (int i = 0; i < data.numInstances(); i++) {
            Instance inst = data.instance(i);
            for (int j = 0; j < data.numAttributes() - 1; j++) {
                sb.append(inst.value(j)).append(",");
            }
            for (int x = 1; x <= numObjects; x++) {
                int rank = x;
                String[] names = Tools.getTargetNames(inst);
                String algo = getName(rank, Tools.getTargetVector(inst), names);
                System.out.println("\t algo: " + algo + ". rank: " + rank
                        + ", Tools.getTargetVector(inst):" + Arrays.toString(Tools.getTargetVector(inst))
                        + ", names:" + Arrays.toString(names));
                String sprr = ">";
                if (x == names.length) {
                    sprr = "";
                }
                sb.append(algo).append(sprr);
            }
            sb.append(System.getProperty("line.separator"));
        }

        File file = new File(xarffPath);
        Writer output = new BufferedWriter(new FileWriter(file));
        output.write(sb.toString());
        output.close();
        System.out.println(file.getAbsoluteFile());

        weka.core.converters.XArffLoader xarffLoader = new weka.core.converters.XArffLoader();
        xarffLoader.setSource(new File(xarffPath));
        xarffData = xarffLoader.getDataSet();

        File tmpxarffFile = new File(xarffPath);
        if (tmpxarffFile.exists()) {
            tmpxarffFile.delete();
        }
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(-1);
    }
    return xarffData;
}
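The pattern to note is the inner loop sb.append(inst.value(j)): value(int) returns the j-th attribute value as a double, which is what lets the method serialize each row as text. A minimal, self-contained sketch of that same pattern, assuming a hypothetical ARFF file named "data.arff" in the working directory:

import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ValueDumpSketch {
    public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("data.arff"); // hypothetical input file
        for (int i = 0; i < data.numInstances(); i++) {
            Instance inst = data.instance(i);
            StringBuilder row = new StringBuilder();
            // dump every attribute except the last (here assumed to be the class)
            for (int j = 0; j < data.numAttributes() - 1; j++) {
                row.append(inst.value(j)).append(",");
            }
            System.out.println(row);
        }
    }
}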
From source file:fantail.core.WekaLRHelper.java
License:Open Source License
public static Instances covertArff2Xarff2(DATA_TYPE data_type, Instances data) {
    Instances xarffData = null;
    try {
        String userDIR = System.getProperty("user.dir");
        String randFileName = UUID.randomUUID().toString() + ".LRT.temp.xarff";
        String path_separator = System.getProperty("file.separator");
        String xarffPath = userDIR + path_separator + randFileName;

        int numObjects = Tools.getNumberTargets(data);

        StringBuilder sb = new StringBuilder();
        sb.append("@relation arff2xarff").append(System.getProperty("line.separator"));

        for (int i = 0; i < data.numAttributes() - 1; i++) {
            sb.append("@attribute ");
            sb.append(data.attribute(i).name());
            sb.append(" numeric").append(System.getProperty("line.separator"));
        }

        sb.append("@attribute L RANKING {");
        for (int i = 0; i < numObjects; i++) {
            String spr = ",";
            if (i == numObjects - 1) {
                spr = "";
            }
            String targetName = "T" + (i);
            sb.append(targetName).append(spr);
        }
        sb.append("}").append(System.getProperty("line.separator"));

        sb.append("@data ").append(System.getProperty("line.separator"));
        for (int i = 0; i < data.numInstances(); i++) {
            Instance inst = data.instance(i);

            // determine a ranking of the class labels
            String ranking_result = determineRanking(Tools.getTargetObjects(inst));
            if (ranking_result == null)
                continue;

            // looking at a>b>c: if the 'a' part consists of more than two
            // partial relations, we need to split them
            List<ArrayList<String>> label_collection = new ArrayList<ArrayList<String>>();

            // generate feature string
            String attr_set_str = "";
            for (int j = 0; j < data.numAttributes() - 1; j++) {
                attr_set_str += (inst.value(j) + ",");
            }

            // split label string via ">"
            String items[] = ranking_result.split(">");
            for (int j = 0; j < items.length; j++) {
                String labels[] = items[j].split("\\|");
                // if the label has two or more partial relations, split it
                ArrayList<String> label_list = new ArrayList<String>();
                if (j == 0) {
                    if (labels.length >= 2) {
                        for (int k = 0; k < labels.length; k++) {
                            label_list.add(labels[k]);
                        }
                    } else {
                        label_list.add(items[j]);
                    }
                    label_collection.add(label_list);
                } else {
                    if (labels.length >= 3) {
                        for (int k = 0; k < labels.length; k++) {
                            label_list.add(labels[k]);
                        }
                    } else {
                        label_list.add(items[j]);
                    }
                    label_collection.add(label_list);
                }
            }

            List<String> prev_items_in_label_collection = new ArrayList<String>();
            for (int j = 0; j < label_collection.size(); j++) {
                List<String> items_in_label_collection = new ArrayList<String>();
                if (j == 0) {
                    for (int k = 0; k < label_collection.get(j).size(); k++) {
                        items_in_label_collection.add(label_collection.get(j).get(k));
                    }
                } else {
                    for (int k = 0; k < label_collection.get(j).size(); k++) {
                        for (int l = 0; l < prev_items_in_label_collection.size(); l++) {
                            items_in_label_collection.add(
                                    prev_items_in_label_collection.get(l) + ">" + label_collection.get(j).get(k));
                        }
                    }
                }
                prev_items_in_label_collection = items_in_label_collection;
            }

            for (int j = 0; j < prev_items_in_label_collection.size(); j++) {
                sb.append(attr_set_str + prev_items_in_label_collection.get(j) + "\n");
            }

            InputStream is = new ByteArrayInputStream(sb.toString().getBytes());
            weka.core.converters.XArffLoader xarffLoader = new weka.core.converters.XArffLoader();
            xarffLoader.setSource(is);
            xarffData = xarffLoader.getDataSet();
        }
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(-1);
    }
    return xarffData;
}
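One quirk worth noting in this variant: the XArffLoader is constructed and run inside the per-instance loop, so the entire accumulated StringBuilder is re-parsed after every instance and only the final iteration's parse is ultimately kept. Hoisting the loader below the loop would parse the data once with the same end result.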
From source file:faster_pca.faster_pca.java
License:Open Source License
/**
 * Modified version of PrincipalComponents.fillCovariance()
 * @throws Exception
 */
protected void fillCovariance() throws Exception {
    if (!super.getCenterData()) {
        fillCorrelation();
        return;
    }

    // instead of running the Center filter, the mean is subtracted in-place below

    // compute the covariance matrix
    m_Correlation = new double[m_NumAttribs][m_NumAttribs];

    // cache all attribute values so value() is called only once per cell
    double trainInstancesCopy[][] = new double[m_NumInstances][m_NumAttribs];
    for (int i = 0; i < m_NumInstances; i++) {
        Instance in = m_TrainInstances.instance(i);
        for (int j = 0; j < m_NumAttribs; j++) {
            trainInstancesCopy[i][j] = in.value(j);
        }
    }

    // center the data by subtracting the mean
    double means[] = new double[m_NumAttribs];
    for (int j = 0; j < m_NumAttribs; j++) {
        means[j] = 0;
        for (int i = 0; i < m_NumInstances; i++) {
            // online update of the mean
            means[j] = means[j] + (trainInstancesCopy[i][j] - means[j]) / (i + 1);
        }
        for (int i = 0; i < m_NumInstances; i++) { // subtract the mean
            trainInstancesCopy[i][j] -= means[j];
        }
    }
    f_center = new fast_center(means);

    for (int i = 0; i < m_NumAttribs; i++) {
        for (int j = 0; j <= i; j++) {
            double cov = 0;
            for (int k = 0; k < m_NumInstances; k++) {
                // covers i == j as well; the original's separate branches were identical
                cov += trainInstancesCopy[k][i] * trainInstancesCopy[k][j];
            }
            cov /= m_TrainInstances.numInstances() - 1;
            m_Correlation[i][j] = cov;
            m_Correlation[j][i] = cov;
        }
    }
}
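The point of trainInstancesCopy is to pull every value(j) call out of the O(numAttribs² · numInstances) covariance loop, so each attribute value is fetched exactly once. A minimal sketch of the same cache-then-compute idea, assuming an already-loaded Instances object with numeric attributes only:

import weka.core.Instances;

public class CovarianceSketch {
    /** Returns the sample covariance matrix over all attributes. */
    public static double[][] covariance(Instances data) {
        int n = data.numInstances(), m = data.numAttributes();
        double[][] vals = new double[n][m];
        double[] mean = new double[m];
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < m; j++) {
                vals[i][j] = data.instance(i).value(j); // cache each value once
                mean[j] += vals[i][j] / n;
            }
        }
        double[][] cov = new double[m][m];
        for (int a = 0; a < m; a++) {
            for (int b = 0; b <= a; b++) {
                double s = 0;
                for (int i = 0; i < n; i++) {
                    s += (vals[i][a] - mean[a]) * (vals[i][b] - mean[b]);
                }
                cov[a][b] = cov[b][a] = s / (n - 1);
            }
        }
        return cov;
    }
}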
From source file:faster_pca.faster_pca.java
License:Open Source License
/**
 * Modified version of PrincipalComponents.fillCorrelation()
 * @throws Exception
 */
protected void fillCorrelation() throws Exception {
    m_Correlation = new double[m_NumAttribs][m_NumAttribs];

    // cache all attribute values, one row per attribute
    double trainInstancesCopy[][] = new double[m_NumAttribs][m_NumInstances];
    for (int i = 0; i < m_NumInstances; i++) {
        Instance in = m_TrainInstances.instance(i);
        for (int j = 0; j < m_NumAttribs; j++) {
            trainInstancesCopy[j][i] = in.value(j);
        }
    }

    for (int i = 0; i < m_NumAttribs; i++) {
        for (int j = 0; j <= i; j++) {
            if (i == j) {
                m_Correlation[i][j] = 1.0;
            } else {
                double corr = Utils.correlation(trainInstancesCopy[i], trainInstancesCopy[j], m_NumInstances);
                m_Correlation[i][j] = corr;
                m_Correlation[j][i] = corr;
            }
        }
    }

    // instead of running the Standardize filter, record per-attribute minima and
    // maxima for a fast normalization pass later
    // TODO: check whether this normalization step actually needs to be performed
    double mins[] = new double[m_NumAttribs];
    double maxs[] = new double[m_NumAttribs];
    for (int j = 0; j < m_NumAttribs; j++) {
        mins[j] = Double.MAX_VALUE;
        // note: -Double.MAX_VALUE, not Double.MIN_VALUE (the smallest positive
        // double), which would break the max for all-negative attributes
        maxs[j] = -Double.MAX_VALUE;
        for (int i = 0; i < m_NumInstances; i++) {
            double val = trainInstancesCopy[j][i];
            if (val < mins[j])
                mins[j] = val;
            if (val > maxs[j])
                maxs[j] = val;
        }
    }
    f_norm = new fast_normalize(mins, maxs);
}
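weka.core.Utils.correlation(a, b, n) computes the correlation of the first n entries of two arrays, which is why the cache above is laid out with one row per attribute. A minimal usage sketch over two attribute columns, assuming a loaded Instances object:

import weka.core.Instances;
import weka.core.Utils;

public class CorrelationSketch {
    /** Correlation between attributes a and b of the data set. */
    public static double attrCorrelation(Instances data, int a, int b) {
        int n = data.numInstances();
        double[] colA = new double[n];
        double[] colB = new double[n];
        for (int i = 0; i < n; i++) {
            colA[i] = data.instance(i).value(a);
            colB[i] = data.instance(i).value(b);
        }
        return Utils.correlation(colA, colB, n);
    }
}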
From source file:faster_pca.faster_pca.java
License:Open Source License
/**
 * Transform an instance in original (unnormalized) format.
 *
 * @param instance an instance in the original (unnormalized) format
 * @return a transformed instance
 * @throws Exception if the instance can't be transformed
 */
protected Instance convertInstance(Instance instance) throws Exception {
    Instance result;
    double[] newVals = new double[m_OutputNumAtts];
    Instance tempInst = (Instance) instance.copy();

    // (the ReplaceMissing filter from the original implementation is skipped here)
    m_NominalToBinaryFilter.input(tempInst);
    m_NominalToBinaryFilter.batchFinished();
    tempInst = m_NominalToBinaryFilter.output();

    if (m_AttributeFilter != null) {
        m_AttributeFilter.input(tempInst);
        m_AttributeFilter.batchFinished();
        tempInst = m_AttributeFilter.output();
    }

    if (!super.getCenterData()) {
        tempInst = f_norm.filter(tempInst);
    } else {
        tempInst = f_center.filter(tempInst);
    }

    if (m_HasClass) {
        newVals[m_OutputNumAtts - 1] = instance.value(instance.classIndex());
    }

    int numAttsLowerBound;
    if (m_MaxAttributes > 0) {
        numAttsLowerBound = m_NumAttribs - m_MaxAttributes;
    } else {
        numAttsLowerBound = 0;
    }
    if (numAttsLowerBound < 0) {
        numAttsLowerBound = 0;
    }

    // cache the filtered values once rather than calling value() inside the
    // projection loop
    double tempInstCpy[] = new double[m_NumAttribs];
    for (int j = 0; j < m_NumAttribs; j++) {
        tempInstCpy[j] = tempInst.value(j);
    }

    double cumulative = 0;
    for (int i = m_NumAttribs - 1; i >= numAttsLowerBound; i--) {
        double tempval = 0.0;
        for (int j = 0; j < m_NumAttribs; j++) {
            tempval += m_Eigenvectors[j][m_SortedEigens[i]] * tempInstCpy[j];
        }
        newVals[m_NumAttribs - i - 1] = tempval;
        cumulative += m_Eigenvalues[m_SortedEigens[i]];
        if ((cumulative / m_SumOfEigenValues) >= m_CoverVariance) {
            break;
        }
    }

    // create the transformed instance
    if (instance instanceof SparseInstance) {
        result = new SparseInstance(instance.weight(), newVals);
    } else {
        result = new DenseInstance(instance.weight(), newVals);
    }
    return result;
}
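Two design points in this method: the projection loop breaks as soon as cumulative eigenvalue coverage reaches m_CoverVariance, so trailing entries of newVals can simply remain 0.0; and copying tempInst.value(j) into tempInstCpy keeps the O(numAttribs²) projection from paying the per-call overhead of value() on every multiply.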
From source file:FeatureSelection.ReliefFAttributeEval.java
License:Open Source License
/**
 * Update attribute weights given an instance when the class is numeric.
 *
 * @param instNum the index of the instance to use when updating weights
 */
private void updateWeightsNumericClass(int instNum) {
    int i, j;
    double temp, temp2;
    int[] tempSorted = null;
    double[] tempDist = null;
    double distNorm = 1.0;
    int firstI, secondI;

    Instance inst = m_trainInstances.instance(instNum);

    // sort nearest neighbours and set up normalization variable
    if (m_weightByDistance) {
        tempDist = new double[m_stored[0]];
        for (j = 0, distNorm = 0; j < m_stored[0]; j++) {
            // copy the distances
            tempDist[j] = m_karray[0][j][0];
            // sum normalizer
            distNorm += m_weightsByRank[j];
        }
        tempSorted = Utils.sort(tempDist);
    }

    for (i = 0; i < m_stored[0]; i++) {
        // P diff prediction (class) given nearest instances
        if (m_weightByDistance) {
            temp = difference(m_classIndex, inst.value(m_classIndex),
                    m_trainInstances.instance((int) m_karray[0][tempSorted[i]][1]).value(m_classIndex));
            temp *= (m_weightsByRank[i] / distNorm);
        } else {
            temp = difference(m_classIndex, inst.value(m_classIndex),
                    m_trainInstances.instance((int) m_karray[0][i][1]).value(m_classIndex));
            temp *= (1.0 / (double) m_stored[0]); // equal influence
        }
        m_ndc += temp;

        Instance cmp = (m_weightByDistance)
                ? m_trainInstances.instance((int) m_karray[0][tempSorted[i]][1])
                : m_trainInstances.instance((int) m_karray[0][i][1]);

        double temp_diffP_diffA_givNearest = difference(m_classIndex, inst.value(m_classIndex),
                cmp.value(m_classIndex));

        // now the attributes
        for (int p1 = 0, p2 = 0; p1 < inst.numValues() || p2 < cmp.numValues();) {
            if (p1 >= inst.numValues()) {
                firstI = m_trainInstances.numAttributes();
            } else {
                firstI = inst.index(p1);
            }
            if (p2 >= cmp.numValues()) {
                secondI = m_trainInstances.numAttributes();
            } else {
                secondI = cmp.index(p2);
            }
            if (firstI == m_trainInstances.classIndex()) {
                p1++;
                continue;
            }
            if (secondI == m_trainInstances.classIndex()) {
                p2++;
                continue;
            }
            temp = 0.0;
            temp2 = 0.0;
            if (firstI == secondI) {
                j = firstI;
                temp = difference(j, inst.valueSparse(p1), cmp.valueSparse(p2));
                p1++;
                p2++;
            } else if (firstI > secondI) {
                j = secondI;
                temp = difference(j, 0, cmp.valueSparse(p2));
                p2++;
            } else {
                j = firstI;
                temp = difference(j, inst.valueSparse(p1), 0);
                p1++;
            }
            temp2 = temp_diffP_diffA_givNearest * temp;

            // P of different prediction and different att value given nearest instances
            if (m_weightByDistance) {
                temp2 *= (m_weightsByRank[i] / distNorm);
            } else {
                temp2 *= (1.0 / (double) m_stored[0]); // equal influence
            }
            m_ndcda[j] += temp2;

            // P of different attribute val given nearest instances
            if (m_weightByDistance) {
                temp *= (m_weightsByRank[i] / distNorm);
            } else {
                temp *= (1.0 / (double) m_stored[0]); // equal influence
            }
            m_nda[j] += temp;
        }
    }
}
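The two-pointer loop above walks a pair of (possibly sparse) instances by their stored values: index(p) gives the attribute index of the p-th stored value and valueSparse(p) gives its value, so zero entries of a SparseInstance are never materialized. A minimal sketch of that traversal for a single instance, assuming any Instance inst:

import weka.core.Instance;

public class SparseWalkSketch {
    /** Prints only the explicitly stored (non-zero, for sparse) values. */
    public static void printStored(Instance inst) {
        for (int p = 0; p < inst.numValues(); p++) {
            int att = inst.index(p);        // attribute index of stored value p
            double v = inst.valueSparse(p); // same as inst.value(att)
            System.out.println(att + " -> " + v);
        }
    }
}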
From source file:ffnn.FFNNTubesAI.java
@Override
public void buildClassifier(Instances i) throws Exception {
    Instance temp_instance = null;
    RealMatrix error_output;
    RealMatrix error_hidden;
    RealMatrix input_matrix;
    RealMatrix hidden_matrix;
    RealMatrix output_matrix;
    Instances temp_instances;
    int r = 0;
    Scanner scan = new Scanner(System.in);

    output_layer = i.numDistinctValues(i.classIndex()); // e.g. 3
    temp_instances = filterNominalNumeric(i);

    if (output_layer == 2) {
        // binary problems get a dummy attribute so the class can be one-hot encoded
        Add filter = new Add();
        filter.setAttributeIndex("last");
        filter.setAttributeName("dummy");
        filter.setInputFormat(temp_instances);
        temp_instances = Filter.useFilter(temp_instances, filter);
        for (int j = 0; j < temp_instances.numInstances(); j++) {
            if (temp_instances.instance(j).value(temp_instances.numAttributes() - 2) == 0) {
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 1);
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 0);
            } else {
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 0);
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 1);
            }
        }
    }

    input_layer = temp_instances.numAttributes() - output_layer; // e.g. 4
    hidden_layer = 0;
    while (hidden_layer < 1) {
        System.out.print("Hidden layer : ");
        hidden_layer = scan.nextInt();
    }
    int init_hidden = hidden_layer;
    error_hidden = new BlockRealMatrix(1, hidden_layer);
    error_output = new BlockRealMatrix(1, output_layer);
    input_matrix = new BlockRealMatrix(1, input_layer + 1); // +1 for the bias input

    buildWeight(input_layer, hidden_layer, output_layer);

    long last_time = System.nanoTime();
    double last_error_rate = 1;
    double best_error_rate = 1;
    double last_update = System.nanoTime();

    for (long itr = 0; itr < 50000; ++itr) {
        if (r == 10) {
            break;
        }
        long time = System.nanoTime();
        if (time - last_time > 2000000000) {
            Evaluation eval = new Evaluation(i);
            eval.evaluateModel(this, i);
            double accry = eval.correct() / eval.numInstances();
            if (eval.errorRate() < last_error_rate) {
                last_update = System.nanoTime();
                if (eval.errorRate() < best_error_rate)
                    SerializationHelper.write(accry + "-" + time + ".model", this);
            }
            if (accry > 0)
                last_error_rate = eval.errorRate();

            // 30 seconds without improvement: restart with new random hyperparameters
            if (time - last_update > 30000000000L) {
                last_update = System.nanoTime();
                learning_rate = random() * 0.05;
                hidden_layer = (int) (10 + floor(random() * 15));
                // note: integer division, so this is 0 (then clamped to 1)
                // whenever hidden_layer < 25
                hidden_layer = (int) floor((hidden_layer / 25) * init_hidden);
                if (hidden_layer == 0) {
                    hidden_layer = 1;
                }
                itr = 0;
                System.out.println("RESTART " + learning_rate + " " + hidden_layer);
                buildWeight(input_layer, hidden_layer, output_layer);
                r++;
            }
            System.out.println(accry + " " + itr);
            last_time = time;
        }

        for (int j = 0; j < temp_instances.numInstances(); j++) {
            // forward pass
            temp_instance = temp_instances.instance(j);
            for (int k = 0; k < input_layer; k++) {
                input_matrix.setEntry(0, k, temp_instance.value(k));
            }
            input_matrix.setEntry(0, input_layer, 1.0); // bias
            hidden_matrix = input_matrix.multiply(weight1);
            for (int y = 0; y < hidden_layer; ++y) {
                hidden_matrix.setEntry(0, y, sig(hidden_matrix.getEntry(0, y)));
            }
            output_matrix = hidden_matrix.multiply(weight2).add(bias2);
            for (int y = 0; y < output_layer; ++y) {
                output_matrix.setEntry(0, y, sig(output_matrix.getEntry(0, y)));
            }

            // backward pass
            // output-layer error
            double total_err = 0; // accumulated but not used further
            for (int k = 0; k < output_layer; k++) {
                double o = output_matrix.getEntry(0, k);
                double t = temp_instance.value(input_layer + k);
                double err = o * (1 - o) * (t - o);
                total_err += err * err;
                error_output.setEntry(0, k, err);
            }
            // back-propagate to layer-2 weights
            for (int y = 0; y < hidden_layer; y++) {
                for (int x = 0; x < output_layer; ++x) {
                    double wold = weight2.getEntry(y, x);
                    double correction = learning_rate * error_output.getEntry(0, x)
                            * hidden_matrix.getEntry(0, y);
                    weight2.setEntry(y, x, wold + correction);
                }
            }
            for (int x = 0; x < output_layer; ++x) {
                // the bias input is treated as 1
                double correction = learning_rate * error_output.getEntry(0, x);
                bias2.setEntry(0, x, bias2.getEntry(0, x) + correction);
            }
            // hidden-layer error
            for (int k = 0; k < hidden_layer; ++k) {
                double o = hidden_matrix.getEntry(0, k);
                double t = 0;
                for (int x = 0; x < output_layer; ++x) {
                    t += error_output.getEntry(0, x) * weight2.getEntry(k, x);
                }
                double err = o * (1 - o) * t;
                error_hidden.setEntry(0, k, err);
            }
            // back-propagate to layer-1 weights
            for (int y = 0; y < input_layer + 1; ++y) {
                for (int x = 0; x < hidden_layer; ++x) {
                    double wold = weight1.getEntry(y, x);
                    double correction = learning_rate * error_hidden.getEntry(0, x)
                            * input_matrix.getEntry(0, y);
                    weight1.setEntry(y, x, wold + correction);
                }
            }
        }
    }
}
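The backward pass is the standard delta rule for sigmoid units: with output o = sig(net) and target t, the error term is o * (1 - o) * (t - o), because sig'(net) = o * (1 - o). A tiny self-contained sketch of one such weight update, with made-up values (w, input, t are illustrative, not from the code above):

public class DeltaRuleSketch {
    static double sig(double x) {
        return 1.0 / (1.0 + Math.exp(-x));
    }

    public static void main(String[] args) {
        double learningRate = 0.05;
        double w = 0.3, input = 0.7;        // one weight and its input activation
        double o = sig(w * input);          // unit output
        double t = 1.0;                     // desired target
        double err = o * (1 - o) * (t - o); // delta for a sigmoid unit
        w += learningRate * err * input;    // gradient step that reduces squared error
        System.out.println("updated weight: " + w);
    }
}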
From source file:ffnn.FFNNTubesAI.java
@Override
public double[] distributionForInstance(Instance instance) throws Exception {
    RealMatrix input_matrix = new BlockRealMatrix(1, input_layer + 1);
    instance = filterNominalNumeric(instance);
    for (int k = 0; k < input_layer; k++) {
        input_matrix.setEntry(0, k, instance.value(k));
    }
    input_matrix.setEntry(0, input_layer, 1.0); // bias

    RealMatrix hidden_matrix = input_matrix.multiply(weight1);
    for (int y = 0; y < hidden_layer; ++y) {
        hidden_matrix.setEntry(0, y, sig(hidden_matrix.getEntry(0, y)));
    }
    RealMatrix output_matrix = hidden_matrix.multiply(weight2).add(bias2);
    for (int y = 0; y < output_layer; ++y) {
        output_matrix.setEntry(0, y, sig(output_matrix.getEntry(0, y)));
    }
    double[][] m = output_matrix.getData();
    return m[0];
}
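Note that the outputs here are independent sigmoid activations rather than a softmax, so the returned array is not guaranteed to sum to 1, which WEKA callers of distributionForInstance typically expect. If a proper distribution is needed, one option would be to normalize m[0] (for example with weka.core.Utils.normalize) before returning.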
From source file:ffnn.MultilayerPerceptron.java
License:Open Source License
/**
 * Sets the m_numeric flag to represent the passed class. It also performs
 * normalization of the attributes if applicable and sets up the info needed
 * to normalize the class. (Note that regardless of the options it will fill
 * an array with the range and base, set to normalize all attributes and the
 * class to be between -1 and 1.)
 *
 * @param inst the instances.
 * @return The modified instances. If the attributes are normalized then deep
 *         copies are made of all the instances, which need to be passed back out.
 */
private Instances setClassType(Instances inst) throws Exception {
    if (inst != null) {
        // x bounds
        m_attributeRanges = new double[inst.numAttributes()];
        m_attributeBases = new double[inst.numAttributes()];
        for (int noa = 0; noa < inst.numAttributes(); noa++) {
            double min = Double.POSITIVE_INFINITY;
            double max = Double.NEGATIVE_INFINITY;
            for (int i = 0; i < inst.numInstances(); i++) {
                if (!inst.instance(i).isMissing(noa)) {
                    double value = inst.instance(i).value(noa);
                    if (value < min) {
                        min = value;
                    }
                    if (value > max) {
                        max = value;
                    }
                }
            }
            m_attributeRanges[noa] = (max - min) / 2;
            m_attributeBases[noa] = (max + min) / 2;
        }

        if (m_normalizeAttributes) {
            for (int i = 0; i < inst.numInstances(); i++) {
                Instance currentInstance = inst.instance(i);
                double[] instance = new double[inst.numAttributes()];
                for (int noa = 0; noa < inst.numAttributes(); noa++) {
                    if (noa != inst.classIndex()) {
                        if (m_attributeRanges[noa] != 0) {
                            instance[noa] = (currentInstance.value(noa) - m_attributeBases[noa])
                                    / m_attributeRanges[noa];
                        } else {
                            instance[noa] = currentInstance.value(noa) - m_attributeBases[noa];
                        }
                    } else {
                        instance[noa] = currentInstance.value(noa);
                    }
                }
                inst.set(i, new DenseInstance(currentInstance.weight(), instance));
            }
        }

        m_numeric = inst.classAttribute().isNumeric();
    }
    return inst;
}
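The range/base pair implements a linear map onto [-1, 1]: base is the midpoint of [min, max] and range is half its width, so (value - base) / range sends min to -1 and max to +1. A worked one-attribute sketch with made-up numbers:

public class RangeBaseSketch {
    public static void main(String[] args) {
        double min = 10, max = 30;
        double range = (max - min) / 2; // 10
        double base = (max + min) / 2;  // 20
        for (double v : new double[] { 10, 20, 30 }) {
            System.out.println(v + " -> " + (v - base) / range); // -1.0, 0.0, 1.0
        }
    }
}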
From source file:FFNN.MultiplePerceptron.java
public void updateBobot(Instance i) {
    ArrayList<Double> listInput = new ArrayList<>();

    // fill listInput with a bias term followed by the instance's attribute values
    listInput.add(1.0);
    for (int index = 0; index < i.numAttributes() - 1; index++)
        listInput.add(i.value(index));

    // hidden-layer weights
    for (int index = 0; index < listNodeHidden.size(); index++) {
        for (int indexDalem = 0; indexDalem < listNodeHidden.get(index).getWeightSize(); indexDalem++) {
            double delta = learningRate * listNodeHidden.get(index).getError()
                    * listInput.get(indexDalem);
            double newWeight = delta + listNodeHidden.get(index).getWeightFromList(indexDalem);
            listNodeHidden.get(index).setWeight(indexDalem, newWeight);
        }
    }

    // output-layer weights
    for (int index = 0; index < listNodeOutput.size(); index++) {
        for (int indexDalem = 0; indexDalem < listNodeHidden.size(); indexDalem++) {
            double delta = learningRate * listNodeOutput.get(index).getError()
                    * listNodeHidden.get(indexDalem).getValue();
            double newWeight = delta + listNodeOutput.get(index).getWeightFromList(indexDalem);
            listNodeOutput.get(index).setWeight(indexDalem, newWeight);
        }
    }
}
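Note the indexing convention this update relies on: listInput.get(0) is the fixed bias input (1.0), so attribute j of the instance lands at listInput index j + 1 and each hidden node's weight 0 is its bias weight. The loop over i.value(index) stops at numAttributes() - 1, on the assumption that the class is the last attribute and should not feed the network.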