List of usage examples for weka.core.Instances.toString()
@Override
public String toString()
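The method above is weka.core.Instances.toString(), which renders a dataset as ARFF text (relation name, attribute declarations and the @data section). Most of the examples below simply print that text or write it to an .arff file. A minimal, self-contained sketch of that pattern follows; the file names iris.arff and copy.arff are placeholders, not taken from the examples.

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;

import weka.core.Instances;

public class InstancesToStringDemo {
    public static void main(String[] args) throws Exception {
        // load any ARFF file; the path here is a placeholder
        Instances data = new Instances(new BufferedReader(new FileReader("iris.arff")));
        data.setClassIndex(data.numAttributes() - 1);

        // toString() returns the whole dataset as ARFF text
        String arff = data.toString();
        System.out.println(arff);

        // the same text can be written back out as a valid .arff file
        BufferedWriter writer = new BufferedWriter(new FileWriter("copy.arff"));
        writer.write(arff);
        writer.newLine();
        writer.flush();
        writer.close();
    }
}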
From source file:BaggingImprove.java
/**
 * Bagging method.
 *
 * @param data the training data to be used for generating the bagged classifier.
 * @throws Exception if the classifier could not be built successfully
 */
public void buildClassifier(Instances data) throws Exception {
    // can classifier handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    data = new Instances(data);
    //data.deleteWithMissingClass();
    super.buildClassifier(data);

    if (m_CalcOutOfBag && (m_BagSizePercent != 100)) {
        throw new IllegalArgumentException(
                "Bag size needs to be 100% if " + "out-of-bag error is to be calculated!");
    }

    //+
    System.out.println("Classifier length " + m_Classifiers.length);

    int bagSize = data.numInstances() * m_BagSizePercent / 100;
    //+
    System.out.println("Bag Size " + bagSize);

    Random random = new Random(m_Seed);

    boolean[][] inBag = null;
    if (m_CalcOutOfBag) {
        inBag = new boolean[m_Classifiers.length][];
    }

    //+
    // initialise model naming / file for storing the bootstrap samples
    BufferedWriter writer = new BufferedWriter(new FileWriter("Bootstrap.txt"));

    for (int j = 0; j < m_Classifiers.length; j++) {
        Instances bagData = null;

        // create the in-bag dataset
        if (m_CalcOutOfBag) {
            inBag[j] = new boolean[data.numInstances()];
            //System.out.println("Inbag1 " + inBag[0][1]);
            //bagData = resampleWithWeights(data, random, inBag[j]);
            bagData = data.resampleWithWeights(random, inBag[j]);
            //System.out.println("num after resample " + bagData.numInstances());
            //+
            // for (int k = 0; k < bagData.numInstances(); k++) {
            //     System.out.println("Bag Data after resample [calc out bag]" + bagData.instance(k));
            // }
        } else {
            //+
            System.out.println("Not m_Calc out of bag");
            System.out.println("Please configure code inside!");
            bagData = data.resampleWithWeights(random);
            if (bagSize < data.numInstances()) {
                bagData.randomize(random);
                Instances newBagData = new Instances(bagData, 0, bagSize);
                bagData = newBagData;
            }
        }

        if (m_Classifier instanceof Randomizable) {
            //+
            System.out.println("Randomizable");
            ((Randomizable) m_Classifiers[j]).setSeed(random.nextInt());
        }

        // write the bootstrap sample into the file (Instances.toString() yields ARFF)
        writer.write("Bootstrap " + j);
        writer.newLine();
        writer.write(bagData.toString());
        writer.newLine();
        System.out.println("Successfully saved the bootstrap sample to file");
        System.out.println("Bootstrap " + (j + 1));
        // textarea.append("\nBootstrap " + (j + 1));
        //System.out.println("num instances the second time " + bagData.numInstances());
        for (int b = 1; b < bagData.numInstances(); b++) {
            System.out.println("" + bagData.instance(b));
            // textarea.append("\n" + bagData.instance(b));
        }

        //+
        // build the classifier
        m_Classifiers[j].buildClassifier(bagData);
        //+
        // SerializationHelper serialization = new SerializationHelper();
        // serialization.write("KnnData" + model + ".model", m_Classifiers[j]);
        // System.out.println("Finish write into model");
        // model++;
    }
    writer.flush();
    writer.close();

    // calc OOB error?
    if (getCalcOutOfBag()) {
        double outOfBagCount = 0.0;
        double errorSum = 0.0;
        boolean numeric = data.classAttribute().isNumeric();

        for (int i = 0; i < data.numInstances(); i++) {
            double vote;
            double[] votes;
            if (numeric) {
                votes = new double[1];
            } else {
                votes = new double[data.numClasses()];
            }

            // determine predictions for instance
            int voteCount = 0;
            for (int j = 0; j < m_Classifiers.length; j++) {
                if (inBag[j][i]) {
                    continue;
                }
                voteCount++;
                // double pred = m_Classifiers[j].classifyInstance(data.instance(i));
                if (numeric) {
                    // votes[0] += pred;
                    votes[0] = m_Classifiers[j].classifyInstance(data.instance(i));
                } else {
                    // votes[(int) pred]++;
                    double[] newProbs = m_Classifiers[j].distributionForInstance(data.instance(i));
                    //-
                    // for (double a : newProbs) {
                    //     System.out.println("Double new probs %.f " + a);
                    // }
                    // average the probability estimates
                    for (int k = 0; k < newProbs.length; k++) {
                        votes[k] += newProbs[k];
                    }
                }
            }
            System.out.println("Vote count " + voteCount);

            // "vote"
            if (numeric) {
                vote = votes[0];
                if (voteCount > 0) {
                    vote /= voteCount; // average
                }
            } else {
                if (!Utils.eq(Utils.sum(votes), 0)) {
                    Utils.normalize(votes);
                }
                vote = Utils.maxIndex(votes); // predicted class
                //-
                System.out.println("Vote " + vote);
            }

            // error for instance
            outOfBagCount += data.instance(i).weight();
            if (numeric) {
                errorSum += StrictMath.abs(vote - data.instance(i).classValue()) * data.instance(i).weight();
            } else if (vote != data.instance(i).classValue()) {
                //+
                System.out.println("Last vote " + data.instance(i).classValue());
                errorSum += data.instance(i).weight();
            }
        }
        m_OutOfBagError = errorSum / outOfBagCount;
    } else {
        m_OutOfBagError = 0;
    }
}
From source file:A_AdvanceMachineLearning.java
private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
    try {
        /*URL urlToTraining = this.getClass().getResourceAsStream("/" + "train.arff");
        InputStream stream = urlToTraining.openStream();*/
        InputStream stream = this.getClass().getResourceAsStream("/" + "train.arff");
        //BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
        Instances data = new Instances(new BufferedReader(new InputStreamReader(stream)));
        String data1 = data.toString();
        txtarea2.setText(data1);
        txtarea.setText("You have chosen to display an example training file: ");
    } catch (IOException ex) {
        Logger.getLogger(A_AdvanceMachineLearning.class.getName()).log(Level.SEVERE, null, ex);
    }
}//GEN-LAST:event_jButton1ActionPerformed
From source file:ab.demo.AIAssignment2.java
License:Open Source License
public GameState solve() {
    // capture Image
    BufferedImage screenshot = ActionRobot.doScreenShot();

    // process image
    Vision vision = new Vision(screenshot);

    // find the slingshot
    Rectangle sling = vision.findSlingshotMBR();

    // confirm the slingshot
    while (sling == null && aRobot.getState() == GameState.PLAYING) {
        System.out.println("No slingshot detected. Please remove pop up or zoom out");
        ActionRobot.fullyZoomOut();
        screenshot = ActionRobot.doScreenShot();
        vision = new Vision(screenshot);
        sling = vision.findSlingshotMBR();
    }

    // get all the pigs
    List<ABObject> pigs = vision.findPigsMBR();
    List<ABObject> blocks = vision.findBlocksMBR();

    GameState state = aRobot.getState();

    // if there is a sling, then play, otherwise just skip.
    if (sling != null) {
        if (!pigs.isEmpty()) { // if there are pigs in the level
            Point releasePoint = null;
            Shot shot = new Shot();
            int dx, dy;
            {
                // randomly pick a pig
                ABObject pig = pigs.get(randomGenerator.nextInt(pigs.size()));
                Point _tpt = pig.getCenter();

                // estimate the trajectory
                ArrayList<Point> pts = tp.estimateLaunchPoint(sling, _tpt);

                // define all of the wood, ice and stone in the stage
                ArrayList<ABObject> wood = new ArrayList<ABObject>();
                ArrayList<ABObject> stone = new ArrayList<ABObject>();
                ArrayList<ABObject> ice = new ArrayList<ABObject>();
                ArrayList<ABObject> tnt = new ArrayList<ABObject>();

                // initialise counters to store how many times the trajectory intersects blocks of these types
                int woodCount = 0;
                int stoneCount = 0;
                int iceCount = 0;
                int pigsCount = 0;
                int tntCount = 0;

                // populate the wood, stone and ice ArrayLists with the correct materials
                for (int i = 0; i < blocks.size(); i++) {
                    if (blocks.get(i).type == ABType.Wood)
                        wood.add(blocks.get(i));
                    if (blocks.get(i).type == ABType.Stone)
                        stone.add(blocks.get(i));
                    if (blocks.get(i).type == ABType.Ice)
                        ice.add(blocks.get(i));
                    if (blocks.get(i).type == ABType.TNT)
                        tnt.add(blocks.get(i));
                }

                // check whether the trajectory intersects any wood blocks
                for (int i = 0; i < wood.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (wood.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some wood");
                            woodCount++;
                        }
                        if (pig.contains(pts.get(j))) // if we find the pig on this point
                            j = pts.size() - 1; // stop looking for wood on the trajectory (escape for loop)
                    }
                }

                // check whether the trajectory intersects any ice blocks
                for (int i = 0; i < ice.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (ice.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some ice");
                            iceCount++;
                        }
                        if (pig.contains(pts.get(j))) // if we find the pig on this point
                            j = pts.size() - 1; // stop looking for ice on the trajectory (escape for loop)
                    }
                }

                // check whether the trajectory intersects any stone blocks
                for (int i = 0; i < stone.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (stone.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some stone");
                            stoneCount++;
                        }
                        if (pig.contains(pts.get(j))) // if we find the pig on this point
                            j = pts.size() - 1; // stop looking for stone on the trajectory (escape for loop)
                    }
                }

                // how many pigs the trajectory intersects (this should always be at least 1)
                for (int i = 0; i < pigs.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (pigs.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects a pig");
                            pigsCount++;
                        }
                    }
                }

                // how many tnt blocks the trajectory intersects
                for (int i = 0; i < tnt.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (tnt.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some tnt");
                            tntCount++;
                        }
                        if (pig.contains(pts.get(j))) // if we find the pig on this point
                            j = pts.size() - 1; // stop looking for tnt on the trajectory
                    }
                }

                StringBuilder sb = new StringBuilder();
                sb.append(pigsCount + "," + woodCount + "," + iceCount + "," + stoneCount + "," + tntCount + ",?");
                String dataEntry = sb.toString();
                try (PrintWriter out = new PrintWriter(
                        new BufferedWriter(new FileWriter("dataset/birds.level.arff", true)))) {
                    out.println(dataEntry);
                } catch (IOException e) {
                    System.out.println("Error - dataset/birds.level.arff file not found or in use!");
                }

                // indicator of whether the agent should continue this shot or not (used in the Bayes classifier)
                ArrayList<Boolean> takeShot = new ArrayList<Boolean>();

                try {
                    DataSource source = new DataSource("dataset/birds.data.arff"); // initialise the learning set for the agent
                    Instances data = source.getDataSet();
                    // setting class attribute if the data format does not provide this information
                    // For example, the XRFF format saves the class attribute information as well
                    if (data.classIndex() == -1)
                        data.setClassIndex(data.numAttributes() - 1);

                    DataSource thisLevel = new DataSource("dataset/birds.level.arff"); // initialise the data retrieved from the current level for the agent
                    Instances thisLevelData = thisLevel.getDataSet();
                    if (thisLevelData.classIndex() == -1)
                        thisLevelData.setClassIndex(data.numAttributes() - 1);

                    // build a new NaiveBayes classifier
                    NaiveBayes bayes = new NaiveBayes();
                    bayes.buildClassifier(data);

                    for (int i = 0; i < thisLevelData.numInstances(); i++) { // for all instances in the current level
                        double label = bayes.classifyInstance(thisLevelData.instance(i)); // generate an outcome/classify an instance in the current level
                        thisLevelData.instance(i).setClassValue(label); // store this outcome
                        System.out.println(thisLevelData.instance(i).stringValue(5)); // print it
                        if (!thisLevelData.instance(i).stringValue(5).equals("?")) { // if there is a decisive choice as to whether a shot should be taken
                            data.add(thisLevelData.instance(i)); // store it
                            if (thisLevelData.instance(i).stringValue(5).equals("yes")) { // if the classifier classifies a good shot, store it
                                takeShot.add(true);
                            } else { // if no, store this too
                                takeShot.add(false);
                            }
                        }
                    }

                    // add all non ? entries to the learning set
                    BufferedWriter writer = new BufferedWriter(new FileWriter("dataset/birds.data.arff"));
                    writer.write(data.toString());
                    writer.flush();
                    writer.close();
                } catch (Exception e) {
                    e.printStackTrace();
                    System.out.println("Exception caught - file handle error");
                }

                // TODO: roll a random number to determine whether we take a shot or not.
                // populated using the bayesian classification above.
                // if we roll true, continue with the random pig shot as usual.
                // if not, take a new random pig and try again.
                // TODO: implement a failsafe so the agent does not get stuck randomly choosing pigs which the bayesian classification does not allow.
                Random rng = new Random(takeShot.size());
                if (takeShot.get(rng.nextInt(takeShot.size()))) // pick a random entry within the bounds of takeShot
                    System.out.println("Taking this shot.");
                else {
                    System.out.println("Not taking this shot. Finding another random pig.");
                    return state;
                }

                // if the target is very close to before, randomly choose a point near it
                if (prevTarget != null && distance(prevTarget, _tpt) < 10) {
                    double _angle = randomGenerator.nextDouble() * Math.PI * 2;
                    _tpt.x = _tpt.x + (int) (Math.cos(_angle) * 10);
                    _tpt.y = _tpt.y + (int) (Math.sin(_angle) * 10);
                    System.out.println("Randomly changing to " + _tpt);
                }

                prevTarget = new Point(_tpt.x, _tpt.y);

                // do a high shot when entering a level to find an accurate velocity
                if (firstShot && pts.size() > 1) {
                    releasePoint = pts.get(1);
                } else if (pts.size() == 1)
                    releasePoint = pts.get(0);
                else if (pts.size() == 2) {
                    // randomly choose between the trajectories, with a 1 in 6 chance of choosing the high one
                    if (randomGenerator.nextInt(6) == 0)
                        releasePoint = pts.get(1);
                    else
                        releasePoint = pts.get(0);
                } else if (pts.isEmpty()) {
                    System.out.println("No release point found for the target");
                    System.out.println("Try a shot with 45 degree");
                    releasePoint = tp.findReleasePoint(sling, Math.PI / 4);
                }

                // Get the reference point
                Point refPoint = tp.getReferencePoint(sling);

                // Calculate the tapping time according to the bird type
                if (releasePoint != null) {
                    double releaseAngle = tp.getReleaseAngle(sling, releasePoint);
                    System.out.println("Release Point: " + releasePoint);
                    System.out.println("Release Angle: " + Math.toDegrees(releaseAngle));
                    int tapInterval = 0;
                    switch (aRobot.getBirdTypeOnSling()) {
                    case RedBird:
                        tapInterval = 0;
                        break; // start of trajectory
                    case YellowBird:
                        tapInterval = 65 + randomGenerator.nextInt(25);
                        break; // 65-90% of the way
                    case WhiteBird:
                        tapInterval = 70 + randomGenerator.nextInt(20);
                        break; // 70-90% of the way
                    case BlackBird:
                        tapInterval = 70 + randomGenerator.nextInt(20);
                        break; // 70-90% of the way
                    case BlueBird:
                        tapInterval = 65 + randomGenerator.nextInt(20);
                        break; // 65-85% of the way
                    default:
                        tapInterval = 60;
                    }

                    int tapTime = tp.getTapTime(sling, releasePoint, _tpt, tapInterval);
                    dx = (int) releasePoint.getX() - refPoint.x;
                    dy = (int) releasePoint.getY() - refPoint.y;
                    shot = new Shot(refPoint.x, refPoint.y, dx, dy, 0, tapTime);
                } else {
                    System.err.println("No Release Point Found");
                    return state;
                }
            }

            // check whether the slingshot is changed. the change of the slingshot indicates a change in the scale.
            {
                ActionRobot.fullyZoomOut();
                screenshot = ActionRobot.doScreenShot();
                vision = new Vision(screenshot);
                Rectangle _sling = vision.findSlingshotMBR();
                if (_sling != null) {
                    double scale_diff = Math.pow((sling.width - _sling.width), 2)
                            + Math.pow((sling.height - _sling.height), 2);
                    if (scale_diff < 25) {
                        if (dx < 0) {
                            aRobot.cshoot(shot);
                            state = aRobot.getState();
                            if (state == GameState.PLAYING) {
                                screenshot = ActionRobot.doScreenShot();
                                vision = new Vision(screenshot);
                                List<Point> traj = vision.findTrajPoints();
                                tp.adjustTrajectory(traj, sling, releasePoint);
                                firstShot = false;
                            }
                        }
                    } else
                        System.out.println("Scale is changed, can not execute the shot, will re-segment the image");
                } else
                    System.out.println("no sling detected, can not execute the shot, will re-segment the image");
            }
        }
    }
    return state;
}
From source file:adams.flow.sink.WekaInstanceViewer.java
License:Open Source License
/**
 * Returns the displayed instances as ARFF.
 *
 * @param panel the panel to obtain the data from
 * @return the generated ARFF content or null if no data available
 */
protected static String supplyText(InstancePanel panel) {
    InstanceContainerManager manager;
    weka.core.Instances data;
    int i;

    if (panel == null)
        return null;

    manager = panel.getContainerManager();
    if (manager.countVisible() == 0)
        return null;

    data = new weka.core.Instances(manager.getVisible(0).getData().getDatasetHeader());
    for (i = 0; i < manager.countVisible(); i++)
        data.add(manager.getVisible(i).getData().toInstance());

    return data.toString();
}
From source file:adams.opt.optimise.genetic.fitnessfunctions.AttributeSelection.java
License:Open Source License
/**
 * Callback for best measure so far
 */
@Override
public void newBest(double val, OptData opd) {
    int cnt = 0;
    int[] weights = getWeights(opd);
    Instances newInstances = new Instances(getInstances());
    for (int i = 0; i < getInstances().numInstances(); i++) {
        Instance in = newInstances.instance(i);
        cnt = 0;
        for (int a = 0; a < getInstances().numAttributes(); a++) {
            if (a == getInstances().classIndex())
                continue;
            if (weights[cnt++] == 0) {
                in.setValue(a, 0);
            } else {
                in.setValue(a, in.value(a));
            }
        }
    }
    try {
        File file = new File(getOutputDirectory().getAbsolutePath() + File.separator
                + Double.toString(getMeasure().adjust(val)) + ".arff");
        file.createNewFile();
        Writer writer = new BufferedWriter(new FileWriter(file));
        Instances header = new Instances(newInstances, 0);
        // remove filter setup
        Remove remove = new Remove();
        remove.setAttributeIndices(getRemoveAsString(weights));
        remove.setInvertSelection(true);
        header.setRelationName(OptionUtils.getCommandLine(remove));
        writer.write(header.toString());
        writer.write("\n");
        for (int i = 0; i < newInstances.numInstances(); i++) {
            writer.write(newInstances.instance(i).toString());
            writer.write("\n");
        }
        writer.flush();
        writer.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:arffcreator.arffFrame.java
private void createActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_createActionPerformed
    // TODO add your handling code here:
    FastVector atts;
    FastVector attsRel;
    FastVector attVals;
    FastVector attValsRel;
    Instances data;
    Instances dataRel;
    double[] vals;
    double[] valsRel;
    int i;

    // 1. set up attributes
    atts = new FastVector();
    // - numeric
    atts.addElement(new Attribute("att1"));
    // - nominal
    attVals = new FastVector();
    for (i = 0; i < 5; i++)
        attVals.addElement("val" + (i + 1));
    atts.addElement(new Attribute("att2", attVals));
    // - string
    atts.addElement(new Attribute("att3", (FastVector) null));
    // - date
    atts.addElement(new Attribute("att4", "yyyy-MM-dd"));
    // - relational
    attsRel = new FastVector();
    // -- numeric
    attsRel.addElement(new Attribute("att5.1"));
    // -- nominal
    attValsRel = new FastVector();
    for (i = 0; i < 5; i++)
        attValsRel.addElement("val5." + (i + 1));
    attsRel.addElement(new Attribute("att5.2", attValsRel));
    dataRel = new Instances("att5", attsRel, 0);
    atts.addElement(new Attribute("att5", dataRel, 0));

    // 2. create Instances object
    data = new Instances("MyRelation", atts, 0);

    // 3. fill with data
    // first instance
    vals = new double[data.numAttributes()];
    // - numeric
    vals[0] = Math.PI;
    // - nominal
    vals[1] = attVals.indexOf("val3");
    // - string
    vals[2] = data.attribute(2).addStringValue("This is a string!");
    try {
        // - date
        vals[3] = data.attribute(3).parseDate("2015-07-30");
    } catch (ParseException ex) {
        Logger.getLogger(arffFrame.class.getName()).log(Level.SEVERE, null, ex);
    }
    // - relational
    dataRel = new Instances(data.attribute(4).relation(), 0);
    // -- first instance
    valsRel = new double[2];
    valsRel[0] = Math.PI + 1;
    valsRel[1] = attValsRel.indexOf("val5.3");
    dataRel.add(new Instance(1.0, valsRel));
    // -- second instance
    valsRel = new double[2];
    valsRel[0] = Math.PI + 2;
    valsRel[1] = attValsRel.indexOf("val5.2");
    dataRel.add(new Instance(1.0, valsRel));
    vals[4] = data.attribute(4).addRelation(dataRel);
    // add
    data.add(new Instance(1.0, vals));

    // second instance
    vals = new double[data.numAttributes()]; // important: needs NEW array!
    // - numeric
    vals[0] = Math.E;
    // - nominal
    vals[1] = attVals.indexOf("val1");
    // - string
    vals[2] = data.attribute(2).addStringValue("And another one!");
    try {
        // - date
        vals[3] = data.attribute(3).parseDate("2015-07-30");
    } catch (ParseException ex) {
        Logger.getLogger(arffFrame.class.getName()).log(Level.SEVERE, null, ex);
    }
    // - relational
    dataRel = new Instances(data.attribute(4).relation(), 0);
    // -- first instance
    valsRel = new double[2];
    valsRel[0] = Math.E + 1;
    valsRel[1] = attValsRel.indexOf("val5.4");
    dataRel.add(new Instance(1.0, valsRel));
    // -- second instance
    valsRel = new double[2];
    valsRel[0] = Math.E + 2;
    valsRel[1] = attValsRel.indexOf("val5.1");
    dataRel.add(new Instance(1.0, valsRel));
    vals[4] = data.attribute(4).addRelation(dataRel);
    // add
    data.add(new Instance(1.0, vals));

    // 4. output data
    textArea.append(data.toString());
    dataset = data.toString();
}
From source file:com.ivanrf.smsspam.SpamClassifier.java
License:Apache License
public static String classify(String model, String text, JTextArea log) {
    FilteredClassifier classifier = loadModel(model, log);

    // Create the instance
    ArrayList<String> fvNominalVal = new ArrayList<String>();
    fvNominalVal.add("ham");
    fvNominalVal.add("spam");
    Attribute attribute1 = new Attribute("spam_class", fvNominalVal);
    Attribute attribute2 = new Attribute("text", (List<String>) null);

    ArrayList<Attribute> fvWekaAttributes = new ArrayList<Attribute>();
    fvWekaAttributes.add(attribute1);
    fvWekaAttributes.add(attribute2);

    Instances instances = new Instances("Test relation", fvWekaAttributes, 1);
    instances.setClassIndex(0);

    DenseInstance instance = new DenseInstance(2);
    instance.setValue(attribute2, text);
    instances.add(instance);

    publishEstado("=== Instance created ===", log);
    publishEstado(instances.toString(), log);

    // Classify the instance
    try {
        publishEstado("=== Classifying instance ===", log);
        double pred = classifier.classifyInstance(instances.instance(0));
        publishEstado("=== Instance classified ===", log);
        String classPredicted = instances.classAttribute().value((int) pred);
        publishEstado("Class predicted: " + classPredicted, log);
        return classPredicted;
    } catch (Exception e) {
        publishEstado("Error found when classifying the text", log);
        return null;
    }
}
From source file:de.tudarmstadt.ukp.alignment.framework.combined.WekaMachineLearning.java
License:Apache License
/**
 * Applies a serialized WEKA model file to an unlabeled .arff file for classification.
 *
 * @param input_arff the unlabeled data in an .arff file
 * @param model the serialized model file to load
 * @param output output file for the labeled data
 * @throws Exception
 */
public static void applyModelToUnlabeledArff(String input_arff, String model, String output) throws Exception {
    DataSource source = new DataSource(input_arff);
    Instances unlabeled = source.getDataSet();
    if (unlabeled.classIndex() == -1) {
        unlabeled.setClassIndex(unlabeled.numAttributes() - 1);
    }

    Remove rm = new Remove();
    rm.setAttributeIndices("1"); // remove ID attribute

    ObjectInputStream ois = new ObjectInputStream(new FileInputStream(model));
    Classifier cls = (Classifier) ois.readObject();
    ois.close();

    // create copy
    Instances labeled = new Instances(unlabeled);

    // label instances
    for (int i = 0; i < unlabeled.numInstances(); i++) {
        double clsLabel = cls.classifyInstance(unlabeled.instance(i));
        labeled.instance(i).setClassValue(clsLabel);
    }

    // save labeled data
    BufferedWriter writer = new BufferedWriter(new FileWriter(output));
    writer.write(labeled.toString());
    writer.newLine();
    writer.flush();
    writer.close();
}
From source file:development.CrossValidateShapelets.java
public void run() {
    // Perform the transform cached or online
    FullShapeletTransform st = new ShapeletTransformDistCaching();
    st.useCandidatePruning(10);
    // if (train.numInstances() >= 500 || train.numAttributes() > 500)
    //     st = new ShapeletTransform();
    st.supressOutput();
    st.setNumberOfShapelets(Math.max(train.numAttributes(), train.numInstances()));
    try {
        Instances sTrain = st.process(train);
        Instances sTest = st.process(test);
        OutFile of1 = new OutFile(path + fileName + "_TRAIN" + (fold + 1) + ".arff");
        OutFile of2 = new OutFile(path + fileName + "_TEST" + (fold + 1) + ".arff");
        of1.writeLine(sTrain.toString());
        of2.writeLine(sTest.toString());
    } catch (Exception ex) {
        Logger.getLogger(CrossValidateShapelets.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:development.CrossValidateShapelets.java
public static void doSingleTransform(int problemNum, int foldNum) {
    String fileName = DataSets.fileNames[problemNum];
    String clusterPath = "/gpfs/sys/ajb/TSC Problems/" + fileName + "/";
    String path = clusterPath;
    String shapeletPath = path + "ShapeletCV/";
    File f1 = new File(shapeletPath + fileName + "_TRAIN" + (foldNum + 1) + ".arff");
    File f2 = new File(shapeletPath + fileName + "_TEST" + (foldNum + 1) + ".arff");
    if (f1.exists() && f2.exists()) {
        System.out.println(" Transform " + foldNum + " problem " + fileName + " already exists");
        return;
    }
    Instances train = ClassifierTools.loadData(clusterPath + fileName + "_TRAIN" + (foldNum + 1));
    Instances test = ClassifierTools.loadData(clusterPath + fileName + "_TEST" + (foldNum + 1));
    FullShapeletTransform st = new ShapeletTransformDistCaching();
    // if (train.numInstances() >= 500 || train.numAttributes() > 500)
    //     st = new ShapeletTransform();
    st.supressOutput();
    st.setNumberOfShapelets(Math.max(train.numAttributes(), train.numInstances()));
    try {
        Instances sTrain = st.process(train);
        Instances sTest = st.process(test);
        OutFile of1 = new OutFile(shapeletPath + fileName + "_TRAIN" + (foldNum + 1) + ".arff");
        OutFile of2 = new OutFile(shapeletPath + fileName + "_TEST" + (foldNum + 1) + ".arff");
        of1.writeLine(sTrain.toString());
        of2.writeLine(sTest.toString());
    } catch (Exception ex) {
        Logger.getLogger(CrossValidateShapelets.class.getName()).log(Level.SEVERE, null, ex);
    }
}