List of usage examples for weka.core.Instances.insertAttributeAt
public void insertAttributeAt(Attribute att, int position)
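insertAttributeAt inserts the given attribute at the specified position (0 to numAttributes()) and sets its value to missing for every existing instance, which is why the examples below typically call setValue or setClassValue afterwards. Before the real-world examples, here is a minimal, self-contained sketch (not taken from any of the source files listed below) showing the basic call; it assumes the Weka 3.7+ API with DenseInstance and the List-based Attribute constructor.

import java.util.ArrayList;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instances;

public class InsertAttributeAtDemo {
    public static void main(String[] args) {
        // Build a tiny dataset with two numeric attributes and one instance.
        ArrayList<Attribute> attrs = new ArrayList<Attribute>();
        attrs.add(new Attribute("x1"));
        attrs.add(new Attribute("x2"));
        Instances data = new Instances("demo", attrs, 0);
        data.add(new DenseInstance(1.0, new double[] { 1.0, 2.0 }));

        // Insert a nominal class attribute at the end; the existing instance
        // gets a missing value for it until one is set explicitly.
        ArrayList<String> classValues = new ArrayList<String>();
        classValues.add("yes");
        classValues.add("no");
        data.insertAttributeAt(new Attribute("class", classValues), data.numAttributes());
        data.setClassIndex(data.numAttributes() - 1);
        data.instance(0).setValue(data.numAttributes() - 1, "yes");

        System.out.println(data);
    }
}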
From source file:moa.streams.generators.multilabel.MetaMultilabelGenerator.java
License:Open Source License
/**
 * GenerateMultilabelHeader.
 *
 * @param si single-label Instances
 */
protected MultilabelInstancesHeader generateMultilabelHeader(Instances si) {
    Instances mi = new Instances(si, 0, 0);
    mi.setClassIndex(-1);
    mi.deleteAttributeAt(mi.numAttributes() - 1);
    FastVector bfv = new FastVector();
    bfv.addElement("0");
    bfv.addElement("1");
    for (int i = 0; i < this.m_L; i++) {
        mi.insertAttributeAt(new Attribute("class" + i, bfv), i);
    }
    this.multilabelStreamTemplate = mi;
    this.multilabelStreamTemplate.setRelationName("SYN_Z" + this.labelCardinalityOption.getValue() + "L"
            + this.m_L + "X" + m_A + "S" + metaRandomSeedOption.getValue() + ": -C " + this.m_L);
    this.multilabelStreamTemplate.setClassIndex(this.m_L);
    return new MultilabelInstancesHeader(multilabelStreamTemplate, m_L);
}
From source file:mulan.transformations.IncludeLabelsTransformation.java
License:Open Source License
/**
 * @param mlData multi-label data
 * @return transformed instances
 * @throws Exception Potential exception thrown. To be handled in an upper level.
 */
public Instances transformInstances(MultiLabelInstances mlData) throws Exception {
    int numLabels = mlData.getNumLabels();
    labelIndices = mlData.getLabelIndices();
    // remove all labels
    Instances transformed = RemoveAllLabels.transformInstances(mlData);
    // add at the end an attribute with values the label names
    ArrayList<String> labelNames = new ArrayList<String>(numLabels);
    for (int counter = 0; counter < numLabels; counter++) {
        labelNames.add(mlData.getDataSet().attribute(labelIndices[counter]).name());
    }
    Attribute attrLabel = new Attribute("Label", labelNames);
    transformed.insertAttributeAt(attrLabel, transformed.numAttributes());
    // and at the end a binary attribute
    ArrayList<String> binaryValues = new ArrayList<String>(2);
    binaryValues.add("0");
    binaryValues.add("1");
    Attribute classAttr = new Attribute("Class", binaryValues);
    transformed.insertAttributeAt(classAttr, transformed.numAttributes());
    // add instances
    transformed = new Instances(transformed, 0);
    transformed.setClassIndex(transformed.numAttributes() - 1);
    Instances data = mlData.getDataSet();
    for (int instanceIndex = 0; instanceIndex < data.numInstances(); instanceIndex++) {
        for (int labelCounter = 0; labelCounter < numLabels; labelCounter++) {
            Instance temp;
            temp = RemoveAllLabels.transformInstance(data.instance(instanceIndex), labelIndices);
            temp.setDataset(null);
            temp.insertAttributeAt(temp.numAttributes());
            temp.insertAttributeAt(temp.numAttributes());
            temp.setDataset(transformed);
            temp.setValue(temp.numAttributes() - 2, (String) labelNames.get(labelCounter));
            if (data.attribute(labelIndices[labelCounter])
                    .value((int) data.instance(instanceIndex).value(labelIndices[labelCounter])).equals("1")) {
                temp.setValue(temp.numAttributes() - 1, "1");
            } else {
                temp.setValue(temp.numAttributes() - 1, "0");
            }
            transformed.add(temp);
        }
    }
    return transformed;
}
From source file:mulan.transformations.LabelPowersetTransformation.java
License:Open Source License
public Instances transformInstances(MultiLabelInstances mlData) throws Exception {
    Instances data = mlData.getDataSet();
    int numLabels = mlData.getNumLabels();
    int[] labelIndices = mlData.getLabelIndices();
    Instances newData = null;
    // gather distinct label combinations
    HashSet<LabelSet> labelSets = new HashSet<LabelSet>();
    int numInstances = data.numInstances();
    for (int i = 0; i < numInstances; i++) {
        // construct labelset
        double[] dblLabels = new double[numLabels];
        for (int j = 0; j < numLabels; j++) {
            int index = labelIndices[j];
            dblLabels[j] = Double.parseDouble(data.attribute(index).value((int) data.instance(i).value(index)));
        }
        LabelSet labelSet = new LabelSet(dblLabels);
        // add labelset if not already present
        labelSets.add(labelSet);
    }
    // create class attribute
    ArrayList<String> classValues = new ArrayList<String>(labelSets.size());
    for (LabelSet subset : labelSets) {
        classValues.add(subset.toBitString());
    }
    Attribute newClass = new Attribute("class", classValues);
    // remove all labels
    newData = RemoveAllLabels.transformInstances(data, labelIndices);
    // add new class attribute
    newData.insertAttributeAt(newClass, newData.numAttributes());
    newData.setClassIndex(newData.numAttributes() - 1);
    // add class values
    for (int i = 0; i < newData.numInstances(); i++) {
        String strClass = "";
        for (int j = 0; j < numLabels; j++) {
            int index = labelIndices[j];
            strClass = strClass + data.attribute(index).value((int) data.instance(i).value(index));
        }
        newData.instance(i).setClassValue(strClass);
    }
    transformedFormat = new Instances(newData, 0);
    return newData;
}
From source file:mulan.transformations.multiclass.MultiClassTransformationBase.java
License:Open Source License
public Instances transformInstances(MultiLabelInstances mlData) throws Exception {
    labelIndices = mlData.getLabelIndices();
    numOfLabels = mlData.getNumLabels();
    Instances data = mlData.getDataSet();
    Instances transformed = new Instances(mlData.getDataSet(), 0);
    // delete all labels
    transformed = RemoveAllLabels.transformInstances(transformed, labelIndices);
    // add single label attribute
    ArrayList<String> classValues = new ArrayList<String>(numOfLabels);
    for (int x = 0; x < numOfLabels; x++) {
        classValues.add("Class" + (x + 1));
    }
    Attribute newClass = new Attribute("Class", classValues);
    transformed.insertAttributeAt(newClass, transformed.numAttributes());
    transformed.setClassIndex(transformed.numAttributes() - 1);
    for (int instanceIndex = 0; instanceIndex < data.numInstances(); instanceIndex++) {
        List<Instance> result = transformInstance(data.instance(instanceIndex));
        for (Instance instance : result) {
            transformed.add(instance);
        }
    }
    return transformed;
}
From source file:mulan.transformations.PT6Transformation.java
License:Open Source License
public Instances transformInstances(MultiLabelInstances mlData) throws Exception {
    int numLabels = mlData.getNumLabels();
    labelIndices = mlData.getLabelIndices();
    // remove all labels
    Instances transformed = RemoveAllLabels.transformInstances(mlData);
    // add at the end an attribute with values the label names
    ArrayList<String> labelNames = new ArrayList<String>(numLabels);
    for (int counter = 0; counter < numLabels; counter++) {
        labelNames.add(mlData.getDataSet().attribute(labelIndices[counter]).name());
    }
    Attribute attrLabel = new Attribute("Label", labelNames);
    transformed.insertAttributeAt(attrLabel, transformed.numAttributes());
    // and at the end a binary attribute
    ArrayList<String> binaryValues = new ArrayList<String>(2);
    binaryValues.add("0");
    binaryValues.add("1");
    Attribute classAttr = new Attribute("Class", binaryValues);
    transformed.insertAttributeAt(classAttr, transformed.numAttributes());
    // add instances
    transformed = new Instances(transformed, 0);
    transformed.setClassIndex(transformed.numAttributes() - 1);
    Instances data = mlData.getDataSet();
    for (int instanceIndex = 0; instanceIndex < data.numInstances(); instanceIndex++) {
        for (int labelCounter = 0; labelCounter < numLabels; labelCounter++) {
            Instance temp;
            temp = RemoveAllLabels.transformInstance(data.instance(instanceIndex), labelIndices);
            temp.setDataset(null);
            temp.insertAttributeAt(temp.numAttributes());
            temp.insertAttributeAt(temp.numAttributes());
            temp.setDataset(transformed);
            temp.setValue(temp.numAttributes() - 2, (String) labelNames.get(labelCounter));
            if (data.attribute(labelIndices[labelCounter])
                    .value((int) data.instance(instanceIndex).value(labelIndices[labelCounter])).equals("1")) {
                temp.setValue(temp.numAttributes() - 1, "1");
            } else {
                temp.setValue(temp.numAttributes() - 1, "0");
            }
            transformed.add(temp);
        }
    }
    return transformed;
}
From source file:myJ48.MyJ48.java
public Instances NumericToNominalByThreshold(Instances numericSet, int idx_attribute, double threshold)
        throws Exception {
    double[] values;
    Instances NominalizedSet = new Instances(numericSet);
    //System.out.println("number of instances: " + NominalizedSet.numInstances());
    values = numericSet.attributeToDoubleArray(idx_attribute);
    List<String> nominalValue = new ArrayList<String>();
    nominalValue.add("low");
    nominalValue.add("high");
    Attribute nominalAttrib = new Attribute(numericSet.attribute(idx_attribute).name() + "_nominal",
            nominalValue);
    NominalizedSet.insertAttributeAt(nominalAttrib, idx_attribute);
    for (int i = 0; i < values.length; i++) {
        if (values[i] <= threshold) {
            NominalizedSet.instance(i).setValue(idx_attribute, "low");
        } else {
            NominalizedSet.instance(i).setValue(idx_attribute, "high");
        }
    }
    String[] options = { "-R", String.valueOf(idx_attribute + 2) };
    Filter remove = (Filter) Class.forName("weka.filters.unsupervised.attribute.Remove").newInstance();
    ((OptionHandler) remove).setOptions(options);
    remove.setInputFormat(NominalizedSet);
    NominalizedSet = Filter.useFilter(NominalizedSet, remove);
    return NominalizedSet;
}
From source file:org.jaqpot.algorithm.resource.WekaMLR.java
License:Open Source License
@POST @Path("prediction") public Response prediction(PredictionRequest request) { try {//from w w w . j av a2s.c o m if (request.getDataset().getDataEntry().isEmpty() || request.getDataset().getDataEntry().get(0).getValues().isEmpty()) { return Response.status(Response.Status.BAD_REQUEST).entity( ErrorReportFactory.badRequest("Dataset is empty", "Cannot train model on empty dataset")) .build(); } String base64Model = (String) request.getRawModel(); byte[] modelBytes = Base64.getDecoder().decode(base64Model); ByteArrayInputStream bais = new ByteArrayInputStream(modelBytes); ObjectInput in = new ObjectInputStream(bais); WekaModel model = (WekaModel) in.readObject(); Classifier classifier = model.getClassifier(); Instances data = InstanceUtils.createFromDataset(request.getDataset()); List<String> additionalInfo = (List) request.getAdditionalInfo(); String dependentFeature = additionalInfo.get(0); String dependentFeatureName = additionalInfo.get(1); data.insertAttributeAt(new Attribute(dependentFeature), data.numAttributes()); List<LinkedHashMap<String, Object>> predictions = new ArrayList<>(); // data.stream().forEach(instance -> { // try { // double prediction = classifier.classifyInstance(instance); // Map<String, Object> predictionMap = new HashMap<>(); // predictionMap.put("Weka MLR prediction of " + dependentFeature, prediction); // predictions.add(predictionMap); // } catch (Exception ex) { // Logger.getLogger(WekaMLR.class.getName()).log(Level.SEVERE, null, ex); // } // }); for (int i = 0; i < data.numInstances(); i++) { Instance instance = data.instance(i); try { double prediction = classifier.classifyInstance(instance); LinkedHashMap<String, Object> predictionMap = new LinkedHashMap<>(); predictionMap.put("Weka MLR prediction of " + dependentFeatureName, prediction); predictions.add(predictionMap); } catch (Exception ex) { Logger.getLogger(WekaMLR.class.getName()).log(Level.SEVERE, null, ex); return Response.status(Response.Status.BAD_REQUEST).entity( ErrorReportFactory.badRequest("Error while gettting predictions.", ex.getMessage())) .build(); } } PredictionResponse response = new PredictionResponse(); response.setPredictions(predictions); return Response.ok(response).build(); } catch (Exception ex) { Logger.getLogger(WekaMLR.class.getName()).log(Level.SEVERE, null, ex); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build(); } }
From source file:org.jaqpot.algorithm.resource.WekaPLS.java
License:Open Source License
@POST @Path("prediction") public Response prediction(PredictionRequest request) { try {/*w w w. j av a2s . co m*/ if (request.getDataset().getDataEntry().isEmpty() || request.getDataset().getDataEntry().get(0).getValues().isEmpty()) { return Response .status(Response.Status.BAD_REQUEST).entity(ErrorReportFactory .badRequest("Dataset is empty", "Cannot make predictions on empty dataset")) .build(); } String base64Model = (String) request.getRawModel(); byte[] modelBytes = Base64.getDecoder().decode(base64Model); ByteArrayInputStream bais = new ByteArrayInputStream(modelBytes); ObjectInput in = new ObjectInputStream(bais); WekaModel model = (WekaModel) in.readObject(); Classifier classifier = model.getClassifier(); Instances data = InstanceUtils.createFromDataset(request.getDataset()); String dependentFeature = (String) request.getAdditionalInfo(); data.insertAttributeAt(new Attribute(dependentFeature), data.numAttributes()); data.setClass(data.attribute(dependentFeature)); List<LinkedHashMap<String, Object>> predictions = new ArrayList<>(); // data.stream().forEach(instance -> { // try { // double prediction = classifier.classifyInstance(instance); // Map<String, Object> predictionMap = new HashMap<>(); // predictionMap.put("Weka PLS prediction of " + dependentFeature, prediction); // predictions.add(predictionMap); // } catch (Exception ex) { // Logger.getLogger(WekaSVM.class.getName()).log(Level.SEVERE, null, ex); // } // }); for (int i = 0; i < data.numInstances(); i++) { Instance instance = data.instance(i); try { double prediction = classifier.classifyInstance(instance); LinkedHashMap<String, Object> predictionMap = new LinkedHashMap<>(); predictionMap.put("Weka PLS prediction of " + dependentFeature, prediction); predictions.add(predictionMap); } catch (Exception ex) { Logger.getLogger(WekaMLR.class.getName()).log(Level.SEVERE, null, ex); return Response.status(Response.Status.BAD_REQUEST).entity( ErrorReportFactory.badRequest("Error while gettting predictions.", ex.getMessage())) .build(); } } PredictionResponse response = new PredictionResponse(); response.setPredictions(predictions); return Response.ok(response).build(); } catch (IOException | ClassNotFoundException ex) { Logger.getLogger(WekaSVM.class.getName()).log(Level.SEVERE, null, ex); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build(); } }
From source file:org.jaqpot.algorithm.resource.WekaRBF.java
License:Open Source License
@POST @Path("prediction") public Response prediction(PredictionRequest request) { try {/*from w w w .java2 s . c o m*/ if (request.getDataset().getDataEntry().isEmpty() || request.getDataset().getDataEntry().get(0).getValues().isEmpty()) { return Response .status(Response.Status.BAD_REQUEST).entity(ErrorReportFactory .badRequest("Dataset is empty", "Cannot make predictions on empty dataset")) .build(); } String base64Model = (String) request.getRawModel(); byte[] modelBytes = Base64.getDecoder().decode(base64Model); ByteArrayInputStream bais = new ByteArrayInputStream(modelBytes); ObjectInput in = new ObjectInputStream(bais); WekaModel model = (WekaModel) in.readObject(); Classifier classifier = model.getClassifier(); Instances data = InstanceUtils.createFromDataset(request.getDataset()); String dependentFeature = (String) request.getAdditionalInfo(); data.insertAttributeAt(new Attribute(dependentFeature), data.numAttributes()); data.setClass(data.attribute(dependentFeature)); List<LinkedHashMap<String, Object>> predictions = new ArrayList<>(); // data.stream().forEach(instance -> { // try { // double prediction = classifier.classifyInstance(instance); // Map<String, Object> predictionMap = new HashMap<>(); // predictionMap.put("Weka MLR prediction of " + dependentFeature, prediction); // predictions.add(predictionMap); // } catch (Exception ex) { // Logger.getLogger(WekaMLR.class.getName()).log(Level.SEVERE, null, ex); // } // }); for (int i = 0; i < data.numInstances(); i++) { Instance instance = data.instance(i); try { double prediction = classifier.classifyInstance(instance); LinkedHashMap<String, Object> predictionMap = new LinkedHashMap<>(); predictionMap.put("Weka RBF prediction of " + dependentFeature, prediction); predictions.add(predictionMap); } catch (Exception ex) { Logger.getLogger(WekaMLR.class.getName()).log(Level.SEVERE, null, ex); return Response.status(Response.Status.BAD_REQUEST).entity( ErrorReportFactory.badRequest("Error while gettting predictions.", ex.getMessage())) .build(); } } PredictionResponse response = new PredictionResponse(); response.setPredictions(predictions); return Response.ok(response).build(); } catch (IOException | ClassNotFoundException ex) { Logger.getLogger(WekaMLR.class.getName()).log(Level.SEVERE, null, ex); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build(); } }
From source file:org.jaqpot.algorithm.resource.WekaSVM.java
License:Open Source License
@POST @Path("prediction") public Response prediction(PredictionRequest request) { try {//from w ww .j a v a2 s . c o m if (request.getDataset().getDataEntry().isEmpty() || request.getDataset().getDataEntry().get(0).getValues().isEmpty()) { return Response .status(Response.Status.BAD_REQUEST).entity(ErrorReportFactory .badRequest("Dataset is empty", "Cannot make predictions on empty dataset")) .build(); } String base64Model = (String) request.getRawModel(); byte[] modelBytes = Base64.getDecoder().decode(base64Model); ByteArrayInputStream bais = new ByteArrayInputStream(modelBytes); ObjectInput in = new ObjectInputStream(bais); WekaModel model = (WekaModel) in.readObject(); Classifier classifier = model.getClassifier(); Instances data = InstanceUtils.createFromDataset(request.getDataset()); String dependentFeature = (String) request.getAdditionalInfo(); data.insertAttributeAt(new Attribute(dependentFeature), data.numAttributes()); data.setClass(data.attribute(dependentFeature)); List<LinkedHashMap<String, Object>> predictions = new ArrayList<>(); // data.stream().forEach(instance -> { // try { // double prediction = classifier.classifyInstance(instance); // Map<String, Object> predictionMap = new HashMap<>(); // predictionMap.put("Weka SVM prediction of " + dependentFeature, prediction); // predictions.add(predictionMap); // } catch (Exception ex) { // Logger.getLogger(WekaSVM.class.getName()).log(Level.SEVERE, null, ex); // } // }); for (int i = 0; i < data.numInstances(); i++) { Instance instance = data.instance(i); try { double prediction = classifier.classifyInstance(instance); LinkedHashMap<String, Object> predictionMap = new LinkedHashMap<>(); predictionMap.put("Weka SVM prediction of " + dependentFeature, prediction); predictions.add(predictionMap); } catch (Exception ex) { Logger.getLogger(WekaMLR.class.getName()).log(Level.SEVERE, null, ex); return Response.status(Response.Status.BAD_REQUEST).entity( ErrorReportFactory.badRequest("Error while gettting predictions.", ex.getMessage())) .build(); } } PredictionResponse response = new PredictionResponse(); response.setPredictions(predictions); return Response.ok(response).build(); } catch (Exception ex) { Logger.getLogger(WekaSVM.class.getName()).log(Level.SEVERE, null, ex); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build(); } }