List of usage examples for the weka.classifiers.functions.LibSVM constructor, LibSVM()
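Before the full examples, a minimal self-contained sketch of the LibSVM() constructor on its own. The ARFF file name, the class-index choice, and the training-set evaluation are illustrative assumptions, not taken from the examples below; the explicit kernel and SVM-type setters simply make the wrapper's usual defaults (C-SVC with an RBF kernel) visible and show the SelectedTag pattern the examples rely on.

import weka.classifiers.Evaluation;
import weka.classifiers.functions.LibSVM;
import weka.core.Instances;
import weka.core.SelectedTag;
import weka.core.converters.ConverterUtils.DataSource;

public class LibSVMQuickStart {
    public static void main(String[] args) throws Exception {
        // "iris.arff" is a placeholder path; any ARFF file with a nominal class attribute works here.
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1); // assume the class is the last attribute

        LibSVM svm = new LibSVM();
        svm.setKernelType(new SelectedTag(LibSVM.KERNELTYPE_RBF, LibSVM.TAGS_KERNELTYPE));
        svm.setSVMType(new SelectedTag(LibSVM.SVMTYPE_C_SVC, LibSVM.TAGS_SVMTYPE));
        svm.buildClassifier(data);

        // Training-set evaluation, for illustration only; the second example below uses a held-out test set.
        Evaluation eval = new Evaluation(data);
        eval.evaluateModel(svm, data);
        System.out.println(eval.toSummaryString());
    }
}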
From source file: org.jaqpot.algorithms.resource.WekaSVM.java
License: Open Source License
@POST @Path("training") public Response training(TrainingRequest request) { try {//from w w w . jav a 2s . c om if (request.getDataset().getDataEntry().isEmpty() || request.getDataset().getDataEntry().get(0).getValues().isEmpty()) { return Response.status(Response.Status.BAD_REQUEST) .entity("Dataset is empty. Cannot train model on empty dataset.").build(); } List<String> features = request.getDataset().getDataEntry().stream().findFirst().get().getValues() .keySet().stream().collect(Collectors.toList()); Instances data = InstanceUtils.createFromDataset(request.getDataset(), request.getPredictionFeature()); Map<String, Object> parameters = request.getParameters() != null ? request.getParameters() : new HashMap<>(); LibSVM regressor = new LibSVM(); Double epsilon = Double.parseDouble(parameters.getOrDefault("epsilon", _epsilon).toString()); Double cacheSize = Double.parseDouble(parameters.getOrDefault("cacheSize", _cacheSize).toString()); Double gamma = Double.parseDouble(parameters.getOrDefault("gamma", _gamma).toString()); Double coeff0 = Double.parseDouble(parameters.getOrDefault("coeff0", _coeff0).toString()); Double cost = Double.parseDouble(parameters.getOrDefault("cost", _cost).toString()); Double nu = Double.parseDouble(parameters.getOrDefault("nu", _nu).toString()); Double loss = Double.parseDouble(parameters.getOrDefault("loss", _loss).toString()); Integer degree = Integer.parseInt(parameters.getOrDefault("degree", _degree).toString()); regressor.setEps(epsilon); regressor.setCacheSize(cacheSize); regressor.setDegree(degree); regressor.setCost(cost); regressor.setGamma(gamma); regressor.setCoef0(coeff0); regressor.setNu(nu); regressor.setLoss(loss); Integer svm_kernel = null; String kernel = parameters.getOrDefault("kernel", _kernel).toString(); if (kernel.equalsIgnoreCase("rbf")) { svm_kernel = LibSVM.KERNELTYPE_RBF; } else if (kernel.equalsIgnoreCase("polynomial")) { svm_kernel = LibSVM.KERNELTYPE_POLYNOMIAL; } else if (kernel.equalsIgnoreCase("linear")) { svm_kernel = LibSVM.KERNELTYPE_LINEAR; } else if (kernel.equalsIgnoreCase("sigmoid")) { svm_kernel = LibSVM.KERNELTYPE_SIGMOID; } regressor.setKernelType(new SelectedTag(svm_kernel, LibSVM.TAGS_KERNELTYPE)); Integer svm_type = null; String type = parameters.getOrDefault("type", _type).toString(); if (type.equalsIgnoreCase("NU_SVR")) { svm_type = LibSVM.SVMTYPE_NU_SVR; } else if (type.equalsIgnoreCase("NU_SVC")) { svm_type = LibSVM.SVMTYPE_NU_SVC; } else if (type.equalsIgnoreCase("C_SVC")) { svm_type = LibSVM.SVMTYPE_C_SVC; } else if (type.equalsIgnoreCase("EPSILON_SVR")) { svm_type = LibSVM.SVMTYPE_EPSILON_SVR; } else if (type.equalsIgnoreCase("ONE_CLASS_SVM")) { svm_type = LibSVM.SVMTYPE_ONE_CLASS_SVM; } regressor.setSVMType(new SelectedTag(svm_type, LibSVM.TAGS_SVMTYPE)); regressor.buildClassifier(data); WekaModel model = new WekaModel(); model.setClassifier(regressor); Map<String, Double> options = new HashMap<>(); options.put("gamma", gamma); options.put("coeff0", coeff0); options.put("degree", new Double(degree.toString())); Field modelField = LibSVM.class.getDeclaredField("m_Model"); modelField.setAccessible(true); svm_model svmModel = (svm_model) modelField.get(regressor); double[][] coefs = svmModel.sv_coef; List<Double> coefsList = IntStream.range(0, coefs[0].length).mapToObj(i -> coefs[0][i]) .collect(Collectors.toList()); svm_node[][] nodes = svmModel.SV; List<Map<Integer, Double>> vectors = IntStream.range(0, nodes.length).mapToObj(i -> { Map<Integer, Double> node = new TreeMap<>(); Arrays.stream(nodes[i]).forEach(n 
-> node.put(n.index, n.value)); return node; }).collect(Collectors.toList()); String pmml = PmmlUtils.createSVMModel(features, request.getPredictionFeature(), "SVM", kernel, svm_type, options, coefsList, vectors); TrainingResponse response = new TrainingResponse(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutput out = new ObjectOutputStream(baos); out.writeObject(model); String base64Model = Base64.getEncoder().encodeToString(baos.toByteArray()); response.setRawModel(base64Model); List<String> independentFeatures = features.stream() .filter(feature -> !feature.equals(request.getPredictionFeature())) .collect(Collectors.toList()); response.setIndependentFeatures(independentFeatures); response.setPmmlModel(pmml); response.setAdditionalInfo(request.getPredictionFeature()); response.setPredictedFeatures( Arrays.asList("Weka SVM prediction of " + request.getPredictionFeature())); return Response.ok(response).build(); } catch (Exception ex) { Logger.getLogger(WekaSVM.class.getName()).log(Level.SEVERE, null, ex); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build(); } }
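The training resource above returns the fitted classifier as a Base64-encoded serialized WekaModel in the rawModel field. A hedged sketch of the consuming side is shown here; it assumes WekaModel exposes a getClassifier() accessor mirroring the setClassifier(...) call above, which is not part of this listing, and the WekaModel import from the jaqpot code base is omitted.

import java.io.ByteArrayInputStream;
import java.io.ObjectInputStream;
import java.util.Base64;
import weka.classifiers.Classifier;
import weka.core.Instance;

public class ModelDecoder {
    // Decode the rawModel payload written by the training resource and classify one instance.
    public static double predict(String base64Model, Instance instance) throws Exception {
        byte[] bytes = Base64.getDecoder().decode(base64Model);
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
            WekaModel model = (WekaModel) in.readObject(); // WekaModel comes from the jaqpot code base
            Classifier classifier = model.getClassifier(); // assumed accessor, see note above
            return classifier.classifyInstance(instance);  // instance must use the same attribute layout as training
        }
    }
}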
From source file: Tubes.Classification.java
public static void main(String[] args) throws Exception {
    StringToWordVector filter = new StringToWordVector();
    File training = new File(classTrain);
    File testing = new File(classTest);
    BufferedReader readTrain = new BufferedReader(new FileReader(training));
    BufferedReader readTest = new BufferedReader(new FileReader(testing));
    Instances dataTrain = new Instances(readTrain);
    Instances dataTest = new Instances(readTest);

    // Turn the string attributes into a word-vector representation before training.
    filter.setInputFormat(dataTrain);
    dataTrain = Filter.useFilter(dataTrain, filter);
    dataTest = Filter.useFilter(dataTest, filter); // apply the same trained filter to the test set so the attribute spaces match
    dataTrain.setClassIndex(dataTrain.numAttributes() - 1);
    dataTest.setClassIndex(dataTest.numAttributes() - 1);

    Classification classify = new Classification();
    NaiveBayes bayes = new NaiveBayes();
    // RandomForest rf = new RandomForest();
    // BayesNet bayesNet = new BayesNet();
    LibSVM libSVM = new LibSVM();

    System.out.println("==========================Naive Bayes Evaluation===========================");
    Evaluation eval = classify.runClassifier(bayes, dataTrain, dataTest);
    System.out.println(eval.toSummaryString() + "\n");
    System.out.println(eval.toClassDetailsString() + "\n");
    System.out.println(eval.toMatrixString() + "\n");
    System.out.println("===========================================================================");

    // System.out.println("==============================Random Forest================================");
    // Evaluation eval2 = classify.runClassifier(rf, dataTrain, dataTest);
    // System.out.println(eval2.toSummaryString() + "\n");
    // System.out.println(eval2.toClassDetailsString() + "\n");
    // System.out.println(eval2.toMatrixString() + "\n");
    // System.out.println("=======================================================================");

    // System.out.println("==============================Bayesian Network================================");
    // Evaluation eval3 = classify.runClassifier(bayesNet, dataTrain, dataTest);
    // System.out.println(eval3.toSummaryString() + "\n");
    // System.out.println(eval3.toClassDetailsString() + "\n");
    // System.out.println(eval3.toMatrixString() + "\n");
    // System.out.println("===========================================================================");

    System.out.println("==============================LibSVM================================");
    libSVM.setCacheSize(512); // MB
    libSVM.setNormalize(true);
    libSVM.setShrinking(true);
    libSVM.setKernelType(new SelectedTag(LibSVM.KERNELTYPE_LINEAR, LibSVM.TAGS_KERNELTYPE));
    libSVM.setDegree(3);
    libSVM.setSVMType(new SelectedTag(LibSVM.SVMTYPE_C_SVC, LibSVM.TAGS_SVMTYPE));
    Evaluation eval4 = classify.runClassifier(libSVM, dataTrain, dataTest);
    System.out.println(eval4.toSummaryString() + "\n");
    System.out.println(eval4.toClassDetailsString() + "\n");
    System.out.println(eval4.toMatrixString() + "\n");
    System.out.println("===========================================================================");
}
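The Classification.runClassifier helper used above is not part of this listing. A plausible minimal equivalent against Weka's own Evaluation API is sketched below; the method signature is inferred from the calls above and is an assumption, not the original class.

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.core.Instances;

public class Classification {
    // Build the classifier on the training set, then evaluate it on the held-out test set.
    public Evaluation runClassifier(Classifier classifier, Instances train, Instances test) throws Exception {
        classifier.buildClassifier(train);
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(classifier, test);
        return eval;
    }
}

Training on dataTrain and scoring on dataTest keeps the summary, per-class, and confusion-matrix reports printed above on genuinely held-out data.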