List of usage examples for the weka.classifiers.functions LinearRegression constructor
public LinearRegression()
From source file:task2.java
/** * Processes requests for both HTTP <code>GET</code> and <code>POST</code> * methods./*ww w . j av a 2 s .c om*/ * * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */ protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType("text/html;charset=UTF-8"); try (PrintWriter out = response.getWriter()) { /* TODO output your page here. You may use following sample code. */ out.println("<!DOCTYPE html>"); out.println("<html>"); out.println("<head>"); out.println("<title>Servlet selection</title>"); out.println("</head>"); out.println("<body>"); CSVLoader loader = new CSVLoader(); loader.setSource(new File("C:/Users//Raguvinoth/Desktop/5339.csv")); Instances data = loader.getDataSet(); //Save ARFF ArffSaver saver = new ArffSaver(); saver.setInstances(data); saver.setFile(new File("\"C:/Users/Raguvinoth/Desktop/5339_converted.arff")); saver.writeBatch(); BufferedReader reader = new BufferedReader( new FileReader("C://Users//Raguvinoth//Desktop//weka1//5339_nominal.arff")); Instances data1 = new Instances(reader); if (data1.classIndex() == -1) data1.setClassIndex(data1.numAttributes() - 14); // 1. meta-classifier // useClassifier(data); // 2. AttributeSelector try { AttributeSelection attsel = new AttributeSelection(); GreedyStepwise search = new GreedyStepwise(); CfsSubsetEval eval = new CfsSubsetEval(); attsel.setEvaluator(eval); attsel.setSearch(search); attsel.SelectAttributes(data); int[] indices = attsel.selectedAttributes(); System.out.println("selected attribute indices:\n" + Utils.arrayToString(indices)); System.out.println("\n 4. 
Linear-Regression on above selected attributes"); long time1 = System.currentTimeMillis(); long sec1 = time1 / 1000; BufferedReader reader1 = new BufferedReader( new FileReader("C://Users//Raguvinoth//Desktop//weka1//5339_linear2.arff")); Instances data2 = new Instances(reader1); data2.setClassIndex(0); LinearRegression lr = new LinearRegression(); lr.buildClassifier(data2); System.out.println(lr.toString()); long time2 = System.currentTimeMillis(); long sec2 = time2 / 1000; long timeTaken = sec2 - sec1; System.out.println("Total time taken for building the model: " + timeTaken + " seconds"); for (int i = 0; i < 5; i++) { out.println("<p>" + "selected attribute indices:\n" + Utils.arrayToString(indices[i]) + "</p>"); } out.println("<p>" + "\n 4. Linear-Regression on above selected attributes" + "</p>"); out.println("<p>" + lr.toString() + "</p>"); out.println("<p>" + "Total time taken for building the model: " + timeTaken + " seconds" + "</p>"); out.println("</body>"); out.println("</html>"); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } }
From source file:boa.aggregators.LinearRegressionAggregator.java
License:Apache License
/** {@inheritDoc} */ @Override//w w w.ja v a2 s. c o m public void finish() throws IOException, InterruptedException { int NumOfAttributes = this.getVectorSize(); List<Attribute> attribute = new ArrayList<Attribute>(); FastVector fvAttributes = new FastVector(NumOfAttributes); for (int i = 0; i < NumOfAttributes; i++) { attribute.add(new Attribute("Attribute" + i)); fvAttributes.addElement(attribute.get(i)); } Instances trainingSet = new Instances("LinearRegression", fvAttributes, 1); trainingSet.setClassIndex(NumOfAttributes - 1); for (List<Double> vector : this.vectors.values()) { Instance instance = new Instance(NumOfAttributes); for (int i = 0; i < vector.size(); i++) { instance.setValue((Attribute) fvAttributes.elementAt(i), vector.get(i)); } trainingSet.add(instance); } try { this.model = new LinearRegression(); this.model.setOptions(options); this.model.buildClassifier(trainingSet); } catch (Exception ex) { } this.saveModel(this.model); }
From source file:br.com.ufu.lsi.rebfnetwork.RBFNetwork.java
License:Open Source License
/**
 * Returns default capabilities of the classifier: the union ("or") of
 * {@link Logistic} and {@link LinearRegression}, restricted ("and") to what
 * {@link SimpleKMeans} supports, with the class capabilities re-added.
 *
 * @return the capabilities of this classifier
 * @see Logistic
 * @see LinearRegression
 */
public Capabilities getCapabilities() {
    final Capabilities caps = new Logistic().getCapabilities();
    caps.or(new LinearRegression().getCapabilities());

    // Remember the class capabilities before intersecting with the
    // clusterer, then restore them afterwards.
    final Capabilities classCaps = caps.getClassCapabilities();
    caps.and(new SimpleKMeans().getCapabilities());
    caps.or(classCaps);
    return caps;
}
From source file:br.com.ufu.lsi.rebfnetwork.RBFNetwork.java
License:Open Source License
/**
 * Builds the classifier.
 *
 * Pipeline: validate capabilities, drop instances with a missing class,
 * standardize the data, transform it into cluster-membership "basis"
 * features (k-means wrapped in a density-based clusterer), then fit a
 * Logistic model for nominal classes or a LinearRegression for numeric
 * classes on the transformed data.
 *
 * @param instances the training data
 * @throws Exception if the classifier could not be built successfully
 */
public void buildClassifier(Instances instances) throws Exception {
    // can classifier handle the data?
    getCapabilities().testWithFail(instances);

    // remove instances with missing class (work on a copy so the
    // caller's dataset is untouched)
    instances = new Instances(instances);
    instances.deleteWithMissingClass();

    // only class? -> build ZeroR model
    if (instances.numAttributes() == 1) {
        System.err.println(
                "Cannot build model (only class attribute present in data!), " + "using ZeroR model instead!");
        m_ZeroR = new weka.classifiers.rules.ZeroR();
        m_ZeroR.buildClassifier(instances);
        return;
    } else {
        m_ZeroR = null;
    }

    // standardize all attributes before clustering
    m_standardize = new Standardize();
    m_standardize.setInputFormat(instances);
    instances = Filter.useFilter(instances, m_standardize);

    // k-means provides the cluster centers for the RBF basis functions
    SimpleKMeans sk = new SimpleKMeans();
    sk.setNumClusters(m_numClusters);
    sk.setSeed(m_clusteringSeed);

    // wrap k-means so each cluster gets a density estimate
    MakeDensityBasedClusterer dc = new MakeDensityBasedClusterer();
    dc.setClusterer(sk);
    dc.setMinStdDev(m_minStdDev);

    // the basis filter maps each instance to its cluster-membership values
    m_basisFilter = new ClusterMembership();
    m_basisFilter.setDensityBasedClusterer(dc);
    m_basisFilter.setInputFormat(instances);
    Instances transformed = Filter.useFilter(instances, m_basisFilter);

    // nominal class -> logistic regression; numeric class -> linear regression
    if (instances.classAttribute().isNominal()) {
        m_linear = null;
        m_logistic = new Logistic();
        m_logistic.setRidge(m_ridge);
        m_logistic.setMaxIts(m_maxIts);
        m_logistic.buildClassifier(transformed);
    } else {
        m_logistic = null;
        m_linear = new LinearRegression();
        // attribute selection is disabled: the basis features are already
        // the chosen representation
        m_linear.setAttributeSelectionMethod(
                new SelectedTag(LinearRegression.SELECTION_NONE, LinearRegression.TAGS_SELECTION));
        m_linear.setRidge(m_ridge);
        m_linear.buildClassifier(transformed);
    }
}
From source file:cn.ict.zyq.bestConf.COMT2.COMT2.java
License:Open Source License
/**
 * Returns the capabilities of this classifier, delegated to a
 * default-configured {@link LinearRegression}.
 *
 * @return the capabilities reported by {@code LinearRegression}
 */
@Override
public Capabilities getCapabilities() {
    final LinearRegression reference = new LinearRegression();
    return reference.getCapabilities();
}
From source file:com.github.fracpete.multisearch.optimize.PLSFilterAndLinearRegression.java
License:Open Source License
/** * The first parameter must be dataset,//from www . ja v a 2s .c o m * the (optional) second the class index (1-based, 'first' and 'last' * also supported). * * @param args the commandline options * @throws Exception if optimization fails for some reason */ public static void main(String[] args) throws Exception { if (args.length == 0) { System.err.println("\nUsage: PLSFilterAndLinearRegression <dataset> [classindex]\n"); System.exit(1); } // load data Instances data = ExampleHelper.loadData(args[0], (args.length > 1) ? args[1] : null); // configure classifier we want to optimize PLSFilter pls = new PLSFilter(); LinearRegression lr = new LinearRegression(); FilteredClassifier fc = new FilteredClassifier(); fc.setClassifier(lr); fc.setFilter(pls); // required for Weka > 3.7.13 fc.setDoNotCheckForModifiedClassAttribute(true); // configure multisearch // 1. number of components ListParameter numComp = new ListParameter(); numComp.setProperty("filter.numComponents"); numComp.setList("2 5 7"); // 2. ridge MathParameter ridge = new MathParameter(); ridge.setProperty("classifier.ridge"); ridge.setBase(10); ridge.setMin(-5); ridge.setMax(1); ridge.setStep(1); ridge.setExpression("pow(BASE,I)"); // assemble everything MultiSearch multi = new MultiSearch(); multi.setClassifier(fc); multi.setSearchParameters(new AbstractParameter[] { numComp, ridge }); SelectedTag tag = new SelectedTag(DefaultEvaluationMetrics.EVALUATION_RMSE, new DefaultEvaluationMetrics().getTags()); multi.setEvaluation(tag); // output configuration System.out.println("\nMultiSearch commandline:\n" + Utils.toCommandLine(multi)); // optimize System.out.println("\nOptimizing...\n"); multi.buildClassifier(data); System.out.println("Best setup:\n" + Utils.toCommandLine(multi.getBestClassifier())); System.out.println("Best parameters: " + multi.getGenerator().evaluate(multi.getBestValues())); }
From source file:com.github.fracpete.multisearch.setupgenerator.PLSFilterAndLinearRegression.java
License:Open Source License
/** * Outputs the commandlines.//from www. j ava 2 s . co m * * @param args the commandline options * @throws Exception if setup generator fails for some reason */ public static void main(String[] args) throws Exception { // configure classifier we want to generate setups for PLSFilter pls = new PLSFilter(); LinearRegression lr = new LinearRegression(); FilteredClassifier fc = new FilteredClassifier(); fc.setClassifier(lr); fc.setFilter(pls); // required for Weka > 3.7.13 fc.setDoNotCheckForModifiedClassAttribute(true); // configure generator // 1. number of components ListParameter numComp = new ListParameter(); numComp.setProperty("filter.numComponents"); numComp.setList("2 5 7"); // 2. ridge MathParameter ridge = new MathParameter(); ridge.setProperty("classifier.ridge"); ridge.setBase(10); ridge.setMin(-5); ridge.setMax(1); ridge.setStep(1); ridge.setExpression("pow(BASE,I)"); // assemble everything SetupGenerator generator = new SetupGenerator(); generator.setBaseObject(fc); generator.setParameters(new AbstractParameter[] { numComp, ridge }); // output configuration System.out.println("\nSetupgenerator commandline:\n" + Utils.toCommandLine(generator)); // output commandlines System.out.println("\nCommandlines:\n"); Enumeration<Serializable> enm = generator.setups(); while (enm.hasMoreElements()) System.out.println(Utils.toCommandLine(enm.nextElement())); }
From source file:com.github.r351574nc3.amex.assignment2.App.java
License:Open Source License
/** * Trains the model using a {@link LinearRegression} classifier. * * @throws an Exception/*from w w w . ja v a 2 s . c o m*/ */ public void train() throws Exception { setClassifier(new LinearRegression()); getClassifier().buildClassifier(getTrained()); }
From source file:controller.MineroControler.java
public String regresionLineal() { BufferedReader breader = null; Instances datos = null;/* ww w . j a v a 2 s . c o m*/ breader = new BufferedReader(fuente_arff); try { datos = new Instances(breader); datos.setClassIndex(datos.numAttributes() - 1); // clase principal, ltima en atributos } catch (IOException ex) { System.err.println("Problemas al intentar cargar los datos"); } LinearRegression regresionL = new LinearRegression(); try { regresionL.buildClassifier(datos); Instance nuevaCal = datos.lastInstance(); double calif = regresionL.classifyInstance(nuevaCal); setValorCalculado(new Double(calif)); } catch (Exception ex) { System.err.println("Problemas al clasificar instancia"); } return regresionL.toString(); }
From source file:data.Regression.java
public int regression(String fileName) { String arffName = FileTransfer.transfer(fileName); try {//from w w w . ja va 2 s. com //load data Instances data = new Instances(new BufferedReader(new FileReader(arffName))); data.setClassIndex(data.numAttributes() - 1); //build model LinearRegression model = new LinearRegression(); model.buildClassifier(data); //the last instance with missing class is not used System.out.println(model); //classify the last instance Instance num = data.lastInstance(); int people = (int) model.classifyInstance(num); System.out.println("NumOfEnrolled (" + num + "): " + people); return people; } catch (Exception e) { e.printStackTrace(); System.out.println("Regression fail"); } return 0; }