List of usage examples for weka.classifiers.functions LinearRegression setRidge
public void setRidge(double newRidge)
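setRidge(double newRidge) sets the ridge (L2 regularization) term that LinearRegression adds when solving for its coefficients; it must be called before buildClassifier(). The default value is 1.0e-8, and larger values shrink the coefficients more strongly. A minimal standalone sketch (not taken from the source files below), assuming Weka 3.7+ where DenseInstance and the ArrayList-based Instances constructor are available:

import java.util.ArrayList;

import weka.classifiers.functions.LinearRegression;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instances;

public class SetRidgeExample {
    public static void main(String[] args) throws Exception {
        // Tiny numeric dataset: one feature x and a numeric class y.
        ArrayList<Attribute> attrs = new ArrayList<Attribute>();
        attrs.add(new Attribute("x"));
        attrs.add(new Attribute("y"));
        Instances data = new Instances("toy", attrs, 4);
        data.setClassIndex(data.numAttributes() - 1);
        double[][] rows = { { 1, 2.1 }, { 2, 3.9 }, { 3, 6.2 }, { 4, 7.8 } };
        for (double[] row : rows) {
            data.add(new DenseInstance(1.0, row));
        }

        // Set the ridge parameter before training, then build the model.
        LinearRegression linreg = new LinearRegression();
        linreg.setRidge(1e-3);
        linreg.buildClassifier(data);
        System.out.println(linreg);
    }
}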
From source file:edu.utexas.cs.tactex.utils.RegressionUtils.java
License:Open Source License
public static Double leaveOneOutErrorLinRegLambda(double lambda, Instances data) {
    // MANUAL
    // create a linear regression classifier with Xy_polynorm data
    LinearRegression linreg = createLinearRegression();
    linreg.setRidge(lambda);
    double mse = 0;
    for (int i = 0; i < data.numInstances(); ++i) {
        log.info("fold " + i);
        Instances train = data.trainCV(data.numInstances(), i);
        log.info("train");
        Instances test = data.testCV(data.numInstances(), i);
        log.info("test");
        double actualY = data.instance(i).classValue();
        log.info("actualY");
        try {
            linreg.buildClassifier(train);
            log.info("buildClassifier");
        } catch (Exception e) {
            log.error("failed to build classifier in cross validation", e);
            return null;
        }
        double predictedY = 0;
        try {
            predictedY = linreg.classifyInstance(test.instance(0));
            log.info("predictedY");
        } catch (Exception e) {
            log.error("failed to classify in cross validation", e);
            return null;
        }
        double error = predictedY - actualY;
        log.info("error " + error);
        mse += error * error;
        log.info("mse " + mse);
    }
    if (data.numInstances() == 0) {
        log.error("no instances in leave-one-out data");
        return null;
    }
    mse /= data.numInstances();
    log.info("mse " + mse);
    return mse;

    // USING WEKA
    //
    // // create evaluation object
    // Evaluation eval = null;
    // try {
    //     eval = new Evaluation(data);
    // } catch (Exception e) {
    //     log.error("weka Evaluation() creation threw exception", e);
    //     //e.printStackTrace();
    //     return null;
    // }
    //
    // // create a linear regression classifier with Xy_polynorm data
    // LinearRegression linreg = createLinearRegression();
    // linreg.setRidge(lambda);
    // // try {
    // //     linreg.buildClassifier(data);
    // // } catch (Exception e) {
    // //     log.error("FAILED: linear regression threw exception", e);
    // //     //e.printStackTrace();
    // //     return null;
    // // }
    //
    // // initialize the evaluation object
    // Classifier classifier = linreg;
    // int numFolds = data.numInstances();
    // Random random = new Random(0);
    // try {
    //     eval.crossValidateModel(classifier, data, numFolds, random);
    // } catch (Exception e) {
    //     log.error("crossvalidation threw exception", e);
    //     //e.printStackTrace();
    //     return null;
    // }
    //
    // double mse = eval.errorRate();
    // return mse;
}
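The method above computes the leave-one-out mean squared error for a single ridge value. A hypothetical companion helper (not part of RegressionUtils.java; the names LambdaSearchSketch and pickBestLambda are invented for illustration) sketching how that error could be swept over candidate lambdas, which is the role findBestRegularizationParameter plays in the next example:

import java.util.List;

import weka.core.Instances;

import edu.utexas.cs.tactex.utils.RegressionUtils;

public class LambdaSearchSketch {

    // Evaluate each candidate ridge value with leave-one-out cross-validation
    // and keep the one with the lowest MSE; returns null if no candidate
    // could be evaluated.
    public static Double pickBestLambda(Instances Xy, List<Double> candidateLambdas) {
        Double bestLambda = null;
        double bestMse = Double.MAX_VALUE;
        for (double lambda : candidateLambdas) {
            Double mse = RegressionUtils.leaveOneOutErrorLinRegLambda(lambda, Xy);
            if (mse != null && mse < bestMse) {
                bestMse = mse;
                bestLambda = lambda;
            }
        }
        return bestLambda;
    }
}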
From source file:edu.utexas.cs.tactex.utils.RegressionUtils.java
License:Open Source License
public static WekaLinRegData createWekaLinRegData(int timeslot, Instances X, Double[] yvals,
        ArrayList<Double> candidateLambdas) throws Exception {
    WekaLinRegData result;

    // normalize
    Standardize standardize = new Standardize();
    try {
        standardize.setInputFormat(X);
    } catch (Exception e) {
        log.error("PolyRegCust.predictNumSubs() data standardizing exception", e);
        throw e;
    }
    Instances nrmFeatures = RegressionUtils.featureNormalize(X, standardize);
    log.info("normalized features " + nrmFeatures);

    // add y to X since this is what weka expects
    Instances Xy = RegressionUtils.addYforWeka(nrmFeatures, yvals);

    // run cross validation for lambda
    Double bestLambda = findBestRegularizationParameter(Xy, candidateLambdas);
    if (null == bestLambda) {
        String message = "best regularization parameter is null, cannot predict";
        log.error(message);
        throw new Exception(message);
    }

    // run linear regression
    LinearRegression linearRegression = RegressionUtils.createLinearRegression();
    linearRegression.setRidge(bestLambda);
    try {
        linearRegression.buildClassifier(Xy);
        log.info("theta " + Arrays.toString(linearRegression.coefficients()));
    } catch (Exception e) {
        log.error("PolyRegCust.predictNumSubs() buildClassifier exception", e);
        throw e;
    }

    result = new WekaLinRegData(standardize, linearRegression, timeslot);
    return result;
}
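A hedged usage sketch of createWekaLinRegData, relying only on the signature shown above; the feature matrix, target values, candidate lambdas, and timeslot are toy values, and the result is held as Object because WekaLinRegData's package is not shown in this listing:

import java.util.ArrayList;
import java.util.Arrays;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instances;

import edu.utexas.cs.tactex.utils.RegressionUtils;

public class CreateWekaLinRegDataSketch {
    public static void main(String[] args) throws Exception {
        // Feature matrix X as a Weka dataset with two numeric attributes.
        ArrayList<Attribute> attrs = new ArrayList<Attribute>();
        attrs.add(new Attribute("f1"));
        attrs.add(new Attribute("f2"));
        Instances X = new Instances("features", attrs, 5);
        double[][] rows = { { 1.0, 2.0 }, { 2.0, 1.5 }, { 3.0, 1.0 }, { 4.0, 0.5 }, { 5.0, 0.0 } };
        for (double[] row : rows) {
            X.add(new DenseInstance(1.0, row));
        }

        // One target value per row of X, plus the ridge values to cross-validate over.
        Double[] yvals = { 1.1, 2.0, 3.2, 3.9, 5.1 };
        ArrayList<Double> candidateLambdas = new ArrayList<Double>(
                Arrays.asList(1e-3, 1e-2, 1e-1, 1.0));

        int timeslot = 360;
        // createWekaLinRegData standardizes X, appends y, picks the best lambda,
        // sets it via setRidge(), and fits the final model.
        Object model = RegressionUtils.createWekaLinRegData(timeslot, X, yvals, candidateLambdas);
        System.out.println("built WekaLinRegData for timeslot " + timeslot + ": " + model);
    }
}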