List of usage examples for java.lang.Double.toString()
public String toString()
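Before the project examples, a minimal sketch of what Double.toString produces, including the special values and the points where it switches to scientific notation; all outputs follow the java.lang.Double Javadoc.

public class DoubleToStringBasics {
    public static void main(String[] args) {
        System.out.println(Double.toString(100.0));          // 100.0  -- always at least one digit after the point
        System.out.println(Double.toString(0.001));          // 0.001
        System.out.println(Double.toString(0.0001));         // 1.0E-4 -- scientific notation below 10^-3
        System.out.println(Double.toString(1.0e7));          // 1.0E7  -- scientific notation from 10^7 upward
        System.out.println(Double.toString(-0.0));           // -0.0
        System.out.println(Double.toString(Double.NaN));     // NaN
        System.out.println(Double.valueOf(3.14).toString()); // 3.14 -- the instance method used in the examples below
    }
}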
From source file: com.nridge.core.base.std.XMLUtl.java

public static void setAttrDoubleValue(Element anElement, String aName, double aValue) {
    Double doubleObject;

    if (StringUtils.isNotEmpty(aName)) {
        doubleObject = aValue;
        anElement.setAttribute(aName, doubleObject.toString());
    }
}
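A minimal sketch of how this helper might be called, assuming the XMLUtl class above is on the classpath and a DOM Document built with the standard JAXP API; the "product" element and "price" attribute names are hypothetical.

import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import com.nridge.core.base.std.XMLUtl;

public class SetAttrDoubleValueDemo {
    public static void main(String[] args) throws Exception {
        // Build an empty DOM document with the standard JAXP API.
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
        Element product = doc.createElement("product");      // hypothetical element name
        XMLUtl.setAttrDoubleValue(product, "price", 19.99);  // stores the attribute as "19.99"
        // Without boxing, the same result: product.setAttribute("price", Double.toString(19.99));
        System.out.println(product.getAttribute("price"));   // prints 19.99
    }
}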
From source file: aula1.Aula1.java

public static void calcula(double[] x, String entrada) {
    String[] y = new String[x.length];
    String[] lx = new String[x.length];
    int i = 0;
    for (Double c : x) {
        lx[i] = c.toString();
        i++;
    }
    for (i = 0; i < x.length; i++) {
        try {
            y[i] = conversor(entrada, lx[i]);
        } catch (Exception ex) {
            System.out.println("Erro: " + ex.getMessage());
        }
        System.out.println("F(" + x[i] + ") = " + y[i]);
    }
    double[] ly = new double[y.length];
    i = 0;
    for (String c : y) {
        ly[i] = Double.parseDouble(c);
        i++;
    }
    geraGrafico(x, ly);
}
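The snippet above round-trips values through Double.toString and Double.parseDouble. A minimal sketch showing that this round trip is exact for finite doubles, because toString emits enough digits to uniquely identify the value; the sample values are arbitrary.

public class RoundTripDemo {
    public static void main(String[] args) {
        double[] samples = { 0.1, 1.0 / 3.0, 12345.6789, 4.9e-324 /* Double.MIN_VALUE */ };
        for (double d : samples) {
            String s = Double.toString(d);
            // parseDouble(toString(d)) == d holds for every finite double value.
            System.out.println(s + " round-trips exactly: " + (Double.parseDouble(s) == d));
        }
    }
}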
From source file: org.dkpro.similarity.experiments.sts2013baseline.util.Evaluator.java

public static void runEvaluationMetric(Mode mode, EvaluationMetric metric, Dataset... datasets)
        throws IOException {
    StringBuilder sb = new StringBuilder();

    // Compute Pearson correlation for the specified datasets
    for (Dataset dataset : datasets) {
        computePearsonCorrelation(mode, dataset);
    }

    if (metric == PearsonAll) {
        List<Double> concatExp = new ArrayList<Double>();
        List<Double> concatGS = new ArrayList<Double>();

        // Concat the scores
        for (Dataset dataset : datasets) {
            File expScoresFile = new File(
                    OUTPUT_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + ".csv");

            List<String> lines = FileUtils.readLines(expScoresFile);
            for (String line : lines) {
                concatExp.add(Double.parseDouble(line));
            }
        }

        // Concat the gold standard
        for (Dataset dataset : datasets) {
            String gsScoresFilePath = GOLDSTANDARD_DIR + "/" + mode.toString().toLowerCase() + "/"
                    + "STS.gs." + dataset.toString() + ".txt";

            PathMatchingResourcePatternResolver r = new PathMatchingResourcePatternResolver();
            Resource res = r.getResource(gsScoresFilePath);
            File gsScoresFile = res.getFile();

            List<String> lines = FileUtils.readLines(gsScoresFile);
            for (String line : lines) {
                concatGS.add(Double.parseDouble(line));
            }
        }

        double[] concatExpArray = ArrayUtils.toPrimitive(concatExp.toArray(new Double[concatExp.size()]));
        double[] concatGSArray = ArrayUtils.toPrimitive(concatGS.toArray(new Double[concatGS.size()]));

        PearsonsCorrelation pearson = new PearsonsCorrelation();
        Double correl = pearson.correlation(concatExpArray, concatGSArray);

        sb.append(correl.toString());
    } else if (metric == PearsonMean) {
        List<Double> scores = new ArrayList<Double>();

        for (Dataset dataset : datasets) {
            File resultFile = new File(
                    OUTPUT_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + ".txt");
            double score = Double.parseDouble(FileUtils.readFileToString(resultFile));
            scores.add(score);
        }

        double mean = 0.0;
        for (Double score : scores) {
            mean += score;
        }
        mean = mean / scores.size();

        sb.append(mean);
    }

    FileUtils.writeStringToFile(
            new File(OUTPUT_DIR + "/" + mode.toString().toLowerCase() + "/" + metric.toString() + ".txt"),
            sb.toString());
}
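A stripped-down sketch of the PearsonAll branch above, assuming Apache Commons Lang 3 (ArrayUtils) and Commons Math 3 (PearsonsCorrelation) on the classpath; the hard-coded score lists stand in for the values the method parses from the CSV and gold-standard files.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.math3.stat.correlation.PearsonsCorrelation;

public class PearsonAllSketch {
    public static void main(String[] args) {
        // Placeholder scores; in the method above they are read line by line from files.
        List<Double> exp = new ArrayList<Double>(Arrays.asList(1.0, 2.5, 3.0, 4.8));
        List<Double> gs = new ArrayList<Double>(Arrays.asList(1.2, 2.4, 2.9, 5.0));

        // Unbox the lists into the primitive arrays Commons Math expects.
        double[] expArray = ArrayUtils.toPrimitive(exp.toArray(new Double[exp.size()]));
        double[] gsArray = ArrayUtils.toPrimitive(gs.toArray(new Double[gs.size()]));

        Double correl = new PearsonsCorrelation().correlation(expArray, gsArray);
        System.out.println(correl.toString()); // prints the correlation, roughly 0.99 for these values
    }
}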
From source file: org.dkpro.similarity.experiments.sts2013baseline.util.Evaluator.java

public static void runLinearRegressionCV(Mode mode, Dataset... datasets) throws Exception {
    for (Dataset dataset : datasets) {
        // Set parameters
        int folds = 10;
        Classifier baseClassifier = new LinearRegression();

        // Set up the random number generator
        long seed = new Date().getTime();
        Random random = new Random(seed);

        // Add IDs to the instances
        AddID.main(new String[] { "-i",
                MODELS_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + ".arff", "-o",
                MODELS_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString()
                        + "-plusIDs.arff" });

        String location = MODELS_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString()
                + "-plusIDs.arff";

        Instances data = DataSource.read(location);

        if (data == null) {
            throw new IOException("Could not load data from: " + location);
        }

        data.setClassIndex(data.numAttributes() - 1);

        // Instantiate the Remove filter
        Remove removeIDFilter = new Remove();
        removeIDFilter.setAttributeIndices("first");

        // Randomize the data
        data.randomize(random);

        // Perform cross-validation
        Instances predictedData = null;
        Evaluation eval = new Evaluation(data);

        for (int n = 0; n < folds; n++) {
            Instances train = data.trainCV(folds, n, random);
            Instances test = data.testCV(folds, n);

            // Apply log filter
            Filter logFilter = new LogFilter();
            logFilter.setInputFormat(train);
            train = Filter.useFilter(train, logFilter);
            logFilter.setInputFormat(test);
            test = Filter.useFilter(test, logFilter);

            // Copy the classifier
            Classifier classifier = AbstractClassifier.makeCopy(baseClassifier);

            // Instantiate the FilteredClassifier
            FilteredClassifier filteredClassifier = new FilteredClassifier();
            filteredClassifier.setFilter(removeIDFilter);
            filteredClassifier.setClassifier(classifier);

            // Build the classifier
            filteredClassifier.buildClassifier(train);

            // Evaluate
            eval.evaluateModel(classifier, test);

            // Add predictions
            AddClassification filter = new AddClassification();
            filter.setClassifier(classifier);
            filter.setOutputClassification(true);
            filter.setOutputDistribution(false);
            filter.setOutputErrorFlag(true);
            filter.setInputFormat(train);
            Filter.useFilter(train, filter); // trains the classifier

            Instances pred = Filter.useFilter(test, filter); // performs predictions on test set
            if (predictedData == null) {
                predictedData = new Instances(pred, 0);
            }
            for (int j = 0; j < pred.numInstances(); j++) {
                predictedData.add(pred.instance(j));
            }
        }

        // Prepare output scores
        double[] scores = new double[predictedData.numInstances()];

        for (Instance predInst : predictedData) {
            int id = new Double(predInst.value(predInst.attribute(0))).intValue() - 1;

            int valueIdx = predictedData.numAttributes() - 2;

            double value = predInst.value(predInst.attribute(valueIdx));

            scores[id] = value;

            // Limit to interval [0;5]
            if (scores[id] > 5.0) {
                scores[id] = 5.0;
            }
            if (scores[id] < 0.0) {
                scores[id] = 0.0;
            }
        }

        // Output
        StringBuilder sb = new StringBuilder();
        for (Double score : scores) {
            sb.append(score.toString() + LF);
        }

        FileUtils.writeStringToFile(
                new File(OUTPUT_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + ".csv"),
                sb.toString());
    }
}
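A short sketch of just the output step above: clamping predictions to the interval [0;5] and writing one Double.toString value per line. The predictions array is hypothetical, and Math.max/Math.min stand in for the two if-blocks in the original.

public class ScoreOutputSketch {
    public static void main(String[] args) {
        // Hypothetical raw predictions; in the method above they come from Weka instances.
        double[] predictions = { -0.3, 2.74, 5.62, 4.0 };
        String lf = System.lineSeparator();

        StringBuilder sb = new StringBuilder();
        for (double p : predictions) {
            double clamped = Math.min(5.0, Math.max(0.0, p)); // limit to the interval [0;5]
            sb.append(Double.toString(clamped)).append(lf);   // same text the boxed score.toString() produces
        }
        System.out.print(sb); // 0.0, 2.74, 5.0, 4.0 -- one value per line
    }
}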
From source file: com.nridge.core.base.std.XMLUtl.java

public static void makeElemDoubleValue(Document aDocument, Element anElement, String aName, double aValue) {
    Element subElement;
    Double doubleObject;

    if (StringUtils.isNotEmpty(aName)) {
        doubleObject = aValue;
        subElement = aDocument.createElement(aName);
        subElement.appendChild(aDocument.createTextNode(doubleObject.toString()));
        anElement.appendChild(subElement);
    }
}
From source file: com.splicemachine.mrio.api.hive.SMSerDe.java

/**
 * Replace with Lazy eventually
 */
private static Object hiveTypeToObject(String hiveType, DataValueDescriptor dvd) throws SerDeException {
    final String lctype = trim(hiveType.toLowerCase());

    try {
        switch (lctype) {
        case "string":
        case "varchar":
            HiveVarchar hiveVarchar = null;
            String s = dvd.getString();
            if (s != null) {
                hiveVarchar = new HiveVarchar();
                hiveVarchar.setValue(s);
            }
            return hiveVarchar;
        case "char":
            HiveChar hiveChar = null;
            s = dvd.getString();
            if (s != null) {
                hiveChar = new HiveChar();
                hiveChar.setValue(s);
            }
            return hiveChar;
        case "float":
            return dvd.getFloat();
        case "double":
            return dvd.getDouble();
        case "decimal":
            Double d = dvd.getDouble();
            HiveDecimal hiveDecimal = HiveDecimal.create(d.toString());
            return hiveDecimal;
        case "boolean":
            return dvd.getBoolean();
        case "tinyint":
            return dvd.getByte();
        case "int":
            return dvd.getInt();
        case "smallint":
            return dvd.getShort();
        case "bigint":
            return dvd.getLong();
        case "timestamp":
            return dvd.getTimestamp(null);
        case "date":
            return dvd.getDate(null);
        case "binary":
            return dvd.getBytes();
        default:
            throw new SerDeException("Unrecognized column type: " + hiveType);
        }
    } catch (StandardException se) {
        throw new SerDeException(se);
    }
}
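The "decimal" branch above goes double -> Double.toString -> decimal instead of feeding the raw double into a decimal constructor. A minimal plain-Java sketch of why that string detour matters, using java.math.BigDecimal; treating HiveDecimal.create(String) as parsing the decimal text the same way is an assumption here.

import java.math.BigDecimal;

public class DecimalFromDoubleDemo {
    public static void main(String[] args) {
        double d = 0.1;

        // Constructing directly from the double exposes its binary representation.
        System.out.println(new BigDecimal(d));
        // 0.1000000000000000055511151231257827021181583404541015625

        // Going through Double.toString keeps the short decimal form the user expects.
        System.out.println(new BigDecimal(Double.toString(d))); // 0.1
        System.out.println(BigDecimal.valueOf(d));              // 0.1 (uses Double.toString internally)
    }
}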
From source file: org.dkpro.similarity.experiments.sts2013.util.Evaluator.java

public static void runLinearRegressionCV(Mode mode, Dataset... datasets) throws Exception {
    for (Dataset dataset : datasets) {
        // Set parameters
        int folds = 10;
        Classifier baseClassifier = new LinearRegression();

        // Set up the random number generator
        long seed = new Date().getTime();
        Random random = new Random(seed);

        // Add IDs to the instances
        AddID.main(new String[] { "-i",
                MODELS_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + ".arff", "-o",
                MODELS_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString()
                        + "-plusIDs.arff" });
        Instances data = DataSource.read(
                MODELS_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + "-plusIDs.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Instantiate the Remove filter
        Remove removeIDFilter = new Remove();
        removeIDFilter.setAttributeIndices("first");

        // Randomize the data
        data.randomize(random);

        // Perform cross-validation
        Instances predictedData = null;
        Evaluation eval = new Evaluation(data);

        for (int n = 0; n < folds; n++) {
            Instances train = data.trainCV(folds, n, random);
            Instances test = data.testCV(folds, n);

            // Apply log filter
            Filter logFilter = new LogFilter();
            logFilter.setInputFormat(train);
            train = Filter.useFilter(train, logFilter);
            logFilter.setInputFormat(test);
            test = Filter.useFilter(test, logFilter);

            // Copy the classifier
            Classifier classifier = AbstractClassifier.makeCopy(baseClassifier);

            // Instantiate the FilteredClassifier
            FilteredClassifier filteredClassifier = new FilteredClassifier();
            filteredClassifier.setFilter(removeIDFilter);
            filteredClassifier.setClassifier(classifier);

            // Build the classifier
            filteredClassifier.buildClassifier(train);

            // Evaluate
            eval.evaluateModel(classifier, test);

            // Add predictions
            AddClassification filter = new AddClassification();
            filter.setClassifier(classifier);
            filter.setOutputClassification(true);
            filter.setOutputDistribution(false);
            filter.setOutputErrorFlag(true);
            filter.setInputFormat(train);
            Filter.useFilter(train, filter); // trains the classifier

            Instances pred = Filter.useFilter(test, filter); // performs predictions on test set
            if (predictedData == null) {
                predictedData = new Instances(pred, 0);
            }
            for (int j = 0; j < pred.numInstances(); j++) {
                predictedData.add(pred.instance(j));
            }
        }

        // Prepare output scores
        double[] scores = new double[predictedData.numInstances()];

        for (Instance predInst : predictedData) {
            int id = new Double(predInst.value(predInst.attribute(0))).intValue() - 1;

            int valueIdx = predictedData.numAttributes() - 2;

            double value = predInst.value(predInst.attribute(valueIdx));

            scores[id] = value;

            // Limit to interval [0;5]
            if (scores[id] > 5.0) {
                scores[id] = 5.0;
            }
            if (scores[id] < 0.0) {
                scores[id] = 0.0;
            }
        }

        // Output
        StringBuilder sb = new StringBuilder();
        for (Double score : scores) {
            sb.append(score.toString() + LF);
        }

        FileUtils.writeStringToFile(
                new File(OUTPUT_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + ".csv"),
                sb.toString());
    }
}
From source file: cn.afterturn.easypoi.util.PoiPublicUtil.java

/**
 * double to String
 * @param value
 * @return
 */
public static String doubleToString(Double value) {
    String temp = value.toString();
    if (temp.contains("E")) {
        BigDecimal bigDecimal = new BigDecimal(temp);
        temp = bigDecimal.toPlainString();
    }
    return temp;
}
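Double.toString switches to scientific notation for magnitudes of 10^7 and above or below 10^-3, which is exactly what the helper above undoes with BigDecimal.toPlainString(). A minimal sketch of that behaviour with arbitrary sample values:

import java.math.BigDecimal;

public class DoubleToPlainStringDemo {
    public static void main(String[] args) {
        double big = 12345678.9;  // magnitude >= 10^7: toString uses scientific notation
        double small = 0.0000123; // magnitude < 10^-3: toString uses scientific notation

        System.out.println(Double.toString(big));                                  // 1.23456789E7
        System.out.println(new BigDecimal(Double.toString(big)).toPlainString());  // 12345678.9

        System.out.println(Double.toString(small));                                 // 1.23E-5
        System.out.println(new BigDecimal(Double.toString(small)).toPlainString()); // 0.0000123
    }
}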
From source file: org.dkpro.similarity.experiments.sts2013.util.Evaluator.java

@SuppressWarnings("unchecked")
public static void runEvaluationMetric(Mode mode, EvaluationMetric metric, Dataset... datasets)
        throws IOException {
    StringBuilder sb = new StringBuilder();

    // Compute Pearson correlation for the specified datasets
    for (Dataset dataset : datasets) {
        computePearsonCorrelation(mode, dataset);
    }

    if (metric == PearsonAll) {
        List<Double> concatExp = new ArrayList<Double>();
        List<Double> concatGS = new ArrayList<Double>();

        // Concat the scores
        for (Dataset dataset : datasets) {
            File expScoresFile = new File(
                    OUTPUT_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + ".csv");

            List<String> lines = FileUtils.readLines(expScoresFile);
            for (String line : lines) {
                concatExp.add(Double.parseDouble(line));
            }
        }

        // Concat the gold standard
        for (Dataset dataset : datasets) {
            String gsScoresFilePath = GOLDSTANDARD_DIR + "/" + mode.toString().toLowerCase() + "/"
                    + "STS.gs." + dataset.toString() + ".txt";

            PathMatchingResourcePatternResolver r = new PathMatchingResourcePatternResolver();
            Resource res = r.getResource(gsScoresFilePath);
            File gsScoresFile = res.getFile();

            List<String> lines = FileUtils.readLines(gsScoresFile);
            for (String line : lines) {
                concatGS.add(Double.parseDouble(line));
            }
        }

        double[] concatExpArray = ArrayUtils.toPrimitive(concatExp.toArray(new Double[concatExp.size()]));
        double[] concatGSArray = ArrayUtils.toPrimitive(concatGS.toArray(new Double[concatGS.size()]));

        PearsonsCorrelation pearson = new PearsonsCorrelation();
        Double correl = pearson.correlation(concatExpArray, concatGSArray);

        sb.append(correl.toString());
    } else if (metric == PearsonMean) {
        List<Double> scores = new ArrayList<Double>();

        for (Dataset dataset : datasets) {
            File resultFile = new File(
                    OUTPUT_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + ".txt");
            double score = Double.parseDouble(FileUtils.readFileToString(resultFile));
            scores.add(score);
        }

        double mean = 0.0;
        for (Double score : scores) {
            mean += score;
        }
        mean = mean / scores.size();

        sb.append(mean);
    } else if (metric == PearsonWeightedMean) {
        List<Double> scores = new ArrayList<Double>();
        List<Integer> weights = new ArrayList<Integer>();

        for (Dataset dataset : datasets) {
            File resultFile = new File(
                    OUTPUT_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + ".txt");
            double score = Double.parseDouble(FileUtils.readFileToString(resultFile));

            File scoresFile = new File(
                    OUTPUT_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + ".csv");
            int weight = FileUtils.readLines(scoresFile).size();

            scores.add(score);
            weights.add(weight);
        }

        double mean = 0.0;
        int weightsum = 0;
        for (int i = 0; i < scores.size(); i++) {
            Double score = scores.get(i);
            Integer weight = weights.get(i);

            mean += weight * score;
            weightsum += weight;
        }
        mean = mean / weightsum;

        sb.append(mean);
    }

    FileUtils.writeStringToFile(
            new File(OUTPUT_DIR + "/" + mode.toString().toLowerCase() + "/" + metric.toString() + ".txt"),
            sb.toString());
}
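The PearsonWeightedMean branch above weights each dataset's correlation by the number of lines in its score file. A minimal self-contained sketch of that weighted mean; the per-dataset scores and weights here are hypothetical stand-ins for the values read from disk.

import java.util.Arrays;
import java.util.List;

public class WeightedMeanSketch {
    public static void main(String[] args) {
        // Hypothetical per-dataset Pearson scores and line-count weights.
        List<Double> scores = Arrays.asList(0.72, 0.64, 0.81);
        List<Integer> weights = Arrays.asList(750, 450, 300);

        double mean = 0.0;
        int weightsum = 0;
        for (int i = 0; i < scores.size(); i++) {
            mean += weights.get(i) * scores.get(i); // weight each score by its dataset size
            weightsum += weights.get(i);
        }
        mean = mean / weightsum;

        System.out.println(Double.toString(mean)); // e.g. 0.714 for these values
    }
}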
From source file: fxts.stations.util.UserPreferences.java

public static String getStringValue(Double aValue) {
    return aValue.toString();
}
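This wrapper throws a NullPointerException when aValue is null. A minimal sketch of null-tolerant alternatives using only standard JDK methods; whether the caller actually wants that behaviour is an assumption, not a change to the original class.

import java.util.Objects;

public class NullSafeToStringDemo {
    public static void main(String[] args) {
        Double present = 42.5;
        Double missing = null;

        System.out.println(present.toString());            // 42.5
        System.out.println(Objects.toString(missing, "")); // empty string instead of a NullPointerException
        System.out.println(String.valueOf(missing));        // "null" (string-concatenation-style behaviour)
    }
}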