Example usage for java.lang.Integer.doubleValue()

List of usage examples for java.lang.Integer.doubleValue()

Introduction

On this page you can find example usage of java.lang.Integer.doubleValue().

Prototype

public double doubleValue() 

Document

Returns the value of this Integer as a double after a widening primitive conversion.
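
For orientation, here is a minimal, self-contained sketch (not taken from any of the projects below; the class name DoubleValueDemo is purely illustrative) showing the widening conversion performed by doubleValue() next to the equivalent implicit cast after auto-unboxing:

public class DoubleValueDemo {
    public static void main(String[] args) {
        Integer boxed = Integer.valueOf(41);

        // Widening primitive conversion: int -> double, exact for every int value.
        double viaMethod = boxed.doubleValue();

        // Equivalent result: auto-unboxing to int, then an implicit widening cast to double.
        double viaCast = boxed;

        System.out.println(viaMethod); // 41.0
        System.out.println(viaCast);   // 41.0
    }
}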

Usage

From source file:org.killbill.billing.plugin.analytics.json.XY.java

public XY(final String x, final Integer y) {
    this(x, new Float(y.doubleValue()));
}
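
Note that the Float(double) constructor used above narrows the value back down to float and has been deprecated since Java 9. Assuming the delegated constructor accepts a Float, an equivalent sketch that avoids the deprecated constructor would be:

public XY(final String x, final Integer y) {
    this(x, Float.valueOf(y.floatValue()));
}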

From source file:cc.kave.commons.pointsto.evaluation.PointsToSetEvaluation.java

public void run(Path contextsDir) throws IOException {
    StatementCounterVisitor stmtCounterVisitor = new StatementCounterVisitor();
    List<Context> contexts = getSamples(contextsDir).stream()
            .filter(cxt -> cxt.getSST().accept(stmtCounterVisitor, null) > 0).collect(Collectors.toList());
    log("Using %d contexts for evaluation\n", contexts.size());

    PointsToUsageExtractor extractor = new PointsToUsageExtractor();
    for (Context context : contexts) {
        PointstoSetSizeAnalysis analysis = new PointstoSetSizeAnalysis();
        extractor.extract(analysis.compute(context));
        results.addAll(analysis.getSetSizes());
    }

    DescriptiveStatistics statistics = new DescriptiveStatistics();
    for (Integer setSize : results) {
        statistics.addValue(setSize.doubleValue());
    }
    log("mean: %.2f\n", statistics.getMean());
    log("stddev: %.2f\n", statistics.getStandardDeviation());
    log("min/max: %.2f/%.2f\n", statistics.getMin(), statistics.getMax());
}

From source file:org.apache.hadoop.hive.ql.udf.UDFToDouble.java

public Double evaluate(Integer i) {
    if (i == null) {
        return null;
    } else {
        return Double.valueOf(i.doubleValue());
    }
}

From source file:org.eda.fpsrv.StrTable.java

protected Integer parseInt(String sval, Integer defVal) {
    try {
        return Integer.parseInt(sval);
    } catch (Exception e) {
        return parseDouble(sval, defVal.doubleValue()).intValue();
    }
}

From source file:org.apache.drill.exec.physical.impl.orderedpartitioner.TestOrderedPartitionExchange.java

/**
 * Starts two drillbits and runs a physical plan with a Mock scan, project, OrderedPartitionExchange, Union Exchange,
 * and sort. The final sort is done first on the partition column, and verifies that the partitions are correct, in that
 * all rows in partition 0 should come in the sort order before any row in partition 1, etc. Also verifies that the standard
 * deviation of the size of the partitions is less than one tenth the mean size of the partitions, because we expect all
 * the partitions to be roughly equal in size.
 * @throws Exception
 */
@Test
public void twoBitTwoExchangeRun() throws Exception {
    RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();

    try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
            Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
            DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {

        bit1.run();
        bit2.run();
        client.connect();
        List<QueryDataBatch> results = client.runQuery(
                org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
                Files.toString(FileUtils.getResourceAsFile("/sender/ordered_exchange.json"), Charsets.UTF_8));
        int count = 0;
        List<Integer> partitionRecordCounts = Lists.newArrayList();
        for (QueryDataBatch b : results) {
            if (b.getData() != null) {
                int rows = b.getHeader().getRowCount();
                count += rows;
                RecordBatchLoader loader = new RecordBatchLoader(
                        new BootStrapContext(DrillConfig.create()).getAllocator());
                loader.load(b.getHeader().getDef(), b.getData());
                BigIntVector vv1 = (BigIntVector) loader.getValueAccessorById(BigIntVector.class, loader
                        .getValueVectorId(new SchemaPath("col1", ExpressionPosition.UNKNOWN)).getFieldIds())
                        .getValueVector();
                Float8Vector vv2 = (Float8Vector) loader.getValueAccessorById(Float8Vector.class, loader
                        .getValueVectorId(new SchemaPath("col2", ExpressionPosition.UNKNOWN)).getFieldIds())
                        .getValueVector();
                IntVector pVector = (IntVector) loader.getValueAccessorById(IntVector.class,
                        loader.getValueVectorId(new SchemaPath("partition", ExpressionPosition.UNKNOWN))
                                .getFieldIds())
                        .getValueVector();
                long previous1 = Long.MIN_VALUE;
                double previous2 = Double.MIN_VALUE;
                int partPrevious = -1;
                long current1 = Long.MIN_VALUE;
                double current2 = Double.MIN_VALUE;
                int partCurrent = -1;
                int partitionRecordCount = 0;
                for (int i = 0; i < rows; i++) {
                    previous1 = current1;
                    previous2 = current2;
                    partPrevious = partCurrent;
                    current1 = vv1.getAccessor().get(i);
                    current2 = vv2.getAccessor().get(i);
                    partCurrent = pVector.getAccessor().get(i);
                    Assert.assertTrue(current1 >= previous1);
                    if (current1 == previous1) {
                        Assert.assertTrue(current2 <= previous2);
                    }
                    if (partCurrent == partPrevious || partPrevious == -1) {
                        partitionRecordCount++;
                    } else {
                        partitionRecordCounts.add(partitionRecordCount);
                        partitionRecordCount = 0;
                    }
                }
                partitionRecordCounts.add(partitionRecordCount);
                loader.clear();
            }

            b.release();
        }
        double[] values = new double[partitionRecordCounts.size()];
        int i = 0;
        for (Integer rc : partitionRecordCounts) {
            values[i++] = rc.doubleValue();
        }
        StandardDeviation stdDev = new StandardDeviation();
        Mean mean = new Mean();
        double std = stdDev.evaluate(values);
        double m = mean.evaluate(values);
        System.out.println("mean: " + m + " std dev: " + std);
        //Assert.assertTrue(std < 0.1 * m);
        assertEquals(31000, count);
    }
}

From source file:org.sonar.plugins.flex.flexmetrics.FlexMetricsParser.java

private void processFunctions(SMInputCursor functionsCursor) throws XMLStreamException {
    Map<String, Integer> ccnCountPerClass = new HashMap<String, Integer>();
    Map<String, RangeDistributionBuilder> ccnDistributionPerClass = new HashMap<String, RangeDistributionBuilder>();

    while (functionsCursor.getNext() != null) {
        Map<String, String> values = processChildren(functionsCursor);
        String fullFunctionName = values.get("name");
        Integer ccnForFunction = Integer.valueOf(values.get("ccn"));
        String packageAndClassName = getPackageAndClassFromFunction(fullFunctionName);
        addUpComplexityToClass(ccnCountPerClass, ccnForFunction, packageAndClassName);
        addUpComplexityToClassDistribution(ccnDistributionPerClass, ccnForFunction, packageAndClassName);
    }

    for (Map.Entry<String, Integer> entry : ccnCountPerClass.entrySet()) {
        String fullname = entry.getKey();
        Integer ccnForClass = entry.getValue();
        context.saveMeasure(new FlexFile(fullname), CoreMetrics.COMPLEXITY, ccnForClass.doubleValue());
        RangeDistributionBuilder ccnDistribution = new RangeDistributionBuilder(
                CoreMetrics.CLASS_COMPLEXITY_DISTRIBUTION, CLASSES_DISTRIB_BOTTOM_LIMITS);
        ccnDistribution.add(ccnForClass.doubleValue());
        context.saveMeasure(new FlexFile(fullname),
                ccnDistribution.build().setPersistenceMode(PersistenceMode.MEMORY));
    }
    for (Map.Entry<String, RangeDistributionBuilder> entry : ccnDistributionPerClass.entrySet()) {
        String fullname = entry.getKey();
        RangeDistributionBuilder ccnDistributionForClass = entry.getValue();
        context.saveMeasure(new FlexFile(fullname),
                ccnDistributionForClass.build().setPersistenceMode(PersistenceMode.MEMORY));
    }
}

From source file:net.sf.jdmf.algorithms.classification.BayesAlgorithm.java

/**
 * Calculates the likelihood for a single attribute value.
 */
protected Double calculateAttributeValueLikelihood(Attribute attribute, String decisionName,
        Comparable decisionValue, Integer decisionValueCount, Instance instanceToBeClassified,
        List<Instance> instances) {
    if (attribute.getType().equals(AttributeType.NUMERIC)) {
        List<Comparable> selectedValues = attributeConverter.groupFirstAttributeValuesBySecondAttributeValue(
                attribute.getName(), decisionName, decisionValue, instances);

        return mathCalculator.calculateNormalDistribution(
                selectedValues.toArray(new Comparable[selectedValues.size()]),
                (Double) instanceToBeClassified.getValue(attribute.getName()));
    }

    String attributeName = attribute.getName();

    Rule rule = generateRule(attributeName, instanceToBeClassified.getValue(attributeName), decisionName,
            decisionValue);

    Integer ruleCoverage = calculateCoverage(rule, instances);

    return ruleCoverage.doubleValue() / decisionValueCount;
}
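
The doubleValue() call on the numerator is what forces floating-point division here: dividing the two values directly would auto-unbox both to int and truncate the result. A small illustration with hypothetical counts:

    Integer ruleCoverage = Integer.valueOf(3);
    Integer decisionValueCount = Integer.valueOf(4);
    System.out.println(ruleCoverage / decisionValueCount);               // prints 0 (int division)
    System.out.println(ruleCoverage.doubleValue() / decisionValueCount); // prints 0.75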

From source file:com.autentia.wuija.widget.property.Variant.java

public void setValue(Integer asInteger) {
    Assert.state(type == Type.INTEGER);
    this.asNumber = Double.valueOf(asInteger.doubleValue());
}

From source file:com.autentia.wuija.widget.property.Variant.java

public void setAsInteger(Integer asInteger) {
    Assert.state(type == Type.INTEGER);
    this.asNumber = Double.valueOf(asInteger.doubleValue());
}

From source file:magicdeckmanager.card.CardManager.java

public ObservableList<PieChart.Data> getManaDistPieChartData(DeckData deckData) {
    ObservableList<PieChart.Data> pieChartData = FXCollections.observableArrayList();
    final List<String> main = deckData.getMain();
    Map<Color, Integer> colorDistribution = new HashMap<>();
    Integer totalManaCost = 0;
    for (String cardName : main) {
        Card card = getCardFromName(cardName);
        if (!card.isLand()) {
            totalManaCost += card.cmc;
            final ManaCost manaCost = card.getManaCost();
            final List<ManaPart> cost = manaCost.getCost();
            addManaPartsToColorDistribution(cost, colorDistribution);
        }
    }
    for (Map.Entry<Color, Integer> entrySet : colorDistribution.entrySet()) {
        Color key = entrySet.getKey();
        Integer value = entrySet.getValue();
        double percent = (value.doubleValue() / totalManaCost.doubleValue());
        percent *= 100;
        Integer percentInteger = (int) Math.round(percent);
        final String percentString = key.toString() + " " + percentInteger.toString() + "%";
        pieChartData.add(new PieChart.Data(percentString, value));
    }
    return pieChartData;
}