List of usage examples for java.lang.Long#doubleValue()
public double doubleValue()
From source file:com.github.jessemull.microflex.util.BigDecimalUtil.java
/** * Safely converts a number to a BigInteger. Loss of precision may occur. Throws * an arithmetic exception upon overflow. * @param Number object to parse/*from w w w. j a va 2 s . co m*/ * @return parsed object * @throws ArithmeticException on overflow */ public static BigDecimal toBigDecimal(Number number) { /* Switch on class and convert to BigDecimal */ String type = number.getClass().getSimpleName(); BigDecimal parsed; switch (type) { case "Byte": Byte by = (Byte) number; parsed = new BigDecimal(by.doubleValue()); break; case "Short": Short sh = (Short) number; parsed = new BigDecimal(sh.doubleValue()); break; case "Integer": Integer in = (Integer) number; parsed = new BigDecimal(in.doubleValue()); break; case "Long": Long lo = (Long) number; parsed = new BigDecimal(lo.doubleValue()); break; case "Float": Float fl = (Float) number; parsed = new BigDecimal(fl.doubleValue()); break; case "BigInteger": parsed = new BigDecimal(((BigInteger) number)); break; case "BigDecimal": parsed = (BigDecimal) number; break; case "Double": Double db = (Double) number; parsed = new BigDecimal(db); break; default: throw new IllegalArgumentException( "Invalid type: " + type + "\nData values " + "must extend the abstract Number class."); } return parsed; }
From source file:com.github.jessemull.microflex.util.DoubleUtil.java
/** * Safely converts an object to a double. Loss of precision may occur. Throws * an arithmetic exception upon overflow. * @param Object object to parse/*from w w w. j av a 2s .co m*/ * @return parsed object * @throws ArithmeticException on overflow */ public static double toDouble(Object obj) { /* Switch on class and convert to double */ String type = obj.getClass().getSimpleName(); double parsed; switch (type) { case "Byte": Byte by = (Byte) obj; parsed = by.doubleValue(); break; case "Short": Short sh = (Short) obj; parsed = sh.doubleValue(); break; case "Integer": Integer in = (Integer) obj; parsed = in.doubleValue(); break; case "Long": Long lo = (Long) obj; parsed = lo.doubleValue(); break; case "Float": Float fl = (Float) obj; parsed = fl.doubleValue(); break; case "BigInteger": BigInteger bi = (BigInteger) obj; if (!OverFlowUtil.doubleOverflow(bi)) { throw new ArithmeticException("Overflow casting " + obj + " to a double."); } parsed = bi.doubleValue(); break; case "BigDecimal": BigDecimal bd = (BigDecimal) obj; if (!OverFlowUtil.doubleOverflow(bd)) { throw new ArithmeticException("Overflow casting " + obj + " to a double."); } parsed = bd.doubleValue(); break; case "Double": Double db = (Double) obj; parsed = db.doubleValue(); break; default: throw new IllegalArgumentException( "Invalid type: " + type + "\nData values " + "must extend the abstract Number class."); } return parsed; }
From source file:com.github.jessemull.microflex.util.DoubleUtil.java
/** * Safely converts a number to a double. Loss of precision may occur. Throws * an arithmetic exception upon overflow. * @param Number number to parse/*from ww w .j a v a 2s . com*/ * @return parsed number * @throws ArithmeticException on overflow */ public static double toDouble(Number number) { /* Switch on class and convert to double */ String type = number.getClass().getSimpleName(); double parsed; switch (type) { case "Byte": Byte by = (Byte) number; parsed = by.doubleValue(); break; case "Short": Short sh = (Short) number; parsed = sh.doubleValue(); break; case "Integer": Integer in = (Integer) number; parsed = in.doubleValue(); break; case "Long": Long lo = (Long) number; parsed = lo.doubleValue(); break; case "Float": Float fl = (Float) number; parsed = fl.doubleValue(); break; case "BigInteger": BigInteger bi = (BigInteger) number; if (!OverFlowUtil.doubleOverflow(bi)) { throw new ArithmeticException("Overflow casting " + number + " to a double."); } parsed = bi.doubleValue(); break; case "BigDecimal": BigDecimal bd = (BigDecimal) number; if (!OverFlowUtil.doubleOverflow(bd)) { throw new ArithmeticException("Overflow casting " + number + " to a double."); } parsed = bd.doubleValue(); break; case "Double": Double db = (Double) number; parsed = db.doubleValue(); break; default: throw new IllegalArgumentException( "Invalid type: " + type + "\nData values " + "must extend the abstract Number class."); } return parsed; }
From source file:gov.nih.nci.rembrandt.web.helper.PCAAppletHelper.java
/**
 * Builds the HTML {@code <param>} tags that feed the PCA visualization applet.
 *
 * <p>Retrieves the cached {@code PrincipalComponentAnalysisFinding} for the given
 * session/task, joins each PCA result entry with its validated clinical sample
 * data, groups the resulting 3-D points by disease name, and emits one
 * {@code <param>} tag per point plus per-group label/count/color tags.
 *
 * @param sessionId session key used to look up the finding in the business-tier cache
 * @param taskId    analysis task key; also emitted as the applet "key" parameter
 * @return the concatenated {@code <param>} markup; may be empty or partial if an
 *         exception occurred (all exceptions are silently swallowed — see below)
 */
public static String generateParams(String sessionId, String taskId) {
    String htm = "";
    DecimalFormat nf = new DecimalFormat("0.0000");
    try {
        //retrieve the Finding from cache and build the list of PCAData points
        PrincipalComponentAnalysisFinding principalComponentAnalysisFinding = (PrincipalComponentAnalysisFinding) businessTierCache
                .getSessionFinding(sessionId, taskId);
        ArrayList<PrincipalComponentAnalysisDataPoint> pcaData = new ArrayList();
        Collection<ClinicalFactorType> clinicalFactors = new ArrayList<ClinicalFactorType>();
        List<String> sampleIds = new ArrayList();
        Map<String, PCAresultEntry> pcaResultMap = new HashMap<String, PCAresultEntry>();

        // Index the PCA entries by sample id for the clinical join below.
        List<PCAresultEntry> pcaResults = principalComponentAnalysisFinding.getResultEntries();
        for (PCAresultEntry pcaEntry : pcaResults) {
            sampleIds.add(pcaEntry.getSampleId());
            pcaResultMap.put(pcaEntry.getSampleId(), pcaEntry);
        }

        Collection<SampleResultset> validatedSampleResultset = ClinicalDataValidator
                .getValidatedSampleResultsetsFromSampleIDs(sampleIds, clinicalFactors);
        if (validatedSampleResultset != null) {
            String id;
            PCAresultEntry entry;
            for (SampleResultset rs : validatedSampleResultset) {
                id = rs.getBiospecimen().getSpecimenName();
                // NOTE(review): entry may be null if a validated sample is missing
                // from pcaResultMap; entry.getPc1() below would then NPE and be
                // swallowed by the outer catch — confirm samples always round-trip.
                entry = pcaResultMap.get(id);
                PrincipalComponentAnalysisDataPoint pcaPoint = new PrincipalComponentAnalysisDataPoint(id,
                        entry.getPc1(), entry.getPc2(), entry.getPc3());
                String diseaseName = rs.getDisease().getValueObject();
                if (diseaseName != null) {
                    pcaPoint.setDiseaseName(diseaseName);
                } else {
                    pcaPoint.setDiseaseName(DiseaseType.NON_TUMOR.name());
                }
                GenderDE genderDE = rs.getGenderCode();
                if (genderDE != null) {
                    String gt = genderDE.getValueObject();
                    if (gt != null) {
                        GenderType genderType = GenderType.valueOf(gt);
                        if (genderType != null) {
                            pcaPoint.setGender(genderType);
                        }
                    }
                }
                Long survivalLength = rs.getSurvivalLength();
                if (survivalLength != null) {
                    //survival length is stored in days in the DB so divide by 30 to get the
                    //approx survival in months
                    double survivalInMonths = survivalLength.doubleValue() / 30.0;
                    pcaPoint.setSurvivalInMonths(survivalInMonths);
                }
                pcaData.add(pcaPoint);
            }
        }

        //make a hashmap [key=disease group] holding an ArrayList of double[3] points
        HashMap<String, ArrayList> hm = new HashMap();

        //now we should have a collection of PCADataPts
        double[][] pts = new double[pcaData.size()][3];
        for (int i = 0; i < pcaData.size(); i++) {
            //just create a large 1 set for now
            //are we breaking groups by gender or disease?
            PrincipalComponentAnalysisDataPoint pd = pcaData.get(i);
            pts[i][0] = pd.getPc1value();
            pts[i][1] = pd.getPc2value();
            pts[i][2] = pd.getPc3value();
            ArrayList<double[]> al;
            try {
                if (hm.containsKey(pd.getDiseaseName())) {
                    //already has it, so add this one
                    al = (ArrayList) hm.get(pd.getDiseaseName());
                } else {
                    // NOTE(review): the fresh list put here is immediately
                    // overwritten by the hm.put(..., al) below, so the net effect
                    // is still exactly one list per disease.
                    al = new ArrayList();
                    hm.put(pd.getDiseaseName(), new ArrayList());
                }
                if (!al.contains(pts[i])) {
                    al.add(pts[i]);
                }
                hm.put(pd.getDiseaseName(), al);
            } catch (Exception e) {
                System.out.print(e.toString());
            }
        }

        int r = hm.size();
        if (r == 1) {
            // Intentionally empty: single-group case needs no special handling.
        }

        //hm should now contain a hashmap of all the disease groups
        //generate the param tags
        htm += "<param name=\"key\" value=\"" + taskId + "\" >\n";
        htm += "<param name=\"totalPts\" value=\"" + pts.length + "\" >\n";
        htm += "<param name=\"totalGps\" value=\"" + hm.size() + "\" >\n";
        int ii = 0;
        for (Object k : hm.keySet()) {
            String key = k.toString();
            //for each group
            // NOTE(review): DiseaseType.valueOf(key) never returns null — it throws
            // IllegalArgumentException for unknown names, which the outer catch
            // swallows, truncating the output. Confirm all disease names map to
            // DiseaseType constants.
            Color diseaseColor = Color.GRAY;
            if (DiseaseType.valueOf(key) != null) {
                DiseaseType disease = DiseaseType.valueOf(key);
                diseaseColor = disease.getColor();
            }
            ArrayList<double[]> al = hm.get(key);
            htm += "<param name=\"groupLabel_" + ii + "\" value=\"" + key + "\" >\n";
            htm += "<param name=\"groupCount_" + ii + "\" value=\"" + al.size() + "\" >\n";
            htm += "<param name=\"groupColor_" + ii + "\" value=\"" + diseaseColor.getRGB() + "\" >\n";
            int jj = 0;
            for (double[] d : al) {
                // Each point becomes "pt_<group>_<index>" with 4-decimal coordinates.
                String comm = nf.format(d[0]) + "," + nf.format(d[1]) + "," + nf.format(d[2]);
                String h = "<param name=\"pt_" + ii + "_" + jj + "\" value=\"" + comm + "\">\n";
                htm += h;
                jj++;
            }
            ii++;
        }
        /*
        //for bulk rendering
        for(int i=0; i<pts.length; i++) {
            String comm = String.valueOf(pts[i][0]) + "," + String.valueOf(pts[i][1]) + "," + String.valueOf(pts[i][2]);
            String h = "<param name=\"pt_"+i+"\" value=\""+ comm +"\">\n";
            //htm += h;
        }
        */
    } //try
    catch (Exception e) {
        // NOTE(review): every failure is silently swallowed and a partial or empty
        // string is returned to the caller; consider at least logging here.
    }
    return htm;
}
From source file:org.opennms.features.newts.converter.eventd.EventdStresser.java
private static void systemReport(long beginMillis, int trapsSent, int finalEventCount) { System.out.println(" Traps sent: " + trapsSent); System.out.println("Events persisted DB: " + finalEventCount); long totalMillis = Calendar.getInstance().getTimeInMillis() - beginMillis; // long processingMillis = totalMillis - m_sleepMillis; // Long processingSeconds = processingMillis/1000; Long totalSeconds = totalMillis / 1000L; System.out.println("Total Elapsed time (secs): " + totalSeconds); System.out.println("Events per second (persisted): " + finalEventCount / totalSeconds.doubleValue()); System.out.println();/*from w w w . j a v a2s . com*/ }
From source file:org.apache.storm.loadgen.CaptureLoad.java
/**
 * Captures the structure and observed runtime statistics of a running topology
 * into a {@code TopologyLoadConf} so an equivalent synthetic load can be
 * replayed later.
 *
 * <p>Collects: the topology conf (only {@code IMPORTANT_CONF_KEYS}), the bolt
 * and spout definitions with their streams and groupings, per-component
 * CPU/memory resources, per-input-stream latency stats, and per-output-stream
 * emit rates derived from executor stats and uptime.
 *
 * @param client          Nimbus client used for all topology queries
 * @param topologySummary summary identifying the topology to capture
 * @return the captured load configuration
 * @throws Exception if any Nimbus/Thrift call fails
 */
static TopologyLoadConf captureTopology(Nimbus.Iface client, TopologySummary topologySummary) throws Exception {
    String topologyName = topologySummary.get_name();
    LOG.info("Capturing {}...", topologyName);
    String topologyId = topologySummary.get_id();
    TopologyInfo info = client.getTopologyInfo(topologyId);
    TopologyPageInfo tpinfo = client.getTopologyPageInfo(topologyId, ":all-time", false);
    @SuppressWarnings("checkstyle:VariableDeclarationUsageDistance")
    StormTopology topo = client.getUserTopology(topologyId);
    //Done capturing topology information...

    // Keep only the conf keys that matter for load replay.
    Map<String, Object> savedTopoConf = new HashMap<>();
    Map<String, Object> topoConf = (Map<String, Object>) JSONValue.parse(client.getTopologyConf(topologyId));
    for (String key : TopologyLoadConf.IMPORTANT_CONF_KEYS) {
        Object o = topoConf.get(key);
        if (o != null) {
            savedTopoConf.put(key, o);
            LOG.info("with config {}: {}", key, o);
        }
    }

    //Lets use the number of actually scheduled workers as a way to bridge RAS and non-RAS
    int numWorkers = tpinfo.get_num_workers();
    if (savedTopoConf.containsKey(Config.TOPOLOGY_WORKERS)) {
        numWorkers = Math.max(numWorkers, ((Number) savedTopoConf.get(Config.TOPOLOGY_WORKERS)).intValue());
    }
    savedTopoConf.put(Config.TOPOLOGY_WORKERS, numWorkers);

    Map<String, LoadCompConf.Builder> boltBuilders = new HashMap<>();
    Map<String, LoadCompConf.Builder> spoutBuilders = new HashMap<>();
    List<InputStream.Builder> inputStreams = new ArrayList<>();
    Map<GlobalStreamId, OutputStream.Builder> outStreams = new HashMap<>();

    //Bolts: record each bolt's inputs (stream + grouping), outputs, and parallelism.
    if (topo.get_bolts() != null) {
        for (Map.Entry<String, Bolt> boltSpec : topo.get_bolts().entrySet()) {
            String boltComp = boltSpec.getKey();
            LOG.info("Found bolt {}...", boltComp);
            Bolt bolt = boltSpec.getValue();
            ComponentCommon common = bolt.get_common();
            Map<GlobalStreamId, Grouping> inputs = common.get_inputs();
            if (inputs != null) {
                for (Map.Entry<GlobalStreamId, Grouping> input : inputs.entrySet()) {
                    GlobalStreamId id = input.getKey();
                    LOG.info("with input {}...", id);
                    Grouping grouping = input.getValue();
                    InputStream.Builder builder = new InputStream.Builder().withId(id.get_streamId())
                            .withFromComponent(id.get_componentId()).withToComponent(boltComp)
                            .withGroupingType(grouping);
                    inputStreams.add(builder);
                }
            }
            Map<String, StreamInfo> outputs = common.get_streams();
            if (outputs != null) {
                for (String name : outputs.keySet()) {
                    GlobalStreamId id = new GlobalStreamId(boltComp, name);
                    LOG.info("and output {}...", id);
                    OutputStream.Builder builder = new OutputStream.Builder().withId(name);
                    outStreams.put(id, builder);
                }
            }
            LoadCompConf.Builder builder = new LoadCompConf.Builder()
                    .withParallelism(common.get_parallelism_hint()).withId(boltComp);
            boltBuilders.put(boltComp, builder);
        }
        // Attach per-bolt CPU/memory resource settings, when present.
        Map<String, Map<String, Double>> boltResources = getBoltsResources(topo, topoConf);
        for (Map.Entry<String, Map<String, Double>> entry : boltResources.entrySet()) {
            LoadCompConf.Builder bd = boltBuilders.get(entry.getKey());
            if (bd != null) {
                Map<String, Double> resources = entry.getValue();
                Double cpu = resources.get(Config.TOPOLOGY_COMPONENT_CPU_PCORE_PERCENT);
                if (cpu != null) {
                    bd.withCpuLoad(cpu);
                }
                Double mem = resources.get(Config.TOPOLOGY_COMPONENT_RESOURCES_ONHEAP_MEMORY_MB);
                if (mem != null) {
                    bd.withMemoryLoad(mem);
                }
            }
        }
    }

    //Spouts: same as bolts but spouts have no inputs.
    if (topo.get_spouts() != null) {
        for (Map.Entry<String, SpoutSpec> spoutSpec : topo.get_spouts().entrySet()) {
            String spoutComp = spoutSpec.getKey();
            LOG.info("Found Spout {}...", spoutComp);
            SpoutSpec spout = spoutSpec.getValue();
            ComponentCommon common = spout.get_common();
            Map<String, StreamInfo> outputs = common.get_streams();
            if (outputs != null) {
                for (String name : outputs.keySet()) {
                    GlobalStreamId id = new GlobalStreamId(spoutComp, name);
                    LOG.info("with output {}...", id);
                    OutputStream.Builder builder = new OutputStream.Builder().withId(name);
                    outStreams.put(id, builder);
                }
            }
            LoadCompConf.Builder builder = new LoadCompConf.Builder()
                    .withParallelism(common.get_parallelism_hint()).withId(spoutComp);
            spoutBuilders.put(spoutComp, builder);
        }
        Map<String, Map<String, Double>> spoutResources = getSpoutsResources(topo, topoConf);
        for (Map.Entry<String, Map<String, Double>> entry : spoutResources.entrySet()) {
            LoadCompConf.Builder sd = spoutBuilders.get(entry.getKey());
            if (sd != null) {
                Map<String, Double> resources = entry.getValue();
                Double cpu = resources.get(Config.TOPOLOGY_COMPONENT_CPU_PCORE_PERCENT);
                if (cpu != null) {
                    sd.withCpuLoad(cpu);
                }
                Double mem = resources.get(Config.TOPOLOGY_COMPONENT_RESOURCES_ONHEAP_MEMORY_MB);
                if (mem != null) {
                    sd.withMemoryLoad(mem);
                }
            }
        }
    }

    //Stats... group executor summaries by component id.
    Map<String, List<ExecutorSummary>> byComponent = new HashMap<>();
    for (ExecutorSummary executor : info.get_executors()) {
        String component = executor.get_component_id();
        List<ExecutorSummary> list = byComponent.get(component);
        if (list == null) {
            list = new ArrayList<>();
            byComponent.put(component, list);
        }
        list.add(executor);
    }

    List<InputStream> streams = new ArrayList<>(inputStreams.size());
    //Compute the stats for the different input streams
    for (InputStream.Builder builder : inputStreams) {
        GlobalStreamId streamId = new GlobalStreamId(builder.getFromComponent(), builder.getId());
        List<ExecutorSummary> summaries = byComponent.get(builder.getToComponent());
        //Execute and process latency...
        builder.withProcessTime(
                new NormalDistStats(extractBoltValues(summaries, streamId, BoltStats::get_process_ms_avg)));
        builder.withExecTime(
                new NormalDistStats(extractBoltValues(summaries, streamId, BoltStats::get_execute_ms_avg)));
        //InputStream is done
        streams.add(builder.build());
    }

    //There is a bug in some versions that returns 0 for the uptime.
    // To work around it we should get it an alternative (working) way.
    Map<String, Integer> workerToUptime = new HashMap<>();
    for (WorkerSummary ws : tpinfo.get_workers()) {
        workerToUptime.put(ws.get_supervisor_id() + ":" + ws.get_port(), ws.get_uptime_secs());
    }
    LOG.debug("WORKER TO UPTIME {}", workerToUptime);

    // Derive per-output-stream emit rates (tuples/sec) from the emitted counters
    // over each stat time window, capped at the executor's uptime.
    for (Map.Entry<GlobalStreamId, OutputStream.Builder> entry : outStreams.entrySet()) {
        OutputStream.Builder builder = entry.getValue();
        GlobalStreamId id = entry.getKey();
        List<Double> emittedRate = new ArrayList<>();
        List<ExecutorSummary> summaries = byComponent.get(id.get_componentId());
        if (summaries != null) {
            for (ExecutorSummary summary : summaries) {
                if (summary.is_set_stats()) {
                    int uptime = summary.get_uptime_secs();
                    LOG.debug("UPTIME {}", uptime);
                    if (uptime <= 0) {
                        //Likely it is because of a bug, so try to get it another way
                        String key = summary.get_host() + ":" + summary.get_port();
                        uptime = workerToUptime.getOrDefault(key, 1);
                        LOG.debug("Getting uptime for worker {}, {}", key, uptime);
                    }
                    for (Map.Entry<String, Map<String, Long>> statEntry : summary.get_stats().get_emitted()
                            .entrySet()) {
                        String timeWindow = statEntry.getKey();
                        long timeSecs = uptime;
                        try {
                            timeSecs = Long.valueOf(timeWindow);
                        } catch (NumberFormatException e) {
                            //Ignored... non-numeric windows (e.g. ":all-time") fall back to uptime
                        }
                        timeSecs = Math.min(timeSecs, uptime);
                        Long count = statEntry.getValue().get(id.get_streamId());
                        if (count != null) {
                            LOG.debug("{} emitted {} for {} secs or {} tuples/sec", id, count, timeSecs,
                                    count.doubleValue() / timeSecs);
                            emittedRate.add(count.doubleValue() / timeSecs);
                        }
                    }
                }
            }
        }
        builder.withRate(new NormalDistStats(emittedRate));
        //The OutputStream is done
        // NOTE(review): if a stream's component is neither a bolt nor a spout,
        // comp is null here and comp.withStream(...) would NPE — presumably
        // impossible for a well-formed topology; confirm.
        LoadCompConf.Builder comp = boltBuilders.get(id.get_componentId());
        if (comp == null) {
            comp = spoutBuilders.get(id.get_componentId());
        }
        comp.withStream(builder.build());
    }

    List<LoadCompConf> spouts = spoutBuilders.values().stream().map((b) -> b.build())
            .collect(Collectors.toList());
    List<LoadCompConf> bolts = boltBuilders.values().stream().map((b) -> b.build())
            .collect(Collectors.toList());
    return new TopologyLoadConf(topologyName, savedTopoConf, spouts, bolts, streams);
}
From source file:com.magnet.mmx.util.AlertEventsManagerTest.java
/**
 * Computes the arithmetic mean of the given elapsed times.
 *
 * @param elapsedTimes the samples to average; may be empty
 * @return the mean of the samples, or 0.0 for an empty list (the previous
 *         implementation divided 0.0 by 0 and returned NaN, which poisons
 *         any downstream arithmetic)
 */
private double getAvg(List<Long> elapsedTimes) {
    if (elapsedTimes.isEmpty()) {
        return 0.0;
    }
    double sum = 0.0;
    for (Long l : elapsedTimes) {
        sum += l.doubleValue();
    }
    return sum / elapsedTimes.size();
}
From source file:com.aba.industry.overhead.impl.LHOverheadCalculatorImpl.java
@Override public Double getSalary(IndustryActivities industryActivities, Long seconds) { switch (industryActivities) { case MANUFACTURING: return seconds.doubleValue() / 60 / 60 / 2 * 200000; case INVENTION: return seconds.doubleValue() / 60 / 60 / 40 * 200000; }//from ww w . java 2 s . com return 0d; }
From source file:org.finra.herd.dao.helper.AwsHelper.java
/**
 * Returns transfer rate in kBytes/s. Please note that bytes->kBytes and ms->seconds
 * conversions cancel each other (both use conversion factor of 1000), so the raw
 * bytes/millis ratio is already the kBytes/s figure.
 *
 * @param totalBytesTransferred Number of bytes transferred.
 * @param durationMillis Duration in milliseconds.
 *
 * @return the transfer rate in kBytes/s.
 */
public Double getTransferRateInKilobytesPerSecond(Long totalBytesTransferred, Long durationMillis) {
    double bytesTransferred = totalBytesTransferred.doubleValue();
    return bytesTransferred / durationMillis;
}
From source file:org.finra.herd.dao.helper.AwsHelper.java
/** * Returns transfer rate in Mbit/s (Decimal prefix: 1 Mbit/s = 1,000,000 bit/s). * * @param totalBytesTransferred Number of bytes transferred. * @param durationMillis Duration in milliseconds. * * @return the transfer rate in Mbit/s./*from w ww.j a va2s . c o m*/ */ public Double getTransferRateInMegabitsPerSecond(Long totalBytesTransferred, Long durationMillis) { return totalBytesTransferred.doubleValue() * BITS_PER_BYTE / durationMillis / 1000; }