List of usage examples for java.lang.Long.doubleValue()
public double doubleValue()
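Returns the value of this Long as a double after a widening primitive conversion. Because a double carries only 53 bits of mantissa, long values wider than 53 bits are rounded during the conversion. The minimal sketch below (the class name is made up for illustration) shows both an exact conversion and a case where two distinct longs map to the same double:

public class DoubleValuePrecisionDemo {
    public static void main(String[] args) {
        // small values convert exactly
        Long small = 1234567L;
        System.out.println(small.doubleValue());   // 1234567.0

        // values wider than 53 bits are rounded when widened to double
        Long a = Long.MAX_VALUE;                   // 9223372036854775807
        Long b = Long.MAX_VALUE - 1L;              // 9223372036854775806
        System.out.println(a.doubleValue());       // 9.223372036854776E18
        System.out.println(a.doubleValue() == b.doubleValue()); // true: distinct longs, same double
    }
}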
From source file:Main.java
public static void main(String[] args) {
    Long longObject = new Long("1234567");
    double d = longObject.doubleValue();
    System.out.println("double:" + d);
}
From source file:Main.java
public static void main(String[] args) {
    Long lObj = new Long("10");

    byte b = lObj.byteValue();
    System.out.println(b);

    short s = lObj.shortValue();
    System.out.println(s);

    int i = lObj.intValue();
    System.out.println(i);

    float f = lObj.floatValue();
    System.out.println(f);

    double d = lObj.doubleValue();
    System.out.println(d);
}
From source file:com.example.geomesa.kafka08.KafkaLoadTester.java
public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);
    String visibility = getVisibility(cmd);
    if (visibility == null) {
        System.out.println("visibility: null");
    } else {
        System.out.println("visibility: '" + visibility + "'");
    }

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    System.out.println("KDS config: " + dsConf);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaStressTest";
    final String sftSchema = "name:String,age:Int,step:Double,lat:Double,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);

    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);

    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);

    System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
    System.in.read();

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... \nrefresh GeoServer layer preview to see changes");

    SimpleFeatureBuilder builder = new SimpleFeatureBuilder(sft);
    Integer numFeats = getLoad(cmd);

    System.out.println("Building a list of " + numFeats + " SimpleFeatures.");
    List<SimpleFeature> features = IntStream.range(1, numFeats)
            .mapToObj(i -> createFeature(builder, i, visibility)).collect(Collectors.toList());

    // set variables to estimate feature production rate
    Long startTime = null;
    Long featuresSinceStartTime = 0L;
    int cycle = 0;
    int cyclesToSkip = 50000 / numFeats; // collect enough features
                                         // to get an accurate rate estimate

    while (true) {
        // write features
        features.forEach(feat -> {
            try {
                DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();
                featureCollection.add(feat);
                producerFS.addFeatures(featureCollection);
            } catch (Exception e) {
                System.out.println("Caught an exception while writing features.");
                e.printStackTrace();
            }
            updateFeature(feat);
        });

        // count features written
        Integer consumerSize = consumerFS.getFeatures().size();
        cycle++;
        featuresSinceStartTime += consumerSize;
        System.out.println("At " + new Date() + " wrote " + consumerSize + " features");

        // if we've collected enough features, calculate the rate
        if (cycle >= cyclesToSkip || startTime == null) {
            Long endTime = System.currentTimeMillis();
            if (startTime != null) {
                Long diffTime = endTime - startTime;
                Double rate = (featuresSinceStartTime.doubleValue() * 1000.0) / diffTime.doubleValue();
                System.out.printf("%.1f feats/sec (%d/%d)\n", rate, featuresSinceStartTime, diffTime);
            }
            cycle = 0;
            startTime = endTime;
            featuresSinceStartTime = 0L;
        }
    }
}
From source file:com.example.geomesa.kafka.KafkaLoadTester.java
public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);
    String visibility = getVisibility(cmd);
    Integer delay = getDelay(cmd);
    if (visibility == null) {
        System.out.println("visibility: null");
    } else {
        System.out.println("visibility: '" + visibility + "'");
    }

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    System.out.println("KDS config: " + dsConf);
    dsConf.put("kafka.consumer.count", "0");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("kafka.consumer.count", "1");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    try {
        // create the schema which creates a topic in Kafka
        // (only needs to be done once)
        final String sftName = "KafkaStressTest";
        final String sftSchema = "name:String,age:Int,step:Double,lat:Double,dtg:Date,*geom:Point:srid=4326";
        SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
        producerDS.createSchema(sft);

        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();

        // the live consumer must be created before the producer writes features
        // in order to read streaming data.
        // i.e. the live consumer will only read data written after its instantiation
        SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);
        SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);

        // creates and adds SimpleFeatures to the producer every 1/5th of a second
        System.out.println("Writing features to Kafka... \nrefresh GeoServer layer preview to see changes");

        SimpleFeatureBuilder builder = new SimpleFeatureBuilder(sft);
        Integer numFeats = getLoad(cmd);

        System.out.println("Building a list of " + numFeats + " SimpleFeatures.");
        List<SimpleFeature> features = IntStream.range(1, numFeats)
                .mapToObj(i -> createFeature(builder, i, visibility)).collect(Collectors.toList());

        // set variables to estimate feature production rate
        Long startTime = null;
        Long featuresSinceStartTime = 0L;
        int cycle = 0;
        int cyclesToSkip = 50000 / numFeats; // collect enough features
                                             // to get an accurate rate estimate

        while (true) {
            // write features
            features.forEach(feat -> {
                try {
                    DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();
                    featureCollection.add(feat);
                    producerFS.addFeatures(featureCollection);
                } catch (Exception e) {
                    System.out.println("Caught an exception while writing features.");
                    e.printStackTrace();
                }
                updateFeature(feat);
            });

            // count features written
            Integer consumerSize = consumerFS.getFeatures().size();
            cycle++;
            featuresSinceStartTime += consumerSize;
            System.out.println("At " + new Date() + " wrote " + consumerSize + " features");

            // if we've collected enough features, calculate the rate
            if (cycle >= cyclesToSkip || startTime == null) {
                Long endTime = System.currentTimeMillis();
                if (startTime != null) {
                    Long diffTime = endTime - startTime;
                    Double rate = (featuresSinceStartTime.doubleValue() * 1000.0) / diffTime.doubleValue();
                    System.out.printf("%.1f feats/sec (%d/%d)\n", rate, featuresSinceStartTime, diffTime);
                }
                cycle = 0;
                startTime = endTime;
                featuresSinceStartTime = 0L;
            }

            // sleep before next write
            if (delay != null) {
                System.out.printf("Sleeping for %d ms\n", delay);
                Thread.sleep(delay);
            }
        }
    } finally {
        producerDS.dispose();
        consumerDS.dispose();
    }
}
From source file:iracing.webapi.SeasonStandingsParser.java
public static long parse(String json, ItemHandler handler) {
    JSONParser parser = new JSONParser();
    long output = 0;
    try {
        JSONObject root = (JSONObject) parser.parse(json);
        JSONObject arrayRoot = (JSONObject) root.get("d");
        output = getLong(arrayRoot, "27");
        JSONArray results = (JSONArray) arrayRoot.get("r");
        for (int i = 0; i < results.size(); i++) {
            JSONObject result = (JSONObject) results.get(i);
            SeasonStanding standing = new SeasonStanding();
            standing.setDroppedWeeks(getInt(result, "1"));
            standing.setClubName(getString(result, "2", true));
            standing.setCountryCode(getString(result, "3", true));
            standing.setLicenseSubLevel(getString(result, "4"));
            standing.setAverageFinish(getInt(result, "5"));
            standing.setIrating(getInt(result, "6"));
            standing.setTotalTopFives(getInt(result, "7"));
            standing.setMaxLicenseLevel(getInt(result, "8"));
            standing.setDriverName(getString(result, "9", true));
            standing.setClubId(getInt(result, "10"));
            standing.setTotalStarts(getInt(result, "11"));
            standing.setDisplayCountry(getString(result, "14", true));
            standing.setTotalLapsLed(getInt(result, "13"));
            standing.setCountry(getString(result, "15", true));
            standing.setTotalWins(getInt(result, "16"));
            standing.setTotalIncidents(getInt(result, "17"));
            // total points may be serialized as either a Long or a Double
            Object o = result.get("18");
            double d;
            if (o instanceof Long) {
                Long l = (Long) o;
                d = l.doubleValue();
            } else {
                d = (Double) o;
            }
            standing.setTotalPoints(d);
            standing.setRank(getInt(result, "20"));
            standing.setDivision(getInt(result, "22"));
            standing.setDriverCustomerId(getLong(result, "24"));
            standing.setWeeksCounted(getInt(result, "26"));
            standing.setTotalLaps(getLong(result, "28"));
            standing.setAverageStart(getInt(result, "29"));
            standing.setTotalPoles(getInt(result, "30"));
            standing.setPosition(getLong(result, "31"));
            if (!handler.onSeasonStandingParsed(standing))
                break;
        }
    } catch (ParseException ex) {
        Logger.getLogger(SeasonStandingsParser.class.getName()).log(Level.SEVERE, null, ex);
    }
    return output;
}
From source file:ie.aib.nbp.zosresttest.RunTest.java
private static void runPerformanceTest(String username, String password, PrintStream out, boolean attachResponse,
        boolean formatResponse) {
    ZosRestServicePerformanceRunner runner = new ZosRestServicePerformanceRunner(config.getProperty("HOST"),
            Integer.parseInt(config.getProperty("PORT")), out);
    try {
        readConfig(RUN_PARAM_FILE_PATH);
        String url = config.getProperty("URL");
        String payload = config.getProperty("PAYLOAD");
        int loops = Integer.parseInt(config.getProperty("LOOPS"));
        SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss z");
        out.println("z/OS performance test started at: " + sdf.format(new Date()));
        out.println(" ");

        List<Long> elapsedTimes = new ArrayList<>();
        for (int i = 0; i < loops; i++) {
            long elapsedTime = runner.executeServices(url, username, password, payload, out, attachResponse,
                    formatResponse);
            elapsedTimes.add(elapsedTime);
        }

        // calculate the average run time
        Long totalTime = 0L;
        for (Long time : elapsedTimes)
            totalTime = totalTime + time;
        Double avgTime = totalTime.doubleValue() / elapsedTimes.size();

        out.println("Average run time: " + avgTime + " milliseconds");
        out.println("");
        out.println("z/OS performance test ended at: " + sdf.format(new Date()));
    } catch (NumberFormatException ex) {
        out.println("Unable to run the test; either the number of parameters or their values are incorrect.");
    }
}
From source file:eagle.log.entity.EntityQualifierUtils.java
/**
 * Converts an object to a double.
 *
 * @param obj the object to convert
 * @return the double value, otherwise Double.NaN
 */
public static double convertObjToDouble(Object obj) {
    if (Long.class.equals(obj.getClass()) || long.class.equals(obj.getClass())) {
        Long _value = (Long) obj;
        return _value.doubleValue();
    } else if (Integer.class.equals(obj.getClass()) || int.class.equals(obj.getClass())) {
        Integer _value = (Integer) obj;
        return _value.doubleValue();
    } else if (Double.class.equals(obj.getClass()) || double.class.equals(obj.getClass())) {
        return (Double) obj;
    } else if (Float.class.equals(obj.getClass()) || float.class.equals(obj.getClass())) {
        Float _value = (Float) obj;
        return _value.doubleValue();
    } else if (Short.class.equals(obj.getClass()) || short.class.equals(obj.getClass())) {
        Short _value = (Short) obj;
        return _value.doubleValue();
    } else if (Byte.class.equals(obj.getClass()) || byte.class.equals(obj.getClass())) {
        Byte _value = (Byte) obj;
        return _value.doubleValue();
    }
    LOG.warn("Failed to convert object " + obj.toString() + " in type of " + obj.getClass() + " to double");
    return Double.NaN;
}
From source file:com.nubits.nubot.utils.Utils.java
public static double getDouble(Object obj) {
    double toRet = -1;
    // if the price is round (e.g. 100) the value will have been parsed as a Long
    if (obj.getClass().equals((new Long((long) 10)).getClass())) {
        Long l = new Long((long) obj);
        toRet = l.doubleValue();
    } else {
        try {
            toRet = (Double) obj;
        } catch (ClassCastException e) {
            // probably a String
            try {
                toRet = Double.parseDouble((String) obj);
            } catch (ClassCastException ex) {
                LOG.error("cannot parse object : " + obj.toString());
                return -1;
            }
        }
    }
    return toRet;
}
From source file:com.github.jessemull.microflex.util.BigDecimalUtil.java
/**
 * Safely converts an object to a BigDecimal. Loss of precision may occur. Throws
 * an arithmetic exception upon overflow.
 *
 * @param obj the object to parse
 * @return the parsed object
 * @throws ArithmeticException on overflow
 */
public static BigDecimal toBigDecimal(Object obj) {

    /* Switch on class and convert to BigDecimal */

    String type = obj.getClass().getSimpleName();
    BigDecimal parsed;

    switch (type) {

        case "Byte":
            Byte by = (Byte) obj;
            parsed = new BigDecimal(by.doubleValue());
            break;

        case "Short":
            Short sh = (Short) obj;
            parsed = new BigDecimal(sh.doubleValue());
            break;

        case "Integer":
            Integer in = (Integer) obj;
            parsed = new BigDecimal(in.doubleValue());
            break;

        case "Long":
            Long lo = (Long) obj;
            parsed = new BigDecimal(lo.doubleValue());
            break;

        case "Float":
            Float fl = (Float) obj;
            parsed = new BigDecimal(fl.doubleValue());
            break;

        case "BigInteger":
            parsed = new BigDecimal((BigInteger) obj);
            break;

        case "BigDecimal":
            parsed = (BigDecimal) obj;
            break;

        case "Double":
            Double db = (Double) obj;
            parsed = new BigDecimal(db);
            break;

        default:
            throw new IllegalArgumentException("Invalid type: " + type + "\nData values "
                    + "must extend the abstract Number class.");
    }

    return parsed;
}
From source file:uk.org.funcube.fcdw.server.extract.csv.RealTimeCsvExtractor.java
public static double getPaPower(final Long value) {
    return 0.005 * Math.pow(value.doubleValue(), 2.0629);
}