List of usage examples for java.lang.Float.parseFloat(String)
public static float parseFloat(String s) throws NumberFormatException
From source file:gdsc.smlm.ij.plugins.LoadLocalisations.java
public static List<LocalisationModel> loadLocalisations(String filename) { List<LocalisationModel> localisations = new ArrayList<LocalisationModel>(); BufferedReader input = null;//from w w w .ja va 2 s. c o m try { FileInputStream fis = new FileInputStream(filename); input = new BufferedReader(new UnicodeReader(fis, null)); String line; while ((line = input.readLine()) != null) { if (line.length() == 0) continue; if (line.charAt(0) == '#') continue; String[] fields = line.split("\t"); if (fields.length >= 6) { int t = Integer.parseInt(fields[0]); int id = Integer.parseInt(fields[1]); float x = Float.parseFloat(fields[2]); float y = Float.parseFloat(fields[3]); float z = Float.parseFloat(fields[4]); float intensity = Float.parseFloat(fields[5]); localisations.add(new LocalisationModel(id, t, x, y, z, intensity, LocalisationModel.SINGLE)); } } } catch (IOException e) { // ignore } finally { try { if (input != null) input.close(); } catch (IOException e) { // Ignore } } return localisations; }
From source file:MSUmpire.SearchResultParser.PepXMLParseHandler.java
/**
 * Parses a pepXML {@code search_summary} element and copies its metadata into
 * {@code singleLCMSID}: search engine, instrument attributes, the search
 * database path, and amino-acid / terminal modifications (resolved against
 * the PTM library; unknown modifications are logged and skipped).
 * <p>
 * Fix: the original throws clause listed {@code XmlPullParserException}
 * twice; the redundant duplicate is removed (callers are unaffected).
 *
 * @param node the {@code search_summary} DOM element
 * @throws XmlPullParserException on pull-parser errors from downstream calls
 * @throws IOException            on I/O errors from downstream calls
 */
private void ParseSearchSummary(Element node) throws XmlPullParserException, IOException {
    // Copy optional search-engine / instrument attributes when present.
    if (node.getAttributes().getNamedItem("search_engine") != null) {
        singleLCMSID.SearchEngine = node.getAttributes().getNamedItem("search_engine").getNodeValue();
    }
    if (node.getAttributes().getNamedItem("msDetector") != null) {
        singleLCMSID.msDetector = node.getAttributes().getNamedItem("msDetector").getNodeValue();
    }
    if (node.getAttributes().getNamedItem("msIonization") != null) {
        singleLCMSID.msIonization = node.getAttributes().getNamedItem("msIonization").getNodeValue();
    }
    if (node.getAttributes().getNamedItem("msManufacturer") != null) {
        singleLCMSID.msManufacturer = node.getAttributes().getNamedItem("msManufacturer").getNodeValue();
    }
    if (node.getAttributes().getNamedItem("msMassAnalyzer") != null) {
        singleLCMSID.msMassAnalyzer = node.getAttributes().getNamedItem("msMassAnalyzer").getNodeValue();
    }
    if (node.getAttributes().getNamedItem("msModel") != null) {
        singleLCMSID.msModel = node.getAttributes().getNamedItem("msModel").getNodeValue();
    }
    for (int k = 0; k < node.getChildNodes().getLength(); k++) {
        if ("search_database".equals(node.getChildNodes().item(k).getNodeName())) {
            singleLCMSID.DataBase = node.getChildNodes().item(k).getAttributes().getNamedItem("local_path")
                    .getNodeValue();
        }
        if ("aminoacid_modification".equals(node.getChildNodes().item(k).getNodeName())) {
            if (node.getChildNodes().item(k).getAttributes().getNamedItem("aminoacid") != null) {
                String site = node.getChildNodes().item(k).getAttributes().getNamedItem("aminoacid")
                        .getNodeValue();
                // Round the reported modified-residue mass to 3 decimals.
                float mass = (float) Math.round(Float.parseFloat(
                        node.getChildNodes().item(k).getAttributes().getNamedItem("mass").getNodeValue())
                        * 1000) / 1000;
                float massdiff2 = Float.parseFloat(
                        node.getChildNodes().item(k).getAttributes().getNamedItem("massdiff").getNodeValue());
                AminoAcid aa = AminoAcid.getAminoAcid(site.charAt(0));
                // Recompute the mass delta from the residue's monoisotopic mass and
                // cross-check it against the file's own massdiff attribute.
                float massdiff = mass - (float) aa.monoisotopicMass;
                if (massdiff != 0f && Math.abs(massdiff - massdiff2) < 0.1f) {
                    PTM ptm = PTMManager.GetInstance().GetPTM(site, massdiff);
                    if (ptm == null) {
                        Logger.getRootLogger().warn("Warning! modification in pepxml : amino acid " + site
                                + "(mass diff:" + massdiff + ") doesn't exist in the library.");
                    } else {
                        singleLCMSID.AddModification(ptm, site);
                    }
                } else {
                    // Special-case: pyro-carbamidomethyl-like entry on Cys
                    // (massdiff ~ -17.0265, mass ~ 143.0041) is still accepted.
                    if (Math.abs(massdiff2 + 17.0265) < 0.01 && Math.abs(mass - 143.0041f) < 0.001
                            && "C".equals(site)) {
                        PTM ptm = PTMManager.GetInstance().GetPTM(site, massdiff);
                        if (ptm == null) {
                            Logger.getRootLogger().warn("Warning! modification in pepxml : amino acid " + site
                                    + "(mass diff:" + massdiff + ") doesn't exist in the library.");
                        } else {
                            singleLCMSID.AddModification(ptm, site);
                        }
                    } else {
                        Logger.getRootLogger().warn("Warning! modification in pepxml : amino acid " + site
                                + "(mass: " + mass + ", massdiff:" + massdiff2 + ") ignored.");
                    }
                }
            }
        }
        if ("terminal_modification".equals(node.getChildNodes().item(k).getNodeName())) {
            if (node.getChildNodes().item(k).getAttributes().getNamedItem("terminus") != null) {
                String site = "";
                if ("c".equals(node.getChildNodes().item(k).getAttributes().getNamedItem("terminus")
                        .getNodeValue().toLowerCase())) {
                    site = "C-term";
                }
                if ("n".equals(node.getChildNodes().item(k).getAttributes().getNamedItem("terminus")
                        .getNodeValue().toLowerCase())) {
                    site = "N-term";
                }
                float massdiff = Float.parseFloat(
                        node.getChildNodes().item(k).getAttributes().getNamedItem("massdiff").getNodeValue());
                PTM ptm = PTMManager.GetInstance().GetPTM(site, massdiff);
                if (ptm == null) {
                    Logger.getRootLogger().warn("Warning! term-modification:" + site + "(" + massdiff
                            + ") doesn't exist in the library.\n");
                } else {
                    singleLCMSID.AddModification(ptm, site);
                }
            }
        }
    }
}
From source file:cz.cuni.mff.d3s.tools.perfdoc.server.measuring.MeasureRequest.java
/** * Normalize incoming values. The normalizing includes converting to proper * types (e.g. integers will be saved as integers, enums like enums) and * shortening numeric types (e.g. integer value sent in format "0 to 0", * will be converted to integer with value 0). * * @param valuesList the List containing values to normalize. * @param rangeValue the number of the rangeValue. valueList[item] will be * left as it was./*from w w w . ja v a 2 s.co m*/ */ private Object[] normalize(List<Object> valuesList, int rangeValue) throws ClassNotFoundException, IOException { Object[] normalizedValues = valuesList.toArray(); for (int i = 0; i < valuesList.size(); i++) { //rangeValue shall stay in the incoming format if (i != rangeValue) { Object item = valuesList.get(i); String parameter = getArgName(workload, i); //if it is a number, it must be on it converted if (parameter.equals("int") || parameter.equals("float") || parameter.equals("double")) { if (((String) item).contains(" to ")) { String[] chunks = ((String) item).split(" to "); if (chunks.length == 2 && (chunks[0].equals(chunks[1]))) { switch (parameter) { case "int": normalizedValues[i] = Integer.parseInt(chunks[0]); break; case "float": normalizedValues[i] = Float.parseFloat(chunks[0]); break; case "double": normalizedValues[i] = Double.parseDouble(chunks[0]); break; } } } else { switch (parameter) { case "int": normalizedValues[i] = Integer.parseInt((String) item); break; case "float": normalizedValues[i] = Float.parseFloat((String) item); break; case "double": normalizedValues[i] = Double.parseDouble((String) item); break; } } } else if (!parameter.equals("java.lang.String") && !parameter.equals("String")) { //enum //enum can be of any type, therefore Enum<?>, however this format is not accepted by valueof @SuppressWarnings({ "unchecked", "rawtypes" }) Object pom = Enum.valueOf((Class<? 
extends Enum>) new ClassParser(parameter).getLoadedClass(), (String) item); normalizedValues[i] = pom; } } } return normalizedValues; }
From source file:com.orange.atk.graphAnalyser.CreateGraph.java
public void generateImage(String path) { for (int i = 0; i < tempVectGraph.size(); i++) { Graph temp = tempVectGraph.get(i); String name = temp.getName(); float Scale; try {//w w w.j a v a 2 s . c o m Scale = Float.parseFloat(temp.getScale()); } catch (NumberFormatException e1) { Logger.getLogger(this.getClass()).warn("non float value for scale in config file"); Scale = 1; } String ycomment = temp.getYcomment(); // Check if file is empty File file = new File(path + Platform.FILE_SEPARATOR + name + ".csv"); if (file.exists()) { Logger.getLogger(this.getClass()).debug("Length" + file.length()); if (file.length() == 0) { file.delete(); Logger.getLogger(this.getClass()).debug("delete file" + name + ".csv"); } else { try { PlotList plotlist = PlotReader .read(new BufferedReader(new FileReader(file.getAbsolutePath()))); // Storage, blue, x Storage, y Storage, 1, true GraphGenerator.generateGraphWithJFreeChart(plotlist, name, path, ycomment, path + Platform.FILE_SEPARATOR + name + ".png", Scale); } catch (FileNotFoundException e) { // TODO Auto-generated catch block Logger.getLogger(this.getClass()).error(e); } } } file = null; } }
From source file:com.zotoh.core.util.DataUte.java
/** * @param cz/* www.j av a 2s . co m*/ * @param s * @return */ public static Object conv2Obj(Class<?> cz, String s) { Object o = null; if (Boolean.class.equals(cz)) o = Boolean.parseBoolean(s); else if (Timestamp.class.equals(cz)) o = CoreUte.parseTimestamp(s); else if (Date.class.equals(cz)) o = CoreUte.parseDate(s); else if (Integer.class.equals(cz)) o = Integer.parseInt(s); else if (Long.class.equals(cz)) o = Long.parseLong(s); else if (Double.class.equals(cz)) o = Double.parseDouble(s); else if (Float.class.equals(cz)) o = Float.parseFloat(s); else if (String.class.equals(cz)) o = s; else if (Password.class.equals(cz)) { try { o = PwdFactory.getInstance().create(s); } catch (Exception e) { tlog().warn("", e); o = null; } } else if (byte[].class.equals(cz)) o = toBytes(s); return o; }
From source file:fr.bird.bloom.model.GeographicTreatment.java
/**
 * Check if coordinates (latitude and longitude) are included in the country
 * indicated by the iso2 code.
 * <p>
 * For every record id: reads decimalLatitude_/decimalLongitude_/countryCode_,
 * converts iso2 to iso3, and tests the point against the country's GADM
 * GeoJSON polygon. Records whose point falls outside (or whose data cannot be
 * resolved) are SELECTed (for reporting) and then DELETEd from the
 * Workflow.Clean_&lt;uuid&gt; table.
 *
 * @return the rows (full column dumps) that were removed
 */
public List<String> checkCoordinatesIso2Code() {
    //this.getDarwinCore().associateIdData();
    List<String> listToDelete = new ArrayList<>();
    //Map<String, List<String>> idAssoData = this.getDarwinCore().getIdAssoData();
    final String resourcePath = BloomConfig.getResourcePath();
    List<String> idList = this.getDarwinCore().getID();
    //int iLatitude = this.getDarwinCore().getIndiceFromTag("decimalLatitude_");
    //int iLongitude = this.getDarwinCore().getIndiceFromTag("decimalLongitude_");
    //int iIso2 = this.getDarwinCore().getIndiceFromTag("countryCode_");
    //int iGbifID = this.getDarwinCore().getIndiceFromTag("gbifID_");
    int nbWrongIso2 = 0;
    List<String> listIDtoDelete = new ArrayList<>();
    for (int i = 0; i < idList.size(); i++) {
        String id_ = idList.get(i);
        //System.out.println(id_);
        //for (String id_ : idAssoData.keySet()) {
        //System.out.println(id_);
        // Skip the header pseudo-id "id_".
        if (!"id_".equals(id_)) {
            //List<String> listInfos = idAssoData.get(id_);
            // errorIso starts pessimistic: a record is flagged unless the point
            // is proven to be inside its country polygon below.
            boolean errorIso = true;
            boolean errorCoord = false;
            float latitude = -1;
            float longitude = -1;
            String iso2 = "error";
            String gbifId_ = "error";
            String iso3 = "error";
            // Fetch and parse the latitude; a non-numeric value flags the record.
            String valueLatitude = this.getDarwinCore().getValueFromColumn("decimalLatitude_",
                    id_.replaceAll("\"", ""));
            System.err.println("decimalLatitude : " + valueLatitude);
            if (!valueLatitude.equals("error")) {
                try {
                    latitude = Float.parseFloat(valueLatitude.replaceAll("\"", ""));
                } catch (NumberFormatException ex) {
                    errorCoord = true;
                }
            }
            // Same for the longitude.
            String valueLongitude = this.getDarwinCore().getValueFromColumn("decimalLongitude_",
                    id_.replaceAll("\"", ""));
            if (!valueLongitude.equals("error")) {
                try {
                    longitude = Float.parseFloat(valueLongitude.replaceAll("\"", ""));
                } catch (NumberFormatException ex) {
                    errorCoord = true;
                }
            }
            iso2 = this.getDarwinCore().getValueFromColumn("countryCode_", id_.replaceAll("\"", ""))
                    .replaceAll("\"", "");
            //gbifId_ = this.getDarwinCore().getValueFromColumn("gbifID_", id_.replaceAll("\"", "")).replaceAll("\"", "");
            if (!iso2.equals("error") && !errorCoord) {
                iso3 = this.convertIso2ToIso3(iso2);
                /* try { latitude = Float.parseFloat(listInfos.get(iLatitude).replace("\"", "")); } catch (NumberFormatException nfe){ System.err.println(listInfos.get(iLatitude).replace("\"", "")); } longitude = Float.parseFloat(listInfos.get(iLongitude).replace("\"", "")); iso2 = listInfos.get(iIso2); */
                //gbifId_ = listInfos.get(iGbifID);
                if (!iso3.equals("error")) {
                    // Point-in-polygon test against the country's GADM level-0 shape.
                    File geoJsonFile = new File(
                            resourcePath + "gadm_json/" + iso3.toUpperCase() + "_adm0.json");
                    GeometryFactory geometryFactory = new GeometryFactory();
                    // NOTE(review): Coordinate takes (x, y) = (longitude, latitude).
                    Point point = geometryFactory.createPoint(new Coordinate(longitude, latitude));
                    System.out.println("--------------------------------------------------------------");
                    System.out.println("------------------ Check point in polygon --------------------");
                    System.out.println("Lat : " + latitude + "\tLong : " + longitude);
                    System.out.println("id_ : " + id_ + "\tIso3 : " + iso3 + "\tiso2 : " + iso2);
                    boolean isContained = this.polygonContainedPoint(point, geoJsonFile);
                    System.out.println("The point is contained in the polygone : " + isContained);
                    System.out.println("--------------------------------------------------------------\n");
                    if (!isContained) {
                        errorIso = true;
                        //nbWrongIso2 ++;
                        //listIDtoDelete.add(id_);
                    } else {
                        errorIso = false;
                    }
                } else {
                    errorIso = true;
                }
            } else {
                errorIso = true;
            }
            if (errorIso) {
                nbWrongIso2++;
                listIDtoDelete.add(id_);
            }
        }
    }
    if (listIDtoDelete.size() > 0) {
        // Build one SELECT (to capture the doomed rows) and one DELETE over all
        // flagged ids, chained with "OR id_=".
        String sqlIDCleanToSelect = "SELECT abstract_,acceptedNameUsage_,acceptedNameUsageID_,accessRights_,accrualMethod_,accrualPeriodicity_,accrualPolicy_,"
                + "alternative_,associatedMedia_,associatedOccurrences_,associatedOrganisms_,associatedReferences_,associatedSequences_,associatedTaxa_,audience_,"
                + "available_,basisOfRecord_,bed_,behavior_,bibliographicCitation_,catalogNumber_,class_,classKey_,collectionCode_,collectionID_,conformsTo_,continent_,"
                + "contributor_,coordinateAccuracy_,coordinatePrecision_,coordinateUncertaintyInMeters_,country_,countryCode_,county_,coverage_,created_,creator_,"
                + "dataGeneralizations_,datasetID_,datasetKey_,datasetName_,date_,dateAccepted_,dateCopyrighted_,dateIdentified_,dateSubmitted_,day_,decimalLatitude_,"
                + "decimalLongitude_,depth_,depthAccuracy_,description_,disposition_,distanceAboveSurface_,distanceAboveSurfaceAccuracy_,dynamicProperties_,"
                + "earliestAgeOrLowestStage_,earliestEonOrLowestEonothem_,earliestEpochOrLowestSeries_,earliestEraOrLowestErathem_,earliestPeriodOrLowestSystem_,"
                + "educationLevel_,elevation_,elevationAccuracy_,endDayOfYear_,establishmentMeans_,event_,eventDate_,eventID_,eventRemarks_,eventTime_,extent_,family_,"
                + "familyKey_,fieldNotes_,fieldNumber_,footprintSpatialFit_,footprintSRS_,footprintWKT_,format_,formation_,gbifID_,genericName_,genus_,genusKey_,"
                + "geodeticDatum_,geologicalContext_,geologicalContextID_,georeferencedBy_,georeferencedDate_,georeferenceProtocol_,georeferenceRemarks_,"
                + "georeferenceSources_,georeferenceVerificationStatus_,group_,habitat_,hasCoordinate_,hasFormat_,hasGeospatialIssues_,hasPart_,hasVersion_,"
                + "higherClassification_,higherGeography_,higherGeographyID_,highestBiostratigraphicZone_,identification_,identificationID_,identificationQualifier_,"
                + "identificationReferences_,identificationRemarks_,identificationVerificationStatus_,identifiedBy_,identifier_,idFile_,individualCount_,individualID_,"
                + "informationWithheld_,infraspecificEpithet_,institutionCode_,institutionID_,instructionalMethod_,isFormatOf_,island_,islandGroup_,isPartOf_,"
                + "isReferencedBy_,isReplacedBy_,isRequiredBy_,issue_,issued_,isVersionOf_,kingdom_,kingdomKey_,language_,lastCrawled_,lastInterpreted_,lastParsed_,"
                + "latestAgeOrHighestStage_,latestEonOrHighestEonothem_,latestEpochOrHighestSeries_,latestEraOrHighestErathem_,latestPeriodOrHighestSystem_,license_,"
                + "lifeStage_,lithostratigraphicTerms_,livingSpecimen_,locality_,locationAccordingTo_,locationID_,locationRemarks_,lowestBiostratigraphicZone_,"
                + "machineObservation_,materialSample_,materialSampleID_,maximumDepthinMeters_,maximumDistanceAboveSurfaceInMeters_,maximumElevationInMeters_,"
                + "measurementAccuracy_,measurementDeterminedBy_,measurementDeterminedDate_,measurementID_,measurementMethod_,measurementOrFact_,measurementRemarks_,"
                + "measurementType_,measurementUnit_,mediator_,mediaType_,medium_,member_,minimumDepthinMeters_,minimumDistanceAboveSurfaceInMeters_,"
                + "minimumElevationInMeters_,modified_,month_,municipality_,nameAccordingTo_,nameAccordingToID_,namePublishedIn_,namePublishedInID_,namePublishedInYear_,"
                + "nomenclaturalCode_,nomenclaturalStatus_,occurrence_,occurrenceDetails_,occurrenceID_,occurrenceRemarks_,occurrenceStatus_,order_,orderKey_,organism_,"
                + "organismID_,organismName_,organismRemarks_,organismScope_,originalNameUsage_,originalNameUsageID_,otherCatalogNumbers_,ownerInstitutionCode_,"
                + "parentNameUsage_,parentNameUsageID_,phylum_,phylumKey_,pointRadiusSpatialFit_,preparations_,preservedSpecimen_,previousIdentifications_,protocol_,"
                + "provenance_,publisher_,publishingCountry_,recordedBy_,recordNumber_,references_,relatedResourceID_,relationshipAccordingTo_,"
                + "relationshipEstablishedDate_,relationshipRemarks_,relation_,replaces_,reproductiveCondition_,requires_,resourceID_,resourceRelationship_,"
                + "resourceRelationshipID_,rights_,rightsHolder_,samplingEffort_,samplingProtocol_,scientificName_,scientificNameAuthorship_,scientificNameID_,sex_,"
                + "source_,spatial_,species_,speciesKey_,specificEpithet_,startDayOfYear_,stateProvince_,subgenus_,subgenusKey_,subject_,tableOfContents_,taxon_,"
                + "taxonConceptID_,taxonID_,taxonKey_,taxonomicStatus_,taxonRank_,taxonRemarks_,temporal_,title_,type_,typeStatus_,typifiedName_,valid_,"
                + "verbatimCoordinates_,verbatimCoordinateSystem_,verbatimDate_,verbatimDepth_,verbatimElevation_,verbatimEventDate_,verbatimLatitude_,"
                + "verbatimLocality_,verbatimLongitude_,verbatimSRS_,verbatimTaxonRank_,vernacularName_,waterBody_,year_ FROM Workflow.Clean_" + this.getUuid()
                + " WHERE Clean_" + this.getUuid() + ".id_=";
        String sqlIDCleanToDelete = "DELETE FROM Workflow.Clean_" + this.getUuid() + " WHERE id_=";
        for (int l = 0; l < listIDtoDelete.size(); l++) {
            if (l != listIDtoDelete.size() - 1) {
                sqlIDCleanToDelete += listIDtoDelete.get(l) + " OR id_=";
                sqlIDCleanToSelect += listIDtoDelete.get(l) + " OR Clean_" + this.getUuid() + ".id_=";
            } else {
                sqlIDCleanToDelete += listIDtoDelete.get(l) + ";";
                sqlIDCleanToSelect += listIDtoDelete.get(l) + ";";
            }
        }
        // Run the SELECT first so the removed rows can be returned to the caller.
        Statement statement = null;
        try {
            statement = ConnectionDatabase.getConnection().createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                    ResultSet.CONCUR_READ_ONLY);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        DatabaseTreatment newConnectionSelectID = new DatabaseTreatment(statement);
        List<String> messagesSelectID = new ArrayList<>();
        //String sqlSelectID = + id_ + ";";
        messagesSelectID.add("\n--- Select wrong matching between polygon and Iso2 code ---\n");
        messagesSelectID.addAll(newConnectionSelectID.executeSQLcommand("executeQuery", sqlIDCleanToSelect));
        //messagesSelectID.add(sqlIDCleanToSelect);
        for (int j = 0; j < messagesSelectID.size(); j++) {
            System.out.println(messagesSelectID.get(j));
        }
        List<String> selectIDResults = newConnectionSelectID.getResultatSelect();
        // De-duplicate the selected rows into the returned list.
        for (int k = 0; k < selectIDResults.size(); k++) {
            if (!listToDelete.contains(selectIDResults.get(k))) {
                listToDelete.add(selectIDResults.get(k));
            }
        }
        // Now actually delete the flagged rows.
        Statement statementDelete = null;
        try {
            statementDelete = ConnectionDatabase.getConnection()
                    .createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        DatabaseTreatment newConnectionDeleteID = new DatabaseTreatment(statementDelete);
        List<String> messagesDeleteID = new ArrayList<>();
        //String sqlDeleteID = "DELETE FROM Workflow.Clean_" + this.getUuid() + " WHERE id_=" + id_ + ";";
        messagesDeleteID.add("\n--- Delete wrong matching between polygon and Iso2 code ---\n");
        messagesDeleteID.addAll(newConnectionDeleteID.executeSQLcommand("executeUpdate", sqlIDCleanToDelete));
        // NOTE(review): deleteIDResults is never read — retained to preserve behaviour.
        List<String> deleteIDResults = newConnectionDeleteID.getResultatSelect();
        messagesDeleteID.add("nb lignes affectes :" + listToDelete.size());
        for (int i = 0; i < messagesDeleteID.size(); i++) {
            System.out.println(messagesDeleteID.get(i));
        }
    }
    this.setNbWrongIso2(nbWrongIso2);
    return listToDelete;
}
From source file:free.rm.skytube.gui.businessobjects.UpdatesChecker.java
/** * @return The current app's version number. */// ww w . j a v a2s. com private float getCurrentVerNumber() { String currentAppVersionStr = BuildConfig.VERSION_NAME; if (BuildConfig.FLAVOR.equalsIgnoreCase("extra")) { String[] ver = BuildConfig.VERSION_NAME.split("\\s+"); currentAppVersionStr = ver[0]; } return Float.parseFloat(currentAppVersionStr); }
From source file:blue.soundObject.Note.java
/**
 * Initialises this note's p-fields by tokenising a Csound-style score line.
 * Tokens are: double-quoted strings (quotes kept), bracketed expressions
 * {@code [...]} evaluated via ScoreExpressionParser, and whitespace-separated
 * words. When {@code previousNote} is given, missing trailing fields are
 * carried over (if the instrument p-field matches) and "." fields are
 * replaced by the previous note's corresponding value. Any parse failure is
 * allowed to propagate so the note factory method returns null.
 */
private void noteInit(String input, Note previousNote) throws Exception {
    // If for any reason there should be an exception
    // let it bubble up and the note factory method will
    // return null
    int i = 0;
    ArrayList buffer = new ArrayList();
    int size = input.length();
    int start = 0;
    // the following code might be better off using
    // regular expressions, but should wait until
    // Java 1.4 is available on all platforms
    // PARSES PFIELDS FROM STRING
    while (i < size) {
        if (input.charAt(i) == '\"') {
            // Quoted token: consume up to and including the closing quote.
            start = i++;
            while (i < size && input.charAt(i) != '\"') {
                i++;
            }
            buffer.add(input.substring(start, ++i));
            // i++;
        } else if (input.charAt(i) == '[') {
            // Bracketed expression: evaluate and store the numeric result.
            start = ++i;
            while (i < size && input.charAt(i) != ']') {
                i++;
            }
            float val = ScoreExpressionParser.eval(input.substring(start, i));
            i++;
            buffer.add(Float.toString(val));
        } else if (Character.isWhitespace(input.charAt(i))) {
            while (i < size && Character.isWhitespace(input.charAt(i))) {
                i++; // eat up empty spaces or tabs
            }
        } else {
            // Plain token: runs to the next whitespace.
            start = i;
            while (i < size && !(Character.isWhitespace(input.charAt(i)))) {
                i++;
            }
            buffer.add(input.substring(start, i));
        }
    }
    if (previousNote != null) {
        // Carry trailing p-fields from the previous note when both notes
        // address the same instrument (string-equal p1, or numerically equal
        // after truncation to int).
        boolean performCarry = buffer.get(0).equals(previousNote.getPField(1));
        if (!performCarry) {
            try {
                int instr1 = (int) (Float.parseFloat((String) buffer.get(0)));
                int instr2 = (int) (Float.parseFloat(previousNote.getPField(1)));
                if (instr1 == instr2) {
                    performCarry = true;
                }
            } catch (NumberFormatException nfe) {
                performCarry = false;
            }
        }
        if (performCarry) {
            int numFieldsToCopy = previousNote.getPCount() - buffer.size();
            if (numFieldsToCopy > 0) {
                for (i = previousNote.getPCount() - numFieldsToCopy; i < previousNote.getPCount(); i++) {
                    buffer.add(previousNote.getPField(i + 1));
                }
            }
        }
    }
    // INITIALIZES PFIELD ARRAY
    fields = (String[]) buffer.toArray(new String[buffer.size()]);
    if (previousNote != null) {
        // SWAP PERIODS WITH VALUE FROM PREVIOUS NOTE
        for (i = 0; i < fields.length; i++) {
            if (fields[i].equals(".")) {
                fields[i] = previousNote.getPField(i + 1);
            }
        }
    }
    // p3 is the duration; a negative value marks a tied note.
    float dur = Float.parseFloat(fields[2]);
    setSubjectiveDuration(dur);
    setTied(dur < 0.0f);
}
From source file:strat.mining.multipool.stats.jersey.client.impl.WaffleRestClientImpl.java
/**
 * Scrapes the Wafflepool global-stats HTML page and maps it to a GlobalStats.
 * <p>
 * Parsing is positional and depends on the page's exact layout: the
 * {@code #pool_stats} header is whitespace-split for hashrate/miners/coin,
 * and the {@code #content} text is carved up by literal label strings to
 * extract paid-out / balance / unconverted BTC totals.
 *
 * @return the populated stats, or null if the page could not be fetched
 */
@Override
public GlobalStats getGlobalStats() {
    GlobalStats result = null;
    try {
        LOGGER.debug("Start to get the waffle global stats.");
        long startTime = System.currentTimeMillis();
        // Spoofed user agent: the pool page may block default Java clients.
        Document statsPage = Jsoup.connect(WAFFLE_POOL_GLOBAL_STATS_URL)
                .userAgent("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)").get();
        PERF_LOGGER.info("Retrieved Wafflepool stats page in {} ms.", System.currentTimeMillis() - startTime);
        result = new GlobalStats();
        try {
            // Optional operator note: copied only when the #note element is
            // present and not hidden via inline style.
            Elements noteElements = statsPage.select("#note");
            if (noteElements != null && !noteElements.isEmpty()) {
                String style = noteElements.get(0).attr("style");
                if (style == null || (!style.contains("display:none") && !style.contains("visibility:hidden"))) {
                    String note = noteElements.get(0).html();
                    result.setNote(note);
                }
            }
        } catch (Exception e) {
            LOGGER.error("Failed to get the last note.", e);
        }
        // Positional parse of the header line, e.g. tokens 1-2 = hashrate
        // value+unit, token 6 = miner count, token 10 = mined coin.
        Elements headersElements = statsPage.select("#pool_stats");
        String[] splitted = headersElements.get(0).text().split("\\s");
        result.setMegaHashesPerSeconds(parsePower(splitted[1], splitted[2]));
        result.setNbMiners(Integer.parseInt(splitted[6]));
        result.setMiningCoin(splitted[10]);
        // Carve the page body by its literal label strings to isolate the
        // three BTC figures; commas are stripped before parsing.
        Elements contentElements = statsPage.select("#content");
        splitted = contentElements.get(0).text().split("Bitcoins sent to miners:");
        String[] splitted2 = splitted[1].split("Bitcoins earned \\(not yet sent\\):");
        String rawPaidout = splitted2[0];
        splitted = splitted2[1].split("Bitcoins unconverted \\(approximate\\):");
        String rawBalance = splitted[0];
        splitted2 = splitted[1].split("Date BTC");
        String rawUnexchanged = splitted2[0];
        result.setTotalPaidout(Float.parseFloat(rawPaidout.replaceAll(",", "")));
        result.setTotalBalance(Float.parseFloat(rawBalance.replaceAll(",", "")));
        result.setTotalUnexchanged(Float.parseFloat(rawUnexchanged.replaceAll(",", "")));
        LOGGER.debug("Global stats from waffle retreived.");
    } catch (IOException e) {
        LOGGER.error("Failed to retrieve the stats page of Wafflepool.", e);
    }
    return result;
}