List of usage examples for java.text.ParseException.getMessage()
public String getMessage()
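ParseException.getMessage() is inherited from java.lang.Throwable and returns the detail message supplied when the exception was constructed (for SimpleDateFormat this is typically "Unparseable date: ..."). Before the project examples below, here is a minimal, self-contained sketch of the pattern they all share; the class name, format pattern, and input string are illustrative only, not taken from any of the projects listed.

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

public class ParseExceptionMessageDemo {
  public static void main(String[] args) {
    SimpleDateFormat format = new SimpleDateFormat("dd/MM/yyyy");
    String input = "31-12-2020"; // deliberately does not match the pattern
    try {
      Date date = format.parse(input);
      System.out.println("Parsed: " + date);
    } catch (ParseException e) {
      // getMessage() carries the human-readable reason; getErrorOffset() gives the failing position
      System.err.println("Parse failed: " + e.getMessage() + " at offset " + e.getErrorOffset());
    }
  }
}

In the examples that follow, the value returned by getMessage() is typically forwarded to a logger, wrapped in another exception, or surfaced to the user rather than printed directly.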
From source file:net.sourceforge.fenixedu.presentationTier.backBeans.departmentAdmOffice.FunctionsManagementBackingBean.java
public String editFunction() throws FenixServiceException {
  DateFormat format = new SimpleDateFormat("dd/MM/yyyy");
  Double credits = Double.valueOf(this.getCredits());
  Date beginDate_ = null, endDate_ = null;
  try {
    if (this.getBeginDate() != null) {
      beginDate_ = format.parse(this.getBeginDate());
    } else {
      setErrorMessage("error.notBeginDate");
      return "";
    }
    if (this.getEndDate() != null) {
      endDate_ = format.parse(this.getEndDate());
    } else {
      setErrorMessage("error.notEndDate");
      return "";
    }
    EditPersonFunction.runEditPersonFunction(this.getPersonFunctionID(), this.getFunctionID(),
        YearMonthDay.fromDateFields(beginDate_), YearMonthDay.fromDateFields(endDate_), credits);
    setErrorMessage("message.success");
    return "alterFunction";
  } catch (ParseException e) {
    setErrorMessage("error.date1.format");
  } catch (FenixServiceException e) {
    setErrorMessage(e.getMessage());
  } catch (DomainException e) {
    setErrorMessage(e.getMessage());
  }
  return "";
}
From source file:net.sourceforge.fenixedu.presentationTier.backBeans.departmentAdmOffice.FunctionsManagementBackingBean.java
public String associateNewFunction() throws FenixServiceException, ParseException {
  if (this.getUnit() == null
      || (!this.getUnit().getTopUnits().isEmpty()
          && !this.getUnit().getTopUnits().contains(getEmployeeDepartmentUnit()))
      || (this.getUnit().getTopUnits().isEmpty() && !this.getUnit().equals(getEmployeeDepartmentUnit()))) {
    setErrorMessage("error.invalid.unit");
  } else if (!this.getUnit().getFunctionsSet().contains(this.getFunction())) {
    setErrorMessage("error.invalid.function");
  } else {
    DateFormat format = new SimpleDateFormat("dd/MM/yyyy");
    Double credits = Double.valueOf(this.getCredits());
    Date beginDate_ = null, endDate_ = null;
    try {
      if (this.getBeginDate() != null) {
        beginDate_ = format.parse(this.getBeginDate());
      } else {
        setErrorMessage("error.notBeginDate");
        return "";
      }
      if (this.getEndDate() != null) {
        endDate_ = format.parse(this.getEndDate());
      } else {
        setErrorMessage("error.notEndDate");
        return "";
      }
      AssociateNewFunctionToPerson.runAssociateNewFunctionToPerson(this.getFunctionID(), this.getPersonID(),
          credits, YearMonthDay.fromDateFields(beginDate_), YearMonthDay.fromDateFields(endDate_));
      setErrorMessage("message.success");
      return "success";
    } catch (ParseException e) {
      setErrorMessage("error.date1.format");
    } catch (FenixServiceException e) {
      setErrorMessage(e.getMessage());
    } catch (DomainException e) {
      setErrorMessage(e.getMessage());
    }
  }
  return "";
}
From source file:org.b3log.solo.service.ArticleMgmtService.java
/**
 * Archive the create date with the specified article.
 *
 * @param article the specified article, for example,
 * <pre>
 * {
 *     ....,
 *     "oId": "",
 *     "articleCreateDate": java.util.Date,
 *     ....
 * }
 * </pre>
 * @throws RepositoryException repository exception
 */
private void archiveDate(final JSONObject article) throws RepositoryException {
  final Date createDate = (Date) article.opt(Article.ARTICLE_CREATE_DATE);
  final String createDateString = ArchiveDate.DATE_FORMAT.format(createDate);
  JSONObject archiveDate = archiveDateRepository.getByArchiveDate(createDateString);
  if (null == archiveDate) {
    archiveDate = new JSONObject();
    try {
      archiveDate.put(ArchiveDate.ARCHIVE_TIME, ArchiveDate.DATE_FORMAT.parse(createDateString).getTime());
      archiveDate.put(ArchiveDate.ARCHIVE_DATE_ARTICLE_COUNT, 0);
      archiveDate.put(ArchiveDate.ARCHIVE_DATE_PUBLISHED_ARTICLE_COUNT, 0);
      archiveDateRepository.add(archiveDate);
    } catch (final ParseException e) {
      LOGGER.log(Level.SEVERE, e.getMessage(), e);
      throw new RepositoryException(e);
    }
  }
  final JSONObject newArchiveDate = new JSONObject(archiveDate,
      CollectionUtils.jsonArrayToArray(archiveDate.names(), String[].class));
  newArchiveDate.put(ArchiveDate.ARCHIVE_DATE_ARTICLE_COUNT,
      archiveDate.optInt(ArchiveDate.ARCHIVE_DATE_ARTICLE_COUNT) + 1);
  if (article.optBoolean(Article.ARTICLE_IS_PUBLISHED)) {
    newArchiveDate.put(ArchiveDate.ARCHIVE_DATE_PUBLISHED_ARTICLE_COUNT,
        archiveDate.optInt(ArchiveDate.ARCHIVE_DATE_PUBLISHED_ARTICLE_COUNT) + 1);
  }
  archiveDateRepository.update(archiveDate.optString(Keys.OBJECT_ID), newArchiveDate);
  final JSONObject archiveDateArticleRelation = new JSONObject();
  archiveDateArticleRelation.put(ArchiveDate.ARCHIVE_DATE + "_" + Keys.OBJECT_ID,
      archiveDate.optString(Keys.OBJECT_ID));
  archiveDateArticleRelation.put(Article.ARTICLE + "_" + Keys.OBJECT_ID, article.optString(Keys.OBJECT_ID));
  archiveDateArticleRepository.add(archiveDateArticleRelation);
}
From source file:gov.nasa.arc.spife.core.plan.advisor.resources.ProfileConstraintPlanAdvisor.java
private void validateProfileEqualityConstraint(Profile<?> profile, ProfileEqualityConstraint constraint,
    EPlanElement pe) {
  String valueLiteral = constraint.getValueLiteral();
  if (LOG.isDebugEnabled()) {
    LOG.debug("validating profile equality constraint " + profile.getId());
    LOG.debug("\t" + constraint.getValueLiteral());
  }
  Object value = null;
  try {
    value = parseValue(profile, valueLiteral);
    if (value == null) {
      appendViolation(profile, constraint,
          new ProfileConstraintViolation(this, pe, profile, constraint, "No value supplied"));
      return;
    }
  } catch (ParseException e) {
    appendViolation(profile, constraint,
        new ProfileConstraintViolation(this, pe, profile, constraint, e.getMessage()));
    return;
  }
  ProfileConstraintViolation currentViolation = null;
  TemporalExtent constraintExtent = getExtent(constraint);
  for (Long time : getTimesToVerify(profile, constraintExtent)) {
    Date date = new Date(time);
    Object pt = profile.getValue(date);
    boolean violated = !CommonUtils.equals(pt, value);
    if (LOG.isDebugEnabled()) {
      LOG.debug("\t\tviolated = " + violated + " @ " + date);
    }
    if (violated) {
      if (currentViolation == null) {
        StringBuffer buffer = new StringBuffer();
        buffer.append(ProfileUtil.getDisplayString(profile, value));
        buffer.append(" is the required value");
        currentViolation = new ProfileConstraintViolation(this, pe, profile, constraint, date, date,
            buffer.toString());
      } else {
        currentViolation.setEnd(date);
      }
    } else if (currentViolation != null) {
      currentViolation.setEnd(date); // presumably the previous violation holds till this time
      appendViolation(profile, constraint, currentViolation);
      currentViolation = null;
    }
  }
  if (currentViolation != null) {
    Date start = constraintExtent.getEnd();
    Date end = DateUtils.latest(constraintExtent.getEnd(), pe.getMember(TemporalMember.class).getEndTime());
    end = DateUtils.add(end, 1);
    for (Long time : getTimesToVerify(profile, new TemporalExtent(start, end))) {
      Date date = new Date(time);
      Object pt = profile.getValue(date);
      boolean violated = !CommonUtils.equals(pt, value);
      if (!violated) {
        currentViolation.setEnd(date);
        appendViolation(profile, constraint, currentViolation);
        return;
      }
    }
    currentViolation.setEnd(pe.getMember(TemporalMember.class).getEndTime());
    appendViolation(profile, constraint, currentViolation);
  }
}
From source file:edu.hawaii.soest.kilonalu.utilities.FileSource.java
/**
 * A method that executes the reading of data from the data file to the RBNB
 * server after all configuration of settings, connections to hosts, and
 * thread initializing occurs. This method contains the detailed code for
 * reading the data and interpreting the data files.
 */
protected boolean execute() {
  logger.debug("FileSource.execute() called.");
  boolean failed = true; // indicates overall success of execute()

  // do not execute the stream if there is no connection
  if (!isConnected())
    return false;

  try {
    // open the data file for monitoring
    FileReader reader = new FileReader(new File(getFileName()));
    this.fileReader = new BufferedReader(reader);

    // add channels of data that will be pushed to the server.
    // Each sample will be sent to the Data Turbine as an rbnb frame.
    int channelIndex = 0;
    ChannelMap rbnbChannelMap = new ChannelMap(); // used to insert channel data
    ChannelMap registerChannelMap = new ChannelMap(); // used to register channels

    // add the DecimalASCIISampleData channel to the channelMap
    channelIndex = registerChannelMap.Add(getRBNBChannelName());
    registerChannelMap.PutUserInfo(channelIndex, "units=none");

    // and register the RBNB channels
    getSource().Register(registerChannelMap);
    registerChannelMap.Clear();

    // on execute(), query the DT to find the timestamp of the last sample inserted
    // and be sure the following inserts are after that date
    ChannelMap requestMap = new ChannelMap();
    int entryIndex = requestMap.Add(getRBNBClientName() + "/" + getRBNBChannelName());
    logger.debug("Request Map: " + requestMap.toString());
    Sink sink = new Sink();
    sink.OpenRBNBConnection(getServer(), "lastEntrySink");
    sink.Request(requestMap, 0., 1., "newest");
    ChannelMap responseMap = sink.Fetch(5000); // get data within 5 seconds

    // initialize the last sample date
    Date initialDate = new Date();
    long lastSampleTimeAsSecondsSinceEpoch = initialDate.getTime() / 1000L;
    logger.debug("Initialized the last sample date to: " + initialDate.toString());
    logger.debug("The last sample date as a long is: " + lastSampleTimeAsSecondsSinceEpoch);

    if (responseMap.NumberOfChannels() == 0) {
      // set the last sample time to 0 since there are no channels yet
      lastSampleTimeAsSecondsSinceEpoch = 0L;
      logger.debug("Resetting the last sample date to the epoch: "
          + (new Date(lastSampleTimeAsSecondsSinceEpoch * 1000L)).toString());
    } else if (responseMap.NumberOfChannels() > 0) {
      lastSampleTimeAsSecondsSinceEpoch = new Double(responseMap.GetTimeStart(entryIndex)).longValue();
      logger.debug("There are existing channels. Last sample time: "
          + (new Date(lastSampleTimeAsSecondsSinceEpoch * 1000L)).toString());
    }
    sink.CloseRBNBConnection();

    // poll the data file for new lines of data and insert them into the RBNB
    while (true) {
      String line = fileReader.readLine();
      if (line == null) {
        this.streamingThread.sleep(POLL_INTERVAL);
      } else {
        // test the line for the expected data pattern
        Matcher matcher = this.dataPattern.matcher(line);

        // if the line matches the data pattern, insert it into the DataTurbine
        if (matcher.matches()) {
          logger.debug("This line matches the data line pattern: " + line);
          Date sampleDate = getSampleDate(line);
          if (this.dateFormats == null || this.dateFields == null) {
            logger.warn("Using the default datetime format and field for sample data. "
                + "Use the -f and -d options to explicitly set date time fields.");
          }
          logger.debug("Sample date is: " + sampleDate.toString());

          // convert the sample date to seconds since the epoch
          long sampleTimeAsSecondsSinceEpoch = (sampleDate.getTime() / 1000L);

          // only insert samples newer than the last sample seen at startup
          // and that are not in the future (> 1 hour since the CTD clock
          // may have drifted)
          Calendar currentCal = Calendar.getInstance();
          this.tz = TimeZone.getTimeZone(this.timezone);
          currentCal.setTimeZone(this.tz);
          currentCal.add(Calendar.HOUR, 1);
          Date currentDate = currentCal.getTime();

          if (lastSampleTimeAsSecondsSinceEpoch < sampleTimeAsSecondsSinceEpoch
              && sampleTimeAsSecondsSinceEpoch < currentDate.getTime() / 1000L) {
            // add the sample timestamp to the rbnb channel map
            //registerChannelMap.PutTime(sampleTimeAsSecondsSinceEpoch, 0d);
            rbnbChannelMap.PutTime((double) sampleTimeAsSecondsSinceEpoch, 0d);

            // then add the data line to the channel map
            channelIndex = rbnbChannelMap.Add(getRBNBChannelName());
            rbnbChannelMap.PutMime(channelIndex, "text/plain");
            rbnbChannelMap.PutDataAsString(channelIndex, line + "\r\n");

            // be sure we are connected
            int numberOfChannelsFlushed = 0;
            try {
              // insert into the DataTurbine
              numberOfChannelsFlushed = getSource().Flush(rbnbChannelMap, true);

              // in the event we just lost the network, sleep, try again
              while (numberOfChannelsFlushed < 1) {
                logger.debug("No channels flushed, trying again in 10 seconds.");
                Thread.sleep(10000L);
                numberOfChannelsFlushed = getSource().Flush(rbnbChannelMap, true);
              }
            } catch (SAPIException sapie) {
              // reconnect if an exception is thrown on Flush()
              logger.debug("Error while flushing the source: " + sapie.getMessage());
              logger.debug("Disconnecting from the DataTurbine.");
              disconnect();
              logger.debug("Connecting to the DataTurbine.");
              connect();
              getSource().Flush(rbnbChannelMap);

              // in the event we just lost the network, sleep, try again
              while (numberOfChannelsFlushed < 1) {
                logger.debug("No channels flushed, trying again in 10 seconds.");
                Thread.sleep(10000L);
                numberOfChannelsFlushed = getSource().Flush(rbnbChannelMap, true);
              }
            }

            // reset the last sample time to the sample just inserted
            lastSampleTimeAsSecondsSinceEpoch = sampleTimeAsSecondsSinceEpoch;
            logger.debug("Last sample time is now: "
                + (new Date(lastSampleTimeAsSecondsSinceEpoch * 1000L).toString()));
            logger.info(getRBNBClientName() + " Sample sent to the DataTurbine: " + line.trim());
            rbnbChannelMap.Clear();
          } else {
            logger.info("The current line is earlier than the last entry "
                + "in the Data Turbine or is a date in the future. "
                + "Skipping it. The line was: " + line);
          }
        } else {
          logger.info("The current line doesn't match an expected "
              + "data line pattern. Skipping it. The line was: " + line);
        }
      } // end if()
    } // end while
  } catch (ParseException pe) {
    logger.info("There was a problem parsing the sample date string. "
        + "The message was: " + pe.getMessage());
    try {
      this.fileReader.close();
    } catch (IOException ioe2) {
      logger.info("There was a problem closing the data file. The "
          + "message was: " + ioe2.getMessage());
    }
    failed = true;
    return !failed;
  } catch (SAPIException sapie) {
    logger.info("There was a problem communicating with the DataTurbine. "
        + "The message was: " + sapie.getMessage());
    try {
      this.fileReader.close();
    } catch (IOException ioe2) {
      logger.info("There was a problem closing the data file. The "
          + "message was: " + ioe2.getMessage());
    }
    failed = true;
    return !failed;
  } catch (InterruptedException ie) {
    logger.info("There was a problem while polling the data file. The "
        + "message was: " + ie.getMessage());
    try {
      this.fileReader.close();
    } catch (IOException ioe2) {
      logger.info("There was a problem closing the data file. The "
          + "message was: " + ioe2.getMessage());
    }
    failed = true;
    return !failed;
  } catch (IOException ioe) {
    logger.info("There was a problem opening the data file. "
        + "The message was: " + ioe.getMessage());
    failed = true;
    return !failed;
  }
}
From source file:no.met.jtimeseries.service.TimeSeriesService.java
@POST
@Path("forecast/archived/precipitation")
@Produces("application/json")
@ServiceDescription("Return archived precipitation.")
public Response getArchivedPrecipitation(@QueryParam("latitude") @DefaultValue("0") double latitude,
    @QueryParam("longitude") @DefaultValue("0") double longitude,
    @QueryParam("resolution") @DefaultValue("1") int resolution,
    @QueryParam("term") @DefaultValue("long") String term, String forecast) {
  if (forecast == null || forecast.isEmpty())
    return Response.status(Response.Status.BAD_REQUEST).build();

  try {
    Schema schema = getLocationForecastSchema();
    Validator validator = new LocationForecastValidator(schema);
    validator.validate(new SAXSource(new InputSource(new StringReader(forecast))));
    // posted data is ok.
    int hh = ("short".equalsIgnoreCase(term)) ? MeteogramWrapper.SHORT_TERM_HOURS
        : MeteogramWrapper.LONG_TERM_HOURS;
    GenericDataModel model = MeteogramWrapper.getModel(new StringReader(forecast));
    NumberPhenomenon precipitation = model
        .getNumberPhenomenon(PhenomenonName.Precipitation.nameWithResolution(resolution));
    Date from = precipitation.getStartTime();
    Date to = Utility.getDateWithAddedHours(from, hh);
    model.cutOlderThan(to);
    return Response.ok(precipitation.toJSON()).build();
  } catch (ParseException ex) {
    LogUtils.logException(logger, "Failed to parse provided data", ex);
  } catch (IOException ex) {
    LogUtils.logException(logger, "Failed to parse provided data", ex);
  } catch (DocumentException ex) {
    LogUtils.logException(logger, "Failed to parse provided data", ex);
  } catch (SAXException ex) {
    Logger.getLogger(TimeSeriesService.class.getName()).log(Level.WARNING, "Invalid schema from api.met.no", ex);
  } catch (ValidationException ex) {
    Logger.getLogger(TimeSeriesService.class.getName()).log(Level.WARNING, ex.getMessage(), ex);
    return Response.status(Response.Status.BAD_REQUEST).build();
  }
  return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
}
From source file:gobblin.source.extractor.extract.jdbc.JdbcExtractor.java
@Override
public long getHighWatermark(CommandOutput<?, ?> response, String watermarkColumn, String watermarkColumnFormat)
    throws HighWatermarkException {
  this.log.debug("Extract high watermark from resultset");
  ResultSet resultset = null;
  Iterator<ResultSet> itr = (Iterator<ResultSet>) response.getResults().values().iterator();
  if (itr.hasNext()) {
    resultset = itr.next();
  } else {
    throw new HighWatermarkException("Failed to get high watermark from database - Resultset has no records");
  }

  Long HighWatermark;
  try {
    String watermark;
    if (resultset.next()) {
      watermark = resultset.getString(1);
    } else {
      watermark = null;
    }
    if (watermark == null) {
      return ConfigurationKeys.DEFAULT_WATERMARK_VALUE;
    }
    if (watermarkColumnFormat != null) {
      SimpleDateFormat inFormat = new SimpleDateFormat(watermarkColumnFormat);
      Date date = null;
      try {
        date = inFormat.parse(watermark);
      } catch (ParseException e) {
        this.log.error("ParseException: " + e.getMessage(), e);
      }
      SimpleDateFormat outFormat = new SimpleDateFormat("yyyyMMddHHmmss");
      HighWatermark = Long.parseLong(outFormat.format(date));
    } else {
      HighWatermark = Long.parseLong(watermark);
    }
  } catch (Exception e) {
    throw new HighWatermarkException("Failed to get high watermark from database; error - " + e.getMessage(), e);
  }
  return HighWatermark;
}
From source file:edu.hawaii.soest.hioos.isus.ISUSSource.java
/**
 * A method that processes the data object passed and flushes the
 * data to the DataTurbine given the sensor properties in the XMLConfiguration
 * passed in.
 *
 * @param xmlConfig - the XMLConfiguration object containing the list of
 *                    sensor properties
 * @param frameMap - the parsed data as a HierarchicalMap object
 */
public boolean process(XMLConfiguration xmlConfig, HierarchicalMap frameMap) {
  logger.debug("ISUSSource.process() called.");

  // do not execute the stream if there is no connection
  if (!isConnected())
    return false;

  boolean success = false;

  try {
    // add channels of data that will be pushed to the server.
    // Each sample will be sent to the Data Turbine as an rbnb frame. Information
    // on each channel is found in the XMLConfiguration file (email.account.properties.xml)
    // and the StorXParser object (to get the data string)
    ChannelMap rbnbChannelMap = new ChannelMap(); // used to flush channels
    ChannelMap registerChannelMap = new ChannelMap(); // used to register channels
    int channelIndex = 0;

    String sensorName = null;
    String sensorSerialNumber = null;
    String sensorDescription = null;
    boolean isImmersed = false;
    String[] calibrationURLs = null;
    String calibrationURL = null;
    String type = null;

    List sensorList = xmlConfig.configurationsAt("account.logger.sensor");

    for (Iterator sIterator = sensorList.iterator(); sIterator.hasNext();) {
      HierarchicalConfiguration sensorConfig = (HierarchicalConfiguration) sIterator.next();
      sensorSerialNumber = sensorConfig.getString("serialNumber");

      // find the correct sensor configuration properties
      if (sensorSerialNumber.equals(frameMap.get("serialNumber"))) {
        sensorName = sensorConfig.getString("name");
        sensorDescription = sensorConfig.getString("description");
        isImmersed = new Boolean(sensorConfig.getString("isImmersed")).booleanValue();
        calibrationURLs = sensorConfig.getStringArray("calibrationURL");
        type = (String) frameMap.get("type");

        // find the correct calibrationURL from the list given the type
        for (String url : calibrationURLs) {
          if (url.indexOf(type) > 0) {
            calibrationURL = url;
            break;
          } else {
            logger.debug("There was no match for " + type);
          }
        }

        // get a Calibration instance to interpret raw sensor values
        Calibration calibration = new Calibration();

        if (calibration.parse(calibrationURL)) {
          // Build the RBNB channel map

          // get the sample date and convert it to seconds since the epoch
          Date frameDate = (Date) frameMap.get("date");
          Calendar frameDateTime = Calendar.getInstance();
          frameDateTime.setTime(frameDate);
          double sampleTimeAsSecondsSinceEpoch = (double) (frameDateTime.getTimeInMillis() / 1000);

          // and create a string formatted date for the given time zone
          DATE_FORMAT.setTimeZone(TZ);
          String frameDateAsString = DATE_FORMAT.format(frameDate).toString();

          // get the sample data from the frame map
          ByteBuffer rawFrame = (ByteBuffer) frameMap.get("rawFrame");
          ISUSFrame isusFrame = (ISUSFrame) frameMap.get("parsedFrameObject");
          String serialNumber = isusFrame.getSerialNumber();
          String sampleDate = isusFrame.getSampleDate();
          String sampleTime = isusFrame.getSampleTime();
          SimpleDateFormat dtFormat = new SimpleDateFormat();
          Date sampleDateTime = isusFrame.getSampleDateTime();
          dtFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
          dtFormat.applyPattern("MM/dd/yy");
          String sampleDateUTC = dtFormat.format(sampleDateTime);
          dtFormat.applyPattern("HH:mm:ss");
          String sampleTimeUTC = dtFormat.format(sampleDateTime);
          dtFormat.setTimeZone(TimeZone.getTimeZone("HST"));
          dtFormat.applyPattern("MM/dd/yy");
          String sampleDateHST = dtFormat.format(sampleDateTime);
          dtFormat.applyPattern("HH:mm:ss");
          String sampleTimeHST = dtFormat.format(sampleDateTime);
          dtFormat.applyPattern("dd-MMM-yy HH:mm");
          String sampleDateTimeHST = dtFormat.format(sampleDateTime);

          double rawNitrogenConcentration = isusFrame.getNitrogenConcentration();
          double rawAuxConcentration1 = isusFrame.getAuxConcentration1();
          double rawAuxConcentration2 = isusFrame.getAuxConcentration2();
          double rawAuxConcentration3 = isusFrame.getAuxConcentration3();
          double rawRmsError = isusFrame.getRmsError();
          double rawInsideTemperature = isusFrame.getInsideTemperature();
          double rawSpectrometerTemperature = isusFrame.getSpectrometerTemperature();
          double rawLampTemperature = isusFrame.getLampTemperature();
          int rawLampTime = isusFrame.getLampTime();
          double rawHumidity = isusFrame.getHumidity();
          double rawLampVoltage12 = isusFrame.getLampVoltage12();
          double rawInternalPowerVoltage5 = isusFrame.getInternalPowerVoltage5();
          double rawMainPowerVoltage = isusFrame.getMainPowerVoltage();
          double rawReferenceAverage = isusFrame.getReferenceAverage();
          double rawReferenceVariance = isusFrame.getReferenceVariance();
          double rawSeaWaterDarkCounts = isusFrame.getSeaWaterDarkCounts();
          double rawSpectrometerAverage = isusFrame.getSpectrometerAverage();
          int checksum = isusFrame.getChecksum();

          // apply calibrations to the observed data
          double nitrogenConcentration = calibration.apply(rawNitrogenConcentration, isImmersed, "NITRATE");
          double auxConcentration1 = calibration.apply(rawAuxConcentration1, isImmersed, "AUX1");
          double auxConcentration2 = calibration.apply(rawAuxConcentration2, isImmersed, "AUX2");
          double auxConcentration3 = calibration.apply(rawAuxConcentration3, isImmersed, "AUX3");
          double rmsError = calibration.apply(rawRmsError, isImmersed, "RMSe");
          double insideTemperature = calibration.apply(rawInsideTemperature, isImmersed, "T_INT");
          double spectrometerTemperature = calibration.apply(rawSpectrometerTemperature, isImmersed, "T_SPEC");
          double lampTemperature = calibration.apply(rawLampTemperature, isImmersed, "T_LAMP");
          int lampTime = rawLampTime;
          double humidity = calibration.apply(rawHumidity, isImmersed, "HUMIDITY");
          double lampVoltage12 = calibration.apply(rawLampVoltage12, isImmersed, "VOLT_12");
          double internalPowerVoltage5 = calibration.apply(rawInternalPowerVoltage5, isImmersed, "VOLT_5");
          double mainPowerVoltage = calibration.apply(rawMainPowerVoltage, isImmersed, "VOLT_MAIN");
          double referenceAverage = calibration.apply(rawReferenceAverage, isImmersed, "REF_AVG");
          double referenceVariance = calibration.apply(rawReferenceVariance, isImmersed, "REF_STD");
          double seaWaterDarkCounts = calibration.apply(rawSeaWaterDarkCounts, isImmersed, "SW_DARK");
          double spectrometerAverage = calibration.apply(rawSpectrometerAverage, isImmersed, "SPEC_AVG");

          // iterate through the individual wavelengths
          List<String> variableNames = calibration.getVariableNames();
          TreeMap<String, Double> wavelengthsMap = new TreeMap<String, Double>();
          Collections.sort(variableNames);
          int rawWavelengthCounts = 0;
          int count = 1;

          for (String name : variableNames) {
            // just handle the wavelength channels
            if (name.startsWith("UV_")) {
              rawWavelengthCounts = isusFrame.getChannelWavelengthCounts(count);
              double value = calibration.apply(rawWavelengthCounts, isImmersed, name);
              count++;
              wavelengthsMap.put(name, new Double(value));
            }
          }

          String sampleString = "";
          sampleString += sampleDate + "\t";
          sampleString += sampleDateUTC + "\t";
          sampleString += sampleTime + "\t";
          sampleString += sampleTimeUTC + "\t";
          sampleString += sampleDateHST + "\t";
          sampleString += sampleTimeHST + "\t";
          sampleString += sampleDateTimeHST + "\t";
          sampleString += String.format("%-15.11f", nitrogenConcentration) + "\t";
          //sampleString += String.format("%15.11f", auxConcentration1) + "\t";
          //sampleString += String.format("%15.11f", auxConcentration2) + "\t";
          //sampleString += String.format("%15.11f", auxConcentration3) + "\t";
          sampleString += String.format("%15.11f", rmsError) + "\t";
          sampleString += String.format("%15.11f", insideTemperature) + "\t";
          sampleString += String.format("%15.11f", spectrometerTemperature) + "\t";
          sampleString += String.format("%15.11f", lampTemperature) + "\t";
          sampleString += String.format("%6d", lampTime) + "\t";
          sampleString += String.format("%15.11f", humidity) + "\t";
          sampleString += String.format("%15.11f", lampVoltage12) + "\t";
          sampleString += String.format("%15.11f", internalPowerVoltage5) + "\t";
          sampleString += String.format("%15.11f", mainPowerVoltage) + "\t";
          sampleString += String.format("%15.11f", referenceAverage) + "\t";
          sampleString += String.format("%15.11f", referenceVariance) + "\t";
          sampleString += String.format("%15.11f", seaWaterDarkCounts) + "\t";
          sampleString += String.format("%15.11f", spectrometerAverage) + "\t";

          Set<String> wavelengths = wavelengthsMap.keySet();
          Iterator wIterator = wavelengths.iterator();

          while (wIterator.hasNext()) {
            String name = (String) wIterator.next();
            Double wavelengthValue = (Double) wavelengthsMap.get(name);
            sampleString += String.format("%6d", wavelengthValue.intValue()) + "\t";
            channelIndex = registerChannelMap.Add(name);
            registerChannelMap.PutUserInfo(channelIndex, "units=counts");
            channelIndex = rbnbChannelMap.Add(name);
            rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
            rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { wavelengthValue.doubleValue() });
          }

          sampleString += String.format("%03d", checksum);
          sampleString += "\n";

          // add the sample timestamp to the rbnb channel map
          //registerChannelMap.PutTime(sampleTimeAsSecondsSinceEpoch, 0d);
          rbnbChannelMap.PutTime(sampleTimeAsSecondsSinceEpoch, 0d);

          // add the BinaryRawSatlanticFrameData channel to the channelMap
          channelIndex = registerChannelMap.Add("BinaryRawSatlanticFrameData");
          registerChannelMap.PutUserInfo(channelIndex, "units=none");
          channelIndex = rbnbChannelMap.Add("BinaryRawSatlanticFrameData");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsByteArray(channelIndex, rawFrame.array());

          // add the DecimalASCIISampleData channel to the channelMap
          channelIndex = registerChannelMap.Add(getRBNBChannelName());
          registerChannelMap.PutUserInfo(channelIndex, "units=none");
          channelIndex = rbnbChannelMap.Add(getRBNBChannelName());
          rbnbChannelMap.PutMime(channelIndex, "text/plain");
          rbnbChannelMap.PutDataAsString(channelIndex, sampleString);

          // add the serialNumber channel to the channelMap
          channelIndex = registerChannelMap.Add("serialNumber");
          registerChannelMap.PutUserInfo(channelIndex, "units=none");
          channelIndex = rbnbChannelMap.Add("serialNumber");
          rbnbChannelMap.PutMime(channelIndex, "text/plain");
          rbnbChannelMap.PutDataAsString(channelIndex, serialNumber);

          // add the sampleDateUTC channel to the channelMap
          channelIndex = registerChannelMap.Add("sampleDateUTC");
          registerChannelMap.PutUserInfo(channelIndex, "units=YYYYDDD");
          channelIndex = rbnbChannelMap.Add("sampleDateUTC");
          rbnbChannelMap.PutMime(channelIndex, "text/plain");
          rbnbChannelMap.PutDataAsString(channelIndex, sampleDate);

          // add the sampleTimeUTC channel to the channelMap
          channelIndex = registerChannelMap.Add("sampleTimeUTC");
          registerChannelMap.PutUserInfo(channelIndex, "units=hh.hhhhhh");
          channelIndex = rbnbChannelMap.Add("sampleTimeUTC");
          rbnbChannelMap.PutMime(channelIndex, "text/plain");
          rbnbChannelMap.PutDataAsString(channelIndex, sampleTimeUTC);

          // add the nitrogenConcentration channel to the channelMap
          channelIndex = registerChannelMap.Add("nitrogenConcentration");
          registerChannelMap.PutUserInfo(channelIndex, "units=uM");
          channelIndex = rbnbChannelMap.Add("nitrogenConcentration");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { nitrogenConcentration });

          // add the auxConcentration1 channel to the channelMap
          channelIndex = registerChannelMap.Add("auxConcentration1");
          registerChannelMap.PutUserInfo(channelIndex, "units=none");
          channelIndex = rbnbChannelMap.Add("auxConcentration1");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { auxConcentration1 });

          // add the auxConcentration2 channel to the channelMap
          channelIndex = registerChannelMap.Add("auxConcentration2");
          registerChannelMap.PutUserInfo(channelIndex, "units=none");
          channelIndex = rbnbChannelMap.Add("auxConcentration2");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { auxConcentration2 });

          // add the auxConcentration3 channel to the channelMap
          channelIndex = registerChannelMap.Add("auxConcentration3");
          registerChannelMap.PutUserInfo(channelIndex, "units=none");
          channelIndex = rbnbChannelMap.Add("auxConcentration3");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { auxConcentration3 });

          // add the rmsError channel to the channelMap
          channelIndex = registerChannelMap.Add("rmsError");
          registerChannelMap.PutUserInfo(channelIndex, "units=none");
          channelIndex = rbnbChannelMap.Add("rmsError");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { rmsError });

          // add the insideTemperature channel to the channelMap
          channelIndex = registerChannelMap.Add("insideTemperature");
          registerChannelMap.PutUserInfo(channelIndex, "units=Celsius");
          channelIndex = rbnbChannelMap.Add("insideTemperature");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { insideTemperature });

          // add the spectrometerTemperature channel to the channelMap
          channelIndex = registerChannelMap.Add("spectrometerTemperature");
          registerChannelMap.PutUserInfo(channelIndex, "units=Celsius");
          channelIndex = rbnbChannelMap.Add("spectrometerTemperature");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { spectrometerTemperature });

          // add the lampTemperature channel to the channelMap
          channelIndex = registerChannelMap.Add("lampTemperature");
          registerChannelMap.PutUserInfo(channelIndex, "units=Celsius");
          channelIndex = rbnbChannelMap.Add("lampTemperature");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { lampTemperature });

          // add the lampTime channel to the channelMap
          channelIndex = registerChannelMap.Add("lampTime");
          registerChannelMap.PutUserInfo(channelIndex, "units=seconds");
          channelIndex = rbnbChannelMap.Add("lampTime");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsInt32(channelIndex, new int[] { lampTime });

          // add the humidity channel to the channelMap
          channelIndex = registerChannelMap.Add("humidity");
          registerChannelMap.PutUserInfo(channelIndex, "units=%");
          channelIndex = rbnbChannelMap.Add("humidity");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { humidity });

          // add the lampVoltage12 channel to the channelMap
          channelIndex = registerChannelMap.Add("lampVoltage12");
          registerChannelMap.PutUserInfo(channelIndex, "units=V");
          channelIndex = rbnbChannelMap.Add("lampVoltage12");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { lampVoltage12 });

          // add the internalPowerVoltage5 channel to the channelMap
          channelIndex = registerChannelMap.Add("internalPowerVoltage5");
          registerChannelMap.PutUserInfo(channelIndex, "units=V");
          channelIndex = rbnbChannelMap.Add("internalPowerVoltage5");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { internalPowerVoltage5 });

          // add the mainPowerVoltage channel to the channelMap
          channelIndex = registerChannelMap.Add("mainPowerVoltage");
          registerChannelMap.PutUserInfo(channelIndex, "units=V");
          channelIndex = rbnbChannelMap.Add("mainPowerVoltage");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { mainPowerVoltage });

          // add the referenceAverage channel to the channelMap
          channelIndex = registerChannelMap.Add("referenceAverage");
          registerChannelMap.PutUserInfo(channelIndex, "units=count");
          channelIndex = rbnbChannelMap.Add("referenceAverage");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { referenceAverage });

          // add the referenceVariance channel to the channelMap
          channelIndex = registerChannelMap.Add("referenceVariance");
          registerChannelMap.PutUserInfo(channelIndex, "units=count");
          channelIndex = rbnbChannelMap.Add("referenceVariance");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { referenceVariance });

          // add the seaWaterDarkCounts channel to the channelMap
          channelIndex = registerChannelMap.Add("seaWaterDarkCounts");
          registerChannelMap.PutUserInfo(channelIndex, "units=count");
          channelIndex = rbnbChannelMap.Add("seaWaterDarkCounts");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { seaWaterDarkCounts });

          // add the spectrometerAverage channel to the channelMap
          channelIndex = registerChannelMap.Add("spectrometerAverage");
          registerChannelMap.PutUserInfo(channelIndex, "units=count");
          channelIndex = rbnbChannelMap.Add("averageWavelength");
          rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
          rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { spectrometerAverage });

          // Now register the RBNB channels, and flush the rbnbChannelMap to the
          // DataTurbine
          getSource().Register(registerChannelMap);
          getSource().Flush(rbnbChannelMap);
          logger.info(frameDateAsString + " " + "Sample sent to the DataTurbine: (" + serialNumber + ") "
              + sampleString);
          registerChannelMap.Clear();
          rbnbChannelMap.Clear();

        } else {
          logger.info("Couldn't apply the calibration coefficients. " + "Skipping this sample.");
        } // end if()
      } // end if()
    } // end for()

    //getSource.Detach();
    success = true;

  } catch (ParseException pe) {
    // parsing of the calibration file failed. Log the exception, return false
    success = false;
    logger.debug("There was a problem parsing the calibration file. The " + "error message was: "
        + pe.getMessage());
    return success;
  } catch (SAPIException sapie) {
    // In the event of an RBNB communication exception, log the exception,
    // and allow execute() to return false, which will prompt a retry.
    success = false;
    sapie.printStackTrace();
    return success;
  }

  return success;
}
From source file:com.unboundid.scim.tools.SCIMQueryRate.java
/**
 * Performs the actual processing for this tool. In this case, it gets a
 * connection to the directory server and uses it to perform the requested
 * searches.
 *
 * @return The result code for the processing that was performed.
 */
@Override()
public ResultCode doToolProcessing() {
  // Initialize the Debugger
  Debug.setEnabled(true);
  Debug.getLogger().addHandler(new ConsoleHandler());
  Debug.getLogger().setUseParentHandlers(false);

  // Determine the random seed to use.
  final Long seed;
  if (randomSeed.isPresent()) {
    seed = Long.valueOf(randomSeed.getValue());
  } else {
    seed = null;
  }

  // Create a value pattern for the filter.
  final ValuePattern filterPattern;
  boolean isQuery = true;
  if (filter.isPresent()) {
    try {
      filterPattern = new ValuePattern(filter.getValue(), seed);
    } catch (ParseException pe) {
      Debug.debugException(pe);
      err(ERR_QUERY_TOOL_BAD_FILTER_PATTERN.get(pe.getMessage()));
      return ResultCode.PARAM_ERROR;
    }
  } else if (resourceId.isPresent()) {
    isQuery = false;
    try {
      filterPattern = new ValuePattern(resourceId.getValue());
    } catch (ParseException pe) {
      Debug.debugException(pe);
      err(ERR_QUERY_TOOL_BAD_RESOURCE_ID_PATTERN.get(pe.getMessage()));
      return ResultCode.PARAM_ERROR;
    }
  } else {
    filterPattern = null;
  }

  // Get the attributes to return.
  final String[] attrs;
  if (attributes.isPresent()) {
    final List<String> attrList = attributes.getValues();
    attrs = new String[attrList.size()];
    attrList.toArray(attrs);
  } else {
    attrs = NO_STRINGS;
  }

  // If the --ratePerSecond option was specified, then limit the rate
  // accordingly.
  FixedRateBarrier fixedRateBarrier = null;
  if (ratePerSecond.isPresent()) {
    final int intervalSeconds = collectionInterval.getValue();
    final int ratePerInterval = ratePerSecond.getValue() * intervalSeconds;
    fixedRateBarrier = new FixedRateBarrier(1000L * intervalSeconds, ratePerInterval);
  }

  // Determine whether to include timestamps in the output and if so what
  // format should be used for them.
  final boolean includeTimestamp;
  final String timeFormat;
  if (timestampFormat.getValue().equalsIgnoreCase("with-date")) {
    includeTimestamp = true;
    timeFormat = "dd/MM/yyyy HH:mm:ss";
  } else if (timestampFormat.getValue().equalsIgnoreCase("without-date")) {
    includeTimestamp = true;
    timeFormat = "HH:mm:ss";
  } else {
    includeTimestamp = false;
    timeFormat = null;
  }

  // Determine whether any warm-up intervals should be run.
  final long totalIntervals;
  final boolean warmUp;
  int remainingWarmUpIntervals = warmUpIntervals.getValue();
  if (remainingWarmUpIntervals > 0) {
    warmUp = true;
    totalIntervals = 0L + numIntervals.getValue() + remainingWarmUpIntervals;
  } else {
    warmUp = true;
    totalIntervals = 0L + numIntervals.getValue();
  }

  // Create the table that will be used to format the output.
  final OutputFormat outputFormat;
  if (csvFormat.isPresent()) {
    outputFormat = OutputFormat.CSV;
  } else {
    outputFormat = OutputFormat.COLUMNS;
  }

  final ColumnFormatter formatter = new ColumnFormatter(includeTimestamp, timeFormat, outputFormat, " ",
      new FormattableColumn(15, HorizontalAlignment.RIGHT, "Recent", "Queries/Sec"),
      new FormattableColumn(15, HorizontalAlignment.RIGHT, "Recent", "Avg Dur ms"),
      new FormattableColumn(15, HorizontalAlignment.RIGHT, "Recent", "Resources/Query"),
      new FormattableColumn(15, HorizontalAlignment.RIGHT, "Recent", "Errors/Sec"),
      new FormattableColumn(15, HorizontalAlignment.RIGHT, "Overall", "Queries/Sec"),
      new FormattableColumn(15, HorizontalAlignment.RIGHT, "Overall", "Avg Dur ms"));

  // Create values to use for statistics collection.
  final AtomicLong queryCounter = new AtomicLong(0L);
  final AtomicLong resourceCounter = new AtomicLong(0L);
  final AtomicLong errorCounter = new AtomicLong(0L);
  final AtomicLong queryDurations = new AtomicLong(0L);

  // Determine the length of each interval in milliseconds.
  final long intervalMillis = 1000L * collectionInterval.getValue();

  // We will use Apache's HttpClient library for this tool.
  SSLUtil sslUtil;
  try {
    sslUtil = createSSLUtil();
  } catch (LDAPException e) {
    debugException(e);
    err(e.getMessage());
    return e.getResultCode();
  }

  RegistryBuilder<ConnectionSocketFactory> registryBuilder = RegistryBuilder.create();
  final String schemeName;
  if (sslUtil != null) {
    try {
      SSLConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(
          sslUtil.createSSLContext("TLS"), new NoopHostnameVerifier());
      schemeName = "https";
      registryBuilder.register(schemeName, sslConnectionSocketFactory);
    } catch (GeneralSecurityException e) {
      debugException(e);
      err(ERR_SCIM_TOOL_CANNOT_CREATE_SSL_CONTEXT.get(getExceptionMessage(e)));
      return ResultCode.LOCAL_ERROR;
    }
  } else {
    schemeName = "http";
    registryBuilder.register(schemeName, new PlainConnectionSocketFactory());
  }
  final Registry<ConnectionSocketFactory> socketFactoryRegistry = registryBuilder.build();

  RequestConfig requestConfig = RequestConfig.custom().setConnectionRequestTimeout(30000)
      .setExpectContinueEnabled(true).build();
  SocketConfig socketConfig = SocketConfig.custom().setSoTimeout(30000).setSoReuseAddress(true).build();

  final PoolingHttpClientConnectionManager mgr = new PoolingHttpClientConnectionManager(socketFactoryRegistry);
  mgr.setMaxTotal(numThreads.getValue());
  mgr.setDefaultMaxPerRoute(numThreads.getValue());
  mgr.setDefaultSocketConfig(socketConfig);
  mgr.setValidateAfterInactivity(-1);

  ClientConfig jerseyConfig = new ClientConfig();
  jerseyConfig.property(ApacheClientProperties.CONNECTION_MANAGER, mgr);
  jerseyConfig.property(ApacheClientProperties.REQUEST_CONFIG, requestConfig);
  ApacheConnectorProvider connectorProvider = new ApacheConnectorProvider();
  jerseyConfig.connectorProvider(connectorProvider);

  if (authID.isPresent()) {
    try {
      final String password;
      if (authPassword.isPresent()) {
        password = authPassword.getValue();
      } else if (authPasswordFile.isPresent()) {
        password = authPasswordFile.getNonBlankFileLines().get(0);
      } else {
        password = null;
      }

      BasicCredentialsProvider provider = new BasicCredentialsProvider();
      provider.setCredentials(new AuthScope(host.getValue(), port.getValue()),
          new UsernamePasswordCredentials(authID.getValue(), password));
      jerseyConfig.property(ApacheClientProperties.CREDENTIALS_PROVIDER, provider);
      jerseyConfig.property(ApacheClientProperties.PREEMPTIVE_BASIC_AUTHENTICATION, true);
    } catch (IOException e) {
      Debug.debugException(e);
      err(ERR_QUERY_TOOL_SET_BASIC_AUTH.get(e.getMessage()));
      return ResultCode.LOCAL_ERROR;
    }
  } else if (bearerToken.isPresent()) {
    jerseyConfig.register(new ClientRequestFilter() {
      public void filter(final ClientRequestContext clientRequestContext) throws IOException {
        try {
          clientRequestContext.getHeaders().add("Authorization", "Bearer " + bearerToken.getValue());
        } catch (Exception ex) {
          throw new RuntimeException("Unable to add authorization handler", ex);
        }
      }
    });
  }

  // Create the SCIM client to use for the queries.
  final URI uri;
  try {
    final String path;
    if (contextPath.getValue().startsWith("/")) {
      path = contextPath.getValue();
    } else {
      path = "/" + contextPath.getValue();
    }
    uri = new URI(schemeName, null, host.getValue(), port.getValue(), path, null, null);
  } catch (URISyntaxException e) {
    Debug.debugException(e);
    err(ERR_QUERY_TOOL_CANNOT_CREATE_URL.get(e.getMessage()));
    return ResultCode.OTHER;
  }
  final SCIMService service = new SCIMService(uri, jerseyConfig);

  if (xmlFormat.isPresent()) {
    service.setContentType(MediaType.APPLICATION_XML_TYPE);
    service.setAcceptType(MediaType.APPLICATION_XML_TYPE);
  }

  // Retrieve the resource schema.
  final ResourceDescriptor resourceDescriptor;
  try {
    resourceDescriptor = service.getResourceDescriptor(resourceName.getValue(), null);
    if (resourceDescriptor == null) {
      throw new ResourceNotFoundException(
          "Resource " + resourceName.getValue() + " is not defined by the service provider");
    }
  } catch (SCIMException e) {
    Debug.debugException(e);
    err(ERR_QUERY_TOOL_RETRIEVE_RESOURCE_SCHEMA.get(e.getMessage()));
    return ResultCode.OTHER;
  }

  final SCIMEndpoint<? extends BaseResource> endpoint = service.getEndpoint(resourceDescriptor,
      BaseResource.BASE_RESOURCE_FACTORY);

  // Create the threads to use for the searches.
  final CyclicBarrier barrier = new CyclicBarrier(numThreads.getValue() + 1);
  final QueryRateThread[] threads = new QueryRateThread[numThreads.getValue()];
  for (int i = 0; i < threads.length; i++) {
    threads[i] = new QueryRateThread(i, isQuery, endpoint, filterPattern, attrs, barrier, queryCounter,
        resourceCounter, queryDurations, errorCounter, fixedRateBarrier);
    threads[i].start();
  }

  // Display the table header.
  for (final String headerLine : formatter.getHeaderLines(true)) {
    out(headerLine);
  }

  // Indicate that the threads can start running.
  try {
    barrier.await();
  } catch (Exception e) {
    Debug.debugException(e);
  }

  long overallStartTime = System.nanoTime();
  long nextIntervalStartTime = System.currentTimeMillis() + intervalMillis;
  boolean setOverallStartTime = false;
  long lastDuration = 0L;
  long lastNumEntries = 0L;
  long lastNumErrors = 0L;
  long lastNumSearches = 0L;
  long lastEndTime = System.nanoTime();
  for (long i = 0; i < totalIntervals; i++) {
    final long startTimeMillis = System.currentTimeMillis();
    final long sleepTimeMillis = nextIntervalStartTime - startTimeMillis;
    nextIntervalStartTime += intervalMillis;
    try {
      if (sleepTimeMillis > 0) {
        Thread.sleep(sleepTimeMillis);
      }
    } catch (Exception e) {
      Debug.debugException(e);
    }

    final long endTime = System.nanoTime();
    final long intervalDuration = endTime - lastEndTime;

    final long numSearches;
    final long numEntries;
    final long numErrors;
    final long totalDuration;
    if (warmUp && (remainingWarmUpIntervals > 0)) {
      numSearches = queryCounter.getAndSet(0L);
      numEntries = resourceCounter.getAndSet(0L);
      numErrors = errorCounter.getAndSet(0L);
      totalDuration = queryDurations.getAndSet(0L);
    } else {
      numSearches = queryCounter.get();
      numEntries = resourceCounter.get();
      numErrors = errorCounter.get();
      totalDuration = queryDurations.get();
    }

    final long recentNumSearches = numSearches - lastNumSearches;
    final long recentNumEntries = numEntries - lastNumEntries;
    final long recentNumErrors = numErrors - lastNumErrors;
    final long recentDuration = totalDuration - lastDuration;

    final double numSeconds = intervalDuration / 1000000000.0d;
    final double recentSearchRate = recentNumSearches / numSeconds;
    final double recentErrorRate = recentNumErrors / numSeconds;

    final double recentAvgDuration;
    final double recentEntriesPerSearch;
    if (recentNumSearches > 0L) {
      recentEntriesPerSearch = 1.0d * recentNumEntries / recentNumSearches;
      recentAvgDuration = 1.0d * recentDuration / recentNumSearches / 1000000;
    } else {
      recentEntriesPerSearch = 0.0d;
      recentAvgDuration = 0.0d;
    }

    if (warmUp && (remainingWarmUpIntervals > 0)) {
      out(formatter.formatRow(recentSearchRate, recentAvgDuration, recentEntriesPerSearch, recentErrorRate,
          "warming up", "warming up"));

      remainingWarmUpIntervals--;
      if (remainingWarmUpIntervals == 0) {
        out(INFO_QUERY_TOOL_WARM_UP_COMPLETED.get());
        setOverallStartTime = true;
      }
    } else {
      if (setOverallStartTime) {
        overallStartTime = lastEndTime;
        setOverallStartTime = false;
      }

      final double numOverallSeconds = (endTime - overallStartTime) / 1000000000.0d;
      final double overallSearchRate = numSearches / numOverallSeconds;

      final double overallAvgDuration;
      if (numSearches > 0L) {
        overallAvgDuration = 1.0d * totalDuration / numSearches / 1000000;
      } else {
        overallAvgDuration = 0.0d;
      }

      out(formatter.formatRow(recentSearchRate, recentAvgDuration, recentEntriesPerSearch, recentErrorRate,
          overallSearchRate, overallAvgDuration));

      lastNumSearches = numSearches;
      lastNumEntries = numEntries;
      lastNumErrors = numErrors;
      lastDuration = totalDuration;
    }

    lastEndTime = endTime;
  }

  // Stop all of the threads.
  ResultCode resultCode = ResultCode.SUCCESS;
  for (final QueryRateThread t : threads) {
    t.signalShutdown();
  }

  // Interrupt any blocked threads after a grace period.
  final WakeableSleeper sleeper = new WakeableSleeper();
  sleeper.sleep(1000);
  mgr.shutdown();

  for (final QueryRateThread t : threads) {
    final ResultCode r = t.waitForShutdown();
    if (resultCode == ResultCode.SUCCESS) {
      resultCode = r;
    }
  }

  return resultCode;
}
From source file:org.openmrs.module.registration.web.controller.ajax.RegistrationAjaxController.java
/**
 * process patient birth date
 *
 * @param birthdate
 * @param model
 * @return
 * @throws ParseException
 */
@RequestMapping(value = "/module/registration/ajax/processPatientBirthDate.htm", method = RequestMethod.GET)
public String processPatientBirthDate(@RequestParam("birthdate") String birthdate, Model model)
    throws ParseException {
  Map<String, Object> json = new HashMap<String, Object>();

  // try to parse date
  // if success -> it's a birthdate
  // otherwise -> it's an age
  Date date = null;
  try {
    date = RegistrationUtils.parseDate(birthdate);
  } catch (ParseException e) {
  }

  if (date != null) {
    if (isLaterToday(date)) {
      json.put("error", "Birthdate must be before the current date.");
    } else {
      // the user entered the correct birthdate
      json.put("estimated", false);
      json.put("birthdate", birthdate);
      json.put("age", RegistrationUtils.estimateAge(birthdate));
      logger.info("User entered the correct birthdate.");
    }
  } else {
    String lastLetter = birthdate.substring(birthdate.length() - 1).toLowerCase();
    if ("ymwd".indexOf(lastLetter) < 0) {
      json.put("error", "Age in wrong format");
    } else {
      try {
        json.put("estimated", true);
        String estimatedBirthdate = getEstimatedBirthdate(birthdate);
        json.put("birthdate", estimatedBirthdate);
        json.put("age", RegistrationUtils.estimateAge(estimatedBirthdate));
      } catch (Exception e) {
        json.put("error", e.getMessage());
      }
    }
  }

  model.addAttribute("json", json);
  return "/module/registration/ajax/processPatientBirthDate";
}