List of usage examples for java.util.Date.after()
public boolean after(Date when)
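Before the project examples, a minimal, self-contained sketch of the basic behavior (not taken from any of the projects below): after(when) returns true only when this Date's instant is strictly later than the argument's instant; equal instants return false.

import java.util.Date;

public class DateAfterDemo {
    public static void main(String[] args) {
        Date earlier = new Date(1_000L); // 1 second after the epoch
        Date later = new Date(2_000L);   // 2 seconds after the epoch

        System.out.println(later.after(earlier));   // true  - strictly later instant
        System.out.println(earlier.after(later));   // false
        System.out.println(earlier.after(earlier)); // false - equal instants are not "after"
    }
}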
From source file:com.google.sampling.experiential.server.EventServlet.java
private void sortEvents(List<Event> greetings) {
    Comparator<Event> dateComparator = new Comparator<Event>() {
        @Override
        public int compare(Event o1, Event o2) {
            Date when1 = o1.getWhen();
            Date when2 = o2.getWhen();
            if (when1 == null || when2 == null) {
                return 0;
            } else if (when1.after(when2)) {
                return -1;
            } else if (when2.after(when1)) {
                return 1;
            }
            return 0;
        }
    };
    Collections.sort(greetings, dateComparator);
}
From source file:com.virtusa.akura.reporting.controller.GenarateTeacherWisePresentAndAbsentDaysReportController.java
/**
 * Check whether the given date is a holiday.
 *
 * @param holidayList - list consisting of holidays for the given time period.
 * @param currentDate - currentDate
 * @param start - Calendar object
 * @return boolean
 */
public boolean isHoliday(List<Holiday> holidayList, Date currentDate, Calendar start) {
    boolean flag = false;
    int dayOfWeek = start.get(Calendar.DAY_OF_WEEK);
    for (Holiday tempHoliday : holidayList) {
        if ((currentDate.after(tempHoliday.getStartDate()) && currentDate.before(tempHoliday.getEndDate()))
                || currentDate.equals(tempHoliday.getStartDate())
                || currentDate.equals(tempHoliday.getEndDate())
                || Calendar.SATURDAY == dayOfWeek || Calendar.SUNDAY == dayOfWeek) {
            flag = true;
            break;
        }
    }
    return flag;
}
From source file:com.emc.ecs.sync.target.AtmosTarget.java
private boolean metadataChanged(SyncObject obj, Map<String, Metadata> targetSystemMeta) {
    Date srcCtime = obj.getMetadata().getModificationTime(); // use mtime by default
    if (obj.getMetadata() instanceof AtmosMetadata) {
        srcCtime = parseDate(((AtmosMetadata) obj.getMetadata()).getSystemMetadataValue("ctime"));
    }
    Date dstCtime = parseDate(targetSystemMeta.get("ctime"));
    return srcCtime != null && dstCtime != null && srcCtime.after(dstCtime);
}
From source file:com.aurel.track.exchange.track.importer.TrackImportBL.java
private static boolean externalIsMoreRecent(Date externalLastEditDate, Date internalLastEditedDate,
        String entity, Integer value) {
    if (externalLastEditDate == null) {
        LOGGER.warn("No external lastEdit value found for entity " + entity + " and value " + value);
        return false;
    }
    if (internalLastEditedDate == null || externalLastEditDate.after(internalLastEditedDate)) {
        // new workItem or the external is more recent
        return true;
    }
    return false;
}
From source file:com.aerospike.examples.timeseries.TimeSeriesManipulator.java
private void retrieveResult(String[] ticker, Date startDate, Date endDate) throws ParseException {
    // TODO Auto-generated method stub
    Record[] records;
    String pk;
    int daySize = (int) dateOp.difference(startDate, endDate);
    Key[] keys = new Key[daySize];
    int numTickers = 0;
    if (ticker != null)
        numTickers = ticker.length;
    Date printDate, insertDate;
    Long count = new Long(0);
    Double sum;
    Double startVal;
    Double endVal;
    // Random rand = new Random();
    // long randomNum = 0;
    // long overallRndNum = 0 + rand.nextInt((1000000 - 0) + 1);
    long currTime = GregorianCalendar.getInstance().getTimeInMillis();
    Key summaryKey = new Key("test", "overallsummary", currTime);
    String tksummKey = null;
    for (int j = 0; j < numTickers; j++) {
        Date date = startDate;
        int i = 0;
        boolean firstRec = false;
        count = new Long(0);
        sum = new Double(0);
        startVal = new Double(0);
        endVal = new Double(0);
        // randomNum = 0 + rand.nextInt((1000000 - 0) + 1);
        Key tsKey = null;
        // Key tsKey = new Key("test", "tickersummary", randomNum);
        while (!date.after(endDate)) {
            insertDate = dateOp.getDate(date);
            pk = ticker[j] + insertDate.getTime();
            keys[i] = new Key("test", "timeseries", pk);
            tksummKey = ticker[j] + "Summary" + currTime;
            tsKey = new Key("test", "tickersummary", tksummKey);
            keys[i] = new Key("test", "timeseries", pk);
            String formattedDate = dateOp.dateFormatter(date);
            Record record = client.operate(wPolicy, keys[i],
                    MapOperation.getByRank("stock", -1, MapReturnType.VALUE),
                    MapOperation.getByRank("stock", -1, MapReturnType.INDEX),
                    MapOperation.getByRank("stock", 0, MapReturnType.VALUE),
                    MapOperation.getByRank("stock", 0, MapReturnType.INDEX),
                    MapOperation.getByIndex("stock", 0, MapReturnType.VALUE),
                    MapOperation.getByIndex("stock", -1, MapReturnType.VALUE),
                    Operation.get("sum"),
                    MapOperation.size("stock"));
            if (record != null) {
                ArrayList<Double> outList = (ArrayList<Double>) record.getList("stock");
                sum = sum + (Double) record.getValue("sum");
                Object countTemp = outList.get(6);
                count = count + (Long) countTemp;
                if (!firstRec) {
                    startVal = outList.get(4);
                    firstRec = true;
                }
                endVal = outList.get(5);
                Record recMax = client.operate(wPolicy, tsKey,
                        MapOperation.put(mPolicy, "max", Value.get(formattedDate), Value.get(outList.get(0))),
                        MapOperation.put(mPolicy, "min", Value.get(formattedDate), Value.get(outList.get(2))));
                String maxIndex = dateOp.getTimeStamp(outList.get(1));
                String minIndex = dateOp.getTimeStamp(outList.get(3));
                System.out.println("Reading Data for " + formattedDate + " with Primary Key: " + pk
                        + "\n\t: MaxValue: "
                        + Double.parseDouble(new DecimalFormat("##.##").format(outList.get(0)))
                        + " Time of Day: " + maxIndex
                        + "\n\t: MinValue: "
                        + Double.parseDouble(new DecimalFormat("##.##").format(outList.get(2)))
                        + " Time of Day: " + minIndex);
            }
            date = dateOp.addDate(date);
            i++;
        }
        summaryPrint(tsKey, sum, count, startVal, endVal, tksummKey, ticker[j]);
        double difference = endVal - startVal;
        Record recSumm = client.operate(wPolicy, summaryKey,
                MapOperation.put(mPolicy, "difference", Value.get(ticker[j]), Value.get(difference)));
        firstRec = false;
    }
    summaryPrint(count, summaryKey, currTime, numTickers);
}
From source file:be.fedict.trust.service.bean.TrustServiceTrustLinker.java
public TrustLinkerResult hasTrustLink(X509Certificate childCertificate, X509Certificate certificate,
        Date validationDate, RevocationData revocationData) {
    LOG.debug("certificate: " + childCertificate.getSubjectX500Principal());
    LOG.debug("certificate Issuer: " + childCertificate.getIssuerX500Principal().toString());
    LOG.debug("Issuer: " + certificate.getSubjectX500Principal());
    BigInteger issuerSerialNumber = certificate.getSerialNumber();
    String key = new String();
    key += certificate.getSubjectX500Principal().toString() + "|" + issuerSerialNumber.toString();
    String issuerName = childCertificate.getIssuerX500Principal().toString();
    CertificateAuthorityEntity certificateAuthority = this.entityManager
            //.find(CertificateAuthorityEntity.class, issuerName);
            .find(CertificateAuthorityEntity.class, key);
    if (null == certificateAuthority) {
        LOG.debug("no data cache entry for CA: " + issuerName + " - Serial Number: "
                + issuerSerialNumber.toString());
        /*
         * Cache Miss
         */
        SNMPInterceptor.increment(SnmpConstants.CACHE_MISSES, SnmpConstants.SNMP_SERVICE, 1L);
        /*
         * Lookup Root CA's trust point via parent certificates' CA entity.
         */
        String parentIssuerName = certificate.getIssuerX500Principal().toString();
        CertificateAuthorityEntity parentCertificateAuthority = this.entityManager
                .find(CertificateAuthorityEntity.class, parentIssuerName);
        if (null == parentCertificateAuthority) {
            logAudit("CA not found for " + parentIssuerName);
            LOG.error("CA not found for " + parentIssuerName + " ?!");
            return null;
        }
        // create new CA
        try {
            certificateAuthority = new CertificateAuthorityEntity(getCrlUrl(childCertificate), certificate);
            certificateAuthority.setTrustPoint(parentCertificateAuthority.getTrustPoint());
        } catch (CertificateEncodingException e) {
            LOG.error("certificate encoding error: " + e.getMessage(), e);
            return null;
        }
        this.entityManager.persist(certificateAuthority);
        return null;
    }
    if (Status.ACTIVE != certificateAuthority.getStatus()) {
        LOG.debug("CA revocation data cache not yet active: " + issuerName);
        /*
         * Harvester is still busy processing the first CRL.
         */
        if (null == certificateAuthority.getCrlUrl()) {
            certificateAuthority.setCrlUrl(getCrlUrl(childCertificate));
        }
        if (Status.NONE != certificateAuthority.getStatus()) {
            // none means no CRL is available so not really a cache miss
            SNMPInterceptor.increment(SnmpConstants.CACHE_MISSES, SnmpConstants.SNMP_SERVICE, 1L);
        }
        return null;
    }
    /*
     * Let's use the cached revocation data
     */
    Date thisUpdate = certificateAuthority.getThisUpdate();
    if (null == thisUpdate) {
        LOG.warn("no thisUpdate value: " + certificateAuthority.getName());
        SNMPInterceptor.increment(SnmpConstants.CACHE_MISSES, SnmpConstants.SNMP_SERVICE, 1L);
        return null;
    }
    Date nextUpdate = certificateAuthority.getNextUpdate();
    if (null == nextUpdate) {
        LOG.warn("no nextUpdate value: " + certificateAuthority.getName());
        SNMPInterceptor.increment(SnmpConstants.CACHE_MISSES, SnmpConstants.SNMP_SERVICE, 1L);
        return null;
    }
    /*
     * First check whether the cached revocation data is up-to-date.
     */
    if (thisUpdate.after(validationDate)) {
        LOG.warn("cached CRL data too recent: " + certificateAuthority.getName());
        SNMPInterceptor.increment(SnmpConstants.CACHE_MISSES, SnmpConstants.SNMP_SERVICE, 1L);
        return null;
    }
    if (validationDate.after(nextUpdate)) {
        LOG.warn("cached CRL data too old: " + certificateAuthority.getName());
        SNMPInterceptor.increment(SnmpConstants.CACHE_MISSES, SnmpConstants.SNMP_SERVICE, 1L);
        return null;
    }
    LOG.debug("using cached CRL data");
    /*
     * Cache Hit
     */
    SNMPInterceptor.increment(SnmpConstants.CACHE_HITS, SnmpConstants.SNMP_SERVICE, 1L);
    BigInteger serialNumber = childCertificate.getSerialNumber();
    RevokedCertificateEntity revokedCertificate = findRevokedCertificate(issuerName, serialNumber);
    if (null == revokedCertificate) {
        LOG.debug("certificate valid: " + childCertificate.getSubjectX500Principal());
        return new TrustLinkerResult(true);
    }
    if (revokedCertificate.getRevocationDate().after(validationDate)) {
        LOG.debug("CRL OK for: " + childCertificate.getSubjectX500Principal() + " at " + validationDate);
        return new TrustLinkerResult(true);
    }
    LOG.debug("certificate invalid: " + childCertificate.getSubjectX500Principal());
    return new TrustLinkerResult(false, TrustLinkerResultReason.INVALID_REVOCATION_STATUS,
            "certificate revoked by cached CRL");
}
From source file:com.emc.ecs.sync.target.FilesystemTarget.java
@Override
public void filter(SyncObject obj) {
    File destFile = createFile(targetRoot.getPath(), obj.getRelativePath());
    obj.setTargetIdentifier(destFile.getPath());
    log.debug("Writing {} to {}", obj.getSourceIdentifier(), destFile);
    Date mtime = FilesystemUtil.getMtime(obj);

    // make sure parent directory exists
    mkdirs(destFile.getParentFile());

    // if required we will need to update metadata after any streaming operation
    boolean dataCopied = false;

    if (obj.isDirectory()) {
        synchronized (this) {
            if (!destFile.exists() && !destFile.mkdir())
                throw new RuntimeException("Failed to create directory " + destFile);
        }
    } else {
        // If forced, retrying, newer or different size, copy the file data
        if (force || obj.getFailureCount() > 0 || mtime == null || !destFile.exists()
                || mtime.after(new Date(destFile.lastModified()))
                || obj.getMetadata().getContentLength() != destFile.length()) {
            copyData(obj, destFile);
            dataCopied = true;
        } else {
            log.debug("No change in content timestamps for {}", obj.getSourceIdentifier());
        }
    }

    // encapsulate metadata from source system
    if (!ignoreMetadata) {
        File metaFile = createFile(null, SyncMetadata.getMetaPath(destFile.getPath(), destFile.isDirectory()));
        File metaDir = metaFile.getParentFile();

        Date ctime = null;
        if (obj.getMetadata() instanceof AtmosMetadata) {
            // check for ctime in system meta
            UserMetadata m = ((AtmosMetadata) obj.getMetadata()).getSystemMetadata().get("ctime");
            if (m != null)
                ctime = Iso8601Util.parse(m.getValue());
        }
        if (ctime == null)
            ctime = mtime; // use mtime if there is no ctime

        // create metadata directory if it doesn't already exist
        synchronized (this) {
            if (!metaDir.exists() && !metaDir.mkdir())
                throw new RuntimeException("Failed to create metadata directory " + metaDir);
        }

        // if *ctime* is newer or forced, write the metadata file.. also if object has generated
        // new metadata from a streaming operation
        if (force || ctime == null || !metaFile.exists() || ctime.after(new Date(metaFile.lastModified()))
                || (dataCopied && obj.requiresPostStreamMetadataUpdate())) {
            try {
                String metaJson = obj.getMetadata().toJson();
                copyData(new ByteArrayInputStream(metaJson.getBytes("UTF-8")), metaFile);
                if (ctime != null) {
                    // Set the metadata file mtime to the source ctime (i.e. this metadata file's
                    // content is modified at the same time as the source's metadata modification time)
                    if (!metaFile.setLastModified(ctime.getTime()))
                        log.warn("Failed to set mtime of {}", metaFile);
                }
            } catch (IOException e) {
                throw new RuntimeException("Failed to write metadata to: " + metaFile, e);
            }
        } else {
            log.debug("No change in metadata for {}", obj.getSourceIdentifier());
        }
    }

    try {
        // TODO: figure out "preserve"/"restore" option
        // TODO: make the behavior here configurable (do we fail? do we track in the DB?)
        FilesystemUtil.applyFilesystemMetadata(destFile, obj.getMetadata(), includeAcl, true);
    } catch (Exception e) {
        log.warn("could not apply filesystem metadata to " + destFile, e);
    }
}
From source file:com.pr7.logging.CustomDailyRollingFileAppender.java
private void customCleanUp() {
    // Check to see if there are already 5 files
    File file = new File(fileName);
    Calendar cal = Calendar.getInstance();
    int maxDays = 30;
    try {
        maxDays = Integer.parseInt(archiveMaxDays);
    } catch (Exception e) {
        // just leave it at 30.
    }
    cal.add(Calendar.DATE, -maxDays);
    Date cutoffDate = cal.getTime();
    Date timeBackup = DateUtil.parse(DateUtil.format(new Date(), "dd/MM/yyyy ") + archiveTiming,
            "dd/MM/yyyy HH:mm:ss");
    // This makes sure we only back up once per day
    if ((lastBackup == null || timeBackup.after(lastBackup)) && timeBackup.compareTo(new Date()) <= 0
            && file.getParentFile().exists()) {
        System.out.println("cleanupAndRollOver executed:: cutoffDate = " + cutoffDate + ", timeBackup = "
                + timeBackup + ", lastBackup = " + lastBackup);
        File[] files = file.getParentFile().listFiles(new StartsWithFileFilter(file.getName(), false));
        System.out.println("cleanupAndRollOver executed:: files size = " + files.length
                + ", file.getName() = " + file.getName());
        int nameLength = file.getName().length();
        for (int i = 0; i < files.length; i++) {
            String datePart = null;
            try {
                if (!file.getName().equals(files[i].getName())) {
                    datePart = files[i].getName().substring(nameLength);
                    Date date = sdf.parse(datePart);
                    if (date.before(cutoffDate)) {
                        if (archiveCompress.equalsIgnoreCase("TRUE")) {
                            zipAndDelete(files[i]);
                        } else {
                            System.out.println("delete file = " + files[i].getName());
                            files[i].delete();
                        }
                    }
                }
            } catch (Exception e) {
                // This isn't a file we should touch (it isn't named correctly)
            }
        }
        lastBackup = new Date();
    }
}
From source file:com.intuit.tank.report.JobReport.java
/**
 * @param all
 */
private void filterDate(List<JobInstance> all) {
    Date date = jobReportOptions.getStartTime();
    if (date != null) {
        for (Iterator<JobInstance> iter = all.iterator(); iter.hasNext();) {
            JobInstance job = iter.next();
            Date st = job.getStartTime() != null ? job.getStartTime() : job.getCreated();
            if (!date.before(st)) {
                iter.remove();
            }
        }
    }
    date = jobReportOptions.getEndTime();
    if (date != null) {
        for (Iterator<JobInstance> iter = all.iterator(); iter.hasNext();) {
            JobInstance job = iter.next();
            Date st = job.getStartTime() != null ? job.getStartTime() : job.getCreated();
            if (!date.after(st)) {
                iter.remove();
            }
        }
    }
    if (NumberUtils.isDigits(jobReportOptions.getMinUsers())) {
        try {
            int users = Integer.parseInt(jobReportOptions.getMinUsers());
            for (Iterator<JobInstance> iter = all.iterator(); iter.hasNext();) {
                JobInstance job = iter.next();
                if (job.getTotalVirtualUsers() < users) {
                    iter.remove();
                }
            }
        } catch (NumberFormatException e) {
            LOG.warn("Error with min users value of " + jobReportOptions.getMinUsers());
        }
    }
    if (NumberUtils.isDigits(jobReportOptions.getMaxUsers())) {
        try {
            int users = Integer.parseInt(jobReportOptions.getMaxUsers());
            for (Iterator<JobInstance> iter = all.iterator(); iter.hasNext();) {
                JobInstance job = iter.next();
                if (job.getTotalVirtualUsers() > users) {
                    iter.remove();
                }
            }
        } catch (NumberFormatException e) {
            LOG.warn("Error with max users value of " + jobReportOptions.getMaxUsers());
        }
    }
    if (NumberUtils.isDigits(jobReportOptions.getJobIdStart())) {
        try {
            int jobIdStart = NumberUtils.toInt(jobReportOptions.getJobIdStart());
            for (Iterator<JobInstance> iter = all.iterator(); iter.hasNext();) {
                JobInstance job = iter.next();
                if (job.getId() < jobIdStart) {
                    iter.remove();
                }
            }
        } catch (NumberFormatException e) {
            LOG.warn("Error with job id start value of " + jobReportOptions.getJobIdStart());
        }
    }
    if (NumberUtils.isDigits(jobReportOptions.getJobIdEnd())) {
        try {
            int jobIdEnd = NumberUtils.toInt(jobReportOptions.getJobIdEnd());
            for (Iterator<JobInstance> iter = all.iterator(); iter.hasNext();) {
                JobInstance job = iter.next();
                if (job.getId() > jobIdEnd) {
                    iter.remove();
                }
            }
        } catch (NumberFormatException e) {
            LOG.warn("Error with job id end value of " + jobReportOptions.getJobIdEnd());
        }
    }
}
From source file:de.iteratec.iteraplan.businesslogic.service.MassUpdateServiceImpl.java
/**
 * Validates a whole line in the mass update, to check whether a whole date interval is modified.
 * Does not write any changes.
 * @param line
 */
private <T extends BuildingBlock> void validateDateIntervalPairs(MassUpdateLine<T> line,
        BuildingBlock buildingBlock) {
    UserContext.getCurrentPerms().assureFunctionalPermission(TypeOfFunctionalPermission.MASSUPDATE);

    // the Integer key in the map is the id of a DateAT
    Map<Integer, MassUpdateAttributeItem> massUpdateAttributes = new HashMap<Integer, MassUpdateAttributeItem>();
    for (MassUpdateAttribute attribute : line.getAttributes()) {
        if (BBAttribute.USERDEF_DATE_ATTRIBUTE_TYPE.equals(attribute.getType())) {
            massUpdateAttributes.put(attributeTypeService
                    .loadObjectById(attribute.getMassUpdateAttributeItem().getAttributeId(), DateAT.class)
                    .getId(), attribute.getMassUpdateAttributeItem());
        }
    }
    if (massUpdateAttributes.isEmpty()) {
        return;
    }

    // get all the date intervals that contain the given DateATs
    Set<DateInterval> dateIntervals = dateIntervalService
            .findDateIntervalsByDateATs(massUpdateAttributes.keySet());

    // For every found DateInterval that contains at least one DateAT, check if the DateInterval is valid
    for (DateInterval dateInterval : dateIntervals) {
        /**
         * A complete "DateInterval" (two DateAVs that define a part of a DateInterval for a BB) is going
         * to be updated; check whether the start date is equal to or before the end date.
         */
        if (massUpdateAttributes.get(dateInterval.getStartDate().getId()) != null
                && massUpdateAttributes.get(dateInterval.getEndDate().getId()) != null) {
            String startDate = massUpdateAttributes.get(dateInterval.getStartDate().getId())
                    .getNewAttributeValue();
            String endDate = massUpdateAttributes.get(dateInterval.getEndDate().getId()).getNewAttributeValue();
            if (StringUtils.isNotEmpty(startDate) && StringUtils.isNotEmpty(endDate)) {
                Date newStartDate = DateUtils.parseAsDate(startDate, UserContext.getCurrentLocale());
                Date newEndDate = DateUtils.parseAsDate(endDate, UserContext.getCurrentLocale());
                if (newStartDate != null && newEndDate != null && newStartDate.after(newEndDate)) {
                    throw new IteraplanBusinessException(
                            IteraplanErrorMessages.DATE_INTERVAL_END_DATE_BEFORE_START_DATE);
                }
            }
        }
        /**
         * A single date of a DateInterval is going to be updated
         */
        else {
            String date;
            Date newDate;
            if (massUpdateAttributes.get(dateInterval.getStartDate().getId()).getNewAttributeValue() != null) {
                date = massUpdateAttributes.get(dateInterval.getStartDate().getId()).getNewAttributeValue();
                newDate = DateUtils.parseAsDate(date, UserContext.getCurrentLocale());
                if (((DateAV) buildingBlock.getAttributeTypeToAttributeValues()
                        .getBucketNotNull(dateInterval.getEndDate()).iterator().next()).getValue()
                        .before(newDate)) {
                    throw new IteraplanBusinessException(
                            IteraplanErrorMessages.DATE_INTERVAL_START_DATE_BEFORE_END_DATE);
                }
            } else {
                date = massUpdateAttributes.get(dateInterval.getEndDate().getId()).getNewAttributeValue();
                newDate = DateUtils.parseAsDate(date, UserContext.getCurrentLocale());
                if (((DateAV) buildingBlock.getAttributeTypeToAttributeValues()
                        .getBucketNotNull(dateInterval.getEndDate()).iterator().next()).getValue()
                        .after(newDate)) {
                    throw new IteraplanBusinessException(
                            IteraplanErrorMessages.DATE_INTERVAL_END_DATE_BEFORE_START_DATE);
                }
            }
        }
    }
}