List of usage examples for java.util Date setTime
public void setTime(long time)
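setTime(long time) makes this Date represent the point in time that is time milliseconds after January 1, 1970 00:00:00 GMT, replacing whatever instant the object held before. Before the project examples below, here is a minimal, self-contained sketch of the call in isolation (the class name SetTimeBasics is purely illustrative):

    import java.util.Date;

    // Minimal sketch: setTime(long) repoints an existing Date at the instant
    // that lies `time` milliseconds after the Unix epoch.
    public class SetTimeBasics {
        public static void main(String[] args) {
            Date d = new Date();                        // current instant
            d.setTime(0L);                              // 1 Jan 1970 00:00:00 GMT
            System.out.println(d);
            d.setTime(System.currentTimeMillis());      // back to "now"
            System.out.println(d);
            d.setTime(d.getTime() - 60_000L);           // shift one minute into the past
            System.out.println(d);
        }
    }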
From source file:org.apache.archiva.repository.scanner.RepositoryContentConsumersTest.java
    @Test
    public void testExecution() throws Exception {
        IMocksControl knownControl = createNiceControl();
        RepositoryContentConsumers consumers = lookupRepositoryConsumers();
        KnownRepositoryContentConsumer selectedKnownConsumer =
            knownControl.createMock(KnownRepositoryContentConsumer.class);
        KnownRepositoryContentConsumer unselectedKnownConsumer =
            createNiceControl().createMock(KnownRepositoryContentConsumer.class);
        consumers.setApplicationContext(
            new MockApplicationContext(Arrays.asList(selectedKnownConsumer, unselectedKnownConsumer), null));
        consumers.setSelectedKnownConsumers(Collections.singletonList(selectedKnownConsumer));

        IMocksControl invalidControl = createControl();
        InvalidRepositoryContentConsumer selectedInvalidConsumer =
            invalidControl.createMock(InvalidRepositoryContentConsumer.class);
        InvalidRepositoryContentConsumer unselectedInvalidConsumer =
            createControl().createMock(InvalidRepositoryContentConsumer.class);
        consumers.setApplicationContext(
            new MockApplicationContext(null, Arrays.asList(selectedInvalidConsumer, unselectedInvalidConsumer)));
        consumers.setSelectedInvalidConsumers(Collections.singletonList(selectedInvalidConsumer));

        ManagedRepository repo = createRepository("id", "name", new File("target/test-repo"));
        File testFile = new File("target/test-repo/path/to/test-file.txt");

        Date startTime = new Date(System.currentTimeMillis());
        startTime.setTime(12345678);

        selectedKnownConsumer.beginScan(repo, startTime, false);
        expect(selectedKnownConsumer.getIncludes()).andReturn(Collections.singletonList("**/*.txt"));
        selectedKnownConsumer.processFile(_OS("path/to/test-file.txt"), false);
        knownControl.replay();

        selectedInvalidConsumer.beginScan(repo, startTime, false);
        invalidControl.replay();

        consumers.executeConsumers(repo, testFile, true);

        knownControl.verify();
        invalidControl.verify();

        knownControl.reset();
        invalidControl.reset();

        File notIncludedTestFile = new File("target/test-repo/path/to/test-file.xml");

        selectedKnownConsumer.beginScan(repo, startTime, false);
        expect(selectedKnownConsumer.getExcludes()).andReturn(Collections.<String>emptyList());
        expect(selectedKnownConsumer.getIncludes()).andReturn(Collections.singletonList("**/*.txt"));
        knownControl.replay();

        selectedInvalidConsumer.beginScan(repo, startTime, false);
        selectedInvalidConsumer.processFile(_OS("path/to/test-file.xml"), false);
        expect(selectedInvalidConsumer.getId()).andReturn("invalid");
        invalidControl.replay();

        consumers.executeConsumers(repo, notIncludedTestFile, true);

        knownControl.verify();
        invalidControl.verify();

        knownControl.reset();
        invalidControl.reset();

        File excludedTestFile = new File("target/test-repo/path/to/test-file.txt");

        selectedKnownConsumer.beginScan(repo, startTime, false);
        expect(selectedKnownConsumer.getExcludes()).andReturn(Collections.singletonList("**/test-file.txt"));
        knownControl.replay();

        selectedInvalidConsumer.beginScan(repo, startTime, false);
        selectedInvalidConsumer.processFile(_OS("path/to/test-file.txt"), false);
        expect(selectedInvalidConsumer.getId()).andReturn("invalid");
        invalidControl.replay();

        consumers.executeConsumers(repo, excludedTestFile, true);

        knownControl.verify();
        invalidControl.verify();
    }
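The only setTime call in this test pins startTime to a fixed epoch offset (12345678 ms) so that the mocked beginScan expectations do not depend on the wall clock. That pattern reduced to a stand-alone sketch (the class name is illustrative):

    import java.util.Date;

    // Sketch of the pattern only: overwrite a freshly created Date with a fixed
    // epoch-millisecond value so test expectations stay deterministic.
    public class FixedStartTimeSketch {
        public static void main(String[] args) {
            Date startTime = new Date(System.currentTimeMillis());
            startTime.setTime(12345678L); // constant instant reused in every expectation
            System.out.println("startTime = " + startTime + " (" + startTime.getTime() + " ms)");
        }
    }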
From source file:it.infn.ct.chipster.Chipster.java
    private void sendHTMLEmail(String USERNAME, String TO, String FROM, String SMTP_HOST,
            String ApplicationAcronym, String user_emailAddress, String credential, String chipster_HOST) {
        log.info("\n- Sending email notification to the user " + USERNAME + " [ " + TO + " ]");
        log.info("\n- SMTP Server = " + SMTP_HOST);
        log.info("\n- Sender = " + FROM);
        log.info("\n- Receiver = " + TO);
        log.info("\n- Application = " + ApplicationAcronym);
        log.info("\n- User's email = " + user_emailAddress);

        // Assuming you are sending email from localhost
        String HOST = "localhost";

        // Get system properties
        Properties properties = System.getProperties();
        properties.setProperty(SMTP_HOST, HOST);
        properties.setProperty("mail.debug", "true");
        //properties.setProperty("mail.smtp.auth", "false");

        // Get the default Session object.
        javax.mail.Session session = javax.mail.Session.getDefaultInstance(properties);

        try {
            // Create a default MimeMessage object.
            javax.mail.internet.MimeMessage message = new javax.mail.internet.MimeMessage(session);

            // Set From: header field of the header.
            message.setFrom(new javax.mail.internet.InternetAddress(FROM));

            // Set To: header field of the header.
            message.addRecipient(javax.mail.Message.RecipientType.TO,
                    new javax.mail.internet.InternetAddress(TO));
            message.addRecipient(javax.mail.Message.RecipientType.CC,
                    new javax.mail.internet.InternetAddress(user_emailAddress));
                    //new javax.mail.internet.InternetAddress("glarocca75@gmail.com")); // <== Change here!

            // Set Subject: header field
            message.setSubject(" Chipster Account Generator service notification ");

            Date currentDate = new Date();
            currentDate.setTime(currentDate.getTime());

            // Send the actual HTML message, as big as you like
            message.setContent("<br/><H4>"
                    + "<img src=\"http://scilla.man.poznan.pl:8080/confluence/download/attachments/5505438/egi_logo.png\" width=\"100\">"
                    + "</H4><hr><br/>"
                    + "<b>Description:</b> " + ApplicationAcronym + " notification service <br/><br/>"
                    + "<i>A request to create a new temporary chipster account has been successfully sent from the LToS Science Gateway</i><br/><br/>"
                    + "<b>Chipster Front Node:</b> " + chipster_HOST + "<br/>"
                    + "<b>Credentials:</b> " + credential + "<br/><br/>"
                    + "<b>TimeStamp:</b> " + currentDate + "<br/><br/>"
                    + "<b>Disclaimer:</b><br/>"
                    + "<i>This is an automatic message sent by the Catania Science Gateway (CSG) tailored for the EGI Long of Tail Science.<br/><br/>",
                    "text/html");

            // Send message
            javax.mail.Transport.send(message);
        } catch (javax.mail.MessagingException ex) {
            Logger.getLogger(Chipster.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
From source file:com.adkdevelopment.earthquakesurvival.data.syncadapter.SyncAdapter.java
    @Override
    public void onPerformSync(Account account, Bundle extras, String authority, ContentProviderClient provider,
            SyncResult syncResult) {
        Context context = getContext();

        App.getApiManager().getEarthquakeService().getData().enqueue(new Callback<EarthquakeObject>() {
            @Override
            public void onResponse(Call<EarthquakeObject> call, Response<EarthquakeObject> response) {
                EarthquakeObject earthquake = response.body();
                Vector<ContentValues> cVVector = new Vector<>(earthquake.getFeatures().size());

                double currentBiggest = 0.0;
                ContentValues notifyValues = null;

                for (Feature each : earthquake.getFeatures()) {
                    ContentValues earthquakeValues = new ContentValues();
                    earthquakeValues.put(EarthquakeColumns.PLACE, each.getProperties().getPlace());
                    earthquakeValues.put(EarthquakeColumns.ID_EARTH, each.getId());
                    earthquakeValues.put(EarthquakeColumns.MAG, each.getProperties().getMag());
                    earthquakeValues.put(EarthquakeColumns.TYPE, each.getProperties().getType());
                    earthquakeValues.put(EarthquakeColumns.ALERT, each.getProperties().getAlert());
                    earthquakeValues.put(EarthquakeColumns.TIME, each.getProperties().getTime());
                    earthquakeValues.put(EarthquakeColumns.URL, each.getProperties().getUrl());
                    earthquakeValues.put(EarthquakeColumns.DETAIL, each.getProperties().getDetail());
                    earthquakeValues.put(EarthquakeColumns.DEPTH, each.getGeometry().getCoordinates().get(2));
                    earthquakeValues.put(EarthquakeColumns.LONGITUDE, each.getGeometry().getCoordinates().get(0));
                    earthquakeValues.put(EarthquakeColumns.LATITUDE, each.getGeometry().getCoordinates().get(1));

                    LatLng latLng = new LatLng(each.getGeometry().getCoordinates().get(1),
                            each.getGeometry().getCoordinates().get(0));
                    LatLng location = LocationUtils.getLocation(context);
                    earthquakeValues.put(EarthquakeColumns.DISTANCE, LocationUtils.getDistance(latLng, location));

                    cVVector.add(earthquakeValues);

                    if (each.getProperties().getMag() != null && each.getProperties().getMag() > currentBiggest) {
                        currentBiggest = each.getProperties().getMag();
                        notifyValues = new ContentValues(earthquakeValues);
                        notifyValues.put(EarthquakeColumns.PLACE,
                                Utilities.formatEarthquakePlace(each.getProperties().getPlace()));
                    }
                }

                int inserted = 0;
                // add to database
                ContentResolver resolver = context.getContentResolver();
                if (cVVector.size() > 0) {
                    ContentValues[] cvArray = new ContentValues[cVVector.size()];
                    cVVector.toArray(cvArray);
                    inserted = resolver.bulkInsert(EarthquakeColumns.CONTENT_URI, cvArray);
                }

                // Set the date to day minus one to delete old data from the database
                Date date = new Date();
                date.setTime(date.getTime() - DateUtils.DAY_IN_MILLIS);
                int deleted = resolver.delete(EarthquakeColumns.CONTENT_URI, EarthquakeColumns.TIME + " <= ?",
                        new String[] { String.valueOf(date.getTime()) });

                Log.v(TAG, "Service Complete. " + inserted + " Inserted, " + deleted + " deleted");
                sendNotification(notifyValues);
            }

            @Override
            public void onFailure(Call<EarthquakeObject> call, Throwable t) {
                Log.e(TAG, "onFailure: " + t.toString());
            }
        });

        App.getNewsManager().getNewsService().getNews().enqueue(new Callback<Rss>() {
            @Override
            public void onResponse(Call<Rss> call, Response<Rss> response) {
                Channel news = response.body().getChannel();
                Vector<ContentValues> cVVector = new Vector<>(news.getItem().size());

                SimpleDateFormat simpleDateFormat = new SimpleDateFormat("EEE, d MMM yyyy HH:mm:ss Z",
                        Locale.getDefault());
                Date date = new Date();

                for (Item each : news.getItem()) {
                    ContentValues weatherValues = new ContentValues();
                    try {
                        date = simpleDateFormat.parse(each.getPubDate());
                    } catch (ParseException e) {
                        Log.e(TAG, "e:" + e);
                    }
                    weatherValues.put(NewsColumns.DATE, date.getTime());
                    weatherValues.put(NewsColumns.TITLE, each.getTitle());
                    weatherValues.put(NewsColumns.DESCRIPTION, Html.toHtml(new SpannedString(each.getDescription())));
                    weatherValues.put(NewsColumns.URL, each.getLink());
                    weatherValues.put(NewsColumns.GUID, each.getGuid().getContent());
                    cVVector.add(weatherValues);
                }

                int inserted = 0;
                // add to database
                ContentResolver resolver = getContext().getContentResolver();
                if (cVVector.size() > 0) {
                    // Student: call bulkInsert to add the weatherEntries to the database here
                    ContentValues[] cvArray = new ContentValues[cVVector.size()];
                    cVVector.toArray(cvArray);
                    inserted = resolver.bulkInsert(NewsColumns.CONTENT_URI, cvArray);
                }

                // Set the date to three days ago to delete old data from the database
                date = new Date();
                date.setTime(date.getTime() - DateUtils.DAY_IN_MILLIS * 3);
                int deleted = resolver.delete(NewsColumns.CONTENT_URI, NewsColumns.DATE + " <= ?",
                        new String[] { String.valueOf(date.getTime()) });
            }

            @Override
            public void onFailure(Call<Rss> call, Throwable t) {
                Log.e(TAG, "onFailure: " + t.toString());
            }
        });

        // TODO: 4/22/16 possible refactoring
        // checking the last update and notify if it's the first of the day
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        String lastNotificationKey = context.getString(R.string.sharedprefs_key_last_countupdate);
        long lastSync = prefs.getLong(lastNotificationKey, DateUtils.DAY_IN_MILLIS);

        if (System.currentTimeMillis() - lastSync >= Utilities.getSyncIntervalPrefs(context)
                * DateUtils.SECOND_IN_MILLIS) {
            SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd", Locale.US);
            Date date = new Date(System.currentTimeMillis());
            String startTime[] = new String[] {
                    simpleDateFormat.format(date.getTime() - DateUtils.YEAR_IN_MILLIS),
                    simpleDateFormat.format(date.getTime() - DateUtils.DAY_IN_MILLIS * 30),
                    simpleDateFormat.format(date.getTime() - DateUtils.WEEK_IN_MILLIS),
                    simpleDateFormat.format(date.getTime() - DateUtils.DAY_IN_MILLIS) };
            String endTime = simpleDateFormat.format(date);

            int iterator = 1;
            while (iterator < CountColumns.ALL_COLUMNS.length) {
                final int round = iterator;
                App.getApiManager().getEarthquakeService().getEarthquakeStats(startTime[round - 1], endTime)
                        .enqueue(new Callback<CountEarthquakes>() {
                            @Override
                            public void onResponse(Call<CountEarthquakes> call, Response<CountEarthquakes> response) {
                                ContentValues count = new ContentValues();
                                count.put(CountColumns.ALL_COLUMNS[round], response.body().getCount());
                                ContentResolver contentResolver = context.getContentResolver();
                                Cursor cursor = contentResolver.query(CountColumns.CONTENT_URI, null, null, null, null);
                                if (cursor != null) {
                                    if (cursor.getCount() < 1) {
                                        long inserted = ContentUris
                                                .parseId(contentResolver.insert(CountColumns.CONTENT_URI, count));
                                        //Log.d(TAG, "inserted:" + inserted);
                                    } else {
                                        int updated = contentResolver.update(CountColumns.CONTENT_URI, count,
                                                CountColumns._ID + " = ?", new String[] { "1" });
                                        //Log.d(TAG, "updated: " + updated);
                                    }
                                    cursor.close();
                                }
                            }

                            @Override
                            public void onFailure(Call<CountEarthquakes> call, Throwable t) {
                                Log.e(TAG, "Error: " + t);
                            }
                        });
                iterator++;
            }

            // refreshing last sync
            prefs.edit().putLong(lastNotificationKey, System.currentTimeMillis()).apply();
        }

        // notify PagerActivity that data has been updated
        context.getContentResolver().notifyChange(EarthquakeColumns.CONTENT_URI, null, false);
        context.getContentResolver().notifyChange(NewsColumns.CONTENT_URI, null, false);
        context.getContentResolver().notifyChange(CountColumns.CONTENT_URI, null, false);
        updateWidgets();
    }
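Both sync callbacks above use the same setTime idiom: create a Date for "now", call setTime to move it a fixed number of milliseconds into the past, then pass date.getTime() as the cutoff of a delete. A stand-alone sketch of just that idiom, with the Android ContentResolver call replaced by a printout (the DAY_IN_MILLIS constant mirrors android.text.format.DateUtils):

    import java.util.Date;

    // Sketch of the cleanup cutoff only: move "now" back by one day and use the
    // resulting epoch milliseconds as the threshold for deleting old rows.
    public class CutoffSketch {
        static final long DAY_IN_MILLIS = 24L * 60L * 60L * 1000L; // as in android.text.format.DateUtils

        public static void main(String[] args) {
            Date cutoff = new Date();
            cutoff.setTime(cutoff.getTime() - DAY_IN_MILLIS);
            // In the SyncAdapter this value feeds resolver.delete(..., "TIME <= ?", ...).
            System.out.println("delete rows with TIME <= " + cutoff.getTime());
        }
    }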
From source file:org.loklak.data.DAO.java
    /**
     * write messages without writing them to the dump file
     * @param mws a collection of message wrappers
     * @return a set of message IDs which had been created with this bulk write.
     */
    private static Set<String> writeMessageBulkNoDump(Collection<MessageWrapper> mws) {
        if (mws.size() == 0)
            return new HashSet<>();

        List<IndexEntry<UserEntry>> userBulk = new ArrayList<>();
        List<IndexEntry<MessageEntry>> messageBulk = new ArrayList<>();

        for (MessageWrapper mw : mws) {
            if (messages.existsCache(mw.t.getIdStr()))
                continue; // we omit writing this again
            synchronized (DAO.class) {
                // write the user into the index
                userBulk.add(new IndexEntry<UserEntry>(mw.u.getScreenName(), mw.t.getSourceType(), mw.u));
                // record tweet into search index
                messageBulk.add(new IndexEntry<MessageEntry>(mw.t.getIdStr(), mw.t.getSourceType(), mw.t));
            }
            // teach the classifier
            Classifier.learnPhrase(mw.t.getText(Integer.MAX_VALUE, ""));
        }

        ElasticsearchClient.BulkWriteResult result = null;
        try {
            final Date limitDate = new Date();
            List<IndexEntry<MessageEntry>> macc;
            final Set<String> existed = new HashSet<>();
            //DAO.log("***DEBUG messages INIT: " + messageBulk.size());

            limitDate.setTime(DateParser.oneHourAgo().getTime());
            macc = messageBulk.stream().filter(i -> i.getObject().getCreatedAt().after(limitDate))
                    .collect(Collectors.toList());
            //DAO.log("***DEBUG messages for HOUR: " + macc.size());
            result = messages_hour.writeEntries(macc);
            //DAO.log("***DEBUG messages for HOUR: " + result.getCreated().size() + " created");
            for (IndexEntry<MessageEntry> i : macc)
                if (!(result.getCreated().contains(i.getId())))
                    existed.add(i.getId());
            //DAO.log("***DEBUG messages for HOUR: " + existed.size() + " existed");

            limitDate.setTime(DateParser.oneDayAgo().getTime());
            macc = messageBulk.stream().filter(i -> !(existed.contains(i.getObject().getIdStr())))
                    .filter(i -> i.getObject().getCreatedAt().after(limitDate)).collect(Collectors.toList());
            //DAO.log("***DEBUG messages for DAY : " + macc.size());
            result = messages_day.writeEntries(macc);
            //DAO.log("***DEBUG messages for DAY: " + result.getCreated().size() + " created");
            for (IndexEntry<MessageEntry> i : macc)
                if (!(result.getCreated().contains(i.getId())))
                    existed.add(i.getId());
            //DAO.log("***DEBUG messages for DAY: " + existed.size() + " existed");

            limitDate.setTime(DateParser.oneWeekAgo().getTime());
            macc = messageBulk.stream().filter(i -> !(existed.contains(i.getObject().getIdStr())))
                    .filter(i -> i.getObject().getCreatedAt().after(limitDate)).collect(Collectors.toList());
            //DAO.log("***DEBUG messages for WEEK: " + macc.size());
            result = messages_week.writeEntries(macc);
            //DAO.log("***DEBUG messages for WEEK: " + result.getCreated().size() + " created");
            for (IndexEntry<MessageEntry> i : macc)
                if (!(result.getCreated().contains(i.getId())))
                    existed.add(i.getId());
            //DAO.log("***DEBUG messages for WEEK: " + existed.size() + " existed");

            macc = messageBulk.stream().filter(i -> !(existed.contains(i.getObject().getIdStr())))
                    .collect(Collectors.toList());
            //DAO.log("***DEBUG messages for ALL : " + macc.size());
            result = messages.writeEntries(macc);
            //DAO.log("***DEBUG messages for ALL: " + result.getCreated().size() + " created");
            for (IndexEntry<MessageEntry> i : macc)
                if (!(result.getCreated().contains(i.getId())))
                    existed.add(i.getId());
            //DAO.log("***DEBUG messages for ALL: " + existed.size() + " existed");

            users.writeEntries(userBulk);
        } catch (IOException e) {
            Log.getLog().warn(e);
        }

        if (result == null)
            return new HashSet<String>();
        return result.getCreated();
    }
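The method reuses a single limitDate object, calling setTime three times to move the cutoff from one hour ago to one day ago to one week ago, and at each step keeps only the entries whose createdAt is after the cutoff. A stand-alone sketch of that moving-cutoff reuse, with the DateParser helpers and the index writes replaced by plain millisecond arithmetic (an assumption made purely for illustration):

    import java.util.Arrays;
    import java.util.Date;
    import java.util.List;
    import java.util.stream.Collectors;

    // Sketch of the moving-cutoff idiom: one Date instance, re-pointed with setTime
    // for each time window, used as the stream filter boundary.
    public class MovingCutoffSketch {
        public static void main(String[] args) {
            long now = System.currentTimeMillis();
            List<Date> createdAt = Arrays.asList(
                    new Date(now - 10L * 60L * 1000L),             // 10 minutes ago
                    new Date(now - 5L * 60L * 60L * 1000L),        // 5 hours ago
                    new Date(now - 3L * 24L * 60L * 60L * 1000L)); // 3 days ago

            Date limitDate = new Date();
            limitDate.setTime(now - 60L * 60L * 1000L);            // one hour ago
            System.out.println("within the hour: " + after(createdAt, limitDate));

            limitDate.setTime(now - 24L * 60L * 60L * 1000L);      // one day ago
            System.out.println("within the day:  " + after(createdAt, limitDate));
        }

        static List<Date> after(List<Date> dates, Date limit) {
            return dates.stream().filter(d -> d.after(limit)).collect(Collectors.toList());
        }
    }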
From source file:org.guanxi.sp.engine.form.RegisterGuardFormController.java
    /**
     * Handles the nitty gritty of signing a CSR
     *
     * @param rootCert The certificate of the root authority who will vouch for the entity
     * @param rootPrivKey The private key of the root authority who will vouch for the entity
     * @param csr The entity's CSR
     * @param keyType The type of the key, e.g. "RSA", "DSA"
     * @return A certificate chain as an array of X509Certificate instances or null if an
     *         error occurred
     */
    private X509Certificate[] createSignedCert(X509Certificate rootCert, PrivateKey rootPrivKey,
            PKCS10CertificationRequest csr, String keyType) {
        X509V3CertificateGenerator certGen = new X509V3CertificateGenerator();

        try {
            Date validFrom = new Date();
            validFrom.setTime(validFrom.getTime() - (10 * 60 * 1000));
            Date validTo = new Date();
            validTo.setTime(validTo.getTime() + (20 * (24 * 60 * 60 * 1000)));

            certGen.setSerialNumber(BigInteger.valueOf(System.currentTimeMillis()));
            certGen.setIssuerDN(rootCert.getSubjectX500Principal());
            certGen.setNotBefore(validFrom);
            certGen.setNotAfter(validTo);
            certGen.setSubjectDN(csr.getCertificationRequestInfo().getSubject());
            certGen.setPublicKey(csr.getPublicKey("BC"));

            if (keyType.toLowerCase().equals("rsa"))
                certGen.setSignatureAlgorithm("SHA256WithRSAEncryption");
            if (keyType.toLowerCase().equals("dsa"))
                certGen.setSignatureAlgorithm("DSAWithSHA1");

            certGen.addExtension(X509Extensions.AuthorityKeyIdentifier, false,
                    new AuthorityKeyIdentifierStructure(rootCert));
            certGen.addExtension(X509Extensions.SubjectKeyIdentifier, false,
                    new SubjectKeyIdentifierStructure(csr.getPublicKey("BC")));
            certGen.addExtension(X509Extensions.BasicConstraints, true, new BasicConstraints(false));
            certGen.addExtension(X509Extensions.KeyUsage, true,
                    new KeyUsage(KeyUsage.digitalSignature | KeyUsage.keyEncipherment));
            certGen.addExtension(X509Extensions.ExtendedKeyUsage, true,
                    new ExtendedKeyUsage(KeyPurposeId.id_kp_clientAuth));

            X509Certificate issuedCert = certGen.generate(rootPrivKey, "BC");

            return new X509Certificate[] { issuedCert, rootCert };
        } catch (Exception e) {
            logger.error(e);
            return null;
        }
    }
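Here the setTime calls build the certificate's validity window: notBefore is backdated ten minutes (which absorbs small clock differences between machines) and notAfter is pushed twenty days ahead. The window arithmetic on its own, as a hedged sketch:

    import java.util.Date;

    // Sketch of the validity-window arithmetic only: backdate notBefore slightly
    // and place notAfter a fixed period in the future.
    public class ValidityWindowSketch {
        public static void main(String[] args) {
            Date validFrom = new Date();
            validFrom.setTime(validFrom.getTime() - 10L * 60L * 1000L);         // 10 minutes ago
            Date validTo = new Date();
            validTo.setTime(validTo.getTime() + 20L * 24L * 60L * 60L * 1000L); // 20 days ahead
            System.out.println("notBefore = " + validFrom);
            System.out.println("notAfter  = " + validTo);
        }
    }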
From source file:org.etudes.mneme.impl.PoolServiceImpl.java
    /**
     * {@inheritDoc}
     */
    public void clearStaleMintPools() {
        if (M_log.isDebugEnabled())
            M_log.debug("clearStaleMintPools");

        // give it a day
        Date stale = new Date();
        stale.setTime(stale.getTime() - (1000l * 60l * 60l * 24l));

        // get the list of pools that are stale mint
        List<PoolImpl> pools = this.storage.getStaleMintPools(stale);

        // delete each one
        for (PoolImpl pool : pools) {
            doRemove(pool);
        }
    }
From source file:com.hp.flume.plugins.spoolsource.ReliableSpoolingFileEventReader.java
    /**
     * Returns the next file to be consumed from the chosen directory. If the
     * directory is empty or the chosen file is not readable, this will return
     * an absent option. If the {@link #consumeOrder} variable is
     * {@link ConsumeOrder#OLDEST} then returns the oldest file. If the
     * {@link #consumeOrder} variable is {@link ConsumeOrder#YOUNGEST} then
     * returns the youngest file. If two or more files are equally old/young,
     * then the file name with lower lexicographical value is returned. If the
     * {@link #consumeOrder} variable is {@link ConsumeOrder#RANDOM} then
     * returns any arbitrary file in the directory.
     */
    private Optional<FileInfo> getNextFile() {
        /* Filter to exclude finished or hidden files */
        FileFilter filter = new FileFilter() {
            public boolean accept(File candidate) {
                String fileName = candidate.getName();
                // lucheng
                // int date =
                // String regEx = "[a-zA-Z]+_\\d{4}_\\d{2}_(\\d{2})";
                // Pattern pat = Pattern.compile(regEx);
                //
                // Matcher mat = pat.matcher(fileName);
                // mat.find();
                // int fileDate = Integer.parseInt(mat.group(1));
                // Matcher mat = Pattern.compile(
                //         "[a-zA-Z]+_\\d{4}_\\d{2}_(\\d{2})").matcher(fileName);
                // mat.find();
                Long time = candidate.lastModified();
                Date dt = new Date();
                dt.setTime(time);
                int modifiedTime = dt.getDate();
                if ((candidate.isDirectory()) || (fileName.endsWith(completedSuffix)) || (fileName.startsWith("."))
                        || ignorePattern.matcher(fileName).matches()
                        || new Date().getDate() == modifiedTime /*Integer.parseInt(mat.group(1))*/) {
                    return false;
                }
                return true;
            }
        };
        List<File> candidateFiles = Arrays.asList(spoolDirectory.listFiles(filter));
        if (candidateFiles.isEmpty()) {
            // No matching file in spooling directory.
            return Optional.absent();
        }

        File selectedFile = candidateFiles.get(0); // Select the first random file.
        if (consumeOrder == ConsumeOrder.RANDOM) {
            // Selected file is random.
            return openFile(selectedFile);
        } else if (consumeOrder == ConsumeOrder.YOUNGEST) {
            for (File candidateFile : candidateFiles) {
                long compare = selectedFile.lastModified() - candidateFile.lastModified();
                if (compare == 0) { // ts is same pick smallest lexicographically.
                    selectedFile = smallerLexicographical(selectedFile, candidateFile);
                } else if (compare < 0) { // candidate is younger (cand-ts > selec-ts)
                    selectedFile = candidateFile;
                }
            }
        } else { // default order is OLDEST
            for (File candidateFile : candidateFiles) {
                long compare = selectedFile.lastModified() - candidateFile.lastModified();
                if (compare == 0) { // ts is same pick smallest lexicographically.
                    selectedFile = smallerLexicographical(selectedFile, candidateFile);
                } else if (compare > 0) { // candidate is older (cand-ts < selec-ts).
                    selectedFile = candidateFile;
                }
            }
        }
        return openFile(selectedFile);
    }
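Inside the filter, setTime loads File.lastModified() into a Date so the (deprecated) getDate() accessor can compare the file's day of month with today's, skipping files modified on the current day. That comparison in isolation, as a sketch (the sample path is illustrative):

    import java.io.File;
    import java.util.Date;

    // Sketch of the last-modified check only: wrap a file's modification time in
    // a Date via setTime and compare its day of month with today's.
    public class ModifiedTodaySketch {
        public static void main(String[] args) {
            File candidate = new File("/tmp/sample.log"); // illustrative path
            Date dt = new Date();
            dt.setTime(candidate.lastModified());         // 0L if the file does not exist
            @SuppressWarnings("deprecation")
            boolean modifiedToday = new Date().getDate() == dt.getDate();
            System.out.println("skip (modified today)? " + modifiedToday);
        }
    }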
From source file:org.apache.ws.security.message.TimestampTest.java
    /**
     * This is a test for processing a Timestamp where the "Created" element is in the future.
     * A Timestamp that is 120 seconds in the future should be rejected by default.
     */
    @org.junit.Test
    public void testFutureCreated() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader();
        secHeader.insertSecurityHeader(doc);

        Element timestampElement = doc.createElementNS(WSConstants.WSU_NS,
                WSConstants.WSU_PREFIX + ":" + WSConstants.TIMESTAMP_TOKEN_LN);

        DateFormat zulu = new XmlSchemaDateFormat();
        Element elementCreated = doc.createElementNS(WSConstants.WSU_NS,
                WSConstants.WSU_PREFIX + ":" + WSConstants.CREATED_LN);
        Date createdDate = new Date();
        long currentTime = createdDate.getTime() + 120000;
        createdDate.setTime(currentTime);
        elementCreated.appendChild(doc.createTextNode(zulu.format(createdDate)));
        timestampElement.appendChild(elementCreated);

        secHeader.getSecurityHeader().appendChild(timestampElement);

        if (LOG.isDebugEnabled()) {
            String outputString = org.apache.ws.security.util.XMLUtils.PrettyDocumentToString(doc);
            LOG.debug(outputString);
        }

        //
        // Do some processing
        //
        WSSConfig config = WSSConfig.getNewInstance();
        try {
            verify(doc, config);
            fail("The timestamp validation should have failed");
        } catch (WSSecurityException ex) {
            assertTrue(ex.getErrorCode() == WSSecurityException.MESSAGE_EXPIRED);
        }
    }
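The only setTime usage in this test is the two-minute shift into the future that produces the invalid Created value the validator is expected to reject. The shift on its own, reduced to a sketch:

    import java.util.Date;

    // Sketch of the future-timestamp setup only: take "now" and push it 120 seconds
    // forward, the value the test expects the Timestamp validator to reject.
    public class FutureCreatedSketch {
        public static void main(String[] args) {
            Date createdDate = new Date();
            createdDate.setTime(createdDate.getTime() + 120_000L); // 120 seconds ahead
            System.out.println("Created (future) = " + createdDate);
        }
    }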
From source file:org.apache.ws.security.message.TimestampTest.java
    /**
     * This is a test for processing a Timestamp where it contains multiple "Created" elements.
     * This Timestamp should be rejected.
     */
    @org.junit.Test
    public void testMultipleCreated() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader();
        secHeader.insertSecurityHeader(doc);

        Element timestampElement = doc.createElementNS(WSConstants.WSU_NS,
                WSConstants.WSU_PREFIX + ":" + WSConstants.TIMESTAMP_TOKEN_LN);

        DateFormat zulu = new XmlSchemaDateFormat();
        Element elementCreated = doc.createElementNS(WSConstants.WSU_NS,
                WSConstants.WSU_PREFIX + ":" + WSConstants.CREATED_LN);
        Date createdDate = new Date();
        long currentTime = createdDate.getTime() + 300000;
        createdDate.setTime(currentTime);
        elementCreated.appendChild(doc.createTextNode(zulu.format(createdDate)));
        timestampElement.appendChild(elementCreated);
        timestampElement.appendChild(elementCreated.cloneNode(true));

        secHeader.getSecurityHeader().appendChild(timestampElement);

        if (LOG.isDebugEnabled()) {
            String outputString = org.apache.ws.security.util.XMLUtils.PrettyDocumentToString(doc);
            LOG.debug(outputString);
        }

        //
        // Do some processing
        //
        try {
            verify(doc, WSSConfig.getNewInstance());
            fail("The timestamp validation should have failed on multiple Created elements");
        } catch (WSSecurityException ex) {
            // expected
        }
    }
From source file:org.apache.ws.security.message.TimestampTest.java
    /**
     * This is a test for processing a Timestamp where it contains multiple "Expires" elements.
     * This Timestamp should be rejected.
     */
    @org.junit.Test
    public void testMultipleExpires() throws Exception {
        Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
        WSSecHeader secHeader = new WSSecHeader();
        secHeader.insertSecurityHeader(doc);

        Element timestampElement = doc.createElementNS(WSConstants.WSU_NS,
                WSConstants.WSU_PREFIX + ":" + WSConstants.TIMESTAMP_TOKEN_LN);

        DateFormat zulu = new XmlSchemaDateFormat();
        Element elementCreated = doc.createElementNS(WSConstants.WSU_NS,
                WSConstants.WSU_PREFIX + ":" + WSConstants.EXPIRES_LN);
        Date createdDate = new Date();
        long currentTime = createdDate.getTime() + 300000;
        createdDate.setTime(currentTime);
        elementCreated.appendChild(doc.createTextNode(zulu.format(createdDate)));
        timestampElement.appendChild(elementCreated);
        timestampElement.appendChild(elementCreated.cloneNode(true));

        secHeader.getSecurityHeader().appendChild(timestampElement);

        if (LOG.isDebugEnabled()) {
            String outputString = org.apache.ws.security.util.XMLUtils.PrettyDocumentToString(doc);
            LOG.debug(outputString);
        }

        //
        // Do some processing
        //
        try {
            verify(doc, WSSConfig.getNewInstance());
            fail("The timestamp validation should have failed on multiple Expires elements");
        } catch (WSSecurityException ex) {
            // expected
        }
    }