List of usage examples for java.util.concurrent.TimeUnit.DAYS
TimeUnit DAYS
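Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the TimeUnit.DAYS conversions that recur throughout them:

import java.util.concurrent.TimeUnit;

public class TimeUnitDaysDemo {
    public static void main(String[] args) {
        // A day expressed in smaller units
        System.out.println(TimeUnit.DAYS.toMillis(1));  // 86400000
        System.out.println(TimeUnit.DAYS.toSeconds(1)); // 86400
        System.out.println(TimeUnit.DAYS.toHours(1));   // 24

        // The general form: ask the *target* unit to convert from a source unit
        System.out.println(TimeUnit.SECONDS.convert(365, TimeUnit.DAYS)); // 31536000

        // The constant's name, handy for labels and log output
        System.out.println(TimeUnit.DAYS.name()); // DAYS
    }
}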
From source file:com.cloudbees.demo.beesshop.web.ProductController.java
/**
 * @param id    id of the product
 * @param photo photo to associate with the product
 * @return redirection to display the product
 */
@RequestMapping(value = "/product/{id}/photo", method = RequestMethod.POST)
@Transactional
public String updatePhoto(@PathVariable long id, @RequestParam("photo") MultipartFile photo) {
    if (photo.getSize() == 0) {
        logger.info("Empty uploaded file");
    } else {
        try {
            String contentType = fileStorageService.findContentType(photo.getOriginalFilename());
            if (contentType == null) {
                logger.warn("Skip file with unsupported extension '{}'", photo.getName());
            } else {
                InputStream photoInputStream = photo.getInputStream();
                long photoSize = photo.getSize();
                ObjectMetadata objectMetadata = new ObjectMetadata();
                objectMetadata.setContentLength(photoSize);
                objectMetadata.setContentType(contentType);
                // cache publicly for one year, expressed in seconds
                objectMetadata.setCacheControl(
                        "public, max-age=" + TimeUnit.SECONDS.convert(365, TimeUnit.DAYS));
                String photoUrl = fileStorageService.storeFile(photoInputStream, objectMetadata);
                Product product = productRepository.get(id);
                logger.info("Saved {}", photoUrl);
                product.setPhotoUrl(photoUrl);
                productRepository.update(product);
            }
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }
    return "redirect:/product/" + id;
}
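A note on the Cache-Control line above: TimeUnit.SECONDS.convert(365, TimeUnit.DAYS) asks the target unit (SECONDS) to convert a value expressed in DAYS. The equivalent TimeUnit.DAYS.toSeconds(365) reads in the direction of the data and is arguably clearer; a one-line sketch:

long maxAgeSeconds = TimeUnit.DAYS.toSeconds(365); // 31536000 seconds, i.e. one year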
From source file:org.dcache.util.histograms.HistogramModelTest.java
@Test
public void binUnitShouldBe1ForMaxValue50Days()
        throws NoSuchMethodException, InstantiationException,
               IllegalAccessException, InvocationTargetException {
    givenCountingHistogram();
    givenFilelifetimeValuesFor(50);
    givenBinCountOf(51);
    givenBinUnitOf((double) TimeUnit.DAYS.toMillis(1));
    givenBinLabelOf(TimeUnit.DAYS.name());
    givenDataLabelOf("COUNT");
    givenHistogramTypeOf("File Lifetime Count");
    whenConfigureIsCalled();
    assertThatBuildSucceeded();
    assertThatBinWidthIs(1);
}
From source file:org.dcache.util.histograms.CountingHistogramTest.java
@Test
public void buildShouldFailWhenNoCountGivenToCounting() throws Exception {
    givenCountingHistogram();
    givenFilelifetimeValuesFor(150);
    givenBinUnitOf((double) TimeUnit.DAYS.toMillis(1));
    givenBinLabelOf(TimeUnit.DAYS.name());
    givenDataLabelOf("COUNT");
    givenHistogramTypeOf("File Lifetime Count");
    whenConfigureIsCalled();
    assertThatBuildFailed();
}
From source file:hudson.security.TokenBasedRememberMeServices2SEC868Test.java
@Test
@Issue("SECURITY-868")
public void rememberMeToken_shouldNotAccept_expirationDurationLargerThanConfigured() throws Exception {
    j.jenkins.setDisableRememberMe(false);
    HudsonPrivateSecurityRealm realm = new HudsonPrivateSecurityRealm(false, false, null);
    TokenBasedRememberMeServices2 tokenService =
            (TokenBasedRememberMeServices2) realm.getSecurityComponents().rememberMe;
    j.jenkins.setSecurityRealm(realm);

    String username = "alice";
    User alice = realm.createAccount(username, username);

    { // a malicious cookie with an expiration too far in the future should not work
        JenkinsRule.WebClient wc = j.createWebClient();
        // by default we have 14 days of validity;
        // here we artificially increase the duration of validity, which could be used to gain permanent access
        long oneDay = TimeUnit.DAYS.toMillis(1);
        Cookie cookie = createRememberMeCookie(tokenService, oneDay, alice);
        wc.getCookieManager().addCookie(cookie);

        // the application should not use the cookie to connect
        assertUserNotConnected(wc, username);
    }

    { // a hand-crafted cookie with a regular expiration duration works
        JenkinsRule.WebClient wc = j.createWebClient();
        // by default we have 14 days of validity;
        // here we reduce the expiration date a bit to simulate an "old" cookie (regular usage)
        long minusFiveMinutes = TimeUnit.MINUTES.toMillis(-5);
        Cookie cookie = createRememberMeCookie(tokenService, minusFiveMinutes, alice);
        wc.getCookieManager().addCookie(cookie);

        // with the remember-me feature re-enabled, the regular cookie is accepted
        assertUserConnected(wc, username);
    }
}
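The two offsets above work because TimeUnit conversions accept negative values: TimeUnit.MINUTES.toMillis(-5) is simply -300000, so the second cookie is back-dated slightly while the first is pushed a full day forward. A minimal sketch of that arithmetic (the base variable is illustrative; how createRememberMeCookie applies the offset is internal to the test):

long base = System.currentTimeMillis();
long aDayAhead = base + TimeUnit.DAYS.toMillis(1);           // +86400000 ms
long fiveMinutesBack = base + TimeUnit.MINUTES.toMillis(-5); //   -300000 ms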
From source file:uk.ac.cam.cl.dtg.segue.dao.content.GitContentManager.java
/**
 * FOR TESTING PURPOSES ONLY - Constructor for instantiating a new Git Content Manager Object.
 *
 * @param database
 *            - the database that the content manager manages.
 * @param searchProvider
 *            - search provider that the content manager manages and controls.
 * @param contentMapper
 *            - the utility class for mapping content objects.
 */
public GitContentManager(final GitDb database, final ISearchProvider searchProvider,
        final ContentMapper contentMapper) {
    this.database = database;
    this.mapper = contentMapper;
    this.searchProvider = searchProvider;
    this.globalProperties = null;
    this.allowOnlyPublishedContent = false;
    this.cache = CacheBuilder.newBuilder().softValues().expireAfterAccess(1, TimeUnit.DAYS).build();
}
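The expireAfterAccess(1, TimeUnit.DAYS) call above is the classic Guava CacheBuilder overload taking a duration and a TimeUnit. A minimal, self-contained sketch of the same idiom (the key and value types are illustrative, not from the project):

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.concurrent.TimeUnit;

Cache<String, byte[]> cache = CacheBuilder.newBuilder()
        .softValues()                        // values may be reclaimed under memory pressure
        .expireAfterAccess(1, TimeUnit.DAYS) // evict entries untouched for a day
        .build();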
From source file:org.eclipse.epp.internal.logging.aeri.ui.log.ProblemsDatabaseUpdateJobTest.java
private void mockDatabaseUpToDate() {
    // downloaded now
    when(configuration.getProblemsZipLastDownloadTimestamp()).thenReturn(System.currentTimeMillis());
    // long time to live
    when(configuration.getProblemsTtlMs()).thenReturn(TimeUnit.MILLISECONDS.convert(10, TimeUnit.DAYS));
}
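Here TimeUnit.MILLISECONDS.convert(10, TimeUnit.DAYS) yields a ten-day TTL in milliseconds (864000000); paired with a download timestamp of "now", the mocked database is guaranteed to look fresh.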
From source file:io.scigraph.owlapi.loader.BatchOwlLoader.java
public void loadOntology() throws InterruptedException, ExecutionException {
    CompletionService<Long> completionService = new ExecutorCompletionService<Long>(exec);
    Set<Future<?>> futures = new HashSet<>();
    if (!ontologies.isEmpty()) {
        for (int i = 0; i < numConsumers; i++) {
            futures.add(completionService.submit(consumerProvider.get()));
        }
        for (int i = 0; i < numProducers; i++) {
            futures.add(completionService.submit(producerProvider.get()));
        }
        for (OntologySetup ontology : ontologies) {
            urlQueue.offer(ontology);
        }
        // one poison pill per producer so each one shuts down cleanly
        for (int i = 0; i < numProducers; i++) {
            urlQueue.offer(POISON_STR);
        }
    }
    while (!futures.isEmpty()) {
        Future<?> completedFuture = completionService.take();
        futures.remove(completedFuture);
        try {
            completedFuture.get();
        } catch (ExecutionException e) {
            logger.log(Level.SEVERE, "Stopping batchLoading due to: " + e.getMessage(), e);
            exec.shutdownNow();
            throw new InterruptedException(e.getCause().getMessage());
        }
    }
    exec.shutdown();
    // effectively unbounded wait for the remaining work to drain
    exec.awaitTermination(10, TimeUnit.DAYS);
    graph.shutdown();
    logger.info("Postprocessing...");
    postprocessorProvider.get().postprocess();
    if (cliqueConfiguration.isPresent()) {
        postprocessorProvider.runCliquePostprocessor(cliqueConfiguration.get());
    }
    postprocessorProvider.shutdown();
}
From source file:eu.freme.bpt.service.AbstractService.java
public void run(final FailurePolicy failurePolicy, final int nrThreads, final Callback callback) {
    logger.info("Running service {}", this.getClass().getName());
    ExecutorService executorService = Executors.newFixedThreadPool(nrThreads);
    Unirest.setTimeouts(30000, 300000); // TODO: configurable?
    while (ioIterator.hasNext()) {
        final IO io = ioIterator.next();
        executorService.submit(() -> {
            try (final InputStream inputStream = io.getInputStream();
                    final OutputStream outputStream = io.getOutputStream()) {
                byte[] input = IOUtils.toByteArray(inputStream);
                HttpResponse<InputStream> response = Unirest.post(endpoint).headers(headers)
                        .queryString(parameters).body(input).asBinary();
                if (response.getStatus() == 200) {
                    logger.debug("Request alright.");
                    try (InputStream responseInput = response.getBody()) {
                        IOUtils.copy(responseInput, outputStream);
                        callback.onTaskComplete(io.getInputFile(), io.getOutputFile());
                    } catch (IOException e) {
                        logger.error("Error while writing response.", e);
                        callback.onTaskFails(io.getInputFile(), io.getOutputFile(),
                                "Error while writing response. " + e.getMessage());
                        if (!failurePolicy.check()) {
                            System.exit(3);
                        }
                    }
                } else {
                    String body = IOUtils.toString(response.getBody());
                    String msg = "Error response from service " + endpoint + ": Status "
                            + response.getStatus() + ": " + response.getStatusText() + " - " + body;
                    logger.error(msg);
                    callback.onTaskFails(io.getInputFile(), io.getOutputFile(), msg);
                    if (!failurePolicy.check()) {
                        System.exit(3);
                    }
                }
            } catch (Exception e) {
                logger.error("Request to {} failed.", endpoint, e);
                callback.onTaskFails(io.getInputFile(), io.getOutputFile(),
                        "Request to " + endpoint + " failed. " + e.getMessage());
                if (!failurePolicy.check()) {
                    System.exit(3);
                }
            }
        });
    }
    executorService.shutdown();
    try {
        // effectively unbounded: give in-flight requests up to a day to finish
        executorService.awaitTermination(1, TimeUnit.DAYS);
    } catch (InterruptedException e) {
        logger.warn("Waiting on termination interrupted.");
    }
    callback.onBatchComplete();
}
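Both this service and the ontology loader above pass TimeUnit.DAYS to awaitTermination as an "effectively unbounded" wait rather than a real deadline. A minimal, self-contained sketch of that shutdown idiom (pool size and task are illustrative, not from either project):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ShutdownDemo {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        pool.submit(() -> System.out.println("work"));
        pool.shutdown(); // stop accepting new tasks; queued tasks still run
        // DAYS as an "effectively unbounded" timeout, mirroring the loaders above
        if (!pool.awaitTermination(1, TimeUnit.DAYS)) {
            pool.shutdownNow(); // deadline passed: interrupt whatever is left
        }
    }
}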
From source file:com.stratio.ingestion.sink.druid.DruidSinkIT.java
private Event getTrackerEvent() {
    Random random = new Random();
    String[] users = new String[] { "user1@santander.com", "user2@santander.com",
            "user3@santander.com", "user4@santander.com" };
    String[] isoCode = new String[] { "DE", "ES", "US", "FR" };
    TimeUnit[] offset = new TimeUnit[] { TimeUnit.DAYS, TimeUnit.HOURS, TimeUnit.SECONDS };
    ObjectNode jsonBody = new ObjectNode(JsonNodeFactory.instance);
    Map<String, String> headers;
    ObjectMapper mapper = new ObjectMapper();
    JsonNode jsonNode = null;
    final String fileName = "/trackerSample" + random.nextInt(4) + ".json";
    try {
        jsonNode = mapper.readTree(getClass().getResourceAsStream(fileName));
    } catch (IOException e) {
        e.printStackTrace();
    }
    headers = mapper.convertValue(jsonNode, Map.class);
    // jitter the timestamp by a random multiple of a randomly chosen unit's offset
    headers.put("timestamp", String.valueOf(
            new Date().getTime() + getOffset(offset[random.nextInt(3)]) * random.nextInt(100)));
    headers.put("santanderID", users[random.nextInt(4)]);
    headers.put("isoCode", isoCode[random.nextInt(4)]);
    return EventBuilder.withBody(jsonBody.toString().getBytes(Charsets.UTF_8), headers);
}
From source file:com.linkedin.pinot.query.aggregation.AggregationQueriesOnMultiValueColumnTest.java
private void setupSegment() throws Exception {
    final String filePath = TestUtils
            .getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));
    if (INDEX_DIR.exists()) {
        FileUtils.deleteQuietly(INDEX_DIR);
    }
    final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
            new File(filePath), INDEX_DIR, "daysSinceEpoch", TimeUnit.DAYS, "test");
    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    driver.init(config);
    driver.build();
    LOGGER.debug("built at : {}", INDEX_DIR.getAbsolutePath());
    final File indexSegmentDir = new File(INDEX_DIR, driver.getSegmentName());
    _indexSegment = ColumnarSegmentLoader.load(indexSegmentDir, ReadMode.heap);
    Map<String, ColumnMetadata> metadataMap = ((SegmentMetadataImpl) _indexSegment.getSegmentMetadata())
            .getColumnMetadataMap();
    for (ColumnMetadata columnMetadata : metadataMap.values()) {
        LOGGER.debug("{} : {}", columnMetadata.getColumnName(), columnMetadata.isSingleValue());
    }
}
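In this Pinot test, TimeUnit.DAYS is not converting anything: it appears alongside the "daysSinceEpoch" column name in the segment-generation spec, declaring the unit of the segment's time column so that each stored value is interpreted as a count of days since the epoch.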