Example usage for java.util.concurrent TimeUnit DAYS

List of usage examples for java.util.concurrent TimeUnit DAYS

Introduction

This page shows example usages of java.util.concurrent TimeUnit DAYS.

Prototype

TimeUnit DAYS

To view the source code for java.util.concurrent TimeUnit DAYS, click the Source link.

Document

Time unit representing twenty four hours.
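
For orientation, here is a minimal sketch (not taken from the source files below; the class name TimeUnitDaysDemo is made up for the example) showing the conversions TimeUnit.DAYS supports:

import java.util.concurrent.TimeUnit;

public class TimeUnitDaysDemo {
    public static void main(String[] args) {
        // One day expressed in smaller units.
        System.out.println(TimeUnit.DAYS.toHours(1));   // 24
        System.out.println(TimeUnit.DAYS.toSeconds(1)); // 86400
        System.out.println(TimeUnit.DAYS.toMillis(1));  // 86400000

        // Converting the other way: 48 hours equals 2 days.
        System.out.println(TimeUnit.DAYS.convert(48, TimeUnit.HOURS)); // 2
    }
}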

Usage

From source file:io.wcm.caravan.pipeline.impl.JsonPipelineFactoryImplTest.java

@Test
public void testCreateSpecifiedRequest() throws Exception {
    request = new CaravanHttpRequestBuilder("service").append("/path").build();
    ImmutableListMultimap<String, String> headers = ImmutableListMultimap.of("Cache-Control",
            "max-age=" + Long.toString(TimeUnit.DAYS.toSeconds(1)));
    when(caravanHttpClient.execute(request)).thenReturn(Observable.just(new CaravanHttpResponseBuilder()
            .status(HttpStatus.SC_OK).reason("Content").headers(headers).body(new byte[0]).build()));

    JsonPipeline pipeline = factory.create(request);
    JsonPipelineOutput output = pipeline.getOutput().toBlocking().first();
    assertEquals(86400, output.getMaxAge());
}
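
In this test, TimeUnit.DAYS.toSeconds(1) builds a Cache-Control max-age header worth one day, and the assertion confirms the pipeline output reports the same 86400-second max-age.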

From source file:com.hpcloud.util.Duration.java

/** Infinite constructor. */
private Duration() {
    finite = false;
    this.length = Long.MAX_VALUE;
    this.timeUnit = TimeUnit.DAYS;
}
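
Because DAYS is the coarsest TimeUnit constant, Long.MAX_VALUE days serves as a practical stand-in for an infinite duration in this private constructor.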

From source file:com.epam.ta.reportportal.TestConfig.java

@Bean
@Primary
public CacheManager getGlobalCacheManager() {
    SimpleCacheManager cacheManager = new SimpleCacheManager();

    GuavaCache tickets = new GuavaCache(EXTERNAL_SYSTEM_TICKET_CACHE,
            CacheBuilder.newBuilder().maximumSize(ticketCacheSize).softValues()
                    .expireAfterAccess(ticketCacheExpiration, TimeUnit.MINUTES).build());
    GuavaCache projects = new GuavaCache(JIRA_PROJECT_CACHE,
            CacheBuilder.newBuilder().maximumSize(projectCacheSize).softValues()
                    .expireAfterAccess(projectCacheExpiration, TimeUnit.DAYS).build());
    GuavaCache users = new GuavaCache(USERS_CACHE, CacheBuilder.newBuilder().maximumSize(userCacheSize)
            .expireAfterWrite(userCacheExpiration, TimeUnit.MINUTES).build());
    GuavaCache projectInfo = new GuavaCache(PROJECT_INFO_CACHE,
            CacheBuilder.newBuilder().maximumSize(projectCacheSize).softValues()
                    .expireAfterWrite(projectInfoCacheExpiration, TimeUnit.MINUTES).build());
    GuavaCache assignedUsers = new GuavaCache(ASSIGNED_USERS_CACHE,
            CacheBuilder.newBuilder().maximumSize(userCacheSize).weakKeys()
                    .expireAfterWrite(userCacheExpiration, TimeUnit.MINUTES).build());

    //@formatter:off
    cacheManager.setCaches(ImmutableList.<GuavaCache>builder().add(tickets).add(projects).add(users)
            .add(projectInfo).add(assignedUsers).build());
    //@formatter:on
    return cacheManager;
}
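
Here TimeUnit.DAYS gives the JIRA project cache a day-granularity expire-after-access policy, while the surrounding caches expire after minutes.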

From source file:pt.ua.tm.neji.evaluation.craft.statistics.FolderBatchExecutor.java

public void run(final Context context) throws NejiException {
    logger.info("Initializing context...");
    context.initialize();
    logger.info("Installing multi-threading support...");
    context.addMultiThreadingSupport(numThreads);

    ExecutorService executor;

    logger.info("Starting thread pool with support for {} threads...", numThreads);
    executor = Executors.newFixedThreadPool(numThreads);

    StopWatch timer = new StopWatch();
    timer.start();

    File inputFolder = new File(inputFolderPath);
    File[] files = inputFolder.listFiles(new FileUtil.Filter(new String[] { "txt" }));

    for (File file : files) {
        //            File a1File = new File(file.getAbsolutePath().replaceAll(".txt", ".ann"));
        File a1File = new File(file.getAbsolutePath().replaceAll(".txt", ".a1"));
        Processor processor = getDocumentProcessor(file, a1File, context);

        // Process entry
        executor.execute(processor);
    }

    executor.shutdown();
    try {
        executor.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }
    logger.info("Stopped thread pool.");

    logger.info("Terminating context...");
    context.terminate();

    timer.stop();
    logger.info("Processed {} files in {}", processedCorpora.size(), timer.toString());
}
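
awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS) is a common idiom for blocking on a thread pool effectively forever: the timeout is so large it cannot elapse in practice.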

From source file:org.dcache.util.histograms.CountingHistogramTest.java

@Test
public void binUnitShouldBe1ForMaxValue50Days() throws Exception {
    givenCountingHistogram();
    givenFilelifetimeValuesFor(50);
    givenBinCountOf(51);
    givenBinUnitOf((double) TimeUnit.DAYS.toMillis(1));
    givenBinLabelOf(TimeUnit.DAYS.name());
    givenDataLabelOf("COUNT");
    givenHistogramTypeOf("File Lifetime Count");
    whenConfigureIsCalled();
    assertThatBuildSucceeded();
    assertThatBinWidthIs(1);
}
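
TimeUnit.DAYS.toMillis(1) supplies the histogram's bin unit (one day in milliseconds), and TimeUnit.DAYS.name() supplies the "DAYS" bin label.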

From source file:com.linkedin.pinot.segments.v1.creator.DictionariesTest.java

@BeforeClass
public static void before() throws Exception {
    final String filePath = TestUtils
            .getFileFromResourceUrl(DictionariesTest.class.getClassLoader().getResource(AVRO_DATA));
    if (INDEX_DIR.exists()) {
        FileUtils.deleteQuietly(INDEX_DIR);
    }

    final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
            new File(filePath), INDEX_DIR, "time_day", TimeUnit.DAYS, "test");

    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    driver.init(config);
    driver.build();

    final Schema schema = AvroUtils.extractSchemaFromAvro(new File(filePath));

    final DataFileStream<GenericRecord> avroReader = AvroUtils.getAvroReader(new File(filePath));
    final org.apache.avro.Schema avroSchema = avroReader.getSchema();
    final String[] columns = new String[avroSchema.getFields().size()];
    int i = 0;
    for (final Field f : avroSchema.getFields()) {
        columns[i] = f.name();
        i++;
    }

    uniqueEntries = new HashMap<String, Set<Object>>();
    for (final String column : columns) {
        uniqueEntries.put(column, new HashSet<Object>());
    }

    while (avroReader.hasNext()) {
        final GenericRecord rec = avroReader.next();
        for (final String column : columns) {
            Object val = rec.get(column);
            if (val instanceof Utf8) {
                val = ((Utf8) val).toString();
            }
            uniqueEntries.get(column)
                    .add(getAppropriateType(schema.getFieldSpecFor(column).getDataType(), val));
        }
    }
}
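
In this test setup, TimeUnit.DAYS is passed to the segment generator config as the granularity of the time_day time column.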

From source file:runnable.Master.java

@Override
public void run() {
    try {
        alive = true;
        Thread overseer = new Thread(new MultiLoadBar(this));
        overseer.start();

        long beginTime = System.currentTimeMillis();

        ExecutorService threadPool = Executors.newCachedThreadPool();

        for (Slave slave : slaves) {
            threadPool.submit(slave);
            //                threadPool.submit(new ConsoleSlaveOverseer(slave));
        }

        threadPool.shutdown();
        threadPool.awaitTermination(14, TimeUnit.DAYS);

        ByteStreamUtil.merge(plans, fileAbsPath);

        long size = (new File(fileAbsPath)).length();
        long finishTime = (System.currentTimeMillis() - beginTime);

        System.out.println("\nDownloaded: " + UnitUtil.displaySize(size));
        System.out.println("Time: " + UnitUtil.displayTime(finishTime));
        alive = false;

    } catch (IOException | InterruptedException ex) {
        Logger.getLogger(Master.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        alive = false;
    }
}
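
threadPool.awaitTermination(14, TimeUnit.DAYS) caps the wait for the download slaves at two weeks, a generous upper bound rather than a realistic deadline.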

From source file:org.gradle.cache.internal.WrapperDistributionCleanupAction.java

public boolean execute(@Nonnull CleanupProgressMonitor progressMonitor) {
    long maximumTimestamp = Math.max(0, System.currentTimeMillis() - TimeUnit.DAYS.toMillis(1));
    Set<GradleVersion> usedVersions = this.usedGradleVersions.getUsedGradleVersions();
    Multimap<GradleVersion, File> checksumDirsByVersion = determineChecksumDirsByVersion();
    for (GradleVersion version : checksumDirsByVersion.keySet()) {
        if (!usedVersions.contains(version) && version.compareTo(GradleVersion.current()) < 0) {
            deleteDistributions(checksumDirsByVersion.get(version), maximumTimestamp, progressMonitor);
        } else {
            progressMonitor.incrementSkipped(checksumDirsByVersion.get(version).size());
        }
    }
    return true;
}
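
Subtracting TimeUnit.DAYS.toMillis(1) from the current time produces a cutoff timestamp, so only unused distributions older than one day are deleted.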

From source file:edu.msu.cme.rdp.kmer.cli.FastKmerFilter.java

public static void main(String[] args) throws Exception {
    final KmerSet<Set<RefKmer>> kmerSet;
    final SeqReader queryReader;
    final SequenceType querySeqType;
    final File queryFile;
    final KmerStartsWriter out;
    final boolean translQuery;
    final int wordSize;
    final int translTable;
    final boolean alignedSeqs;
    final List<String> refLabels = new ArrayList();
    final int maxThreads;
    final int trieWordSize;

    try {
        CommandLine cmdLine = new PosixParser().parse(options, args);
        args = cmdLine.getArgs();

        if (args.length < 3) {
            throw new Exception("Unexpected number of arguments");
        }

        if (cmdLine.hasOption("out")) {
            out = new KmerStartsWriter(cmdLine.getOptionValue("out"));
        } else {
            out = new KmerStartsWriter(System.out);
        }

        if (cmdLine.hasOption("aligned")) {
            alignedSeqs = true;
        } else {
            alignedSeqs = false;
        }

        if (cmdLine.hasOption("transl-table")) {
            translTable = Integer.valueOf(cmdLine.getOptionValue("transl-table"));
        } else {
            translTable = 11;
        }

        if (cmdLine.hasOption("threads")) {
            maxThreads = Integer.valueOf(cmdLine.getOptionValue("threads"));
        } else {
            maxThreads = Runtime.getRuntime().availableProcessors();
        }

        queryFile = new File(args[1]);
        wordSize = Integer.valueOf(args[0]);
        SequenceType refSeqType = null;

        querySeqType = SeqUtils.guessSequenceType(queryFile);
        queryReader = new SequenceReader(queryFile);

        if (querySeqType == SequenceType.Protein) {
            throw new Exception("Expected nucl query sequences");
        }

        refSeqType = SeqUtils
                .guessSequenceType(new File(args[2].contains("=") ? args[2].split("=")[1] : args[2]));

        translQuery = refSeqType == SequenceType.Protein;

        if (translQuery && wordSize % 3 != 0) {
            throw new Exception("Word size must be a multiple of 3 for nucl ref seqs");
        }

        if (translQuery) {
            trieWordSize = wordSize / 3;
        } else {
            trieWordSize = wordSize;
        }
        kmerSet = new KmerSet<Set<RefKmer>>();//new KmerTrie(trieWordSize, translQuery);

        for (int index = 2; index < args.length; index++) {
            String refName;
            String refFileName = args[index];
            if (refFileName.contains("=")) {
                String[] lexemes = refFileName.split("=");
                refName = lexemes[0];
                refFileName = lexemes[1];
            } else {
                String tmpName = new File(refFileName).getName();
                if (tmpName.contains(".")) {
                    refName = tmpName.substring(0, tmpName.lastIndexOf("."));
                } else {
                    refName = tmpName;
                }
            }

            File refFile = new File(refFileName);

            if (refSeqType != SeqUtils.guessSequenceType(refFile)) {
                throw new Exception(
                        "Reference file " + refFile + " contains " + SeqUtils.guessFileFormat(refFile)
                                + " sequences but expected " + refSeqType + " sequences");
            }

            SequenceReader seqReader = new SequenceReader(refFile);
            Sequence seq;

            while ((seq = seqReader.readNextSequence()) != null) {
                if (seq.getSeqName().startsWith("#")) {
                    continue;
                }

                KmerGenerator kmers;
                try {
                    if (translQuery) { //protein ref
                        kmers = new ProtKmerGenerator(seq.getSeqString(), trieWordSize, alignedSeqs);
                    } else {
                        kmers = new NuclKmerGenerator(seq.getSeqString(), trieWordSize, alignedSeqs);
                    }
                    while (kmers.hasNext()) {
                        Kmer temp = kmers.next();
                        long[] next = temp.getLongKmers();
                        Set<RefKmer> refKmers = kmerSet.get(next);
                        if (refKmers == null) {
                            refKmers = new HashSet();
                            kmerSet.add(next, refKmers);
                        }

                        RefKmer kmerRef = new RefKmer();
                        kmerRef.modelPos = kmers.getPosition();
                        kmerRef.refFileIndex = refLabels.size();
                        kmerRef.refSeqid = seq.getSeqName();
                        refKmers.add(kmerRef);
                    }
                } catch (IllegalArgumentException ex) {
                    //System.err.println(seq.getSeqName()+ " " + ex.getMessage());
                }
            }
            seqReader.close();

            refLabels.add(refName);
        }

    } catch (Exception e) {
        new HelpFormatter().printHelp(
                "KmerSearch <kmerSize> <query_file> [name=]<ref_file> ...\nkmerSize should be multiple of 3, (recommend 45, minimum 30, maximum 63) ",
                options);
        e.printStackTrace();
        System.exit(1);
        throw new RuntimeException("Stupid jvm"); //While this will never get thrown it is required to make sure javac doesn't get confused about uninitialized variables
    }

    long startTime = System.currentTimeMillis();
    long seqCount = 0;
    final int maxTasks = 25000;

    System.err.println("Starting kmer mapping at " + new Date());
    System.err.println("*  Number of threads:       " + maxThreads);
    System.err.println("*  References:              " + refLabels);
    System.err.println("*  Reads file:              " + queryFile);
    System.err.println("*  Kmer length:             " + trieWordSize);
    System.err.println("*  Kmer Refset Size:        " + kmerSet.size());

    final AtomicInteger processed = new AtomicInteger();
    final AtomicInteger outstandingTasks = new AtomicInteger();

    ExecutorService service = Executors.newFixedThreadPool(maxThreads);

    Sequence querySeq;

    while ((querySeq = queryReader.readNextSequence()) != null) {
        seqCount++;

        String seqString = querySeq.getSeqString();

        if ((!translQuery && seqString.length() < wordSize)
                || (translQuery && seqString.length() < wordSize + 2)) {
            //System.err.println(querySeq.getSeqName() + "\t" + seqString.length());
            continue;
        }

        final Sequence threadSeq = querySeq;

        Runnable r = new Runnable() {

            public void run() {
                try {
                    processSeq(threadSeq, refLabels, kmerSet, out, wordSize, translQuery, translTable, false);
                    processSeq(threadSeq, refLabels, kmerSet, out, wordSize, translQuery, translTable, true);

                    processed.incrementAndGet();
                    outstandingTasks.decrementAndGet();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        };

        outstandingTasks.incrementAndGet();
        service.submit(r);

        while (outstandingTasks.get() >= maxTasks)
            ;

        if ((processed.get() + 1) % 1000000 == 0) {
            System.err.println("Processed " + processed + " sequences in "
                    + (System.currentTimeMillis() - startTime) + " ms");
        }
    }

    service.shutdown();
    service.awaitTermination(1, TimeUnit.DAYS);

    System.err.println("Finished Processed " + processed + " sequences in "
            + (System.currentTimeMillis() - startTime) + " ms");

    out.close();
}
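
service.awaitTermination(1, TimeUnit.DAYS) gives the kmer-mapping pool up to a full day to drain after shutdown(), which for this workload effectively means "wait until done".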

From source file:org.apache.hadoop.hive.common.type.HiveIntervalDayTime.java

public void set(int days, int hours, int minutes, int seconds, int nanos) {
    long totalSeconds = seconds;
    totalSeconds += TimeUnit.DAYS.toSeconds(days);
    totalSeconds += TimeUnit.HOURS.toSeconds(hours);
    totalSeconds += TimeUnit.MINUTES.toSeconds(minutes);
    totalSeconds += TimeUnit.NANOSECONDS.toSeconds(nanos);
    nanos = nanos % IntervalDayTimeUtils.NANOS_PER_SEC;

    this.totalSeconds = totalSeconds;
    this.nanos = nanos;

    normalizeSecondsAndNanos();
}
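
TimeUnit.DAYS.toSeconds(days) converts the interval's day component into seconds so that all components can be accumulated into a single totalSeconds value.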