Example usage for java.util.concurrent TimeUnit DAYS

List of usage examples for java.util.concurrent TimeUnit DAYS

Introduction

In this page you can find the example usage for java.util.concurrent TimeUnit DAYS.

Prototype

TimeUnit DAYS

Click the Source Link below to view the source code for java.util.concurrent TimeUnit DAYS.

Click Source Link

Document

Time unit representing twenty four hours.

Usage

From source file:com.linkedin.pinot.core.plan.maker.MetadataAndDictionaryAggregationPlanMakerTest.java

@BeforeTest
public void buildSegment() throws Exception {
    // Start from a clean slate so stale segments never leak between runs.
    FileUtils.deleteQuietly(INDEX_DIR);
    FileUtils.deleteQuietly(INDEX_DIR_STARTREE);

    // Get resource file path.
    URL resource = getClass().getClassLoader().getResource(AVRO_DATA);
    Assert.assertNotNull(resource);
    String filePath = resource.getFile();

    // Plain segment: inverted indexes on selected columns, no star tree.
    SegmentGeneratorConfig segmentGeneratorConfig = new SegmentGeneratorConfig(buildTestSchema("testTable"));
    segmentGeneratorConfig.setInputFilePath(filePath);
    segmentGeneratorConfig.setTableName("testTable");
    segmentGeneratorConfig.setSegmentName(SEGMENT_NAME);
    segmentGeneratorConfig.setOutDir(INDEX_DIR.getAbsolutePath());
    segmentGeneratorConfig.setInvertedIndexCreationColumns(
            Arrays.asList("column6", "column7", "column11", "column17", "column18"));

    // Build the index segment.
    SegmentIndexCreationDriver driver = new SegmentIndexCreationDriverImpl();
    driver.init(segmentGeneratorConfig);
    driver.build();

    // Star-tree segment: same schema shape (different name), star-tree index
    // enabled instead of inverted indexes, written to a separate directory.
    segmentGeneratorConfig = new SegmentGeneratorConfig(buildTestSchema("testTableStarTree"));
    segmentGeneratorConfig.setInputFilePath(filePath);
    segmentGeneratorConfig.setTableName("testTableStarTree");
    segmentGeneratorConfig.setSegmentName(SEGMENT_NAME_STARTREE);
    segmentGeneratorConfig.setOutDir(INDEX_DIR_STARTREE.getAbsolutePath());
    segmentGeneratorConfig.enableStarTreeIndex(new StarTreeIndexSpec());

    // Build the index segment.
    driver = new SegmentIndexCreationDriverImpl();
    driver.init(segmentGeneratorConfig);
    driver.build();
}

/**
 * Builds the segment schema used by both test segments. The two original
 * inline copies were identical except for the schema name, so the name is
 * the only parameter.
 *
 * @param schemaName name to assign to the schema ("testTable" / "testTableStarTree")
 * @return the fully built {@link Schema}
 */
private static Schema buildTestSchema(String schemaName) {
    return new Schema.SchemaBuilder().setSchemaName(schemaName)
            .addMetric("column1", FieldSpec.DataType.INT).addMetric("column3", FieldSpec.DataType.INT)
            .addSingleValueDimension("column5", FieldSpec.DataType.STRING)
            .addSingleValueDimension("column6", FieldSpec.DataType.INT)
            .addSingleValueDimension("column7", FieldSpec.DataType.INT)
            .addSingleValueDimension("column9", FieldSpec.DataType.INT)
            .addSingleValueDimension("column11", FieldSpec.DataType.STRING)
            .addSingleValueDimension("column12", FieldSpec.DataType.STRING)
            .addMetric("column17", FieldSpec.DataType.INT).addMetric("column18", FieldSpec.DataType.INT)
            .addTime("daysSinceEpoch", 1, TimeUnit.DAYS, DataType.INT).build();
}

From source file:com.linkedin.pinot.queries.BaseMultiValueQueriesTest.java

@BeforeTest
public void buildSegment() throws Exception {
    // Remove any leftover segment data from a previous run.
    FileUtils.deleteQuietly(INDEX_DIR);

    // Locate the Avro test data on the classpath.
    URL avroResource = getClass().getClassLoader().getResource(AVRO_DATA);
    Assert.assertNotNull(avroResource);
    String avroFilePath = avroResource.getFile();

    // Define the segment schema: metrics, single- and multi-value
    // dimensions, and a day-granularity time column.
    Schema segmentSchema = new Schema.SchemaBuilder().setSchemaName("testTable")
            .addMetric("column1", FieldSpec.DataType.INT).addMetric("column2", FieldSpec.DataType.INT)
            .addSingleValueDimension("column3", FieldSpec.DataType.STRING)
            .addSingleValueDimension("column5", FieldSpec.DataType.STRING)
            .addMultiValueDimension("column6", FieldSpec.DataType.INT)
            .addMultiValueDimension("column7", FieldSpec.DataType.INT)
            .addSingleValueDimension("column8", FieldSpec.DataType.INT)
            .addMetric("column9", FieldSpec.DataType.INT).addMetric("column10", FieldSpec.DataType.INT)
            .addTime("daysSinceEpoch", TimeUnit.DAYS, FieldSpec.DataType.INT).build();

    // Configure segment generation, including inverted indexes on a
    // subset of columns.
    SegmentGeneratorConfig generatorConfig = new SegmentGeneratorConfig(segmentSchema);
    generatorConfig.setInputFilePath(avroFilePath);
    generatorConfig.setTableName("testTable");
    generatorConfig.setOutDir(INDEX_DIR.getAbsolutePath());
    generatorConfig
            .setInvertedIndexCreationColumns(Arrays.asList("column3", "column7", "column8", "column9"));

    // Run the creation driver to materialize the segment on disk.
    SegmentIndexCreationDriver creationDriver = new SegmentIndexCreationDriverImpl();
    creationDriver.init(generatorConfig);
    creationDriver.build();
}

From source file:com.linkedin.pinot.server.integration.HelixStarterTest.java

/**
 * Builds a test segment for the given table from the bundled Avro data
 * and writes it into {@code segmentDir}.
 */
private void setupSegment(File segmentDir, String tableName) throws Exception {
    // Resolve the Avro fixture from the classpath.
    String avroPath = TestUtils.getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));

    // Day-granularity time column; all projected columns included (null = all).
    SegmentGeneratorConfig generatorConfig = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
            new File(avroPath), segmentDir, TimeUnit.DAYS, tableName, null);

    // Create and run the segment creation driver.
    SegmentIndexCreationDriver creationDriver = SegmentCreationDriverFactory.get(null);
    creationDriver.init(generatorConfig);
    creationDriver.build();

    LOGGER.info("Table: {} built at path: {}", tableName, segmentDir.getAbsolutePath());
}

From source file:com.pytsoft.cachelock.core.CacheLock.java

/**
 * Acquires the lock./*from   w  w  w .j  a  v a 2  s. c  om*/
        
 * <p>If the lock is not available then the current thread becomes
 * disabled for thread scheduling purposes and lies dormant until the
 * lock has been acquired.
 */
@Override
public void lock() {
    try {
        this.tryLock(10, TimeUnit.DAYS);
    } catch (InterruptedException e) {
        LOG.error(String.format("Unexpected interrupted exception!", e));
    }
}

From source file:org.dcache.util.histograms.CountingHistogramTest.java

@Test
public void binUnitShouldBe3ForMaxValue101Days() throws Exception {
    // Given: a counting histogram over file lifetimes spanning 101 days,
    // with 51 bins of 1-day width, labeled in DAYS.
    givenCountingHistogram();
    givenFilelifetimeValuesFor(101);
    givenBinCountOf(51);
    givenBinUnitOf((double) TimeUnit.DAYS.toMillis(1));
    givenBinLabelOf(TimeUnit.DAYS.name());
    givenDataLabelOf("COUNT");
    givenHistogramTypeOf("File Lifetime Count");
    // When the histogram is configured...
    whenConfigureIsCalled();
    // ...then it builds, and 101 days / 51 bins forces a bin width of 3
    // (presumably rounded up to fit the range — see the class under test).
    assertThatBuildSucceeded();
    assertThatBinWidthIs(3);
}

From source file:org.repodriller.RepositoryMining.java

/**
 * Processes every change set of the given repository, partitioning the
 * commit list across a fixed-size thread pool and blocking until all
 * partitions have been processed.
 */
private void processRepos(SCMRepository repo) {
    log.info("Git repository in " + repo.getPath());

    List<ChangeSet> allCs = range.get(repo.getScm());
    // By default commits are processed newest-first; reverse for oldest-first.
    if (!reverseOrder)
        Collections.reverse(allCs);

    log.info("Total of commits: " + allCs.size());

    log.info("Starting threads: " + threads);
    ExecutorService exec = Executors.newFixedThreadPool(threads);
    // One task per partition; each task handles its commits sequentially.
    List<List<ChangeSet>> partitions = Lists.partition(allCs, threads);
    for (List<ChangeSet> partition : partitions) {

        exec.submit(() -> {
            for (ChangeSet cs : partition) {
                try {
                    processChangeSet(repo, cs);
                } catch (OutOfMemoryError e) {
                    // OOME leaves the JVM in an unreliable state: report the
                    // offending commit and abort the whole run deliberately.
                    System.err.println("Commit " + cs.getId() + " in " + repo.getLastDir() + " caused OOME");
                    e.printStackTrace();
                    System.err.println("goodbye :/");

                    log.fatal("Commit " + cs.getId() + " in " + repo.getLastDir() + " caused OOME", e);
                    log.fatal("Goodbye! ;/");
                    System.exit(-1);
                } catch (Throwable t) {
                    // Any other failure is logged and the next commit proceeds.
                    log.error(t);
                }
            }
        });
    }

    try {
        // Wait (effectively forever) for all submitted partitions to finish.
        exec.shutdown();
        exec.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
    } catch (InterruptedException e) {
        // Restore the interrupt status instead of swallowing it, so callers
        // up the stack can still observe the interruption.
        Thread.currentThread().interrupt();
        log.error("error waiting for threads to terminate in " + repo.getLastDir(), e);
    }
}

From source file:com.linkedin.pinot.queries.BaseSingleValueQueriesTest.java

@BeforeTest
public void buildSegment() throws Exception {
    // Wipe any segment left over from an earlier run.
    FileUtils.deleteQuietly(INDEX_DIR);

    // Resolve the Avro fixture from the classpath.
    URL avroResource = getClass().getClassLoader().getResource(AVRO_DATA);
    Assert.assertNotNull(avroResource);
    String avroFilePath = avroResource.getFile();

    // Schema: INT metrics, mixed-type single-value dimensions, and a
    // day-granularity time column.
    Schema segmentSchema = new Schema.SchemaBuilder().setSchemaName("testTable")
            .addMetric("column1", FieldSpec.DataType.INT).addMetric("column3", FieldSpec.DataType.INT)
            .addSingleValueDimension("column5", FieldSpec.DataType.STRING)
            .addSingleValueDimension("column6", FieldSpec.DataType.INT)
            .addSingleValueDimension("column7", FieldSpec.DataType.INT)
            .addSingleValueDimension("column9", FieldSpec.DataType.INT)
            .addSingleValueDimension("column11", FieldSpec.DataType.STRING)
            .addSingleValueDimension("column12", FieldSpec.DataType.STRING)
            .addMetric("column17", FieldSpec.DataType.INT).addMetric("column18", FieldSpec.DataType.INT)
            .addTime("daysSinceEpoch", TimeUnit.DAYS, FieldSpec.DataType.INT).build();

    // Generation config, with inverted indexes on a subset of columns.
    SegmentGeneratorConfig generatorConfig = new SegmentGeneratorConfig(segmentSchema);
    generatorConfig.setInputFilePath(avroFilePath);
    generatorConfig.setTableName("testTable");
    generatorConfig.setOutDir(INDEX_DIR.getAbsolutePath());
    generatorConfig.setInvertedIndexCreationColumns(
            Arrays.asList("column6", "column7", "column11", "column17", "column18"));

    // Run the creation driver to materialize the segment on disk.
    SegmentIndexCreationDriver creationDriver = new SegmentIndexCreationDriverImpl();
    creationDriver.init(generatorConfig);
    creationDriver.build();
}

From source file:ch.cyberduck.core.openstack.SwiftUrlProvider.java

@Override
public DescriptiveUrlBag toUrl(final Path file) {
    // Collects provider URLs plus temporary signed URLs for a file.
    final DescriptiveUrlBag list = new DescriptiveUrlBag();
    if (file.isFile()) {
        Region region = null;
        try {
            region = regionService.lookup(file);
        } catch (BackgroundException e) {
            // Best effort: a failed region lookup falls back to default URLs below.
            log.warn(String.format("Failure looking up region for %s %s", file, e.getMessage()));
        }
        if (null == region) {
            // No region available: only the default host-based URLs can be produced.
            list.addAll(new DefaultUrlProvider(session.getHost()).toUrl(file));
        } else {
            if (!session.getHost().isDefaultWebURL()) {
                list.addAll(new WebUrlProvider(session.getHost()).toUrl(file));
            }
            // Direct provider storage URL (container + key) for the region.
            list.add(new DescriptiveUrl(
                    URI.create(region.getStorageUrl(containerService.getContainer(file).getName(),
                            containerService.getKey(file)).toString()),
                    DescriptiveUrl.Type.provider, MessageFormat.format(LocaleFactory.localizedString("{0} URL"),
                            session.getHost().getProtocol().getScheme().name().toUpperCase(Locale.ROOT))));
            // In one hour
            list.addAll(this.sign(region, file, this.getExpiry((int) TimeUnit.HOURS.toSeconds(1))));
            // Default signed URL expiring in 24 hours.
            // NOTE(review): actual expiry comes from the "s3.url.expire.seconds"
            // preference, which may not be 24h; also SECONDS.toSeconds(x) is a
            // no-op — TODO confirm whether a different unit was intended.
            list.addAll(this.sign(region, file, this.getExpiry((int) TimeUnit.SECONDS
                    .toSeconds(PreferencesFactory.get().getInteger("s3.url.expire.seconds")))));
            // 1 Week
            list.addAll(this.sign(region, file, this.getExpiry((int) TimeUnit.DAYS.toSeconds(7))));
            // 1 Month
            list.addAll(this.sign(region, file, this.getExpiry((int) TimeUnit.DAYS.toSeconds(30))));
            // 1 Year
            list.addAll(this.sign(region, file, this.getExpiry((int) TimeUnit.DAYS.toSeconds(365))));
        }
    }
    return list;
}

From source file:org.apache.kylin.dict.DictionaryManager.java

/**
 * Builds the dictionary cache: soft-valued, size-capped, entries expire one
 * day after write, and misses are loaded lazily via the attached CacheLoader.
 */
private DictionaryManager(KylinConfig config) {
    this.config = config;
    this.dictCache = CacheBuilder.newBuilder()//
            // Soft values let the GC reclaim dictionaries under memory pressure.
            .softValues()//
            .removalListener(new RemovalListener<String, DictionaryInfo>() {
                @Override
                public void onRemoval(RemovalNotification<String, DictionaryInfo> notification) {
                    // Log evictions with their cause (size, expiry, GC, explicit).
                    DictionaryManager.logger.info("Dict with resource path " + notification.getKey()
                            + " is removed due to " + notification.getCause());
                }
            })//
            .maximumSize(config.getCachedDictMaxEntrySize())//
            .expireAfterWrite(1, TimeUnit.DAYS).build(new CacheLoader<String, DictionaryInfo>() {
                @Override
                public DictionaryInfo load(String key) throws Exception {
                    // Load on miss; NONE_INDICATOR marks "known absent" so the
                    // cache does not retry the load on every access.
                    DictionaryInfo dictInfo = DictionaryManager.this.load(key, true);
                    if (dictInfo == null) {
                        return NONE_INDICATOR;
                    } else {
                        return dictInfo;
                    }
                }
            });
}

From source file:io.hops.hopsworks.common.security.PKI.java

/**
 * Reads a duration from configuration (value plus unit) and normalizes it
 * to whole days.
 *
 * @param rawConfigurationProperty the configuration key holding the duration
 * @return the configured duration converted to days (truncating)
 */
private long getCertificateValidityInDays(String rawConfigurationProperty) {
    final Long configuredValue = settings.getConfTimeValue(rawConfigurationProperty);
    final TimeUnit configuredUnit = settings.getConfTimeTimeUnit(rawConfigurationProperty);
    return TimeUnit.DAYS.convert(configuredValue, configuredUnit);
}