Example usage for java.util.concurrent TimeUnit DAYS

Introduction

On this page you can find example usages of java.util.concurrent TimeUnit.DAYS, collected from real-world open source projects.

Prototype

TimeUnit DAYS

Document

Time unit representing twenty-four hours.
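
Before the usage examples below, here is a minimal standalone sketch (not taken from the sources that follow) of the conversions TimeUnit.DAYS supports; the durations and class name are illustrative only:

import java.util.concurrent.TimeUnit;

public class TimeUnitDaysDemo {
    public static void main(String[] args) {
        // From days to other units.
        long millisPerWeek = TimeUnit.DAYS.toMillis(7);   // 604800000
        long hoursInTwoDays = TimeUnit.DAYS.toHours(2);   // 48

        // From other units to days (conversions truncate).
        long days = TimeUnit.MILLISECONDS.toDays(90000000L);                     // 1
        long sameDays = TimeUnit.DAYS.convert(90000000L, TimeUnit.MILLISECONDS); // 1

        System.out.println(millisPerWeek + " " + hoursInTwoDays + " " + days + " " + sameDays);
    }
}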

Usage

From source file:org.wso2.carbon.device.mgt.core.task.impl.ArchivalTask.java
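
This helper formats a millisecond duration as a human-readable breakdown, using TimeUnit.DAYS.toMillis(days) to subtract the whole-day portion before computing hours, minutes, and seconds.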

private String getDurationBreakdown(long millis) {
    if (millis < 0) {
        throw new IllegalArgumentException("Duration must not be negative!");
    }
    long days = TimeUnit.MILLISECONDS.toDays(millis);
    millis -= TimeUnit.DAYS.toMillis(days);
    long hours = TimeUnit.MILLISECONDS.toHours(millis);
    millis -= TimeUnit.HOURS.toMillis(hours);
    long minutes = TimeUnit.MILLISECONDS.toMinutes(millis);
    millis -= TimeUnit.MINUTES.toMillis(minutes);
    long seconds = TimeUnit.MILLISECONDS.toSeconds(millis);

    StringBuilder sb = new StringBuilder(64);
    sb.append(days);
    sb.append(" Days ");
    sb.append(hours);
    sb.append(" Hours ");
    sb.append(minutes);
    sb.append(" Minutes ");
    sb.append(seconds);
    sb.append(" Seconds");

    return sb.toString();
}

From source file:com.hazelcast.hibernate.app.Executor.java
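
Here TimeUnit.DAYS provides an effectively unbounded timeout: after submitting the tasks, the method waits up to one day on a CountDownLatch.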

public void execute() throws Exception {
    CountDownLatch latch = new CountDownLatch(1000);
    int count;

    Session session = sessionFactory.openSession();
    try {
        Criteria criteria = session.createCriteria(DummyEntity.class);
        criteria.setProjection(Projections.rowCount());
        count = ((Long) criteria.uniqueResult()).intValue();
    } finally {
        session.close();
    }

    if (count == 0) {
        count = 200000;
        insertDummyEntities(count, 100);
    }

    try {
        for (int i = 0; i < latch.getCount(); i++) {
            executorService.submit(new Task(i, sessionFactory, 1000, latch));
        }

        latch.await(1, TimeUnit.DAYS);
    } finally {
        executorService.shutdown();
    }
}

From source file:com.mapr.synth.CommonPointOfCompromiseTest.java
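
This test converts millisecond offsets into day indices with TimeUnit.DAYS.convert(..., TimeUnit.MILLISECONDS) in order to bucket simulated transactions, compromises, and frauds by day.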

@Test
public void testCompromise() throws IOException, ParseException {
    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    long start = df.parse("2014-01-01 00:00:00").getTime();
    SchemaSampler s = new SchemaSampler(
            Resources.asCharSource(Resources.getResource("schema013.json"), Charsets.UTF_8).read());

    long exploitStart = df.parse("2014-01-20 00:00:00").getTime();
    long exploitEnd = df.parse("2014-02-20 00:00:00").getTime();
    int exploitStartDay = (int) TimeUnit.DAYS.convert(exploitStart - start, TimeUnit.MILLISECONDS);

    int[] transactionsByDay = new int[DAYS_COUNTED];
    int[] compromiseByDay = new int[DAYS_COUNTED];
    int[] fraudByDay = new int[DAYS_COUNTED];

    Multiset<Integer> fraudUserCounts = HashMultiset.create();
    Multiset<Integer> nonfraudUserCounts = HashMultiset.create();
    Multiset<Integer> allMerchantCounts = HashMultiset.create();
    int fraudAccounts = 0;
    Set<Integer> merchantHistory = Sets.newHashSet();

    // these collect the evolution of the contingency table for just merchant 0 and are indexed by time relative to exploit window.
    int exploitLength = (int) (TimeUnit.DAYS.convert(exploitEnd - exploitStart, TimeUnit.MILLISECONDS)) + 1;
    //        exploitLength = 5;
    int[] atmTotal = new int[exploitLength];
    int[] atmFraud = new int[exploitLength];
    int[] atmNonFraud = new int[exploitLength];
    int[] nonAtmFraud = new int[exploitLength];
    int[] nonAtmNonFraud = new int[exploitLength];

    for (int userId = 0; userId < USER_COUNT; userId++) {
        JsonNode sample = s.sample();
        merchantHistory.clear();
        boolean userHasFraud = false;

        int[] hasFraudPerUser = new int[exploitLength];

        for (JsonNode record : sample.get("history")) {
            long timestamp = record.get("timestamp").asLong() * 1000;
            int day = (int) ((timestamp - start) / TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS));
            if (day >= DAYS_COUNTED || day >= exploitStartDay + exploitLength) {
                break;
            }
            if (record.get("compromise").asInt() > 0) {
                compromiseByDay[day]++;
            }
            boolean isFraud = record.get("fraud").asInt() > 0;
            if (isFraud) {
                fraudByDay[day]++;
            }
            transactionsByDay[day]++;

            // only record history up to the beginning of the exploit window
            int merchant = record.get("merchant").asInt();
            if (timestamp < exploitStart) {
                merchantHistory.add(merchant);
            }

            // only consider fraud indicators during the exploit window
            if (timestamp >= exploitStart && timestamp <= exploitEnd) {
                // any fraud in the window marks the user
                if (isFraud) {
                    // first time we see fraud indication in exploit window, we set flags for the rest of the window
                    if (!userHasFraud) {
                        int eday = day - exploitStartDay;
                        for (int i = eday; i < exploitLength; i++) {
                            hasFraudPerUser[i] = 1;
                        }
                    }
                    userHasFraud = true;
                }
            }

        }
        // we collect flags for each day and then only count this user once.  Necessary because multiple
        // transactions can occur on each day and we don't want to count all of them.
        int atmInHistory = merchantHistory.contains(0) ? 1 : 0;
        for (int day = 0; day < exploitLength; day++) {
            atmTotal[day] += atmInHistory;
            atmFraud[day] += atmInHistory * hasFraudPerUser[day];
            atmNonFraud[day] += atmInHistory * (1 - hasFraudPerUser[day]);
            nonAtmFraud[day] += (1 - atmInHistory) * hasFraudPerUser[day];
            nonAtmNonFraud[day] += (1 - atmInHistory) * (1 - hasFraudPerUser[day]);
        }

        if (userHasFraud) {
            fraudAccounts++;
            for (Integer merchant : merchantHistory) {
                fraudUserCounts.add(merchant);
                allMerchantCounts.add(merchant);
            }
        } else {
            for (Integer merchant : merchantHistory) {
                nonfraudUserCounts.add(merchant);
                allMerchantCounts.add(merchant);
            }
        }
    }

    int k1 = fraudAccounts;
    int k2 = USER_COUNT - k1;

    try (PrintStream out = new PrintStream(new FileOutputStream("scores.tsv"))) {
        out.printf("merchant\tk11\tk12\tk21\tk22\tk.1\tscore\n");
        for (Integer merchant : allMerchantCounts.elementSet()) {
            int k11 = fraudUserCounts.count(merchant);
            int k12 = k1 - k11;
            int k21 = nonfraudUserCounts.count(merchant);
            int k22 = k2 - k21;
            out.printf("%d\t%d\t%d\t%d\t%d\t%d\t%.1f\n", merchant, k11, k12, k21, k22,
                    allMerchantCounts.count(merchant),
                    LogLikelihood.rootLogLikelihoodRatio(k11, k12, k21, k22));
        }
    }

    try (PrintStream out = new PrintStream(new FileOutputStream("counts.tsv"))) {
        out.printf("day\tcompromises\tfrauds\ttransactions\n");

        for (int i = 0; i < compromiseByDay.length; i++) {
            out.printf("%d\t%d\t%d\t%d\n", i, compromiseByDay[i], fraudByDay[i], transactionsByDay[i]);
        }
    }

    try (PrintStream out = new PrintStream(new FileOutputStream("growth.tsv"))) {
        out.printf("day\tatm.total\tk11\tk12\tk21\tk22\tscore\n");

        for (int i = 0; i < exploitLength; i++) {
            int k11 = atmFraud[i];
            int k12 = nonAtmFraud[i];
            int k21 = atmNonFraud[i];
            int k22 = nonAtmNonFraud[i];
            out.printf("%d\t%d\t%d\t%d\t%d\t%d\t%.1f\n", i, atmTotal[i], k11, k12, k21, k22,
                    LogLikelihood.rootLogLikelihoodRatio(k11, k12, k21, k22));
        }
    }

}

From source file:org.eclipse.skalli.core.rest.admin.StatisticsQueryTest.java
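
In this test, TimeUnit.DAYS acts as the default unit when a period query string carries no unit suffix.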

@Test
public void testNegativePeriod() throws Exception {
    assertPeriodQuery("-1M", 1, TimeUnit.MINUTES);
    assertPeriodQuery("-12h", 12, TimeUnit.HOURS);
    assertPeriodQuery("-1", 1, TimeUnit.DAYS);
    assertPeriodQuery("-4711", 4711, TimeUnit.DAYS);
    assertPeriodQuery("-H", 1, TimeUnit.HOURS);
    assertPeriodQuery("-", 1, TimeUnit.DAYS);
    assertPeriodQuery("-0", 1, TimeUnit.DAYS);
}

From source file:org.sonar.batch.bootstrap.GlobalTempFolderProviderTest.java
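
TimeUnit.DAYS.toMillis(100) backdates the creation time of temp folders by 100 days, so the test can verify that the provider cleans up stale folders.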

@Test
public void cleanUpOld() throws IOException {
    long creationTime = System.currentTimeMillis() - TimeUnit.DAYS.toMillis(100);
    File workingDir = temp.newFolder();

    for (int i = 0; i < 3; i++) {
        File tmp = new File(workingDir, ".sonartmp_" + i);
        tmp.mkdirs();
        setFileCreationDate(tmp, creationTime);
    }

    tempFolderProvider.provide(new GlobalProperties(
            ImmutableMap.of(CoreProperties.GLOBAL_WORKING_DIRECTORY, workingDir.getAbsolutePath())));
    // this also checks that all other temp folders were deleted
    assertThat(getCreatedTempDir(workingDir)).exists();

    FileUtils.deleteQuietly(workingDir);
}

From source file:org.sonar.batch.bootstrap.TempFolderProviderTest.java
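
The same pattern as above: temp folders backdated by TimeUnit.DAYS.toMillis(100) should be cleaned up when the provider runs.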

@Test
public void cleanUpOld() throws IOException {
    long creationTime = System.currentTimeMillis() - TimeUnit.DAYS.toMillis(100);
    File workingDir = temp.newFolder();

    for (int i = 0; i < 3; i++) {
        File tmp = new File(workingDir, ".sonartmp_" + i);
        tmp.mkdirs();
        setFileCreationDate(tmp, creationTime);
    }

    tempFolderProvider.provide(new BootstrapProperties(
            ImmutableMap.of(CoreProperties.GLOBAL_WORKING_DIRECTORY, workingDir.getAbsolutePath())));
    // this also checks that all other temp folders were deleted
    assertThat(getCreatedTempDir(workingDir)).exists();

    FileUtils.deleteQuietly(workingDir);
}

From source file:com.bodybuilding.argos.controller.StreamController.java
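
TimeUnit.DAYS.toMillis(45) expresses a long server-sent-events timeout in the milliseconds that the SseEmitter constructor expects.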

@RequestMapping("/cluster.stream")
public SseEmitter streamMetrics() {
    final SseEmitter emitter = new SseEmitter(TimeUnit.DAYS.toMillis(45));
    SseEmitterUtil.bindObservable(emitter, streamObservable);
    return emitter;
}

From source file:tools.xor.logic.DefaultCloneDataType.java
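
This test advances a Date by TimeUnit.DAYS.toMillis(1) to verify that the persisted task holds a clone of the date rather than a reference to the original object.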

public void cloneDataType() {

    // create person
    Technician owner = new Technician();
    owner.setName("TOMMY_HILFIGHER");
    owner.setDisplayName("Tommy Hilfigher");
    owner.setDescription("A famous fashion designer");
    owner.setUserName("thilf");
    owner.setSkill("fashion design");
    owner = (Technician) aggregateService.create(owner, new Settings());
    Person person = (Person) aggregateService.read(owner, new Settings());

    // Create Task
    Task task = new Task();
    task.setName("CREATE_GOWN");
    task.setDisplayName("Create wedding gown");
    task.setDescription("Design a wedding gown");
    task.setAssignedTo(person);
    Date finish = new Date();
    task.setScheduledFinish(finish);
    task = (Task) aggregateService.create(task, getSettings());
    task = (Task) aggregateService.read(task, getSettings());

    // Ensure task is persisted
    assert (task.getId() != null);
    assert (task.getAssignedTo() != null);
    assert (task.getAssignedTo().getId() != null);

    // Ensure that changing the date via the 'from' instance does not affect
    // the date object in the 'to' instance, i.e. that a clone was made and
    // the same object is not referenced.
    finish.setTime(finish.getTime() + TimeUnit.DAYS.toMillis(1));
    assert (finish.getTime() != task.getScheduledFinish().getTime());
}

From source file:com.linkedin.pinot.core.startree.OffHeapStarTreeBuilderTest.java
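
Here TimeUnit.DAYS declares the granularity of the daysSinceEpoch time column in the star-tree schema.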

private void testSimpleCore(int numDimensions, int numMetrics, int numSkipMaterializationDimensions)
        throws Exception {
    int ROWS = (int) MathUtils.factorial(numDimensions);
    Schema schema = new Schema();
    List<String> dimensionsSplitOrder = new ArrayList<>();
    Set<String> skipMaterializationDimensions = new HashSet<>();
    for (int i = 0; i < numDimensions; i++) {
        String dimName = "d" + (i + 1);
        DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(dimName, DataType.STRING, true);
        schema.addField(dimensionFieldSpec);

        if (i < (numDimensions - numSkipMaterializationDimensions)) {
            dimensionsSplitOrder.add(dimName);
        } else {
            skipMaterializationDimensions.add(dimName);
        }
    }

    schema.addField(new TimeFieldSpec("daysSinceEpoch", DataType.INT, TimeUnit.DAYS));
    for (int i = 0; i < numMetrics; i++) {
        String metricName = "m" + (i + 1);
        MetricFieldSpec metricFieldSpec = new MetricFieldSpec(metricName, DataType.INT);
        schema.addField(metricFieldSpec);
    }
    StarTreeBuilderConfig builderConfig = new StarTreeBuilderConfig();
    builderConfig.setOutDir(TEMP_DIR);
    builderConfig.setSchema(schema);
    builderConfig.setDimensionsSplitOrder(dimensionsSplitOrder);
    builderConfig.setSkipMaterializationDimensions(skipMaterializationDimensions);
    builderConfig.setMaxNumLeafRecords(10);

    OffHeapStarTreeBuilder builder = new OffHeapStarTreeBuilder();
    builder.init(builderConfig);
    HashMap<String, Object> map = new HashMap<>();
    for (int row = 0; row < ROWS; row++) {
        for (int i = 0; i < numDimensions; i++) {
            String dimName = schema.getDimensionFieldSpecs().get(i).getName();
            map.put(dimName, dimName + "-v" + row % (numDimensions - i));
        }
        //time
        map.put("daysSinceEpoch", 1);
        for (int i = 0; i < numMetrics; i++) {
            String metName = schema.getMetricFieldSpecs().get(i).getName();
            map.put(metName, 1);
        }
        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        builder.append(genericRow);
    }
    builder.build();
    int totalDocs = builder.getTotalRawDocumentCount() + builder.getTotalAggregateDocumentCount();
    Iterator<GenericRow> iterator = builder.iterator(builder.getTotalRawDocumentCount(), totalDocs);
    while (iterator.hasNext()) {
        GenericRow row = iterator.next();
        for (String skipMaterializationDimension : skipMaterializationDimensions) {
            String rowValue = (String) row.getValue(skipMaterializationDimension);
            Assert.assertEquals(rowValue, "null");
        }
    }

    FileUtils.deleteDirectory(TEMP_DIR);
}

From source file:com.rackspacecloud.blueflood.outputs.handlers.HttpMetricNamesHandlerIntegrationTest.java
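
TimeUnit.DAYS is paired with TimeValue to attach a one-day duration (the metric's TTL) to each test metric.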

@Before
public void setup() throws Exception {

    super.setUp();

    // setup metrics to be searchable
    MetricsRW metricsRW = IOContainer.fromConfig().getBasicMetricsRW();

    final List<IMetric> metrics = new ArrayList<IMetric>();
    for (int i = 0; i < numMetrics; i++) {
        long curMillis = baseMillis + i;
        Locator locator = Locator.createLocatorFromPathComponents(tenantId, metricPrefix + "." + i);
        Metric metric = new Metric(locator, getRandomIntMetricValue(), curMillis,
                new TimeValue(1, TimeUnit.DAYS), locatorToUnitMap.get(locator));
        metrics.add(metric);
    }

    elasticIO.insertDiscovery(new ArrayList<IMetric>(metrics));
    esSetup.client().admin().indices().prepareRefresh().execute().actionGet();

    metricsRW.insertMetrics(metrics);
}