Example usage for java.util.concurrent TimeUnit MINUTES

Introduction

This page lists example usages of java.util.concurrent TimeUnit.MINUTES, drawn from open-source projects.

Prototype

TimeUnit MINUTES

Document

Time unit representing sixty seconds.
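
For orientation, here is a minimal, self-contained sketch of the conversions and timed waits that the examples below rely on. The class and variable names are illustrative only; the TimeUnit calls themselves are standard JDK API.

import java.util.concurrent.TimeUnit;

public class MinutesDemo {
    public static void main(String[] args) throws InterruptedException {
        // Convert a duration given in minutes into other units.
        long seconds = TimeUnit.MINUTES.toSeconds(2); // 120
        long millis = TimeUnit.MINUTES.toMillis(2);   // 120000

        // Convert the other way: 180 seconds expressed in minutes.
        long minutes = TimeUnit.MINUTES.convert(180L, TimeUnit.SECONDS); // 3

        System.out.printf("%ds, %dms, %dmin%n", seconds, millis, minutes);

        // Sleep for a duration expressed in minutes (zero here so the demo exits).
        TimeUnit.MINUTES.sleep(0);
    }
}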

Usage

From source file:rmblworx.tools.timey.AlarmTest.java

/**
 * Test method for {@link Alarm#Alarm(IAlarmService, int, TimeUnit)}.
 */
@Test(expected = ValueMinimumArgumentException.class)
public final void testShouldFailBecauseDelayIsLessThanOne() {
    this.effectiveDelegate = new Alarm(this.service, 0, TimeUnit.MINUTES);
}

From source file:com.couchbase.sqoop.mapreduce.db.CouchbaseRecordReader.java

private void createTapStreamTimeout(final TapStream tapStream, final long duration) {
    if (duration > 0) {
        final Runnable r = new Runnable() {
            @Override
            public void run() {
                try {
                    Thread.sleep(TimeUnit.MILLISECONDS.convert(duration, TimeUnit.MINUTES));
                } catch (final InterruptedException e) {
                    Thread.currentThread().interrupt();
                    CouchbaseRecordReader.LOG.error("Tap stream closing early. Reason: " + e.getMessage());
                }
                tapStream.cancel();
            }
        };
        new Thread(r).start();
    }
}
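
Note that TimeUnit.MINUTES.sleep(duration) collapses the convert-then-sleep pair above into a single call; per the JDK documentation it performs a Thread.sleep using this time unit. A more direct equivalent of the sleep inside run(), keeping the original's interrupt handling:

try {
    TimeUnit.MINUTES.sleep(duration); // same effect as Thread.sleep(MILLISECONDS.convert(duration, MINUTES))
} catch (final InterruptedException e) {
    Thread.currentThread().interrupt();
}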

From source file:co.cask.hydrator.plugin.ValueMapperTest.java

@Test
public void testEmptyAndNull() throws Exception {

    String inputTable = "input_table_test_Empty_Null";
    ETLStage source = new ETLStage("source", MockSource.getPlugin(inputTable));

    Map<String, String> sourceproperties = new ImmutableMap.Builder<String, String>()
            .put("mapping", "designationid:designation_lookup_table_test_Empty_Null:designationName")
            .put("defaults", "designationid:DEFAULTID").build();

    ETLStage transform = new ETLStage("transform",
            new ETLPlugin("ValueMapper", Transform.PLUGIN_TYPE, sourceproperties, null));

    String sinkTable = "output_table_test_Empty_Null";
    ETLStage sink = new ETLStage("sink", MockSink.getPlugin(sinkTable));

    ETLBatchConfig etlConfig = ETLBatchConfig.builder("* * * * *").addStage(source).addStage(transform)
            .addStage(sink).addConnection(source.getName(), transform.getName())
            .addConnection(transform.getName(), sink.getName()).build();

    AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(ETLBATCH_ARTIFACT, etlConfig);
    Id.Application appId = Id.Application.from(Id.Namespace.DEFAULT, "valuemappertest_test_Empty_Null");
    ApplicationManager appManager = deployApplication(appId, appRequest);

    addDatasetInstance(KeyValueTable.class.getName(), "designation_lookup_table_test_Empty_Null");
    DataSetManager<KeyValueTable> dataSetManager = getDataset("designation_lookup_table_test_Empty_Null");
    KeyValueTable keyValueTable = dataSetManager.get();
    keyValueTable.write("1".getBytes(Charsets.UTF_8), "SE".getBytes(Charsets.UTF_8));
    keyValueTable.write("2".getBytes(Charsets.UTF_8), "SSE".getBytes(Charsets.UTF_8));
    keyValueTable.write("3".getBytes(Charsets.UTF_8), "ML".getBytes(Charsets.UTF_8));
    dataSetManager.flush();

    DataSetManager<Table> inputManager = getDataset(inputTable);
    List<StructuredRecord> input = ImmutableList.of(
            StructuredRecord.builder(SOURCE_SCHEMA).set(ID, "100").set(NAME, "John").set(SALARY, "1000")
                    .set(DESIGNATIONID, null).build(),
            StructuredRecord.builder(SOURCE_SCHEMA).set(ID, "101").set(NAME, "Kerry").set(SALARY, "1030")
                    .set(DESIGNATIONID, "2").build(),
            StructuredRecord.builder(SOURCE_SCHEMA).set(ID, "102").set(NAME, "Mathew").set(SALARY, "1230")
                    .set(DESIGNATIONID, "").build(),
            StructuredRecord.builder(SOURCE_SCHEMA).set(ID, "103").set(NAME, "Allie").set(SALARY, "2000")
                    .set(DESIGNATIONID, "4").build());

    MockSource.writeInput(inputManager, input);

    MapReduceManager mrManager = appManager.getMapReduceManager(ETLMapReduce.NAME);
    mrManager.start();
    mrManager.waitForFinish(5, TimeUnit.MINUTES);

    DataSetManager<Table> outputManager = getDataset(sinkTable);
    List<StructuredRecord> outputRecords = MockSink.readOutput(outputManager);
    Map<String, String> nameDesignationMap = new HashMap<String, String>();
    nameDesignationMap.put("John", "DEFAULTID");
    nameDesignationMap.put("Kerry", "SSE");
    nameDesignationMap.put("Mathew", "DEFAULTID");
    nameDesignationMap.put("Allie", "DEFAULTID");

    Assert.assertEquals(4, outputRecords.size());
    Assert.assertEquals(nameDesignationMap.get(outputRecords.get(0).get(NAME)),
            outputRecords.get(0).get(DESIGNATIONNAME));
    Assert.assertEquals(nameDesignationMap.get(outputRecords.get(1).get(NAME)),
            outputRecords.get(1).get(DESIGNATIONNAME));
    Assert.assertEquals(nameDesignationMap.get(outputRecords.get(2).get(NAME)),
            outputRecords.get(2).get(DESIGNATIONNAME));
    Assert.assertEquals(nameDesignationMap.get(outputRecords.get(3).get(NAME)),
            outputRecords.get(3).get(DESIGNATIONNAME));

}

From source file:com.gooddata.warehouse.WarehouseServiceAT.java

@Test(groups = "warehouse", dependsOnMethods = "addS3Credentials")
public void updateS3Credentials() {
    final DateTime lastUpdated = s3Credentials.getUpdated();
    s3Credentials.setSecretKey("newSecretKey");
    s3Credentials = service.updateS3Credentials(s3Credentials).get(1, TimeUnit.MINUTES);

    assertThat(s3Credentials, notNullValue());
    assertThat(s3Credentials.getUpdated(), is(not(lastUpdated)));
}
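
The get(1, TimeUnit.MINUTES) call above is the usual timed wait on a future: block for at most one minute, then throw TimeoutException instead of waiting forever. A self-contained sketch of the same idiom with a plain java.util.concurrent.Future (the executor and task here are illustrative, not part of the GoodData client):

import java.util.concurrent.*;

public class TimedGetDemo {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        Future<String> result = pool.submit(() -> "done");
        try {
            // Block for at most one minute rather than indefinitely.
            System.out.println(result.get(1, TimeUnit.MINUTES));
        } catch (TimeoutException e) {
            result.cancel(true); // give up and interrupt the task
        } finally {
            pool.shutdown();
        }
    }
}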

From source file:info.raack.appliancelabeler.service.DefaultDataService.java

@PostConstruct
public void init() throws JAXBException, SAXException {
    lastDatapoints = new HashMap<String, Long>();

    // initialize email reloader daily
    scheduler = Executors.newScheduledThreadPool(1);
    scheduler.scheduleWithFixedDelay(new ReloadUserEmailRunnable(), 0, 5, TimeUnit.MINUTES);
}
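
Both numeric arguments, the initial delay of 0 and the period of 5, are interpreted in the unit given last, TimeUnit.MINUTES. With scheduleWithFixedDelay the five minutes are counted from the end of each run; scheduleAtFixedRate would instead target a fixed five-minute cadence regardless of how long each run takes. A minimal sketch (the task is illustrative):

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class FixedDelayDemo {
    public static void main(String[] args) {
        ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
        // Run immediately, then wait five minutes after each run finishes.
        scheduler.scheduleWithFixedDelay(() -> System.out.println("reload emails"), 0, 5, TimeUnit.MINUTES);
    }
}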

From source file:org.mitre.openid.connect.web.DynamicClientRegistrationEndpoint.java

/**
 * Create a new Client, issue a client ID, and create a registration access token.
 * @param jsonString the client registration request as a JSON document
 * @param m the model to populate with the response
 * @return the name of the view to render
 */
@RequestMapping(method = RequestMethod.POST, consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
public String registerNewClient(@RequestBody String jsonString, Model m) {

    ClientDetailsEntity newClient = null;
    try {
        newClient = ClientDetailsEntityJsonProcessor.parse(jsonString);
    } catch (JsonSyntaxException e) {
        // the submitted JSON didn't parse; this is a bad request
        logger.error("registerNewClient failed; submitted JSON is malformed");
        m.addAttribute(HttpCodeView.CODE, HttpStatus.BAD_REQUEST); // http 400
        return HttpCodeView.VIEWNAME;
    }

    if (newClient != null) {
        // it parsed!

        //
        // Now do some post-processing consistency checks on it
        //

        // clear out any spurious id/secret (clients don't get to pick)
        newClient.setClientId(null);
        newClient.setClientSecret(null);

        // do validation on the fields
        try {
            newClient = validateScopes(newClient);
            newClient = validateResponseTypes(newClient);
            newClient = validateGrantTypes(newClient);
            newClient = validateRedirectUris(newClient);
            newClient = validateAuth(newClient);
        } catch (ValidationException ve) {
            // validation failed, return an error
            m.addAttribute(JsonErrorView.ERROR, ve.getError());
            m.addAttribute(JsonErrorView.ERROR_MESSAGE, ve.getErrorDescription());
            m.addAttribute(HttpCodeView.CODE, ve.getStatus());
            return JsonErrorView.VIEWNAME;
        }

        if (newClient.getTokenEndpointAuthMethod() == null) {
            newClient.setTokenEndpointAuthMethod(AuthMethod.SECRET_BASIC);
        }

        if (newClient.getTokenEndpointAuthMethod() == AuthMethod.SECRET_BASIC
                || newClient.getTokenEndpointAuthMethod() == AuthMethod.SECRET_JWT
                || newClient.getTokenEndpointAuthMethod() == AuthMethod.SECRET_POST) {

            // we need to generate a secret
            newClient = clientService.generateClientSecret(newClient);
        }

        // set some defaults for token timeouts
        if (config.isHeartMode()) {
            // heart mode has different defaults depending on primary grant type
            if (newClient.getGrantTypes().contains("authorization_code")) {
                newClient.setAccessTokenValiditySeconds((int) TimeUnit.HOURS.toSeconds(1)); // access tokens good for 1hr
                newClient.setIdTokenValiditySeconds((int) TimeUnit.MINUTES.toSeconds(5)); // id tokens good for 5min
                newClient.setRefreshTokenValiditySeconds((int) TimeUnit.HOURS.toSeconds(24)); // refresh tokens good for 24hr
            } else if (newClient.getGrantTypes().contains("implicit")) {
                newClient.setAccessTokenValiditySeconds((int) TimeUnit.MINUTES.toSeconds(15)); // access tokens good for 15min
                newClient.setIdTokenValiditySeconds((int) TimeUnit.MINUTES.toSeconds(5)); // id tokens good for 5min
                newClient.setRefreshTokenValiditySeconds(0); // no refresh tokens
            } else if (newClient.getGrantTypes().contains("client_credentials")) {
                newClient.setAccessTokenValiditySeconds((int) TimeUnit.HOURS.toSeconds(6)); // access tokens good for 6hr
                newClient.setIdTokenValiditySeconds(0); // no id tokens
                newClient.setRefreshTokenValiditySeconds(0); // no refresh tokens
            }
        } else {
            newClient.setAccessTokenValiditySeconds((int) TimeUnit.HOURS.toSeconds(1)); // access tokens good for 1hr
            newClient.setIdTokenValiditySeconds((int) TimeUnit.MINUTES.toSeconds(10)); // id tokens good for 10min
            newClient.setRefreshTokenValiditySeconds(null); // refresh tokens good until revoked
        }

        // this client has been dynamically registered (obviously)
        newClient.setDynamicallyRegistered(true);

        // this client can't do token introspection
        newClient.setAllowIntrospection(false);

        // now save it
        try {
            ClientDetailsEntity savedClient = clientService.saveNewClient(newClient);

            // generate the registration access token
            OAuth2AccessTokenEntity token = connectTokenService.createRegistrationAccessToken(savedClient);
            token = tokenService.saveAccessToken(token);

            // send it all out to the view

            RegisteredClient registered = new RegisteredClient(savedClient, token.getValue(), config.getIssuer()
                    + "register/" + UriUtils.encodePathSegment(savedClient.getClientId(), "UTF-8"));
            m.addAttribute("client", registered);
            m.addAttribute(HttpCodeView.CODE, HttpStatus.CREATED); // http 201

            return ClientInformationResponseView.VIEWNAME;
        } catch (UnsupportedEncodingException e) {
            logger.error("Unsupported encoding", e);
            m.addAttribute(HttpCodeView.CODE, HttpStatus.INTERNAL_SERVER_ERROR);
            return HttpCodeView.VIEWNAME;
        } catch (IllegalArgumentException e) {
            logger.error("Couldn't save client", e);

            m.addAttribute(JsonErrorView.ERROR, "invalid_client_metadata");
            m.addAttribute(JsonErrorView.ERROR_MESSAGE,
                    "Unable to save client due to invalid or inconsistent metadata.");
            m.addAttribute(HttpCodeView.CODE, HttpStatus.BAD_REQUEST); // http 400

            return JsonErrorView.VIEWNAME;
        }
    } else {
        // didn't parse, this is a bad request
        logger.error("registerNewClient failed; submitted JSON is malformed");
        m.addAttribute(HttpCodeView.CODE, HttpStatus.BAD_REQUEST); // http 400

        return HttpCodeView.VIEWNAME;
    }

}

From source file:org.jberet.support.io.JacksonCsvItemReaderWriterTest.java

private void testReadWrite0(final String resource, final String writeResource, final String start,
        final String end, final String beanType, final boolean useHeader, final String columns,
        final String columnSeparator, final String quoteChar, final String expect, final String forbid)
        throws Exception {
    final Properties params = createParams(CsvProperties.BEAN_TYPE_KEY, beanType);
    params.setProperty(CsvProperties.RESOURCE_KEY, resource);

    if (start != null) {
        params.setProperty("start", start);
    }
    if (end != null) {
        params.setProperty("end", end);
    }
    if (useHeader) {
        params.setProperty("useHeader", String.valueOf(useHeader));
    }
    if (columns != null) {
        params.setProperty("columns", columns);
    }
    if (columnSeparator != null) {
        params.setProperty("columnSeparator", columnSeparator);
    }
    if (quoteChar != null) {
        params.setProperty("quoteChar", quoteChar);
    }

    if (lineSeparator != null) {
        params.setProperty("lineSeparator", lineSeparator);
    }
    if (escapeChar != null) {
        params.setProperty("escapeChar", escapeChar);
    }
    if (skipFirstDataRow != null) {
        params.setProperty("skipFirstDataRow", skipFirstDataRow);
    }
    if (nullValue != null) {
        params.setProperty("nullValue", nullValue);
    }

    if (jsonParserFeatures != null) {
        params.setProperty("jsonParserFeatures", jsonParserFeatures);
    }
    if (csvParserFeatures != null) {
        params.setProperty("csvParserFeatures", csvParserFeatures);
    }
    if (deserializationProblemHandlers != null) {
        params.setProperty("deserializationProblemHandlers", deserializationProblemHandlers);
    }
    if (inputDecorator != null) {
        params.setProperty("inputDecorator", inputDecorator);
    }

    if (jsonGeneratorFeatures != null) {
        params.setProperty("jsonGeneratorFeatures", jsonGeneratorFeatures);
    }
    if (csvGeneratorFeatures != null) {
        params.setProperty("csvGeneratorFeatures", csvGeneratorFeatures);
    }
    if (outputDecorator != null) {
        params.setProperty("outputDecorator", outputDecorator);
    }

    final File writeResourceFile = new File(tmpdir, writeResource);
    params.setProperty("writeResource", writeResourceFile.getPath());

    final long jobExecutionId = jobOperator.start(jobName, params);
    final JobExecutionImpl jobExecution = (JobExecutionImpl) jobOperator.getJobExecution(jobExecutionId);
    jobExecution.awaitTermination(waitTimeoutMinutes, TimeUnit.MINUTES);
    Assert.assertEquals(BatchStatus.COMPLETED, jobExecution.getBatchStatus());
    validate(writeResourceFile, expect, forbid);
}

From source file:com.comcast.cdn.traffic_control.traffic_router.core.dns.ZoneManager.java

protected static void initZoneCache(final TrafficRouter tr) {
    synchronized (ZoneManager.class) {
        final CacheRegister cacheRegister = tr.getCacheRegister();
        final JSONObject config = cacheRegister.getConfig();

        int poolSize = 1;
        final double scale = config.optDouble("zonemanager.threadpool.scale", 0.75);
        final int cores = Runtime.getRuntime().availableProcessors();

        if (cores > 2) {
            final Double s = Math.floor((double) cores * scale);

            if (s.intValue() > 1) {
                poolSize = s.intValue();
            }
        }

        final ExecutorService initExecutor = Executors.newFixedThreadPool(poolSize);

        final ExecutorService ze = Executors.newFixedThreadPool(poolSize);
        final ScheduledExecutorService me = Executors.newScheduledThreadPool(2); // two threads: one refreshes static zones, one refreshes dynamic zones
        final int maintenanceInterval = config.optInt("zonemanager.cache.maintenance.interval", 300); // default 5 minutes
        final String dspec = "expireAfterAccess="
                + config.optString("zonemanager.dynamic.response.expiration", "300s"); // default to 5 minutes

        final LoadingCache<ZoneKey, Zone> dzc = createZoneCache(ZoneCacheType.DYNAMIC,
                CacheBuilderSpec.parse(dspec));
        final LoadingCache<ZoneKey, Zone> zc = createZoneCache(ZoneCacheType.STATIC);

        initZoneDirectory();

        try {
            LOGGER.info("Generating zone data");
            generateZones(tr, zc, dzc, initExecutor);
            initExecutor.shutdown();
            initExecutor.awaitTermination(5, TimeUnit.MINUTES);
            LOGGER.info("Zone generation complete");
        } catch (final InterruptedException ex) {
            LOGGER.warn("Initialization of zone data exceeded time limit of 5 minutes; continuing", ex);
        } catch (IOException ex) {
            LOGGER.fatal("Caught fatal exception while generating zone data!", ex);
        }

        me.scheduleWithFixedDelay(getMaintenanceRunnable(dzc, ZoneCacheType.DYNAMIC, maintenanceInterval), 0,
                maintenanceInterval, TimeUnit.SECONDS);
        me.scheduleWithFixedDelay(getMaintenanceRunnable(zc, ZoneCacheType.STATIC, maintenanceInterval), 0,
                maintenanceInterval, TimeUnit.SECONDS);

        final ExecutorService tze = ZoneManager.zoneExecutor;
        final ScheduledExecutorService tme = ZoneManager.zoneMaintenanceExecutor;
        final LoadingCache<ZoneKey, Zone> tzc = ZoneManager.zoneCache;
        final LoadingCache<ZoneKey, Zone> tdzc = ZoneManager.dynamicZoneCache;

        ZoneManager.zoneExecutor = ze;
        ZoneManager.zoneMaintenanceExecutor = me;
        ZoneManager.dynamicZoneCache = dzc;
        ZoneManager.zoneCache = zc;

        if (tze != null) {
            tze.shutdownNow();
        }

        if (tme != null) {
            tme.shutdownNow();
        }

        if (tzc != null) {
            tzc.invalidateAll();
        }

        if (tdzc != null) {
            tdzc.invalidateAll();
        }
    }
}
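
The initExecutor.awaitTermination(5, TimeUnit.MINUTES) call above is one half of the standard executor shutdown idiom; awaitTermination also returns a boolean that callers can check to decide whether to force termination. A generic sketch of the full idiom, detached from ZoneManager's fields:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

final class ExecutorShutdown {
    static void shutdownAndWait(ExecutorService pool) throws InterruptedException {
        pool.shutdown(); // stop accepting new tasks
        // Give queued work up to five minutes to drain, then force termination.
        if (!pool.awaitTermination(5, TimeUnit.MINUTES)) {
            pool.shutdownNow();
        }
    }
}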

From source file:datameer.awstasks.ant.ec2.Ec2LaunchTask.java

@Override
public void doExecute(AmazonEC2 ec2) throws BuildException {
    LOG.info("executing " + getClass().getSimpleName() + " with groupName '" + _groupName + "'");
    try {
        boolean instancesRunning = Ec2Util.findByGroup(ec2, _groupName, false, InstanceStateName.Pending,
                InstanceStateName.Running) != null;
        if (!isReuseRunningInstances() && instancesRunning) {
            throw new IllegalStateException("found already running instances for group '" + _groupName + "'");
        }
        if (!Ec2Util.groupExists(ec2, _groupName)) {
            LOG.info("group '" + _groupName + "' does not exists - creating it");
            String groupDescription = getGroupDescription();
            if (groupDescription == null) {
                throw new BuildException("must specify groupDescription");
            }
            ec2.createSecurityGroup(new CreateSecurityGroupRequest(_groupName, groupDescription));
        }

        List<String> securityGroups = Arrays.asList("default", _groupName);
        List<IpPermission> existingPermissions = Ec2Util.getPermissions(ec2, securityGroups);
        for (GroupPermission groupPermission : _groupPermissions) {
            if (groupPermission.getToPort() == -1) {
                groupPermission.setToPort(groupPermission.getFromPort());
            }
            if (!permissionExists(groupPermission, existingPermissions)) {
                LOG.info("did not found permission '" + groupPermission + "' - creating it...");
                ec2.authorizeSecurityGroupIngress(new AuthorizeSecurityGroupIngressRequest()
                        .withGroupName(_groupName).withIpPermissions(groupPermission.toIpPermission()));
            }
        }

        InstanceGroup instanceGroup = new InstanceGroupImpl(ec2);
        RunInstancesRequest launchConfiguration = new RunInstancesRequest(_ami, _instanceCount, _instanceCount);
        if (_kernelId != null) {
            launchConfiguration.setKernelId(_kernelId);
        }
        if (_ramDiskId != null) {
            launchConfiguration.setRamdiskId(_ramDiskId);
        }
        launchConfiguration.setKeyName(_privateKeyName);
        launchConfiguration.setSecurityGroups(securityGroups);
        if (_userData != null) {
            launchConfiguration.setUserData(Base64.encodeBase64String(_userData.getBytes()));
        }
        if (_instanceType != null) {
            launchConfiguration.setInstanceType(_instanceType);
        }
        launchConfiguration.setPlacement(new Placement(_availabilityZone));
        if (instancesRunning) {
            instanceGroup.connectTo(_groupName);
        } else {
            instanceGroup.launch(launchConfiguration, TimeUnit.MINUTES, _maxStartTime);
            if (_instanceName != null) {
                LOG.info("tagging instances with name '" + _instanceName + " [<idx>]'");
                int idx = 1;
                for (Instance instance : instanceGroup.getInstances(false)) {
                    CreateTagsRequest createTagsRequest = new CreateTagsRequest();
                    createTagsRequest.withResources(instance.getInstanceId()) //
                            .withTags(new Tag("Name", _instanceName + " [" + idx + "]"));
                    ec2.createTags(createTagsRequest);
                    idx++;
                }
            }
        }
    } catch (Exception e) {
        LOG.error("execution " + getClass().getSimpleName() + " with groupName '" + _groupName + "' failed: "
                + e.getMessage());
        throw new BuildException(e);
    }
}

From source file:com.github.joelittlejohn.embedmongo.StartMojo.java

@Override
@SuppressWarnings("unchecked")
public void executeStart() throws MojoExecutionException, MojoFailureException {

    MongodExecutable executable;
    try {

        final ICommandLinePostProcessor commandLinePostProcessor;
        if (authEnabled) {
            commandLinePostProcessor = new ICommandLinePostProcessor() {
                @Override
                public List<String> process(final Distribution distribution, final List<String> args) {
                    args.remove("--noauth");
                    args.add("--auth");
                    return args;
                }
            };
        } else {
            commandLinePostProcessor = new ICommandLinePostProcessor.Noop();
        }

        IRuntimeConfig runtimeConfig = new RuntimeConfigBuilder().defaults(Command.MongoD)
                .processOutput(getOutputConfig()).artifactStore(getArtifactStore())
                .commandLinePostProcessor(commandLinePostProcessor).build();

        int port = getPort();

        if (isRandomPort()) {
            port = NetworkUtils.allocateRandomPort();
        }
        savePortToProjectProperties(port);

        IMongodConfig config = new MongodConfigBuilder().version(getVersion())
                .net(new Net(bindIp, port, NetworkUtils.localhostIsIPv6()))
                .replication(new Storage(getDataDirectory(), null, 0)).cmdOptions(new MongoCmdOptionsBuilder()
                        .useNoJournal(!journal).useStorageEngine(storageEngine).build())
                .build();

        executable = MongodStarter.getInstance(runtimeConfig).prepare(config);
    } catch (DistributionException e) {
        throw new MojoExecutionException("Failed to download MongoDB distribution: " + e.withDistribution(), e);
    } catch (IOException e) {
        throw new MojoExecutionException("Unable to Config MongoDB: ", e);
    }

    try {
        MongodProcess mongod = executable.start();

        if (isWait()) {
            while (true) {
                try {
                    TimeUnit.MINUTES.sleep(5);
                } catch (InterruptedException e) {
                    break;
                }
            }
        }

        getPluginContext().put(MONGOD_CONTEXT_PROPERTY_NAME, mongod);
    } catch (IOException e) {
        throw new MojoExecutionException("Unable to start the mongod", e);
    }
}
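
The while (true) loop above parks the invoking thread in five-minute sleeps until an interrupt arrives. Where an explicit stop signal is available, a CountDownLatch gives the same blocking behavior without the open-ended loop; the sketch below is a hedged alternative, not what the plugin actually does:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public class WaitForStopDemo {
    public static void main(String[] args) throws InterruptedException {
        final CountDownLatch stop = new CountDownLatch(1);
        // Something else (e.g. a shutdown hook or a stop goal) calls stop.countDown().
        Runtime.getRuntime().addShutdownHook(new Thread(stop::countDown));
        // Wake at most once a minute, but return immediately on countDown().
        while (!stop.await(1, TimeUnit.MINUTES)) {
            // still waiting
        }
    }
}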