Example usage for java.lang System nanoTime

A list of usage examples for java.lang.System.nanoTime.

Introduction

On this page you can find example usages of java.lang.System.nanoTime.

Prototype

@HotSpotIntrinsicCandidate
public static native long nanoTime();

Source Link

Document

Returns the current value of the running Java Virtual Machine's high-resolution time source, in nanoseconds.

Usage

From source file:com.taobao.datax.plugins.writer.ftpwriter.FtpWriter.java

/**
 * Splits the writer configuration into one parameter set per concurrent
 * writer, giving each slice a file name that is unique both among the
 * slices created here and among the files already present on the server.
 */
@Override
public List<PluginParam> split(PluginParam param) {
    LOG.info("begin do split...");
    // Snapshot of every name already taken, so generated names never collide.
    Set<String> usedNames = new HashSet<String>(this.allFileExists);
    String prefix = param.getValue(ParamKey.FILE_NAME, Constant.DEFAULT_FILE_NAME);
    int concurrency = param.getIntValue(ParamKey.CONCURRENCY, 1);
    List<PluginParam> result = new ArrayList<PluginParam>();
    for (int slice = 0; slice < concurrency; slice++) {
        PluginParam sliceParam = param.clone();
        // Derive the name from the high-resolution clock; retry on collision.
        String candidate;
        do {
            candidate = String.format("%s__%s", prefix, String.valueOf(System.nanoTime()));
        } while (usedNames.contains(candidate));
        usedNames.add(candidate);
        sliceParam.putValue(ParamKey.FILE_NAME, candidate);
        LOG.info(String.format("splited write file name:[%s]", candidate));
        result.add(sliceParam);
    }
    LOG.info("end do split.");
    return result;
}

From source file:at.tuwien.minimee.emulation.EmulationService.java

/**
 * Currently not exposed as a web service since miniMEE
 * has been integrated with Plato./*from www. ja v a2 s .  c o m*/
 * This starts a session with GRATE
 * @param samplename filename of the object to be rendered remotely
 * @param data the file to be rendered remotely
 * @param toolID pointing to the corresponding minimee configuration
 * @return a URL to be posted to the browser for opening a GRATE session.
 * This URL points to a GRATE session that contains the object readily waiting
 * to be rendered, already injected into the appropriate environment.
 * @throws PlatoServiceException if the connection to the GRATE server failed
 */
public String startSession(String samplename, byte[] data, String toolID) throws PlatoServiceException {
    ToolConfig config = getToolConfig(toolID);

    String response;
    try {
        HttpClient client = new HttpClient();
        MultipartPostMethod mPost = new MultipartPostMethod(config.getTool().getExecutablePath());
        client.setConnectionTimeout(8000);

        // MultipartPostMethod needs a file instance
        File sample = File.createTempFile(samplename + System.nanoTime(), "tmp");
        OutputStream out = new BufferedOutputStream(new FileOutputStream(sample));
        out.write(data);
        out.close();

        mPost.addParameter("datei", samplename, sample);

        int statusCode = client.executeMethod(mPost);

        response = mPost.getResponseBodyAsString();

        return response + config.getParams();

    } catch (HttpException e) {
        throw new PlatoServiceException("Could not connect to GRATE.", e);
    } catch (FileNotFoundException e) {
        throw new PlatoServiceException("Could not create temp file.", e);
    } catch (IOException e) {
        throw new PlatoServiceException("Could not connect to GRATE.", e);
    }

}

From source file:edu.wisc.commons.httpclient.ConnPoolControlData.java

/**
 * Returns connection-pool statistics, refreshing the cached snapshot when
 * it is missing or older than {@code POOL_STATS_REFRESH} nanoseconds.
 */
private PoolStats getPoolStats() {
    PoolStats cached = this.poolStats;

    final long timestamp = System.nanoTime();
    // nanoTime differences are monotonic, so this age check is wrap-safe.
    final boolean stale = timestamp - lastLoaded >= POOL_STATS_REFRESH;
    if (stale || cached == null) {
        cached = this.connPoolControl.getTotalStats();
        this.lastLoaded = timestamp;
        this.poolStats = cached;
    }

    return cached;
}

From source file:net.carinae.dev.async.TasksIntegrationTest.java

/**
 * Enqueues a simple task and waits up to 3 minutes for it to be executed.
 */
@Test
public void testSimpleTask() throws InterruptedException {

    // Unique payload so the poll below only matches this run's task.
    final String data = String.valueOf(System.nanoTime());

    enqueueSimpleTask(data);

    // Poll the database once per second, at most 180 times.
    int attempts = 0;
    while (attempts < 180 && pollDummyEntity(data)) {
        Thread.sleep(1000);
        attempts++;
    }

    Assert.assertTrue("Task didn't execute in 3 minutes time", attempts < 180);
}

From source file:ch.algotrader.util.metric.MetricsUtil.java

/**
 * Clears all recorded metrics and restarts the timing origin.
 */
public static void resetMetrics() {

    metrics.clear();
    // NOTE(review): despite its name, startMillis is assigned System.nanoTime()
    // (nanosecond resolution, arbitrary origin) — confirm readers expect nanos,
    // or rename the field.
    startMillis = System.nanoTime();
}

From source file:hudson.remoting.RegExpBenchmark.java

/**
 * Compares three blacklist-matching strategies (many regexps, one combined
 * regexp, plain string checks) over every class name in rt.jar, timing each
 * with System.nanoTime() and printing per-class averages and slowdown ratios.
 */
@Test
public void benchmark() throws Exception {
    System.out.println("there are " + getAllRTClasses().size());

    // --- Strategy 1: one regexp per blacklist entry ---
    List<String> classes = getAllRTClasses();
    final long startRegExp = System.nanoTime();
    final List<String> matchesRegExp = checkClassesRegExp(classes);
    final long durationRegexpNanos = System.nanoTime() - startRegExp;
    classes = null; // drop the list so the GC below can reclaim it

    System.gc();
    System.gc();
    System.gc();

    // make sure we use new Strings each time so that HotSpot does not do funky caching (after all the strings we will be testing will come from the stream and be new).
    classes = getAllRTClasses();
    // --- Strategy 2: a single combined regexp ---
    final long startSingleRegExp = System.nanoTime();
    final List<String> matchesSingleRegExp = checkClassesSingleRegExp(classes);
    final long durationSingleRegexpNanos = System.nanoTime() - startSingleRegExp;
    classes = null;
    System.gc();
    System.gc();
    System.gc();

    // make sure we use new Strings each time so that HotSpot does not do funky caching (after all the strings we will be testing will come from the stream and be new).
    classes = getAllRTClasses();
    // --- Strategy 3: plain String comparison ---
    final long startString = System.nanoTime();
    final List<String> matchesString = checkClassesString(classes);
    final long durationStringNanos = System.nanoTime() - startString;

    System.out.println(String.format(Locale.ENGLISH,
            "%-13s: %d blacklisted classes in %9dns.  Average class check time is %dns",
            new Object[] { "RegExp ", matchesRegExp.size(), durationRegexpNanos,
                    durationRegexpNanos / classes.size() }));
    System.out.println(String.format(Locale.ENGLISH,
            "%-13s: %d blacklisted classes in %9dns.  Average class check time is %dns",
            new Object[] { "SingleRegExp ", matchesSingleRegExp.size(), durationSingleRegexpNanos,
                    durationSingleRegexpNanos / classes.size() }));
    System.out.println(String.format(Locale.ENGLISH,
            "%-13s: %d blacklisted classes in %9dns.  Average class check time is %dns",
            new Object[] { "String ", matchesString.size(), durationStringNanos,
                    durationStringNanos / classes.size() }));

    // NOTE(review): integer division — ratios below 2x are truncated (e.g. 1.9 prints as 1).
    System.out.println("Regular Expression is " + durationRegexpNanos / durationStringNanos + " times slower");
    System.out.println("Single Regular Expression is " + durationSingleRegexpNanos / durationStringNanos
            + " times slower\n");
}

From source file:ffx.algorithms.AlgorithmUtils.java

/**
 * Captures the JVM's high-resolution clock at construction; both the overall
 * timer ({@code initTime}) and the interval timer ({@code interTime}) start here.
 */
public AlgorithmUtils() {
    initTime = System.nanoTime();
    interTime = initTime;
}

From source file:com.netflix.genie.web.jobs.workflow.impl.ClusterTask.java

/**
 * {@inheritDoc}
 *
 * Stages a job's cluster resources (setup file, configs, dependencies) into
 * the job working directory, recording success/failure tags and the elapsed
 * time (measured with System.nanoTime()) into the task timer metric.
 */
@Override
public void executeTask(@NotNull final Map<String, Object> context) throws GenieException, IOException {
    final Set<Tag> tags = Sets.newHashSet();
    // Start the wall-clock measurement before any work so the timer in the
    // finally block covers the whole task, including failures.
    final long start = System.nanoTime();
    try {
        final JobExecutionEnvironment jobExecEnv = (JobExecutionEnvironment) context
                .get(JobConstants.JOB_EXECUTION_ENV_KEY);
        final Cluster cluster = jobExecEnv.getCluster();
        tags.add(Tag.of(MetricsConstants.TagKeys.CLUSTER_NAME, cluster.getMetadata().getName()));
        tags.add(Tag.of(MetricsConstants.TagKeys.CLUSTER_ID, cluster.getId()));
        final String jobWorkingDirectory = jobExecEnv.getJobWorkingDir().getCanonicalPath();
        final String genieDir = jobWorkingDirectory + JobConstants.FILE_PATH_DELIMITER
                + JobConstants.GENIE_PATH_VAR;
        final Writer writer = (Writer) context.get(JobConstants.WRITER_KEY);
        log.info("Starting Cluster Task for job {}", jobExecEnv.getJobRequest().getId().orElse(NO_ID_FOUND));

        final String clusterId = cluster.getId();

        // Create the directory for this application under applications in the cwd
        createEntityInstanceDirectory(genieDir, clusterId, AdminResources.CLUSTER);

        // Create the config directory for this id
        createEntityInstanceConfigDirectory(genieDir, clusterId, AdminResources.CLUSTER);

        // Create the dependencies directory for this id
        createEntityInstanceDependenciesDirectory(genieDir, clusterId, AdminResources.CLUSTER);

        // Get the set up file for cluster and add it to source in launcher script
        final Optional<String> setupFile = cluster.getResources().getSetupFile();
        if (setupFile.isPresent()) {
            final String clusterSetupFile = setupFile.get();
            if (StringUtils.isNotBlank(clusterSetupFile)) {
                final String localPath = super.buildLocalFilePath(jobWorkingDirectory, clusterId,
                        clusterSetupFile, FileType.SETUP, AdminResources.CLUSTER);

                this.fts.getFile(clusterSetupFile, localPath);

                super.generateSetupFileSourceSnippet(clusterId, "Cluster:", localPath, writer,
                        jobWorkingDirectory);
            }
        }

        // Iterate over and get all configuration files
        for (final String configFile : cluster.getResources().getConfigs()) {
            final String localPath = super.buildLocalFilePath(jobWorkingDirectory, clusterId, configFile,
                    FileType.CONFIG, AdminResources.CLUSTER);
            this.fts.getFile(configFile, localPath);
        }

        // Iterate over and get all dependencies
        for (final String dependencyFile : cluster.getResources().getDependencies()) {
            final String localPath = super.buildLocalFilePath(jobWorkingDirectory, clusterId, dependencyFile,
                    FileType.DEPENDENCIES, AdminResources.CLUSTER);
            this.fts.getFile(dependencyFile, localPath);
        }
        log.info("Finished Cluster Task for job {}", jobExecEnv.getJobRequest().getId().orElse(NO_ID_FOUND));
        MetricsUtils.addSuccessTags(tags);
    } catch (final Throwable t) {
        // Tag the metric with the failure cause, then rethrow unchanged.
        MetricsUtils.addFailureTagsWithException(tags, t);
        throw t;
    } finally {
        // Record elapsed time regardless of outcome, tagged success or failure.
        this.getRegistry().timer(CLUSTER_TASK_TIMER_NAME, tags).record(System.nanoTime() - start,
                TimeUnit.NANOSECONDS);
    }
}

From source file:net.duckling.ddl.service.oauth.impl.OAuthServiceImpl.java

/**
 * Promotes the accessor's request token to an access token: generates a new
 * token string, clears the request token, and persists the accessor.
 */
@Override
public void generateAccessToken(OAuthAccessor accessor) {

    // generate oauth_token and oauth_secret
    String consumerKey = (String) accessor.consumer.getProperty("name");
    // generate token and secret based on consumer_key

    // for now use md5 of name + current time as token
    // NOTE(review): MD5(consumerKey + nanoTime) is predictable — nanoTime is
    // guessable and MD5 adds no entropy. Access tokens should come from
    // java.security.SecureRandom; confirm and replace.
    String token_data = consumerKey + System.nanoTime();
    String token = DigestUtils.md5Hex(token_data);

    String oldRequestToken = accessor.requestToken;
    // first remove the accessor from cache
    accessor.requestToken = null;
    accessor.accessToken = token;

    // Update the existing row when a request token was present; otherwise
    // persist a brand-new accessor record.
    AccessorPo po;
    if (oldRequestToken != null) {
        po = accessorDAO.getAccessor(oldRequestToken);
        po.copyDateFrom(accessor);
        accessorDAO.updateAccessor(po);
    } else {
        po = new AccessorPo();
        po.copyDateFrom(accessor);
        accessorDAO.createAccessor(po);
    }
}

From source file:it.dockins.dockerslaves.spec.DockerfileContainerDefinition.java

/**
 * Builds (or reuses) a Docker image for this container definition.
 * The image is built from {@code dockerfile} inside {@code contextPath} of
 * the workspace and tagged with a nanoTime-derived hex tag.
 *
 * @return the image name/tag to run
 * @throws IOException if the context or Dockerfile is missing, or the build fails
 */
@Override
public String getImage(DockerDriver driver, Launcher.ProcStarter procStarter, TaskListener listener)
        throws IOException, InterruptedException {
    boolean pull = forcePull;
    // Reuse a previously built image when available.
    if (image != null)
        return image;
    // nanoTime gives a cheap, practically unique tag for this build.
    String tag = Long.toHexString(System.nanoTime());

    final FilePath workspace = procStarter.pwd();

    final FilePath pathToContext = workspace.child(contextPath);
    if (!pathToContext.exists()) {
        throw new IOException(pathToContext.getRemote() + " does not exists.");
    }

    final FilePath pathToDockerfile = pathToContext.child(dockerfile);
    if (!pathToDockerfile.exists()) {
        // BUGFIX: previously reported pathToContext here, hiding which file was missing.
        throw new IOException(pathToDockerfile.getRemote() + " does not exists.");
    }

    // Stage the build context in a local temp dir so the driver can read it.
    final File context = Util.createTempDir();
    try {
        pathToContext.copyRecursiveTo(new FilePath(context));
        pathToDockerfile.copyTo(new FilePath(new File(context, "Dockerfile")));

        if (driver.buildDockerfile(listener, context.getAbsolutePath(), tag, pull) != 0) {
            throw new IOException("Failed to build image from Dockerfile " + dockerfile);
        }
    } finally {
        // BUGFIX: the temp context leaked when the build threw; always clean up.
        Util.deleteRecursive(context);
    }
    this.image = tag;
    return tag;
}