Example usage for java.util.concurrent TimeUnit NANOSECONDS

Introduction

On this page you can find example usages of java.util.concurrent.TimeUnit.NANOSECONDS, collected from open-source projects.

Prototype

TimeUnit NANOSECONDS

Document

Time unit representing one thousandth of a microsecond.
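The conversions that recur throughout the examples below can be exercised in a few lines. Here is a minimal, self-contained sketch (the class name is illustrative):

import java.util.concurrent.TimeUnit;

public class NanosConversions {
    public static void main(String[] args) {
        // 1 millisecond == 1_000_000 nanoseconds
        System.out.println(TimeUnit.NANOSECONDS.toMillis(5_000_000L)); // 5
        System.out.println(TimeUnit.NANOSECONDS.toMicros(1_000L)); // 1
        // convert() translates from the second argument's unit into nanoseconds
        System.out.println(TimeUnit.NANOSECONDS.convert(5, TimeUnit.MILLISECONDS)); // 5000000
    }
}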

Usage

From source file:org.apache.hadoop.mapreduce.JobHistoryFileReplayMapperV1.java

public void map(IntWritable key, IntWritable val, Context context) throws IOException {
    // collect the apps it needs to process
    TimelineClient tlc = new TimelineClientImpl();
    TimelineEntityConverterV1 converter = new TimelineEntityConverterV1();
    JobHistoryFileReplayHelper helper = new JobHistoryFileReplayHelper(context);
    int replayMode = helper.getReplayMode();
    Collection<JobFiles> jobs = helper.getJobFiles();
    JobHistoryFileParser parser = helper.getParser();

    if (jobs.isEmpty()) {
        LOG.info(context.getTaskAttemptID().getTaskID() + " will process no jobs");
    } else {
        LOG.info(context.getTaskAttemptID().getTaskID() + " will process " + jobs.size() + " jobs");
    }
    for (JobFiles job : jobs) {
        // process each job
        String jobIdStr = job.getJobId();
        LOG.info("processing " + jobIdStr + "...");
        JobId jobId = TypeConverter.toYarn(JobID.forName(jobIdStr));
        ApplicationId appId = jobId.getAppId();

        try {
            // parse the job info and configuration
            Path historyFilePath = job.getJobHistoryFilePath();
            Path confFilePath = job.getJobConfFilePath();
            if ((historyFilePath == null) || (confFilePath == null)) {
                continue;
            }
            JobInfo jobInfo = parser.parseHistoryFile(historyFilePath);
            Configuration jobConf = parser.parseConfiguration(confFilePath);
            LOG.info("parsed the job history file and the configuration file for job " + jobIdStr);

            // create entities from job history and write them
            long totalTime = 0;
            Set<TimelineEntity> entitySet = converter.createTimelineEntities(jobInfo, jobConf);
            LOG.info("converted them into timeline entities for job " + jobIdStr);
            // use the current user for this purpose
            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            long startWrite = System.nanoTime();
            try {
                switch (replayMode) {
                case JobHistoryFileReplayHelper.WRITE_ALL_AT_ONCE:
                    writeAllEntities(tlc, entitySet, ugi);
                    break;
                case JobHistoryFileReplayHelper.WRITE_PER_ENTITY:
                    writePerEntity(tlc, entitySet, ugi);
                    break;
                default:
                    break;
                }
            } catch (Exception e) {
                context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES).increment(1);
                LOG.error("writing to the timeline service failed", e);
            }
            long endWrite = System.nanoTime();
            totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
            int numEntities = entitySet.size();
            LOG.info("wrote " + numEntities + " entities in " + totalTime + " ms");

            context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).increment(totalTime);
            context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).increment(numEntities);
        } finally {
            context.progress(); // move it along
        }
    }
}
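The timing idiom above (capture System.nanoTime() around the operation, then convert the delta with TimeUnit.NANOSECONDS.toMillis) distills to a short standalone sketch; doWork is a hypothetical stand-in for the timeline write:

import java.util.concurrent.TimeUnit;

class WriteTiming {
    static void doWork() throws InterruptedException {
        Thread.sleep(10); // stand-in for the timeline write
    }

    public static void main(String[] args) throws InterruptedException {
        long startWrite = System.nanoTime();
        doWork();
        long endWrite = System.nanoTime();
        // nanoTime() values are only meaningful as differences; convert the delta to ms for logging
        long totalTime = TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
        System.out.println("wrote entities in " + totalTime + " ms");
    }
}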

From source file:com.jbombardier.reports.ReportGenerator.java

private static void buildTestsTable(List<TransactionResult> transactionResults, TimeUnit reportTimeUnits,
        HTMLBuilder2.Element testsDiv) {
    HTMLBuilder2.TableElement testsTable = testsDiv.table();

    testsTable.addClass("table");
    testsTable.addClass("table-striped");
    testsTable.addClass("table-header-rotated");

    String timeUnitDescription;
    if (reportTimeUnits == TimeUnit.NANOSECONDS) {
        timeUnitDescription = "(ns)";
    } else if (reportTimeUnits == TimeUnit.MICROSECONDS) {
        timeUnitDescription = "(s)";
    } else if (reportTimeUnits == TimeUnit.MILLISECONDS) {
        timeUnitDescription = "(ms)";
    } else {
        timeUnitDescription = "(s)";
    }

    HTMLBuilder2.THeadRow headerRow = testsTable.getThead().headerRow();

    headerRow.cell("Test name").addClass("rotate-45");
    headerRow.cell("Transaction").addClass("rotate-45");
    headerRow.cell("Total transaction count").addClass("rotate-45");

    headerRow.cell("Agents").addClass("rotate-45");
    headerRow.cell("Threads").addClass("rotate-45");
    //        headerRow.cell("Sample Time") .addClass("rotate-45");

    headerRow.cell("Successful transactions").addClass("rotate-45");
    headerRow.cell("Unsuccessful transactions").addClass("rotate-45");

    headerRow.cell("Mean duration " + timeUnitDescription).addClass("rotate-45");
    headerRow.cell("SLA").addClass("rotate-45");
    headerRow.cell("Mean TPS").addClass("rotate-45");
    headerRow.cell("Target TPS").addClass("rotate-45");
    headerRow.cell("Maximum TPS").addClass("rotate-45");

    headerRow.cell("Abs dev").addClass("rotate-45");
    headerRow.cell("%").addClass("rotate-45");
    headerRow.cell("stdevp").addClass("rotate-45");
    headerRow.cell("TP90").addClass("rotate-45");
    headerRow.cell("TP99").addClass("rotate-45");
    headerRow.cell("Median").addClass("rotate-45");
    headerRow.cell("Fastest").addClass("rotate-45");
    headerRow.cell("Slowest").addClass("rotate-45");

    headerRow.cell("Unsuccessful mean duration " + timeUnitDescription).addClass("rotate-45");
    headerRow.cell("Unsuccessful Mean TPS").addClass("rotate-45");

    for (TransactionResult transactionResult : transactionResults) {

        HTMLBuilder2.RowElement row = testsTable.row();

        row.cell(transactionResult.getTestName());
        row.cell(transactionResult.getTransactionName());
        row.cell(format(transactionResult.getTotalTransactionCount()));

        row.cell(format(transactionResult.getAgents()));
        row.cell(format(transactionResult.getThreads()));
        //            row.cell(TimeUtils.formatIntervalMilliseconds(transactionResult.getSampleTime()));

        row.cell(format(transactionResult.getSuccessfulTransactionCount()));
        row.cell(format(transactionResult.getUnsuccessfulTransactionCount()));

        row.cell(formatTime(transactionResult.getSuccessfulTransactionMeanDuration(), reportTimeUnits));
        row.cell(formatTime(transactionResult.getSla(), reportTimeUnits));
        row.cell(format(transactionResult.getSuccessfulTransactionMeanTransactionsPerSecond()));
        row.cell(format(transactionResult.getSuccessfulTransactionMeanTransactionsPerSecondTarget()));
        row.cell(format(transactionResult.getSuccessfulMaximumTransactionsPerSecond()));

        row.cell(formatTime(transactionResult.getSuccessfulAbsoluteDeviation(), reportTimeUnits));
        row.cell(format(transactionResult.getSuccessfulAbsoluteDeviationAsPercentage()));
        row.cell(formatTime(transactionResult.getSuccessfulStandardDeviation(), reportTimeUnits));
        row.cell(formatTime(transactionResult.getSuccessfulPercentiles()[90], reportTimeUnits));
        row.cell(formatTime(transactionResult.getSuccessfulPercentiles()[99], reportTimeUnits));
        row.cell(formatTime(transactionResult.getSuccessfulMedian(), reportTimeUnits));
        row.cell(formatTime(transactionResult.getSuccessfulFastestResult(), reportTimeUnits));
        row.cell(formatTime(transactionResult.getSuccessfulSlowestResult(), reportTimeUnits));

        row.cell(formatTime(transactionResult.getUnsuccessfulTransactionMeanDuration(), reportTimeUnits));
        row.cell(format(transactionResult.getUnsuccessfulTransactionMeanTransactionsPerSecond()));
    }
}
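The if/else chain that picks the unit suffix could equally be written as a switch over the TimeUnit enum. A minimal sketch under that assumption (the helper name is illustrative):

import java.util.concurrent.TimeUnit;

class UnitSuffix {
    static String suffixFor(TimeUnit unit) {
        switch (unit) {
        case NANOSECONDS:  return "(ns)";
        case MICROSECONDS: return "(µs)";
        case MILLISECONDS: return "(ms)";
        default:           return "(s)";
        }
    }

    public static void main(String[] args) {
        System.out.println(suffixFor(TimeUnit.NANOSECONDS)); // (ns)
    }
}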

From source file:com.tinspx.util.concurrent.TimedSemaphoreTest.java

@Test
@SuppressWarnings("ResultOfObjectAllocationIgnored")
public void testInvalidArguments() throws InterruptedException {
    try {
        new TimedSemaphore(0, 1, TimeUnit.MILLISECONDS);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        new TimedSemaphore(-1, 1, TimeUnit.MILLISECONDS);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        new TimedSemaphore(1, 0, TimeUnit.MILLISECONDS);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        new TimedSemaphore(1, 1, null);
        fail();
    } catch (NullPointerException ex) {
    }

    TimedSemaphore ts = new TimedSemaphore(3, 100, TimeUnit.NANOSECONDS);
    try {
        ts.setLimit(0);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        ts.setLimit(-1);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        ts.setPeriod(0, TimeUnit.NANOSECONDS);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        ts.setPeriod(-1, TimeUnit.NANOSECONDS);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        ts.setPeriod(1, null);
        fail();
    } catch (NullPointerException ex) {
    }

    //acquiring too many permits
    try {
        ts.tryAcquire(4);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    ts.tryAcquire(3);

    try {
        ts.tryAcquire(4, 99, TimeUnit.NANOSECONDS);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        ts.tryAcquire(6, 99, TimeUnit.NANOSECONDS);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    ts.tryAcquire(6, 100, TimeUnit.NANOSECONDS); //should not throw
    try {
        ts.tryAcquire(7, 100, TimeUnit.NANOSECONDS);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        ts.tryAcquire(7, 199, TimeUnit.NANOSECONDS);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    ts.tryAcquire(7, 200, TimeUnit.NANOSECONDS);
    ts.tryAcquire(9, 200, TimeUnit.NANOSECONDS);

    //acquire invalid args
    try {
        ts.acquire(-1);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        ts.acquire(0);
        fail();
    } catch (IllegalArgumentException ex) {
    }

    //tryAcquire invalid
    try {
        ts.tryAcquire(-1);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        ts.tryAcquire(0);
        fail();
    } catch (IllegalArgumentException ex) {
    }

    //tryAcquire with timeout, invalid arguments
    try {
        ts.tryAcquire(0, 10, TimeUnit.NANOSECONDS);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        ts.tryAcquire(-1, 10, TimeUnit.NANOSECONDS);
        fail();
    } catch (IllegalArgumentException ex) {
    }
    try {
        ts.tryAcquire(1, 10, null);
        fail();
    } catch (NullPointerException ex) {
    }
    try {
        ts.tryAcquire(2, 10, null);
        fail();
    } catch (NullPointerException ex) {
    }

    //0 and negative timeout should not throw
    ts.tryAcquire(0, TimeUnit.NANOSECONDS);
    ts.tryAcquire(-1, TimeUnit.NANOSECONDS);
    ts.tryAcquire(1, 0, TimeUnit.NANOSECONDS);
    ts.tryAcquire(1, -1, TimeUnit.NANOSECONDS);
    ts.tryAcquire(2, 0, TimeUnit.NANOSECONDS);
    ts.tryAcquire(2, -1, TimeUnit.NANOSECONDS);
}
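TimedSemaphore here is a project-specific class, but the nanosecond-timeout style of tryAcquire mirrors the JDK's own java.util.concurrent.Semaphore. A minimal sketch against the standard API (not this project's class):

import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

class SemaphoreTimeoutDemo {
    public static void main(String[] args) throws InterruptedException {
        Semaphore s = new Semaphore(3);
        // Wait at most 100 ns for 2 of the 3 permits; returns false on timeout
        boolean acquired = s.tryAcquire(2, 100, TimeUnit.NANOSECONDS);
        System.out.println("acquired: " + acquired);
    }
}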

From source file:com.netflix.genie.core.jobs.workflow.impl.ApplicationTask.java

/**
 * {@inheritDoc}
 */
@Override
public void executeTask(@NotNull final Map<String, Object> context) throws GenieException, IOException {
    final long start = System.nanoTime();
    try {
        final JobExecutionEnvironment jobExecEnv = (JobExecutionEnvironment) context
                .get(JobConstants.JOB_EXECUTION_ENV_KEY);
        final String jobWorkingDirectory = jobExecEnv.getJobWorkingDir().getCanonicalPath();
        final String genieDir = jobWorkingDirectory + JobConstants.FILE_PATH_DELIMITER
                + JobConstants.GENIE_PATH_VAR;
        final Writer writer = (Writer) context.get(JobConstants.WRITER_KEY);
        log.info("Starting Application Task for job {}", jobExecEnv.getJobRequest().getId());

        if (jobExecEnv.getApplications() != null) {
            for (Application application : jobExecEnv.getApplications()) {
                final String applicationId = application.getId()
                        .orElseThrow(() -> new GeniePreconditionException("Application without id"));

                // Create the directory for this application under applications in the cwd
                createEntityInstanceDirectory(genieDir, applicationId, AdminResources.APPLICATION);

                // Create the config directory for this id
                createEntityInstanceConfigDirectory(genieDir, applicationId, AdminResources.APPLICATION);

                // Create the dependencies directory for this id
                createEntityInstanceDependenciesDirectory(genieDir, applicationId, AdminResources.APPLICATION);

                // Get the setup file, if specified, and add it as a source command in the launcher script
                final Optional<String> setupFile = application.getSetupFile();
                if (setupFile.isPresent()) {
                    final String applicationSetupFile = setupFile.get();
                    if (StringUtils.isNotBlank(applicationSetupFile)) {
                        final String localPath = super.buildLocalFilePath(jobWorkingDirectory, applicationId,
                                applicationSetupFile, FileType.SETUP, AdminResources.APPLICATION);
                        this.fts.getFile(applicationSetupFile, localPath);

                        super.generateSetupFileSourceSnippet(applicationId, "Application:", localPath, writer,
                                jobWorkingDirectory);
                    }
                }

                // Iterate over and get all dependencies
                for (final String dependencyFile : application.getDependencies()) {
                    final String localPath = super.buildLocalFilePath(jobWorkingDirectory, applicationId,
                            dependencyFile, FileType.DEPENDENCIES, AdminResources.APPLICATION);
                    fts.getFile(dependencyFile, localPath);
                }

                // Iterate over and get all configuration files
                for (final String configFile : application.getConfigs()) {
                    final String localPath = super.buildLocalFilePath(jobWorkingDirectory, applicationId,
                            configFile, FileType.CONFIG, AdminResources.APPLICATION);
                    fts.getFile(configFile, localPath);
                }
            }
        }
        log.info("Finished Application Task for job {}", jobExecEnv.getJobRequest().getId());
    } finally {
        final long finish = System.nanoTime();
        this.timer.record(finish - start, TimeUnit.NANOSECONDS);
    }
}

From source file:com.twosigma.beaker.kdb.KdbShell.java

/**
 * kdb result conversions to standard beaker types.
 *
 * TODO it would be better if this was handled by customizing a
 * serializer module, but I don't see a clean way of doing that.
 */
private Object convert(Object o) {
    // Convert flips of simple lists into TableDisplays.
    if (c.t(o) == 98)
        to_tabledisplay: {
            Flip f = (Flip) o;

            // Make sure each column is an array and a type we can handle.
            List<String> columns = Arrays.asList(f.x);
            List<String> classes = new ArrayList<>();
            List<List<?>> colLists = new ArrayList<>();
            for (Object c : f.y) {
                List<?> values = toList(c);
                if (values == null) {
                    break to_tabledisplay;
                }
                String type = objConv
                        .convertType(ClassUtils.primitiveToWrapper(c.getClass().getComponentType()).getName());
                if (type == null) {
                    break to_tabledisplay;
                }

                // Special case - convert Dates to nanosecond times.
                if (BasicObjectSerializer.TYPE_TIME.equals(type)) {
                    List<Long> timestamps = new ArrayList<>(values.size());
                    for (Object d : values) {
                        timestamps
                                .add(TimeUnit.NANOSECONDS.convert(((Date) d).getTime(), TimeUnit.MILLISECONDS));
                    }
                    values = timestamps;
                }

                classes.add(type);
                colLists.add(values);
            }

            // Columns to rows.
            int rows = colLists.get(0).size();
            List<List<?>> values = new ArrayList<>();
            for (int row = 0; row < rows; ++row) {
                List<Object> rowValues = new ArrayList<>();
                for (List<?> col : colLists) {
                    rowValues.add(col.get(row));
                }
                values.add(rowValues);
            }

            return new TableDisplay(values, columns, classes);
        }

    // Convert Dicts to maps.
    if (c.t(o) == 99)
        to_map: {
            Dict f = (Dict) o;

            // Special case - keyed tables are dicts of flips.
            if ((c.t(f.x) == 98) && (c.t(f.y) == 98))
                to_table: {
                    Flip keys = (Flip) f.x;
                    Flip cols = (Flip) f.y;
                    return convert(new Flip(
                            new Dict(ArrayUtils.addAll(keys.x, cols.x), ArrayUtils.addAll(keys.y, cols.y))));
                }

            List<?> keys = toList(f.x);
            if (keys == null)
                break to_map;
            List<?> values = toList(f.y);
            if (values == null)
                break to_map;
            Map<Object, Object> map = new HashMap<>();
            for (int i = 0; i < values.size(); ++i) {
                map.put(keys.get(i), values.get(i));
            }
            return map;
        }

    // No conversion.
    return o;
}
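The Date-to-nanoseconds step at the heart of this example stands on its own; note that TimeUnit.NANOSECONDS.convert(x, TimeUnit.MILLISECONDS) and TimeUnit.MILLISECONDS.toNanos(x) are equivalent:

import java.util.Date;
import java.util.concurrent.TimeUnit;

class DateToNanos {
    public static void main(String[] args) {
        Date d = new Date();
        // Convert an epoch-millisecond timestamp into nanoseconds, both ways
        long viaConvert = TimeUnit.NANOSECONDS.convert(d.getTime(), TimeUnit.MILLISECONDS);
        long viaToNanos = TimeUnit.MILLISECONDS.toNanos(d.getTime());
        System.out.println(viaConvert == viaToNanos); // true
    }
}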

From source file:org.apache.hadoop.mapreduce.JobHistoryFileReplayMapperV2.java

@Override
protected void writeEntities(Configuration tlConf, TimelineCollectorManager manager, Context context)
        throws IOException {
    JobHistoryFileReplayHelper helper = new JobHistoryFileReplayHelper(context);
    int replayMode = helper.getReplayMode();
    JobHistoryFileParser parser = helper.getParser();
    TimelineEntityConverterV2 converter = new TimelineEntityConverterV2();

    // collect the apps it needs to process
    Collection<JobFiles> jobs = helper.getJobFiles();
    if (jobs.isEmpty()) {
        LOG.info(context.getTaskAttemptID().getTaskID() + " will process no jobs");
    } else {
        LOG.info(context.getTaskAttemptID().getTaskID() + " will process " + jobs.size() + " jobs");
    }
    for (JobFiles job : jobs) {
        // process each job
        String jobIdStr = job.getJobId();
        // skip if either of the files is missing
        if (job.getJobConfFilePath() == null || job.getJobHistoryFilePath() == null) {
            LOG.info(jobIdStr + " missing either the job history file or the "
                    + "configuration file. Skipping.");
            continue;
        }
        LOG.info("processing " + jobIdStr + "...");
        JobId jobId = TypeConverter.toYarn(JobID.forName(jobIdStr));
        ApplicationId appId = jobId.getAppId();

        // create the app level timeline collector and start it
        AppLevelTimelineCollector collector = new AppLevelTimelineCollector(appId);
        manager.putIfAbsent(appId, collector);
        try {
            // parse the job info and configuration
            JobInfo jobInfo = parser.parseHistoryFile(job.getJobHistoryFilePath());
            Configuration jobConf = parser.parseConfiguration(job.getJobConfFilePath());
            LOG.info("parsed the job history file and the configuration file " + "for job " + jobIdStr);

            // set the context
            // flow id: job name, flow run id: timestamp, user id
            TimelineCollectorContext tlContext = collector.getTimelineEntityContext();
            tlContext.setFlowName(jobInfo.getJobname());
            tlContext.setFlowRunId(jobInfo.getSubmitTime());
            tlContext.setUserId(jobInfo.getUsername());

            // create entities from job history and write them
            long totalTime = 0;
            List<TimelineEntity> entitySet = converter.createTimelineEntities(jobInfo, jobConf);
            LOG.info("converted them into timeline entities for job " + jobIdStr);
            // use the current user for this purpose
            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            long startWrite = System.nanoTime();
            try {
                switch (replayMode) {
                case JobHistoryFileReplayHelper.WRITE_ALL_AT_ONCE:
                    writeAllEntities(collector, entitySet, ugi);
                    break;
                case JobHistoryFileReplayHelper.WRITE_PER_ENTITY:
                    writePerEntity(collector, entitySet, ugi);
                    break;
                default:
                    break;
                }
            } catch (Exception e) {
                context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES).increment(1);
                LOG.error("writing to the timeline service failed", e);
            }
            long endWrite = System.nanoTime();
            totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
            int numEntities = entitySet.size();
            LOG.info("wrote " + numEntities + " entities in " + totalTime + " ms");

            context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).increment(totalTime);
            context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).increment(numEntities);
        } finally {
            manager.remove(appId);
            context.progress(); // move it along
        }
    }
}

From source file:org.apache.drill.exec.store.hive.HiveMetadataProvider.java

/**
 * Return stats for table/partitions in given {@link HiveReadEntry}. If valid stats are available in MetaStore,
 * return it. Otherwise estimate using the size of the input data.
 *
 * @param hiveReadEntry Subset of the {@link HiveReadEntry} used when creating this cache object.
 * @return stats for the table or partitions in the given {@link HiveReadEntry}
 * @throws IOException
 */
public HiveStats getStats(final HiveReadEntry hiveReadEntry) throws IOException {
    final Stopwatch timeGetStats = Stopwatch.createStarted();

    final HiveTableWithColumnCache table = hiveReadEntry.getTable();
    try {
        if (!isPartitionedTable) {
            final Properties properties = MetaStoreUtils.getTableMetadata(table);
            final HiveStats stats = getStatsFromProps(properties);
            if (stats.valid()) {
                return stats;
            }

            // estimate the stats from the InputSplits.
            return getStatsEstimateFromInputSplits(getTableInputSplits());
        } else {
            final HiveStats aggStats = new HiveStats(0, 0);
            for (HivePartition partition : hiveReadEntry.getPartitions()) {
                final Properties properties = HiveUtilities.getPartitionMetadata(partition, table);
                HiveStats stats = getStatsFromProps(properties);

                if (!stats.valid()) {
                    // estimate the stats from InputSplits
                    stats = getStatsEstimateFromInputSplits(getPartitionInputSplits(partition));
                }
                aggStats.add(stats);
            }

            return aggStats;
        }
    } catch (final Exception e) {
        throw new IOException("Failed to get numRows from HiveTable", e);
    } finally {
        logger.debug("Took {} s to get stats from {}.{}", timeGetStats.elapsed(TimeUnit.NANOSECONDS) / 1000,
                table.getDbName(), table.getTableName());
    }
}
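The Guava Stopwatch pattern used in the finally block can be reproduced in isolation (assumes Guava on the classpath; the sleep is a stand-in for the metastore call):

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

class StopwatchDemo {
    public static void main(String[] args) throws InterruptedException {
        Stopwatch timeGetStats = Stopwatch.createStarted();
        Thread.sleep(5); // stand-in for fetching the stats
        // elapsed nanoseconds divided by 1000 gives microseconds
        long micros = timeGetStats.elapsed(TimeUnit.NANOSECONDS) / 1000;
        System.out.println("Took " + micros + " µs");
    }
}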

From source file:com.netflix.genie.web.jobs.workflow.impl.CommandTask.java

/**
 * {@inheritDoc}
 */
@Override
public void executeTask(@NotNull final Map<String, Object> context) throws GenieException, IOException {
    final long start = System.nanoTime();
    final Set<Tag> tags = Sets.newHashSet();
    try {
        final JobExecutionEnvironment jobExecEnv = (JobExecutionEnvironment) context
                .get(JobConstants.JOB_EXECUTION_ENV_KEY);
        final Command command = jobExecEnv.getCommand();
        tags.add(Tag.of(MetricsConstants.TagKeys.COMMAND_NAME, command.getMetadata().getName()));
        tags.add(Tag.of(MetricsConstants.TagKeys.COMMAND_ID, command.getId()));
        final String jobWorkingDirectory = jobExecEnv.getJobWorkingDir().getCanonicalPath();
        final String genieDir = jobWorkingDirectory + JobConstants.FILE_PATH_DELIMITER
                + JobConstants.GENIE_PATH_VAR;
        final Writer writer = (Writer) context.get(JobConstants.WRITER_KEY);

        log.info("Starting Command Task for job {}", jobExecEnv.getJobRequest().getId().orElse(NO_ID_FOUND));

        final String commandId = command.getId();

        // Create the directory for this command under command dir in the cwd
        createEntityInstanceDirectory(genieDir, commandId, AdminResources.COMMAND);

        // Create the config directory for this id
        createEntityInstanceConfigDirectory(genieDir, commandId, AdminResources.COMMAND);

        // Create the dependencies directory for this id
        createEntityInstanceDependenciesDirectory(genieDir, commandId, AdminResources.COMMAND);

        // Get the setup file, if specified, and add it as a source command in the launcher script
        final Optional<String> setupFile = command.getResources().getSetupFile();
        if (setupFile.isPresent()) {
            final String commandSetupFile = setupFile.get();
            if (StringUtils.isNotBlank(commandSetupFile)) {
                final String localPath = super.buildLocalFilePath(jobWorkingDirectory, commandId,
                        commandSetupFile, FileType.SETUP, AdminResources.COMMAND);

                fts.getFile(commandSetupFile, localPath);

                super.generateSetupFileSourceSnippet(commandId, "Command:", localPath, writer,
                        jobWorkingDirectory);
            }
        }

        // Iterate over and get all configuration files
        for (final String configFile : command.getResources().getConfigs()) {
            final String localPath = super.buildLocalFilePath(jobWorkingDirectory, commandId, configFile,
                    FileType.CONFIG, AdminResources.COMMAND);
            fts.getFile(configFile, localPath);
        }

        // Iterate over and get all dependencies
        for (final String dependencyFile : command.getResources().getDependencies()) {
            final String localPath = super.buildLocalFilePath(jobWorkingDirectory, commandId, dependencyFile,
                    FileType.DEPENDENCIES, AdminResources.COMMAND);
            fts.getFile(dependencyFile, localPath);
        }
        log.info("Finished Command Task for job {}", jobExecEnv.getJobRequest().getId().orElse(NO_ID_FOUND));
        MetricsUtils.addSuccessTags(tags);
    } catch (Throwable t) {
        MetricsUtils.addFailureTagsWithException(tags, t);
        throw t;
    } finally {
        this.getRegistry().timer(COMMAND_TASK_TIMER_NAME, tags).record(System.nanoTime() - start,
                TimeUnit.NANOSECONDS);
    }
}

From source file:at.alladin.rmbt.util.tools.InformationCollectorTool.java

/**
 * update all collectors
 */
public void update() {
    final long now = System.nanoTime();

    try {
        for (CollectorHolder c : collectorList) {
            if ((c.lastUpdate + c.collector.getNanoPause()) <= now) {
                c.collector.update(deltaTimeUnit.convert(now - c.lastUpdate, TimeUnit.NANOSECONDS),
                        deltaTimeUnit);
                c.lastUpdate = now;
            } else if (c.lastUpdate <= 0) {
                c.collector.update(0, deltaTimeUnit);
                c.lastUpdate = now;
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
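The update above converts a System.nanoTime() delta into whatever unit the collectors expect via TimeUnit.convert. The core of that idiom, with deltaTimeUnit chosen here as MILLISECONDS purely for illustration:

import java.util.concurrent.TimeUnit;

class DeltaConvertDemo {
    public static void main(String[] args) throws InterruptedException {
        TimeUnit deltaTimeUnit = TimeUnit.MILLISECONDS; // the tool's target unit
        long lastUpdate = System.nanoTime();
        Thread.sleep(20);
        long now = System.nanoTime();
        // Convert the nanosecond delta into the target unit
        long delta = deltaTimeUnit.convert(now - lastUpdate, TimeUnit.NANOSECONDS);
        System.out.println("delta: " + delta + " " + deltaTimeUnit);
    }
}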

From source file:com.netflix.genie.web.services.impl.HttpFileTransferImpl.java

/**
 * {@inheritDoc}
 */
@Override
public long getLastModifiedTime(final String path) throws GenieException {
    final long start = System.nanoTime();
    try {
        final URL url = new URL(path);
        final long time = this.restTemplate.headForHeaders(url.toURI()).getLastModified();
        // If there is no Last-Modified header, return "now": the best we can do is assume the file is brand new
        return time != -1 ? time : Instant.now().toEpochMilli();
    } catch (final MalformedURLException | URISyntaxException e) {
        log.error(e.getLocalizedMessage(), e);
        throw new GenieServerException(e);
    } finally {
        this.getLastModifiedTimer.record(System.nanoTime() - start, TimeUnit.NANOSECONDS);
    }
}
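The nanoTime/record pairing in this finally block (and in the Genie task examples above) is how a Micrometer Timer is typically fed. A minimal sketch using a SimpleMeterRegistry, assuming Micrometer on the classpath (the meter name is illustrative):

import io.micrometer.core.instrument.Timer;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import java.util.concurrent.TimeUnit;

class TimerRecordDemo {
    public static void main(String[] args) {
        SimpleMeterRegistry registry = new SimpleMeterRegistry();
        Timer getLastModifiedTimer = registry.timer("http.file.getLastModified");
        long start = System.nanoTime();
        // ... the operation being measured ...
        getLastModifiedTimer.record(System.nanoTime() - start, TimeUnit.NANOSECONDS);
        System.out.println(getLastModifiedTimer.count()); // 1
    }
}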