Example usage for java.time Instant ofEpochMilli

List of usage examples for java.time Instant ofEpochMilli

Introduction

On this page you can find usage examples for java.time Instant ofEpochMilli.

Prototype

public static Instant ofEpochMilli(long epochMilli) 

Document

Obtains an instance of Instant using milliseconds from the epoch of 1970-01-01T00:00:00Z.
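
As a quick, self-contained sketch (not drawn from the projects below), the method maps a millisecond count to an Instant, and toEpochMilli maps it back:

import java.time.Instant;

public class OfEpochMilliDemo {
    public static void main(String[] args) {
        Instant epoch = Instant.ofEpochMilli(0L);                  // 1970-01-01T00:00:00Z
        Instant later = Instant.ofEpochMilli(1_500_000_000_000L); // 2017-07-14T02:40:00Z
        System.out.println(epoch);
        System.out.println(later.toEpochMilli());                  // round-trips to 1500000000000
    }
}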

Usage

From source file:io.stallion.utils.GeneralUtils.java

@Deprecated
public static ZonedDateTime milsToDateTime(long mils) {
    return ZonedDateTime.ofInstant(Instant.ofEpochMilli(mils), UTC);
}
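
The UTC constant above is the project's own ZoneId field. A minimal standalone equivalent, with ZoneOffset.UTC standing in for it, might look like this:

import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

public static ZonedDateTime millisToUtcDateTime(long millis) {
    // ZoneOffset.UTC stands in for the project's UTC constant
    return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC);
}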

From source file:com.github.aptd.simulation.elements.passenger.CPassengerSource.java

protected synchronized Instant determinenextstatechange() {
    if (m_passengersgenerated >= m_passengers)
        return Instant.MAX;
    return Instant.ofEpochMilli(m_startmillis + (long) Math.ceil(
            m_distribution.inverseCumulativeProbability(1.0 * (m_passengersgenerated + 1) / m_passengers)));
}
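
m_distribution is presumably a real-valued distribution in the style of Apache Commons Math's RealDistribution. A standalone sketch of the same scheduling idea, where the distribution choice, mean, and counts are assumptions rather than values from the project:

import java.time.Instant;
import org.apache.commons.math3.distribution.ExponentialDistribution;
import org.apache.commons.math3.distribution.RealDistribution;

public class NextEventSketch {
    public static void main(String[] args) {
        long startMillis = Instant.now().toEpochMilli();
        RealDistribution distribution = new ExponentialDistribution(60_000); // mean of 60 s, assumed
        int generated = 3, total = 10;
        // millisecond offset at which the (generated + 1)-th of `total` events falls on the inverse CDF
        long offset = (long) Math.ceil(
                distribution.inverseCumulativeProbability((generated + 1) / (double) total));
        System.out.println(Instant.ofEpochMilli(startMillis + offset));
    }
}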

From source file:de.qaware.chronix.solr.ingestion.format.OpenTsdbTelnetFormatParser.java

/**
 * Extracts the metric timestamp from the parts.
 *
 * @param parts Parts.
 * @return Metric timestamp.
 * @throws FormatParseException If something went wrong while extracting.
 */
private Instant getMetricTimestamp(String[] parts) throws FormatParseException {
    String value = parts[2];
    try {
        if (value.length() != 10 && value.length() != 13) {
            throw new FormatParseException(
                    "Expected a timestamp length of 10 or 13, found " + value.length() + " ('" + value + "')");
        }

        // 10 digits means seconds, 13 digits means milliseconds
        boolean secondResolution = value.length() == 10;

        long epochTime = Long.parseLong(value);
        return secondResolution ? Instant.ofEpochSecond(epochTime) : Instant.ofEpochMilli(epochTime);
    } catch (NumberFormatException e) {
        throw new FormatParseException("Can't convert '" + value + "' to long", e);
    }
}
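
A minimal driver for the same seconds-versus-milliseconds heuristic, independent of the parser class, could be:

import java.time.Instant;

public class TimestampHeuristicDemo {
    static Instant parse(String value) {
        long epochTime = Long.parseLong(value);
        // 10 digits: seconds since the epoch; 13 digits: milliseconds
        return value.length() == 10 ? Instant.ofEpochSecond(epochTime) : Instant.ofEpochMilli(epochTime);
    }

    public static void main(String[] args) {
        System.out.println(parse("1500000000"));    // 2017-07-14T02:40:00Z
        System.out.println(parse("1500000000000")); // same instant, millisecond input
    }
}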

From source file:io.gravitee.management.service.impl.InstanceServiceImpl.java

@Override
public InstanceEntity findById(String eventId) {
    EventEntity event = eventService.findById(eventId);
    Instant nowMinusXMinutes = Instant.now().minus(5, ChronoUnit.MINUTES);

    Map<String, String> props = event.getProperties();
    InstanceEntity instance = new InstanceEntity(props.get("id"));
    instance.setLastHeartbeatAt(new Date(Long.parseLong(props.get("last_heartbeat_at"))));
    instance.setStartedAt(new Date(Long.parseLong(props.get("started_at"))));

    if (event.getPayload() != null) {
        try {
            InstanceInfo info = objectMapper.readValue(event.getPayload(), InstanceInfo.class);
            instance.setHostname(info.getHostname());
            instance.setIp(info.getIp());
            instance.setVersion(info.getVersion());
            instance.setTags(info.getTags());
            instance.setSystemProperties(info.getSystemProperties());
            instance.setPlugins(info.getPlugins());
        } catch (IOException ioe) {
            LOGGER.error("Unexpected error while getting instance information from event payload", ioe);
        }
    }

    if (event.getType() == EventType.GATEWAY_STARTED) {
        instance.setState(InstanceState.STARTED);
        // If last heartbeat timestamp is < now - 5m, set as unknown state
        Instant lastHeartbeat = Instant.ofEpochMilli(instance.getLastHeartbeatAt().getTime());
        if (lastHeartbeat.isBefore(nowMinusXMinutes)) {
            instance.setState(InstanceState.UNKNOWN);
        }
    } else {
        instance.setState(InstanceState.STOPPED);
        instance.setStoppedAt(new Date(Long.parseLong(props.get("stopped_at"))));
    }

    return instance;
}
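
The heartbeat staleness test reduces to a few lines. This standalone sketch uses the same five-minute threshold; the six-minute-old heartbeat is made up for the demonstration:

import java.time.Instant;
import java.time.temporal.ChronoUnit;

public class HeartbeatCheckDemo {
    public static void main(String[] args) {
        long lastHeartbeatMillis = System.currentTimeMillis() - 6 * 60 * 1000; // 6 minutes ago
        Instant threshold = Instant.now().minus(5, ChronoUnit.MINUTES);
        boolean stale = Instant.ofEpochMilli(lastHeartbeatMillis).isBefore(threshold);
        System.out.println(stale ? "UNKNOWN" : "STARTED"); // prints UNKNOWN
    }
}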

From source file:com.amazonaws.services.kinesis.io.StringDataExtractor.java

/**
 * {@inheritDoc}
 */
@Override
public List<AggregateData> getData(InputEvent event) throws SerializationException {
    try {
        int summaryIndex = -1;
        String dateString;
        OffsetDateTime dateValue;
        List<AggregateData> data = new ArrayList<>();

        List<List<String>> content = serialiser.toClass(event);
        for (List<String> line : content) {
            if (line != null) {
                LabelSet labels = new LabelSet();
                labels.withAlias(this.labelAttributeAlias);

                for (Integer key : this.labelIndicies) {
                    labels.put("" + key, line.get(key));
                }

                // get the unique index
                String uniqueId = null;
                if (this.usePartitionKeyForUnique) {
                    uniqueId = event.getPartitionKey();
                } else if (this.useSequenceForUnique) {
                    uniqueId = event.getSequenceNumber();
                } else {
                    if (this.uniqueIdIndex != -1) {
                        uniqueId = line.get(this.uniqueIdIndex);
                    }
                }

                // get the date value from the line
                if (this.dateValueIndex != -1) {
                    dateString = line.get(dateValueIndex);
                    if (this.dateFormat != null) {
                        dateValue = OffsetDateTime.parse(dateString, dateFormatter);
                    } else {
                        // no formatter, so treat the value as epoch milliseconds
                        try {
                            dateValue = OffsetDateTime.ofInstant(
                                    Instant.ofEpochMilli(Long.parseLong(dateString)), ZoneId.of("UTC"));
                        } catch (Exception e) {
                            LOG.error(String.format(
                                    "Unable to create Date Value element from item '%s' due to invalid format as Epoch Seconds",
                                    dateValueIndex));
                            throw new SerializationException(e);
                        }
                    }
                } else {
                    dateValue = OffsetDateTime.now(ZoneId.of("UTC"));
                }

                // get the summed values
                if (this.aggregatorType.equals(AggregatorType.SUM)) {
                    sumUpdates = new HashMap<>();

                    // get the positional sum items
                    for (int i = 0; i < summaryIndicies.size(); i++) {
                        summaryIndex = summaryIndicies.get(i);
                        try {
                            sumUpdates.put("" + summaryIndex, Double.parseDouble(line.get(summaryIndex)));
                        } catch (NumberFormatException nfe) {
                            LOG.error(String.format(
                                    "Unable to deserialise Summary '%s' due to NumberFormatException", i));
                            throw new SerializationException(nfe);
                        }
                    }
                }

                data.add(new AggregateData(uniqueId, labels, dateValue, sumUpdates));
            }
        }

        return data;
    } catch (Exception e) {
        throw new SerializationException(e);
    }

}
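
The date-handling branch above boils down to turning an epoch-milliseconds string into an OffsetDateTime in UTC. A standalone sketch, where ZoneOffset.UTC is equivalent to the ZoneId.of("UTC") used above:

import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;

public static OffsetDateTime parseEpochMillis(String dateString) {
    // a NumberFormatException propagates to the caller, as in the extractor above
    return OffsetDateTime.ofInstant(Instant.ofEpochMilli(Long.parseLong(dateString)), ZoneOffset.UTC);
}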

From source file:IndexImportMediawiki_p.java

/**
 * Run conditions:
 * - no MediaWiki import thread is running : allow to start a new import by filling the "file" parameter
 * - the MediaWiki import thread is running : returns monitoring information.
 * @param header servlet request header
 * @param post request parameters. Supported keys :
 *            <ul>
 *            <li>file : a dump URL or file path on this YaCy server local file system</li>
 *            <li>iffresh : when set to true, the dump file is imported only if its last modified date is unknown or later than the date of the last import attempt on this same file.</li>
 *            <li>report : when set, display the currently running thread monitoring info, or the last import report when none is running.
 *            Ignored when no import thread is known.</li>
 *            </ul>
 * @param env server environment
 * @return the servlet answer object
 */
public static serverObjects respond(final RequestHeader header, final serverObjects post,
        final serverSwitch env) {
    final serverObjects prop = new serverObjects();
    final Switchboard sb = (Switchboard) env;

    if (MediawikiImporter.job != null
            && (MediawikiImporter.job.isAlive() || (post != null && post.containsKey("report")))) {
        /* one import is running, or report was explicitly requested : no option to insert anything */
        prop.put("import", 1);
        /* Only refresh automatically when the job is running */
        prop.put("refresh", MediawikiImporter.job.isAlive() ? 1 : 0);
        final String jobErrorMessage = MediawikiImporter.job.status();
        if (jobErrorMessage != null && !jobErrorMessage.isEmpty()) {
            prop.put("import_status", 1);
            prop.put("import_status_message", jobErrorMessage);
        }
        prop.put("import_thread", MediawikiImporter.job.isAlive() ? 2 : 0);
        prop.put("import_dump", MediawikiImporter.job.source());
        prop.put("import_count", MediawikiImporter.job.count());
        prop.put("import_speed", MediawikiImporter.job.speed());
        prop.put("import_runningHours", (MediawikiImporter.job.runningTime() / 60) / 60);
        prop.put("import_runningMinutes", (MediawikiImporter.job.runningTime() / 60) % 60);
        prop.put("import_remainingHours", (MediawikiImporter.job.remainingTime() / 60) / 60);
        prop.put("import_remainingMinutes", (MediawikiImporter.job.remainingTime() / 60) % 60);
    } else {
        prop.put("import", 0);
        prop.put("refresh", 0);
        prop.put("import_prevReport", MediawikiImporter.job != null ? 1 : 0);
        if (post == null) {
            prop.put("import_status", 0);

            /* Acquire a transaction token for the next POST form submission */
            final String token = TransactionManager.getTransactionToken(header);
            prop.put(TransactionManager.TRANSACTION_TOKEN_PARAM, token);
            prop.put("import_" + TransactionManager.TRANSACTION_TOKEN_PARAM, token);

        } else {
            if (post.containsKey("file")) {
                /* Check the transaction is valid */
                TransactionManager.checkPostTransaction(header, post);

                String file = post.get("file");
                MultiProtocolURL sourceURL = null;
                int status = 0;
                String sourceFilePath = "";
                final Row lastExecutedCall = WorkTables.selectLastExecutedApiCall("IndexImportMediawiki_p.html",
                        post, sb);
                Date lastExecutionDate = null;
                if (lastExecutedCall != null) {
                    lastExecutionDate = lastExecutedCall.get(WorkTables.TABLE_API_COL_DATE_LAST_EXEC,
                            (Date) null);
                }
                try {
                    sourceURL = new MultiProtocolURL(file);
                    if (sourceURL.isFile()) {
                        final File sourcefile = sourceURL.getFSFile();
                        sourceFilePath = sourcefile.getAbsolutePath();
                        if (!sourcefile.exists()) {
                            status = 2;
                        } else if (!sourcefile.canRead()) {
                            status = 3;
                        } else if (sourcefile.isDirectory()) {
                            status = 4;
                        }
                    }

                    if (status == 0 && post.getBoolean("iffresh")) {
                        long lastModified = getLastModified(sourceURL);
                        if (lastExecutionDate != null && lastModified != 0L
                                && Instant.ofEpochMilli(lastModified).isBefore(lastExecutionDate.toInstant())) {
                            status = 5;
                            prop.put("import_status_lastImportDate", GenericFormatter.formatSafely(
                                    lastExecutionDate.toInstant(), GenericFormatter.FORMAT_SIMPLE));

                            /* the import is not performed, but we increase here the api call count */
                            if (sb.tables != null) {
                                byte[] lastExecutedCallPk = lastExecutedCall.getPK();
                                if (lastExecutedCallPk != null
                                        && !post.containsKey(WorkTables.TABLE_API_COL_APICALL_PK)) {
                                    post.add(WorkTables.TABLE_API_COL_APICALL_PK,
                                            UTF8.String(lastExecutedCallPk));
                                }
                                sb.tables.recordAPICall(post, "IndexImportMediawiki_p.html",
                                        WorkTables.TABLE_API_TYPE_DUMP,
                                        "MediaWiki Dump Import for " + sourceURL);
                            }
                        }
                    }
                } catch (MalformedURLException e) {
                    status = 1;
                }
                if (status == 0) {
                    /* store this call as an api call */
                    if (sb.tables != null) {
                        /* We avoid creating a duplicate of any already recorded API call with the same parameters */
                        if (lastExecutedCall != null
                                && !post.containsKey(WorkTables.TABLE_API_COL_APICALL_PK)) {
                            byte[] lastExecutedCallPk = lastExecutedCall.getPK();
                            if (lastExecutedCallPk != null) {
                                post.add(WorkTables.TABLE_API_COL_APICALL_PK, UTF8.String(lastExecutedCallPk));
                            }
                        }
                        sb.tables.recordAPICall(post, "IndexImportMediawiki_p.html",
                                WorkTables.TABLE_API_TYPE_DUMP, "MediaWiki Dump Import for " + sourceURL);
                    }

                    MediawikiImporter.job = new MediawikiImporter(sourceURL, sb.surrogatesInPath);
                    MediawikiImporter.job.start();
                    prop.put("import_dump", MediawikiImporter.job.source());
                    prop.put("import_thread", 1);
                    prop.put("import", 1);
                    prop.put("refresh", 1);
                } else {
                    prop.put("import_status", status);
                    prop.put("import_status_sourceFile", sourceFilePath);

                    /* Acquire a transaction token for the next POST form submission */
                    final String token = TransactionManager.getTransactionToken(header);
                    prop.put(TransactionManager.TRANSACTION_TOKEN_PARAM, token);
                    prop.put("import_" + TransactionManager.TRANSACTION_TOKEN_PARAM, token);
                }
                prop.put("import_count", 0);
                prop.put("import_speed", 0);
                prop.put("import_runningHours", 0);
                prop.put("import_runningMinutes", 0);
                prop.put("import_remainingHours", 0);
                prop.put("import_remainingMinutes", 0);
            }
        }
    }
    return prop;
}
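
Stripped of the servlet plumbing, the "iffresh" comparison is a single condition. A sketch of it as a helper (the method name is illustrative):

import java.time.Instant;
import java.util.Date;

public static boolean isFresh(long lastModifiedMillis, Date lastImportDate) {
    // an unknown (0) last-modified date or a missing previous import counts as fresh
    if (lastModifiedMillis == 0L || lastImportDate == null) {
        return true;
    }
    return !Instant.ofEpochMilli(lastModifiedMillis).isBefore(lastImportDate.toInstant());
}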

From source file:it.tidalwave.northernwind.frontend.filesystem.impl.ResourceFileNetBeansPlatform.java

@Override
@Nonnull
public ZonedDateTime getLatestModificationTime() {
    // See NW-154
    final File file = toFile();

    final long millis = (file != null) ? file.lastModified() : delegate.lastModified().getTime();
    return Instant.ofEpochMilli(millis).atZone(ZoneId.of("GMT"));
}
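
The conversion itself is a single expression. A self-contained sketch against a file on disk (the path is illustrative):

import java.io.File;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;

public class LastModifiedDemo {
    public static void main(String[] args) {
        File file = new File("pom.xml"); // any existing file; lastModified() returns 0 if it is missing
        ZonedDateTime modified = Instant.ofEpochMilli(file.lastModified()).atZone(ZoneId.of("GMT"));
        System.out.println(modified);
    }
}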

From source file:com.amazonaws.services.kinesis.io.JsonDataExtractor.java

/**
 * {@inheritDoc}
 */
@Override
public List<AggregateData> getData(InputEvent event) throws SerializationException {
    try {
        List<AggregateData> aggregateData = new ArrayList<>();
        OffsetDateTime dateValue = null;
        JsonNode jsonContent = null;
        String dateString, summary = null;
        long localOffset = 0;

        List<String> items = (List<String>) serialiser.toClass(event);

        // log a warning if we didn't get anything back from the serialiser
        // - this could be OK, but probably isn't
        // it would be OK, for example, if a filterRegex is configured
        //            if (items == null || items.size() == 0)
        //                LOG.warn(String.format(
        //                        "Failed to deserialise any content for Record (Partition Key %s, Sequence %s",
        //                        event.getPartitionKey(), event.getSequenceNumber()));

        // process all the items returned by the serialiser
        for (String item : items) {
            // Convert the string to a Jackson JsonNode for navigation
            jsonContent = StreamAggregatorUtils.asJsonNode(item);
            sumUpdates = new HashMap<>();

            LabelSet labels = new LabelSet();
            for (String key : this.labelAttributes) {
                labels.put(key, StreamAggregatorUtils.readValueAsString(jsonContent, key));
            }

            // get the unique ID for the event
            String uniqueId = null;
            if (this.uniqueIdAttribute != null) {
                switch (this.uniqueIdAttribute) {
                case StreamAggregator.REF_PARTITION_KEY:
                    uniqueId = event.getPartitionKey();
                    break;
                case StreamAggregator.REF_SEQUENCE:
                    uniqueId = event.getSequenceNumber();
                    break;
                default:
                    uniqueId = StreamAggregatorUtils.readValueAsString(jsonContent, uniqueIdAttribute);
                    break;
                }
            }

            // get the date value from the line
            if (dateValueAttribute != null) {
                dateString = StreamAggregatorUtils.readValueAsString(jsonContent, dateValueAttribute);

                // bail on no date returned
                if (dateString == null || dateString.equals(""))
                    throw new SerializationException(
                            String.format("Unable to read date value attribute %s from JSON Content %s",
                                    dateValueAttribute, item));

                // turn date as long or string into Date
                if (this.dateFormat != null) {
                    dateValue = OffsetDateTime.parse(dateString, dateFormatter);
                } else {
                    // no formatter, so treat the value as epoch milliseconds
                    try {
                        dateValue = OffsetDateTime.ofInstant(Instant.ofEpochMilli(Long.parseLong(dateString)),
                                ZoneId.of("UTC"));
                    } catch (Exception e) {
                        LOG.error(String.format(
                                "Unable to create Date Value element from item '%s' due to invalid format as Epoch Seconds",
                                dateValueAttribute));
                        throw new SerializationException(e);
                    }
                }
            } else {
                // no date value attribute configured, so use now
                dateValue = OffsetDateTime.now(ZoneId.of("UTC"));
            }

            // get the summed values
            if (this.aggregatorType.equals(AggregatorType.SUM)) {
                // get the positional sum items
                for (String s : summaryConfig.getItemSet()) {
                    try {
                        summary = StreamAggregatorUtils.readValueAsString(jsonContent, s);

                        // if a summary is not found in the data element,
                        // then we simply continue without it
                        if (summary != null)
                            sumUpdates.put(s, Double.parseDouble(summary));
                    } catch (NumberFormatException nfe) {
                        LOG.error(String
                                .format("Unable to deserialise Summary '%s' due to NumberFormatException", s));
                        throw new SerializationException(nfe);
                    }
                }
            }

            // get local offset
            if (offsetAttribute != null) {
                try {
                    String offsetString = StreamAggregatorUtils.readValueAsString(jsonContent, offsetAttribute);
                    if (offsetString != null)
                        localOffset = Long.parseLong(offsetString);
                } catch (NumberFormatException nfe) {
                    LOG.error(String.format(
                            "Unable to deserialise local offset '%s' due to NumberFormatException",
                            offsetAttribute));
                    throw new SerializationException(nfe);
                }
            }

            aggregateData.add(new AggregateData(uniqueId, labels, dateValue, sumUpdates, localOffset));
        }

        return aggregateData;
    } catch (Exception e) {
        throw new SerializationException(e);
    }
}
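
The JSON variant differs from the string extractor mainly in how the raw value is located. A minimal Jackson sketch of the same date handling (the field name "ts" is assumed):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;

public class JsonDateDemo {
    public static void main(String[] args) throws Exception {
        JsonNode node = new ObjectMapper().readTree("{\"ts\": 1500000000000}");
        OffsetDateTime dateValue = OffsetDateTime.ofInstant(
                Instant.ofEpochMilli(node.get("ts").asLong()), ZoneOffset.UTC);
        System.out.println(dateValue); // 2017-07-14T02:40Z
    }
}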

From source file:reactivity.SseController.java

/**
 * <p>
 * Retrieves and pushes the timeseries associated with the given artifact.
 * </p>
 *
 * @param a the artifact associated with the timeseries
 */
private void timeseries(final Artifact a) {

    // Compute the key
    final LocalDateTime i = LocalDateTime.ofInstant(Instant.ofEpochMilli(a.getTimestamp()),
            ZoneId.systemDefault());
    final Object[] key = new Object[] { a.getGroup().getType() + "/" + a.getGroup().getName(), i.getYear(),
            i.getMonthValue(), i.getDayOfMonth() };

    repository.color(key).subscribe((item) -> replayProcessor.onNext(sse(Artifact.class.cast(item))));
}
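
Reduced to its time-handling core, the key computation looks like this sketch (the group string is a stand-in for the artifact's group):

import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Arrays;

public class TimeseriesKeyDemo {
    public static void main(String[] args) {
        long timestamp = System.currentTimeMillis();
        LocalDateTime i = LocalDateTime.ofInstant(Instant.ofEpochMilli(timestamp), ZoneId.systemDefault());
        Object[] key = { "group-type/group-name", i.getYear(), i.getMonthValue(), i.getDayOfMonth() };
        System.out.println(Arrays.toString(key));
    }
}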