Example usage for org.joda.time DateTime parse

List of usage examples for org.joda.time DateTime parse

Introduction

On this page you can find usage examples for org.joda.time DateTime parse.

Prototype

@FromString
public static DateTime parse(String str) 

Document

Parses a DateTime from the specified string.
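
As a quick orientation before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed sources): DateTime.parse accepts an ISO-8601 formatted string and throws IllegalArgumentException on input it cannot parse, which is why many of the usages below wrap the call in a try/catch.

import org.joda.time.DateTime;

public class DateTimeParseExample {

    public static void main(String[] args) {
        // An ISO-8601 string parses directly into a DateTime.
        DateTime parsed = DateTime.parse("2016-03-01T12:30:45Z");
        System.out.println(parsed.getYear()); // 2016
        System.out.println(parsed.toDate());  // java.util.Date view, as several examples below use

        // Malformed input raises IllegalArgumentException, so callers typically guard the call.
        try {
            DateTime.parse("not-a-date");
        } catch (IllegalArgumentException e) {
            System.err.println("Unparseable date/time: " + e.getMessage());
        }
    }
}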

Usage

From source file: monasca.api.infrastructure.persistence.vertica.MeasurementVerticaRepoImpl.java

License: Apache License

@Override
public List<Measurements> find(String tenantId, String name, Map<String, String> dimensions, DateTime startTime,
        @Nullable DateTime endTime, @Nullable String offset, int limit, Boolean mergeMetricsFlag)
        throws MultipleMetricsException {

    try (Handle h = db.open()) {

        Map<ByteBuffer, Measurements> results = new LinkedHashMap<>();

        Set<byte[]> defDimIdSet = new HashSet<>();
        Set<byte[]> dimSetIdSet = new HashSet<>();

        String namePart = "";

        if (name != null && !name.isEmpty()) {
            namePart = "AND def.name = :name ";
        }

        String defDimSql = String.format(DEFDIM_IDS_SELECT, namePart,
                MetricQueries.buildDimensionAndClause(dimensions, "defDims", 0));

        Query<Map<String, Object>> query = h.createQuery(defDimSql).bind("tenantId", tenantId);

        MetricQueries.bindDimensionsToQuery(query, dimensions);

        if (name != null && !name.isEmpty()) {
            query.bind("name", name);
        }

        List<Map<String, Object>> rows = query.list();

        ByteBuffer defId = ByteBuffer.wrap(new byte[0]);

        for (Map<String, Object> row : rows) {

            byte[] defDimId = (byte[]) row.get("id");
            defDimIdSet.add(defDimId);

            byte[] dimSetIdBytes = (byte[]) row.get("dimension_set_id");
            dimSetIdSet.add(dimSetIdBytes);

            byte[] defIdBytes = (byte[]) row.get("definition_id");
            defId = ByteBuffer.wrap(defIdBytes);

        }

        if (!Boolean.TRUE.equals(mergeMetricsFlag) && (dimSetIdSet.size() > 1)) {
            throw new MultipleMetricsException(name, dimensions);
        }

        //
        // If we didn't find any definition dimension ids,
        // we won't have any measurements, let's just bail
        // now.
        //
        if (defDimIdSet.size() == 0) {
            return new ArrayList<>(results.values());
        }

        String defDimInClause = MetricQueries.createDefDimIdInClause(defDimIdSet);

        StringBuilder sb = new StringBuilder();

        if (endTime != null) {
            sb.append(" and time_stamp <= :endTime");
        }

        if (offset != null && !offset.isEmpty()) {
            sb.append(" and time_stamp > :offset");
        }

        String sql = String.format(FIND_BY_METRIC_DEF_SQL, defDimInClause, sb);

        query = h.createQuery(sql).bind("startTime", new Timestamp(startTime.getMillis())).bind("limit",
                limit + 1);

        if (endTime != null) {
            logger.debug("binding endtime: {}", endTime);
            query.bind("endTime", new Timestamp(endTime.getMillis()));
        }

        if (offset != null && !offset.isEmpty()) {
            logger.debug("binding offset: {}", offset);
            query.bind("offset", new Timestamp(DateTime.parse(offset).getMillis()));
        }

        rows = query.list();

        for (Map<String, Object> row : rows) {

            String timestamp = DATETIME_FORMATTER.print(((Timestamp) row.get("time_stamp")).getTime());

            byte[] defdimsIdBytes = (byte[]) row.get("definition_dimensions_id");
            ByteBuffer defdimsId = ByteBuffer.wrap(defdimsIdBytes);

            double value = (double) row.get("value");

            String valueMetaString = (String) row.get("value_meta");

            Map<String, String> valueMetaMap = new HashMap<>();

            if (valueMetaString != null && !valueMetaString.isEmpty()) {

                try {

                    valueMetaMap = this.objectMapper.readValue(valueMetaString, VALUE_META_TYPE);

                } catch (IOException e) {

                    logger.error("failed to parse value metadata: {}", valueMetaString);
                }

            }

            Measurements measurements = (Boolean.TRUE.equals(mergeMetricsFlag)) ? results.get(defId)
                    : results.get(defdimsId);

            if (measurements == null) {
                if (Boolean.TRUE.equals(mergeMetricsFlag)) {
                    measurements = new Measurements(name, new HashMap<String, String>(),
                            new ArrayList<Object[]>());

                    results.put(defId, measurements);
                } else {
                    measurements = new Measurements(name,
                            MetricQueries.dimensionsFor(h, (byte[]) dimSetIdSet.toArray()[0]),
                            new ArrayList<Object[]>());
                    results.put(defdimsId, measurements);
                }
            }

            measurements.addMeasurement(new Object[] { timestamp, value, valueMetaMap });
        }

        return new ArrayList<>(results.values());
    }
}

From source file: monasca.api.infrastructure.persistence.vertica.MetricQueries.java

License: Apache License

static void bindOffsetToQuery(Query<Map<String, Object>> query, String offset) {
    List<String> offsets = UNDERSCORE_SPLITTER.splitToList(offset);
    if (offsets.size() > 1) {
        query.bind("offset_id", offsets.get(0));
        query.bind("offset_timestamp", new Timestamp(DateTime.parse(offsets.get(1)).getMillis()));
    } else {
        query.bind("offset_timestamp", new Timestamp(DateTime.parse(offsets.get(0)).getMillis()));
    }
}

From source file: monasca.api.infrastructure.persistence.vertica.StatisticVerticaRepoImpl.java

License: Apache License

@Override
public List<Statistics> find(String tenantId, String name, Map<String, String> dimensions, DateTime startTime,
        DateTime endTime, List<String> statisticsCols, int period, String offset, int limit,
        Boolean mergeMetricsFlag) throws MultipleMetricsException {

    List<Statistics> statisticsList = new ArrayList<>();

    // Sort the column names so that they match the order of the statistics in the results.
    List<String> statisticsColumns = createColumnsList(statisticsCols);

    try (Handle h = db.open()) {

        Map<byte[], Statistics> byteMap = findDefIds(h, tenantId, name, dimensions);

        if (byteMap.isEmpty()) {

            return statisticsList;

        }

        if (!Boolean.TRUE.equals(mergeMetricsFlag) && byteMap.keySet().size() > 1) {

            throw new MultipleMetricsException(name, dimensions);

        }

        List<List<Object>> statisticsListList = new ArrayList<>();

        String sql = createQuery(byteMap.keySet(), period, startTime, endTime, offset, statisticsCols);

        logger.debug("vertica sql: {}", sql);

        Query<Map<String, Object>> query = h.createQuery(sql).bind("start_time", startTime)
                .bind("end_time", endTime).bind("limit", limit + 1);

        if (offset != null && !offset.isEmpty()) {
            logger.debug("binding offset: {}", offset);
            query.bind("offset", new Timestamp(DateTime.parse(offset).getMillis()));
        }

        List<Map<String, Object>> rows = query.list();

        for (Map<String, Object> row : rows) {

            List<Object> statisticsRow = parseRow(row);

            statisticsListList.add(statisticsRow);

        }

        // Just use the first entry in the byteMap to get the def name and dimensions.
        Statistics statistics = byteMap.entrySet().iterator().next().getValue();

        statistics.setColumns(statisticsColumns);

        if (Boolean.TRUE.equals(mergeMetricsFlag) && byteMap.keySet().size() > 1) {

            // Wipe out the dimensions.
            statistics.setDimensions(new HashMap<String, String>());

        }

        statistics.setStatistics(statisticsListList);

        statisticsList.add(statistics);

    }

    return statisticsList;
}

From source file: mvm.rya.indexing.accumulo.temporal.AccumuloTemporalIndexer.java

License: Apache License

/**
 * Parses the literal dates from the object of a statement.
 *
 * @param statement       the statement whose object literal holds the date/time value(s)
 * @param outputDateTimes a two-element array that receives the parsed start instant and optional end instant
 */
private void extractDateTime(Statement statement, DateTime[] outputDateTimes) {
    if (!(statement.getObject() instanceof Literal)) // Error since it should already be tested by caller.
        throw new RuntimeException("Statement's object must be a literal: " + statement);
    // throws IllegalArgumentException or NumberFormatException if the value cannot be parsed
    String logThis = null;
    Literal literalValue = (Literal) statement.getObject();
    // First, attempt to parse an interval in the form "[date1,date2]"
    Matcher matcher = Pattern.compile("\\[(.*)\\,(.*)\\].*").matcher(literalValue.stringValue());
    if (matcher.find()) {
        try {
            // Got a datetime pair, parse into an interval.
            outputDateTimes[0] = new DateTime(matcher.group(1));
            outputDateTimes[1] = new DateTime(matcher.group(2));
            return;
        } catch (java.lang.IllegalArgumentException e) {
            logThis = e.getMessage() + " " + logThis;
            outputDateTimes[0] = null;
            outputDateTimes[1] = null;
        }
    }

    try {
        XMLGregorianCalendar calendarValue = literalValue.calendarValue();
        outputDateTimes[0] = new DateTime(calendarValue.toGregorianCalendar());
        outputDateTimes[1] = null;
        return;
    } catch (java.lang.IllegalArgumentException e) {
        logThis = e.getMessage();
    }
    // Try again using Joda Time DateTime.parse()
    try {
        outputDateTimes[0] = DateTime.parse(literalValue.stringValue());
        outputDateTimes[1] = null;
        //System.out.println(">>>>>>>Joda parsed: "+literalValue.stringValue());
        return;
    } catch (java.lang.IllegalArgumentException e) {
        logThis = e.getMessage() + " " + logThis;
    }
    logger.warn("TemporalIndexer is unable to parse the date/time from statement=" + statement.toString() + " "
            + logThis);
    return;
}

From source file: net.bunselmeyer.mongo.maven.plugin.MigrateMojo.java

License: Apache License

protected ImmutableListMultimap<MIGRATION_CHECK, MigrationDetails> buildStatusIndex(
        Set<Class<? extends Migration>> allMigrations) {
    Iterable<MigrationDetails> migrationStatus = Iterables.transform(allMigrations,
            new Function<Class<? extends Migration>, MigrationDetails>() {
                public MigrationDetails apply(Class<? extends Migration> input) {
                    if (input == null) {
                        return new MigrationDetails(MIGRATION_CHECK.ERROR,
                                "Failed to load migration from classloader.", input);
                    }
                    Connection connection = input.getAnnotation(Connection.class);
                    if (connection == null) {
                        return new MigrationDetails(MIGRATION_CHECK.WARNING,
                                "Migration does not have @Connection", input);
                    }

                    if (StringUtils.isBlank(connection.db())) {
                        return new MigrationDetails(MIGRATION_CHECK.ERROR, "Empty db property in @Connection",
                                input);
                    }

                    if (StringUtils.isBlank(connection.version())) {
                        return new MigrationDetails(MIGRATION_CHECK.ERROR,
                                "Empty version property in @Connection", input);
                    }

                    try {
                        DateTime version = DateTime.parse(connection.version());
                        String host = StringUtils.isNotBlank(connection.host()) ? connection.host()
                                : MigrateMojo.this.host;
                        return version != null ? //
                        new MigrationDetails(input, version, host, connection.db()) : //
                        new MigrationDetails(MIGRATION_CHECK.ERROR,
                                "Failed to parse @version to timestamp in @Connection", input);
                    } catch (Exception e) {
                        return new MigrationDetails(MIGRATION_CHECK.ERROR,
                                "Failed to parse @version to timestamp in @Connection", input);
                    }
                }
            });

    return Multimaps.index(migrationStatus, new Function<MigrationDetails, MIGRATION_CHECK>() {
        public MIGRATION_CHECK apply(MigrationDetails input) {
            return input.status;
        }
    });
}

From source file: net.es.netshell.api.ISODateTime.java

License: Open Source License

public void setIsoDateTime(String isoDateTime) {
    this.isoDateTime = isoDateTime;
    this.dateTime = DateTime.parse(isoDateTime);
}

From source file: nl.ulso.sprox.atom.DateTimeParser.java

License: Apache License

@Override
public DateTime fromString(String value) throws ParseException {
    try {
        return DateTime.parse(value);
    } catch (IllegalArgumentException e) {
        throw new ParseException(DateTime.class, value, e);
    }
}

From source file: no.difi.oxalis.statistics.inbound.StatisticsServlet.java

License: EUPL

private Date parseDate(String dateAsString) {
    if (dateAsString != null) {
        try {
            // JODA time is really the king of date and time parsing :-)
            DateTime date = DateTime.parse(dateAsString);
            return date.toDate();
        } catch (Exception e) {
            throw new IllegalStateException(String.format(
                    "Unable to parseMultipart '%s'into a date and time using ISO8601 pattern YYYY-MM-DD HH",
                    dateAsString));
        }
    }
    return null;
}

From source file: oncue.backingstore.RedisBackingStore.java

License: Apache License

/**
 * Construct a job from a given Job ID
 * 
 * @param id
 *            is the id of the job
 * @param redis
 *            is a connection to Redis
 * @return a {@linkplain Job} that represents the job hash in Redis
 */
@SuppressWarnings("unchecked")
public static Job loadJob(long id, Jedis redis) {
    String jobKey = String.format(JOB_KEY, id);
    Job job;

    try {
        DateTime enqueuedAt = DateTime.parse(redis.hget(jobKey, JOB_ENQUEUED_AT));

        DateTime startedAt = null;
        String startedAtRaw = redis.hget(jobKey, JOB_STARTED_AT);
        if (startedAtRaw != null)
            startedAt = DateTime.parse(startedAtRaw);

        DateTime completedAt = null;
        String completedAtRaw = redis.hget(jobKey, JOB_COMPLETED_AT);
        if (completedAtRaw != null)
            completedAt = DateTime.parse(completedAtRaw);

        String workerType = redis.hget(jobKey, JOB_WORKER_TYPE);
        String state = redis.hget(jobKey, JOB_STATE);
        String progress = redis.hget(jobKey, JOB_PROGRESS);
        String params = redis.hget(jobKey, JOB_PARAMS);
        String errorMessage = redis.hget(jobKey, JOB_ERROR_MESSAGE);
        String rerunStatus = redis.hget(jobKey, JOB_RERUN_STATUS);

        job = new Job(new Long(id), workerType);
        job.setEnqueuedAt(enqueuedAt);

        if (startedAt != null)
            job.setStartedAt(startedAt);

        if (completedAt != null)
            job.setCompletedAt(completedAt);

        job.setRerun(Boolean.parseBoolean(rerunStatus));

        if (params != null)
            job.setParams((Map<String, String>) JSONValue.parse(params));

        if (state != null)
            job.setState(State.valueOf(state.toUpperCase()));

        if (progress != null)
            job.setProgress(new Double(progress));

        if (errorMessage != null)
            job.setErrorMessage(errorMessage);

    } catch (Exception e) {
        throw new RuntimeException(String.format("Could not load job with id %s from Redis", id), e);
    }

    return job;
}

From source file: org.activiti.engine.impl.calendar.DueDateBusinessCalendar.java

License: Apache License

@Override
public Date resolveDuedate(String duedate, int maxIterations) {
    try {
        // check if due period was specified
        if (duedate.startsWith("P")) {
            return DateTime.now().plus(Period.parse(duedate)).toDate();
        }

        return DateTime.parse(duedate).toDate();

    } catch (Exception e) {
        throw new ActivitiException("couldn't resolve duedate: " + e.getMessage(), e);
    }
}