List of usage examples for org.joda.time DateTime parse
@FromString public static DateTime parse(String str)
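Before the full examples below, a minimal sketch of the basic call. The ISO-8601 string and the custom pattern here are illustrative values, not taken from any of the listed projects; the single-argument overload parses with Joda-Time's ISO date-time parser, and the two-argument overload accepts a caller-supplied DateTimeFormatter.

import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

public class DateTimeParseExample {
    public static void main(String[] args) {
        // Parse an ISO-8601 string (example value only).
        DateTime parsed = DateTime.parse("2016-01-12T00:00:00.000Z");
        System.out.println(parsed);

        // The two-argument overload takes a custom formatter (pattern is illustrative).
        DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm");
        DateTime custom = DateTime.parse("2016-01-12 08:30", formatter);
        System.out.println(custom);
    }
}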
From source file:com.wisenut.worker.YoutubeWorker.java
License:Apache License
private void setResultData(Iterator<SearchResult> iteratorSearchResults, WNResultData data) {
    data.setProvider("youtube");
    int numberOfResult = 0;
    while (iteratorSearchResults.hasNext()) {
        OpenAPIResult result = new OpenAPIResult();
        SearchResult singleVideo = iteratorSearchResults.next();
        ResourceId rId = singleVideo.getId();
        // Double checks the kind is video.
        if (rId.getKind().equals("youtube#video")) {
            numberOfResult++;
            Thumbnail thumbnail = (Thumbnail) singleVideo.getSnippet().getThumbnails().get("default");
            result.setTitle(StringUtil.removeSpecialCharacter(singleVideo.getSnippet().getTitle()));
            result.setContents(StringUtil.removeSpecialCharacter(singleVideo.getSnippet().getDescription()));
            result.setCreateDate(
                    dtf.print(DateTime.parse(singleVideo.getSnippet().getPublishedAt().toString())));
            result.setLink("https://youtu.be/" + rId.getVideoId());
            result.setAuthor(singleVideo.getSnippet().getChannelTitle());
            result.setThumbnailUrl(thumbnail.getUrl());
            data.addItem(result);
        }
    }
    data.setTotalCount(numberOfResult);
}
From source file:com.yahoo.bard.webservice.data.dimension.impl.KeyValueStoreDimension.java
License:Apache License
@Override
public DateTime getLastUpdated() {
    String lastUpdatedString = keyValueStore.get(lastUpdatedKey);
    if (lastUpdatedString == null) {
        return null;
    } else {
        return DateTime.parse(lastUpdatedString);
    }
}
From source file:com.yahoo.bard.webservice.data.PreResponseDeserializer.java
License:Apache License
/**
 * Creates new Result object from JsonNode.
 *
 * @param serializedResult  JsonNode which contains all the serialized details to generate Result object
 * @param resultSetSchema  Schema of the result to generate the Result object
 *
 * @return Result object generated from given JsonNode
 */
private Result getResult(JsonNode serializedResult, ResultSetSchema resultSetSchema) {
    return new Result(
            extractDimensionValues(
                    serializedResult.get(DIMENSION_VALUES_KEY),
                    resultSetSchema.getColumns(DimensionColumn.class)),
            extractMetricValues(
                    serializedResult.get(METRIC_VALUES_KEY),
                    resultSetSchema.getColumns(MetricColumnWithValueType.class)),
            DateTime.parse(serializedResult.get(TIMESTAMP_KEY).asText()));
}
From source file:com.yahoo.bard.webservice.sql.helper.TimestampUtils.java
License:Apache License
/**
 * Parses a timestamp from a String.
 *
 * @param time  The time to be parsed.
 *
 * @return the timestamp created from this time.
 */
public static Timestamp timestampFromString(String time) {
    return timestampFromDateTime(DateTime.parse(time));
}
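The timestampFromDateTime helper is not shown in this listing; the following is only a hypothetical sketch of what such a conversion could look like, assuming a straightforward epoch-millisecond construction rather than the library's actual implementation.

import java.sql.Timestamp;
import org.joda.time.DateTime;

public final class TimestampConversionSketch {
    // Hypothetical helper: converts a Joda-Time DateTime to java.sql.Timestamp via epoch millis.
    public static Timestamp timestampFromDateTime(DateTime dateTime) {
        return new Timestamp(dateTime.getMillis());
    }

    public static void main(String[] args) {
        // Example input value is illustrative.
        System.out.println(timestampFromDateTime(DateTime.parse("2016-01-12T00:00:00.000Z")));
    }
}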
From source file:com.yahoo.bard.webservice.web.endpoints.JobsEndpointResources.java
License:Apache License
/**
 * Get an instance of PreResponseStore for testing.
 *
 * @return An instance of PreResponseStore.
 */
public static PreResponseStore getPreResponseStore() {
    PreResponseStore preResponseStore = new HashPreResponseStore();
    Granularity granularity = AllGranularity.INSTANCE;

    Map<MetricColumn, Object> metricValues = new HashMap<>();
    MetricColumn pageViewColumn = new MetricColumn("pageViews");
    metricValues.put(pageViewColumn, new BigDecimal(111));

    ResultSetSchema schema = new ResultSetSchema(granularity, Collections.singleton(pageViewColumn));
    Result result = new Result(new HashMap<>(), metricValues, DateTime.parse("2016-01-12T00:00:00.000Z"));
    List<Result> results = new ArrayList<>();
    results.add(result);
    ResultSet resultSet = new ResultSet(schema, results);

    LinkedHashSet<String> apiMetricColumnNames = new LinkedHashSet<>();
    apiMetricColumnNames.add("pageViews");

    ResponseContext responseContext = new ResponseContext();
    responseContext.put("headers", new MultivaluedHashMap<>());
    responseContext.put("apiMetricColumnNames", apiMetricColumnNames);
    responseContext.put("requestedApiDimensionFields", new LinkedHashMap<>());

    PreResponse preResponse = new PreResponse(resultSet, responseContext);
    preResponseStore.save("ticket1", preResponse);
    preResponseStore.save("IExistOnlyInPreResponseStore", preResponse);

    ResponseContext errorResponseContext = new ResponseContext();
    errorResponseContext.put("headers", new MultivaluedHashMap<>());
    errorResponseContext.put(ResponseContextKeys.STATUS.getName(), 500);
    errorResponseContext.put(ResponseContextKeys.ERROR_MESSAGE.getName(), "Error");
    errorResponseContext.put("apiMetricColumnNames", apiMetricColumnNames);
    errorResponseContext.put("requestedApiDimensionFields", new HashMap<>());
    PreResponse errorPresResponse = new PreResponse(resultSet, errorResponseContext);
    preResponseStore.save("errorPreResponse", errorPresResponse);

    // Pagination test resources
    Result result1 = new Result(new HashMap<>(), metricValues, DateTime.parse("2016-01-12T00:00:00.000Z"));
    Result result2 = new Result(new HashMap<>(), metricValues, DateTime.parse("2016-01-13T00:00:00.000Z"));
    Result result3 = new Result(new HashMap<>(), metricValues, DateTime.parse("2016-01-14T00:00:00.000Z"));
    List<Result> results1 = new ArrayList<>();
    results1.add(result1);
    results1.add(result2);
    results1.add(result3);
    ResultSet resultSet1 = new ResultSet(schema, results1);
    PreResponse preResponse1 = new PreResponse(resultSet1, responseContext);
    preResponseStore.save("ticket3p", preResponse1);

    return preResponseStore;
}
From source file:com.yolodata.tbana.hadoop.mapred.splunk.SplunkDataQueryFactory.java
License:Open Source License
public static SplunkDataQuery createWithJobConf(JobConf jobConf) {
    String earliest = jobConf.get(ShuttlInputFormatConstants.EARLIEST_TIME);
    String latest = jobConf.get(ShuttlInputFormatConstants.LATEST_TIME);
    String indexes = jobConf.get(ShuttlInputFormatConstants.INDEX_LIST);
    String[] indexList = indexes.split(SplunkDataQuery.INDEX_LIST_SEPARATOR);

    return new SplunkDataQuery(DateTime.parse(earliest), DateTime.parse(latest), indexList);
}
From source file:controllers.api.DashboardsApiController.java
License:Open Source License
protected List<Map<String, Object>> formatWidgetValueResults(final int maxDataPoints, final Object resultValue,
        final String functionType, final String interval, final Map<String, Object> timeRange,
        final boolean allQuery) {
    final ImmutableList.Builder<Map<String, Object>> pointListBuilder = ImmutableList.builder();

    if (resultValue instanceof Map) {
        final Map<?, ?> resultMap = (Map) resultValue;

        DateTime from;
        if (allQuery) {
            String firstTimestamp = (String) resultMap.entrySet().iterator().next().getKey();
            from = new DateTime(Long.parseLong(firstTimestamp) * 1000, DateTimeZone.UTC);
        } else {
            from = DateTime.parse((String) timeRange.get("from")).withZone(DateTimeZone.UTC);
        }
        final DateTime to = DateTime.parse((String) timeRange.get("to"));

        final MutableDateTime currentTime = new MutableDateTime(from);
        final Duration step = estimateIntervalStep(interval);
        final int dataPoints = (int) ((to.getMillis() - from.getMillis()) / step.getMillis());

        // Using the absolute value guarantees that there will always be enough values for the given resolution.
        final int factor = (maxDataPoints != -1 && dataPoints > maxDataPoints) ? dataPoints / maxDataPoints : 1;

        int index = 0;
        floorToBeginningOfInterval(interval, currentTime);
        while (currentTime.isBefore(to) || currentTime.isEqual(to)) {
            if (index % factor == 0) {
                String timestamp = Long.toString(currentTime.getMillis() / 1000);
                Object value = resultMap.get(timestamp);
                if (functionType != null && value != null) {
                    value = ((Map) value).get(functionType);
                }
                Object result = value == null ? 0 : value;
                final Map<String, Object> point = ImmutableMap.of("x", Long.parseLong(timestamp), "y", result);
                pointListBuilder.add(point);
            }
            index++;
            nextStep(interval, currentTime);
        }
    }

    return pointListBuilder.build();
}
From source file:controllers.api.IndicesApiController.java
License:Open Source License
public Result failures(Integer limit, Integer offset) {
    try {
        IndexerFailuresResponse failures = clusterService.getIndexerFailures(limit, offset);

        // dynatable AJAX format.
        List<Map<String, Object>> records = Lists.newArrayList();
        for (IndexerFailureSummary failure : failures.failures) {
            Map<String, Object> record = Maps.newHashMap();
            record.put("timestamp", DateTools.inUserTimeZone(DateTime.parse(failure.timestamp)).toString());
            record.put("errorMessage", failure.message);
            record.put("index", failure.index);
            record.put("deadLetter", failure.written);
            record.put("letterId", failure.letterId);
            records.add(record);
        }

        Map<String, Object> result = Maps.newHashMap();
        result.put("records", records);
        result.put("queryRecordCount", failures.total);
        result.put("totalRecordCount", failures.total);

        return ok(Json.toJson(result));
    } catch (APIException e) {
        String message = "Could not get indexer failures. We expected HTTP 200, but got a HTTP "
                + e.getHttpCode() + ".";
        return status(504, views.html.errors.error.render(message, e, request()));
    } catch (IOException e) {
        return status(504, views.html.errors.error.render(ApiClient.ERROR_MSG_IO, e, request()));
    }
}
From source file:controllers.api.SearchApiController.java
License:Open Source License
/**
 * Create a list with histogram results that would be serialized to JSON like this
 * <p/>
 * [{ x: -1893456000, y: 92228531 }, { x: -1577923200, y: 106021568 }]
 */
protected List<Map<String, Long>> formatHistogramResults(DateHistogramResult histogram, int maxDataPoints,
        boolean allQuery) {
    final List<Map<String, Long>> points = Lists.newArrayList();
    final Map<String, Long> histogramResults = histogram.getResults();

    DateTime from;
    if (allQuery) {
        String firstTimestamp = histogramResults.entrySet().iterator().next().getKey();
        from = new DateTime(Long.parseLong(firstTimestamp) * 1000, DateTimeZone.UTC);
    } else {
        from = DateTime.parse(histogram.getHistogramBoundaries().getFrom());
    }
    final DateTime to = DateTime.parse(histogram.getHistogramBoundaries().getTo());

    final MutableDateTime currentTime = new MutableDateTime(from);
    final Duration step = estimateIntervalStep(histogram.getInterval());
    final int dataPoints = (int) ((to.getMillis() - from.getMillis()) / step.getMillis());

    // Using the absolute value guarantees that there will always be enough values for the given resolution.
    final int factor = (maxDataPoints != -1 && dataPoints > maxDataPoints) ? dataPoints / maxDataPoints : 1;

    int index = 0;
    floorToBeginningOfInterval(histogram.getInterval(), currentTime);
    while (currentTime.isBefore(to) || currentTime.isEqual(to)) {
        if (index % factor == 0) {
            String timestamp = Long.toString(currentTime.getMillis() / 1000);
            Long result = histogramResults.get(timestamp);
            Map<String, Long> point = Maps.newHashMap();
            point.put("x", Long.parseLong(timestamp));
            point.put("y", result != null ? result : 0);
            points.add(point);
        }
        index++;
        nextStep(histogram.getInterval(), currentTime);
    }

    return points;
}
From source file:controllers.TelemetryController.java
License:Apache License
/**
 * Store new aggregated metrics for streaming to clients.
 *
 * @return Empty response with success/error code.
 */
@BodyParser.Of(Json.class)
public Result report() {
    // TODO(barp): Map with a POJO mapper [MAI-184]
    final ArrayNode list = (ArrayNode) request().body().asJson();
    for (final JsonNode objNode : list) {
        final ObjectNode obj = (ObjectNode) objNode;
        final String service = obj.get("service").asText();
        final String host = obj.get("host").asText();
        final String statistic = obj.get("statistic").asText();
        final String metric = obj.get("metric").asText();
        final double value = obj.get("value").asDouble();
        final String periodStart = obj.get("periodStart").asText();
        final DateTime startTime = DateTime.parse(periodStart);

        _streamContext.tell(new MetricReport(service, host, statistic, metric, value, startTime),
                ActorRef.noSender());
    }
    return ok();
}