List of usage examples for org.joda.time.format.DateTimeFormatter.parseDateTime
public DateTime parseDateTime(String text)
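A minimal, self-contained sketch of the basic call, shown here for orientation; the class name, patterns, and sample strings are illustrative only and do not come from any of the source files listed below.

import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;

public class ParseDateTimeDemo { // hypothetical demo class
    public static void main(String[] args) {
        // Parse with an explicit pattern
        DateTimeFormatter dtf = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
        DateTime dt = dtf.parseDateTime("2015-06-01 13:45:00");

        // Parse an ISO-8601 timestamp, as several of the examples below do
        DateTime iso = ISODateTimeFormat.dateTime().parseDateTime("2015-06-01T13:45:00.000Z");

        System.out.println(dt + " / " + iso);
        // parseDateTime throws IllegalArgumentException if the text does not match the pattern
    }
}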
From source file:edu.usf.cutr.gtfs_realtime.bullrunner.GtfsRealtimeProviderImpl.java
License:Apache License
private int calcDelayTime(long arrivalTime) {
    int diff;
    String pattern = "yyyy-MM-dd'T'HH:mm:ssZ";
    DateTimeFormatter dtf = DateTimeFormat.forPattern(pattern);
    DateTime parsedDate = dtf.parseDateTime(responseTimeStamp);
    diff = (int) (arrivalTime - parsedDate.getMillis() / 1000);
    return diff;
}
From source file:edu.usf.cutr.gtfs_realtime.bullrunner.GtfsRealtimeProviderImpl.java
License:Apache License
private String convert2FormattedTime(String myTimeStamp) {
    DateTimeFormatter dtf = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZ");
    DateTime jodatime = dtf.parseDateTime(myTimeStamp);
    DateTimeFormatter dtfOut = DateTimeFormat.forPattern("HH:mm:ss");
    return dtfOut.print(jodatime);
}
From source file:edu.uta.courses.service.impl.EventServiceImpl.java
@Override
public DateTime convertToDateTime(String time) {
    DateTimeFormatter formatter = DateTimeFormat.forPattern("dd.MM.yyyy kk:mm");
    DateTime dt = formatter.parseDateTime(time);
    return dt;
}
From source file:energy.usef.dso.event.endpoint.EventEndpoint.java
License:Apache License
private static LocalDate parseDate(String value) {
    DateTimeFormatter formatter = DateTimeFormat.forPattern(DATE_PATTERN);
    DateTime dateTime = formatter.parseDateTime(value);
    return dateTime.toLocalDate();
}
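Where only the calendar date is needed, as in the example above, Joda-Time 2.0+ formatters can also parse straight to LocalDate. A short alternative sketch, not taken from the USEF source above; DATE_PATTERN is that project's constant, so the literal pattern here is only illustrative:

// Alternative sketch: parse directly to LocalDate, skipping the intermediate DateTime
DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd"); // illustrative pattern
LocalDate date = formatter.parseLocalDate("2015-06-01");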
From source file:es.pode.adl.datamodels.datatypes.DateTimeValidator.java
License:Open Source License
/**
 * Compares two valid data model elements for equality.
 *
 * @param iFirst The first value being compared.
 * @param iSecond The second value being compared.
 * @param iDelimiters The common set of delimiters associated with the values being compared.
 * @return Returns <code>true</code> if the two values are equal, otherwise <code>false</code>.
 */
public boolean compare(String iFirst, String iSecond, Vector iDelimiters) {
    boolean equal = true;
    DateTimeFormatter dtp = ISODateTimeFormat.dateTimeParser();
    try {
        // Parse the first string and remove the sub-seconds
        DateTime dt1 = dtp.parseDateTime(iFirst);
        dt1 = new DateTime(dt1.getYear(), dt1.getMonthOfYear(), dt1.getDayOfMonth(), dt1.getHourOfDay(),
                dt1.getMinuteOfHour(), dt1.getSecondOfMinute(), 0);
        // Parse the second string and remove the sub-seconds
        DateTime dt2 = dtp.parseDateTime(iSecond);
        dt2 = new DateTime(dt2.getYear(), dt2.getMonthOfYear(), dt2.getDayOfMonth(), dt2.getHourOfDay(),
                dt2.getMinuteOfHour(), dt2.getSecondOfMinute(), 0);
        equal = dt1.equals(dt2);
    } catch (Exception e) {
        // String format error -- these cannot be equal
        equal = false;
    }
    return equal;
}
From source file:esiptestbed.mudrod.weblog.pre.CrawlerDetection.java
License:Apache License
/**
 * Check crawler by request sending rate, which is read from configuration file
 * @throws InterruptedException InterruptedException
 * @throws IOException IOException
 */
public void checkByRate() throws InterruptedException, IOException {
    es.createBulkProcessor();
    int rate = Integer.parseInt(props.getProperty("sendingrate"));

    SearchResponse sr = es.getClient().prepareSearch(props.getProperty(MudrodConstants.ES_INDEX_NAME))
            .setTypes(httpType).setQuery(QueryBuilders.matchAllQuery()).setSize(0)
            .addAggregation(AggregationBuilders.terms("Users").field("IP").size(0)).execute().actionGet();
    Terms users = sr.getAggregations().get("Users");
    int userCount = 0;
    Pattern pattern = Pattern.compile("get (.*?) http/*");
    Matcher matcher;
    for (Terms.Bucket entry : users.getBuckets()) {
        QueryBuilder filterSearch = QueryBuilders.boolQuery().must(QueryBuilders.termQuery("IP", entry.getKey()));
        QueryBuilder querySearch = QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(), filterSearch);
        AggregationBuilder aggregation = AggregationBuilders.dateHistogram("by_minute").field("Time")
                .interval(DateHistogramInterval.MINUTE).order(Order.COUNT_DESC);
        SearchResponse checkRobot = es.getClient().prepareSearch(props.getProperty("indexName"))
                .setTypes(httpType, ftpType).setQuery(querySearch).setSize(0).addAggregation(aggregation)
                .execute().actionGet();
        Histogram agg = checkRobot.getAggregations().get("by_minute");
        List<? extends Histogram.Bucket> botList = agg.getBuckets();
        long maxCount = botList.get(0).getDocCount();
        if (maxCount >= rate) {
            // request rate exceeds the threshold: treated as a crawler, skip this user
        } else {
            userCount++;
            DateTime dt1 = null;
            int toLast = 0;
            SearchResponse scrollResp = es.getClient().prepareSearch(props.getProperty("indexName"))
                    .setTypes(httpType, ftpType).setScroll(new TimeValue(60000)).setQuery(querySearch)
                    .setSize(100).execute().actionGet();
            while (true) {
                for (SearchHit hit : scrollResp.getHits().getHits()) {
                    Map<String, Object> result = hit.getSource();
                    String logtype = (String) result.get("LogType");
                    if (logtype.equals("PO.DAAC")) {
                        String request = (String) result.get("Request");
                        matcher = pattern.matcher(request.trim().toLowerCase());
                        boolean find = false;
                        while (matcher.find()) {
                            request = matcher.group(1);
                            result.put("RequestUrl", "http://podaac.jpl.nasa.gov" + request);
                            find = true;
                        }
                        if (!find) {
                            result.put("RequestUrl", request);
                        }
                    } else {
                        result.put("RequestUrl", (String) result.get("Request"));
                    }

                    DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
                    DateTime dt2 = fmt.parseDateTime((String) result.get("Time"));
                    if (dt1 == null) {
                        toLast = 0;
                    } else {
                        toLast = Math.abs(Seconds.secondsBetween(dt1, dt2).getSeconds());
                    }
                    result.put("ToLast", toLast);
                    IndexRequest ir = new IndexRequest(props.getProperty("indexName"), cleanupType).source(result);
                    es.getBulkProcessor().add(ir);
                    dt1 = dt2;
                }
                scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId())
                        .setScroll(new TimeValue(600000)).execute().actionGet();
                if (scrollResp.getHits().getHits().length == 0) {
                    break;
                }
            }
        }
    }
    es.destroyBulkProcessor();
    LOG.info("User count: {}", Integer.toString(userCount));
}
From source file:esiptestbed.mudrod.weblog.pre.SessionGenerator.java
License:Apache License
/**
 * Method to generate session by time threshold and referrer
 * @param timeThres value of time threshold (s)
 * @throws ElasticsearchException ElasticsearchException
 * @throws IOException IOException
 */
public void genSessionByReferer(int timeThres) throws ElasticsearchException, IOException {
    SearchResponse sr = es.getClient().prepareSearch(props.getProperty("indexName")).setTypes(this.cleanupType)
            .setQuery(QueryBuilders.matchAllQuery()).setSize(0)
            .addAggregation(AggregationBuilders.terms("Users").field("IP").size(0)).execute().actionGet();
    Terms users = sr.getAggregations().get("Users");
    int sessionCount = 0;
    for (Terms.Bucket entry : users.getBuckets()) {
        String startTime = null;
        int sessionCountIn = 0;
        QueryBuilder filterSearch = QueryBuilders.boolQuery().must(QueryBuilders.termQuery("IP", entry.getKey()));
        QueryBuilder querySearch = QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(), filterSearch);
        SearchResponse scrollResp = es.getClient().prepareSearch(props.getProperty("indexName"))
                .setTypes(this.cleanupType).setScroll(new TimeValue(60000)).setQuery(querySearch)
                .addSort("Time", SortOrder.ASC).setSize(100).execute().actionGet();

        Map<String, Map<String, DateTime>> sessionReqs = new HashMap<>();
        String request = "";
        String referer = "";
        String logType = "";
        String id = "";
        String ip = entry.getKey().toString();
        String indexUrl = "http://podaac.jpl.nasa.gov/";
        DateTime time = null;
        DateTimeFormatter fmt = ISODateTimeFormat.dateTime();

        while (scrollResp.getHits().getHits().length != 0) {
            for (SearchHit hit : scrollResp.getHits().getHits()) {
                Map<String, Object> result = hit.getSource();
                request = (String) result.get("RequestUrl");
                referer = (String) result.get("Referer");
                logType = (String) result.get("LogType");
                time = fmt.parseDateTime((String) result.get("Time"));
                id = hit.getId();

                if (logType.equals("PO.DAAC")) {
                    if (referer.equals("-") || referer.equals(indexUrl) || !referer.contains(indexUrl)) {
                        sessionCount++;
                        sessionCountIn++;
                        sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                        sessionReqs.get(ip + "@" + sessionCountIn).put(request, time);
                        update(props.getProperty("indexName"), this.cleanupType, id, "SessionID",
                                ip + "@" + sessionCountIn);
                    } else {
                        int count = sessionCountIn;
                        int rollbackNum = 0;
                        while (true) {
                            Map<String, DateTime> requests = sessionReqs.get(ip + "@" + count);
                            if (requests == null) {
                                sessionReqs.put(ip + "@" + count, new HashMap<String, DateTime>());
                                sessionReqs.get(ip + "@" + count).put(request, time);
                                update(props.getProperty("indexName"), this.cleanupType, id, "SessionID",
                                        ip + "@" + count);
                                break;
                            }
                            ArrayList<String> keys = new ArrayList<>(requests.keySet());
                            boolean bFindRefer = false;
                            for (int i = keys.size() - 1; i >= 0; i--) {
                                rollbackNum++;
                                if (keys.get(i).equalsIgnoreCase(referer)) {
                                    bFindRefer = true;
                                    // threshold: if time interval > 10 * click num, start a new session
                                    if (Math.abs(Seconds.secondsBetween(requests.get(keys.get(i)), time)
                                            .getSeconds()) < timeThres * rollbackNum) {
                                        sessionReqs.get(ip + "@" + count).put(request, time);
                                        update(props.getProperty("indexName"), this.cleanupType, id, "SessionID",
                                                ip + "@" + count);
                                    } else {
                                        sessionCount++;
                                        sessionCountIn++;
                                        sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                                        sessionReqs.get(ip + "@" + sessionCountIn).put(request, time);
                                        update(props.getProperty("indexName"), this.cleanupType, id, "SessionID",
                                                ip + "@" + sessionCountIn);
                                    }
                                    break;
                                }
                            }
                            if (bFindRefer) {
                                break;
                            }
                            count--;
                            if (count < 0) {
                                sessionCount++;
                                sessionCountIn++;
                                sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                                sessionReqs.get(ip + "@" + sessionCountIn).put(request, time);
                                update(props.getProperty(MudrodConstants.ES_INDEX_NAME), this.cleanupType, id,
                                        "SessionID", ip + "@" + sessionCountIn);
                                break;
                            }
                        }
                    }
                } else if ("ftp".equals(logType)) {
                    // may affect computation efficiency
                    Map<String, DateTime> requests = sessionReqs.get(ip + "@" + sessionCountIn);
                    if (requests == null) {
                        sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                    } else {
                        ArrayList<String> keys = new ArrayList<>(requests.keySet());
                        int size = keys.size();
                        if (Math.abs(Seconds.secondsBetween(requests.get(keys.get(size - 1)), time)
                                .getSeconds()) > timeThres) {
                            sessionCount += 1;
                            sessionCountIn += 1;
                            sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                        }
                    }
                    sessionReqs.get(ip + "@" + sessionCountIn).put(request, time);
                    update(props.getProperty("indexName"), this.cleanupType, id, "SessionID",
                            ip + "@" + sessionCountIn);
                }
            }
            scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId())
                    .setScroll(new TimeValue(600000)).execute().actionGet();
        }
    }
}
From source file:esiptestbed.mudrod.weblog.pre.SessionGenerator.java
License:Apache License
public void combineShortSessions(int Timethres) throws ElasticsearchException, IOException {
    SearchResponse sr = es.getClient().prepareSearch(props.getProperty("indexName")).setTypes(this.cleanupType)
            .setQuery(QueryBuilders.matchAllQuery())
            .addAggregation(AggregationBuilders.terms("Users").field("IP").size(0)).execute().actionGet();
    Terms users = sr.getAggregations().get("Users");
    for (Terms.Bucket entry : users.getBuckets()) {
        QueryBuilder filterAll = QueryBuilders.boolQuery().must(QueryBuilders.termQuery("IP", entry.getKey()));
        QueryBuilder queryAll = QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(), filterAll);
        SearchResponse checkAll = es.getClient().prepareSearch(props.getProperty("indexName"))
                .setTypes(this.cleanupType).setScroll(new TimeValue(60000)).setQuery(queryAll).setSize(0)
                .execute().actionGet();
        long all = checkAll.getHits().getTotalHits();

        QueryBuilder filterCheck = QueryBuilders.boolQuery().must(QueryBuilders.termQuery("IP", entry.getKey()))
                .must(QueryBuilders.termQuery("Referer", "-"));
        QueryBuilder queryCheck = QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(), filterCheck);
        SearchResponse checkReferer = es.getClient().prepareSearch(props.getProperty("indexName"))
                .setTypes(this.cleanupType).setScroll(new TimeValue(60000)).setQuery(queryCheck).setSize(0)
                .execute().actionGet();
        long numInvalid = checkReferer.getHits().getTotalHits();
        // cast before dividing so the ratio is not truncated by integer division
        double invalidRate = (double) numInvalid / all;
        if (invalidRate >= 0.8 || all < 3) {
            deleteInvalid(entry.getKey().toString());
            continue;
        }

        QueryBuilder filterSearch = QueryBuilders.boolQuery().must(QueryBuilders.termQuery("IP", entry.getKey()));
        QueryBuilder querySearch = QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(), filterSearch);
        MetricsAggregationBuilder statsAgg = AggregationBuilders.stats("Stats").field("Time");
        SearchResponse sr_session = es.getClient().prepareSearch(props.getProperty("indexName"))
                .setTypes(this.cleanupType).setScroll(new TimeValue(60000)).setQuery(querySearch)
                .addAggregation(AggregationBuilders.terms("Sessions").field("SessionID").size(0)
                        .subAggregation(statsAgg))
                .execute().actionGet();

        Terms sessions = sr_session.getAggregations().get("Sessions");
        List<Session> sessionList = new ArrayList<>();
        for (Terms.Bucket session : sessions.getBuckets()) {
            Stats agg = session.getAggregations().get("Stats");
            Session sess = new Session(props, es, agg.getMinAsString(), agg.getMaxAsString(),
                    session.getKey().toString());
            sessionList.add(sess);
        }
        Collections.sort(sessionList);

        DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
        String last = null;
        String lastnewID = null;
        String lastoldID = null;
        String current = null;
        for (Session s : sessionList) {
            current = s.getEndTime();
            if (last != null) {
                if (Seconds.secondsBetween(fmt.parseDateTime(last), fmt.parseDateTime(current))
                        .getSeconds() < Timethres) {
                    if (lastnewID == null) {
                        s.setNewID(lastoldID);
                    } else {
                        s.setNewID(lastnewID);
                    }
                    QueryBuilder fs = QueryBuilders.boolQuery().must(QueryBuilders.termQuery("SessionID", s.getID()));
                    QueryBuilder qs = QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(), fs);
                    SearchResponse scrollResp = es.getClient().prepareSearch(props.getProperty("indexName"))
                            .setTypes(this.cleanupType).setScroll(new TimeValue(60000)).setQuery(qs)
                            .setSize(100).execute().actionGet();
                    while (true) {
                        for (SearchHit hit : scrollResp.getHits().getHits()) {
                            if (lastnewID == null) {
                                update(props.getProperty("indexName"), this.cleanupType, hit.getId(),
                                        "SessionID", lastoldID);
                            } else {
                                update(props.getProperty("indexName"), this.cleanupType, hit.getId(),
                                        "SessionID", lastnewID);
                            }
                        }
                        scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId())
                                .setScroll(new TimeValue(600000)).execute().actionGet();
                        if (scrollResp.getHits().getHits().length == 0) {
                            break;
                        }
                    }
                }
            }
            lastoldID = s.getID();
            lastnewID = s.getNewID();
            last = current;
        }
    }
}
From source file:esiptestbed.mudrod.weblog.pre.SessionStatistic.java
License:Apache License
/**
 * Method to summarize duration, numbers of searching, viewing, and downloading requests, and
 * filter out suspicious sessions
 * @throws IOException IOException
 * @throws InterruptedException InterruptedException
 * @throws ExecutionException ExecutionException
 */
public void processSession() throws IOException, InterruptedException, ExecutionException {
    es.createBulkProcessor();
    String inputType = this.cleanupType;
    String outputType = this.sessionStats;

    MetricsAggregationBuilder<?> statsAgg = AggregationBuilders.stats("Stats").field("Time");
    SearchResponse sr = es.getClient().prepareSearch(props.getProperty(MudrodConstants.ES_INDEX_NAME))
            .setTypes(inputType).setQuery(QueryBuilders.matchAllQuery())
            .addAggregation(
                    AggregationBuilders.terms("Sessions").field("SessionID").size(0).subAggregation(statsAgg))
            .execute().actionGet();
    Terms Sessions = sr.getAggregations().get("Sessions");

    DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
    String min = null;
    String max = null;
    DateTime start = null;
    DateTime end = null;
    int duration = 0;
    float request_rate = 0;
    int session_count = 0;
    Pattern pattern = Pattern.compile("get (.*?) http/*");

    for (Terms.Bucket entry : Sessions.getBuckets()) {
        if (entry.getDocCount() >= 3 && !entry.getKey().equals("invalid")) {
            Stats agg = entry.getAggregations().get("Stats");
            min = agg.getMinAsString();
            max = agg.getMaxAsString();
            start = fmt.parseDateTime(min);
            end = fmt.parseDateTime(max);
            duration = Seconds.secondsBetween(start, end).getSeconds();

            int searchDataListRequest_count = 0;
            int searchDataRequest_count = 0;
            int searchDataListRequest_byKeywords_count = 0;
            int ftpRequest_count = 0;
            int keywords_num = 0;
            String IP = null;
            String keywords = "";
            String views = "";
            String downloads = "";

            QueryBuilder filter_search = QueryBuilders.boolQuery()
                    .must(QueryBuilders.termQuery("SessionID", entry.getKey()));
            QueryBuilder query_search = QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(), filter_search);
            SearchResponse scrollResp = es.getClient().prepareSearch(props.getProperty("indexName"))
                    .setTypes(inputType).setScroll(new TimeValue(60000)).setQuery(query_search).setSize(100)
                    .execute().actionGet();

            while (true) {
                for (SearchHit hit : scrollResp.getHits().getHits()) {
                    Map<String, Object> result = hit.getSource();
                    String request = (String) result.get("Request");
                    String logType = (String) result.get("LogType");
                    IP = (String) result.get("IP");
                    Matcher matcher = pattern.matcher(request.trim().toLowerCase());
                    while (matcher.find()) {
                        request = matcher.group(1);
                    }

                    String datasetlist = "/datasetlist?";
                    String dataset = "/dataset/";
                    if (request.contains(datasetlist)) {
                        searchDataListRequest_count++;
                        RequestUrl requestURL = new RequestUrl(this.props, this.es, null);
                        String info = requestURL.getSearchInfo(request) + ",";
                        if (!info.equals(",")) {
                            if (keywords.equals("")) {
                                keywords = keywords + info;
                            } else {
                                String[] items = info.split(",");
                                String[] keywordList = keywords.split(",");
                                for (int m = 0; m < items.length; m++) {
                                    if (!Arrays.asList(keywordList).contains(items[m])) {
                                        keywords = keywords + items[m] + ",";
                                    }
                                }
                            }
                        }
                    }
                    if (request.startsWith(dataset)) {
                        searchDataRequest_count++;
                        if (findDataset(request) != null) {
                            String view = findDataset(request);
                            if ("".equals(views)) {
                                views = view;
                            } else {
                                if (views.contains(view)) {
                                } else {
                                    views = views + "," + view;
                                }
                            }
                        }
                    }
                    if ("ftp".equals(logType)) {
                        ftpRequest_count++;
                        String download = "";
                        String requestLowercase = request.toLowerCase();
                        if (!requestLowercase.endsWith(".jpg") && !requestLowercase.endsWith(".pdf")
                                && !requestLowercase.endsWith(".txt") && !requestLowercase.endsWith(".gif")) {
                            download = request;
                        }
                        if ("".equals(downloads)) {
                            downloads = download;
                        } else {
                            if (downloads.contains(download)) {
                            } else {
                                downloads = downloads + "," + download;
                            }
                        }
                    }
                }
                scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId())
                        .setScroll(new TimeValue(600000)).execute().actionGet();
                // Break condition: No hits are returned
                if (scrollResp.getHits().getHits().length == 0) {
                    break;
                }
            }

            if (!keywords.equals("")) {
                keywords_num = keywords.split(",").length;
            }

            if (searchDataListRequest_count != 0
                    && searchDataListRequest_count <= Integer.parseInt(props.getProperty("searchf"))
                    && searchDataRequest_count != 0
                    && searchDataRequest_count <= Integer.parseInt(props.getProperty("viewf"))
                    && ftpRequest_count <= Integer.parseInt(props.getProperty("downloadf"))) {
                String sessionURL = props.getProperty("SessionPort") + props.getProperty("SessionUrl")
                        + "?sessionid=" + entry.getKey() + "&sessionType=" + outputType + "&requestType="
                        + inputType;
                session_count++;
                IndexRequest ir = new IndexRequest(props.getProperty("indexName"), outputType)
                        .source(jsonBuilder().startObject().field("SessionID", entry.getKey())
                                .field("SessionURL", sessionURL).field("Request_count", entry.getDocCount())
                                .field("Duration", duration).field("Number of Keywords", keywords_num)
                                .field("Time", min).field("End_time", max)
                                .field("searchDataListRequest_count", searchDataListRequest_count)
                                .field("searchDataListRequest_byKeywords_count",
                                        searchDataListRequest_byKeywords_count)
                                .field("searchDataRequest_count", searchDataRequest_count)
                                .field("keywords", es.customAnalyzing(props.getProperty("indexName"), keywords))
                                .field("views", views).field("downloads", downloads)
                                .field("request_rate", request_rate).field("Comments", "")
                                .field("Validation", 0).field("Produceby", 0).field("Correlation", 0)
                                .field("IP", IP)
                                // .field("Coordinates", loc.latlon)
                                .endObject());
                es.getBulkProcessor().add(ir);
            }
        }
    }
    LOG.info("Session count: {}", Integer.toString(session_count));
    es.destroyBulkProcessor();
}
From source file:etc.HelperUtils.java
License:Apache License
/**
 * The input string has the format: yyyy-MM-dd HH:mm
 *
 * @param input the (hopefully correct) formatted input-string
 * @return the timestamp in milliseconds, or -1 if the String is malformed
 */
public static Long parseTimeString(String input) {
    if (input.equals("0") || input.equals("unlimited")) {
        // return the "TS" for an unlimited Box
        return 0L;
    }
    if (!hasCorrectFormat(input)) {
        // wrong input
        return -1L;
    }
    input = input.trim();
    DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm");
    return formatter.parseDateTime(input).getMillis();
}