Example usage for org.joda.time.format DateTimeFormatter parseDateTime

List of usage examples for org.joda.time.format DateTimeFormatter parseDateTime

Introduction

On this page you can find usage examples for org.joda.time.format DateTimeFormatter parseDateTime.

Prototype

public DateTime parseDateTime(String text) 

Document

Parses a date-time from the given text, returning a new DateTime.
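
A minimal, self-contained sketch of the call (not taken from the examples below; the pattern and input string are illustrative):

import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

public class ParseDateTimeDemo {
    public static void main(String[] args) {
        // parseDateTime throws IllegalArgumentException if the text does not match the pattern
        DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
        DateTime dt = fmt.parseDateTime("2012-06-30 14:05:00");
        System.out.println(dt);
    }
}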

Usage

From source file: fulldatechart.FullDateChart.java

License: Open Source License

public static void main(String[] args) throws IOException, ParseException, ValidationException {
    if (args.length != 2) {
        System.out.println("Usage: .../fulldatechart.jar /path/to/myfinds.gpx <myuserid>");
        System.exit(1);
    }

    File f = new File(args[0]);
    String ownerId = args[1];

    DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'").withZoneUTC();

    System.out.println("Parsing: " + f);
    Document document = Jsoup.parse(f, "UTF-8");

    Set<Slot> dates = new HashSet<Slot>();

    for (int i = 1; i <= 366; i++) {
        dates.add(new Slot(new DateTime().withYear(2012).withTimeAtStartOfDay().withDayOfYear(i)));
    }

    for (Element elem : document.select("groundspeak|date")) {
        Elements siblings = elem.siblingElements();
        String type = siblings.select("groundspeak|type").text();

        if ((type.equals("Found it") || type.equals("Attended"))
                && siblings.select("groundspeak|finder").attr("id").equals(ownerId)) {
            DateTime dt = fmt.parseDateTime(elem.text());

            /*
             * This seems to be necessary to get all log dates to conform to
             * what's displayed on geocaching.com. Certain logs which have a
             * date between 0000 and 0600 should really belong to the
             * previous day.
             */
            dt = dt.minusHours(6);
            dates.remove(new Slot(dt));
        }
    }

    Calendar cal = new Calendar();
    cal.getProperties().add(new ProdId("-//JesperEs/fulldatechart 1.0//EN"));
    cal.getProperties().add(Version.VERSION_2_0);
    cal.getProperties().add(CalScale.GREGORIAN);

    UidGenerator ug = new UidGenerator("1");

    List<Slot> list = new ArrayList<Slot>();
    list.addAll(dates);
    Collections.sort(list);

    for (Slot slot : list) {
        DateTime dt = slot.getNextOccurrence();
        System.out.format("Next occurrence of empty slot: %s -> %s\n", slot, dt);
        VEvent event = new VEvent(new Date(dt.withHourOfDay(19).toDate()), "Cache-day");
        event.getProperties().add(ug.generateUid());
        cal.getComponents().add(event);
    }

    File output = new File("fulldatechart.ics");
    FileOutputStream fout = new FileOutputStream(output);
    try {
        CalendarOutputter out = new CalendarOutputter();
        out.output(cal, fout);
        System.out.println("Calendar written to " + output.getAbsolutePath());
    } finally {
        fout.close();
    }
}

From source file: geomesa.example.twitter.ingest.TwitterParser.java

License: Apache License

private SimpleFeature convertToFeature(final JsonObject obj, final SimpleFeatureBuilder builder,
        final GeometryFactory factory, final DateTimeFormatter df) {
    builder.reset();

    // user info
    final JsonObject user = obj.getAsJsonObject(USER);
    final String userName = user.getAsJsonPrimitive(USER_NAME).getAsString();
    final String userId = user.getAsJsonPrimitive(USER_ID).getAsString();
    builder.set(TwitterFeatureIngester.FEAT_USER_NAME, utf8(userName));
    builder.set(TwitterFeatureIngester.FEAT_USER_ID, userId);

    builder.set(TwitterFeatureIngester.FEAT_TEXT, utf8(obj.get(TEXT).getAsString()));

    // geo info
    final boolean hasGeoJson = obj.has(COORDS_GEO_JSON) && obj.get(COORDS_GEO_JSON) != JsonNull.INSTANCE;
    if (hasGeoJson) {
        final JsonObject geo = obj.getAsJsonObject(COORDS_GEO_JSON);
        final JsonArray coords = geo.getAsJsonArray(COORDS);
        double lat = coords.get(0).getAsDouble();
        double lon = coords.get(1).getAsDouble();

        if (lon != 0.0 && lat != 0.0) {
            final Coordinate coordinate = new Coordinate(lat, lon);
            final Geometry g = new Point(new CoordinateArraySequence(new Coordinate[] { coordinate }), factory);
            builder.set(TwitterFeatureIngester.FEAT_GEOM, g);
        }
    }

    // time and id
    final String tweetId = obj.get(TWEET_ID).getAsString();
    final Date date = df.parseDateTime(obj.get(CREATED_AT).getAsString()).toDate();
    builder.set(TwitterFeatureIngester.FEAT_TWEET_ID, tweetId);
    builder.set(TwitterFeatureIngester.FEAT_DTG, date);

    if (useExtendedFeatures) {
        conditionalSetString(builder, obj, FEAT_IS_RETWEET);
        conditionalSetString(builder, obj, FEAT_SOURCE);
        conditionalSetString(builder, obj, FEAT_RETWEETS);
        conditionalSetString(builder, obj, FEAT_IN_REPLY_TO_USER_ID);
        conditionalSetString(builder, obj, FEAT_IN_REPLY_TO_USER_NAME);
        conditionalSetString(builder, obj, FEAT_IN_REPLY_TO_STATUS);
        conditionalSetString(builder, obj, FEAT_FILTER_LEVEL);
        conditionalSetString(builder, obj, FEAT_LANGUAGE);
        conditionalSetString(builder, obj, FEAT_WITHHELD_COPYRIGHT);
        conditionalSetString(builder, obj, FEAT_WITHHELD_SCOPE);
        conditionalSetArray(builder, obj, FEAT_WITHHELD_COUNTRIES);

        JsonElement entities = obj.get("entities");
        if (entities != null && entities != JsonNull.INSTANCE) {
            JsonObject e = (JsonObject) entities;
            conditionalSetObjectArray(builder, e, FEAT_HASHTAGS, "text");
            conditionalSetObjectArray(builder, e, FEAT_URLS, "url");
            conditionalSetObjectArray(builder, e, FEAT_SYMBOLS, "text");
            conditionalSetObjectArray(builder, e, FEAT_USER_MENTIONS, "id");
            conditionalSetObjectArray(builder, e, FEAT_MEDIA, "media_url");
        }
    }

    return builder.buildFeature(tweetId);
}

From source file: geomesa.example.twitter.ingest.TwitterParser.java

License: Apache License

private void fillFeature(final JsonObject obj, final SimpleFeature sf, final GeometryFactory factory,
        final DateTimeFormatter df) {

    // user info
    final JsonObject user = obj.getAsJsonObject(USER);
    final String userName = user.getAsJsonPrimitive(USER_NAME).getAsString();
    final String userId = user.getAsJsonPrimitive(USER_ID).getAsString();
    sf.setAttribute(TwitterFeatureIngester.FEAT_USER_NAME, utf8(userName));
    sf.setAttribute(TwitterFeatureIngester.FEAT_USER_ID, userId);

    sf.setAttribute(TwitterFeatureIngester.FEAT_TEXT, utf8(obj.get(TEXT).getAsString()));

    // geo info
    final boolean hasGeoJson = obj.has(COORDS_GEO_JSON) && obj.get(COORDS_GEO_JSON) != JsonNull.INSTANCE;
    if (hasGeoJson) {
        final JsonObject geo = obj.getAsJsonObject(COORDS_GEO_JSON);
        final JsonArray coords = geo.getAsJsonArray(COORDS);
        double lat = coords.get(0).getAsDouble();
        double lon = coords.get(1).getAsDouble();

        if (lon != 0.0 && lat != 0.0) {
            final Coordinate coordinate = new Coordinate(lat, lon);
            final Geometry g = new Point(new CoordinateArraySequence(new Coordinate[] { coordinate }), factory);
            sf.setAttribute(TwitterFeatureIngester.FEAT_GEOM, g);
        }
    }

    // time and id
    final String tweetId = obj.get(TWEET_ID).getAsString();
    final Date date = df.parseDateTime(obj.get(CREATED_AT).getAsString()).toDate();
    sf.setAttribute(TwitterFeatureIngester.FEAT_TWEET_ID, tweetId);
    sf.setAttribute(TwitterFeatureIngester.FEAT_DTG, date);

    if (useExtendedFeatures) {
        conditionalSetString(sf, obj, FEAT_IS_RETWEET);
        conditionalSetString(sf, obj, FEAT_SOURCE);
        conditionalSetString(sf, obj, FEAT_RETWEETS);
        conditionalSetString(sf, obj, FEAT_IN_REPLY_TO_USER_ID);
        conditionalSetString(sf, obj, FEAT_IN_REPLY_TO_USER_NAME);
        conditionalSetString(sf, obj, FEAT_IN_REPLY_TO_STATUS);
        conditionalSetString(sf, obj, FEAT_FILTER_LEVEL);
        conditionalSetString(sf, obj, FEAT_LANGUAGE);
        conditionalSetString(sf, obj, FEAT_WITHHELD_COPYRIGHT);
        conditionalSetString(sf, obj, FEAT_WITHHELD_SCOPE);
        conditionalSetArray(sf, obj, FEAT_WITHHELD_COUNTRIES);

        JsonElement entities = obj.get("entities");
        if (entities != null && entities != JsonNull.INSTANCE) {
            JsonObject e = (JsonObject) entities;
            conditionalSetObjectArray(sf, e, FEAT_HASHTAGS, "text");
            conditionalSetObjectArray(sf, e, FEAT_URLS, "url");
            conditionalSetObjectArray(sf, e, FEAT_SYMBOLS, "text");
            conditionalSetObjectArray(sf, e, FEAT_USER_MENTIONS, "id");
            conditionalSetObjectArray(sf, e, FEAT_MEDIA, "media_url");
        }
    }

    //((FeatureIdImpl)sf.getIdentifier()).setID(Long.toString(tweetId));
    //sf.getUserData().put(Hints.USE_PROVIDED_FID, Boolean.TRUE);
}

From source file: gobblin.source.extractor.utils.Utils.java

License: Apache License

/**
 * Convert timestamp in a string format to joda time
 * @param input timestamp
 * @param format timestamp format
 * @param timezone time zone of timestamp
 * @return joda time
 */
public static DateTime toDateTime(String input, String format, String timezone) {
    String tz = StringUtils.defaultString(timezone, ConfigurationKeys.DEFAULT_SOURCE_TIMEZONE);
    DateTimeZone dateTimeZone = getTimeZone(tz);
    DateTimeFormatter inputDtFormat = DateTimeFormat.forPattern(format).withZone(dateTimeZone);
    DateTime outputDateTime = inputDtFormat.parseDateTime(input).withZone(dateTimeZone);
    return outputDateTime;
}
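
A hypothetical call to the helper above (the input string, pattern, and timezone are assumptions for illustration, not values from the Gobblin source):

import org.joda.time.DateTime;
import gobblin.source.extractor.utils.Utils;

// Illustrative arguments only; any pattern accepted by DateTimeFormat.forPattern works here.
DateTime dt = Utils.toDateTime("2017-03-01 10:15:30", "yyyy-MM-dd HH:mm:ss", "America/Los_Angeles");
// dt carries the parsed instant in the America/Los_Angeles zone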

From source file: gov.nasa.jpl.mudrod.weblog.pre.CrawlerDetection.java

License: Apache License

private int checkByRate(ESDriver es, String user) {

    int rate = Integer.parseInt(props.getProperty("sendingrate"));
    Pattern pattern = Pattern.compile("get (.*?) http/*");
    Matcher matcher;

    BoolQueryBuilder filterSearch = new BoolQueryBuilder();
    filterSearch.must(QueryBuilders.termQuery("IP", user));

    AggregationBuilder aggregation = AggregationBuilders.dateHistogram("by_minute").field("Time")
            .dateHistogramInterval(DateHistogramInterval.MINUTE).order(Order.COUNT_DESC);
    SearchResponse checkRobot = es.getClient().prepareSearch(logIndex).setTypes(httpType, ftpType)
            .setQuery(filterSearch).setSize(0).addAggregation(aggregation).execute().actionGet();

    Histogram agg = checkRobot.getAggregations().get("by_minute");

    List<? extends Histogram.Bucket> botList = agg.getBuckets();
    long maxCount = botList.get(0).getDocCount();
    if (maxCount >= rate) {
        return 0;
    } else {
        DateTime dt1 = null;
        int toLast = 0;
        SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(httpType, ftpType)
                .setScroll(new TimeValue(60000)).setQuery(filterSearch).setSize(100).execute().actionGet();
        while (true) {
            for (SearchHit hit : scrollResp.getHits().getHits()) {
                Map<String, Object> result = hit.getSource();
                String logtype = (String) result.get("LogType");
                if (logtype.equals("PO.DAAC")) {
                    String request = (String) result.get("Request");
                    matcher = pattern.matcher(request.trim().toLowerCase());
                    boolean find = false;
                    while (matcher.find()) {
                        request = matcher.group(1);
                        result.put("RequestUrl", "http://podaac.jpl.nasa.gov" + request);
                        find = true;
                    }
                    if (!find) {
                        result.put("RequestUrl", request);
                    }
                } else {
                    result.put("RequestUrl", result.get("Request"));
                }

                DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
                DateTime dt2 = fmt.parseDateTime((String) result.get("Time"));

                if (dt1 == null) {
                    toLast = 0;
                } else {
                    toLast = Math.abs(Seconds.secondsBetween(dt1, dt2).getSeconds());
                }
                result.put("ToLast", toLast);
                IndexRequest ir = new IndexRequest(logIndex, cleanupType).source(result);

                es.getBulkProcessor().add(ir);
                dt1 = dt2;
            }

            scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId())
                    .setScroll(new TimeValue(600000)).execute().actionGet();
            if (scrollResp.getHits().getHits().length == 0) {
                break;
            }
        }

    }

    return 1;
}

From source file: gov.nasa.jpl.mudrod.weblog.pre.SessionGenerator.java

License: Apache License

public int genSessionByReferer(ESDriver es, String user, int timeThres)
        throws ElasticsearchException, IOException {

    String startTime = null;
    int sessionCountIn = 0;

    BoolQueryBuilder filterSearch = new BoolQueryBuilder();
    filterSearch.must(QueryBuilders.termQuery("IP", user));

    SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(this.cleanupType)
            .setScroll(new TimeValue(60000)).setQuery(filterSearch).addSort("Time", SortOrder.ASC).setSize(100)
            .execute().actionGet();

    Map<String, Map<String, DateTime>> sessionReqs = new HashMap<>();
    String request = "";
    String referer = "";
    String logType = "";
    String id = "";
    String ip = user;
    String indexUrl = "http://podaac.jpl.nasa.gov/";
    DateTime time = null;
    DateTimeFormatter fmt = ISODateTimeFormat.dateTime();

    while (scrollResp.getHits().getHits().length != 0) {
        for (SearchHit hit : scrollResp.getHits().getHits()) {
            Map<String, Object> result = hit.getSource();
            request = (String) result.get("RequestUrl");
            referer = (String) result.get("Referer");
            logType = (String) result.get("LogType");
            time = fmt.parseDateTime((String) result.get("Time"));
            id = hit.getId();

            if ("PO.DAAC".equals(logType)) {
                if ("-".equals(referer) || referer.equals(indexUrl) || !referer.contains(indexUrl)) {
                    sessionCountIn++;
                    sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                    sessionReqs.get(ip + "@" + sessionCountIn).put(request, time);

                    update(es, logIndex, this.cleanupType, id, "SessionID", ip + "@" + sessionCountIn);

                } else {
                    int count = sessionCountIn;
                    int rollbackNum = 0;
                    while (true) {
                        Map<String, DateTime> requests = sessionReqs.get(ip + "@" + count);
                        if (requests == null) {
                            sessionReqs.put(ip + "@" + count, new HashMap<String, DateTime>());
                            sessionReqs.get(ip + "@" + count).put(request, time);
                            update(es, logIndex, this.cleanupType, id, "SessionID", ip + "@" + count);

                            break;
                        }
                        ArrayList<String> keys = new ArrayList<>(requests.keySet());
                        boolean bFindRefer = false;

                        for (int i = keys.size() - 1; i >= 0; i--) {
                            rollbackNum++;
                            if (keys.get(i).equalsIgnoreCase(referer)) {
                                bFindRefer = true;
                                // threshold: if the time since the referring request exceeds
                                // timeThres * rollback count, start a new session
                                if (Math.abs(Seconds.secondsBetween(requests.get(keys.get(i)), time)
                                        .getSeconds()) < timeThres * rollbackNum) {
                                    sessionReqs.get(ip + "@" + count).put(request, time);
                                    update(es, logIndex, this.cleanupType, id, "SessionID", ip + "@" + count);
                                } else {
                                    sessionCountIn++;
                                    sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                                    sessionReqs.get(ip + "@" + sessionCountIn).put(request, time);
                                    update(es, logIndex, this.cleanupType, id, "SessionID",
                                            ip + "@" + sessionCountIn);
                                }

                                break;
                            }
                        }

                        if (bFindRefer) {
                            break;
                        }

                        count--;
                        if (count < 0) {
                            sessionCountIn++;

                            sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                            sessionReqs.get(ip + "@" + sessionCountIn).put(request, time);
                            update(es, props.getProperty(MudrodConstants.ES_INDEX_NAME), this.cleanupType, id,
                                    "SessionID", ip + "@" + sessionCountIn);

                            break;
                        }
                    }
                }
            } else if ("ftp".equals(logType)) {

                // may affect computation efficiency
                Map<String, DateTime> requests = sessionReqs.get(ip + "@" + sessionCountIn);
                if (requests == null) {
                    sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                } else {
                    ArrayList<String> keys = new ArrayList<>(requests.keySet());
                    int size = keys.size();
                    if (Math.abs(Seconds.secondsBetween(requests.get(keys.get(size - 1)), time)
                            .getSeconds()) > timeThres) {
                        sessionCountIn += 1;
                        sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                    }
                }
                sessionReqs.get(ip + "@" + sessionCountIn).put(request, time);
                update(es, logIndex, this.cleanupType, id, "SessionID", ip + "@" + sessionCountIn);
            }
        }

        scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId())
                .setScroll(new TimeValue(600000)).execute().actionGet();
    }

    return sessionCountIn;
}

From source file: gov.nasa.jpl.mudrod.weblog.pre.SessionGenerator.java

License: Apache License

public void combineShortSessions(ESDriver es, String user, int timeThres)
        throws ElasticsearchException, IOException {

    BoolQueryBuilder filterSearch = new BoolQueryBuilder();
    filterSearch.must(QueryBuilders.termQuery("IP", user));

    String[] indexArr = new String[] { logIndex };
    String[] typeArr = new String[] { cleanupType };
    int docCount = es.getDocCount(indexArr, typeArr, filterSearch);

    if (docCount < 3) {
        deleteInvalid(es, user);
        return;
    }

    BoolQueryBuilder filterCheck = new BoolQueryBuilder();
    filterCheck.must(QueryBuilders.termQuery("IP", user)).must(QueryBuilders.termQuery("Referer", "-"));
    SearchResponse checkReferer = es.getClient().prepareSearch(logIndex).setTypes(this.cleanupType)
            .setScroll(new TimeValue(60000)).setQuery(filterCheck).setSize(0).execute().actionGet();

    long numInvalid = checkReferer.getHits().getTotalHits();
    // cast to double so the rate is not truncated to zero by integer division
    double invalidRate = (double) numInvalid / docCount;

    if (invalidRate >= 0.8) {
        deleteInvalid(es, user);
        return;
    }

    StatsAggregationBuilder statsAgg = AggregationBuilders.stats("Stats").field("Time");
    SearchResponse srSession = es.getClient().prepareSearch(logIndex).setTypes(this.cleanupType)
            .setScroll(new TimeValue(60000)).setQuery(filterSearch).addAggregation(AggregationBuilders
                    .terms("Sessions").field("SessionID").size(docCount).subAggregation(statsAgg))
            .execute().actionGet();

    Terms sessions = srSession.getAggregations().get("Sessions");

    List<Session> sessionList = new ArrayList<>();
    for (Terms.Bucket session : sessions.getBuckets()) {
        Stats agg = session.getAggregations().get("Stats");
        Session sess = new Session(props, es, agg.getMinAsString(), agg.getMaxAsString(),
                session.getKey().toString());
        sessionList.add(sess);
    }

    Collections.sort(sessionList);
    DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
    String last = null;
    String lastnewID = null;
    String lastoldID = null;
    String current = null;
    for (Session s : sessionList) {
        current = s.getEndTime();
        if (last != null) {
            if (Seconds.secondsBetween(fmt.parseDateTime(last), fmt.parseDateTime(current))
                    .getSeconds() < timeThres) {
                if (lastnewID == null) {
                    s.setNewID(lastoldID);
                } else {
                    s.setNewID(lastnewID);
                }

                QueryBuilder fs = QueryBuilders.boolQuery()
                        .filter(QueryBuilders.termQuery("SessionID", s.getID()));

                SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(this.cleanupType)
                        .setScroll(new TimeValue(60000)).setQuery(fs).setSize(100).execute().actionGet();
                while (true) {
                    for (SearchHit hit : scrollResp.getHits().getHits()) {
                        if (lastnewID == null) {
                            update(es, logIndex, this.cleanupType, hit.getId(), "SessionID", lastoldID);
                        } else {
                            update(es, logIndex, this.cleanupType, hit.getId(), "SessionID", lastnewID);
                        }
                    }

                    scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId())
                            .setScroll(new TimeValue(600000)).execute().actionGet();
                    if (scrollResp.getHits().getHits().length == 0) {
                        break;
                    }
                }
            }
        }
        lastoldID = s.getID();
        lastnewID = s.getNewID();
        last = current;
    }

}

From source file: gov.nasa.jpl.mudrod.weblog.pre.SessionStatistic.java

License: Apache License

public int processSession(ESDriver es, String sessionId)
        throws IOException, InterruptedException, ExecutionException {

    String inputType = cleanupType;
    String outputType = sessionStats;

    DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
    String min = null;
    String max = null;
    DateTime start = null;
    DateTime end = null;
    int duration = 0;
    float request_rate = 0;

    int session_count = 0;
    Pattern pattern = Pattern.compile("get (.*?) http/*");

    StatsAggregationBuilder statsAgg = AggregationBuilders.stats("Stats").field("Time");

    BoolQueryBuilder filter_search = new BoolQueryBuilder();
    filter_search.must(QueryBuilders.termQuery("SessionID", sessionId));

    SearchResponse sr = es.getClient().prepareSearch(logIndex).setTypes(inputType).setQuery(filter_search)
            .addAggregation(statsAgg).execute().actionGet();

    Stats agg = sr.getAggregations().get("Stats");
    min = agg.getMinAsString();
    max = agg.getMaxAsString();
    start = fmt.parseDateTime(min);
    end = fmt.parseDateTime(max);

    duration = Seconds.secondsBetween(start, end).getSeconds();

    int searchDataListRequest_count = 0;
    int searchDataRequest_count = 0;
    int searchDataListRequest_byKeywords_count = 0;
    int ftpRequest_count = 0;
    int keywords_num = 0;

    String IP = null;
    String keywords = "";
    String views = "";
    String downloads = "";

    SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(inputType)
            .setScroll(new TimeValue(60000)).setQuery(filter_search).setSize(100).execute().actionGet();

    while (true) {
        for (SearchHit hit : scrollResp.getHits().getHits()) {
            Map<String, Object> result = hit.getSource();

            String request = (String) result.get("Request");
            String logType = (String) result.get("LogType");
            IP = (String) result.get("IP");
            Matcher matcher = pattern.matcher(request.trim().toLowerCase());
            while (matcher.find()) {
                request = matcher.group(1);
            }

            String datasetlist = "/datasetlist?";
            String dataset = "/dataset/";
            if (request.contains(datasetlist)) {
                searchDataListRequest_count++;

                RequestUrl requestURL = new RequestUrl();
                String infoStr = requestURL.getSearchInfo(request) + ",";
                String info = es.customAnalyzing(props.getProperty("indexName"), infoStr);

                if (!info.equals(",")) {
                    if (keywords.equals("")) {
                        keywords = keywords + info;
                    } else {
                        String[] items = info.split(",");
                        String[] keywordList = keywords.split(",");
                        for (int m = 0; m < items.length; m++) {
                            if (!Arrays.asList(keywordList).contains(items[m])) {
                                keywords = keywords + items[m] + ",";
                            }
                        }
                    }
                }

            }
            if (request.startsWith(dataset)) {
                searchDataRequest_count++;
                if (findDataset(request) != null) {
                    String view = findDataset(request);

                    if ("".equals(views)) {
                        views = view;
                    } else {
                        if (views.contains(view)) {

                        } else {
                            views = views + "," + view;
                        }
                    }
                }
            }
            if ("ftp".equals(logType)) {
                ftpRequest_count++;
                String download = "";
                String requestLowercase = request.toLowerCase();
                if (!requestLowercase.endsWith(".jpg") && !requestLowercase.endsWith(".pdf")
                        && !requestLowercase.endsWith(".txt") && !requestLowercase.endsWith(".gif")) {
                    download = request;
                }

                if ("".equals(downloads)) {
                    downloads = download;
                } else if (!downloads.contains(download)) {
                    downloads = downloads + "," + download;
                }
            }

        }

        scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId())
                .setScroll(new TimeValue(600000)).execute().actionGet();
        // Break condition: No hits are returned
        if (scrollResp.getHits().getHits().length == 0) {
            break;
        }
    }

    if (!keywords.equals("")) {
        keywords_num = keywords.split(",").length;
    }

    if (searchDataListRequest_count != 0
            && searchDataListRequest_count <= Integer.parseInt(props.getProperty("searchf"))
            && searchDataRequest_count != 0
            && searchDataRequest_count <= Integer.parseInt(props.getProperty("viewf"))
            && ftpRequest_count <= Integer.parseInt(props.getProperty("downloadf"))) {
        String sessionURL = props.getProperty("SessionPort") + props.getProperty("SessionUrl") + "?sessionid="
                + sessionId + "&sessionType=" + outputType + "&requestType=" + inputType;
        session_count = 1;

        IndexRequest ir = new IndexRequest(logIndex, outputType).source(jsonBuilder().startObject()
                .field("SessionID", sessionId).field("SessionURL", sessionURL).field("Duration", duration)
                .field("Number of Keywords", keywords_num).field("Time", min).field("End_time", max)
                .field("searchDataListRequest_count", searchDataListRequest_count)
                .field("searchDataListRequest_byKeywords_count", searchDataListRequest_byKeywords_count)
                .field("searchDataRequest_count", searchDataRequest_count)
                .field("keywords", es.customAnalyzing(logIndex, keywords)).field("views", views)
                .field("downloads", downloads).field("request_rate", request_rate).field("Comments", "")
                .field("Validation", 0).field("Produceby", 0).field("Correlation", 0).field("IP", IP)
                .endObject());

        es.getBulkProcessor().add(ir);
    }

    return session_count;
}

From source file: graph.inference.module.LaterThanWorker.java

License: Open Source License

private Interval parseDate(DAGNode date, DateTime now) {
    String dateStr = date.toString();
    if (dateStr.equals("Now") || dateStr.equals("Now-Generally"))
        return new Interval(now.getMillis(), now.getMillis() + 1);
    if (dateStr.equals("Today-Indexical"))
        return new Interval(now.dayOfYear().roundFloorCopy(), now.dayOfYear().roundCeilingCopy());
    if (dateStr.equals("Tomorrow-Indexical")) {
        return new Interval(now.plusDays(1).dayOfYear().roundFloorCopy(),
                now.plusDays(1).dayOfYear().roundCeilingCopy());
    }
    if (dateStr.equals("Yesterday-Indexical")) {
        return new Interval(now.minusDays(1).dayOfYear().roundFloorCopy(),
                now.minusDays(1).dayOfYear().roundCeilingCopy());
    }
    if (dateStr.equals("TheYear-Indexical")) {
        return new Interval(now.year().roundFloorCopy(), now.year().roundCeilingCopy());
    }

    // Parse the date from the DAGNode
    String parsePattern = null;
    for (int i = DATE_PARSE_INTERVALS.length - 1; i >= 0; i--) {
        StringBuilder newPattern = new StringBuilder("(" + DATE_PARSE_INTERVALS[i]);
        if (parsePattern != null)
            newPattern.append(" " + parsePattern);
        newPattern.append(")");
        parsePattern = newPattern.toString();

        DateTimeFormatter dtf = DateTimeFormat.forPattern(parsePattern);
        try {
            DateTime dateTime = dtf.parseDateTime(dateStr);
            if (dateTime != null) {
                switch (i) {
                case 0:
                    return new Interval(dateTime.getMillis(),
                            dateTime.plusSeconds(1).minusMillis(1).getMillis());
                case 1:
                    return new Interval(dateTime.getMillis(),
                            dateTime.plusMinutes(1).minusMillis(1).getMillis());
                case 2:
                    return new Interval(dateTime.getMillis(), dateTime.plusHours(1).minusMillis(1).getMillis());
                case 3:
                    return new Interval(dateTime.getMillis(), dateTime.plusDays(1).minusMillis(1).getMillis());
                case 4:
                    return new Interval(dateTime.getMillis(),
                            dateTime.plusMonths(1).minusMillis(1).getMillis());
                case 5:
                    return new Interval(dateTime.getMillis(), dateTime.plusYears(1).minusMillis(1).getMillis());
                }
            }
        } catch (Exception e) {
            // The text did not match this pattern; fall through and try the next one.
        }
    }
    return null;
}

From source file: gsonjodatime.DateMidnightConverter.java

License: Open Source License

/**
 * Gson invokes this call-back method during deserialization when it encounters a field of the
 * specified type. <p>
 *
 * In the implementation of this call-back method, you should consider invoking
 * {@link com.google.gson.JsonDeserializationContext#deserialize(com.google.gson.JsonElement, java.lang.reflect.Type)} method to create objects
 * for any non-trivial field of the returned object. However, you should never invoke it on the
 * same type passing {@code json} since that will cause an infinite loop (Gson will call your
 * call-back method again).
 * @param json The Json data being deserialized
 * @param typeOfT The type of the Object to deserialize to
 * @return a deserialized object of the specified type typeOfT which is a subclass of {@code T}
 * @throws com.google.gson.JsonParseException if json is not in the expected format of {@code typeOfT}
 */
@Override
public DateMidnight deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
        throws JsonParseException {
    // Do not try to deserialize null or empty values
    if (json.getAsString() == null || json.getAsString().isEmpty()) {
        return null;
    }

    final DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
    return new DateMidnight(fmt.parseDateTime(json.getAsString()));
}