Example usage for org.joda.time Seconds secondsBetween

Introduction

On this page you can find example usages of org.joda.time.Seconds#secondsBetween, collected from open-source projects.

Prototype

public static Seconds secondsBetween(ReadableInstant start, ReadableInstant end)
public static Seconds secondsBetween(ReadablePartial start, ReadablePartial end)

Document

Creates a Seconds representing the number of whole seconds between the two specified datetimes (instant-based overload) or partial datetimes (partial-based overload).
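
A minimal, self-contained sketch of both overloads (illustrative only; the class and variable names are not taken from the projects below):

import org.joda.time.DateTime;
import org.joda.time.LocalTime;
import org.joda.time.Seconds;

public class SecondsBetweenDemo {
    public static void main(String[] args) {
        // Instant-based overload: whole seconds between two DateTimes.
        DateTime start = new DateTime(2024, 1, 1, 12, 0, 0);
        DateTime end = start.plusMinutes(2).plusSeconds(30);
        System.out.println(Seconds.secondsBetween(start, end).getSeconds()); // 150

        // Partial-based overload: both partials must use matching fields, e.g. LocalTime.
        LocalTime t1 = new LocalTime(12, 0, 0);
        LocalTime t2 = new LocalTime(12, 0, 45);
        System.out.println(Seconds.secondsBetween(t1, t2).getSeconds()); // 45
    }
}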

Usage

From source file:esiptestbed.mudrod.weblog.structure.Session.java

License:Apache License

/**
 * Compares this session with another session by end time.
 *
 * @see java.lang.Comparable#compareTo(java.lang.Object)
 */
@Override
public int compareTo(Session o) {
    // Ascending order by end time: positive when this session ends after o.
    return Seconds.secondsBetween(fmt.parseDateTime(o.end), fmt.parseDateTime(this.end)).getSeconds();
}

From source file:gov.nasa.jpl.mudrod.weblog.pre.CrawlerDetection.java

License:Apache License

private int checkByRate(ESDriver es, String user) {

    int rate = Integer.parseInt(props.getProperty("sendingrate"));
    Pattern pattern = Pattern.compile("get (.*?) http/*");
    Matcher matcher;

    BoolQueryBuilder filterSearch = new BoolQueryBuilder();
    filterSearch.must(QueryBuilders.termQuery("IP", user));

    AggregationBuilder aggregation = AggregationBuilders.dateHistogram("by_minute").field("Time")
            .dateHistogramInterval(DateHistogramInterval.MINUTE).order(Order.COUNT_DESC);
    SearchResponse checkRobot = es.getClient().prepareSearch(logIndex).setTypes(httpType, ftpType)
            .setQuery(filterSearch).setSize(0).addAggregation(aggregation).execute().actionGet();

    Histogram agg = checkRobot.getAggregations().get("by_minute");

    List<? extends Histogram.Bucket> botList = agg.getBuckets();
    // Buckets are ordered by count descending, so the first bucket is the busiest minute.
    long maxCount = botList.get(0).getDocCount();
    if (maxCount >= rate) {
        // The busiest minute exceeds the allowed sending rate: classify this user as a crawler.
        return 0;
    } else {
        DateTime dt1 = null;
        int toLast = 0;
        SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(httpType, ftpType)
                .setScroll(new TimeValue(60000)).setQuery(filterSearch).setSize(100).execute().actionGet();
        while (true) {
            for (SearchHit hit : scrollResp.getHits().getHits()) {
                Map<String, Object> result = hit.getSource();
                String logtype = (String) result.get("LogType");
                if (logtype.equals("PO.DAAC")) {
                    String request = (String) result.get("Request");
                    matcher = pattern.matcher(request.trim().toLowerCase());
                    boolean find = false;
                    while (matcher.find()) {
                        request = matcher.group(1);
                        result.put("RequestUrl", "http://podaac.jpl.nasa.gov" + request);
                        find = true;
                    }
                    if (!find) {
                        result.put("RequestUrl", request);
                    }
                } else {
                    result.put("RequestUrl", result.get("Request"));
                }

                DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
                DateTime dt2 = fmt.parseDateTime((String) result.get("Time"));

                if (dt1 == null) {
                    toLast = 0;
                } else {
                    toLast = Math.abs(Seconds.secondsBetween(dt1, dt2).getSeconds());
                }
                result.put("ToLast", toLast);
                IndexRequest ir = new IndexRequest(logIndex, cleanupType).source(result);

                es.getBulkProcessor().add(ir);
                dt1 = dt2;
            }

            scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId())
                    .setScroll(new TimeValue(600000)).execute().actionGet();
            if (scrollResp.getHits().getHits().length == 0) {
                break;
            }
        }

    }

    return 1;
}

From source file:gov.nasa.jpl.mudrod.weblog.pre.SessionGenerator.java

License:Apache License

public int genSessionByReferer(ESDriver es, String user, int timeThres)
        throws ElasticsearchException, IOException {

    int sessionCountIn = 0;

    BoolQueryBuilder filterSearch = new BoolQueryBuilder();
    filterSearch.must(QueryBuilders.termQuery("IP", user));

    SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(this.cleanupType)
            .setScroll(new TimeValue(60000)).setQuery(filterSearch).addSort("Time", SortOrder.ASC).setSize(100)
            .execute().actionGet();

    Map<String, Map<String, DateTime>> sessionReqs = new HashMap<>();
    String request = "";
    String referer = "";
    String logType = "";
    String id = "";
    String ip = user;
    String indexUrl = "http://podaac.jpl.nasa.gov/";
    DateTime time = null;
    DateTimeFormatter fmt = ISODateTimeFormat.dateTime();

    while (scrollResp.getHits().getHits().length != 0) {
        for (SearchHit hit : scrollResp.getHits().getHits()) {
            Map<String, Object> result = hit.getSource();
            request = (String) result.get("RequestUrl");
            referer = (String) result.get("Referer");
            logType = (String) result.get("LogType");
            time = fmt.parseDateTime((String) result.get("Time"));
            id = hit.getId();

            if ("PO.DAAC".equals(logType)) {
                if ("-".equals(referer) || referer.equals(indexUrl) || !referer.contains(indexUrl)) {
                    sessionCountIn++;
                    sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                    sessionReqs.get(ip + "@" + sessionCountIn).put(request, time);

                    update(es, logIndex, this.cleanupType, id, "SessionID", ip + "@" + sessionCountIn);

                } else {
                    int count = sessionCountIn;
                    int rollbackNum = 0;
                    while (true) {
                        Map<String, DateTime> requests = sessionReqs.get(ip + "@" + count);
                        if (requests == null) {
                            sessionReqs.put(ip + "@" + count, new HashMap<String, DateTime>());
                            sessionReqs.get(ip + "@" + count).put(request, time);
                            update(es, logIndex, this.cleanupType, id, "SessionID", ip + "@" + count);

                            break;
                        }
                        ArrayList<String> keys = new ArrayList<>(requests.keySet());
                        boolean bFindRefer = false;

                        for (int i = keys.size() - 1; i >= 0; i--) {
                            rollbackNum++;
                            if (keys.get(i).equalsIgnoreCase(referer)) {
                                bFindRefer = true;
                                // Scale the threshold by how far back the referer was found:
                                // if the interval exceeds timeThres * rollbackNum, start a new session.
                                if (Math.abs(Seconds.secondsBetween(requests.get(keys.get(i)), time)
                                        .getSeconds()) < timeThres * rollbackNum) {
                                    sessionReqs.get(ip + "@" + count).put(request, time);
                                    update(es, logIndex, this.cleanupType, id, "SessionID", ip + "@" + count);
                                } else {
                                    sessionCountIn++;
                                    sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                                    sessionReqs.get(ip + "@" + sessionCountIn).put(request, time);
                                    update(es, logIndex, this.cleanupType, id, "SessionID",
                                            ip + "@" + sessionCountIn);
                                }

                                break;
                            }
                        }

                        if (bFindRefer) {
                            break;
                        }

                        count--;
                        if (count < 0) {
                            sessionCountIn++;

                            sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                            sessionReqs.get(ip + "@" + sessionCountIn).put(request, time);
                            update(es, props.getProperty(MudrodConstants.ES_INDEX_NAME), this.cleanupType, id,
                                    "SessionID", ip + "@" + sessionCountIn);

                            break;
                        }
                    }
                }
            } else if ("ftp".equals(logType)) {

                // may affect computation efficiency
                Map<String, DateTime> requests = sessionReqs.get(ip + "@" + sessionCountIn);
                if (requests == null) {
                    sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                } else {
                    ArrayList<String> keys = new ArrayList<>(requests.keySet());
                    int size = keys.size();
                    if (Math.abs(Seconds.secondsBetween(requests.get(keys.get(size - 1)), time)
                            .getSeconds()) > timeThres) {
                        sessionCountIn += 1;
                        sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
                    }
                }
                sessionReqs.get(ip + "@" + sessionCountIn).put(request, time);
                update(es, logIndex, this.cleanupType, id, "SessionID", ip + "@" + sessionCountIn);
            }
        }

        scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId())
                .setScroll(new TimeValue(600000)).execute().actionGet();
    }

    return sessionCountIn;
}

From source file:gov.nasa.jpl.mudrod.weblog.pre.SessionGenerator.java

License:Apache License

public void combineShortSessions(ESDriver es, String user, int timeThres)
        throws ElasticsearchException, IOException {

    BoolQueryBuilder filterSearch = new BoolQueryBuilder();
    filterSearch.must(QueryBuilders.termQuery("IP", user));

    String[] indexArr = new String[] { logIndex };
    String[] typeArr = new String[] { cleanupType };
    int docCount = es.getDocCount(indexArr, typeArr, filterSearch);

    if (docCount < 3) {
        deleteInvalid(es, user);
        return;
    }

    BoolQueryBuilder filterCheck = new BoolQueryBuilder();
    filterCheck.must(QueryBuilders.termQuery("IP", user)).must(QueryBuilders.termQuery("Referer", "-"));
    SearchResponse checkReferer = es.getClient().prepareSearch(logIndex).setTypes(this.cleanupType)
            .setScroll(new TimeValue(60000)).setQuery(filterCheck).setSize(0).execute().actionGet();

    long numInvalid = checkReferer.getHits().getTotalHits();
    // Cast to double: plain integer division would truncate the rate.
    double invalidRate = (double) numInvalid / docCount;

    if (invalidRate >= 0.8) {
        deleteInvalid(es, user);
        return;
    }

    StatsAggregationBuilder statsAgg = AggregationBuilders.stats("Stats").field("Time");
    SearchResponse srSession = es.getClient().prepareSearch(logIndex).setTypes(this.cleanupType)
            .setScroll(new TimeValue(60000)).setQuery(filterSearch).addAggregation(AggregationBuilders
                    .terms("Sessions").field("SessionID").size(docCount).subAggregation(statsAgg))
            .execute().actionGet();

    Terms sessions = srSession.getAggregations().get("Sessions");

    List<Session> sessionList = new ArrayList<>();
    for (Terms.Bucket session : sessions.getBuckets()) {
        Stats agg = session.getAggregations().get("Stats");
        Session sess = new Session(props, es, agg.getMinAsString(), agg.getMaxAsString(),
                session.getKey().toString());
        sessionList.add(sess);
    }

    Collections.sort(sessionList);
    DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
    String last = null;
    String lastnewID = null;
    String lastoldID = null;
    String current = null;
    for (Session s : sessionList) {
        current = s.getEndTime();
        if (last != null) {
            if (Seconds.secondsBetween(fmt.parseDateTime(last), fmt.parseDateTime(current))
                    .getSeconds() < timeThres) {
                if (lastnewID == null) {
                    s.setNewID(lastoldID);
                } else {
                    s.setNewID(lastnewID);
                }

                QueryBuilder fs = QueryBuilders.boolQuery()
                        .filter(QueryBuilders.termQuery("SessionID", s.getID()));

                SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(this.cleanupType)
                        .setScroll(new TimeValue(60000)).setQuery(fs).setSize(100).execute().actionGet();
                while (true) {
                    for (SearchHit hit : scrollResp.getHits().getHits()) {
                        if (lastnewID == null) {
                            update(es, logIndex, this.cleanupType, hit.getId(), "SessionID", lastoldID);
                        } else {
                            update(es, logIndex, this.cleanupType, hit.getId(), "SessionID", lastnewID);
                        }
                    }

                    scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId())
                            .setScroll(new TimeValue(600000)).execute().actionGet();
                    if (scrollResp.getHits().getHits().length == 0) {
                        break;
                    }
                }
            }
        }
        lastoldID = s.getID();
        lastnewID = s.getNewID();
        last = current;
    }

}

From source file:gov.nasa.jpl.mudrod.weblog.pre.SessionStatistic.java

License:Apache License

public int processSession(ESDriver es, String sessionId)
        throws IOException, InterruptedException, ExecutionException {

    String inputType = cleanupType;
    String outputType = sessionStats;

    DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
    String min = null;
    String max = null;
    DateTime start = null;
    DateTime end = null;
    int duration = 0;
    float request_rate = 0;

    int session_count = 0;
    Pattern pattern = Pattern.compile("get (.*?) http/*");

    StatsAggregationBuilder statsAgg = AggregationBuilders.stats("Stats").field("Time");

    BoolQueryBuilder filter_search = new BoolQueryBuilder();
    filter_search.must(QueryBuilders.termQuery("SessionID", sessionId));

    SearchResponse sr = es.getClient().prepareSearch(logIndex).setTypes(inputType).setQuery(filter_search)
            .addAggregation(statsAgg).execute().actionGet();

    Stats agg = sr.getAggregations().get("Stats");
    min = agg.getMinAsString();
    max = agg.getMaxAsString();
    start = fmt.parseDateTime(min);
    end = fmt.parseDateTime(max);

    duration = Seconds.secondsBetween(start, end).getSeconds();

    int searchDataListRequest_count = 0;
    int searchDataRequest_count = 0;
    int searchDataListRequest_byKeywords_count = 0;
    int ftpRequest_count = 0;
    int keywords_num = 0;

    String IP = null;
    String keywords = "";
    String views = "";
    String downloads = "";

    SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(inputType)
            .setScroll(new TimeValue(60000)).setQuery(filter_search).setSize(100).execute().actionGet();

    while (true) {
        for (SearchHit hit : scrollResp.getHits().getHits()) {
            Map<String, Object> result = hit.getSource();

            String request = (String) result.get("Request");
            String logType = (String) result.get("LogType");
            IP = (String) result.get("IP");
            Matcher matcher = pattern.matcher(request.trim().toLowerCase());
            while (matcher.find()) {
                request = matcher.group(1);
            }

            String datasetlist = "/datasetlist?";
            String dataset = "/dataset/";
            if (request.contains(datasetlist)) {
                searchDataListRequest_count++;

                RequestUrl requestURL = new RequestUrl();
                String infoStr = requestURL.getSearchInfo(request) + ",";
                String info = es.customAnalyzing(props.getProperty("indexName"), infoStr);

                if (!info.equals(",")) {
                    if (keywords.equals("")) {
                        keywords = keywords + info;
                    } else {
                        String[] items = info.split(",");
                        String[] keywordList = keywords.split(",");
                        for (int m = 0; m < items.length; m++) {
                            if (!Arrays.asList(keywordList).contains(items[m])) {
                                keywords = keywords + items[m] + ",";
                            }
                        }
                    }
                }

            }
            if (request.startsWith(dataset)) {
                searchDataRequest_count++;
                String view = findDataset(request);
                if (view != null) {
                    if ("".equals(views)) {
                        views = view;
                    } else if (!views.contains(view)) {
                        views = views + "," + view;
                    }
                }
            }
            if ("ftp".equals(logType)) {
                ftpRequest_count++;
                String download = "";
                String requestLowercase = request.toLowerCase();
                if (requestLowercase.endsWith(".jpg") == false && requestLowercase.endsWith(".pdf") == false
                        && requestLowercase.endsWith(".txt") == false
                        && requestLowercase.endsWith(".gif") == false) {
                    download = request;
                }

                if ("".equals(downloads)) {
                    downloads = download;
                } else {
                    if (downloads.contains(download)) {

                    } else {
                        downloads = downloads + "," + download;
                    }
                }
            }

        }

        scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId())
                .setScroll(new TimeValue(600000)).execute().actionGet();
        // Break condition: No hits are returned
        if (scrollResp.getHits().getHits().length == 0) {
            break;
        }
    }

    if (!keywords.equals("")) {
        keywords_num = keywords.split(",").length;
    }

    if (searchDataListRequest_count != 0
            && searchDataListRequest_count <= Integer.parseInt(props.getProperty("searchf"))
            && searchDataRequest_count != 0
            && searchDataRequest_count <= Integer.parseInt(props.getProperty("viewf"))
            && ftpRequest_count <= Integer.parseInt(props.getProperty("downloadf"))) {
        String sessionURL = props.getProperty("SessionPort") + props.getProperty("SessionUrl") + "?sessionid="
                + sessionId + "&sessionType=" + outputType + "&requestType=" + inputType;
        session_count = 1;

        IndexRequest ir = new IndexRequest(logIndex, outputType).source(jsonBuilder().startObject()
                .field("SessionID", sessionId).field("SessionURL", sessionURL).field("Duration", duration)
                .field("Number of Keywords", keywords_num).field("Time", min).field("End_time", max)
                .field("searchDataListRequest_count", searchDataListRequest_count)
                .field("searchDataListRequest_byKeywords_count", searchDataListRequest_byKeywords_count)
                .field("searchDataRequest_count", searchDataRequest_count)
                .field("keywords", es.customAnalyzing(logIndex, keywords)).field("views", views)
                .field("downloads", downloads).field("request_rate", request_rate).field("Comments", "")
                .field("Validation", 0).field("Produceby", 0).field("Correlation", 0).field("IP", IP)
                .endObject());

        es.getBulkProcessor().add(ir);
    }

    return session_count;
}

From source file:hanulhan.jms.spring.reqreply.util.DateConverter.java

public static int elapsedSeconds(Date lastAliveMessage) {
    return Seconds.secondsBetween(new DateTime(lastAliveMessage), new DateTime()).getSeconds();
}

From source file:hoot.services.models.osm.Changeset.java

License:Open Source License

/**
 * Updates the expiration of this changeset in the database by modifying its closed at time
 *
 * This logic is pulled directly from the Rails port, and is meant to be executed
 * at the end of each upload process involving this changeset.  This effectively extends the
 * changeset's expiration once any data is written to it and leaves it with a shorter expiration
 * if it has been opened but had no data added to it.
 *
 * @throws Exception
 * @todo This method is very confusing.
 */
public void updateExpiration() throws Exception {
    final DateTime now = new DateTime();

    //SQLQuery query = new SQLQuery(conn, DbUtils.getConfiguration());

    Changesets changesetRecord = (Changesets) new SQLQuery(conn, DbUtils.getConfiguration(_mapId))
            .from(changesets).where(changesets.id.eq(getId())).singleResult(changesets);

    if (isOpen()) {
        final int maximumChangesetElements = Integer.parseInt(HootProperties.getInstance().getProperty(
                "maximumChangesetElements", HootProperties.getDefault("maximumChangesetElements")));
        Timestamp newClosedAt = null;
        assert (changesetRecord.getNumChanges() <= maximumChangesetElements);
        if (changesetRecord.getNumChanges() == maximumChangesetElements) {
            newClosedAt = new Timestamp(now.getMillis());
        } else if (changesetRecord.getNumChanges() > 0) {
            /*
             * from rails port:
             *
             * if (closed_at - created_at) > (MAX_TIME_OPEN - IDLE_TIMEOUT)
             *   self.closed_at = created_at + MAX_TIME_OPEN
             * else
             *   self.closed_at = Time.now.getutc + IDLE_TIMEOUT
             */

            final DateTime createdAt = new DateTime(changesetRecord.getCreatedAt().getTime());
            final DateTime closedAt = new DateTime(changesetRecord.getClosedAt().getTime());

            final int changesetIdleTimeout = Integer.parseInt(HootProperties.getInstance().getProperty(
                    "changesetIdleTimeoutMinutes", HootProperties.getDefault("changesetIdleTimeoutMinutes")));
            final int changesetMaxOpenTime = Integer.parseInt(HootProperties.getInstance().getProperty(
                    "changesetMaxOpenTimeHours", HootProperties.getDefault("changesetMaxOpenTimeHours")));
            //The testChangesetAutoClose option = true causes changesetIdleTimeoutMinutes and
            //changesetMaxOpenTimeHours to be interpreted in seconds rather than minutes and hours,
            //respectively.  This enables faster running of auto-close related unit tests.
            if (Boolean.parseBoolean(HootProperties.getInstance().getProperty("testChangesetAutoClose",
                    HootProperties.getDefault("testChangesetAutoClose")))) {
                final int changesetMaxOpenTimeSeconds = changesetMaxOpenTime;
                final int changesetIdleTimeoutSeconds = changesetIdleTimeout;
                if (Seconds.secondsBetween(createdAt, closedAt)
                        .getSeconds() > (changesetMaxOpenTimeSeconds - changesetIdleTimeoutSeconds)) {
                    newClosedAt = new Timestamp(createdAt.plusSeconds(changesetMaxOpenTimeSeconds).getMillis());
                } else {
                    newClosedAt = new Timestamp(now.plusSeconds(changesetIdleTimeoutSeconds).getMillis());
                }
            } else {
                final int changesetMaxOpenTimeMinutes = changesetMaxOpenTime * 60;
                final int changesetIdleTimeoutMinutes = changesetIdleTimeout;
                if (Minutes.minutesBetween(createdAt, closedAt)
                        .getMinutes() > (changesetMaxOpenTimeMinutes - changesetIdleTimeoutMinutes)) {
                    newClosedAt = new Timestamp(createdAt.plusMinutes(changesetMaxOpenTimeMinutes).getMillis());
                } else {
                    newClosedAt = new Timestamp(now.plusMinutes(changesetIdleTimeoutMinutes).getMillis());
                }
            }
        }

        if (newClosedAt != null) {
            if (new SQLUpdateClause(conn, DbUtils.getConfiguration(_mapId), changesets)
                    .where(changesets.id.eq(getId())).set(changesets.closedAt, newClosedAt).execute() != 1) {
                throw new Exception("Error updating expiration on changeset.");
            }
        }
    } else {
        //TODO: I have no idea why this code block is needed now.  It didn't use to be, but after
        //some refactoring to support the changes to marking items as reviewed in ReviewResource, it
        //now is needed.  I've been unable to track down what causes this to happen.
        if (!changesetRecord.getClosedAt().before(new Timestamp(now.getMillis()))) {
            if (new SQLUpdateClause(conn, DbUtils.getConfiguration(_mapId), changesets)
                    .where(changesets.id.eq(getId())).set(changesets.closedAt, new Timestamp(now.getMillis()))
                    .execute() != 1) {
                throw new Exception("Error updating expiration on changeset.");
            }
        }
    }
}

From source file:influent.server.utilities.DateRangeBuilder.java

License:MIT License

public static int determineInterval(DateTime date, DateTime startDate, FL_DateInterval interval,
        int numIntervalsPerBin) {
    switch (interval) {
    case SECONDS:
        Seconds seconds = Seconds.secondsBetween(startDate, date);
        return seconds.getSeconds() / numIntervalsPerBin;
    case HOURS:
        Hours hours = Hours.hoursBetween(startDate, date);
        return hours.getHours() / numIntervalsPerBin;
    case DAYS:
        Days days = Days.daysBetween(startDate, date);
        return days.getDays() / numIntervalsPerBin;
    case WEEKS:
        Weeks weeks = Weeks.weeksBetween(startDate, date);
        return weeks.getWeeks() / numIntervalsPerBin;
    case MONTHS:
        Months months = Months.monthsBetween(startDate, date);
        return months.getMonths() / numIntervalsPerBin;
    case QUARTERS:
        months = Months.monthsBetween(startDate, date);
        return months.getMonths() / 3 / numIntervalsPerBin;
    case YEARS:
        Years years = Years.yearsBetween(startDate, date);
        return years.getYears() / numIntervalsPerBin;
    }
    return 0;
}

From source file:io.cassandrareaper.service.SegmentRunner.java

License:Apache License

private void processTriggeredSegment(final RepairSegment segment, final JmxProxy coordinator, int repairNo) {

    repairRunner.updateLastEvent(String.format("Triggered repair of segment %s via host %s", segment.getId(),
            coordinator.getHost()));

    {
        long timeout = repairUnit.getIncrementalRepair() ? timeoutMillis * MAX_TIMEOUT_EXTENSIONS
                : timeoutMillis;
        LOG.info("Repair for segment {} started, status wait will timeout in {} millis", segmentId, timeout);
    }

    try {
        final long startTime = System.currentTimeMillis();
        final long maxTime = startTime + timeoutMillis;
        final long waitTime = Math.min(timeoutMillis, 60000);
        long lastLoopTime = startTime;

        while (System.currentTimeMillis() < maxTime) {
            condition.await(waitTime, TimeUnit.MILLISECONDS);

            boolean isDoneOrTimedOut = lastLoopTime + 60_000 > System.currentTimeMillis();

            isDoneOrTimedOut |= RepairSegment.State.DONE == context.storage
                    .getRepairSegment(segment.getRunId(), segmentId).get().getState();

            if (isDoneOrTimedOut) {
                break;
            }
            renewLead();
            lastLoopTime = System.currentTimeMillis();
        }
    } catch (InterruptedException e) {
        LOG.warn("Repair command {} on segment {} interrupted", this.repairNo, segmentId, e);
    } finally {
        coordinator.removeRepairStatusHandler(repairNo);
        RepairSegment resultingSegment = context.storage
                .getRepairSegment(repairRunner.getRepairRunId(), segmentId).get();

        LOG.info("Repair command {} on segment {} returned with state {}", this.repairNo, segmentId,
                resultingSegment.getState());

        if (RepairSegment.State.RUNNING == resultingSegment.getState()) {
            LOG.info("Repair command {} on segment {} has been cancelled while running", this.repairNo,
                    segmentId);
            segmentFailed.set(true);
            abort(resultingSegment, coordinator);

        } else if (RepairSegment.State.DONE == resultingSegment.getState()) {

            LOG.debug("Repair segment with id '{}' was repaired in {} seconds", resultingSegment.getId(),
                    Seconds.secondsBetween(resultingSegment.getStartTime(), resultingSegment.getEndTime())
                            .getSeconds());

            SEGMENT_RUNNERS.remove(resultingSegment.getId());
        } else {
            // Something went wrong on the coordinator node and we never got the RUNNING notification
            // or we are in an undetermined state.
            // Let's just abort and reschedule the segment.
            LOG.info("Repair command {} on segment {} never managed to start within timeout.", this.repairNo,
                    segmentId);
            segmentFailed.set(true);
            abort(resultingSegment, coordinator);
        }
        // Repair is still running, we'll renew lead on the segment when using Cassandra as storage backend
        renewLead();
    }
}

From source file:ManageBean.Model.CompareTime.java

@Override
public int compare(ViewData t, ViewData t1) {
    DateTime date1 = new DateTime(t.getPostdate());
    DateTime date2 = new DateTime(t1.getPostdate());
    // Ascending by post date; equal timestamps compare as 0, as the Comparator contract requires.
    return Integer.signum(Seconds.secondsBetween(date1, date2).getSeconds());
}