Example usage for java.util TreeSet remove

List of usage examples for java.util TreeSet remove

Introduction

On this page you can find example usage for java.util.TreeSet.remove().

Prototype

public boolean remove(Object o) 

Document

Removes the specified element from this set if it is present.
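
As a quick orientation before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below). Note that remove returns true only when the element was present, and that a TreeSet locates the element via its comparator (or the elements' natural ordering) rather than equals:

import java.util.TreeSet;

public class TreeSetRemoveDemo {
    public static void main(String[] args) {
        TreeSet<String> names = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        names.add("Alice");
        names.add("Bob");

        // the comparator decides membership, so "ALICE" matches "Alice"
        System.out.println(names.remove("ALICE")); // true, element was present
        System.out.println(names.remove("Carol")); // false, never present
        System.out.println(names);                 // [Bob]
    }
}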

Usage

From source file: com.thoughtworks.go.server.dao.PipelineSqlMapDao.java

private void removeCurrentLatestIfNoLongerActive(Stage stage, TreeSet<Long> ids) {
    if (!ids.isEmpty()) {
        if (isNewerThanCurrentLatest(stage, ids) && isCurrentLatestInactive(ids)) {
            ids.remove(ids.last());
        }
    }
}
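
A small illustrative variant of the snippet above (hypothetical IDs, not the GoCD code): for a non-empty TreeSet, remove(set.last()) drops the greatest element, which pollLast() would do in a single call.

import java.util.TreeSet;

public class DropLatestIdDemo {
    public static void main(String[] args) {
        TreeSet<Long> ids = new TreeSet<>();
        ids.add(10L);
        ids.add(42L);
        ids.add(99L);

        if (!ids.isEmpty()) {
            ids.remove(ids.last()); // drops 99, the current latest
            // equivalent for a non-empty set: ids.pollLast();
        }
        System.out.println(ids);    // [10, 42]
    }
}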

From source file: org.apache.lens.cube.parse.StorageCandidate.java

/**
 * Gets FactPartitions for the given fact using the following logic
 *
 * 1. Find the max update interval that will be used for the query. Let's assume the time
 * range is 15 Sep to 15 Dec and the fact has two storages with update periods MONTHLY, DAILY and HOURLY.
 * In this case the data for [15 Sep - 1 Oct) U [1 Dec - 15 Dec) will be answered by DAILY partitions
 * and [1 Oct - 1 Dec) will be answered by MONTHLY partitions. The max interval for this query will be MONTHLY.
 *
 * 2. Prune storages that do not fall in the query's time range.
 * {@link org.apache.lens.cube.metadata.CubeMetastoreClient#isStorageTableCandidateForRange(String, Date, Date)}
 *
 * 3. Iterate over the max interval. In our case it will give two months, Oct and Nov. Find partitions for
 * these two months. Check validity of FactPartitions for Oct and Nov
 * via {@link #updatePartitionStorage(FactPartition)}.
 * If a partition is missing, try getting partitions for the time range from the other update periods (DAILY, HOURLY).
 * This is achieved by calling getPartitions() recursively but passing only 2 update periods (DAILY, HOURLY).
 *
 * 4. If the monthly partitions are found, check for look-ahead partitions and call getPartitions recursively for the
 * remaining time intervals, i.e. [15 Sep - 1 Oct) and [1 Dec - 15 Dec).
 *
 * TODO union : Move this into util.
 */
private boolean getPartitions(Date fromDate, Date toDate, String partCol, Set<FactPartition> partitions,
        TreeSet<UpdatePeriod> updatePeriods, boolean addNonExistingParts, boolean failOnPartialData,
        PartitionRangesForPartitionColumns missingPartitions) throws LensException {
    if (fromDate.equals(toDate) || fromDate.after(toDate)) {
        return true;
    }
    if (updatePeriods == null || updatePeriods.isEmpty()) {
        return false;
    }

    UpdatePeriod maxInterval = CubeFactTable.maxIntervalInRange(fromDate, toDate, updatePeriods);
    if (maxInterval == null) {
        log.info("No max interval for range: {} to {}", fromDate, toDate);
        return false;
    }

    if (maxInterval == UpdatePeriod.CONTINUOUS
            && cubeQueryContext.getRangeWriter().getClass().equals(BetweenTimeRangeWriter.class)) {
        FactPartition part = new FactPartition(partCol, fromDate, maxInterval, null, partWhereClauseFormat);
        partitions.add(part);
        part.getStorageTables().add(storageTable);
        part = new FactPartition(partCol, toDate, maxInterval, null, partWhereClauseFormat);
        partitions.add(part);
        part.getStorageTables().add(storageTable);
        this.participatingUpdatePeriods.add(maxInterval);
        log.info("Added continuous fact partition for storage table {}", storageName);
        return true;
    }

    if (!getCubeMetastoreClient().partColExists(this.getFact(), storageName, partCol)) {
        log.info("{} does not exist in {}", partCol, name);
        return false;
    }

    Date maxIntervalStorageTblStartDate = getStorageTableStartDate(maxInterval);
    Date maxIntervalStorageTblEndDate = getStorageTableEndDate(maxInterval);

    TreeSet<UpdatePeriod> remainingIntervals = new TreeSet<>(updatePeriods);
    remainingIntervals.remove(maxInterval);
    if (!isCandidatePartiallyValidForTimeRange(maxIntervalStorageTblStartDate, maxIntervalStorageTblEndDate,
            fromDate, toDate)) {
        //Check the time range in remainingIntervals as maxInterval is not useful
        return getPartitions(fromDate, toDate, partCol, partitions, remainingIntervals, addNonExistingParts,
                failOnPartialData, missingPartitions);
    }

    Date ceilFromDate = DateUtil.getCeilDate(
            fromDate.after(maxIntervalStorageTblStartDate) ? fromDate : maxIntervalStorageTblStartDate,
            maxInterval);
    Date floorToDate = DateUtil.getFloorDate(
            toDate.before(maxIntervalStorageTblEndDate) ? toDate : maxIntervalStorageTblEndDate, maxInterval);
    if (ceilFromDate.equals(floorToDate) || floorToDate.before(ceilFromDate)) {
        return getPartitions(fromDate, toDate, partCol, partitions, remainingIntervals, addNonExistingParts,
                failOnPartialData, missingPartitions);
    }

    int lookAheadNumParts = getConf().getInt(CubeQueryConfUtil.getLookAheadPTPartsKey(maxInterval),
            CubeQueryConfUtil.DEFAULT_LOOK_AHEAD_PT_PARTS);
    TimeRange.Iterable.Iterator iter = TimeRange.iterable(ceilFromDate, floorToDate, maxInterval, 1).iterator();
    // add partitions from ceilFrom to floorTo
    while (iter.hasNext()) {
        Date dt = iter.next();
        Date nextDt = iter.peekNext();
        FactPartition part = new FactPartition(partCol, dt, maxInterval, null, partWhereClauseFormat);
        updatePartitionStorage(part);
        log.debug("Storage tables containing Partition {} are: {}", part, part.getStorageTables());
        if (part.isFound()) {
            log.debug("Adding existing partition {}", part);
            partitions.add(part);
            this.participatingUpdatePeriods.add(maxInterval);
            log.debug("Looking for look ahead process time partitions for {}", part);
            if (processTimePartCol == null) {
                log.debug("processTimePartCol is null");
            } else if (partCol.equals(processTimePartCol)) {
                log.debug("part column is process time col");
            } else if (updatePeriods.first().equals(maxInterval)) {
                log.debug("Update period is the least update period");
            } else if ((iter.getNumIters() - iter.getCounter()) > lookAheadNumParts) {
                // see if this is part of the last-n look-ahead partitions
                log.debug("Not a look ahead partition");
            } else {
                log.debug("Looking for look ahead process time partitions for {}", part);
                // check if finer partitions are required
                // finer partitions are required if no partitions from the
                // look-ahead process time are present
                TimeRange.Iterable.Iterator processTimeIter = TimeRange
                        .iterable(nextDt, lookAheadNumParts, maxInterval, 1).iterator();
                while (processTimeIter.hasNext()) {
                    Date pdt = processTimeIter.next();
                    Date nextPdt = processTimeIter.peekNext();
                    FactPartition processTimePartition = new FactPartition(processTimePartCol, pdt, maxInterval,
                            null, partWhereClauseFormat);
                    updatePartitionStorage(processTimePartition);
                    if (processTimePartition.isFound()) {
                        log.debug("Finer parts not required for look-ahead partition :{}", part);
                    } else {
                        log.debug("Looked ahead process time partition {} is not found", processTimePartition);
                        TreeSet<UpdatePeriod> newset = new TreeSet<UpdatePeriod>();
                        newset.addAll(updatePeriods);
                        newset.remove(maxInterval);
                        log.debug("newset of update periods:{}", newset);
                        if (!newset.isEmpty()) {
                            // Get partitions for look ahead process time
                            log.debug("Looking for process time partitions between {} and {}", pdt, nextPdt);
                            Set<FactPartition> processTimeParts = getPartitions(
                                    TimeRange.builder().fromDate(pdt).toDate(nextPdt)
                                            .partitionColumn(processTimePartCol).build(),
                                    newset, true, failOnPartialData, missingPartitions);
                            log.debug("Look ahead partitions: {}", processTimeParts);
                            TimeRange timeRange = TimeRange.builder().fromDate(dt).toDate(nextDt).build();
                            for (FactPartition pPart : processTimeParts) {
                                log.debug("Looking for finer partitions in pPart: {}", pPart);
                                for (Date date : timeRange.iterable(pPart.getPeriod(), 1)) {
                                    FactPartition innerPart = new FactPartition(partCol, date,
                                            pPart.getPeriod(), pPart, partWhereClauseFormat);
                                    updatePartitionStorage(innerPart);
                                    innerPart.setFound(pPart.isFound());
                                    if (innerPart.isFound()) {
                                        partitions.add(innerPart);
                                    }
                                }
                                log.debug("added all sub partitions blindly in pPart: {}", pPart);
                            }
                        }
                    }
                }
            }
        } else {
            log.info("Partition:{} does not exist in any storage table", part);
            if (!getPartitions(dt, nextDt, partCol, partitions, remainingIntervals, false, failOnPartialData,
                    missingPartitions)) {
                log.debug("Adding non existing partition {}", part);
                if (addNonExistingParts) {
                    // Add non existing partitions for all cases of whether we populate all non existing or not.
                    this.participatingUpdatePeriods.add(maxInterval);
                    missingPartitions.add(part);
                    if (!failOnPartialData) {
                        partitions.add(part);
                        part.getStorageTables().add(storageTable);
                    }
                } else {
                    log.info("No finer granualar partitions exist for {}", part);
                    return false;
                }
            } else {
                log.debug("Finer granualar partitions added for {}", part);
            }
        }
    }

    return getPartitions(fromDate, ceilFromDate, partCol, partitions, remainingIntervals, addNonExistingParts,
            failOnPartialData, missingPartitions)
            && getPartitions(floorToDate, toDate, partCol, partitions, remainingIntervals, addNonExistingParts,
                    failOnPartialData, missingPartitions);
}
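
The recursion above leans on a copy-then-remove idiom: clone the ordered set of update periods, drop the period just handled, and recurse with what remains. A stripped-down sketch of that idiom, using a hypothetical Period enum rather than the Lens UpdatePeriod API:

import java.util.TreeSet;

public class CopyThenRemoveDemo {
    enum Period { HOURLY, DAILY, MONTHLY }

    static void resolve(TreeSet<Period> periods) {
        if (periods.isEmpty()) {
            return;                       // nothing coarser or finer left to try
        }
        Period coarsest = periods.last(); // MONTHLY on the first call
        System.out.println("trying " + coarsest);

        // copy first so the caller's set is untouched, then remove and recurse
        TreeSet<Period> remaining = new TreeSet<>(periods);
        remaining.remove(coarsest);
        resolve(remaining);               // falls back to finer periods
    }

    public static void main(String[] args) {
        TreeSet<Period> periods = new TreeSet<>();
        periods.add(Period.MONTHLY);
        periods.add(Period.DAILY);
        periods.add(Period.HOURLY);
        resolve(periods);                 // trying MONTHLY, DAILY, HOURLY
    }
}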

From source file: org.unitime.timetable.onlinesectioning.OnlineSectioningServerImpl.java

@Override
public void remove(Offering offering) {
    iLock.writeLock().lock();
    try {
        for (Course course : offering.getCourses()) {
            CourseInfo ci = iCourseForId.get(course.getId());
            if (ci != null) {
                TreeSet<CourseInfo> courses = iCourseForName.get(ci.toString());
                if (courses != null) {
                    courses.remove(ci);
                    if (courses.isEmpty()) {
                        iCourseForName.remove(ci.toString());
                    } else if (courses.size() == 1) {
                        for (CourseInfo x : courses)
                            x.setHasUniqueName(true);
                    }
                }
                iCourseForId.remove(ci.getUniqueId());
                iCourses.remove(ci);
            }
            iCourseTable.remove(course.getId());
        }
        iOfferingTable.remove(offering.getId());
        for (Config config : offering.getConfigs()) {
            for (Subpart subpart : config.getSubparts())
                for (Section section : subpart.getSections())
                    iClassTable.remove(section.getId());
            for (Enrollment enrollment : new ArrayList<Enrollment>(config.getEnrollments()))
                enrollment.variable().unassign(0);
        }
    } finally {
        iLock.writeLock().unlock();
    }
}
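
The pattern in the method above (removing an element from a TreeSet stored as a map value and discarding the set once it becomes empty) can be sketched on its own, with hypothetical course and section names instead of the UniTime types:

import java.util.HashMap;
import java.util.Map;
import java.util.TreeSet;

public class RemoveFromMappedSetDemo {
    public static void main(String[] args) {
        Map<String, TreeSet<String>> sectionsByCourse = new HashMap<>();
        sectionsByCourse.computeIfAbsent("CALC 101", k -> new TreeSet<>()).add("section-A");
        sectionsByCourse.computeIfAbsent("CALC 101", k -> new TreeSet<>()).add("section-B");

        TreeSet<String> sections = sectionsByCourse.get("CALC 101");
        if (sections != null) {
            sections.remove("section-A");
            if (sections.isEmpty()) {
                sectionsByCourse.remove("CALC 101"); // drop the now-empty bucket
            }
        }
        System.out.println(sectionsByCourse);        // {CALC 101=[section-B]}
    }
}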

From source file: org.zaproxy.zap.extension.ascanrulesBeta.UsernameEnumeration.java

/**
 * looks for username enumeration in the login page, by changing the username field to be a
 * valid / invalid user, and looking for differences in the response
 */
@Override
public void scan() {

    // the technique to determine if usernames can be enumerated is as follows, using a variant
    // of the Freiling+Schinzel method,
    // adapted to the case where we do not know which is the username field
    //
    // 1) Request the original URL n times. (The original URL is assumed to have a valid
    // username, if not a valid password). Store the results in A[].
    // 2) Compute the longest common subsequence (LCS) of A[] into LCS_A
    // 3) for each parameter in the original URL (ie, for URL params, form params, and cookie
    // params)
    //   4) Change the current parameter (which we assume is the username parameter) to an invalid
    // username (randomly), and request the URL n times. Store the results in B[].
    //   5) Compute the longest common subsequence (LCS) of B[] into LCS_B
    //   6) If LCS_A <> LCS_B, then there is a Username Enumeration issue on the current parameter

    try {
        boolean loginUrl = false;

        // Are we dealing with a login url in any of the contexts of which this uri is part
        URI requestUri = getBaseMsg().getRequestHeader().getURI();

        // using the session, get the list of contexts for the url
        List<Context> contextList = extAuth.getModel().getSession().getContextsForUrl(requestUri.getURI());

        // now loop, and see if the url is a login url in each of the contexts in turn...
        for (Context context : contextList) {
            URI loginUri = extAuth.getLoginRequestURIForContext(context);
            if (loginUri != null) {
                if (requestUri.getScheme().equals(loginUri.getScheme())
                        && requestUri.getHost().equals(loginUri.getHost())
                        && requestUri.getPort() == loginUri.getPort()
                        && requestUri.getPath().equals(loginUri.getPath())) {
                    // we got this far.. only the method (GET/POST), user details, query params,
                    // fragment, and POST params
                    // are possibly different from the login page.
                    loginUrl = true;
                    log.info(requestUri.toString()
                            + " falls within a context, and is the defined Login URL. Scanning for possible Username Enumeration vulnerability.");
                    break; // Stop checking
                }
            }
        }

        // the Username Enumeration scanner will only run for logon pages
        if (loginUrl == false) {
            if (this.debugEnabled) {
                log.debug(requestUri.toString() + " is not a defined Login URL.");
            }
            return; // No need to continue for this URL
        }

        // find all params set in the request (GET/POST/Cookie)
        TreeSet<HtmlParameter> htmlParams = new TreeSet<>();
        htmlParams.addAll(getBaseMsg().getRequestHeader().getCookieParams()); // request cookies only. no response cookies
        htmlParams.addAll(getBaseMsg().getFormParams()); // add in the POST params
        htmlParams.addAll(getBaseMsg().getUrlParams()); // add in the GET params

        int numberOfRequests = 0;
        if (this.getAttackStrength() == AttackStrength.INSANE) {
            numberOfRequests = 50;
        } else if (this.getAttackStrength() == AttackStrength.HIGH) {
            numberOfRequests = 15;
        } else if (this.getAttackStrength() == AttackStrength.MEDIUM) {
            numberOfRequests = 5;
        } else if (this.getAttackStrength() == AttackStrength.LOW) {
            numberOfRequests = 3;
        }

        // 1) Request the original URL n times. (The original URL is assumed to have a valid
        // username, if not a valid password). Store the results in A[].
        // make sure to manually handle all redirects, and cookies that may be set in response.
        // allocate enough space for the responses

        StringBuilder responseA = null;
        StringBuilder responseB = null;
        String longestCommonSubstringA = null;
        String longestCommonSubstringB = null;

        for (int i = 0; i < numberOfRequests; i++) {

            // initialise the storage for this iteration
            // baseResponses[i]= new StringBuilder(250);
            responseA = new StringBuilder(250);

            HttpMessage msgCpy = getNewMsg(); // clone the request, but not the response

            sendAndReceive(msgCpy, false, false); // request the URL, but do not automatically follow redirects.

            // get all cookies set in the response
            TreeSet<HtmlParameter> cookies = msgCpy.getResponseHeader().getCookieParams();

            int redirectCount = 0;
            while (HttpStatusCode.isRedirection(msgCpy.getResponseHeader().getStatusCode())) {
                redirectCount++;

                if (this.debugEnabled)
                    log.debug("Following redirect " + redirectCount + " for message " + i + " of "
                            + numberOfRequests + " iterations of the original query");

                // append the response to the responses so far for this particular instance
                // this will give us a complete picture of the full set of actual traffic
                // associated with following redirects for the request
                responseA.append(msgCpy.getResponseHeader().getHeadersAsString());
                responseA.append(msgCpy.getResponseBody().toString());

                // and manually follow the redirect
                // create a new message from scratch
                HttpMessage msgRedirect = new HttpMessage();

                // create a new URI from the absolute location returned, and interpret it as
                // escaped
                // note that the standard says that the Location returned should be absolute,
                // but it ain't always so...
                URI newLocation = new URI(msgCpy.getResponseHeader().getHeader(HttpHeader.LOCATION), true);
                try {
                    msgRedirect.getRequestHeader().setURI(newLocation);
                } catch (Exception e) {
                    // the Location field contents may not be standards compliant. Let's generate
                    // a uri to use as a workaround where a relative path was
                    // given instead of an absolute one
                    URI newLocationWorkaround = new URI(msgCpy.getRequestHeader().getURI(),
                            msgCpy.getResponseHeader().getHeader(HttpHeader.LOCATION), true);
                    // try again, except this time, if it fails, don't try to handle it
                    if (this.debugEnabled)
                        log.debug("The Location [" + newLocation
                                + "] specified in a redirect was not valid (not absolute?). Trying absolute workaround url ["
                                + newLocationWorkaround + "]");
                    msgRedirect.getRequestHeader().setURI(newLocationWorkaround);
                }
                msgRedirect.getRequestHeader().setMethod(HttpRequestHeader.GET); // it's always a GET for a redirect
                msgRedirect.getRequestHeader().setContentLength(0); // since we send a GET, the body will be 0 long
                if (cookies.size() > 0) {
                    // if a previous request sent back a cookie that has not since been
                    // invalidated, we need to set that cookie when following redirects, as a
                    // browser would
                    msgRedirect.getRequestHeader().setCookieParams(cookies);
                }

                if (this.debugEnabled)
                    log.debug("DEBUG: Following redirect to [" + newLocation + "]");
                sendAndReceive(msgRedirect, false, false); // do NOT redirect.. handle it here

                // handle scenario where a cookie is unset in a subsequent iteration, or where
                // the same cookie name is later re-assigned a different value
                // ie, in these cases, do not simply (and dumbly) accumulate cookie detritus.
                // first get all cookies set in the response
                TreeSet<HtmlParameter> cookiesTemp = msgRedirect.getResponseHeader().getCookieParams();
                for (Iterator<HtmlParameter> redirectSetsCookieIterator = cookiesTemp
                        .iterator(); redirectSetsCookieIterator.hasNext();) {
                    HtmlParameter cookieJustSet = redirectSetsCookieIterator.next();
                    // loop through each of the cookies we know about in cookies, to see if it
                    // matches by name.
                    // if so, delete that cookie, and add the one that was just set to cookies.
                    // if not, add the one that was just set to cookies.
                    for (Iterator<HtmlParameter> knownCookiesIterator = cookies.iterator(); knownCookiesIterator
                            .hasNext();) {
                        HtmlParameter knownCookie = knownCookiesIterator.next();
                        if (cookieJustSet.getName().equals(knownCookie.getName())) {
                            knownCookiesIterator.remove();
                            break; // out of the loop for known cookies, back to the next cookie
                            // set in the response
                        }
                    } // end of loop for cookies we already know about
                      // we can now safely add the cookie that was just set into cookies, knowing
                      // it does not clash with anything else in there.
                    cookies.add(cookieJustSet);
                } // end of for loop for cookies just set in the redirect

                msgCpy = msgRedirect; // store the last redirect message into the MsgCpy, as we
                // will be using its output in a moment.
            } // end of loop to follow redirects

            // now that the redirections have all been handled.. was the request finally a
            // success or not?  Successful or Failed Logins would normally both return an OK
            // HTTP status
            if (!HttpStatusCode.isSuccess(msgCpy.getResponseHeader().getStatusCode())) {
                log.warn("The original URL [" + getBaseMsg().getRequestHeader().getURI()
                        + "] returned a non-OK HTTP status " + msgCpy.getResponseHeader().getStatusCode()
                        + " (after " + i + " of " + numberOfRequests
                        + " steps). Could be indicative of SQL Injection, or some other error. The URL is not stable enough to look at Username Enumeration");
                return; // we have not even got as far as looking at the parameters, so just
                // abort straight out of the method
            }

            if (this.debugEnabled)
                log.debug("Done following redirects!");

            // append the response to the responses so far for this particular instance
            // this will give us a complete picture of the full set of actual traffic associated
            // with following redirects for the request
            responseA.append(msgCpy.getResponseHeader().getHeadersAsString());
            responseA.append(msgCpy.getResponseBody().toString());

            // 2) Compute the longest common subsequence (LCS) of A[] into LCS_A
            // Note: in the Freiling and Schinzel method, this is calculated recursively. We
            // calculate it iteratively, but using an equivalent method

            // first time in, the LCS is simple: it's the first HTML result.. no diffing
            // required
            if (i == 0)
                longestCommonSubstringA = responseA.toString();
            // else get the LCS of the existing string, and the current result
            else
                longestCommonSubstringA = this.longestCommonSubsequence(longestCommonSubstringA,
                        responseA.toString());

            // optimisation step: if the LCS of A is 0 characters long already, then the URL
            // output is not stable, and we can abort now, and save some time
            if (longestCommonSubstringA.length() == 0) {
                // this might occur if the output returned for the URL changed mid-way. Perhaps
                // a CAPTCHA has fired, or a WAF has kicked in. Let's abort now.
                log.warn("The original URL [" + getBaseMsg().getRequestHeader().getURI()
                        + "] does not produce stable output (at " + i + 1 + " of " + numberOfRequests
                        + " steps). There is no static element in the output that can be used as a basis of comparison for the result of requesting URLs with the parameter values modified. Perhaps a CAPTCHA or WAF has kicked in!!");
                return; // we have not even got as far as looking at the parameters, so just
                // abort straight out of the method
            }
        }
        // get rid of any remnants of cookie setting and Date headers in the responses, as these
        // cause false positives, and can be safely ignored
        // replace the content length with a non-variable placeholder
        // replace url parameters with a non-variable placeholder to eliminate tokens in URLs in
        // the output
        longestCommonSubstringA = longestCommonSubstringA.replaceAll("Set-Cookie:[^\\r\\n]+[\\r\\n]{1,2}", "");
        longestCommonSubstringA = longestCommonSubstringA.replaceAll("Date:[^\\r\\n]+[\\r\\n]{1,2}", "");
        longestCommonSubstringA = longestCommonSubstringA.replaceAll("Content-Length:[^\\r\\n]+[\\r\\n]{1,2}",
                "Content-Length: XXXX\n");
        longestCommonSubstringA = longestCommonSubstringA
                .replaceAll("(?<=(&amp;|\\?)[^\\?\"=&;]+=)[^\\?\"=&;]+(?=(&amp;|\"))", "YYYY");

        if (this.debugEnabled)
            log.debug("The LCS of A is [" + longestCommonSubstringA + "]");

        // 3) for each parameter in the original URL (ie, for URL params, form params, and
        // cookie params)
        for (Iterator<HtmlParameter> iter = htmlParams.iterator(); iter.hasNext();) {

            HttpMessage msgModifiedParam = getNewMsg();
            HtmlParameter currentHtmlParameter = iter.next();

            if (this.debugEnabled)
                log.debug("Handling [" + currentHtmlParameter.getType() + "] parameter ["
                        + currentHtmlParameter.getName() + "], with value [" + currentHtmlParameter.getValue()
                        + "]");

            // 4) Change the current parameter value (which we assume is the username parameter)
            // to an invalid username (randomly), and request the URL n times. Store the results
            // in B[].

            // get a random user name the same length as the original!
            String invalidUsername = RandomStringUtils.random(currentHtmlParameter.getValue().length(),
                    RANDOM_USERNAME_CHARS);
            if (this.debugEnabled)
                log.debug("The invalid username chosen was [" + invalidUsername + "]");

            TreeSet<HtmlParameter> requestParams = null;
            if (currentHtmlParameter.getType().equals(HtmlParameter.Type.cookie)) {
                requestParams = msgModifiedParam.getRequestHeader().getCookieParams();
                requestParams.remove(currentHtmlParameter);
                requestParams.add(new HtmlParameter(currentHtmlParameter.getType(),
                        currentHtmlParameter.getName(), invalidUsername.toString())); // add in the invalid username
                msgModifiedParam.setCookieParams(requestParams);
            } else if (currentHtmlParameter.getType().equals(HtmlParameter.Type.url)) {
                requestParams = msgModifiedParam.getUrlParams();
                requestParams.remove(currentHtmlParameter);
                requestParams.add(new HtmlParameter(currentHtmlParameter.getType(),
                        currentHtmlParameter.getName(), invalidUsername.toString())); // add in the invalid username
                msgModifiedParam.setGetParams(requestParams);
            } else if (currentHtmlParameter.getType().equals(HtmlParameter.Type.form)) {
                requestParams = msgModifiedParam.getFormParams();
                requestParams.remove(currentHtmlParameter);
                requestParams.add(new HtmlParameter(currentHtmlParameter.getType(),
                        currentHtmlParameter.getName(), invalidUsername.toString())); // add in the invalid username
                msgModifiedParam.setFormParams(requestParams);
            }

            if (this.debugEnabled)
                log.debug("About to loop for " + numberOfRequests
                        + " iterations with an incorrect user of the same length");

            boolean continueForParameter = true;
            for (int i = 0; i < numberOfRequests && continueForParameter; i++) {

                // initialise the storage for this iteration
                responseB = new StringBuilder(250);

                HttpMessage msgCpy = msgModifiedParam; // use the message we already set up, with the
                // modified parameter value

                sendAndReceive(msgCpy, false, false); // request the URL, but do not automatically follow redirects.

                // get all cookies set in the response
                TreeSet<HtmlParameter> cookies = msgCpy.getResponseHeader().getCookieParams();

                int redirectCount = 0;
                while (HttpStatusCode.isRedirection(msgCpy.getResponseHeader().getStatusCode())) {
                    redirectCount++;

                    if (this.debugEnabled)
                        log.debug("Following redirect " + redirectCount + " for message " + i + " of "
                                + numberOfRequests + " iterations of the modified query");

                    // append the response to the responses so far for this particular instance
                    // this will give us a complete picture of the full set of actual traffic
                    // associated with following redirects for the request
                    responseB.append(msgCpy.getResponseHeader().getHeadersAsString());
                    responseB.append(msgCpy.getResponseBody().toString());

                    // and manually follow the redirect
                    // create a new message from scratch
                    HttpMessage msgRedirect = new HttpMessage();

                    // create a new URI from the absolute location returned, and interpret it as
                    // escaped
                    // note that the standard says that the Location returned should be
                    // absolute, but it ain't always so...
                    URI newLocation = new URI(msgCpy.getResponseHeader().getHeader(HttpHeader.LOCATION), true);
                    try {
                        msgRedirect.getRequestHeader().setURI(newLocation);
                    } catch (Exception e) {
                        // the Location field contents may not be standards compliant. Let's
                        // generate a uri to use as a workaround where a relative path was
                        // given instead of an absolute one
                        URI newLocationWorkaround = new URI(msgCpy.getRequestHeader().getURI(),
                                msgCpy.getResponseHeader().getHeader(HttpHeader.LOCATION), true);
                        // try again, except this time, if it fails, don't try to handle it
                        if (this.debugEnabled)
                            log.debug("The Location [" + newLocation
                                    + "] specified in a redirect was not valid (not absolute?). Trying absolute workaround url ["
                                    + newLocationWorkaround + "]");
                        msgRedirect.getRequestHeader().setURI(newLocationWorkaround);
                    }
                    msgRedirect.getRequestHeader().setMethod(HttpRequestHeader.GET); // it's always a GET for a redirect
                    msgRedirect.getRequestHeader().setContentLength(0); // since we send a GET, the body will be 0 long
                    if (cookies.size() > 0) {
                        // if a previous request sent back a cookie that has not since been
                        // invalidated, we need to set that cookie when following redirects, as
                        // a browser would
                        msgRedirect.getRequestHeader().setCookieParams(cookies);
                    }

                    sendAndReceive(msgRedirect, false, false); // do NOT redirect.. handle it here

                    // handle scenario where a cookie is unset in a subsequent iteration, or
                    // where the same cookie name is later re-assigned a different value
                    // ie, in these cases, do not simply (and dumbly) accumulate cookie
                    // detritus.
                    // first get all cookies set in the response
                    TreeSet<HtmlParameter> cookiesTemp = msgRedirect.getResponseHeader().getCookieParams();
                    for (Iterator<HtmlParameter> redirectSetsCookieIterator = cookiesTemp
                            .iterator(); redirectSetsCookieIterator.hasNext();) {
                        HtmlParameter cookieJustSet = redirectSetsCookieIterator.next();
                        // loop through each of the cookies we know about in cookies, to see if
                        // it matches by name.
                        // if so, delete that cookie, and add the one that was just set to
                        // cookies.
                        // if not, add the one that was just set to cookies.
                        for (Iterator<HtmlParameter> knownCookiesIterator = cookies
                                .iterator(); knownCookiesIterator.hasNext();) {
                            HtmlParameter knownCookie = knownCookiesIterator.next();
                            if (cookieJustSet.getName().equals(knownCookie.getName())) {
                                knownCookiesIterator.remove();
                                break; // out of the loop for known cookies, back to the next
                                // cookie set in the response
                            }
                        } // end of loop for cookies we already know about
                          // we can now safely add the cookie that was just set into cookies,
                          // knowing it does not clash with anything else in there.
                        cookies.add(cookieJustSet);
                    } // end of for loop for cookies just set in the redirect

                    msgCpy = msgRedirect; // store the last redirect message into the MsgCpy, as
                    // we will be using its output in a moment.
                } // end of loop to follow redirects

                // now that the redirections have all been handled.. was the request finally a
                // success or not?  Successful or Failed Logins would normally both return an OK
                // HTTP status
                if (!HttpStatusCode.isSuccess(msgCpy.getResponseHeader().getStatusCode())) {
                    log.warn("The modified URL [" + msgModifiedParam.getRequestHeader().getURI()
                            + "] returned a non-OK HTTP status " + msgCpy.getResponseHeader().getStatusCode()
                            + " (after " + i + 1 + " of " + numberOfRequests + " steps for ["
                            + currentHtmlParameter.getType() + "] parameter " + currentHtmlParameter.getName()
                            + "). Could be indicative of SQL Injection, or some other error. The URL is not stable enough to look at Username Enumeration");
                    continueForParameter = false;
                    continue; // skip directly to the next parameter. Do not pass Go. Do not
                    // collect $200.
                }

                if (this.debugEnabled)
                    log.debug("Done following redirects!");

                // append the response to the responses so far for this particular instance
                // this will give us a complete picture of the full set of actual traffic
                // associated with following redirects for the request
                responseB.append(msgCpy.getResponseHeader().getHeadersAsString());
                responseB.append(msgCpy.getResponseBody().toString());

                // 5) Compute the longest common subsequence (LCS) of B[] into LCS_B
                // Note: in the Freiling and Schinzel method, this is calculated recursively. We
                // calculate it iteratively, but using an equivalent method

                // first time in, the LCS is simple: it's the first HTML result.. no diffing
                // required
                if (i == 0)
                    longestCommonSubstringB = responseB.toString();
                // else get the LCS of the existing string, and the current result
                else
                    longestCommonSubstringB = this.longestCommonSubsequence(longestCommonSubstringB,
                            responseB.toString());

                // optimisation step: if the LCS of B is 0 characters long already, then the URL
                // output is not stable, and we can abort now, and save some time
                if (longestCommonSubstringB.length() == 0) {
                    // this might occur if the output returned for the URL changed mid-way.
                    // Perhaps a CAPTCHA has fired, or a WAF has kicked in. Let's abort now.
                    log.warn("The modified URL [" + msgModifiedParam.getRequestHeader().getURI() + "] (for ["
                            + currentHtmlParameter.getType() + "] parameter " + currentHtmlParameter.getName()
                            + ") does not produce stable output (after " + i + 1 + " of " + numberOfRequests
                            + " steps). There is no static element in the output that can be used as a basis of comparison with the static output of the original query. Perhaps a CAPTCHA or WAF has kicked in!!");
                    continueForParameter = false;
                    continue; // skip directly to the next parameter. Do not pass Go. Do not
                    // collect $200.
                    // Note: if a CAPTCHA or WAF really has fired, the results of subsequent
                    // iterations will likely not be accurate..
                }
            }

            // if we didn't hit something with one of the iterations for the parameter (ie, if
            // the output when changing the param is stable),
            // check if the parameter might be vulnerable by comparing its LCS with the original
            // LCS for a valid login
            if (continueForParameter == true) {
                // get rid of any remnants of cookie setting and Date headers in the responses,
                // as these cause false positives, and can be safely ignored
                // replace the content length with a non-variable placeholder
                // replace url parameters with a non-variable placeholder to eliminate tokens in
                // URLs in the output
                longestCommonSubstringB = longestCommonSubstringB
                        .replaceAll("Set-Cookie:[^\\r\\n]+[\\r\\n]{1,2}", "");
                longestCommonSubstringB = longestCommonSubstringB.replaceAll("Date:[^\\r\\n]+[\\r\\n]{1,2}",
                        "");
                longestCommonSubstringB = longestCommonSubstringB
                        .replaceAll("Content-Length:[^\\r\\n]+[\\r\\n]{1,2}", "Content-Length: XXXX\n");
                longestCommonSubstringB = longestCommonSubstringB
                        .replaceAll("(?<=(&amp;|\\?)[^\\?\"=&;]+=)[^\\?\"=&;]+(?=(&amp;|\"))", "YYYY");

                if (this.debugEnabled)
                    log.debug("The LCS of B is [" + longestCommonSubstringB + "]");

                // 6) If LCS_A <> LCS_B, then there is a Username Enumeration issue on the
                // current parameter
                if (!longestCommonSubstringA.equals(longestCommonSubstringB)) {
                    // calculate line level diffs of the 2 Longest Common Substrings to aid the
                    // user in deciding if the match is a false positive
                    // get the diff as a series of patches
                    Patch diffpatch = DiffUtils.diff(
                            new LinkedList<String>(Arrays.asList(longestCommonSubstringA.split("\\n"))),
                            new LinkedList<String>(Arrays.asList(longestCommonSubstringB.split("\\n"))));

                    int numberofDifferences = diffpatch.getDeltas().size();

                    // and convert the list of patches to a String, joining using a newline
                    // String diffAB = StringUtils.join(diffpatch.getDeltas(), "\n");
                    StringBuilder tempDiff = new StringBuilder(250);
                    for (Delta delta : diffpatch.getDeltas()) {
                        String changeType = null;
                        if (delta.getType() == Delta.TYPE.CHANGE)
                            changeType = "Changed Text";
                        else if (delta.getType() == Delta.TYPE.DELETE)
                            changeType = "Deleted Text";
                        else if (delta.getType() == Delta.TYPE.INSERT)
                            changeType = "Inserted text";
                        else
                            changeType = "Unknown change type [" + delta.getType() + "]";

                        tempDiff.append("\n(" + changeType + ")\n"); // blank line before
                        tempDiff.append("Output for Valid Username  : " + delta.getOriginal() + "\n"); // no blank lines
                        tempDiff.append("\nOutput for Invalid Username: " + delta.getRevised() + "\n"); // blank line before
                    }
                    String diffAB = tempDiff.toString();
                    String extraInfo = Constant.messages.getString(
                            "ascanbeta.usernameenumeration.alert.extrainfo", currentHtmlParameter.getType(),
                            currentHtmlParameter.getName(), currentHtmlParameter.getValue(), // original value
                            invalidUsername.toString(), // new value
                            diffAB, // the differences between the two sets of output
                            numberofDifferences); // the number of differences
                    String attack = Constant.messages.getString("ascanbeta.usernameenumeration.alert.attack",
                            currentHtmlParameter.getType(), currentHtmlParameter.getName());
                    String vulnname = Constant.messages.getString("ascanbeta.usernameenumeration.name");
                    String vulndesc = Constant.messages.getString("ascanbeta.usernameenumeration.desc");
                    String vulnsoln = Constant.messages.getString("ascanbeta.usernameenumeration.soln");

                    // call bingo with some extra info, indicating that the alert is
                    bingo(Alert.RISK_INFO, Alert.CONFIDENCE_LOW, vulnname, vulndesc,
                            getBaseMsg().getRequestHeader().getURI().getURI(), currentHtmlParameter.getName(),
                            attack, extraInfo, vulnsoln, getBaseMsg());

                } else {
                    if (this.debugEnabled)
                        log.debug("[" + currentHtmlParameter.getType() + "] parameter ["
                                + currentHtmlParameter.getName()
                                + "] looks ok (Invalid Usernames cannot be distinguished from Valid usernames)");
                }
            }
        } // end of the for loop around the parameter list

    } catch (Exception e) {
        // Do not try to internationalise this.. we need an error message in any event..
        // if it's in English, it's still better than not having it at all.
        log.error("An error occurred checking a url for Username Enumeration issues", e);
    }
}
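
The cookie bookkeeping in the scan boils down to: when a redirect sets a cookie, drop any previously known cookie with the same name via Iterator.remove() and only then add the new one. A self-contained sketch of that replacement step, with a hypothetical Cookie class standing in for ZAP's HtmlParameter:

import java.util.Comparator;
import java.util.Iterator;
import java.util.TreeSet;

public class ReplaceCookieDemo {
    static final class Cookie {
        final String name;
        final String value;
        Cookie(String name, String value) { this.name = name; this.value = value; }
        @Override public String toString() { return name + "=" + value; }
    }

    public static void main(String[] args) {
        // ordered by name then value, so set.remove(c) would need both to match;
        // replacing "by name only" therefore walks the set explicitly
        TreeSet<Cookie> cookies = new TreeSet<>(
                Comparator.comparing((Cookie c) -> c.name).thenComparing(c -> c.value));
        cookies.add(new Cookie("JSESSIONID", "old"));
        cookies.add(new Cookie("lang", "en"));

        Cookie justSet = new Cookie("JSESSIONID", "new");
        for (Iterator<Cookie> it = cookies.iterator(); it.hasNext();) {
            if (it.next().name.equals(justSet.name)) {
                it.remove();          // drop the stale value for this name
                break;
            }
        }
        cookies.add(justSet);         // safe now: no clash on the same name
        System.out.println(cookies);  // [JSESSIONID=new, lang=en]
    }
}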

From source file: org.broadinstitute.gatk.tools.walkers.haplotypecaller.LDMerger.java

/**
 * Update the event maps in all haplotypes, replacing update1 and update2 with replacement
 *
 * @param haplotypes the haplotypes whose event maps we need to update
 * @param startPosKeySet a sorted set of start positions that we must update
 * @param replacement a VariantContext to replace update1 and update2 with.  Can be null, indicating that we just want to remove update1 and update2
 * @param update1 the first VC we want to update
 * @param update2 the second VC we want to update
 */
private void replaceVariantContextsInMap(final List<Haplotype> haplotypes,
        final TreeSet<Integer> startPosKeySet, final VariantContext replacement, final VariantContext update1,
        final VariantContext update2) {
    // remove the old event from the eventMap on every haplotype and the start pos key set, replace with merged event
    for (final Haplotype h : haplotypes) {
        // if we had both events, add replacement.  In some cases the haplotype may not have both
        // events but they were still merged because the haplotype isn't a particularly informative
        // haplotype in any case.  The order of operations here is important because we are modifying the map
        final boolean shouldAdd = h.getEventMap().containsKey(update1.getStart())
                && h.getEventMap().containsKey(update2.getStart());
        h.getEventMap().remove(update1.getStart());
        h.getEventMap().remove(update2.getStart());
        if (shouldAdd && replacement != null) {
            h.getEventMap().addVC(replacement, false); // cannot merge with other events at the same position
        }
    }

    startPosKeySet.remove(update1.getStart());
    startPosKeySet.remove(update2.getStart());
    if (replacement != null)
        startPosKeySet.add(replacement.getStart());
}
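
The same remove-then-add bookkeeping on a sorted key set, stripped of the GATK types (hypothetical start positions):

import java.util.TreeSet;

public class MergeStartPositionsDemo {
    public static void main(String[] args) {
        TreeSet<Integer> startPositions = new TreeSet<>();
        startPositions.add(100);
        startPositions.add(250);
        startPositions.add(400);

        Integer replacementStart = 100;           // the merged event keeps the earlier start
        startPositions.remove(100);               // remove both originals first...
        startPositions.remove(250);
        if (replacementStart != null) {
            startPositions.add(replacementStart); // ...then add the replacement, if any
        }
        System.out.println(startPositions);       // [100, 400]
    }
}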

From source file: org.unitime.timetable.onlinesectioning.OnlineSectioningServerImpl.java

@Override
public void update(CourseInfo info) {
    iLock.writeLock().lock();
    try {
        CourseInfo old = iCourseForId.get(info.getUniqueId());
        iCourseForId.put(info.getUniqueId(), info);
        TreeSet<CourseInfo> courses = iCourseForName.get(info.toString());
        if (courses == null) {
            courses = new TreeSet<CourseInfo>();
            iCourseForName.put(info.toString(), courses);
        }
        if (old != null) {
            courses.remove(old);
            iCourses.remove(old);
        }
        courses.add(info);
        iCourses.add(info);
        if (courses.size() == 1)
            for (CourseInfo x : courses)
                x.setHasUniqueName(true);
        else if (courses.size() > 1)
            for (CourseInfo x : courses)
                x.setHasUniqueName(false);
    } finally {
        iLock.writeLock().unlock();
    }
}

From source file: org.wso2.andes.kernel.slot.SlotManager.java

/**
 * Remove slot entry from slotAssignment map
 *
 * @param queueName name of the queue that owns the slot to be deleted
 * @param emptySlot reference of the slot to be deleted
 */
public boolean deleteSlot(String queueName, Slot emptySlot, String nodeId) {
    long startMsgId = emptySlot.getStartMessageId();
    long endMsgId = emptySlot.getEndMessageId();
    long slotDeleteSafeZone = getSlotDeleteSafeZone();
    if (log.isDebugEnabled()) {
        log.debug("Trying to delete slot. safeZone= " + getSlotDeleteSafeZone() + " startMsgID= " + startMsgId);
    }
    if (slotDeleteSafeZone > endMsgId) {
        String lockKey = nodeId + SlotManager.class;
        synchronized (lockKey.intern()) {
            HashMap<String, TreeSet<Slot>> queueToSlotMap = null;
            HashmapStringTreeSetWrapper wrapper = slotAssignmentMap.get(nodeId);
            if (wrapper != null) {
                queueToSlotMap = wrapper.getStringListHashMap();
            }
            if (queueToSlotMap != null) {
                TreeSet<Slot> currentSlotList = queueToSlotMap.get(queueName);
                if (currentSlotList != null) {
                    //get the actual reference of the slot to be removed
                    Slot slotInAssignmentMap = null; //currentSlotList.ceiling(emptySlot);

                    for (Slot slot : currentSlotList) {
                        if (slot.getStartMessageId() == emptySlot.getStartMessageId()) {
                            slotInAssignmentMap = slot;
                        }
                    }

                    if (null != slotInAssignmentMap) {
                        if (slotInAssignmentMap.addState(SlotState.DELETED)) {
                            currentSlotList.remove(slotInAssignmentMap);
                            queueToSlotMap.put(queueName, currentSlotList);
                            wrapper.setStringListHashMap(queueToSlotMap);
                            slotAssignmentMap.set(nodeId, wrapper);
                            if (log.isDebugEnabled()) {
                                log.debug(
                                        "Deleted slot from Slot Assignment Map : Slot= " + slotInAssignmentMap);
                            }
                        }
                    }
                }
            }
        }
        return true;
    } else {
        if (log.isDebugEnabled()) {
            log.debug("Cannot delete slot as it is within safe zone startMsgID= " + startMsgId + " safeZone= "
                    + slotDeleteSafeZone + " endMsgId= " + endMsgId + " slotToDelete= " + emptySlot);
        }
        return false;
    }
}
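
Worth noting about the deletion above: TreeSet.remove matches elements through the set's ordering, so when the comparison logic does not line up with the lookup key, the code first scans for the exact element to remove. A minimal illustration of ordering-based matching, with a hypothetical Slot class rather than the Andes one:

import java.util.Comparator;
import java.util.TreeSet;

public class RemoveByOrderingDemo {
    static final class Slot {
        final long startMessageId;
        final String owner;
        Slot(long startMessageId, String owner) { this.startMessageId = startMessageId; this.owner = owner; }
    }

    public static void main(String[] args) {
        // ordered (and therefore matched) by startMessageId only
        TreeSet<Slot> slots = new TreeSet<>(Comparator.comparingLong((Slot s) -> s.startMessageId));
        slots.add(new Slot(1000L, "node-1"));

        // a different instance with the same id compares as equal for the TreeSet
        boolean removed = slots.remove(new Slot(1000L, "anything"));
        System.out.println(removed);      // true
        System.out.println(slots.size()); // 0
    }
}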

From source file: org.rhwlab.BHC.BHCTree.java

public Set<Nucleus> cutToMinimum(int minNucs, double minVolume, double maxProb) {
    double logProb = Math.log(maxProb);
    TreeSet<NucleusLogNode> cut = firstTreeCut();
    while (true) {
        NucleusLogNode[] next = nextTreeCut(cut);
        if (cut.size() >= minNucs) {
            // are the next nuclei overlapping
            Nucleus nuc0 = next[0].getNucleus(time);
            Nucleus nuc1 = next[1].getNucleus(time);

            if (nuc0 == null || nuc1 == null || Nucleus.intersect(nuc0, nuc1)) {
                break;
            }
        }

        if (next[0].getVolume() > minVolume) {
            cut.add(next[0]);
        }
        if (next[1].getVolume() > minVolume) {
            cut.add(next[1]);
        }
        if (next[0].getVolume() > minVolume || next[1].getVolume() > minVolume) {
            cut.remove(next[2]);
        }
        System.out.printf("logProb[0]=%f\n", next[0].getLogPosterior());
        System.out.printf("logProb[1]=%f\n", next[1].getLogPosterior());
        System.out.printf("logProb[2]=%f\n", next[2].getLogPosterior());
        if (next[0].getLogPosterior() == 0.0 && next[1].getLogPosterior() == 0.0) {
            break;
        }
    }
    Set<Nucleus> ret = new TreeSet<>();
    for (NucleusLogNode node : cut) {
        Nucleus nuc = node.getNucleus(time);
        nuc.setTime(time);
        ret.add(nuc);
    }
    return ret;
}
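
The loop above refines the tree cut by adding two child nodes and removing their parent in the same step. The skeleton of that move, with hypothetical integer node ids in place of NucleusLogNode:

import java.util.TreeSet;

public class RefineCutDemo {
    public static void main(String[] args) {
        TreeSet<Integer> cut = new TreeSet<>();
        int parent = 7;
        int leftChild = 14, rightChild = 15;
        cut.add(parent);            // the parent node is currently part of the cut

        cut.add(leftChild);
        cut.add(rightChild);
        cut.remove(parent);         // the parent is replaced by its two children
        System.out.println(cut);    // [14, 15]
    }
}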

From source file: com.l2jfree.gameserver.datatables.SkillTable.java

private SkillTable() {
    final List<L2Skill> skills = DocumentEngine.loadSkills();
    _log.info("SkillTable: Loaded " + skills.size() + " skill templates from XML files.");

    int highestId = 0;
    for (L2Skill skill : skills)
        if (highestId < skill.getId())
            highestId = skill.getId();

    _maxLevels = new int[highestId + 1];

    int[] highestLevels = new int[highestId + 1];
    for (L2Skill skill : skills) {
        if (highestLevels[skill.getId()] < skill.getLevel())
            highestLevels[skill.getId()] = skill.getLevel();

        if (_maxLevels[skill.getId()] < skill.getLevel() && skill.getLevel() < 100)
            _maxLevels[skill.getId()] = skill.getLevel();
    }

    // clear previously stored skills
    for (SkillInfo[] infos : SKILL_INFOS)
        if (infos != null)
            for (SkillInfo info : infos)
                if (info != null)
                    info._skill = null;

    _skillTable = new L2Skill[highestId + 1][];

    SKILL_INFOS = Arrays.copyOf(SKILL_INFOS, Math.max(SKILL_INFOS.length, highestId + 1));

    for (int i = 0; i < highestLevels.length; i++) {
        final int highestLevel = highestLevels[i];

        if (highestLevel < 1)
            continue;

        _skillTable[i] = new L2Skill[highestLevel + 1];

        if (SKILL_INFOS[i] == null)
            SKILL_INFOS[i] = new SkillInfo[highestLevel + 1];
        else
            SKILL_INFOS[i] = Arrays.copyOf(SKILL_INFOS[i], Math.max(SKILL_INFOS[i].length, highestLevel + 1));
    }

    for (L2Skill skill : skills) {
        _skillTable[skill.getId()][skill.getLevel()] = skill;

        if (SKILL_INFOS[skill.getId()][skill.getLevel()] == null)
            SKILL_INFOS[skill.getId()][skill.getLevel()] = new SkillInfo(skill.getId(), skill.getLevel());

        SKILL_INFOS[skill.getId()][skill.getLevel()]._skill = skill;
    }

    int length = _skillTable.length;
    for (L2Skill[] array : _skillTable)
        if (array != null)
            length += array.length;

    _log.info("SkillTable: Occupying arrays for " + length + ".");

    SingletonHolder.INSTANCE = this;

    Map<Integer, L2Skill> skillsByUID = new HashMap<Integer, L2Skill>();

    for (L2Skill skill : skills) {
        try {
            L2Skill old = skillsByUID.put(SkillTable.getSkillUID(skill), skill);

            if (old != null)
                _log.warn("Overlapping UIDs for: " + old + ", " + skill, new IllegalStateException());

            skill.validate();
        } catch (Exception e) {
            _log.warn(skill, e);
        }
    }

    for (L2Skill skill0 : skills) {
        if (!(skill0 instanceof L2SkillLearnSkill))
            continue;

        L2SkillLearnSkill skill = (L2SkillLearnSkill) skill0;

        for (int i = 0; i < skill.getNewSkillId().length; i++) {
            final L2Skill learnedSkill = getInfo(skill.getNewSkillId()[i], skill.getNewSkillLvl()[i]);

            if (learnedSkill != null)
                _learnedSkills.add(learnedSkill);
        }
    }

    // checking for skill enchantment mismatch

    // in XMLs
    final TreeSet<String> skillEnchantsInXMLs = new TreeSet<String>();

    // reusing
    final Map<Integer, Set<Integer>> enchantLevelsByEnchantType = new HashMap<Integer, Set<Integer>>();

    for (int skillId = 0; skillId < _skillTable.length; skillId++) {
        final L2Skill[] skillsById = _skillTable[skillId];

        if (skillsById == null)
            continue;

        for (final L2Skill skill : skillsById) {
            if (skill == null || skill.getLevel() < 100)
                continue;

            final int enchantType = skill.getLevel() / 100;
            final int enchantLevel = skill.getLevel() % 100;

            Set<Integer> enchantLevels = enchantLevelsByEnchantType.get(enchantType);

            if (enchantLevels == null)
                enchantLevelsByEnchantType.put(enchantType, enchantLevels = new FastSet<Integer>(30));

            enchantLevels.add(enchantLevel);
        }

        for (Map.Entry<Integer, Set<Integer>> entry : enchantLevelsByEnchantType.entrySet()) {
            final int enchantType = entry.getKey();
            final Set<Integer> enchantLevels = entry.getValue();

            if (enchantLevels.isEmpty())
                continue;

            final String s = "Skill ID: " + skillId + " - EnchantType: enchant" + enchantType + " - Levels: "
                    + enchantLevels.size();

            boolean valid = true;

            for (int skillLvl = 1; skillLvl <= 30; skillLvl++) {
                if (!enchantLevels.remove(skillLvl)) {
                    if (skillLvl == 16 && enchantLevels.isEmpty())
                        break;
                    _log.warn("Missing skill enchant level in XMLs for " + s + " - Level: " + skillLvl);
                    valid = false;
                }
            }

            if (!enchantLevels.isEmpty())
                _log.warn("Extra skill enchant levels in XMLs for " + s + " - Levels: " + enchantLevels);
            else if (valid)
                skillEnchantsInXMLs.add(s);

            // reusing
            enchantLevels.clear();
        }
    }

    // in database
    final TreeSet<String> skillEnchantsInDatabase = new TreeSet<String>();

    for (L2EnchantSkillLearn skillLearn : SkillTreeTable.getInstance().getSkillEnchantments()) {
        final int skillId = skillLearn.getId();
        final List<EnchantSkillDetail>[] details = skillLearn.getEnchantRoutes();

        if (details.length == 0)
            _log.warn("Invalid skill enchant data in database for Skill ID: " + skillId);

        for (int indexingEnchantType = 0; indexingEnchantType < details.length; indexingEnchantType++) {
            final List<EnchantSkillDetail> route = details[indexingEnchantType];

            if (route == null)
                continue;

            final String s = "Skill ID: " + skillId + " - EnchantType: enchant" + (indexingEnchantType + 1)
                    + " - Levels: " + route.size();

            if (route.size() != 30 && route.size() != 15)
                _log.warn("Invalid skill enchant data in database for " + s);
            else
                skillEnchantsInDatabase.add(s);
        }
    }

    // comparing the results
    for (String skillEnchant : skillEnchantsInXMLs)
        if (!skillEnchantsInDatabase.remove(skillEnchant))
            _log.warn("Missing skill enchant data in database for " + skillEnchant);

    for (String skillEnchant : skillEnchantsInDatabase)
        _log.warn("Missing skill enchant data in XMLs for " + skillEnchant);

    // just validation
    for (L2EnchantSkillLearn skillLearn : SkillTreeTable.getInstance().getSkillEnchantments()) {
        final int skillId = skillLearn.getId();
        final List<EnchantSkillDetail>[] details = skillLearn.getEnchantRoutes();
        final int maxLevel = getMaxLevel(skillId);

        if (skillLearn.getBaseLevel() != maxLevel)
            _log.warn("Invalid `base_lvl` skill enchant data in database for Skill ID: " + skillId);

        for (int indexingEnchantType = 0; indexingEnchantType < details.length; indexingEnchantType++) {
            final List<EnchantSkillDetail> route = details[indexingEnchantType];

            if (route == null)
                continue;

            final String s = "Skill ID: " + skillId + " - EnchantType: enchant" + (indexingEnchantType + 1)
                    + " - Levels: " + route.size();

            int index = 1;
            int expectedMinSkillLevel = maxLevel;

            for (EnchantSkillDetail detail : route) {
                if (detail.getLevel() % 100 != index)
                    _log.warn("Invalid `level` skill enchant data in database for " + s);

                if (detail.getMinSkillLevel() != expectedMinSkillLevel)
                    _log.warn("Invalid `min_skill_lvl` skill enchant data in database for " + s);

                index++;
                expectedMinSkillLevel = detail.getLevel();
            }
        }
    }
}
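
The comparison near the end uses remove's boolean result to diff two sorted sets in a single pass: anything that fails to remove is missing on one side, and anything left over afterwards is missing on the other. A compact sketch of that idiom with hypothetical entry strings:

import java.util.TreeSet;

public class TwoWayDiffDemo {
    public static void main(String[] args) {
        TreeSet<String> inXml = new TreeSet<>();
        inXml.add("skill-1");
        inXml.add("skill-2");

        TreeSet<String> inDatabase = new TreeSet<>();
        inDatabase.add("skill-2");
        inDatabase.add("skill-3");

        for (String entry : inXml) {
            if (!inDatabase.remove(entry)) {
                System.out.println("missing in database: " + entry); // skill-1
            }
        }
        for (String leftover : inDatabase) {
            System.out.println("missing in XMLs: " + leftover);      // skill-3
        }
    }
}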

From source file: org.hyperic.hq.measurement.server.session.AvailabilityManagerImpl.java

private void removeAvail(AvailabilityDataRLE avail, Map<Integer, TreeSet<AvailabilityDataRLE>> currAvails,
        Map<DataPoint, AvailabilityDataRLE> createMap, Map<DataPoint, AvailabilityDataRLE> removeMap) {
    long start = avail.getStartime();
    Integer mId = avail.getMeasurement().getId();
    TreeSet<AvailabilityDataRLE> rles = currAvails.get(mId);
    if (rles.remove(avail)) {
        DataPoint key = new DataPoint(mId.intValue(), avail.getAvailVal(), start);
        createMap.remove(key);
        removeMap.put(key, avail);
    }
}