List of usage examples for java.util.TreeSet.size()
public int size()
From source file:org.lockss.servlet.SubscriptionManagement.java
/** * Populates a tab with the publications for a publisher. * /*www . j a v a 2 s .com*/ * @param publisherName * A String with the name of the publisher. * @param pubSet * A TreeSet<SerialPublication> with the publisher publications. * @param divTableMap * A Map<String, Table> with the tabs tables mapped by the first * letter of the tab letter group. */ private void populateTabPublisherPublications(String publisherName, TreeSet<SerialPublication> pubSet, Map<String, Table> divTableMap) { final String DEBUG_HEADER = "populateTabPublisherPublications(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publisherName = " + publisherName); // The publisher name first letter. String firstLetterPub = publisherName.substring(0, 1).toUpperCase(); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "firstLetterPub = " + firstLetterPub); // Get the tab table that corresponds to this publisher. Table divTable = divTableMap.get(firstLetterPub); // Check whether no table corresponds naturally to this publisher. if (divTable == null) { // Yes: Use the first table. divTable = divTableMap.get("A"); // Check whether no table is found. if (divTable == null) { // Yes: Report the problem and skip this publisher. log.error("Publisher '" + publisherName + "' belongs to an unknown tab: Skipped."); return; } } // Sanitize the publisher name so that it can be used as an HTML division // identifier. String cleanNameString = StringUtil.sanitizeToIdentifier(publisherName); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "cleanNameString = " + cleanNameString); String publisherRowTitle = publisherName; // Check whether there are any publications to show. if (pubSet != null && pubSet.size() > 0) { // Yes: Get the publisher row title. publisherRowTitle += " (" + pubSet.size() + ")"; } if (log.isDebug3()) log.debug3(DEBUG_HEADER + "publisherRowTitle = " + publisherRowTitle); // Create in the table the title row for the publisher. 
createPublisherRow(publisherRowTitle, cleanNameString, divTable); // Check whether there are any publications to show. if (pubSet != null) { // Yes: Add them. int rowIndex = 0; // Loop through all the publications. for (SerialPublication publication : pubSet) { // Create in the table a row for the publication. createPublicationRow(publication, cleanNameString, rowIndex, divTable); rowIndex++; } } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done."); }
From source file:org.processmining.analysis.performance.PerformanceAnalysisGUI.java
/** * Initializes the waiting time levels. If no manual peformance settings are * filled in by the user, standard settings are calculated and used. * Standard settings: approximately 33% low, 33% high, 33% medium level note * that a set is used instead of a list however, so if a time occurs * multiple times (this can happen easily with a waiting time of 0.0s for * instance) of such places only one is used, so the 33-33-33 estimation can * be quite wrong, though this is not considered to be a problem. *//*from ww w .j a v a 2 s .c om*/ public void initializeWaitingTimeLevels() { if (!manualSettings) { // no manual settings are present TreeSet waitingTimes = new TreeSet(); ListIterator it = extendedPetriNet.getPlaces().listIterator(); while (it.hasNext()) { // place the mean waiting time of each place in the tree set ExtendedPlace p = (ExtendedPlace) it.next(); p.calculateMetrics(extendedLog.getLogTraceIDs(), advancedSettings[1], failedInstances); if (p.getMeanWaitingTime() >= 0) { // only add correct times double waitTime = p.getMeanWaitingTime() / timeDivider; waitingTimes.add(Double.valueOf(waitTime)); } } int num = waitingTimes.size() / 3; // remove the first 'num' measurements and the last 'num' // measurements // from waitingTimes for (int i = 0; i < num; i++) { // there should be at least one waiting time measurement // remaining if (!(waitingTimes.size() < 2)) { waitingTimes.remove(waitingTimes.first()); waitingTimes.remove(waitingTimes.last()); } } // give new values to the bounds and the colors if (waitingTimes.size() != 0) { Double bnd = (Double) waitingTimes.first(); bounds.set(0, bnd); bnd = (Double) waitingTimes.last(); bounds.set(1, bnd); } else { // in case there are no valid waiting times waitingTimes.add(Double.valueOf(0)); Double bnd = (Double) waitingTimes.first(); bounds.set(0, bnd); bounds.set(1, bnd); } levelColors.set(0, Color.BLUE); levelColors.set(1, Color.YELLOW); levelColors.set(2, Color.MAGENTA); } 
extendedPetriNet.setAdvancedSettings(advancedSettings); extendedPetriNet.setBounds(bounds); extendedPetriNet.setLevelColors(levelColors); extendedPetriNet.setTimeDivider(timeDivider); extendedPetriNet.setFailedInstances(failedInstances); }
From source file:com.microsoft.tfs.core.clients.versioncontrol.engines.internal.GetEngine.java
/** * Prepare the results returned from the server for processing in the main * get loop.//from w w w. j a va 2 s .c o m * */ private GetOperation[] prepareGetOperations(final AsyncGetOperation asyncOp, final GetOperation[][] results) { Check.notNull(asyncOp, "asyncOp"); //$NON-NLS-1$ Check.notNull(results, "results"); //$NON-NLS-1$ /* * The common case is a single result, and we do not want to slow that * down. In the case where there are multiple requests (and thus * multiple results), we need to filter to make sure that we don't have * redundant getOps. */ Map<String, GetOperation> newLocalItemHash = null; if (results.length > 1) { newLocalItemHash = new TreeMap<String, GetOperation>(LocalPath.TOP_DOWN_COMPARATOR); } System.currentTimeMillis(); for (int i = 0; i < results.length; i++) { final GetOperation[] tempGetOps = results[i]; for (final GetOperation getOp : tempGetOps) { /* * We need to build a hashtable of getOps that have a source * that is an existing item. In the multiple result case, we * also need to filter out redundant getOps. Each local item * currently on disk can only have one operation. Also, each * target local item can have only one operation. They must be * considered separately. */ final String sourceLocalItem = getOp.getSourceLocalItem(); if (sourceLocalItem != null) { if (results.length == 1) { if (!asyncOp.getExistingLocalHash().containsKey(sourceLocalItem)) { asyncOp.getExistingLocalHash().put(sourceLocalItem, getOp); } else { // This is a server problem. 
onNonFatalError(new Exception(MessageFormat.format( //@formatter:off Messages.getString( "GetEngine.ServerGateUsTwoGetOperationsForSameLocalPathMayRequireMultipleGetsFormat"), //$NON-NLS-1$ //@formatter:on sourceLocalItem))); } } else { // I think this test is redundant because of the test // above if (sourceLocalItem != null) { if (!asyncOp.getExistingLocalHash().containsKey(sourceLocalItem)) { asyncOp.getExistingLocalHash().put(sourceLocalItem, getOp); } else { final GetOperation existingOp = asyncOp.getExistingLocalHash().get(sourceLocalItem); /* * favor the get operation which has a target * local item this happens in the case when the * caller does 2 scoped gets See bug 416603 for * details */ if (existingOp.getTargetLocalItem() == null && getOp.getTargetLocalItem() != null) { asyncOp.getExistingLocalHash().put(sourceLocalItem, getOp); } } } } } else if (results.length != 1) { final String newLocalItem = getOp.getTargetLocalItem(); if (newLocalItem != null && !newLocalItemHash.containsKey(newLocalItem)) { newLocalItemHash.put(newLocalItem, getOp); } } } } /* * Since JDK 1.7 {@link Arrays.sort} algorithm has changed: * * The implementation was adapted from Tim Peters's list sort for Python * (<a href= * "http://svn.python.org/projects/python/trunk/Objects/listsort.txt"> * TimSort</a>). It uses techiques from Peter McIlroy's "Optimistic * Sorting and Information Theoretic Complexity", in Proceedings of the * Fourth Annual ACM-SIAM Symposium on Discrete Algorithms, pp 467-474, * January 1993. * * For some unknown reason the new implementation is not compatible with * the {@link GetOperation}'s compareTo method. We have to use another * means to get an ordered list of operation, e.g. to utilize {@link * TreeSet}. */ // Sort the get operations for execution. Note that HatGui's cache // model relies on this. final TreeSet<GetOperation> getOps = new TreeSet<GetOperation>(GetOperation.GET_OPERATION_COMPARATOR); // Again, we've optimized the case of a single result. 
if (results.length == 1) { getOps.addAll(Arrays.asList(results[0])); } else { // copy our get ops to the output sorted set getOps.addAll(asyncOp.getExistingLocalHash().values()); getOps.addAll(newLocalItemHash.values()); } // Record the total number of operations for use in the events. asyncOp.totalNumOperations = getOps.size(); return getOps.toArray(new GetOperation[getOps.size()]); }
From source file:org.ensembl.healthcheck.testcase.EnsTestCase.java
/** * Get the equivalent database from the secondary database server. * "equivalent" means: same database type and species. If more than one * database on the secondary server has the same type and species, then the * one with the highest version number is used. * /* w w w. jav a2s. c o m*/ * @param dbre * The database to find the equivalent for. * @return The database on the secondary server with the same type and * species, and the highest version number, or null if none is * found. */ public DatabaseRegistryEntry getEquivalentFromSecondaryServer(DatabaseRegistryEntry dbre) { DatabaseRegistry secondaryDatabaseRegistry = DBUtils.getSecondaryDatabaseRegistry(); // find any databases matching type and species TreeSet<DatabaseRegistryEntry> matchingDBs = new TreeSet<DatabaseRegistryEntry>(); // get // sorting // for // free for (DatabaseRegistryEntry secDBRE : secondaryDatabaseRegistry.getAll()) { if (DBUtils.getSecondaryDatabase() != null) { if (secDBRE.getName().equals(DBUtils.getSecondaryDatabase())) { return secDBRE; } } if (dbre.getSpecies() == Species.UNKNOWN) { // EG where we don't know the species, use type and alias // matching instead if (dbre.getType().equals(secDBRE.getType()) && dbre.getAlias().equals(secDBRE.getAlias())) { matchingDBs.add(secDBRE); logger.finest("added " + secDBRE.getName() + " to list of databases to check for equivalent to " + dbre.getName()); } } else { // nulls will set type automatically if (dbre.getType().equals(secDBRE.getType()) && dbre.getSpecies().equals(secDBRE.getSpecies())) { matchingDBs.add(secDBRE); logger.finest("added " + secDBRE.getName() + " to list of databases to check for equivalent to " + dbre.getName()); } } } if (matchingDBs.size() == 0) { logger.finest("Could not find equivalent database to " + dbre.getName() + " on secondary server"); } // take the highest one that doesn't have the same version number as our // current one, if available DatabaseRegistryEntry result = null; if (matchingDBs.size() > 0) { result 
= (DatabaseRegistryEntry) matchingDBs.last(); } return result; }
From source file:edu.umass.cs.gigapaxos.SQLPaxosLogger.java
private static Set<Filename> getAllButLatest(File[] files, int keep) { TreeSet<Filename> allFiles = new TreeSet<Filename>(); TreeSet<Filename> oldFiles = new TreeSet<Filename>(); for (File file : files) allFiles.add(new Filename(file)); if (allFiles.size() <= keep) return oldFiles; Iterator<Filename> iter = allFiles.iterator(); for (int i = 0; i < allFiles.size() - keep; i++) oldFiles.add(iter.next());//from ww w . ja v a 2 s. co m return oldFiles; }
From source file:edu.umass.cs.gigapaxos.SQLPaxosLogger.java
private static SortedSet<Filename> getLatest(File[] files, int numLatest) { TreeSet<Filename> allFiles = new TreeSet<Filename>(); TreeSet<Filename> oldFiles = new TreeSet<Filename>(); for (File file : files) allFiles.add(new Filename(file)); if (allFiles.size() <= numLatest) return allFiles; Iterator<Filename> iter = allFiles.descendingIterator(); for (int i = 0; i < numLatest; i++) oldFiles.add(iter.next());//from ww w. ja v a2 s. c o m return oldFiles; }
From source file:org.zaproxy.zap.extension.ascanrulesBeta.UsernameEnumeration.java
/** * looks for username enumeration in the login page, by changing the username field to be a * valid / invalid user, and looking for differences in the response *///from w ww . j a v a 2 s . c o m @Override public void scan() { // the technique to determine if usernames can be enumerated is as follows, using a variant // of the Freiling+Schinzel method, // adapted to the case where we do not know which is the username field // // 1) Request the original URL n times. (The original URL is assumed to have a valid // username, if not a valid password). Store the results in A[]. // 2) Compute the longest common subsequence (LCS) of A[] into LCS_A // 3) for each parameter in the original URL (ie, for URL params, form params, and cookie // params) // 4) Change the current parameter (which we assume is the username parameter) to an invalid // username (randomly), and request the URL n times. Store the results in B[]. // 5) Compute the longest common subsequence (LCS) of B[] into LCS_B // 6) If LCS_A <> LCS_B, then there is a Username Enumeration issue on the current parameter try { boolean loginUrl = false; // Are we dealing with a login url in any of the contexts of which this uri is part URI requestUri = getBaseMsg().getRequestHeader().getURI(); // using the session, get the list of contexts for the url List<Context> contextList = extAuth.getModel().getSession().getContextsForUrl(requestUri.getURI()); // now loop, and see if the url is a login url in each of the contexts in turn... for (Context context : contextList) { URI loginUri = extAuth.getLoginRequestURIForContext(context); if (loginUri != null) { if (requestUri.getScheme().equals(loginUri.getScheme()) && requestUri.getHost().equals(loginUri.getHost()) && requestUri.getPort() == loginUri.getPort() && requestUri.getPath().equals(loginUri.getPath())) { // we got this far.. only the method (GET/POST), user details, query params, // fragment, and POST params // are possibly different from the login page. 
loginUrl = true; log.info(requestUri.toString() + " falls within a context, and is the defined Login URL. Scanning for possible Username Enumeration vulnerability."); break; // Stop checking } } } // the Username Enumeration scanner will only run for logon pages if (loginUrl == false) { if (this.debugEnabled) { log.debug(requestUri.toString() + " is not a defined Login URL."); } return; // No need to continue for this URL } // find all params set in the request (GET/POST/Cookie) TreeSet<HtmlParameter> htmlParams = new TreeSet<>(); htmlParams.addAll(getBaseMsg().getRequestHeader().getCookieParams()); // request cookies only. no response cookies htmlParams.addAll(getBaseMsg().getFormParams()); // add in the POST params htmlParams.addAll(getBaseMsg().getUrlParams()); // add in the GET params int numberOfRequests = 0; if (this.getAttackStrength() == AttackStrength.INSANE) { numberOfRequests = 50; } else if (this.getAttackStrength() == AttackStrength.HIGH) { numberOfRequests = 15; } else if (this.getAttackStrength() == AttackStrength.MEDIUM) { numberOfRequests = 5; } else if (this.getAttackStrength() == AttackStrength.LOW) { numberOfRequests = 3; } // 1) Request the original URL n times. (The original URL is assumed to have a valid // username, if not a valid password). Store the results in A[]. // make sure to manually handle all redirects, and cookies that may be set in response. // allocate enough space for the responses StringBuilder responseA = null; StringBuilder responseB = null; String longestCommonSubstringA = null; String longestCommonSubstringB = null; for (int i = 0; i < numberOfRequests; i++) { // initialise the storage for this iteration // baseResponses[i]= new StringBuilder(250); responseA = new StringBuilder(250); HttpMessage msgCpy = getNewMsg(); // clone the request, but not the response sendAndReceive(msgCpy, false, false); // request the URL, but do not automatically follow redirects. 
// get all cookies set in the response TreeSet<HtmlParameter> cookies = msgCpy.getResponseHeader().getCookieParams(); int redirectCount = 0; while (HttpStatusCode.isRedirection(msgCpy.getResponseHeader().getStatusCode())) { redirectCount++; if (this.debugEnabled) log.debug("Following redirect " + redirectCount + " for message " + i + " of " + numberOfRequests + " iterations of the original query"); // append the response to the responses so far for this particular instance // this will give us a complete picture of the full set of actual traffic // associated with following redirects for the request responseA.append(msgCpy.getResponseHeader().getHeadersAsString()); responseA.append(msgCpy.getResponseBody().toString()); // and manually follow the redirect // create a new message from scratch HttpMessage msgRedirect = new HttpMessage(); // create a new URI from the absolute location returned, and interpret it as // escaped // note that the standard says that the Location returned should be absolute, // but it ain't always so... URI newLocation = new URI(msgCpy.getResponseHeader().getHeader(HttpHeader.LOCATION), true); try { msgRedirect.getRequestHeader().setURI(newLocation); } catch (Exception e) { // the Location field contents may not be standards compliant. Lets generate // a uri to use as a workaround where a relative path was // given instead of an absolute one URI newLocationWorkaround = new URI(msgCpy.getRequestHeader().getURI(), msgCpy.getResponseHeader().getHeader(HttpHeader.LOCATION), true); // try again, except this time, if it fails, don't try to handle it if (this.debugEnabled) log.debug("The Location [" + newLocation + "] specified in a redirect was not valid (not absolute?). 
Trying absolute workaround url [" + newLocationWorkaround + "]"); msgRedirect.getRequestHeader().setURI(newLocationWorkaround); } msgRedirect.getRequestHeader().setMethod(HttpRequestHeader.GET); // it's always a GET for a redirect msgRedirect.getRequestHeader().setContentLength(0); // since we send a GET, the body will be 0 long if (cookies.size() > 0) { // if a previous request sent back a cookie that has not since been // invalidated, we need to set that cookie when following redirects, as a // browser would msgRedirect.getRequestHeader().setCookieParams(cookies); } if (this.debugEnabled) log.debug("DEBUG: Following redirect to [" + newLocation + "]"); sendAndReceive(msgRedirect, false, false); // do NOT redirect.. handle it here // handle scenario where a cookie is unset in a subsequent iteration, or where // the same cookie name is later re-assigned a different value // ie, in these cases, do not simply (and dumbly) accumulate cookie detritus. // first get all cookies set in the response TreeSet<HtmlParameter> cookiesTemp = msgRedirect.getResponseHeader().getCookieParams(); for (Iterator<HtmlParameter> redirectSetsCookieIterator = cookiesTemp .iterator(); redirectSetsCookieIterator.hasNext();) { HtmlParameter cookieJustSet = redirectSetsCookieIterator.next(); // loop through each of the cookies we know about in cookies, to see if it // matches by name. // if so, delete that cookie, and add the one that was just set to cookies. // if not, add the one that was just set to cookies. 
for (Iterator<HtmlParameter> knownCookiesIterator = cookies.iterator(); knownCookiesIterator .hasNext();) { HtmlParameter knownCookie = knownCookiesIterator.next(); if (cookieJustSet.getName().equals(knownCookie.getName())) { knownCookiesIterator.remove(); break; // out of the loop for known cookies, back to the next cookie // set in the response } } // end of loop for cookies we already know about // we can now safely add the cookie that was just set into cookies, knowing // it does not clash with anything else in there. cookies.add(cookieJustSet); } // end of for loop for cookies just set in the redirect msgCpy = msgRedirect; // store the last redirect message into the MsgCpy, as we // will be using it's output in a moment.. } // end of loop to follow redirects // now that the redirections have all been handled.. was the request finally a // success or not? Successful or Failed Logins would normally both return an OK // HTTP status if (!HttpStatusCode.isSuccess(msgCpy.getResponseHeader().getStatusCode())) { log.warn("The original URL [" + getBaseMsg().getRequestHeader().getURI() + "] returned a non-OK HTTP status " + msgCpy.getResponseHeader().getStatusCode() + " (after " + i + " of " + numberOfRequests + " steps). Could be indicative of SQL Injection, or some other error. 
The URL is not stable enough to look at Username Enumeration"); return; // we have not even got as far as looking at the parameters, so just // abort straight out of the method } if (this.debugEnabled) log.debug("Done following redirects!"); // append the response to the responses so far for this particular instance // this will give us a complete picture of the full set of actual traffic associated // with following redirects for the request responseA.append(msgCpy.getResponseHeader().getHeadersAsString()); responseA.append(msgCpy.getResponseBody().toString()); // 2) Compute the longest common subsequence (LCS) of A[] into LCS_A // Note: in the Freiling and Schinzel method, this is calculated recursively. We // calculate it iteratively, but using an equivalent method // first time in, the LCS is simple: it's the first HTML result.. no diffing // required if (i == 0) longestCommonSubstringA = responseA.toString(); // else get the LCS of the existing string, and the current result else longestCommonSubstringA = this.longestCommonSubsequence(longestCommonSubstringA, responseA.toString()); // optimisation step: if the LCS of A is 0 characters long already, then the URL // output is not stable, and we can abort now, and save some time if (longestCommonSubstringA.length() == 0) { // this might occur if the output returned for the URL changed mid-way. Perhaps // a CAPTCHA has fired, or a WAF has kicked in. Let's abort now so. log.warn("The original URL [" + getBaseMsg().getRequestHeader().getURI() + "] does not produce stable output (at " + i + 1 + " of " + numberOfRequests + " steps). There is no static element in the output that can be used as a basis of comparison for the result of requesting URLs with the parameter values modified. 
Perhaps a CAPTCHA or WAF has kicked in!!"); return; // we have not even got as far as looking at the parameters, so just // abort straight out of the method } } // get rid of any remnants of cookie setting and Date headers in the responses, as these // cause false positives, and can be safely ignored // replace the content length with a non-variable placeholder // replace url parameters with a non-variable placeholder to eliminate tokens in URLs in // the output longestCommonSubstringA = longestCommonSubstringA.replaceAll("Set-Cookie:[^\\r\\n]+[\\r\\n]{1,2}", ""); longestCommonSubstringA = longestCommonSubstringA.replaceAll("Date:[^\\r\\n]+[\\r\\n]{1,2}", ""); longestCommonSubstringA = longestCommonSubstringA.replaceAll("Content-Length:[^\\r\\n]+[\\r\\n]{1,2}", "Content-Length: XXXX\n"); longestCommonSubstringA = longestCommonSubstringA .replaceAll("(?<=(&|\\?)[^\\?\"=&;]+=)[^\\?\"=&;]+(?=(&|\"))", "YYYY"); if (this.debugEnabled) log.debug("The LCS of A is [" + longestCommonSubstringA + "]"); // 3) for each parameter in the original URL (ie, for URL params, form params, and // cookie params) for (Iterator<HtmlParameter> iter = htmlParams.iterator(); iter.hasNext();) { HttpMessage msgModifiedParam = getNewMsg(); HtmlParameter currentHtmlParameter = iter.next(); if (this.debugEnabled) log.debug("Handling [" + currentHtmlParameter.getType() + "] parameter [" + currentHtmlParameter.getName() + "], with value [" + currentHtmlParameter.getValue() + "]"); // 4) Change the current parameter value (which we assume is the username parameter) // to an invalid username (randomly), and request the URL n times. Store the results // in B[]. // get a random user name the same length as the original! 
String invalidUsername = RandomStringUtils.random(currentHtmlParameter.getValue().length(), RANDOM_USERNAME_CHARS); if (this.debugEnabled) log.debug("The invalid username chosen was [" + invalidUsername + "]"); TreeSet<HtmlParameter> requestParams = null; if (currentHtmlParameter.getType().equals(HtmlParameter.Type.cookie)) { requestParams = msgModifiedParam.getRequestHeader().getCookieParams(); requestParams.remove(currentHtmlParameter); requestParams.add(new HtmlParameter(currentHtmlParameter.getType(), currentHtmlParameter.getName(), invalidUsername.toString())); // add in the invalid username msgModifiedParam.setCookieParams(requestParams); } else if (currentHtmlParameter.getType().equals(HtmlParameter.Type.url)) { requestParams = msgModifiedParam.getUrlParams(); requestParams.remove(currentHtmlParameter); requestParams.add(new HtmlParameter(currentHtmlParameter.getType(), currentHtmlParameter.getName(), invalidUsername.toString())); // add in the invalid username msgModifiedParam.setGetParams(requestParams); } else if (currentHtmlParameter.getType().equals(HtmlParameter.Type.form)) { requestParams = msgModifiedParam.getFormParams(); requestParams.remove(currentHtmlParameter); requestParams.add(new HtmlParameter(currentHtmlParameter.getType(), currentHtmlParameter.getName(), invalidUsername.toString())); // add in the invalid username msgModifiedParam.setFormParams(requestParams); } if (this.debugEnabled) log.debug("About to loop for " + numberOfRequests + " iterations with an incorrect user of the same length"); boolean continueForParameter = true; for (int i = 0; i < numberOfRequests && continueForParameter; i++) { // initialise the storage for this iteration responseB = new StringBuilder(250); HttpMessage msgCpy = msgModifiedParam; // use the message we already set up, with the // modified parameter value sendAndReceive(msgCpy, false, false); // request the URL, but do not automatically follow redirects. 
// get all cookies set in the response TreeSet<HtmlParameter> cookies = msgCpy.getResponseHeader().getCookieParams(); int redirectCount = 0; while (HttpStatusCode.isRedirection(msgCpy.getResponseHeader().getStatusCode())) { redirectCount++; if (this.debugEnabled) log.debug("Following redirect " + redirectCount + " for message " + i + " of " + numberOfRequests + " iterations of the modified query"); // append the response to the responses so far for this particular instance // this will give us a complete picture of the full set of actual traffic // associated with following redirects for the request responseB.append(msgCpy.getResponseHeader().getHeadersAsString()); responseB.append(msgCpy.getResponseBody().toString()); // and manually follow the redirect // create a new message from scratch HttpMessage msgRedirect = new HttpMessage(); // create a new URI from the absolute location returned, and interpret it as // escaped // note that the standard says that the Location returned should be // absolute, but it ain't always so... URI newLocation = new URI(msgCpy.getResponseHeader().getHeader(HttpHeader.LOCATION), true); try { msgRedirect.getRequestHeader().setURI(newLocation); } catch (Exception e) { // the Location field contents may not be standards compliant. Lets // generate a uri to use as a workaround where a relative path was // given instead of an absolute one URI newLocationWorkaround = new URI(msgCpy.getRequestHeader().getURI(), msgCpy.getResponseHeader().getHeader(HttpHeader.LOCATION), true); // try again, except this time, if it fails, don't try to handle it if (this.debugEnabled) log.debug("The Location [" + newLocation + "] specified in a redirect was not valid (not absolute?). 
Trying absolute workaround url [" + newLocationWorkaround + "]"); msgRedirect.getRequestHeader().setURI(newLocationWorkaround); } msgRedirect.getRequestHeader().setMethod(HttpRequestHeader.GET); // it's always a GET for a redirect msgRedirect.getRequestHeader().setContentLength(0); // since we send a GET, the body will be 0 long if (cookies.size() > 0) { // if a previous request sent back a cookie that has not since been // invalidated, we need to set that cookie when following redirects, as // a browser would msgRedirect.getRequestHeader().setCookieParams(cookies); } sendAndReceive(msgRedirect, false, false); // do NOT redirect.. handle it here // handle scenario where a cookie is unset in a subsequent iteration, or // where the same cookie name is later re-assigned a different value // ie, in these cases, do not simply (and dumbly) accumulate cookie // detritus. // first get all cookies set in the response TreeSet<HtmlParameter> cookiesTemp = msgRedirect.getResponseHeader().getCookieParams(); for (Iterator<HtmlParameter> redirectSetsCookieIterator = cookiesTemp .iterator(); redirectSetsCookieIterator.hasNext();) { HtmlParameter cookieJustSet = redirectSetsCookieIterator.next(); // loop through each of the cookies we know about in cookies, to see if // it matches by name. // if so, delete that cookie, and add the one that was just set to // cookies. // if not, add the one that was just set to cookies. for (Iterator<HtmlParameter> knownCookiesIterator = cookies .iterator(); knownCookiesIterator.hasNext();) { HtmlParameter knownCookie = knownCookiesIterator.next(); if (cookieJustSet.getName().equals(knownCookie.getName())) { knownCookiesIterator.remove(); break; // out of the loop for known cookies, back to the next // cookie set in the response } } // end of loop for cookies we already know about // we can now safely add the cookie that was just set into cookies, // knowing it does not clash with anything else in there. 
cookies.add(cookieJustSet); } // end of for loop for cookies just set in the redirect msgCpy = msgRedirect; // store the last redirect message into the MsgCpy, as // we will be using it's output in a moment.. } // end of loop to follow redirects // now that the redirections have all been handled.. was the request finally a // success or not? Successful or Failed Logins would normally both return an OK // HTTP status if (!HttpStatusCode.isSuccess(msgCpy.getResponseHeader().getStatusCode())) { log.warn("The modified URL [" + msgModifiedParam.getRequestHeader().getURI() + "] returned a non-OK HTTP status " + msgCpy.getResponseHeader().getStatusCode() + " (after " + i + 1 + " of " + numberOfRequests + " steps for [" + currentHtmlParameter.getType() + "] parameter " + currentHtmlParameter.getName() + "). Could be indicative of SQL Injection, or some other error. The URL is not stable enough to look at Username Enumeration"); continueForParameter = false; continue; // skip directly to the next parameter. Do not pass Go. Do not // collect $200. } if (this.debugEnabled) log.debug("Done following redirects!"); // append the response to the responses so far for this particular instance // this will give us a complete picture of the full set of actual traffic // associated with following redirects for the request responseB.append(msgCpy.getResponseHeader().getHeadersAsString()); responseB.append(msgCpy.getResponseBody().toString()); // 5) Compute the longest common subsequence (LCS) of B[] into LCS_B // Note: in the Freiling and Schinzel method, this is calculated recursively. We // calculate it iteratively, but using an equivalent method // first time in, the LCS is simple: it's the first HTML result.. 
no diffing // required if (i == 0) longestCommonSubstringB = responseB.toString(); // else get the LCS of the existing string, and the current result else longestCommonSubstringB = this.longestCommonSubsequence(longestCommonSubstringB, responseB.toString()); // optimisation step: if the LCS of B is 0 characters long already, then the URL // output is not stable, and we can abort now, and save some time if (longestCommonSubstringB.length() == 0) { // this might occur if the output returned for the URL changed mid-way. // Perhaps a CAPTCHA has fired, or a WAF has kicked in. Let's abort now so. log.warn("The modified URL [" + msgModifiedParam.getRequestHeader().getURI() + "] (for [" + currentHtmlParameter.getType() + "] parameter " + currentHtmlParameter.getName() + ") does not produce stable output (after " + i + 1 + " of " + numberOfRequests + " steps). There is no static element in the output that can be used as a basis of comparison with the static output of the original query. Perhaps a CAPTCHA or WAF has kicked in!!"); continueForParameter = false; continue; // skip directly to the next parameter. Do not pass Go. Do not // collect $200. // Note: if a CAPTCHA or WAF really has fired, the results of subsequent // iterations will likely not be accurate.. 
} } // if we didn't hit something with one of the iterations for the parameter (ie, if // the output when changing the parm is stable), // check if the parameter might be vulnerable by comparins its LCS with the original // LCS for a valid login if (continueForParameter == true) { // get rid of any remnants of cookie setting and Date headers in the responses, // as these cause false positives, and can be safely ignored // replace the content length with a non-variable placeholder // replace url parameters with a non-variable placeholder to eliminate tokens in // URLs in the output longestCommonSubstringB = longestCommonSubstringB .replaceAll("Set-Cookie:[^\\r\\n]+[\\r\\n]{1,2}", ""); longestCommonSubstringB = longestCommonSubstringB.replaceAll("Date:[^\\r\\n]+[\\r\\n]{1,2}", ""); longestCommonSubstringB = longestCommonSubstringB .replaceAll("Content-Length:[^\\r\\n]+[\\r\\n]{1,2}", "Content-Length: XXXX\n"); longestCommonSubstringB = longestCommonSubstringB .replaceAll("(?<=(&|\\?)[^\\?\"=&;]+=)[^\\?\"=&;]+(?=(&|\"))", "YYYY"); if (this.debugEnabled) log.debug("The LCS of B is [" + longestCommonSubstringB + "]"); // 6) If LCS_A <> LCS_B, then there is a Username Enumeration issue on the // current parameter if (!longestCommonSubstringA.equals(longestCommonSubstringB)) { // calculate line level diffs of the 2 Longest Common Substrings to aid the // user in deciding if the match is a false positive // get the diff as a series of patches Patch diffpatch = DiffUtils.diff( new LinkedList<String>(Arrays.asList(longestCommonSubstringA.split("\\n"))), new LinkedList<String>(Arrays.asList(longestCommonSubstringB.split("\\n")))); int numberofDifferences = diffpatch.getDeltas().size(); // and convert the list of patches to a String, joining using a newline // String diffAB = StringUtils.join(diffpatch.getDeltas(), "\n"); StringBuilder tempDiff = new StringBuilder(250); for (Delta delta : diffpatch.getDeltas()) { String changeType = null; if (delta.getType() == 
Delta.TYPE.CHANGE) changeType = "Changed Text"; else if (delta.getType() == Delta.TYPE.DELETE) changeType = "Deleted Text"; else if (delta.getType() == Delta.TYPE.INSERT) changeType = "Inserted text"; else changeType = "Unknown change type [" + delta.getType() + "]"; tempDiff.append("\n(" + changeType + ")\n"); // blank line before tempDiff.append("Output for Valid Username : " + delta.getOriginal() + "\n"); // no blank lines tempDiff.append("\nOutput for Invalid Username: " + delta.getRevised() + "\n"); // blank line before } String diffAB = tempDiff.toString(); String extraInfo = Constant.messages.getString( "ascanbeta.usernameenumeration.alert.extrainfo", currentHtmlParameter.getType(), currentHtmlParameter.getName(), currentHtmlParameter.getValue(), // original value invalidUsername.toString(), // new value diffAB, // the differences between the two sets of output numberofDifferences); // the number of differences String attack = Constant.messages.getString("ascanbeta.usernameenumeration.alert.attack", currentHtmlParameter.getType(), currentHtmlParameter.getName()); String vulnname = Constant.messages.getString("ascanbeta.usernameenumeration.name"); String vulndesc = Constant.messages.getString("ascanbeta.usernameenumeration.desc"); String vulnsoln = Constant.messages.getString("ascanbeta.usernameenumeration.soln"); // call bingo with some extra info, indicating that the alert is bingo(Alert.RISK_INFO, Alert.CONFIDENCE_LOW, vulnname, vulndesc, getBaseMsg().getRequestHeader().getURI().getURI(), currentHtmlParameter.getName(), attack, extraInfo, vulnsoln, getBaseMsg()); } else { if (this.debugEnabled) log.debug("[" + currentHtmlParameter.getType() + "] parameter [" + currentHtmlParameter.getName() + "] looks ok (Invalid Usernames cannot be distinguished from Valid usernames)"); } } } // end of the for loop around the parameter list } catch (Exception e) { // Do not try to internationalise this.. we need an error message in any event.. 
// if it's in English, it's still better than not having it at all. log.error("An error occurred checking a url for Username Enumeration issues", e); } }
From source file:org.dasein.persist.PersistentCache.java
/**
 * Returns the persistent cache instance registered for the given item class,
 * creating, initializing, and registering one on first use.
 * <p>
 * Lookup order for the cache implementation class: the most specific
 * {@code dsn.persistentCache.<className>} property (trimming trailing
 * dot-segments until only {@code dsn.persistentCache} remains), then the
 * {@code dsn.cache.default} property. Secondary/foreign {@link Index}
 * annotations found on the class hierarchy's fields are translated into
 * {@link Key} index definitions passed to the cache.
 *
 * @param forClass           the cached item class the cache manages
 * @param alternateEntytName optional alternate entity name (may be null)
 * @param primaryKey         name of the primary key field
 * @param schemaVersion      schema version string passed to the cache
 * @param mappers            optional schema mappers (may be null)
 * @return the shared cache for {@code forClass} (never null)
 * @throws PersistenceException if no implementation is configured or the
 *                              configured implementation cannot be loaded
 */
@SuppressWarnings("unchecked")
static public PersistentCache<? extends CachedItem> getCacheWithSchema(
        @Nonnull Class<? extends CachedItem> forClass, @Nullable String alternateEntytName,
        @Nonnull String primaryKey, @Nonnull String schemaVersion, @Nullable SchemaMapper... mappers)
        throws PersistenceException {
    PersistentCache<? extends CachedItem> cache = null;
    String className = forClass.getName();

    // Fast path: return an already-registered cache.
    synchronized (caches) {
        cache = caches.get(className);
        if (cache != null) {
            return cache;
        }
    }

    // Load the sequencer properties; a missing/broken file is logged but
    // tolerated (the dsn.cache.default fallback check below will catch a
    // totally unconfigured system).
    Properties props = new Properties();

    try {
        InputStream is = DaseinSequencer.class.getResourceAsStream(DaseinSequencer.PROPERTIES);

        if (is != null) {
            props.load(is);
        }
    } catch (Exception e) {
        logger.error("Problem reading " + DaseinSequencer.PROPERTIES + ": " + e.getMessage(), e);
    }

    // Collect secondary/foreign index keys declared on the class hierarchy.
    TreeSet<Key> keys = collectIndexKeys(forClass);

    // Walk from the most specific property key toward the generic one,
    // instantiating the first configured implementation found.
    String propKey = "dsn.persistentCache." + className;
    String prop;

    while (cache == null && !propKey.equals("dsn.persistentCache")) {
        prop = props.getProperty(propKey);
        if (prop != null) {
            try {
                cache = (PersistentCache<? extends CachedItem>) Class.forName(prop).newInstance();
                cache.initBase(forClass, alternateEntytName, schemaVersion, mappers, new Key(primaryKey),
                        keys.toArray(new Key[keys.size()]));
                break;
            } catch (Throwable t) {
                // Log with the throwable so the stack trace is preserved
                // (previously only t.getMessage() was logged here).
                logger.error("Unable to load persistence cache " + prop + ": " + t.getMessage(), t);
                throw new PersistenceException(
                        "Unable to load persistence cache " + prop + ": " + t.getMessage());
            }
        }
        int idx = propKey.lastIndexOf('.');

        propKey = propKey.substring(0, idx);
    }
    if (cache == null) {
        // No class-specific configuration: fall back to the global default.
        prop = props.getProperty("dsn.cache.default");
        if (prop == null) {
            throw new PersistenceException("No persistent cache implementations defined.");
        }
        try {
            cache = (PersistentCache<? extends CachedItem>) Class.forName(prop).newInstance();
            cache.initBase(forClass, alternateEntytName, schemaVersion, mappers, new Key(primaryKey),
                    keys.toArray(new Key[keys.size()]));
        } catch (Throwable t) {
            String err = "Unable to load persistence cache " + prop + ": " + t.getMessage();

            logger.error(err, t);
            throw new PersistenceException(err);
        }
    }
    // Register the new cache, preferring one that a racing thread may have
    // registered while we were initializing ours.
    synchronized (caches) {
        PersistentCache<? extends CachedItem> c = caches.get(className);

        if (c != null) {
            cache = c;
        } else {
            caches.put(className, cache);
        }
    }
    return cache;
}

/**
 * Scans {@code forClass} and its superclasses (up to, but excluding,
 * {@link Object}) for fields annotated with {@link Index} of type
 * {@code SECONDARY} or {@code FOREIGN} and converts them into {@link Key}
 * definitions, honoring multi-field and cascading index declarations.
 */
static private TreeSet<Key> collectIndexKeys(@Nonnull Class<? extends CachedItem> forClass) {
    TreeSet<Key> keys = new TreeSet<Key>();
    Class<?> cls = forClass;

    while (!cls.getName().equals(Object.class.getName())) {
        for (Field field : cls.getDeclaredFields()) {
            for (Annotation annotation : field.getDeclaredAnnotations()) {
                if (annotation instanceof Index) {
                    if (logger.isDebugEnabled()) {
                        logger.debug("Processing Index for: " + cls.getName() + "." + field.getName());
                    }
                    Index idx = (Index) annotation;

                    if (logger.isDebugEnabled()) {
                        logger.debug("Index is: " + idx);
                    }
                    if (idx.type().equals(IndexType.SECONDARY) || idx.type().equals(IndexType.FOREIGN)) {
                        String keyName = field.getName();

                        if (idx.multi() != null && idx.multi().length > 0) {
                            if (idx.cascade()) {
                                // Cascading multi-index: emit the base key plus one
                                // key per prefix of the multi() field list.
                                int len = idx.multi().length;

                                keys.add(new Key(keyName));
                                for (int i = 0; i < len; i++) {
                                    String[] parts = new String[i + 2];

                                    parts[0] = keyName;
                                    for (int j = 0; j <= i; j++) {
                                        parts[j + 1] = idx.multi()[j];
                                    }
                                    keys.add(new Key(parts));
                                }
                            } else {
                                // Non-cascading: one composite key covering the base
                                // field and all multi() fields.
                                String[] parts = new String[idx.multi().length + 1];
                                int i = 1;

                                parts[0] = keyName;
                                for (String name : idx.multi()) {
                                    parts[i++] = name;
                                }
                                Key k = new Key(parts);

                                keys.add(k);
                            }
                        } else {
                            Key k;

                            // A foreign index that identifies a concrete class gets
                            // a key bound to that class; otherwise a plain key.
                            if (idx.type().equals(IndexType.FOREIGN)
                                    && !idx.identifies().equals(CachedItem.class)) {
                                k = new Key(idx.identifies(), keyName);
                            } else {
                                k = new Key(keyName);
                            }
                            keys.add(k);
                        }
                    }
                }
            }
        }
        cls = cls.getSuperclass();
    }
    return keys;
}
From source file:crawler.HackerEarthCrawler.java
@Override public void crawl() { int flag = 0; //set of urls which should be crawled TreeSet<String> linksset = new TreeSet<String>(); TreeSet<String> tempset = new TreeSet<String>(); TreeSet<String> tutorialset = new TreeSet<String>(); //final set of problem urls TreeSet<String> problemset = new TreeSet<String>(); //visited for maintaing status of if url is already crawled or not TreeMap<String, Integer> visited = new TreeMap<String, Integer>(); //add base url linksset.add(baseUrl);//from w w w .jav a 2 s.c o m //mark base url as not crawled visited.put(baseUrl, 0); try { while (true) { flag = 0; tempset.clear(); for (String str : linksset) { //check if url is already crawled or not and it has valid domain name if ((visited.get(str) == 0) && (str.startsWith("https://www.hackerearth.com/"))) { System.out.println("crawling " + str); //retriving response of current url as document Document doc = Jsoup.connect(str).timeout(0).userAgent( "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0") .referrer("http://www.google.com").ignoreHttpErrors(true).get(); //retriving all urls from current page Elements links = doc.select("a[href]"); //mark url as crawled visited.put(str, 1); //mark flag as url is crawled flag = 1; //retrive all urls for (Element link : links) { if (link.absUrl("href").endsWith("/tutorial/")) { tutorialset.add(link.absUrl("href")); } //check if url is problem url then add it in problemurlset if (link.absUrl("href").startsWith("https://www.hackerearth.com/") && isProblemUrl(link.absUrl("href"))) { problemset.add(link.absUrl("href")); } //check if url has valid domain and it has problem urls or not if (link.absUrl("href").contains(("https://www.hackerearth.com/")) && isCrawlable(link.absUrl("href"))) { //if link is not visited then mark it as uncrawled if (!visited.containsKey(link.absUrl("href"))) { visited.put(link.absUrl("href"), 0); } //add it in tempsetorary set tempset.add(link.absUrl("href")); //System.out.println("\n base: 
"+str+" ::: link : " + link.absUrl("href")); } } } } //if nothing is left to crawl break the loop if (flag == 0) { break; } //add all retrieved links to linksset linksset.addAll(tempset); } System.out.println("\n\ntotal problem urls " + problemset.size()); int i = 0; for (String str : problemset) { System.out.println("link " + i + " : " + str); i++; } } catch (IOException ex) { Logger.getLogger(HackerEarthCrawler.class.getName()).log(Level.SEVERE, null, ex); } //scrap and store into database //for every problem url scrap problem page for (String problemUrl : problemset) { System.out.println("problemUrl :" + problemUrl); try { //create problem class to store in database Problem problem = new Problem(); String problemSIOC = "", problemIOC = ""; String problemTitle = "", problemStatement = "", problemInput = "", problemOutput = "", problemConstraints = ""; String sampleInput = "", sampleOutput = ""; String problemExplanation = ""; //set default timelimit to 1 second double problemTimeLimit = 1.0; ArrayList<String> tags = new ArrayList<String>(); //get response for given problem url Response response = Jsoup.connect(problemUrl).execute(); Document doc = response.parse(); //retrieve problem title from page Element elementTitle = doc.getElementsByTag("title").first(); StringTokenizer stTitle = new StringTokenizer(elementTitle.text(), "|"); problemTitle = stTitle.nextToken().trim(); Element content = doc.getElementsByClass("starwars-lab").first(); problemSIOC = content.text(); Elements e = content.children(); //to find problem statement String breakloop[] = { "input", "input:", "input :", "input format:", "input format :", "input format", "Input and output", "constraints :", "constraints:", "constraints", "$$Input :$$" }; flag = 0; for (Element p : e) { String tempStatement = ""; for (Element pp : p.getAllElements()) { for (String strbreak : breakloop) { if (StringUtils.equalsIgnoreCase(pp.ownText(), strbreak)) { //System.out.println("strbreak :"+strbreak); tempStatement 
= p.text().substring(0, p.text().toLowerCase().indexOf(strbreak.toLowerCase())); // System.out.println("temp "+tempStatement); flag = 1; break; } } } if (flag == 1) { problemStatement += tempStatement; //remove extra space at end if (tempStatement.length() == 0) { problemStatement = problemStatement.substring(0, problemStatement.length() - 1); } break; } problemStatement += p.text() + " "; } System.out.println("problemSIOC :" + problemSIOC); System.out.println("problemStatement :" + problemStatement); if (problemStatement.length() <= problemSIOC.length()) { //remove problem statement from whole text and remove extra spaces at the beginning and the end problemIOC = problemSIOC.substring(problemStatement.length()).trim(); } else { problemIOC = ""; } System.out.println("problemIOC :" + problemIOC); //keywords for identifying input String decideInput[] = { "Input format :", "Input format:", "Input format", "inputformat:", "inputformat :", "inputformat", "input and output", "input :", "input:", "input" }; //keywords for identifying output String decideOutput[] = { "output format :", "output format:", "Output format", "outputformat:", "outputformat :", "outputformat", "output :", "output:", "output" }; //keywords for identifying constraint String decideConstraint[] = { "constraints:", "constraints :", "constraints", "Constraints :", "constraint:", "constraint :", "constraint", "Contraints :" }; int posin = 0, posoutput = 0, poscon = 0, idxin, idxout, idxcon, flaginput = 0, flagoutput = 0, flagcon = 0, inlen = 0, outlen = 0, conlen = 0; //find inputformat position,length of keyword for (idxin = 0; idxin < decideInput.length; idxin++) { if (StringUtils.containsIgnoreCase(problemIOC, decideInput[idxin])) { posin = problemIOC.toLowerCase().indexOf(decideInput[idxin].toLowerCase()); flaginput = 1; inlen = decideInput[idxin].length(); //decide it is keyowrd for actucal input or it is "sample input" if (StringUtils.containsIgnoreCase(problemIOC, "sample input")) { if (posin > 
problemIOC.toLowerCase().indexOf("sample input")) { flaginput = 0; inlen = 0; } else { break; } } else { break; } } } //find outputformat position,length of keyword for (idxout = 0; idxout < decideOutput.length; idxout++) { if (StringUtils.containsIgnoreCase(problemIOC, decideOutput[idxout])) { posoutput = problemIOC.toLowerCase().indexOf(decideOutput[idxout].toLowerCase()); flagoutput = 1; outlen = decideOutput[idxout].length(); break; } } //find constraint position,length of keyword for (idxcon = 0; idxcon < decideConstraint.length; idxcon++) { if (StringUtils.containsIgnoreCase(problemIOC, decideConstraint[idxcon])) { poscon = problemIOC.toLowerCase().indexOf(decideConstraint[idxcon].toLowerCase()); flagcon = 1; conlen = decideConstraint[idxcon].length(); break; } } System.out.println("input " + flaginput + " " + inlen + " " + posin); System.out.println("output " + flagoutput + " " + outlen + " " + posoutput); System.out.println("constraint " + flagcon + " " + conlen + " " + poscon); //retrieve problem input and output if present in problem page //if input format is present if (flaginput == 1) { //if input keyword is "input and output" and contraint is present in problem page if (idxin == 6 && flagcon == 1) { problemInput = problemIOC.substring(inlen, poscon); } //if input keyword is "input and output" and contraint is not present in problem page else if (idxin == 6 && flagcon == 0) { problemInput = problemIOC.substring(inlen); } //if output format and constraint is present else if (flagoutput == 1 && flagcon == 1) { //if constraint is present before input format if (poscon < posin) { problemInput = problemIOC.substring(posin + inlen, posoutput); problemOutput = problemIOC.substring(posoutput + outlen); } //if constraint is present before sample else if (poscon < posoutput) { problemInput = problemIOC.substring(inlen, poscon); problemOutput = problemIOC.substring(posoutput + outlen); } else { problemInput = problemIOC.substring(inlen, posoutput); problemOutput = 
problemIOC.substring(posoutput + outlen, poscon); } } //if constraint is not present else if (flagoutput == 1 && flagcon == 0) { problemInput = problemIOC.substring(inlen, posoutput); problemOutput = problemIOC.substring(posoutput + outlen); } else if (flagoutput == 0 && flagcon == 1) { if (poscon < posin) { problemInput = problemIOC.substring(posin + inlen); } else { problemInput = problemIOC.substring(poscon + conlen, posin); } problemOutput = ""; } else { problemInput = problemIOC.substring(inlen); problemOutput = ""; } } //if input format and output format is not present else { problemInput = ""; problemOutput = ""; } //if constraint is present if (flagcon == 1) { //if constraint is present before input format if (poscon < posin) { problemConstraints = problemIOC.substring(0, posin); } //if constraint is present before output format else if (poscon < posoutput) { problemConstraints = problemIOC.substring(poscon + conlen, posoutput); } else { problemConstraints = problemIOC.substring(poscon + conlen); } } System.out.println("problemInput :" + problemInput); System.out.println("problemOutput :" + problemOutput); System.out.println("problemConstraints :" + problemConstraints); //retrieve problem tags from problem page Element elementtag = doc.getElementsByClass("problem-tags").first().child(1); StringTokenizer st = new StringTokenizer(elementtag.text(), ","); while (st.hasMoreTokens()) { tags.add(st.nextToken().trim()); } //retrieve sample input sample output if present Element elementSIO = doc.getElementsByClass("input-output-container").first(); //if sample input output is present if (elementSIO != null) { //find position of sample output int soutpos = elementSIO.text().indexOf("SAMPLE OUTPUT"); sampleInput = elementSIO.text().substring(12, soutpos); sampleOutput = elementSIO.text().substring(soutpos + 13); System.out.println("Sample input :\n" + sampleInput + "\n\n\n"); System.out.println("Sample Output :\n" + sampleOutput); } else { sampleInput = ""; 
sampleOutput = ""; } //retrieve problem explanation from problem page if present Element elementExplanation = doc.getElementsByClass("standard-margin").first().child(0); if (elementExplanation.text().toLowerCase().contains("explanation")) { problemExplanation = elementExplanation.nextElementSibling().text(); } System.out.println("Explanation :" + problemExplanation); //retrieve timelimit Element elementTL = doc.getElementsByClass("problem-guidelines").first().child(0).child(1); StringTokenizer stTL = new StringTokenizer(elementTL.ownText(), " "); problemTimeLimit = Double.parseDouble(stTL.nextToken()); //System.out.println("problemTimeLimit :"+problemTimeLimit); //set all retrieved information to problem class problem.setProblemUrl(problemUrl); if (problemTitle.length() == 0) { problemTitle = null; } if (problemStatement.length() == 0) { problemStatement = null; } if (problemInput.length() == 0) { problemInput = null; } if (problemOutput.length() == 0) { problemOutput = null; } if (problemExplanation.length() == 0) { problemExplanation = null; } if (problemConstraints.length() == 0) { problemConstraints = null; } problem.setTitle(problemTitle); problem.setProblemUrl(problemUrl); problem.setProblemStatement(problemStatement); problem.setInputFormat(problemInput); problem.setOutputFormat(problemOutput); problem.setTimeLimit(problemTimeLimit); problem.setExplanation(problemExplanation); problem.setConstraints(problemConstraints); //set sample input output to problem class SampleInputOutput sampleInputOutput = new SampleInputOutput(problem, sampleInput, sampleOutput); problem.getSampleInputOutputs().add(sampleInputOutput); //set platform as hackerearth problem.setPlatform(Platform.HackerEarth); for (String strtag : tags) { problem.getTags().add(strtag); } //store in database Session session = null; Transaction transaction = null; try { //start session session = HibernateUtil.getSessionFactory().openSession(); transaction = session.beginTransaction(); //check if problem 
is already stored in database String hql = "FROM Problem p where p.problemUrl = :problem_url"; Problem oldProblem = (Problem) session.createQuery(hql).setString("problem_url", problemUrl) .uniqueResult(); String task; //if problem is present in database if (oldProblem != null) { //update the old problem task = "updated"; //retrieve id of old problem problem.setId(oldProblem.getId()); session.delete(oldProblem); session.flush(); session.save(problem); } else { task = "saved"; session.save(problem); } transaction.commit(); //log the info to console Logger.getLogger(CodeForcesCrawler.class.getName()).log(Level.INFO, "{0} {1}", new Object[] { task, problem.getProblemUrl() }); } catch (HibernateException ee) { if (transaction != null) { transaction.rollback(); } Logger.getLogger(CodeForcesCrawler.class.getName()).log(Level.SEVERE, "Cannot Insert/Update problem into databse: " + problemUrl, e); } finally { //close the session if (session != null) { session.close(); } } } catch (Exception ee) { System.out.println(ee.toString()); } } System.out.println("\n\n\n\ntutorial urls\n\n"); try { for (String tutorialurl : tutorialset) { //System.out.println(tutorialurl+"\n\n"); Response tutorialres = Jsoup.connect(tutorialurl).execute(); Document doc = tutorialres.parse(); Tutorial tutorial = new Tutorial(); tutorial.setContent(doc.getElementsByClass("tutorial").first().text()); tutorial.setName(baseUrl); tutorialurl = tutorialurl.substring(0, tutorialurl.length() - 10); StringTokenizer tutorialtok = new StringTokenizer(tutorialurl, "/"); String tempstr = ""; while (tutorialtok.hasMoreTokens()) { tempstr = tutorialtok.nextToken(); } Session session = null; Transaction transaction = null; try { //start session session = HibernateUtil.getSessionFactory().openSession(); transaction = session.beginTransaction(); //check if problem is already stored in database String hql = "FROM Tutorial p where p.name = :name"; Tutorial oldProblem = (Tutorial) 
session.createQuery(hql).setString("name", tempstr) .uniqueResult(); String task; //if problem is present in database if (oldProblem != null) { //update the old problem task = "updated"; //retrieve id of old problem tutorial.setName(oldProblem.getName()); session.delete(oldProblem); session.flush(); session.save(tutorial); } else { task = "saved"; tutorial.setName(tempstr); session.save(tutorial); } transaction.commit(); //log the info to console Logger.getLogger(CodeForcesCrawler.class.getName()).log(Level.INFO, "{0} {1}", new Object[] { task, tutorial.getName() }); } catch (HibernateException ee) { if (transaction != null) { transaction.rollback(); } Logger.getLogger(CodeForcesCrawler.class.getName()).log(Level.SEVERE, "Cannot Insert/Update problem into databse: " + tempstr, ee); } finally { //close the session if (session != null) { session.close(); } } } } catch (Exception e) { System.out.println(e.getMessage()); } }
From source file:org.cloudata.core.tabletserver.DiskSSTable.java
/**
 * Finds the closest (smallest-keyed) non-deleted column value at or after the
 * given row key by merging all on-disk map files for the column.
 * <p>
 * If an in-memory cache exists for the column, the answer comes straight from
 * it. Otherwise one reader per map file is advanced past {@code rowKey} and a
 * k-way merge over a sorted working set picks the winning row key; all values
 * sharing that row key are gathered so that a delete marker in any file can
 * veto the match, in which case the merge moves on to the next row key.
 *
 * @param rowKey     the row key to search from
 * @param columnName the column whose map files are searched
 * @param great      if true, accept values with key {@code >= rowKey};
 *                   otherwise only strictly {@code > rowKey}
 * @return the closest matching column value, or null if none remains
 * @throws IOException if reading a map file fails
 */
public ColumnValue findClosestMeta(Row.Key rowKey, String columnName, boolean great) throws IOException {
    lock.obtainReadLock();
    try {
        // Fast path: column fully cached in memory.
        if (columnMemoryCaches.containsKey(columnName)) {
            ColumnMemoryCache cache = columnMemoryCaches.get(columnName);
            return cache.findClosest(rowKey);
        }

        List<TabletMapFile> tabletMapFiles = mapFiles.get(columnName);
        if (tabletMapFiles == null || tabletMapFiles.isEmpty()) {
            return null;
        }

        // One reader per map file; readers[i] is null once that file is
        // exhausted. workPlace holds the current head value of each live
        // reader, sorted so first() is the merge winner.
        MapFileReader[] readers = new MapFileReader[tabletMapFiles.size()];
        TreeSet<MetaValue> metaValues = new TreeSet<MetaValue>();
        TreeSet<ColumnValue> workPlace = new TreeSet<ColumnValue>();

        try {
            //init: advance each reader to its first value past rowKey
            CellFilter cellFilter = new CellFilter(columnName);

            int index = 0;
            for (TabletMapFile tabletMapFile : tabletMapFiles) {
                MapFileReader reader = tabletMapFile.getMapFileReader(rowKey, Row.Key.MAX_KEY, cellFilter);

                ColumnValue columnValue = null;
                while ((columnValue = reader.next()) != null) {
                    if (great) {
                        // accept keys >= rowKey
                        if (columnValue.getRowKey().compareTo(rowKey) < 0) {
                            continue;
                        }
                    } else {
                        // accept keys strictly > rowKey
                        if (columnValue.getRowKey().compareTo(rowKey) <= 0) {
                            continue;
                        }
                    }
                    break;
                }

                if (columnValue != null) {
                    workPlace.add(columnValue);
                    readers[index] = reader;
                } else {
                    // nothing usable in this file; close it now
                    reader.close();
                    readers[index] = null;
                }
                index++;
            }

            //findClosestMeta: k-way merge until a non-deleted winner is found
            while (true) {
                if (workPlace.isEmpty()) {
                    return null;
                }

                // Smallest value across all readers is the candidate winner.
                ColumnValue winnerColumnValue = workPlace.first();
                metaValues.add(new MetaValue(winnerColumnValue));
                workPlace.remove(winnerColumnValue);
                Row.Key winnerRowKey = winnerColumnValue.getRowKey();

                // Pull every other queued value with the same row key into
                // metaValues (copy first to avoid mutating during iteration).
                List<ColumnValue> tempWorkPlace = new ArrayList<ColumnValue>();
                tempWorkPlace.addAll(workPlace);

                for (ColumnValue eachColumnValue : tempWorkPlace) {
                    if (winnerRowKey.equals(eachColumnValue.getRowKey())) {
                        metaValues.add(new MetaValue(eachColumnValue));
                        workPlace.remove(eachColumnValue);
                    }
                }

                // Drain each live reader of further values for the winning
                // row key; the first value with a different key becomes that
                // reader's new head in workPlace.
                for (int i = 0; i < readers.length; i++) {
                    if (readers[i] == null) {
                        continue;
                    }

                    ColumnValue columnValue = null;
                    while ((columnValue = readers[i].next()) != null) {
                        if (winnerRowKey.equals(columnValue.getRowKey())) {
                            metaValues.add(new MetaValue(columnValue));
                        } else {
                            workPlace.add(columnValue);
                            break;
                        }
                    }

                    if (columnValue == null) {
                        // reader exhausted
                        readers[i].close();
                        readers[i] = null;
                    }
                }

                if (metaValues.size() > 0) {
                    // First (presumably newest-ranked by MetaValue ordering —
                    // TODO confirm against MetaValue.compareTo) entry decides:
                    // a delete marker vetoes this row key entirely.
                    MetaValue firstValue = metaValues.first();

                    if (!firstValue.columnValue.isDeleted()) {
                        return firstValue.columnValue;
                    } else {
                        metaValues.clear();
                    }
                }
            }
        } finally {
            // Close any readers still open (e.g. on early return or error).
            for (int i = 0; i < readers.length; i++) {
                try {
                    if (readers[i] != null) {
                        readers[i].close();
                    }
                } catch (Exception e) {
                    LOG.warn("Can't close MapFileReader:" + e.getMessage());
                }
            }
        }
    } finally {
        lock.releaseReadLock();
    }
}