List of usage examples for java.util.TreeSet.add
public boolean add(E e)
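Before the project examples below, a minimal self-contained sketch of the basic contract: add inserts the element only if the set does not already contain an equal element (as determined by the set's ordering), returns true exactly when the set changed, and keeps the elements sorted. The class name and values here are illustrative only.

import java.util.TreeSet;

public class TreeSetAddBasics {
    public static void main(String[] args) {
        TreeSet<String> names = new TreeSet<>();   // natural (lexicographic) ordering
        System.out.println(names.add("mango"));    // true  - element was not present
        System.out.println(names.add("apple"));    // true
        System.out.println(names.add("mango"));    // false - duplicates are rejected
        System.out.println(names);                 // [apple, mango] - kept sorted
    }
}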
From source file:edu.ehu.galan.lite.utils.wikiminer.WikiminnerHelper.java
/**
 * Relate a list of topics with the important topics of the domain
 *
 * @param pTtopicList
 * @param cGold
 * @param relatedness
 * @param minRelationship
 * @return
 */
public List<Comparison> parallelRelate(List<Topic> pTtopicList, List<Integer> cGold, float relatedness,
        int minRelationship) {
    List<Comparison> kust = new ArrayList<>();
    if (!localMode) {
        long timeStart = System.nanoTime();
        List<Topic> topicList = new ArrayList<>();
        Gson son = new GsonBuilder().create();
        JsonParser parser = new JsonParser();
        int i = 0;
        List<Integer> intList = new ArrayList<>();
        Comparisons ex = null;
        try {
            ProgressTracker tracker = new ProgressTracker((pTtopicList.size() / maxTopics) + 1, "....",
                    this.getClass());
            while (i < pTtopicList.size()) {
                String cacheElem = "";
                String req = wikiminerUrl + "services/compare?ids1=";
                int sum = 0;
                for (; i < pTtopicList.size(); i++) {
                    int id = (pTtopicList.get(i).getId());
                    cacheElem += id;
                    sum++;
                    req = req + id + ",";
                    if (sum == maxTopics) {
                        break;
                    }
                }
                req = req.substring(0, req.length() - 1);
                req += "&ids2=";
                for (Integer gold : cGold) {
                    req = req + gold.toString() + ",";
                    cacheElem += gold.toString();
                }
                req = req.substring(0, req.length() - 1);
                // Element elem = cache.get(cacheElem);
                // if (elem == null) {
                HttpGet getRequest = new HttpGet(req + "&wikipedia=" + lang + "&responseFormat=JSON");
                getRequest.addHeader("accept", "application/json");
                getRequest.addHeader("Accept-Encoding", "gzip");
                HttpResponse response = httpClient.execute(getRequest);
                GzipDecompressingEntity entity = new GzipDecompressingEntity(response.getEntity());
                String jsonText = EntityUtils.toString(entity, StandardCharsets.UTF_8);
                EntityUtils.consume(entity);
                ex = son.fromJson(jsonText, Comparisons.class);
                // elem = new Element(cacheElem, ex);
                // cache.put(elem);
                // } else {
                //     ex = (Comparisons) elem.getObjectValue();
                // }
                for (Comparison comp : ex.getComparisons()) {
                    if (cGold.contains(comp.getHighId())) {
                        comp.setHighId(null);
                        kust.add(comp);
                    } else {
                        comp.setLowId(comp.getHighId());
                        comp.setHighId(null);
                        kust.add(comp);
                    }
                }
                // for (Integer id : ex.getIds()) {
                //     intList.add(id);
                //     // System.out.println(id);
                // }
                // }
                // for (Integer integer : intList) {
                //     for (Topic top : pTtopicList) {
                //         if (top.getId() == integer) {
                //             topicList.add(top);
                //             break;
                //         }
                //     }
                tracker.update();
            }
            long timeEnd = System.nanoTime();
            logger.debug("Parallel Relate processed in: " + ((timeEnd - timeStart) / 1000000) + " for size: "
                    + pTtopicList.size());
            return kust;
        } catch (IOException ex1) {
            logger.error(null, ex1);
        }
        return null;
    } else {
        if (wikipedia != null) {
            ArticleComparer artComparer = null;
            try {
                artComparer = new ArticleComparer(wikipedia);
            } catch (Exception ex) {
                logger.error("Error getting article comparer for this wikipedia");
            }
            if (artComparer == null) {
                logger.error("No comparisons available for this Wikipedia");
            }
            // gather articles from ids1
            TreeSet<Article> articles1 = new TreeSet<>();
            for (Topic id : pTtopicList) {
                try {
                    Article art = (Article) wikipedia.getPageById(id.getId());
                    articles1.add(art);
                } catch (Exception e) {
                    // msg.addInvalidId(id.);
                }
            }
            // gather articles from ids2
            TreeSet<Article> articles2 = new TreeSet<>();
            for (Integer id : cGold) {
                try {
                    Article art = (Article) wikipedia.getPageById(id);
                    articles2.add(art);
                } catch (Exception e) {
                    // msg.addInvalidId(id);
                }
            }
            // if ids2 is not specified, then we want to compare each item in ids1 with every other one
            if (articles2.isEmpty()) {
                articles2 = articles1;
            }
            TLongHashSet doneKeys = new TLongHashSet();
            float minRelatedness = relatedness;
            // boolean showTitles = prmTitles.getValue(request);
            for (Article a1 : articles1) {
                for (Article a2 : articles2) {
                    if (a1.equals(a2)) {
                        continue;
                    }
                    // relatedness is symmetric, so create a unique key for this pair of ids where order doesn't matter
                    Article min, max;
                    if (a1.getId() < a2.getId()) {
                        min = a1;
                        max = a2;
                    } else {
                        min = a2;
                        max = a1;
                    }
                    // long min = Math.min(a1.getId(), a2.getId());
                    // long max = Math.max(a1.getId(), a2.getId());
                    long key = ((long) min.getId()) + (((long) max.getId()) << 30);
                    if (doneKeys.contains(key)) {
                        continue;
                    }
                    double related = 0;
                    try {
                        related = artComparer.getRelatedness(a1, a2);
                    } catch (Exception ex) {
                    }
                    if (related >= minRelatedness) {
                        Comparison comp = new Comparison();
                        comp.setRelatedness(related);
                        comp.setHighId(max.getId());
                        comp.setLowId(min.getId());
                        if (cGold.contains(comp.getHighId())) {
                            comp.setHighId(null);
                            kust.add(comp);
                        } else {
                            comp.setLowId(comp.getHighId());
                            comp.setHighId(null);
                            kust.add(comp);
                        }
                    }
                    doneKeys.add(key);
                }
            }
            return kust;
        } else {
            return null;
        }
    }
}
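In the local-mode branch above, articles1.add(art) and articles2.add(art) work presumably because the WikipediaMiner Article type is comparable; TreeSet has no hash-based fallback, so it needs either Comparable elements or a supplied Comparator, and it treats two elements as duplicates whenever their comparison result is 0, regardless of equals. A small illustrative sketch with a hypothetical Page class (not the Article class used above):

import java.util.TreeSet;

// Hypothetical stand-in for an article/page type ordered by id.
class Page implements Comparable<Page> {
    final int id;
    final String title;
    Page(int id, String title) { this.id = id; this.title = title; }
    @Override
    public int compareTo(Page other) { return Integer.compare(id, other.id); }
    @Override
    public String toString() { return id + ":" + title; }
}

public class CompareToDedup {
    public static void main(String[] args) {
        TreeSet<Page> pages = new TreeSet<>();
        System.out.println(pages.add(new Page(42, "TreeSet")));   // true
        System.out.println(pages.add(new Page(42, "Duplicate"))); // false - same id, so "equal" to the set
        System.out.println(pages);                                // [42:TreeSet]
    }
}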
From source file:net.spfbl.data.Block.java
public static TreeSet<String> getAllTokens(String value) {
    TreeSet<String> blockSet = new TreeSet<String>();
    if (Subnet.isValidIP(value)) {
        // collect the exact IP if it is in the block set
        String ip = Subnet.normalizeIP(value);
        if (SET.contains(ip)) {
            blockSet.add(ip);
        }
    } else if (Subnet.isValidCIDR(value)) {
        // collect the CIDR itself and any listed IPs it covers
        String cidr = Subnet.normalizeCIDR(value);
        if (CIDR.contains((String) null, cidr)) {
            blockSet.add(cidr);
        }
        TreeSet<String> set = SET.getAll();
        for (String ip : set) {
            if (Subnet.containsIP(cidr, ip)) {
                blockSet.add(ip);
            }
        }
        for (String ip : set) {
            if (SubnetIPv6.containsIP(cidr, ip)) {
                blockSet.add(ip);
            }
        }
    } else if (Domain.isHostname(value)) {
        // walk up the domain hierarchy, collecting matching entries
        LinkedList<String> regexList = new LinkedList<String>();
        String host = Domain.normalizeHostname(value, true);
        do {
            int index = host.indexOf('.') + 1;
            host = host.substring(index);
            if (Block.dropExact('.' + host)) {
                blockSet.add('.' + host);
                regexList.addFirst('.' + host);
            }
        } while (host.contains("."));
    } else if (SET.contains(value)) {
        blockSet.add(value);
    }
    return blockSet;
}
From source file:asterix.parser.classad.ClassAd.java
public boolean privateGetExternalReferences(ExprTree expr, ClassAd ad, EvalState state,
        Map<ClassAd, TreeSet<String>> refs) throws HyracksDataException {
    switch (expr.getKind()) {
    case LITERAL_NODE:
        // no external references here
        return (true);
    case ATTRREF_NODE: {
        ClassAd start = new ClassAd();
        ExprTreeHolder tree = new ExprTreeHolder();
        ExprTreeHolder result = new ExprTreeHolder();
        AMutableCharArrayString attr = new AMutableCharArrayString();
        Value val = new Value();
        MutableBoolean abs = new MutableBoolean();
        ((AttributeReference) expr).getComponents(tree, attr, abs);
        // establish starting point for attribute search
        if (tree.getInnerTree() == null) {
            start = abs.booleanValue() ? state.getRootAd() : state.getCurAd();
            if (abs.booleanValue() && (start == null)) { // NAC - circularity so no root
                return false; // NAC
            } // NAC
        } else {
            if (!tree.publicEvaluate(state, val))
                return (false);
            // if the tree evals to undefined, the external references
            // are in the tree part
            if (val.isUndefinedValue()) {
                return (privateGetExternalReferences(tree, ad, state, refs));
            }
            // otherwise, if the tree didn't evaluate to a classad,
            // we have a problem
            if (!val.isClassAdValue(start))
                return (false);
            // make sure that we are starting from a "valid" scope
            if (!refs.containsKey(start) && start != this) {
                return (false);
            }
        }
        // lookup for attribute
        ClassAd curAd = state.getCurAd();
        TreeSet<String> pitr = refs.get(start);
        if (pitr == null) {
            pitr = new TreeSet<String>();
            refs.put(start, pitr);
        }
        switch (start.lookupInScope(attr.toString(), result, state)) {
        case EVAL_ERROR_Int:
            // some error
            return (false);
        case EVAL_UNDEF_Int:
            // attr is external
            pitr.add(attr.toString());
            state.setCurAd(curAd);
            return (true);
        case EVAL_OK_Int: {
            // attr is internal; find external refs in result
            boolean rval = privateGetExternalReferences(result, ad, state, refs);
            state.setCurAd(curAd);
            return (rval);
        }
        case EVAL_FAIL_Int:
        default:
            // enh??
            return (false);
        }
    }
    case OP_NODE: {
        // recurse on subtrees
        AMutableInt32 opKind = new AMutableInt32(0);
        ExprTreeHolder t1 = new ExprTreeHolder();
        ExprTreeHolder t2 = new ExprTreeHolder();
        ExprTreeHolder t3 = new ExprTreeHolder();
        ((Operation) expr).getComponents(opKind, t1, t2, t3);
        if (t1.getInnerTree() != null && !privateGetExternalReferences(t1, ad, state, refs)) {
            return (false);
        }
        if (t2.getInnerTree() != null && !privateGetExternalReferences(t2, ad, state, refs)) {
            return (false);
        }
        if (t3.getInnerTree() != null && !privateGetExternalReferences(t3, ad, state, refs)) {
            return (false);
        }
        return (true);
    }
    case FN_CALL_NODE: {
        // recurse on subtrees
        AMutableCharArrayString fnName = new AMutableCharArrayString();
        ExprList args = new ExprList();
        ((FunctionCall) expr).getComponents(fnName, args);
        for (ExprTree exprTree : args.getExprList()) {
            if (!privateGetExternalReferences(exprTree, ad, state, refs)) {
                return (false);
            }
        }
        return (true);
    }
    case CLASSAD_NODE: {
        // recurse on subtrees
        HashMap<CaseInsensitiveString, ExprTree> attrs = new HashMap<CaseInsensitiveString, ExprTree>();
        ((ClassAd) expr).getComponents(attrs);
        for (Entry<CaseInsensitiveString, ExprTree> entry : attrs.entrySet()) {
            if (!privateGetExternalReferences(entry.getValue(), ad, state, refs)) {
                return (false);
            }
        }
        return (true);
    }
    case EXPR_LIST_NODE: {
        // recurse on subtrees
        ExprList exprs = new ExprList();
        ((ExprList) expr).getComponents(exprs);
        for (ExprTree exprTree : exprs.getExprList()) {
            if (!privateGetExternalReferences(exprTree, ad, state, refs)) {
                return (false);
            }
        }
        return (true);
    }
    default:
        return false;
    }
}
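The ATTRREF_NODE branch above uses the common get-or-create pattern: look up a TreeSet in a map, create and put it if missing, then add to it. Since Java 8 the same multimap-style bookkeeping can be written with Map.computeIfAbsent; a small sketch with plain String keys (not the ClassAd types used above, and the attribute names are made up):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeSet;

public class GetOrCreateThenAdd {
    public static void main(String[] args) {
        Map<String, TreeSet<String>> refs = new HashMap<>();
        // Creates the TreeSet for the key on first use, then adds the attribute name.
        refs.computeIfAbsent("scopeA", k -> new TreeSet<>()).add("memory");
        refs.computeIfAbsent("scopeA", k -> new TreeSet<>()).add("cpus");
        refs.computeIfAbsent("scopeB", k -> new TreeSet<>()).add("disk");
        // e.g. {scopeA=[cpus, memory], scopeB=[disk]} (outer HashMap order not guaranteed)
        System.out.println(refs);
    }
}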
From source file:dhbw.ka.mwi.businesshorizon2.ui.process.period.timeline.TimelinePresenter.java
/**
 * Create future deterministic periods.<br>
 * Takes into account that periods may already exist:<br>
 * these are output first and, if more are required,<br>
 * new periods are added.<br>
 * Surplus periods are deleted if necessary.
 *
 * @author Annika Weis
 */
private void addFuturePeriods_vorhanden() {
    /*
     * If periods already exist: create that many, otherwise as many as the
     * user specifies on the parameter screen.
     */
    logger.debug("future periods");
    int i = 0;
    int weitere_perioden_future = -1;
    sumFuturePeriods = 0;
    Period basisperiode = null;
    try {
        // del_periods: holds the periods that are to be deleted
        TreeSet<Period> del_periods = new TreeSet<>();
        // iterate over all existing periods
        for (Period periode : projectProxy.getSelectedProject().getDeterministicPeriods().getPeriods()) {
            if (i == 0) {
                // first period = base year
                getView().addBasePeriod(periode);
                futurePeriods.addPeriod(periode);
                basisperiode = periode;
                logger.debug("Basisjahr");
            } else if (i > projectProxy.getSelectedProject().getPeriodsToForecast_deterministic()
                    + weitere_perioden_future) {
                // more periods exist than the user wants; these are deleted
                logger.debug("Überspringen " + periode.getYear());
                // buffer the period; it will be deleted later
                del_periods.add(periode);
            } else {
                // normal case: display the period
                getView().addFuturePeriod(periode);
                sumFuturePeriods++;
                futurePeriods.addPeriod(periode);
                logger.debug("Normalfall " + periode.getYear());
            }
            logger.debug(++i + " + " + periode.getYear() + " _ " + periode.getFreeCashFlow());
            projectProxy.getSelectedProject().setDeterministicPeriods(futurePeriods);
        }
        for (Period periode : del_periods) {
            projectProxy.getSelectedProject().getDeterministicPeriods().removePeriod(periode);
            logger.debug("Löschen " + periode.getYear());
        }
        periodClicked(basisperiode);
    } catch (Exception e) {
        logger.debug("Fehler:::");
        e.printStackTrace();
    }
    int vorhandene = 0;
    try {
        vorhandene = projectProxy.getSelectedProject().getDeterministicPeriods().getPeriods().size();
    } catch (Exception e) {
    }
    if (vorhandene == 0) {
        logger.debug("Basis aufbauen");
        create_base();
    }
    // -1 because of the base year
    vorhandene = vorhandene - 1;
    // if fewer periods exist than planned, add the missing ones
    if (vorhandene < projectProxy.getSelectedProject().getPeriodsToForecast_deterministic()
            + weitere_perioden_future) {
        logger.debug("Manuell Perioden anlegen "
                + (projectProxy.getSelectedProject().getPeriodsToForecast_deterministic()
                        + weitere_perioden_future - vorhandene));
        addFuturePeriods(projectProxy.getSelectedProject().getPeriodsToForecast_deterministic()
                + weitere_perioden_future - vorhandene, deterministicInput);
    }
    logger.debug("Periodenanzahl fut: " + sumFuturePeriods);
    return;
}
From source file:com.migratebird.script.executedscriptinfo.impl.DefaultExecutedScriptInfoSource.java
/**
 * Precondition: The table db_executed_scripts must exist
 *
 * @return All scripts that were registered as executed on the database
 */
protected synchronized SortedSet<ExecutedScript> doGetExecutedScripts() {
    TreeSet<ExecutedScript> executedScripts = new TreeSet<ExecutedScript>();
    Connection connection = null;
    Statement statement = null;
    ResultSet resultSet = null;
    try {
        connection = defaultDatabase.getDataSource().getConnection();
        statement = connection.createStatement();
        resultSet = statement.executeQuery("select " + fileNameColumnName + ", " + fileLastModifiedAtColumnName
                + ", " + checksumColumnName + ", " + executedAtColumnName + ", " + succeededColumnName
                + " from " + getQualifiedExecutedScriptsTableName());
        while (resultSet.next()) {
            String fileName = resultSet.getString(fileNameColumnName);
            String checkSum = resultSet.getString(checksumColumnName);
            Long fileLastModifiedAt = resultSet.getLong(fileLastModifiedAtColumnName);
            Date executedAt = null;
            try {
                String executedAtStr = resultSet.getString(executedAtColumnName);
                if (executedAtStr != null)
                    executedAt = timestampFormat.parse(executedAtStr);
            } catch (ParseException e) {
                throw new MigrateBirdException(
                        "Error when parsing date " + executedAt + " using format " + timestampFormat, e);
            }
            boolean succeeded = resultSet.getInt(succeededColumnName) == 1;
            Script script = scriptFactory.createScriptWithoutContent(fileName, fileLastModifiedAt, checkSum);
            if (!script.isIgnored()) {
                ExecutedScript executedScript = new ExecutedScript(script, executedAt, succeeded);
                executedScripts.add(executedScript);
            }
        }
    } catch (SQLException e) {
        throw new MigrateBirdException("Error while retrieving database version", e);
    } finally {
        closeQuietly(connection, statement, resultSet);
    }
    return executedScripts;
}
From source file:com.doitnext.http.router.DefaultEndpointResolver.java
private void addMethodToRoutes(String path, Object implInstance, RequestResponseContext rrCtx, Method implMethod,
        Class<?> implClass, HttpMethod httpMethod, TreeSet<Route> routes) {
    try {
        PathTemplate pathTemplate = pathTemplateParser.parse(path);
        MethodReturnKey acceptKey = new MethodReturnKey(rrCtx.responseType.resolve(),
                rrCtx.responseFormat.resolve());
        if (!successHandlers.containsKey(acceptKey)) {
            logger.error(String.format("No response handler for method with %s in success handlers %s",
                    acceptKey, successHandlers));
            if (logger.isDebugEnabled())
                logger.debug(String.format("successHandlers = %s", successHandlers));
            return;
        }
        // If no error handler in errorHandlers use a
        // default handler so we can handle errors.
        ResponseHandler errorHandler = defaultErrorHandler;
        if (errorHandlers.containsKey(acceptKey)) {
            errorHandler = errorHandlers.get(acceptKey);
        }
        ResponseHandler successHandler = successHandlers.get(acceptKey);
        Route route = new Route(httpMethod, rrCtx.requestType.resolve(), rrCtx.responseType.resolve(),
                rrCtx.requestFormat.resolve(), rrCtx.responseFormat.resolve(), pathTemplate, implClass,
                implMethod, invoker, implInstance, successHandler, errorHandler, false);
        if (routes.contains(route)) {
            Route existingRoute = null;
            for (Route r : routes) {
                if (r.compareTo(route) == 0) {
                    existingRoute = r;
                    break;
                }
            }
            logger.debug(String.format("An equivalent route to %s is already in routes. Conflicting route: %s",
                    route, existingRoute));
        } else {
            logger.debug(String.format("Adding route %s to routes.", route));
            routes.add(route);
        }
    } catch (Exception e) {
        logger.error(String.format("Error adding route for %s.%s", implClass.getName(), implMethod.getName()),
                e);
    }
}
From source file:canreg.client.analysis.TopNChartTableBuilder.java
@Override
public LinkedList<String> buildTable(String tableHeader, String reportFileName, int startYear, int endYear,
        Object[][] incidenceData, PopulationDataset[] populations, // can be null
        PopulationDataset[] standardPopulations, LinkedList<ConfigFields> configList, String[] engineParameters,
        FileTypes fileType) throws NotCompatibleDataException {
    String footerString = java.util.ResourceBundle
            .getBundle("canreg/client/analysis/resources/AgeSpecificCasesPerHundredThousandTableBuilder")
            .getString("TABLE BUILT ")
            + new Date()
            + java.util.ResourceBundle
                    .getBundle("canreg/client/analysis/resources/AgeSpecificCasesPerHundredThousandTableBuilder")
                    .getString(" BY CANREG5.");
    LinkedList<String> generatedFiles = new LinkedList<String>();
    if (Arrays.asList(engineParameters).contains("barchart")) {
        chartType = ChartType.BAR;
    } else {
        chartType = ChartType.PIE;
        includeOther = true;
    }
    if (Arrays.asList(engineParameters).contains("legend")) {
        legendOn = true;
    }
    if (Arrays.asList(engineParameters).contains("r")) {
        useR = true;
    }
    if (Arrays.asList(engineParameters).contains("asr")) {
        countType = CountType.ASR;
    } else if (Arrays.asList(engineParameters).contains("cum64")) {
        countType = CountType.CUM64;
    } else if (Arrays.asList(engineParameters).contains("cum74")) {
        countType = CountType.CUM74;
    } else if (Arrays.asList(engineParameters).contains("per100000")) {
        countType = CountType.PER_HUNDRED_THOUSAND;
    } else {
        // default to cases
        countType = CountType.CASES;
    }
    localSettings = CanRegClientApp.getApplication().getLocalSettings();
    rpath = localSettings.getProperty(LocalSettings.R_PATH);
    // does R exist?
    if (rpath == null || rpath.isEmpty() || !new File(rpath).exists()) {
        useR = false; // force false if R is not installed
    }
    icdLabel = ConfigFieldsReader.findConfig("ICD_groups_labels", configList);
    icd10GroupDescriptions = ConfigFieldsReader.findConfig("ICD10_groups", configList);
    cancerGroupsLocal = EditorialTableTools.generateICD10Groups(icd10GroupDescriptions);
    // indexes
    keyGroupsMap = new EnumMap<KeyCancerGroupsEnum, Integer>(KeyCancerGroupsEnum.class);
    keyGroupsMap.put(KeyCancerGroupsEnum.allCancerGroupsIndex,
            EditorialTableTools.getICD10index("ALL", icd10GroupDescriptions));
    keyGroupsMap.put(KeyCancerGroupsEnum.leukemiaNOSCancerGroupIndex,
            EditorialTableTools.getICD10index(950, cancerGroupsLocal));
    keyGroupsMap.put(KeyCancerGroupsEnum.skinCancerGroupIndex,
            EditorialTableTools.getICD10index("C44", icd10GroupDescriptions));
    keyGroupsMap.put(KeyCancerGroupsEnum.bladderCancerGroupIndex,
            EditorialTableTools.getICD10index("C67", icd10GroupDescriptions));
    keyGroupsMap.put(KeyCancerGroupsEnum.mesotheliomaCancerGroupIndex,
            EditorialTableTools.getICD10index("C45", icd10GroupDescriptions));
    keyGroupsMap.put(KeyCancerGroupsEnum.kaposiSarkomaCancerGroupIndex,
            EditorialTableTools.getICD10index("C46", icd10GroupDescriptions));
    keyGroupsMap.put(KeyCancerGroupsEnum.myeloproliferativeDisordersCancerGroupIndex,
            EditorialTableTools.getICD10index("MPD", icd10GroupDescriptions));
    keyGroupsMap.put(KeyCancerGroupsEnum.myelodysplasticSyndromesCancerGroupIndex,
            EditorialTableTools.getICD10index("MDS", icd10GroupDescriptions));
    keyGroupsMap.put(KeyCancerGroupsEnum.allCancerGroupsButSkinIndex,
            EditorialTableTools.getICD10index("ALLbC44", icd10GroupDescriptions));
    keyGroupsMap.put(KeyCancerGroupsEnum.brainAndCentralNervousSystemCancerGroupIndex,
            EditorialTableTools.getICD10index("C70-72", icd10GroupDescriptions));
    keyGroupsMap.put(KeyCancerGroupsEnum.ovaryCancerGroupIndex,
            EditorialTableTools.getICD10index(569, cancerGroupsLocal));
    keyGroupsMap.put(KeyCancerGroupsEnum.otherCancerGroupsIndex,
            EditorialTableTools.getICD10index("O&U", icd10GroupDescriptions));
    otherCancerGroupsIndex = keyGroupsMap.get(KeyCancerGroupsEnum.otherCancerGroupsIndex);
    skinCancerGroupIndex = keyGroupsMap.get(KeyCancerGroupsEnum.skinCancerGroupIndex);
    allCancerGroupsIndex = keyGroupsMap.get(KeyCancerGroupsEnum.allCancerGroupsIndex);
    allCancerGroupsButSkinIndex = keyGroupsMap.get(KeyCancerGroupsEnum.allCancerGroupsButSkinIndex);
    numberOfCancerGroups = cancerGroupsLocal.length;
    double[] countsRow;
    if (populations != null && populations.length > 0) {
        if (populations[0].getPopulationDatasetID() < 0) {
            countType = CountType.CASES;
        } else {
            // calculate period pop
            periodPop = new PopulationDataset();
            periodPop.setAgeGroupStructure(populations[0].getAgeGroupStructure());
            periodPop.setReferencePopulation(populations[0].getReferencePopulation());
            for (PopulationDatasetsEntry pde : populations[0].getAgeGroups()) {
                int count = 0;
                for (PopulationDataset pds : populations) {
                    count += pds.getAgeGroupCount(pde.getSex(), pde.getAgeGroup());
                }
                periodPop.addAgeGroup(new PopulationDatasetsEntry(pde.getAgeGroup(), pde.getSex(), count));
            }
        }
    }
    if (incidenceData != null) {
        String sexString, icdString, morphologyString;
        double countArray[][] = new double[numberOfCancerGroups][numberOfSexes];
        int sex, icdIndex, numberOfCases, age;
        double adjustedCases;
        List<Integer> dontCount = new LinkedList<Integer>();
        // all sites but skin?
        if (Arrays.asList(engineParameters).contains("noC44")) {
            dontCount.add(skinCancerGroupIndex);
            tableHeader += ", excluding C44";
        }
        for (Object[] dataLine : incidenceData) {
            // Set default
            adjustedCases = 0.0;
            // Extract data
            sexString = (String) dataLine[SEX_COLUMN];
            sex = Integer.parseInt(sexString.trim());
            // sex = 3 is unknown sex
            if (sex > 2) {
                sex = 3;
            }
            morphologyString = (String) dataLine[MORPHOLOGY_COLUMN];
            icdString = (String) dataLine[ICD10_COLUMN];
            icdIndex = Tools.assignICDGroupIndex(keyGroupsMap, icdString, morphologyString, cancerGroupsLocal);
            age = (Integer) dataLine[AGE_COLUMN];
            if (!dontCount.contains(icdIndex) && icdIndex != DONT_COUNT) {
                // Extract cases
                numberOfCases = (Integer) dataLine[CASES_COLUMN];
                if (countType == CountType.PER_HUNDRED_THOUSAND) {
                    adjustedCases = (100000.0 * numberOfCases)
                            / periodPop.getAgeGroupCount(sex, periodPop.getAgeGroupIndex(age));
                } else if (countType == CountType.ASR) {
                    try {
                        adjustedCases = 100.0
                                * (periodPop.getReferencePopulationForAgeGroupIndex(sex,
                                        periodPop.getAgeGroupIndex(age)) * numberOfCases)
                                / periodPop.getAgeGroupCount(sex, periodPop.getAgeGroupIndex(age));
                    } catch (IncompatiblePopulationDataSetException ex) {
                        Logger.getLogger(TopNChartTableBuilder.class.getName()).log(Level.SEVERE, null, ex);
                    }
                } else if (countType == CountType.CUM64) {
                    if (age < 65) {
                        adjustedCases = (100000.0 * numberOfCases)
                                / periodPop.getAgeGroupCount(sex, periodPop.getAgeGroupIndex(age)) * 5.0
                                / 1000.0;
                    }
                } else if (countType == CountType.CUM74) {
                    if (age < 75) {
                        adjustedCases = (100000.0 * numberOfCases)
                                / periodPop.getAgeGroupCount(sex, periodPop.getAgeGroupIndex(age)) * 5.0
                                / 1000.0;
                    }
                } else {
                    adjustedCases = numberOfCases;
                }
                if (sex <= numberOfSexes && icdIndex >= 0 && icdIndex <= cancerGroupsLocal.length) {
                    countArray[icdIndex][sex - 1] += adjustedCases;
                } else {
                    if (otherCancerGroupsIndex >= 0) {
                        countArray[otherCancerGroupsIndex][sex - 1] += adjustedCases;
                    }
                }
                if (allCancerGroupsIndex >= 0) {
                    countArray[allCancerGroupsIndex][sex - 1] += adjustedCases;
                }
                if (allCancerGroupsButSkinIndex >= 0 && skinCancerGroupIndex >= 0
                        && icdIndex != skinCancerGroupIndex) {
                    countArray[allCancerGroupsButSkinIndex][sex - 1] += adjustedCases;
                }
            }
        }
        // separate top 10 and the rest
        TreeSet<CancerCasesCount> topNMale = new TreeSet<CancerCasesCount>(new Comparator<CancerCasesCount>() {
            // order by count descending; break ties on the string form so equal counts stay distinct
            @Override
            public int compare(CancerCasesCount o1, CancerCasesCount o2) {
                if (o1.getCount().equals(o2.getCount())) {
                    return -o1.toString().compareTo(o2.toString());
                } else {
                    return -(o1.getCount().compareTo(o2.getCount()));
                }
            }
        });
        LinkedList<CancerCasesCount> theRestMale = new LinkedList<CancerCasesCount>();
        TreeSet<CancerCasesCount> topNFemale = new TreeSet<CancerCasesCount>(
                new Comparator<CancerCasesCount>() {
                    @Override
                    public int compare(CancerCasesCount o1, CancerCasesCount o2) {
                        if (o1.getCount().equals(o2.getCount())) {
                            return -o1.toString().compareTo(o2.toString());
                        } else {
                            return -(o1.getCount().compareTo(o2.getCount()));
                        }
                    }
                });
        LinkedList<CancerCasesCount> theRestFemale = new LinkedList<CancerCasesCount>();
        CancerCasesCount otherElement;
        CancerCasesCount thisElement;
        TreeSet<CancerCasesCount> topN;
        LinkedList<CancerCasesCount> theRest;
        for (int icdGroupNumber = 0; icdGroupNumber < countArray.length; icdGroupNumber++) {
            countsRow = countArray[icdGroupNumber];
            for (int sexNumber = 0; sexNumber < 2; sexNumber++) {
                if (sexNumber == 0) {
                    topN = topNMale;
                    theRest = theRestMale;
                } else {
                    topN = topNFemale;
                    theRest = theRestFemale;
                }
                if (countsRow[sexNumber] > 0) {
                    thisElement = new CancerCasesCount(icd10GroupDescriptions[icdGroupNumber],
                            icdLabel[icdGroupNumber].substring(3), countsRow[sexNumber], icdGroupNumber);
                    // if this is the "other" group - add it immediately to "the rest"
                    if (icdGroupNumber == otherCancerGroupsIndex) {
                        theRest.add(thisElement);
                        // if not we check if this is one of the collection groups
                    } else if (icdGroupNumber != allCancerGroupsButSkinIndex
                            && icdGroupNumber != allCancerGroupsIndex) {
                        // if it is less than N cancers in top N - add it
                        if (topN.size() < topNLimit) {
                            topN.add(thisElement);
                        } else {
                            // otherwise we need to compare it to the last element in the top 10
                            otherElement = topN.last();
                            if (thisElement.compareTo(otherElement) < 0) {
                                topN.remove(otherElement);
                                theRest.add(otherElement);
                                topN.add(thisElement);
                            } else {
                                theRest.add(thisElement);
                            }
                        }
                    }
                }
            }
        }
        for (int sexNumber : new int[] { 0, 1 }) {
            String fileName = reportFileName + "-" + sexLabel[sexNumber] + "." + fileType.toString();
            File file = new File(fileName);
            TreeSet<CancerCasesCount> casesCounts;
            Double restCount = Tools.sumUpTheRest(theRestMale, dontCount);
            if (sexNumber == 0) {
                casesCounts = topNMale;
            } else {
                casesCounts = topNFemale;
            }
            if (useR && !fileType.equals(FileTypes.jchart) && !fileType.equals(FileTypes.csv)) {
                String header = "Top 10 by " + countType + ", \n" + tableHeader + ", "
                        + TableBuilderInterface.sexLabel[sexNumber];
                generatedFiles.addAll(Tools.generateRChart(casesCounts, fileName, header, fileType, chartType,
                        includeOther, restCount, rpath, true, "Site"));
            } else {
                double allCount = countArray[allCancerGroupsIndex][sexNumber];
                Color color;
                if (sexNumber == 0) {
                    color = Color.BLUE;
                } else {
                    color = Color.RED;
                }
                String header = "Top 10 by " + countType + ", " + tableHeader + ", "
                        + TableBuilderInterface.sexLabel[sexNumber];
                charts[sexNumber] = Tools.generateJChart(casesCounts, fileName, header, fileType, chartType,
                        includeOther, legendOn, restCount, allCount, color, "Site");
                try {
                    generatedFiles.add(Tools.writeJChartToFile(charts[sexNumber], file, fileType));
                } catch (IOException ex) {
                    Logger.getLogger(TopNChartTableBuilder.class.getName()).log(Level.SEVERE, null, ex);
                } catch (DocumentException ex) {
                    Logger.getLogger(TopNChartTableBuilder.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
    }
    return generatedFiles;
}
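The core of the example above is a bounded top-N selection: a TreeSet ordered descending by count holds at most topNLimit entries, and each candidate is either added directly or swapped in against last() when it outranks it. A stripped-down sketch of that pattern using plain map entries; the labels, counts, and limit are illustrative, not taken from CanReg:

import java.util.AbstractMap.SimpleEntry;
import java.util.Comparator;
import java.util.Map;
import java.util.TreeSet;

public class TopNCounts {
    public static void main(String[] args) {
        int topNLimit = 3;
        // Descending by count; tie-break on the label so distinct entries never compare as 0.
        TreeSet<Map.Entry<String, Double>> topN = new TreeSet<>(
                Comparator.comparingDouble(Map.Entry<String, Double>::getValue).reversed()
                        .thenComparing(Map.Entry::getKey));
        double[] counts = { 12, 40, 7, 33, 25 };
        String[] labels = { "C16", "C50", "C44", "C61", "C18" };
        for (int i = 0; i < counts.length; i++) {
            Map.Entry<String, Double> candidate = new SimpleEntry<>(labels[i], counts[i]);
            if (topN.size() < topNLimit) {
                topN.add(candidate);
            } else if (topN.comparator().compare(candidate, topN.last()) < 0) {
                topN.pollLast();   // evict the smallest of the current top N
                topN.add(candidate);
            }
        }
        System.out.println(topN); // [C50=40.0, C61=33.0, C18=25.0]
    }
}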
From source file:com.repeatability.pdf.PDFTextStripper.java
/**
 * This will process a TextPosition object and add the text to the list of characters on a page. It takes care of
 * overlapping text.
 *
 * @param text The text to process.
 */
@Override
protected void processTextPosition(TextPosition text) {
    boolean showCharacter = true;
    if (suppressDuplicateOverlappingText) {
        showCharacter = false;
        String textCharacter = text.getUnicode();
        float textX = text.getX();
        float textY = text.getY();
        TreeMap<Float, TreeSet<Float>> sameTextCharacters = characterListMapping.get(textCharacter);
        if (sameTextCharacters == null) {
            sameTextCharacters = new TreeMap<Float, TreeSet<Float>>();
            characterListMapping.put(textCharacter, sameTextCharacters);
        }
        // RDD - Here we compute the value that represents the end of the rendered
        // text. This value is used to determine whether subsequent text rendered
        // on the same line overwrites the current text.
        //
        // We subtract any positive padding to handle cases where extreme amounts
        // of padding are applied, then backed off (not sure why this is done, but there
        // are cases where the padding is on the order of 10x the character width, and
        // the TJ just backs up to compensate after each character). Also, we subtract
        // an amount to allow for kerning (a percentage of the width of the last
        // character).
        boolean suppressCharacter = false;
        float tolerance = text.getWidth() / textCharacter.length() / 3.0f;
        SortedMap<Float, TreeSet<Float>> xMatches = sameTextCharacters.subMap(textX - tolerance,
                textX + tolerance);
        for (TreeSet<Float> xMatch : xMatches.values()) {
            SortedSet<Float> yMatches = xMatch.subSet(textY - tolerance, textY + tolerance);
            if (!yMatches.isEmpty()) {
                suppressCharacter = true;
                break;
            }
        }
        if (!suppressCharacter) {
            TreeSet<Float> ySet = sameTextCharacters.get(textX);
            if (ySet == null) {
                ySet = new TreeSet<Float>();
                sameTextCharacters.put(textX, ySet);
            }
            ySet.add(textY);
            showCharacter = true;
        }
    }
    if (showCharacter) {
        // if we are showing the character then we need to determine which article it belongs to
        int foundArticleDivisionIndex = -1;
        int notFoundButFirstLeftAndAboveArticleDivisionIndex = -1;
        int notFoundButFirstLeftArticleDivisionIndex = -1;
        int notFoundButFirstAboveArticleDivisionIndex = -1;
        float x = text.getX();
        float y = text.getY();
        if (shouldSeparateByBeads) {
            for (int i = 0; i < beadRectangles.size() && foundArticleDivisionIndex == -1; i++) {
                PDRectangle rect = beadRectangles.get(i);
                if (rect != null) {
                    if (rect.contains(x, y)) {
                        foundArticleDivisionIndex = i * 2 + 1;
                    } else if ((x < rect.getLowerLeftX() || y < rect.getUpperRightY())
                            && notFoundButFirstLeftAndAboveArticleDivisionIndex == -1) {
                        notFoundButFirstLeftAndAboveArticleDivisionIndex = i * 2;
                    } else if (x < rect.getLowerLeftX() && notFoundButFirstLeftArticleDivisionIndex == -1) {
                        notFoundButFirstLeftArticleDivisionIndex = i * 2;
                    } else if (y < rect.getUpperRightY() && notFoundButFirstAboveArticleDivisionIndex == -1) {
                        notFoundButFirstAboveArticleDivisionIndex = i * 2;
                    }
                } else {
                    foundArticleDivisionIndex = 0;
                }
            }
        } else {
            foundArticleDivisionIndex = 0;
        }
        int articleDivisionIndex;
        if (foundArticleDivisionIndex != -1) {
            articleDivisionIndex = foundArticleDivisionIndex;
        } else if (notFoundButFirstLeftAndAboveArticleDivisionIndex != -1) {
            articleDivisionIndex = notFoundButFirstLeftAndAboveArticleDivisionIndex;
        } else if (notFoundButFirstLeftArticleDivisionIndex != -1) {
            articleDivisionIndex = notFoundButFirstLeftArticleDivisionIndex;
        } else if (notFoundButFirstAboveArticleDivisionIndex != -1) {
            articleDivisionIndex = notFoundButFirstAboveArticleDivisionIndex;
        } else {
            articleDivisionIndex = charactersByArticle.size() - 1;
        }
        List<TextPosition> textList = charactersByArticle.get(articleDivisionIndex);
        // In the wild, some PDF encoded documents put diacritics (accents on
        // top of characters) into a separate Tj element. When displaying them
        // graphically, the two chunks get overlayed. With text output though,
        // we need to do the overlay. This code recombines the diacritic with
        // its associated character if the two are consecutive.
        if (textList.isEmpty()) {
            textList.add(text);
        } else {
            // test if we overlap the previous entry.
            // Note that we are making an assumption that we need to only look back
            // one TextPosition to find what we are overlapping.
            // This may not always be true.
            TextPosition previousTextPosition = textList.get(textList.size() - 1);
            if (text.isDiacritic() && previousTextPosition.contains(text)) {
                previousTextPosition.mergeDiacritic(text);
            }
            // If the previous TextPosition was the diacritic, merge it into this
            // one and remove it from the list.
            else if (previousTextPosition.isDiacritic() && text.contains(previousTextPosition)) {
                text.mergeDiacritic(previousTextPosition);
                textList.remove(textList.size() - 1);
                textList.add(text);
            } else {
                textList.add(text);
            }
        }
    }
}
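The duplicate suppression above leans on the navigable views of the sorted collections: TreeMap.subMap narrows the already-seen x positions to a tolerance window and TreeSet.subSet does the same for y, so a glyph position is only recorded via ySet.add(textY) when nothing nearby exists yet. A condensed sketch of that range-check-then-add idea; the coordinates and tolerance are made up, not taken from PDFBox:

import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;

public class OverlapSuppression {
    public static void main(String[] args) {
        // x position -> set of y positions already seen for one glyph
        TreeMap<Float, TreeSet<Float>> seen = new TreeMap<>();
        float tolerance = 0.5f;
        float[][] glyphs = { { 10.0f, 50.0f }, { 10.2f, 50.1f }, { 25.0f, 50.0f } };
        for (float[] g : glyphs) {
            float x = g[0], y = g[1];
            boolean duplicate = false;
            // check every x within tolerance, then every y within tolerance
            for (TreeSet<Float> ys : seen.subMap(x - tolerance, x + tolerance).values()) {
                SortedSet<Float> near = ys.subSet(y - tolerance, y + tolerance);
                if (!near.isEmpty()) {
                    duplicate = true;
                    break;
                }
            }
            if (!duplicate) {
                seen.computeIfAbsent(x, k -> new TreeSet<>()).add(y);
            }
            System.out.println("(" + x + ", " + y + ") duplicate=" + duplicate);
        }
    }
}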
From source file:net.spfbl.data.Block.java
public static TreeSet<String> getAll(Client client, User user) throws ProcessException {
    TreeSet<String> blockSet = new TreeSet<String>();
    // Determine the user's e-mail address.
    String userEmail = null;
    if (user != null) {
        userEmail = user.getEmail();
    } else if (client != null) {
        userEmail = client.getEmail();
    }
    for (String token : getAll()) {
        if (!token.contains(":")) {
            blockSet.add(token);
        } else if (userEmail != null && token.startsWith(userEmail + ':')) {
            int index = token.indexOf(':') + 1;
            token = token.substring(index);
            blockSet.add(token);
        }
    }
    return blockSet;
}
From source file:com.tasktop.c2c.server.scm.service.GitServiceBean.java
private List<RevCommit> getAllCommits(Repository repository, Region region, Set<ObjectId> visited) {
    TreeSet<RevCommit> result = new TreeSet<RevCommit>(new Comparator<RevCommit>() {
        // newest commit first; ties broken by commit id
        @Override
        public int compare(RevCommit o1, RevCommit o2) {
            int ctDiff = o2.getCommitTime() - o1.getCommitTime();
            if (ctDiff != 0) {
                return ctDiff;
            }
            return o1.getId().compareTo(o2.getId());
        }
    });
    int maxResultsToConsider = -1;
    if (region != null) {
        maxResultsToConsider = region.getOffset() + region.getSize();
    }
    long minTime = -1;
    try {
        for (Ref ref : getRefsToAdd(repository)) {
            RevWalk revWal = new RevWalk(repository);
            revWal.markStart(revWal.parseCommit(ref.getObjectId()));
            int index = 0;
            for (RevCommit revCommit : revWal) {
                if (region == null
                        || (index >= region.getOffset() && index < region.getOffset() + region.getSize())) {
                    if (minTime > 0 && revCommit.getCommitTime() < minTime) {
                        break;
                    }
                    if (visited.add(revCommit.getId())) {
                        result.add(revCommit);
                        if (maxResultsToConsider > 0 && result.size() > maxResultsToConsider) {
                            RevCommit last = result.last();
                            result.remove(last);
                            minTime = last.getCommitTime();
                        }
                    } else {
                        break; // Done with this branch
                    }
                }
                index++;
                if (region != null && (index >= region.getOffset() + region.getSize())) {
                    break;
                }
            }
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    return new ArrayList<RevCommit>(result);
}