List of usage examples for java.util.TreeSet.addAll
public boolean addAll(Collection<? extends E> c)
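Before the project examples below, here is a minimal, self-contained sketch of the method's behavior (the class name and values are illustrative, not taken from any of the projects): addAll inserts every element of the argument collection into the set's sort order, silently drops duplicates per the set's comparator or natural ordering, and returns true only if the set actually changed.

import java.util.Arrays;
import java.util.TreeSet;

public class TreeSetAddAllDemo {
    public static void main(String[] args) {
        // Elements are kept in natural (alphabetical) order; the duplicate "alice" is dropped.
        TreeSet<String> names = new TreeSet<String>();
        names.addAll(Arrays.asList("carol", "alice", "bob", "alice"));
        System.out.println(names); // [alice, bob, carol]

        // addAll returns true only if the set changed.
        System.out.println(names.addAll(Arrays.asList("alice"))); // false (already present)
        System.out.println(names.addAll(Arrays.asList("dave")));  // true
    }
}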
From source file:org.apache.axis2.jaxws.runtime.description.marshal.impl.MarshalServiceRuntimeDescriptionBuilder.java
/**
 * Initializes the MarshalServiceRuntimeDescription: discovers the wrapper and fault
 * artifacts, builds the annotation map and the property descriptor cache, and
 * collects the set of packages needed for marshalling.
 */
static private void init(MarshalServiceRuntimeDescriptionImpl marshalDesc, ServiceDescription serviceDesc) {
    if (log.isDebugEnabled()) {
        log.debug("start init");
    }
    if (log.isDebugEnabled()) {
        log.debug("Discover the artifacts");
    }
    // Artifact class discovery/builder
    ArtifactProcessor artifactProcessor = new ArtifactProcessor(serviceDesc);
    try {
        artifactProcessor.build();
    } catch (Throwable t) {
        throw ExceptionFactory.makeWebServiceException(t);
    }
    marshalDesc.setRequestWrapperMap(artifactProcessor.getRequestWrapperMap());
    marshalDesc.setResponseWrapperMap(artifactProcessor.getResponseWrapperMap());
    marshalDesc.setFaultBeanDescMap(artifactProcessor.getFaultBeanDescMap());
    marshalDesc.setMethodMap(artifactProcessor.getMethodMap());

    if (log.isDebugEnabled()) {
        log.debug("Build the annotations map");
    }
    // Build the annotation map
    Map<String, AnnotationDesc> map;
    try {
        map = AnnotationBuilder.getAnnotationDescs(serviceDesc, artifactProcessor);
    } catch (Throwable t) {
        // Since we are building a cache, proceed without exception
        if (log.isDebugEnabled()) {
            log.debug("Exception occurred during cache processing. This will impact performance:" + t);
        }
        map = new HashMap<String, AnnotationDesc>();
    }
    marshalDesc.setAnnotationMap(map);

    if (log.isDebugEnabled()) {
        log.debug("Build the property descriptor cache");
    }
    // Build the property descriptor map
    Map<Class, Map<String, PropertyDescriptorPlus>> cache;
    try {
        cache = PropertyDescriptorMapBuilder.getPropertyDescMaps(serviceDesc, artifactProcessor);
    } catch (Throwable t) {
        // Since we are building a cache, proceed without exception
        if (log.isDebugEnabled()) {
            log.debug("Exception occurred during cache processing. This will impact performance:" + t);
        }
        cache = new HashMap<Class, Map<String, PropertyDescriptorPlus>>();
    }
    marshalDesc.setPropertyDescriptorMapCache(cache);

    // @TODO There are two ways to get the packages:
    // the schema walk (preferred) and the annotation walk.
    // The schema walk requires an existing or generated schema.
    // There are some limitations in the current schema walk,
    // and there are problems in the annotation walk,
    // so for now we do both.
    TreeSet<String> packages = new TreeSet<String>();
    boolean doSchemaWalk = true;
    boolean doAnnotationWalk = true;
    if (doSchemaWalk) {
        if (log.isDebugEnabled()) {
            log.debug("Get the packages using the schema");
        }
        packages.addAll(PackageSetBuilder.getPackagesFromSchema(serviceDesc));
    }
    if (doAnnotationWalk) {
        // Get the package names from the annotations.
        // Use the annotation map to reduce annotation introspection.
        if (log.isDebugEnabled()) {
            log.debug("Get the packages using the class annotations");
        }
        packages.addAll(PackageSetBuilder.getPackagesFromAnnotations(serviceDesc, marshalDesc));
    }
    marshalDesc.setPackages(packages);

    if (log.isDebugEnabled()) {
        log.debug("MarshalDesc = " + marshalDesc);
        log.debug("end init");
    }
}
From source file:org.apache.hadoop.hive.ql.optimizer.calcite.druid.DruidIntervalUtils.java
protected static List<Range> condenseRanges(List<Range> ranges) {
    if (ranges.size() <= 1) {
        return ranges;
    }

    Comparator<Range> startThenEnd = new Comparator<Range>() {
        @Override
        public int compare(Range lhs, Range rhs) {
            int compare = 0;
            if (lhs.hasLowerBound() && rhs.hasLowerBound()) {
                compare = lhs.lowerEndpoint().compareTo(rhs.lowerEndpoint());
            } else if (!lhs.hasLowerBound() && rhs.hasLowerBound()) {
                compare = -1;
            } else if (lhs.hasLowerBound() && !rhs.hasLowerBound()) {
                compare = 1;
            }
            if (compare != 0) {
                return compare;
            }
            if (lhs.hasUpperBound() && rhs.hasUpperBound()) {
                compare = lhs.upperEndpoint().compareTo(rhs.upperEndpoint());
            } else if (!lhs.hasUpperBound() && rhs.hasUpperBound()) {
                compare = -1;
            } else if (lhs.hasUpperBound() && !rhs.hasUpperBound()) {
                compare = 1;
            }
            return compare;
        }
    };

    // Sort by start (unbounded ends first), then by end, so overlapping
    // or adjacent ranges become neighbors in the iteration order.
    TreeSet<Range> sortedIntervals = Sets.newTreeSet(startThenEnd);
    sortedIntervals.addAll(ranges);

    List<Range> retVal = Lists.newArrayList();

    Iterator<Range> intervalsIter = sortedIntervals.iterator();
    Range currInterval = intervalsIter.next();
    while (intervalsIter.hasNext()) {
        Range next = intervalsIter.next();
        if (currInterval.encloses(next)) {
            continue;
        }
        if (mergeable(currInterval, next)) {
            currInterval = currInterval.span(next);
        } else {
            retVal.add(currInterval);
            currInterval = next;
        }
    }
    retVal.add(currInterval);

    return retVal;
}
From source file:org.broadinstitute.gatk.utils.haplotype.EventMap.java
/**
 * Get all of the VariantContexts in the event maps for all haplotypes, sorted by their start position
 * @param haplotypes the set of haplotypes to grab the VCs from
 * @return a sorted set of variant contexts
 */
public static TreeSet<VariantContext> getAllVariantContexts(final List<Haplotype> haplotypes) {
    // Using the cigar from each called haplotype figure out what events need to be written out in a VCF file
    final TreeSet<VariantContext> vcs = new TreeSet<VariantContext>(new VariantContextComparator());
    for (final Haplotype h : haplotypes) {
        vcs.addAll(h.getEventMap().getVariantContexts());
    }
    return vcs;
}
From source file:org.aika.network.neuron.lattice.AndNode.java
private static TreeSet<Node> computeNonSignificantUpperBound(TreeSet<AndNode> significantLowerBound) {
    TreeSet<Node> nonSignificantUpperBound = new TreeSet<>();
    for (AndNode n : significantLowerBound) {
        nonSignificantUpperBound.addAll(n.parents.values());
    }
    return nonSignificantUpperBound;
}
From source file:org.aika.network.neuron.lattice.AndNode.java
public static void addActivationsToNextLevelPattern(Iteration t, LatticeQueue queue, Node firstNode,
        Node secondNode, InputNode refinement, Activation act, Option conflict) {
    Key ak = act.key;
    AndNode nlp = firstNode.andChildren.get(refinement);
    if (nlp == null) {
        return;
    }
    boolean first = true;
    for (Activation secondAct : secondNode.getActivations(ak.pos)) {
        Option o = Option.add(t.doc, true, ak.o, secondAct.key.o);
        if (o != null && (conflict == null || o.contains(conflict))) {
            if (first) {
                for (Map.Entry<InputNode, Node> me : nlp.parents.entrySet()) {
                    me.getValue().andChildrenWithinDocument.put(me.getKey(), nlp);
                }
                first = false;
            }
            TreeSet<Activation> inputActs = new TreeSet<>();
            if (act.uses != null) {
                inputActs.addAll(act.uses);
            }
            if (secondAct.uses != null) {
                inputActs.addAll(secondAct.uses);
            }
            nlp.addActivation(t, queue, new Key(ak.pos, o, Math.max(ak.fired, secondAct.key.fired)),
                    Math.max(act.recurrentCount, secondAct.recurrentCount), inputActs);
        }
    }
}
From source file:org.unitime.timetable.test.BatchStudentSectioningTest.java
public static void batchSectioning(DataProperties cfg) {
    StudentSectioningModel model = new StudentSectioningModel(cfg);
    DefaultSingleAssignment<Request, Enrollment> assignment = new DefaultSingleAssignment<Request, Enrollment>();
    try {
        new BatchStudentSectioningLoader(model, assignment).load();
    } catch (Exception e) {
        sLog.error("Unable to load problem, reason: " + e.getMessage(), e);
        return;
    }

    Solver solver = new Solver(cfg);
    Solution solution = new Solution(model, assignment, 0, 0);
    solver.setInitalSolution(solution);
    solver.addSolverListener(new SolverListener<Request, Enrollment>() {
        public boolean variableSelected(Assignment<Request, Enrollment> assignment, long iteration,
                Request variable) {
            return true;
        }

        public boolean valueSelected(Assignment<Request, Enrollment> assignment, long iteration,
                Request variable, Enrollment value) {
            return true;
        }

        public boolean neighbourSelected(Assignment<Request, Enrollment> assignment, long iteration,
                Neighbour<Request, Enrollment> neighbour) {
            sLog.debug("Select[" + iteration + "]: " + neighbour);
            return true;
        }

        public void neighbourFailed(Assignment<Request, Enrollment> assignment, long iteration,
                Neighbour<Request, Enrollment> neighbour) {
        }
    });

    solution.addSolutionListener(new SolutionListener() {
        public void solutionUpdated(Solution solution) {
        }

        public void getInfo(Solution solution, java.util.Map info) {
        }

        public void getInfo(Solution solution, java.util.Map info, java.util.Collection variables) {
        }

        public void bestCleared(Solution solution) {
        }

        public void bestSaved(Solution solution) {
            StudentSectioningModel m = (StudentSectioningModel) solution.getModel();
            Assignment<Request, Enrollment> a = solution.getAssignment();
            sLog.debug("**BEST** V:" + m.nrAssignedVariables(a) + "/" + m.variables().size() + " - S:"
                    + m.getContext(a).nrComplete() + "/" + m.getStudents().size() + " - TV:"
                    + sDF.format(m.getTotalValue(a)));
        }

        public void bestRestored(Solution solution) {
        }
    });

    try {
        new StudentSectioningXMLSaver(solver)
                .save(new File(new File(cfg.getProperty("General.Output", ".")), "input.xml"));
    } catch (Exception e) {
        sLog.error("Unable to save input data, reason: " + e.getMessage(), e);
    }

    solver.start();
    try {
        solver.getSolverThread().join();
    } catch (InterruptedException e) {
    }

    solution = solver.lastSolution();
    solution.restoreBest();

    model = (StudentSectioningModel) solution.getModel();

    try {
        File outDir = new File(cfg.getProperty("General.Output", "."));
        outDir.mkdirs();

        CourseConflictTable cct = new CourseConflictTable((StudentSectioningModel) solution.getModel());
        cct.createTable(assignment, true, false, true).save(new File(outDir, "conflicts-lastlike.csv"));
        cct.createTable(assignment, false, true, true).save(new File(outDir, "conflicts-real.csv"));

        DistanceConflictTable dct = new DistanceConflictTable((StudentSectioningModel) solution.getModel());
        dct.createTable(assignment, true, false, true).save(new File(outDir, "distances-lastlike.csv"));
        dct.createTable(assignment, false, true, true).save(new File(outDir, "distances-real.csv"));

        if (cfg.getPropertyBoolean("Test.InevitableStudentConflictsCheck", false)) {
            InevitableStudentConflicts ch = new InevitableStudentConflicts(model);
            if (!ch.check(assignment))
                ch.getCSVFile().save(new File(outDir, "inevitable-conflicts.csv"));
        }
    } catch (IOException e) {
        sLog.error(e.getMessage(), e);
    }

    solution.saveBest();
    model.computeOnlineSectioningInfos(assignment);

    new OverlapCheck((StudentSectioningModel) solution.getModel()).check(assignment);
    new SectionLimitCheck((StudentSectioningModel) solution.getModel()).check(assignment);

    sLog.info("Best solution found after " + solution.getBestTime() + " seconds ("
            + solution.getBestIteration() + " iterations).");
    sLog.info("Number of assigned variables is " + solution.getModel().nrAssignedVariables(assignment));
    sLog.info("Number of students with complete schedule is " + model.getContext(assignment).nrComplete());
    sLog.info("Total value of the solution is " + solution.getModel().getTotalValue(assignment));
    sLog.info("Average unassigned priority " + sDF.format(model.avgUnassignPriority(assignment)));
    sLog.info("Average number of requests " + sDF.format(model.avgNrRequests()));
    sLog.info("Unassigned request weight " + sDF.format(model.getUnassignedRequestWeight(assignment)) + " / "
            + sDF.format(model.getTotalRequestWeight()));
    sLog.info("Info: " + ToolBox.dict2string(solution.getExtendedInfo(), 2));

    PrintWriter pw = null;
    try {
        pw = new PrintWriter(new FileWriter(
                new File(new File(cfg.getProperty("General.Output", ".")), "info.properties")));
        TreeSet entrySet = new TreeSet(new Comparator() {
            public int compare(Object o1, Object o2) {
                Map.Entry e1 = (Map.Entry) o1;
                Map.Entry e2 = (Map.Entry) o2;
                return ((Comparable) e1.getKey()).compareTo(e2.getKey());
            }
        });
        entrySet.addAll(solution.getExtendedInfo().entrySet());
        for (Iterator i = entrySet.iterator(); i.hasNext();) {
            Map.Entry entry = (Map.Entry) i.next();
            pw.println(entry.getKey().toString().toLowerCase().replace(' ', '.') + "=" + entry.getValue());
        }
        pw.flush();
    } catch (IOException e) {
        sLog.error("Unable to save info, reason: " + e.getMessage(), e);
    } finally {
        if (pw != null)
            pw.close();
    }

    try {
        new StudentSectioningXMLSaver(solver)
                .save(new File(new File(cfg.getProperty("General.Output", ".")), "solution.xml"));
    } catch (Exception e) {
        sLog.error("Unable to save solution, reason: " + e.getMessage(), e);
    }

    try {
        new BatchStudentSectioningSaver(solver).save();
    } catch (Exception e) {
        sLog.error("Unable to save solution, reason: " + e.getMessage(), e);
    }
}
From source file:org.fao.geonet.kernel.csw.services.GetDomain.java
public static List<Element> handlePropertyName(String[] propertyNames, ServiceContext context, boolean freq,
        int maxRecords, String cswServiceSpecificConstraint, LuceneConfig luceneConfig) throws Exception {

    List<Element> domainValuesList = null;

    if (Log.isDebugEnabled(Geonet.CSW))
        Log.debug(Geonet.CSW, "Handling property names '" + Arrays.toString(propertyNames)
                + "' with max records of " + maxRecords);

    for (int i = 0; i < propertyNames.length; i++) {

        if (i == 0)
            domainValuesList = new ArrayList<Element>();

        // Initialize list of values element.
        Element listOfValues = null;

        // Generate DomainValues element
        Element domainValues = new Element("DomainValues", Csw.NAMESPACE_CSW);

        // FIXME what should be the type ???
        domainValues.setAttribute("type", "csw:Record");

        String property = propertyNames[i].trim();

        // Set propertyName in any case.
        Element pn = new Element("PropertyName", Csw.NAMESPACE_CSW);
        domainValues.addContent(pn.setText(property));

        GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME);
        SearchManager sm = gc.getSearchmanager();

        IndexAndTaxonomy indexAndTaxonomy = sm.getNewIndexReader(null);
        try {
            GeonetworkMultiReader reader = indexAndTaxonomy.indexReader;
            BooleanQuery groupsQuery = (BooleanQuery) CatalogSearcher.getGroupsQuery(context);
            BooleanQuery query = null;

            // Apply CSW service specific constraint
            if (StringUtils.isNotEmpty(cswServiceSpecificConstraint)) {
                Query constraintQuery = CatalogSearcher
                        .getCswServiceSpecificConstraintQuery(cswServiceSpecificConstraint, luceneConfig);

                query = new BooleanQuery();

                BooleanClause.Occur occur = LuceneUtils.convertRequiredAndProhibitedToOccur(true, false);

                query.add(groupsQuery, occur);
                query.add(constraintQuery, occur);
            } else {
                query = groupsQuery;
            }

            List<Pair<String, Boolean>> sortFields = Collections
                    .singletonList(Pair.read(Geonet.SearchResult.SortBy.RELEVANCE, true));
            Sort sort = LuceneSearcher.makeSort(sortFields, context.getLanguage(), false);
            CachingWrapperFilter filter = null;

            Pair<TopDocs, Element> searchResults = LuceneSearcher.doSearchAndMakeSummary(maxRecords, 0,
                    maxRecords, context.getLanguage(), null, reader, query, filter, sort, null, false, false,
                    false, false // Scoring is useless for GetDomain operation
            );
            TopDocs hits = searchResults.one();

            try {
                // Get mapped lucene field in CSW configuration
                String indexField = CatalogConfiguration.getFieldMapping().get(property.toLowerCase());
                if (indexField != null)
                    property = indexField;

                // Check that the requested parameter exists in the index
                FieldInfos fi = new SlowCompositeReaderWrapper(reader).getFieldInfos();
                if (fi.fieldInfo(property) == null)
                    continue;

                boolean isRange = false;
                if (CatalogConfiguration.getGetRecordsRangeFields().contains(property))
                    isRange = true;

                if (isRange)
                    listOfValues = new Element("RangeOfValues", Csw.NAMESPACE_CSW);
                else
                    listOfValues = new Element("ListOfValues", Csw.NAMESPACE_CSW);

                Set<String> fields = new HashSet<String>();
                fields.add(property);
                fields.add("_isTemplate");

                // parse each document in the index
                String[] fieldValues;
                SortedSet<String> sortedValues = new TreeSet<String>();
                HashMap<String, Integer> duplicateValues = new HashMap<String, Integer>();
                for (int j = 0; j < hits.scoreDocs.length; j++) {
                    DocumentStoredFieldVisitor selector = new DocumentStoredFieldVisitor(fields);
                    reader.document(hits.scoreDocs[j].doc, selector);
                    Document doc = selector.getDocument();

                    // Skip templates and subTemplates
                    String[] isTemplate = doc.getValues("_isTemplate");
                    if (isTemplate[0] != null && !isTemplate[0].equals("n"))
                        continue;

                    // Get doc values for specified property
                    fieldValues = doc.getValues(property);
                    if (fieldValues == null)
                        continue;

                    addtoSortedSet(sortedValues, fieldValues, duplicateValues);
                }

                SummaryComparator valuesComparator = new SummaryComparator(SortOption.FREQUENCY, Type.STRING,
                        context.getLanguage(), null);
                TreeSet<Map.Entry<String, Integer>> sortedValuesFrequency = new TreeSet<Map.Entry<String, Integer>>(
                        valuesComparator);
                sortedValuesFrequency.addAll(duplicateValues.entrySet());

                if (freq)
                    return createValuesByFrequency(sortedValuesFrequency);
                else
                    listOfValues.addContent(createValuesElement(sortedValues, isRange));

            } finally {
                // Not having any children means that the catalog was unable
                // to determine anything about the specified parameter
                if (listOfValues != null && listOfValues.getChildren().size() != 0)
                    domainValues.addContent(listOfValues);

                // Add current DomainValues to the list
                domainValuesList.add(domainValues);
            }
        } finally {
            sm.releaseIndexReader(indexAndTaxonomy);
        }
    }
    return domainValuesList;
}
From source file:net.spfbl.dnsbl.QueryDNSBL.java
private static TreeSet<String> keySet() {
    TreeSet<String> keySet = new TreeSet<String>();
    keySet.addAll(MAP.keySet());
    return keySet;
}
From source file:org.dasein.cloud.util.APITrace.java
static public void report(@Nonnull String prefix) {
    logger.info("");
    if (logger.isInfoEnabled()) {
        synchronized (apiCount) {
            TreeSet<String> keys = new TreeSet<String>();
            keys.addAll(apiCount.keySet());
            logger.info(prefix + "-> API calls: ");
            for (String key : keys) {
                logger.info(prefix + "->\t" + key + " = " + apiCount.get(key));
            }
        }
    }
    if (logger.isDebugEnabled()) {
        synchronized (operationCount) {
            TreeSet<String> keys = new TreeSet<String>();
            keys.addAll(operationCount.keySet());
            logger.debug(prefix + "-> Operation calls:");
            for (String key : keys) {
                logger.debug(prefix + "->\t" + key + " = " + operationCount.get(key));
            }
        }
        synchronized (operationApis) {
            TreeSet<String> keys = new TreeSet<String>();
            keys.addAll(operationApis.keySet());
            logger.debug(prefix + "-> API calls by operation:");
            for (String key : keys) {
                logger.debug(prefix + "->\t" + key + " = " + operationApis.get(key));
            }
        }
    }
    if (logger.isTraceEnabled()) {
        synchronized (operationTrace) {
            TreeSet<String> keys = new TreeSet<String>();
            keys.addAll(operationTrace.keySet());
            logger.trace(prefix + "-> Stack trace:");
            for (String key : keys) {
                Map<String, Object> map = toJSON(operationTrace.get(key));
                logger.trace((new JSONObject(map)).toString());
                logger.trace("");
            }
        }
    }
    logger.info("");
}
From source file:net.spfbl.dnsbl.QueryDNSBL.java
public static TreeSet<ServerDNSBL> getValues() {
    TreeSet<ServerDNSBL> serverSet = new TreeSet<ServerDNSBL>();
    serverSet.addAll(MAP.values());
    return serverSet;
}