List of usage examples for java.util TreeSet iterator
public Iterator<E> iterator()
From source file:org.jboss.dashboard.ui.config.treeNodes.PanelInstancesGroupNode.java
/**
 * Builds the child nodes for this panel-instances group: all panel instances of the
 * current workspace whose group parameter and provider group match this node,
 * sorted alphabetically (case-insensitive) by their localized title.
 *
 * @return a List of child nodes (empty on lookup failure; errors are logged)
 */
protected List listChildren() {
    List children = new ArrayList();
    // Resolve the session language once; it is invariant for the whole listing.
    final String language = SessionManager.getLang();
    try {
        WorkspaceImpl workspace = (WorkspaceImpl) UIServices.lookup().getWorkspacesManager()
                .getWorkspace(getWorkspaceId());
        PanelInstance[] instances = workspace.getPanelInstances();
        // Sort instances by localized title. NOTE(review): instances whose titles
        // compare equal (ignoring case) collapse to one entry in the TreeSet —
        // confirm that duplicate titles cannot occur, or that dropping them is intended.
        TreeSet instancias = new TreeSet(new Comparator() {
            public int compare(Object o1, Object o2) {
                PanelInstance p1 = (PanelInstance) o1;
                PanelInstance p2 = (PanelInstance) o2;
                // Use the captured language instead of re-reading it from the
                // session on every comparison.
                return p1.getTitle(language).compareToIgnoreCase(p2.getTitle(language));
            }
        });
        for (int i = 0; i < instances.length; i++) {
            PanelInstance instance = instances[i];
            String groupName = instance.getParameterValue(PanelInstance.PARAMETER_GROUP, language);
            String provider = instance.getProvider().getGroup();
            // Keep only the instances belonging to this node's group and provider.
            if (getGroupName().equals(groupName) && getProviderId().equals(provider)) {
                instancias.add(instance);
            }
        }
        for (Iterator iterator = instancias.iterator(); iterator.hasNext();) {
            PanelInstance panelInstance = (PanelInstance) iterator.next();
            children.add(getNewInstanceNode(panelInstance));
        }
    } catch (Exception e) {
        log.error("Error: ", e);
    }
    return children;
}
From source file:net.java.sip.communicator.impl.history.HistoryReaderImpl.java
/**
 * Used to limit the files if any starting or ending date exist,
 * so only few files need to be searched.
 *
 * <p>History file names are millisecond timestamps with a 4-character
 * extension ({@code <millis>.xml}). Besides the files whose timestamp lies in
 * [startDate, endDate], the last file starting <em>before</em> startDate is also
 * included, because it may still contain records inside the interval.
 *
 * @param filelist     iterator over history file names ({@code <millis>.xml})
 * @param startDate    lower bound, or null for unbounded
 * @param endDate      upper bound, or null for unbounded
 * @param reverseOrder whether to sort the result descending instead of ascending
 * @return the matching file names, sorted according to {@code reverseOrder}
 */
static Vector<String> filterFilesByDate(Iterator<String> filelist, Date startDate, Date endDate,
        final boolean reverseOrder) {
    // Single ordering used by every return path (the original duplicated it).
    final Comparator<String> order = new Comparator<String>() {
        public int compare(String o1, String o2) {
            return reverseOrder ? o2.compareTo(o1) : o1.compareTo(o2);
        }
    };
    if (startDate == null && endDate == null) {
        // No filtering needed; just return the same list, sorted.
        Vector<String> result = new Vector<String>();
        while (filelist.hasNext()) {
            result.add(filelist.next());
        }
        Collections.sort(result, order);
        return result;
    }
    // First convert all files to long (strip the 4-char ".xml" extension).
    TreeSet<Long> files = new TreeSet<Long>();
    while (filelist.hasNext()) {
        String filename = filelist.next();
        files.add(Long.parseLong(filename.substring(0, filename.length() - 4)));
    }
    // Guard against NoSuchElementException on files.first() below.
    if (files.isEmpty()) {
        return new Vector<String>();
    }
    // Primitive bounds avoid the repeated unboxing of the original Long locals.
    long startLong = (startDate == null) ? Long.MIN_VALUE : startDate.getTime();
    long endLong = (endDate == null) ? Long.MAX_VALUE : endDate.getTime();
    TreeSet<Long> resultAsLong = new TreeSet<Long>();
    // All records inside the interval.
    for (Long f : files) {
        if (startLong <= f && f <= endLong) {
            resultAsLong.add(f);
        }
    }
    // Also include the last file starting before the interval, if any:
    // it may contain records whose dates fall inside the interval.
    if (files.first() <= startLong) {
        SortedSet<Long> setBeforeTheInterval = files.subSet(files.first(), true, startLong, true);
        if (!setBeforeTheInterval.isEmpty()) {
            resultAsLong.add(setBeforeTheInterval.last());
        }
    }
    Vector<String> result = new Vector<String>();
    for (Long item : resultAsLong) {
        result.add(item.toString() + ".xml");
    }
    Collections.sort(result, order);
    return result;
}
From source file:org.commoncrawl.mapred.ec2.parser.EC2ParserTask.java
/**
 * Sets up the parser task: configures segment/log paths (test or production),
 * scans S3 for crawl logs that still need parsing, queues them for processing in
 * batches of LOGS_PER_ITERATION, and finally enqueues one shutdown marker per
 * worker slot.
 *
 * @param conf Hadoop configuration; CONF_PARAM_TEST_MODE selects the test paths
 * @throws Exception on filesystem access or queueing failure
 */
public EC2ParserTask(Configuration conf) throws Exception {
    super(conf);
    if (!conf.getBoolean(CONF_PARAM_TEST_MODE, false)) {
        // Production paths.
        conf.set(VALID_SEGMENTS_PATH_PROPERTY, VALID_SEGMENTS_PATH);
        conf.set(SEGMENT_PATH_PROPERTY, SEGMENTS_PATH);
        conf.set(JOB_LOGS_PATH_PROPERTY, JOB_LOGS_PATH);
        // NOTE(review): "CHECKPOIINTS" typo is in the constant's declared name elsewhere;
        // renaming it is outside this block.
        conf.set(CHECKPOIINTS_PATH_PROPERTY, CHECKPOINTS_PATH);
        // Negative initial permits: releases must accumulate until all
        // MAX_SIMULTANEOUS_JOBS jobs have finished before an acquire succeeds.
        jobThreadSemaphore = new Semaphore(-(MAX_SIMULTANEOUS_JOBS - 1));
    } else {
        // Test-mode paths; a single job at a time.
        conf.set(VALID_SEGMENTS_PATH_PROPERTY, TEST_VALID_SEGMENTS_PATH);
        conf.set(SEGMENT_PATH_PROPERTY, TEST_SEGMENTS_PATH);
        conf.set(JOB_LOGS_PATH_PROPERTY, TEST_JOB_LOGS_PATH);
        jobThreadSemaphore = new Semaphore(0);
        maxSimultaneousJobs = 1;
    }
    FileSystem fs = FileSystem.get(new URI("s3n://aws-publicdatasets"), conf);
    LOG.info(
            "FileSystem is:" + fs.getUri() + " Scanning for candidates at path:" + CRAWL_LOG_INTERMEDIATE_PATH);
    TreeSet<Path> candidateSet = buildCandidateList(fs, new Path(CRAWL_LOG_INTERMEDIATE_PATH));
    LOG.info("Scanning for completed segments");
    List<Path> processedLogs = scanForCompletedSegments(fs, conf);
    LOG.info("Found " + processedLogs.size() + " processed logs");
    // remove processed from candidate set ...
    candidateSet.removeAll(processedLogs);
    // ok we are ready to go ..
    LOG.info("There are: " + candidateSet.size() + " logs in need of parsing");
    while (candidateSet.size() != 0) {
        ImmutableList.Builder<Path> pathBuilder = new ImmutableList.Builder<Path>();
        // Drain up to LOGS_PER_ITERATION candidates per batch. Guava's limited
        // iterator delegates remove() to candidateSet's iterator, so each drained
        // path is also removed from the candidate set.
        Iterator<Path> iterator = Iterators.limit(candidateSet.iterator(), LOGS_PER_ITERATION);
        while (iterator.hasNext()) {
            pathBuilder.add(iterator.next());
            iterator.remove();
        }
        LOG.info("Queueing Parse");
        queue(fs, conf, pathBuilder.build());
        LOG.info("Queued Parse");
        // in test mode, queue only a single segment's worth of data
        if (conf.getBoolean(CONF_PARAM_TEST_MODE, false)) {
            LOG.info("Test Mode - Queueing only a single Item");
            break;
        }
    }
    // queue shutdown items (one per worker slot)
    for (int i = 0; i < maxSimultaneousJobs; ++i) {
        _queue.put(new QueueItem());
    }
}
From source file:net.sourceforge.fenixedu.presentationTier.Action.messaging.ViewSentEmailsDA.java
@EntryPoint public ActionForward viewSentEmails(ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) {//w w w.j av a2 s .c om final String senderParam = request.getParameter("senderId"); if (senderParam != null && !senderParam.isEmpty()) { return viewSentEmails(mapping, request, senderParam); } final User userView = Authenticate.getUser(); final Set<Sender> sendersGroups = new TreeSet<Sender>(Sender.COMPARATOR_BY_FROM_NAME); final TreeSet<ExecutionCourseSender> sendersGroupsCourses = new TreeSet<ExecutionCourseSender>( ExecutionCourseSender.COMPARATOR_BY_EXECUTION_COURSE_SENDER); for (final Sender sender : Bennu.getInstance().getUtilEmailSendersSet()) { boolean allow = sender.getMembers().isMember(userView); boolean isExecutionCourseSender = sender instanceof ExecutionCourseSender; if (allow && !isExecutionCourseSender) { sendersGroups.add(sender); } if (allow && isExecutionCourseSender) { sendersGroupsCourses.add((ExecutionCourseSender) sender); } } if (isSenderUnique(sendersGroups, sendersGroupsCourses)) { if (sendersGroupsCourses.size() == 1) { return viewSentEmails(mapping, request, (sendersGroupsCourses.iterator().next()).getExternalId()); } else { return viewSentEmails(mapping, request, sendersGroups.iterator().next().getExternalId()); } } request.setAttribute("sendersGroups", sendersGroups); request.setAttribute("sendersGroupsCourses", sendersGroupsCourses); final Person person = AccessControl.getPerson(); if (person != null && person.hasRole(RoleType.MANAGER)) { SearchSendersBean searchSendersBean = getRenderedObject("searchSendersBean"); if (searchSendersBean == null) { searchSendersBean = new SearchSendersBean(); } request.setAttribute("searchSendersBean", searchSendersBean); } return mapping.findForward("view.sent.emails"); }
From source file:org.fenixedu.academic.ui.struts.action.messaging.ViewSentEmailsDA.java
/**
 * Entry point for the sent-emails listing. A request-supplied sender id is
 * shown directly; otherwise the senders the current user may use are gathered,
 * split into execution-course senders and the rest. When only one sender is
 * accessible it is opened straight away; otherwise the chooser view is
 * rendered (managers also receive a sender-search bean).
 */
@EntryPoint
public ActionForward viewSentEmails(ActionMapping mapping, ActionForm actionForm, HttpServletRequest request,
        HttpServletResponse response) {
    final String senderParam = request.getParameter("senderId");
    if (senderParam != null && !senderParam.isEmpty()) {
        // An explicit sender was requested - show it directly.
        return viewSentEmails(mapping, request, senderParam);
    }

    final User currentUser = Authenticate.getUser();
    final Set<Sender> plainSenders = new TreeSet<Sender>(Sender.COMPARATOR_BY_FROM_NAME);
    final TreeSet<ExecutionCourseSender> courseSenders = new TreeSet<ExecutionCourseSender>(
            ExecutionCourseSender.COMPARATOR_BY_EXECUTION_COURSE_SENDER);
    // Classify every sender the current user is a member of.
    for (final Sender sender : Bennu.getInstance().getUtilEmailSendersSet()) {
        if (!sender.getMembers().isMember(currentUser)) {
            continue;
        }
        if (sender instanceof ExecutionCourseSender) {
            courseSenders.add((ExecutionCourseSender) sender);
        } else {
            plainSenders.add(sender);
        }
    }

    if (isSenderUnique(plainSenders, courseSenders)) {
        // Exactly one accessible sender: no need for the chooser page.
        final String uniqueSenderId = courseSenders.size() == 1 ? courseSenders.iterator().next()
                .getExternalId() : plainSenders.iterator().next().getExternalId();
        return viewSentEmails(mapping, request, uniqueSenderId);
    }

    request.setAttribute("sendersGroups", plainSenders);
    request.setAttribute("sendersGroupsCourses", courseSenders);

    // Managers additionally get the sender-search form.
    final Person person = AccessControl.getPerson();
    if (person != null && RoleType.MANAGER.isMember(person.getUser())) {
        SearchSendersBean searchSendersBean = getRenderedObject("searchSendersBean");
        if (searchSendersBean == null) {
            searchSendersBean = new SearchSendersBean();
        }
        request.setAttribute("searchSendersBean", searchSendersBean);
    }
    return mapping.findForward("view.sent.emails");
}
From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java
/**
 * Writes aggregate optimisation-constraint statistics (ISARHP/ISARCEP) to
 * OptimisationConstraints_SARs.csv in the given output directory.
 *
 * For every LAD code, reads that LAD's population.csv (if present), accumulates
 * SAR counts per Output Area (OA), and writes the per-OA aggregates in OA-sorted
 * order. Each CSV line is "OA,type,recordId" where type is "HP" or "CEP".
 *
 * @param a_OutputDir_String output directory holding per-LAD result subdirectories
 * @throws Exception on any I/O or lookup failure
 */
public void writeAggregateStatisticsForOptimisationConstraints_ISARHP_ISARCEP(String a_OutputDir_String)
        throws Exception {
    HashMap a_ID_RecordID_HashMap = _ISARDataHandler.get_ID_RecordID_HashMap();
    File optimisationConstraints_SARs = new File(a_OutputDir_String, "OptimisationConstraints_SARs.csv");
    FileOutputStream a_FileOutputStream = new FileOutputStream(optimisationConstraints_SARs);
    OutputDataHandler_OptimisationConstraints.writeHSARHP_ISARCEPHeader(a_FileOutputStream);
    a_FileOutputStream.flush();
    Object[] fitnessCounts;
    // Counts being accumulated for the OA currently being read.
    HashMap<String, Integer> a_SARCounts = null;
    TreeSet<String> a_LADCodes_TreeSet = _CASDataHandler.getLADCodes_TreeSet();
    String s2;
    String s1;
    Iterator<String> a_Iterator_String = a_LADCodes_TreeSet.iterator();
    while (a_Iterator_String.hasNext()) {
        // Need to reorder data for each LAD as OAs not necessarily returned
        // in any order and an ordered result is wanted
        TreeMap<String, HashMap<String, Integer>> resultsForLAD = new TreeMap<String, HashMap<String, Integer>>();
        boolean setPrevious_OA_String = true;
        s1 = a_Iterator_String.next();
        // First three characters of the LAD code select the subdirectory.
        s2 = s1.substring(0, 3);
        File resultsFile = new File(a_OutputDir_String + s2 + "/" + s1 + "/population.csv");
        // A few results are missing
        if (resultsFile.exists()) {
            System.out.println(resultsFile.toString() + " exists");
            String previous_OA_String = "";
            // NOTE(review): this reader is never closed - resource leak; confirm
            // before relying on this method in long-running processes.
            BufferedReader aBufferedReader = new BufferedReader(
                    new InputStreamReader(new FileInputStream(resultsFile)));
            StreamTokenizer aStreamTokenizer = new StreamTokenizer(aBufferedReader);
            Generic_StaticIO.setStreamTokenizerSyntax1(aStreamTokenizer);
            String line = "";
            int tokenType = aStreamTokenizer.nextToken();
            // Lines arrive as TT_WORD tokens; TT_EOL marks the end of a line,
            // at which point `line` holds the full CSV record to process.
            while (tokenType != StreamTokenizer.TT_EOF) {
                switch (tokenType) {
                case StreamTokenizer.TT_EOL:
                    String[] lineFields = line.split(",");
                    String a_OA_String = lineFields[0];
                    if (previous_OA_String.equalsIgnoreCase(a_OA_String)) {
                        // Same OA as the previous record: keep accumulating.
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            // Household population record.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsHP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                        } else {
                            // Communal establishment population record.
                            // From the id of the ISARDataRecord get the ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                        }
                    } else {
                        // New OA encountered: store the finished OA's counts
                        // (unless this is the very first OA of the file).
                        if (setPrevious_OA_String) {
                            previous_OA_String = a_OA_String;
                            setPrevious_OA_String = false;
                        } else {
                            // Store
                            resultsForLAD.put(previous_OA_String, a_SARCounts);
                        }
                        // Initialise/Re-initialise counts for the new OA.
                        CASDataRecord a_CASDataRecord = (CASDataRecord) _CASDataHandler
                                .getDataRecord(a_OA_String);
                        fitnessCounts = GeneticAlgorithm_ISARHP_ISARCEP.getFitnessCounts(a_CASDataRecord);
                        a_SARCounts = (HashMap<String, Integer>) fitnessCounts[1];
                        // Start a new aggregation with this record.
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsHP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                        } else {
                            // From the id of the ISARDataRecord get the ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                        }
                    }
                    previous_OA_String = a_OA_String;
                    break;
                case StreamTokenizer.TT_WORD:
                    line = aStreamTokenizer.sval;
                    break;
                }
                tokenType = aStreamTokenizer.nextToken();
            }
        } else {
            System.out.println(resultsFile.toString() + " !exists");
        }
        // NOTE(review): the counts of the file's final OA are never put into
        // resultsForLAD (they are only stored when the *next* OA starts), so the
        // last OA of each LAD appears to be dropped - confirm whether intended.
        Iterator<String> string_Iterator = resultsForLAD.keySet().iterator();
        while (string_Iterator.hasNext()) {
            String oa_Code = string_Iterator.next();
            a_SARCounts = resultsForLAD.get(oa_Code);
            OutputDataHandler_OptimisationConstraints.writeISARHP_ISARCEP(a_SARCounts, oa_Code,
                    a_FileOutputStream);
        }
    }
    a_FileOutputStream.close();
}
From source file:org.rhwlab.BHC.BHCTree.java
public NucleusLogNode[] nextTreeCut(TreeSet<NucleusLogNode> previous) { NucleusLogNode[] ret = new NucleusLogNode[3]; // find the minimum probability node that can be split Iterator<NucleusLogNode> iter = previous.iterator(); while (iter.hasNext()) { NucleusLogNode node = iter.next(); // search for the lowest probability node with children - not a leaf if (node.getLeft() != null && node.getRight() != null) { ret[0] = (NucleusLogNode) node.getLeft(); // split the node wtih the lowest probability ret[1] = (NucleusLogNode) node.getRight(); ret[2] = node;/* w w w . j a v a2 s .co m*/ return ret; } } return null; }
From source file:info.magnolia.cms.core.DefaultContentTest.java
@Test public void testNameFilteringWorksForBothBinaryAndNonBinaryProperties() throws Exception { String contentProperties = StringUtils.join(Arrays.asList("/somepage/mypage@type=mgnl:content", "/somepage/mypage/paragraphs@type=mgnl:contentNode", "/somepage/mypage/paragraphs/0@type=mgnl:contentNode", "/somepage/mypage/paragraphs/0@type=mgnl:contentNode", // 2 regular props "/somepage/mypage/paragraphs/0/attention=booyah", "/somepage/mypage/paragraphs/0/imaginary=date:2009-10-14T08:59:01.227-04:00", // 3 binaries "/somepage/mypage/paragraphs/0/attachment1@type=mgnl:resource", "/somepage/mypage/paragraphs/0/attachment1.fileName=hello", "/somepage/mypage/paragraphs/0/attachment1.extension=gif", // being a binary node, magnolia knows to store data as jcr:data w/o need to be explicitly told so "/somepage/mypage/paragraphs/0/attachment1=binary:X", "/somepage/mypage/paragraphs/0/attachment1.jcr\\:mimeType=image/gif", "/somepage/mypage/paragraphs/0/attachment1.jcr\\:lastModified=date:2009-10-14T08:59:01.227-04:00", "/somepage/mypage/paragraphs/0/attachment2@type=mgnl:resource", "/somepage/mypage/paragraphs/0/attachment2.fileName=test", "/somepage/mypage/paragraphs/0/attachment2.extension=jpeg", "/somepage/mypage/paragraphs/0/attachment2=binary:X", "/somepage/mypage/paragraphs/0/attachment2.jcr\\:mimeType=image/jpeg", "/somepage/mypage/paragraphs/0/attachment2.jcr\\:lastModified=date:2009-10-14T08:59:01.227-04:00", "/somepage/mypage/paragraphs/0/image3@type=mgnl:resource", "/somepage/mypage/paragraphs/0/image3.fileName=third", "/somepage/mypage/paragraphs/0/image3.extension=png", "/somepage/mypage/paragraphs/0/image3=binary:X", "/somepage/mypage/paragraphs/0/image3.jcr\\:mimeType=image/png", "/somepage/mypage/paragraphs/0/image3.jcr\\:lastModified=date:2009-10-14T08:59:01.227-04:00", // and more which should not match "/somepage/mypage/paragraphs/0/foo=bar", "/somepage/mypage/paragraphs/0/mybool=boolean:true", "/somepage/mypage/paragraphs/0/rand@type=mgnl:resource", 
"/somepage/mypage/paragraphs/0/rand.fileName=randdddd", "/somepage/mypage/paragraphs/0/rand.extension=png", "/somepage/mypage/paragraphs/0/rand=binary:X", "/somepage/mypage/paragraphs/0/rand.jcr\\:mimeType=image/png", "/somepage/mypage/paragraphs/0/rand.jcr\\:lastModified=date:2009-10-14T08:59:01.227-04:00"), "\n"); final HierarchyManager hm = MgnlContext.getHierarchyManager(RepositoryConstants.WEBSITE); new PropertiesImportExport().createContent(hm.getRoot(), IOUtils.toInputStream(contentProperties)); hm.save();// w w w . j a v a 2s . c o m final Content content = hm.getContent("/somepage/mypage/paragraphs/0"); final Collection<NodeData> props = content.getNodeDataCollection("att*|ima*"); assertEquals(5, props.size()); // sort by name final TreeSet<NodeData> sorted = new TreeSet<NodeData>(new Comparator<NodeData>() { @Override public int compare(NodeData o1, NodeData o2) { return o1.getName().compareTo(o2.getName()); } }); sorted.addAll(props); // sanity check - just recheck we still have 5 elements assertEquals(5, sorted.size()); final Iterator<NodeData> it = sorted.iterator(); final NodeData a = it.next(); final NodeData b = it.next(); final NodeData c = it.next(); final NodeData d = it.next(); final NodeData e = it.next(); assertEquals("attachment1", a.getName()); assertEquals(PropertyType.BINARY, a.getType()); assertEquals("attachment2", b.getName()); assertEquals(PropertyType.BINARY, b.getType()); assertEquals("image3", d.getName()); assertEquals(PropertyType.BINARY, d.getType()); assertEquals("image3", d.getName()); assertEquals(PropertyType.BINARY, d.getType()); assertEquals("attention", c.getName()); assertEquals(PropertyType.STRING, c.getType()); assertEquals("booyah", c.getString()); assertEquals("imaginary", e.getName()); assertEquals(PropertyType.DATE, e.getType()); assertEquals(true, e.getDate().before(Calendar.getInstance())); }
From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java
/**
 * Writes aggregate optimisation-constraint statistics (HSARHP/ISARCEP) to
 * OptimisationConstraints_SARs.csv in the given output directory.
 *
 * For every LAD code, reads that LAD's population.csv (if present), accumulates
 * SAR counts per Output Area (OA) - expanding "HP" records into whole households
 * of HSARDataRecords - and writes the per-OA aggregates in OA-sorted order.
 * Each CSV line is "OA,type,id" where type is "HP" or "CEP".
 *
 * @param a_OutputDir_String output directory holding per-LAD result subdirectories
 * @throws Exception on any I/O or lookup failure
 */
public void writeAggregateStatisticsForOptimisationConstraints_HSARHP_ISARCEP(String a_OutputDir_String)
        throws Exception {
    HashMap a_HID_HSARDataRecordVector_HashMap = _HSARDataHandler.get_HID_HSARDataRecordVector_HashMap();
    HashMap a_ID_RecordID_HashMap = _ISARDataHandler.get_ID_RecordID_HashMap();
    File optimisationConstraints_SARs = new File(a_OutputDir_String, "OptimisationConstraints_SARs.csv");
    FileOutputStream a_FileOutputStream = new FileOutputStream(optimisationConstraints_SARs);
    OutputDataHandler_OptimisationConstraints.writeHSARHP_ISARCEPHeader(a_FileOutputStream);
    a_FileOutputStream.flush();
    // Counts being accumulated for the OA currently being read.
    HashMap<String, Integer> a_SARCounts = null;
    CASDataRecord a_CASDataRecord;
    TreeSet<String> a_LADCodes_TreeSet = _CASDataHandler.getLADCodes_TreeSet();
    String s2;
    String s1;
    Iterator<String> a_Iterator_String = a_LADCodes_TreeSet.iterator();
    while (a_Iterator_String.hasNext()) {
        // Need to reorder data for each LAD as OAs not necessarily returned
        // in any order and an ordered result is wanted
        TreeMap<String, HashMap<String, Integer>> resultsForLAD = new TreeMap<String, HashMap<String, Integer>>();
        boolean setPrevious_OA_String = true;
        s1 = a_Iterator_String.next();
        // First three characters of the LAD code select the subdirectory.
        s2 = s1.substring(0, 3);
        File resultsFile = new File(a_OutputDir_String + s2 + "/" + s1 + "/population.csv");
        // A few results are missing
        if (resultsFile.exists()) {
            System.out.println(resultsFile.toString() + " exists");
            String previous_OA_String = "";
            // NOTE(review): this reader is never closed - resource leak; confirm
            // before relying on this method in long-running processes.
            BufferedReader aBufferedReader = new BufferedReader(
                    new InputStreamReader(new FileInputStream(resultsFile)));
            StreamTokenizer aStreamTokenizer = new StreamTokenizer(aBufferedReader);
            Generic_StaticIO.setStreamTokenizerSyntax1(aStreamTokenizer);
            String line = "";
            int tokenType = aStreamTokenizer.nextToken();
            // Lines arrive as TT_WORD tokens; TT_EOL marks the end of a line,
            // at which point `line` holds the full CSV record to process.
            while (tokenType != StreamTokenizer.TT_EOF) {
                switch (tokenType) {
                case StreamTokenizer.TT_EOL:
                    String[] lineFields = line.split(",");
                    String a_OA_String = lineFields[0];
                    if (previous_OA_String.equalsIgnoreCase(a_OA_String)) {
                        // Same OA as the previous record: keep accumulating.
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            // From the id of a household get a Vector
                            // of HSARDataRecords
                            Vector household = (Vector) a_HID_HSARDataRecordVector_HashMap
                                    .get(new Integer(lineFields[2]));
                            HSARDataRecord a_HSARDataRecord;
                            for (int i = 0; i < household.size(); i++) {
                                a_HSARDataRecord = (HSARDataRecord) household.elementAt(i);
                                GeneticAlgorithm_HSARHP_ISARCEP.addToCounts(a_HSARDataRecord, a_SARCounts,
                                        _Random);
                            }
                        } else {
                            // From the id of the ISARDataRecord get the ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_HSARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                        }
                    } else {
                        // New OA encountered: store the finished OA's counts
                        // (unless this is the very first OA of the file).
                        if (setPrevious_OA_String) {
                            previous_OA_String = a_OA_String;
                            setPrevious_OA_String = false;
                        } else {
                            // Store
                            resultsForLAD.put(previous_OA_String, a_SARCounts);
                        }
                        // Initialise/Re-initialise counts for the new OA.
                        a_CASDataRecord = (CASDataRecord) _CASDataHandler.getDataRecord(a_OA_String);
                        Object[] fitnessCounts = GeneticAlgorithm_HSARHP_ISARCEP
                                .getFitnessCounts(a_CASDataRecord);
                        a_SARCounts = (HashMap<String, Integer>) fitnessCounts[1];
                        // Start a new aggregation with this record.
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            // From the id of a household get a Vector
                            // of HSARDataRecords
                            Vector household = (Vector) a_HID_HSARDataRecordVector_HashMap
                                    .get(new Integer(lineFields[2]));
                            HSARDataRecord a_HSARDataRecord;
                            for (int i = 0; i < household.size(); i++) {
                                a_HSARDataRecord = (HSARDataRecord) household.elementAt(i);
                                GeneticAlgorithm_HSARHP_ISARCEP.addToCounts(a_HSARDataRecord, a_SARCounts,
                                        _Random);
                            }
                        } else {
                            // From the id of the ISARDataRecord get the ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_HSARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                        }
                    }
                    previous_OA_String = a_OA_String;
                    break;
                case StreamTokenizer.TT_WORD:
                    line = aStreamTokenizer.sval;
                    break;
                }
                tokenType = aStreamTokenizer.nextToken();
            }
        } else {
            System.out.println(resultsFile.toString() + " !exists");
        }
        // NOTE(review): the counts of the file's final OA are never put into
        // resultsForLAD (they are only stored when the *next* OA starts), so the
        // last OA of each LAD appears to be dropped - confirm whether intended.
        Iterator<String> string_Iterator = resultsForLAD.keySet().iterator();
        while (string_Iterator.hasNext()) {
            String oa_Code = string_Iterator.next();
            OutputDataHandler_OptimisationConstraints.writeHSARHP_ISARCEP(resultsForLAD.get(oa_Code), oa_Code,
                    a_FileOutputStream);
        }
    }
    a_FileOutputStream.close();
}
From source file:jails.http.MediaType.java
/**
 * Compares this {@link MediaType} to another alphabetically: first by type,
 * then subtype, then parameter count, then parameter names and values
 * (names case-insensitively, values case-sensitively).
 * @param other media type to compare to
 * @return a negative integer, zero, or a positive integer as this media type
 *         is less than, equal to, or greater than the other
 * @see #sortBySpecificity(List)
 */
public int compareTo(MediaType other) {
    int comp = this.type.compareToIgnoreCase(other.type);
    if (comp != 0) {
        return comp;
    }
    comp = this.subtype.compareToIgnoreCase(other.subtype);
    if (comp != 0) {
        return comp;
    }
    // Integer.compare instead of subtraction: clearer and overflow-safe.
    comp = Integer.compare(this.parameters.size(), other.parameters.size());
    if (comp != 0) {
        return comp;
    }
    // Case-insensitive ordering of the attribute names of both sides; the
    // sizes are equal at this point, so the iterators advance in lock-step.
    TreeSet<String> thisAttributes = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
    thisAttributes.addAll(this.parameters.keySet());
    TreeSet<String> otherAttributes = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
    otherAttributes.addAll(other.parameters.keySet());
    Iterator<String> thisAttributesIterator = thisAttributes.iterator();
    Iterator<String> otherAttributesIterator = otherAttributes.iterator();
    while (thisAttributesIterator.hasNext()) {
        String thisAttribute = thisAttributesIterator.next();
        String otherAttribute = otherAttributesIterator.next();
        comp = thisAttribute.compareToIgnoreCase(otherAttribute);
        if (comp != 0) {
            return comp;
        }
        String thisValue = this.parameters.get(thisAttribute);
        String otherValue = other.parameters.get(otherAttribute);
        // Treat a missing value as "" on BOTH sides (the original only guarded
        // otherValue, so a null thisValue would have thrown NPE).
        if (thisValue == null) {
            thisValue = "";
        }
        if (otherValue == null) {
            otherValue = "";
        }
        comp = thisValue.compareTo(otherValue);
        if (comp != 0) {
            return comp;
        }
    }
    return 0;
}