List of usage examples for java.util.TreeMap.containsKey
public boolean containsKey(Object key)
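Before the project examples, here is a minimal, self-contained sketch of the call (not taken from any of the source files below; the class name and values are illustrative only). containsKey reports whether the map holds a mapping for the given key, locating it via the map's ordering (natural ordering or the supplied Comparator) rather than equals/hashCode. The fetch-or-create pattern at the end is the one that recurs throughout the examples that follow.

import java.util.TreeMap;

public class ContainsKeyExample {
    public static void main(String[] args) {
        TreeMap<String, Integer> counts = new TreeMap<>();
        counts.put("apple", 3);

        // TreeMap locates keys by comparison (compareTo or the Comparator), not hashing
        System.out.println(counts.containsKey("apple"));  // true
        System.out.println(counts.containsKey("banana")); // false

        // fetch-or-create pattern: initialise the entry only if the key is absent
        if (!counts.containsKey("banana")) {
            counts.put("banana", 0);
        }
        counts.put("banana", counts.get("banana") + 1);
    }
}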
From source file:org.apache.hadoop.tools.HadoopArchives.java
/**
 * This method writes all the valid top level directories
 * into the srcWriter for indexing. This method is a little
 * tricky. example-
 * for an input with parent path /home/user/ and sources
 * as /home/user/source/dir1, /home/user/source/dir2 - this
 * will output <source, dir, dir1, dir2> (dir means that source is a dir
 * with dir1 and dir2 as children) and <source/dir1, file, null>
 * and <source/dir2, file, null>
 * @param srcWriter the sequence file writer to write the
 * directories to
 * @param paths the source paths provided by the user. They
 * are glob free and have full path (not relative paths)
 * @param parentPath the parent path that you want the archives
 * to be relative to. example - /home/user/dir1 can be archived with
 * parent as /home or /home/user.
 * @throws IOException
 */
private void writeTopLevelDirs(SequenceFile.Writer srcWriter, List<Path> paths, Path parentPath)
        throws IOException {
    // add all the directories
    List<Path> justDirs = new ArrayList<Path>();
    for (Path p : paths) {
        if (!p.getFileSystem(getConf()).isFile(p)) {
            justDirs.add(new Path(p.toUri().getPath()));
        } else {
            justDirs.add(new Path(p.getParent().toUri().getPath()));
        }
    }

    /* find all the common parents of paths that are valid archive
     * paths. The below is done so that we do not add a common path
     * twice and also we need to only add valid children of a path
     * that are specified by the user.
     */
    TreeMap<String, HashSet<String>> allpaths = new TreeMap<String, HashSet<String>>();

    /* the largest depth of paths. the max number of times
     * we need to iterate
     */
    Path deepest = largestDepth(paths);
    Path root = new Path(Path.SEPARATOR);
    for (int i = parentPath.depth(); i < deepest.depth(); i++) {
        List<Path> parents = new ArrayList<Path>();
        for (Path p : justDirs) {
            if (p.compareTo(root) == 0) {
                // do nothing
            } else {
                Path parent = p.getParent();
                if (null != parent) {
                    if (allpaths.containsKey(parent.toString())) {
                        HashSet<String> children = allpaths.get(parent.toString());
                        children.add(p.getName());
                    } else {
                        HashSet<String> children = new HashSet<String>();
                        children.add(p.getName());
                        allpaths.put(parent.toString(), children);
                    }
                    parents.add(parent);
                }
            }
        }
        justDirs = parents;
    }

    Set<Map.Entry<String, HashSet<String>>> keyVals = allpaths.entrySet();
    for (Map.Entry<String, HashSet<String>> entry : keyVals) {
        final Path relPath = relPathToRoot(new Path(entry.getKey()), parentPath);
        if (relPath != null) {
            final String[] children = new String[entry.getValue().size()];
            int i = 0;
            for (String child : entry.getValue()) {
                children[i++] = child;
            }
            append(srcWriter, 0L, relPath.toString(), children);
        }
    }
}
From source file:com.sfs.whichdoctor.dao.VoteDAOImpl.java
/**
 * This method loads the votes cast by a particular group of people. It is
 * built on top of the items functionality, the significant difference being
 * that only one vote can be cast by each person in the group.
 *
 * @param groupGUID the group guid
 *
 * @return the tree map< integer, vote bean>
 *
 * @throws WhichDoctorDaoException the which doctor dao exception
 */
public final TreeMap<Integer, VoteBean> load(final int groupGUID) throws WhichDoctorDaoException {

    if (groupGUID == 0) {
        throw new WhichDoctorDaoException("Sorry a group guid greater than 0 is required");
    }
    int year = 0;

    /* Load the group to get its year */
    try {
        GroupBean group = this.groupDAO.loadGUID(groupGUID);
        year = group.getYear();
    } catch (Exception e) {
        dataLogger.error("Error loading group associated with votes: " + e.getMessage());
    }

    TreeMap<Integer, Integer> eligibleVotes = new TreeMap<Integer, Integer>();
    TreeMap<Integer, VoteBean> votesCast = new TreeMap<Integer, VoteBean>();

    try {
        eligibleVotes = loadEligible(groupGUID);
    } catch (Exception e) {
        throw new WhichDoctorDaoException("Error loading eligible votes: " + e.getMessage());
    }

    // Load the vote items for the submitted group GUID
    TreeMap<String, ItemBean> items = new TreeMap<String, ItemBean>();
    try {
        items = this.itemDAO.load(groupGUID, true, "Vote", "Votes");
    } catch (Exception e) {
        dataLogger.error("Error loading votes cast: " + e.getMessage());
    }

    if (items != null) {
        for (ItemBean item : items.values()) {
            // Get the unique vote id for each person
            // Add each vote to the votesCast map
            VoteBean vote = new VoteBean();
            vote.setVoteNumber(PersonBean.getVoteNumber(item.getObject2GUID(), year));
            vote.setGroupGUID(item.getObject1GUID());

            if (eligibleVotes.containsKey(vote.getVoteNumber())) {
                // Set the basic details related to this vote and add it to
                // the treemap
                vote.setId(item.getId());
                vote.setGUID(item.getGUID());
                vote.setCandidateGUID(item.getWeighting());
                vote.setPermission(item.getPermission());
                vote.setActive(item.getActive());
                vote.setCreatedBy(item.getCreatedBy());
                vote.setCreatedDate(item.getCreatedDate());
                vote.setCreatedUser(item.getCreatedUser());
                vote.setModifiedBy(item.getModifiedBy());
                vote.setModifiedDate(item.getModifiedDate());
                vote.setModifiedUser(item.getModifiedUser());

                // Add this to the vote treemap if this person is allowed to
                // vote
                votesCast.put(vote.getVoteNumber(), vote);
            }
        }
    }
    return votesCast;
}
From source file:ubic.gemma.analysis.report.WhatsNewServiceImpl.java
/**
 * Give breakdown by taxon. "Private" experiments are not included.
 *
 * @param ees
 * @return
 */
private Map<Taxon, Collection<Long>> getExpressionExperimentIdsByTaxon(Collection<ExpressionExperiment> ees) {

    /*
     * Sort taxa by name.
     */
    TreeMap<Taxon, Collection<Long>> eesPerTaxon = new TreeMap<Taxon, Collection<Long>>(
            new Comparator<Taxon>() {
                @Override
                public int compare(Taxon o1, Taxon o2) {
                    if (o1 == null) {
                        return 1;
                    } else if (o2 == null) {
                        return -1;
                    } else {
                        return o1.getScientificName().compareTo(o2.getScientificName());
                    }
                }
            });

    ExpressionExperiment ee = null;
    Taxon t = null;
    Collection<Long> ids;
    for (Iterator<ExpressionExperiment> it = ees.iterator(); it.hasNext();) {
        ee = it.next();

        if (securityService.isPrivate(ee)) {
            continue;
        }

        t = expressionExperimentService.getTaxon(ee);

        if (t != null) {
            if (eesPerTaxon.containsKey(t)) {
                ids = eesPerTaxon.get(t);
            } else {
                ids = new ArrayList<Long>();
            }
            ids.add(ee.getId());
            eesPerTaxon.put(t, ids);
        }
    }
    return eesPerTaxon;
}
From source file:com.sfs.whichdoctor.dao.RelationshipDAOImpl.java
/**
 * Returns a map of relationship names ordered by division -> type -> hierarchy.
 *
 * @param relationshipClass - the relationship class to search on
 * @param maximumHierarchy - the largest level of hierarchy to return
 * @return - A map of Division strings with a corresponding Map of supervisor
 *           types and related hierarchies.
 * @throws WhichDoctorDaoException the which doctor dao exception
 */
public final TreeMap<String, TreeMap<String, Integer>> loadRelationships(final String relationshipClass,
        final int maximumHierarchy) throws WhichDoctorDaoException {

    dataLogger.info("Loading map of '" + relationshipClass + "' relationships types");

    TreeMap<String, TreeMap<String, Integer>> supervisorMap = new TreeMap<String, TreeMap<String, Integer>>();

    int max = maximumHierarchy + 1;

    Collection<RelationshipBean> relationships = loadSupervisors(relationshipClass);
    for (RelationshipBean rel : relationships) {
        if (rel.getHierarchy() > 0 && rel.getHierarchy() < max) {
            TreeMap<String, Integer> map = new TreeMap<String, Integer>();
            if (supervisorMap.containsKey(rel.getDivision())) {
                map = supervisorMap.get(rel.getDivision());
            }
            map.put(rel.getRelationshipType(), rel.getHierarchy());
            supervisorMap.put(rel.getDivision(), map);
        }
    }
    return supervisorMap;
}
From source file:com.sfs.whichdoctor.dao.WhichDoctorDAOImpl.java
/**
 * Load sibling beans.
 *
 * @param guid the guid
 * @param type the type
 *
 * @return the tree map< integer, collection< which doctor bean>>
 *
 * @throws WhichDoctorDaoException the whichdoctor dao exception
 */
@SuppressWarnings("unchecked")
private TreeMap<Integer, Collection<WhichDoctorBean>> loadSiblingBeans(final int guid, final String type)
        throws WhichDoctorDaoException {

    dataLogger.info("Sibling history of GUID: " + guid + " requested");

    String loadSQL = getSQL().getValue("whichdoctor/load/memo");
    if (type.compareToIgnoreCase("membership") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/membership");
    }
    if (type.compareToIgnoreCase("address") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/address");
    }
    if (type.compareToIgnoreCase("phone") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/phone");
    }
    if (type.compareToIgnoreCase("email") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/email");
    }
    if (type.compareToIgnoreCase("specialty") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/specialty");
    }
    if (type.compareToIgnoreCase("workshop") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/workshop");
    }
    if (type.compareToIgnoreCase("rotation") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/rotationSibling");
    }
    if (type.compareToIgnoreCase("project") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/project");
    }
    if (type.compareToIgnoreCase("exam") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/exam");
    }
    if (type.compareToIgnoreCase("qualification") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/qualification");
    }
    if (type.compareToIgnoreCase("accreditation") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/accreditation");
    }
    if (type.compareToIgnoreCase("assessment") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/assessment");
    }
    if (type.compareToIgnoreCase("report") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/report");
    }
    if (type.compareToIgnoreCase("expenseclaim") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/expenseClaim");
    }
    if (type.compareToIgnoreCase("payment") == 0) {
        loadSQL = getSQL().getValue("whichdoctor/load/payment");
    }

    TreeMap<Integer, Collection<WhichDoctorBean>> siblings =
            new TreeMap<Integer, Collection<WhichDoctorBean>>();

    try {
        Collection<WhichDoctorBean> siblingCollection = this.getJdbcTemplateReader().query(loadSQL,
                new Object[] { guid }, new RowMapper() {
                    public Object mapRow(final ResultSet rs, final int rowNum) throws SQLException {
                        WhichDoctorBean whichdoctorBean = loadHistory(rs);
                        whichdoctorBean.setObjectType(type);
                        return whichdoctorBean;
                    }
                });

        for (WhichDoctorBean whichdoctorBean : siblingCollection) {
            Collection<WhichDoctorBean> history = new ArrayList<WhichDoctorBean>();
            if (siblings.containsKey(whichdoctorBean.getGUID())) {
                history = siblings.get(whichdoctorBean.getGUID());
            }
            history.add(whichdoctorBean);
            siblings.put(whichdoctorBean.getGUID(), history);
        }
    } catch (IncorrectResultSizeDataAccessException ie) {
        dataLogger.debug("No results found for this search: " + ie.getMessage());
    }
    return siblings;
}
From source file:annis.libgui.AnnisBaseUI.java
protected Map<String, InstanceConfig> loadInstanceConfig() {
    TreeMap<String, InstanceConfig> result = new TreeMap<String, InstanceConfig>();

    // get a list of all directories that contain instance information
    List<File> locations = getAllConfigLocations("instances");
    for (File root : locations) {
        if (root.isDirectory()) {
            // get all sub-files ending on ".json"
            File[] instanceFiles = root.listFiles((FilenameFilter) new SuffixFileFilter(".json"));
            for (File i : instanceFiles) {
                if (i.isFile() && i.canRead()) {
                    try {
                        InstanceConfig config = getJsonMapper().readValue(i, InstanceConfig.class);
                        String name = StringUtils.removeEnd(i.getName(), ".json");
                        config.setInstanceName(name);
                        result.put(name, config);
                    } catch (IOException ex) {
                        log.warn("could not parse instance config: " + ex.getMessage());
                    }
                }
            }
        }
    }

    // always provide a default instance
    if (!result.containsKey("default")) {
        InstanceConfig cfgDefault = new InstanceConfig();
        cfgDefault.setInstanceDisplayName("ANNIS");
        result.put("default", cfgDefault);
    }

    return result;
}
From source file:net.tsquery.LastEndpoint.java
@SuppressWarnings("unchecked") private JSONObject PlotToStandardJSON(Plot plot, long tsFrom, long tsTo, int topN) { final JSONObject plotObject = new JSONObject(); JSONArray seriesArray = new JSONArray(); final TreeMap<Double, JSONObject> weightMap = new TreeMap<>(Collections.reverseOrder()); for (DataPoints dataPoints : plot.getDataPoints()) { double weight = 0; JSONArray dataArray = new JSONArray(); StringBuilder nameBuilder = new StringBuilder(); nameBuilder.append(dataPoints.metricName()).append(": "); Map<String, String> tags = dataPoints.getTags(); for (String s : tags.keySet()) { nameBuilder.append(String.format("%s=%s, ", s, tags.get(s))); }//from w w w . ja v a2s .co m nameBuilder.setLength(nameBuilder.length() - 2); JSONArray values = null; long latestTimestamp = 0; for (DataPoint point : dataPoints) { long timestamp = point.timestamp(); if (!(timestamp < tsFrom || timestamp > tsTo) && timestamp > latestTimestamp) { latestTimestamp = timestamp; double dpValue = getValue(point); values = new JSONArray(); values.add(timestamp * 1000); values.add(dpValue); } } dataArray.add(values); JSONObject series = new JSONObject(); series.put("name", nameBuilder.toString()); series.put("data", dataArray); while (weightMap.containsKey(weight)) weight -= 0.00000001; weightMap.put(weight, series); } int counter = 0; for (Map.Entry<Double, JSONObject> entry : weightMap.entrySet()) { seriesArray.add(entry.getValue()); ++counter; if ((topN > 0) && (counter >= topN)) break; } plotObject.put("plot", seriesArray); return plotObject; }
From source file:net.tsquery.DataEndpoint.java
@SuppressWarnings("unchecked") private JSONObject PlotToStandardJSON(Plot plot, long tsFrom, long tsTo, int topN) { final JSONObject plotObject = new JSONObject(); JSONArray seriesArray = new JSONArray(); final TreeMap<Double, JSONObject> weightMap = new TreeMap<>(Collections.reverseOrder()); for (DataPoints dataPoints : plot.getDataPoints()) { double weight = 0; JSONArray dataArray = new JSONArray(); StringBuilder nameBuilder = new StringBuilder(); nameBuilder.append(dataPoints.metricName()).append(": "); Map<String, String> tags = dataPoints.getTags(); for (String s : tags.keySet()) { nameBuilder.append(String.format("%s=%s, ", s, tags.get(s))); }/* w w w.j ava2 s . com*/ nameBuilder.setLength(nameBuilder.length() - 2); for (DataPoint point : dataPoints) { long timestamp = point.timestamp(); if (timestamp < tsFrom || timestamp > tsTo) continue; double dpValue = getValue(point); JSONArray values = new JSONArray(); values.add(timestamp * 1000); values.add(dpValue); weight += ((dpValue) / 1000000.0); dataArray.add(values); } JSONObject series = new JSONObject(); series.put("name", nameBuilder.toString()); series.put("data", dataArray); while (weightMap.containsKey(weight)) weight -= 0.00000001; weightMap.put(weight, series); } int counter = 0; for (Map.Entry<Double, JSONObject> entry : weightMap.entrySet()) { seriesArray.add(entry.getValue()); ++counter; if ((topN > 0) && (counter >= topN)) break; } plotObject.put("plot", seriesArray); return plotObject; }
From source file:playground.meisterk.org.matsim.run.facilities.ShopsOf2005ToFacilities.java
private static void processPickPayOpenTimes(final ActivityFacilitiesImpl facilities) {

    System.out.println("Setting up Pickpay open times...");

    List<String> openTimeLines = null;
    List<String> addressLines = null;
    String[] openTokens = null, closeTokens = null;
    String[] addressTokens = null;
    Vector<Integer> openNumbers = new Vector<Integer>();
    Vector<Integer> closeNumbers = new Vector<Integer>();
    TreeMap<String, String> aPickpayOpentime = new TreeMap<String, String>();
    String facilityId = null;
    int addressLinePointer = 1; // ignore header line

    final String OPEN = "Auf";
    final String CLOSE = "Zu";
    String openLinePattern = ".*\\s" + OPEN + "\\s.*";
    String closeLinePattern = ".*\\s" + CLOSE + "\\s.*";

    try {
        openTimeLines = FileUtils.readLines(new File(pickPayOpenTimesFilename), "UTF-8");
        addressLines = FileUtils.readLines(new File(pickPayAdressesFilename), "UTF-8");
    } catch (IOException e) {
        e.printStackTrace();
    }

    // remember relevant lines only
    String key = null;
    for (String line : openTimeLines) {
        if (line.matches(openLinePattern)) {
            key = line;
        } else if (line.matches(closeLinePattern)) {
            if (!aPickpayOpentime.containsKey(key)) {
                aPickpayOpentime.put(key, line);
            }
        }
    }

    for (String openLine : aPickpayOpentime.keySet()) {

        openTokens = openLine.split(ANYTHING_BUT_DIGITS);
        addressTokens = addressLines.get(addressLinePointer).split(FIELD_DELIM);
        shopId = new ShopId(PICKPAY, "", addressTokens[1], "", addressTokens[4], addressTokens[5],
                addressTokens[2]);
        addressLinePointer++;
        facilityId = shopId.getShopId();
        //System.out.println(facilityId);

        ActivityFacilityImpl theCurrentPickpay = (ActivityFacilityImpl) facilities.getFacilities()
                .get(Id.create(facilityId, ActivityFacility.class));

        if (theCurrentPickpay != null) {

            // yeah, we can use the open times
            ActivityOptionImpl shopping = theCurrentPickpay.createActivityOption(ACTIVITY_TYPE_SHOP);

            openNumbers.clear();
            closeNumbers.clear();

            // print out and extract numbers
            //System.out.print(OPEN + ":\t");
            for (String token : openTokens) {
                if (!token.equals("") && !token.equals(openTokens[0])) {
                    openNumbers.add(Integer.valueOf(token));
                    //System.out.print(token + "\t");
                }
            }
            //System.out.println();

            closeTokens = aPickpayOpentime.get(openLine).split(ANYTHING_BUT_DIGITS);
            //System.out.print(CLOSE + ":\t");
            for (String token : closeTokens) {
                if (!token.equals("")) {
                    closeNumbers.add(Integer.valueOf(token));
                    //System.out.print(token + "\t");
                }
            }
            //System.out.println();

            // now process numbers
            //String day = "wkday";
            Day[] days = Day.values();
            int dayPointer = 0;
            OpeningTimeImpl opentime = null;
            int openSeconds = 0;
            int closeSeconds = 0;
            int previousOpenSeconds = 0;

            if (openNumbers.size() == closeNumbers.size()) {
                for (int ii = 0; ii < openNumbers.size(); ii += 2) {
                    openSeconds = openNumbers.get(ii) * 3600 + openNumbers.get(ii + 1) * 60;
                    closeSeconds = closeNumbers.get(ii) * 3600 + closeNumbers.get(ii + 1) * 60;

                    // check if a new day starts
                    if (openSeconds <= previousOpenSeconds) {
                        dayPointer++;
                    }
                    previousOpenSeconds = openSeconds;

                    opentime = new OpeningTimeImpl(days[dayPointer].getAbbrevEnglish(), openSeconds,
                            closeSeconds);
                    shopping.addOpeningTime(opentime);
                }
            } else {
                throw new RuntimeException("openNumbers[] and closeNumbers[] have different size. Aborting...");
            }
            System.out.flush();

        } else {
            System.out.println("A pickpay with id " + facilityId + " does not exist.");
        }
    }

    System.out.println("Setting up Pickpay open times...done.");
}
From source file:com.upplication.s3fs.util.AmazonS3ClientMock.java
/**
 * list all objects and return an ObjectListing with all elements
 * and with truncated set to false
 */
@Override
public ObjectListing listObjects(ListObjectsRequest listObjectsRequest) throws AmazonClientException {
    String bucketName = listObjectsRequest.getBucketName();
    String prefix = listObjectsRequest.getPrefix();
    String marker = listObjectsRequest.getMarker();
    String delimiter = listObjectsRequest.getDelimiter();

    ObjectListing objectListing = new ObjectListing();
    objectListing.setBucketName(bucketName);
    objectListing.setPrefix(prefix);
    objectListing.setMarker(marker);
    objectListing.setDelimiter(delimiter);

    final Path bucket = find(bucketName);
    final TreeMap<String, S3Element> elems = new TreeMap<>();
    try {
        for (Path elem : Files.newDirectoryStream(bucket)) {
            S3Element element = parse(elem, bucket);
            if (!elems.containsKey(element.getS3Object().getKey()))
                elems.put(element.getS3Object().getKey(), element);
        }
    } catch (IOException e) {
        throw new AmazonClientException(e);
    }
    Iterator<S3Element> iterator = elems.values().iterator();

    int i = 0;
    boolean waitForMarker = !StringUtils.isNullOrEmpty(marker);
    while (iterator.hasNext()) {
        S3Element elem = iterator.next();
        if (elem.getS3Object().getKey().equals("/"))
            continue;
        String key = elem.getS3Object().getKey();
        if (waitForMarker) {
            waitForMarker = !key.startsWith(marker);
            if (waitForMarker)
                continue;
        }

        if (prefix != null && key.startsWith(prefix)) {
            int beginIndex = key.indexOf(prefix) + prefix.length();
            String rest = key.substring(beginIndex);
            if (delimiter != null && delimiter.length() > 0 && rest.contains(delimiter)) {
                String substring = key.substring(0, beginIndex + rest.indexOf(delimiter));
                if (!objectListing.getCommonPrefixes().contains(substring))
                    objectListing.getCommonPrefixes().add(substring);
                continue;
            }
            S3ObjectSummary s3ObjectSummary = parseToS3ObjectSummary(elem);
            objectListing.getObjectSummaries().add(s3ObjectSummary);

            if (i + 1 == LIMIT_AWS_MAX_ELEMENTS && iterator.hasNext()) {
                objectListing.setTruncated(true);
                objectListing.setNextMarker(iterator.next().getS3Object().getKey());
                return objectListing;
            }
            objectListing.setTruncated(false);

            i++;
        }
    }

    Collections.sort(objectListing.getObjectSummaries(), new Comparator<S3ObjectSummary>() {
        @Override
        public int compare(S3ObjectSummary o1, S3ObjectSummary o2) {
            return o1.getKey().compareTo(o2.getKey());
        }
    });

    return objectListing;
}