List of usage examples for java.util TreeMap entrySet
Method: public Set<Map.Entry<K,V>> entrySet(). Returns a Set view of the mappings contained in this map; the set's iterator returns the entries in ascending key order.
Each example below is taken from an open-source project; the originating source file is listed before each snippet.
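The project snippets that follow all share the same basic pattern: iterate the TreeMap in sorted key order through entrySet() and read each Map.Entry. Here is a minimal, self-contained sketch of that pattern; the class name and map contents are illustrative only.

import java.util.Map;
import java.util.TreeMap;

public class TreeMapEntrySetExample {
    public static void main(String[] args) {
        // TreeMap keeps its entries sorted by key (natural ordering here)
        TreeMap<String, Integer> wordCounts = new TreeMap<>();
        wordCounts.put("pear", 2);
        wordCounts.put("apple", 5);
        wordCounts.put("banana", 3);

        // entrySet() returns a Set view of the mappings in ascending key order;
        // each Map.Entry exposes getKey()/getValue(), and setValue() writes through to the map
        for (Map.Entry<String, Integer> entry : wordCounts.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
        // prints: apple -> 5, banana -> 3, pear -> 2
    }
}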
From source file:service.ModuleService.java
public void changePosition(Long moduleId, Long newPosition, Long pkId) {
    Module Module = moduleDao.find(moduleId);
    Long groupId = Module.getGroup().getId();
    updatePositionsAndGetAvailable(groupId, pkId);
    TreeMap<Long, Module> map = new TreeMap();
    List<Module> modules = moduleDao.getActiveModules(groupId, pkId);
    Long oldPosition = Module.getPosition();
    for (Module m : modules) {
        Long GID = m.getId();
        if (GID.equals(moduleId)) {
            map.put(newPosition, m);
        } else {
            // if (oldPosition > newPosition) {
            if (m.getPosition() < newPosition || m.getPosition() > oldPosition) {
                map.put(m.getPosition(), m);
            } else {
                map.put(m.getPosition() + 1, m);
            }
            // } else {
            if (m.getPosition() > newPosition || m.getPosition() < oldPosition) {
                map.put(m.getPosition(), m);
            } else {
                map.put(m.getPosition() - 1, m);
            }
            // }
        }
    }
    for (Map.Entry<Long, Module> entry : map.entrySet()) {
        Long pos = entry.getKey();
        Module m = entry.getValue();
        m.setPosition(pos);
        if (validate(m)) {
            moduleDao.update(m);
        }
    }
}
From source file:com.clust4j.algo.HDBSCAN.java
protected static int[] getLabels(ArrayList<CompQuadTup<Integer, Integer, Double, Integer>> condensed,
        TreeMap<Integer, Double> stability) {

    double subTreeStability;
    ArrayList<Integer> clusters = new ArrayList<Integer>();
    HSet<Integer> clusterSet;
    TreeMap<Integer, Integer> clusterMap = new TreeMap<>(), reverseClusterMap = new TreeMap<>();

    // Get descending sorted key set
    ArrayList<Integer> nodeList = GetLabelUtils.descSortedKeySet(stability);

    // Get tuples where child size > 1
    EntryPair<ArrayList<double[]>, Integer> entry = GetLabelUtils.childSizeGtOneAndMaxChild(condensed);
    ArrayList<double[]> clusterTree = entry.getKey();

    // Map of nodes to whether it's a cluster
    TreeMap<Integer, Boolean> isCluster = GetLabelUtils.initNodeMap(nodeList);

    // Get num points
    //int numPoints = entry.getValue();

    // Iter over nodes
    for (Integer node : nodeList) {
        subTreeStability = GetLabelUtils.subTreeStability(clusterTree, node, stability);

        if (subTreeStability > stability.get(node)) {
            isCluster.put(node, false);
            stability.put(node, subTreeStability);
        } else {
            for (Integer subNode : GetLabelUtils.breadthFirstSearchFromClusterTree(clusterTree, node))
                if (subNode.intValue() != node)
                    isCluster.put(subNode, false);
        }
    }

    // Now add to clusters
    for (Map.Entry<Integer, Boolean> c : isCluster.entrySet())
        if (c.getValue())
            clusters.add(c.getKey());
    clusterSet = new HSet<Integer>(clusters);

    // Build cluster map
    int n = 0;
    for (Integer clust : clusterSet) {
        clusterMap.put(clust, n);
        reverseClusterMap.put(n, clust);
        n++;
    }

    return doLabeling(condensed, clusters, clusterMap);
}
From source file:MultiColumnPrinter.java
private void printSortedTable() {
    // Sort the table entries
    TreeMap sortedTable = new TreeMap();
    Enumeration elm = table.elements();
    while (elm.hasMoreElements()) {
        String[] row = (String[]) elm.nextElement();
        // If keyCriteria contains valid info use that
        // to create the key; otherwise, use the default row[0]
        // for the key.
        if (keyCriteria != null && keyCriteria.length > 0) {
            String key = getKey(row);
            if (key != null)
                sortedTable.put(key, row);
            else
                sortedTable.put(row[0], row);
        } else {
            sortedTable.put(row[0], row);
        }
    }

    // Iterate through the table entries
    Iterator iterator = sortedTable.entrySet().iterator();
    while (iterator.hasNext()) {
        Map.Entry entry = (Map.Entry) iterator.next();
        String[] row = ((String[]) entry.getValue());
        printRow(row);
    }
}
From source file:org.loklak.geo.GeoNames.java
public GeoNames(final File cities1000_zip, final File iso3166json, long minPopulation) throws IOException {

    // load iso3166 info
    this.iso3166toCountry = new HashMap<>();
    try {
        //String jsonString = new String(Files.readAllBytes(iso3166json.toPath()), StandardCharsets.UTF_8);
        ObjectMapper jsonMapper = new ObjectMapper(DAO.jsonFactory);
        JsonNode j = jsonMapper.readTree(iso3166json);
        for (JsonNode n : j) {
            // contains name,alpha-2,alpha-3,country-code,iso_3166-2,region-code,sub-region-code
            String name = n.get("name").textValue();
            String cc = n.get("alpha-2").textValue();
            this.iso3166toCountry.put(cc, name);
        }
    } catch (IOException e) {
        this.iso3166toCountry = new HashMap<String, String>();
    }

    // this is a processing of the cities1000.zip file from http://download.geonames.org/export/dump/

    this.id2loc = new HashMap<>();
    this.hash2ids = new HashMap<>();
    this.stopwordHashes = new HashSet<>();
    this.countryCenter = new HashMap<>();
    Map<String, CountryBounds> countryBounds = new HashMap<>();

    if (cities1000_zip == null || !cities1000_zip.exists()) {
        throw new IOException("GeoNames: file does not exist!");
    }
    ZipFile zf = null;
    BufferedReader reader = null;
    try {
        zf = new ZipFile(cities1000_zip);
        String entryName = cities1000_zip.getName();
        entryName = entryName.substring(0, entryName.length() - 3) + "txt";
        final ZipEntry ze = zf.getEntry(entryName);
        final InputStream is = zf.getInputStream(ze);
        reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8));
    } catch (final IOException e) {
        throw new IOException("GeoNames: Error when decompressing cities1000.zip!", e);
    }

    /* parse this fields:
    ---------------------------------------------------
    00 geonameid         : integer id of record in geonames database
    01 name              : name of geographical point (utf8) varchar(200)
    02 asciiname         : name of geographical point in plain ascii characters, varchar(200)
    03 alternatenames    : alternatenames, comma separated varchar(5000)
    04 latitude          : latitude in decimal degrees (wgs84)
    05 longitude         : longitude in decimal degrees (wgs84)
    06 feature class     : see http://www.geonames.org/export/codes.html, char(1)
    07 feature code      : see http://www.geonames.org/export/codes.html, varchar(10)
    08 country code      : ISO-3166 2-letter country code, 2 characters
    09 cc2               : alternate country codes, comma separated, ISO-3166 2-letter country code, 60 characters
    10 admin1 code       : fipscode (subject to change to iso code), see exceptions below, see file admin1Codes.txt for display names of this code; varchar(20)
    11 admin2 code       : code for the second administrative division, a county in the US, see file admin2Codes.txt; varchar(80)
    12 admin3 code       : code for third level administrative division, varchar(20)
    13 admin4 code       : code for fourth level administrative division, varchar(20)
    14 population        : bigint (8 byte int)
    15 elevation         : in meters, integer
    16 dem               : digital elevation model, srtm3 or gtopo30, average elevation of 3''x3'' (ca 90mx90m) or 30''x30'' (ca 900mx900m) area in meters, integer. srtm processed by cgiar/ciat.
    17 timezone          : the timezone id (see file timeZone.txt) varchar(40)
    18 modification date : date of last modification in yyyy-MM-dd format
    */
    try {
        String line;
        String[] fields;
        while ((line = reader.readLine()) != null) {
            if (line.isEmpty()) {
                continue;
            }
            fields = CommonPattern.TAB.split(line);
            final long population = Long.parseLong(fields[14]);
            if (minPopulation > 0 && population < minPopulation)
                continue;
            final int geonameid = Integer.parseInt(fields[0]);
            Set<String> locnames = new LinkedHashSet<>();
            locnames.add(fields[1]);
            locnames.add(fields[2]);
            for (final String s : CommonPattern.COMMA.split(fields[3]))
                locnames.add(s);
            ArrayList<String> locnamess = new ArrayList<>(locnames.size());
            locnamess.addAll(locnames);
            String cc = fields[8]; //ISO-3166

            final GeoLocation geoLocation = new GeoLocation(Float.parseFloat(fields[4]),
                    Float.parseFloat(fields[5]), locnamess, cc);
            geoLocation.setPopulation(population);
            this.id2loc.put(geonameid, geoLocation);

            for (final String name : locnames) {
                if (name.length() < 4)
                    continue;
                String normalized = normalize(name);
                int lochash = normalized.hashCode();
                List<Integer> locs = this.hash2ids.get(lochash);
                if (locs == null) {
                    locs = new ArrayList<Integer>(1);
                    this.hash2ids.put(lochash, locs);
                }
                if (!locs.contains(geonameid))
                    locs.add(geonameid);
            }

            // update the country bounds
            CountryBounds bounds = countryBounds.get(cc);
            if (bounds == null) {
                bounds = new CountryBounds();
                countryBounds.put(cc, bounds);
            }
            bounds.extend(geoLocation);
        }
        if (reader != null)
            reader.close();
        if (zf != null)
            zf.close();
    } catch (final IOException e) {
    }

    // calculate the center of the countries
    for (Map.Entry<String, CountryBounds> country : countryBounds.entrySet()) {
        this.countryCenter.put(country.getKey(),
                new double[] { (country.getValue().lon_west - country.getValue().lon_east) / 2.0,
                        (country.getValue().lat_north - country.getValue().lat_south) / 2.0 }); // [longitude, latitude]
    }

    // finally create a statistic which names appear very often to have fill-word heuristic
    TreeMap<Integer, Set<Integer>> stat = new TreeMap<>(); // a mapping from number of occurrences of location name hashes to a set of location name hashes
    for (Map.Entry<Integer, List<Integer>> entry : this.hash2ids.entrySet()) {
        int occurrences = entry.getValue().size();
        Set<Integer> hashes = stat.get(occurrences);
        if (hashes == null) {
            hashes = new HashSet<Integer>();
            stat.put(occurrences, hashes);
        }
        hashes.add(entry.getKey());
    }

    // we consider 3/4 of this list as fill-word (approx 300): those with the most occurrences
    int good = stat.size() / 4;
    Iterator<Map.Entry<Integer, Set<Integer>>> i = stat.entrySet().iterator();
    for (int j = 0; j < good; j++)
        i.next(); // 'eat away' the good entries.
    while (i.hasNext()) {
        Set<Integer> morehashes = i.next().getValue();
        this.stopwordHashes.addAll(morehashes);
    }
}
From source file:org.apache.hadoop.hbase.rest.client.RemoteHTable.java
public void put(List<Put> puts) throws IOException {
    // this is a trick: The gateway accepts multiple rows in a cell set and
    // ignores the row specification in the URI

    // separate puts by row
    TreeMap<byte[], List<Cell>> map = new TreeMap<byte[], List<Cell>>(Bytes.BYTES_COMPARATOR);
    for (Put put : puts) {
        byte[] row = put.getRow();
        List<Cell> cells = map.get(row);
        if (cells == null) {
            cells = new ArrayList<Cell>();
            map.put(row, cells);
        }
        for (List<Cell> l : put.getFamilyCellMap().values()) {
            cells.addAll(l);
        }
    }

    // build the cell set
    CellSetModel model = new CellSetModel();
    for (Map.Entry<byte[], List<Cell>> e : map.entrySet()) {
        RowModel row = new RowModel(e.getKey());
        for (Cell cell : e.getValue()) {
            KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
            row.addCell(new CellModel(kv));
        }
        model.addRow(row);
    }

    // build path for multiput
    StringBuilder sb = new StringBuilder();
    sb.append('/');
    sb.append(Bytes.toStringBinary(name));
    sb.append("/$multiput"); // can be any nonexistent row

    for (int i = 0; i < maxRetries; i++) {
        Response response = client.put(sb.toString(), Constants.MIMETYPE_PROTOBUF,
                model.createProtobufOutput());
        int code = response.getCode();
        switch (code) {
        case 200:
            return;
        case 509:
            try {
                Thread.sleep(sleepTime);
            } catch (InterruptedException e) {
                throw (InterruptedIOException) new InterruptedIOException().initCause(e);
            }
            break;
        default:
            throw new IOException("multiput request failed with " + code);
        }
    }
    throw new IOException("multiput request timed out");
}
From source file:com.yanbang.portal.controller.PortalController.java
/**
 * Build the left-hand portal menu for the logged-in user.
 *
 * @param request
 * @param response
 * @return
 * @throws Exception
 */
@SuppressWarnings("rawtypes")
@RequestMapping(params = "action=portalLeft")
public ModelAndView portalBottom(HttpServletRequest request, HttpServletResponse response) throws Exception {
    Map<String, Object> map = new HashMap<String, Object>();
    SysUser user = (SysUser) this.getLoginUser(request);
    Collection<SysRole> rolelist = portalBiz.findRolesByUserCode(user.getUserCode());
    String soft_licence = portalBiz.getLicences();
    UserCompareBiz usercom = UserCompareBiz.getInstance();
    usercom.setAddress(portalBiz.getServerAddr());
    if (usercom.validaeAddress() || ValidateLicense.isLicenceNoExpired(soft_licence)) {
        rolelist = portalBiz.findRolesByUserCode(user.getUserCode());
    } else {
        if (ValidateLicense.isLicenceDateExpired(soft_licence)) {
            rolelist = new ArrayList<SysRole>();
        } else {
            rolelist = portalBiz.findRolesByUserCode(user.getUserCode());
        }
    }
    // all menus reachable through the user's roles
    HashMap<Long, SysMenu> totalMenuList = new HashMap<Long, SysMenu>();
    // first-level menus, keyed by menu order
    TreeMap<Long, SysMenu> firstMenuMap = new TreeMap<Long, SysMenu>();
    // collect the menus granted by each role
    for (SysRole role : rolelist) {
        Collection<SysMenu> menulist = portalBiz.findMenusByListId(role.getRoleMenus());
        for (SysMenu menu : menulist) {
            totalMenuList.put(menu.getMenuId(), menu);
            if (menu.getMenuGrade() == 1) {
                firstMenuMap.put(menu.getMenuOrder(), menu);
            }
        }
    }
    // ===========================================
    String strTotalMenusIds = "-1";
    Iterator iterTotal = totalMenuList.entrySet().iterator();
    while (iterTotal.hasNext()) {
        Map.Entry entry = (Map.Entry) iterTotal.next();
        SysMenu menu = (SysMenu) entry.getValue();
        strTotalMenusIds = strTotalMenusIds + "," + menu.getMenuId();
    }
    // ============================================
    Iterator iter = firstMenuMap.entrySet().iterator();
    ArrayList<SysMenu> menulist = new ArrayList<SysMenu>();
    // build the menu tree for each first-level menu, in TreeMap (menu order) order
    while (iter.hasNext()) {
        Map.Entry entry = (Map.Entry) iter.next();
        SysMenu menu1 = (SysMenu) entry.getValue();
        MenuModel menuModel1 = new MenuModel();
        menuModel1.setMenuId(menu1.getMenuId());
        menuModel1.setMenuName(menu1.getMenuName());
        menuModel1.setMenuURL(menu1.getMenuURL());
        menuModel1.setMenuParentId(menu1.getMenuParentId());
        menuModel1.setMenuGrade(menu1.getMenuGrade());
        menuModel1.setMenuTarget(menu1.getMenuTarget());
        // ----------------------------------------------------------
        // second-level menus
        Collection<SysMenu> menu2list = portalBiz.findAllChildMenus(menu1.getMenuId(), strTotalMenusIds);
        if (menu2list != null) {
            ArrayList<MenuModel> menu2RetList = new ArrayList<MenuModel>();
            for (SysMenu menu2 : menu2list) {
                MenuModel menuModel2 = new MenuModel();
                menuModel2.setMenuId(menu2.getMenuId());
                menuModel2.setMenuName(menu2.getMenuName());
                menuModel2.setMenuURL(menu2.getMenuURL());
                menuModel2.setMenuParentId(menu2.getMenuParentId());
                menuModel2.setMenuGrade(menu2.getMenuGrade());
                menuModel2.setMenuTarget(menu2.getMenuTarget());
                // third-level menus
                Collection<SysMenu> menu3list = portalBiz.findAllChildMenus(menu2.getMenuId(),
                        strTotalMenusIds);
                if (menu3list != null) {
                    ArrayList<MenuModel> menu3RetList = new ArrayList<MenuModel>();
                    for (SysMenu menu3 : menu3list) {
                        MenuModel menuModel3 = new MenuModel();
                        menuModel3.setMenuId(menu3.getMenuId());
                        menuModel3.setMenuName(menu3.getMenuName());
                        menuModel3.setMenuURL(menu3.getMenuURL());
                        menuModel3.setMenuParentId(menu3.getMenuParentId());
                        menuModel3.setMenuGrade(menu3.getMenuGrade());
                        menuModel3.setMenuTarget(menu3.getMenuTarget());
                        menu3RetList.add(menuModel3);
                    }
                    menuModel2.setChildMenuList(menu3RetList);
                }
                menu2RetList.add(menuModel2);
            }
            menuModel1.setChildMenuList(menu2RetList);
        }
        // ----------------------------------------------------------
        menulist.add(menuModel1);
    }
    map.put("menulist", menulist);
    return new ModelAndView("portal/portalLeft", map);
}
From source file:org.ncic.bioinfo.sparkseq.algorithms.utils.reports.GATKReportTable.java
/**
 * Write the table to the PrintStream, formatted nicely to be human-readable, AWK-able, and R-friendly.
 *
 * @param out the PrintStream to which the table should be written
 */
void write(final PrintStream out) {

    /*
     * Table header:
     * #:GATKTable:nColumns:nRows:(DataType for each column):;
     * #:GATKTable:TableName:Description :;
     * key   colA   colB
     * row1  xxxx   xxxxx
     */

    // write the table definition
    out.printf(GATKTABLE_HEADER_PREFIX + ":%d:%d", getNumColumns(), getNumRows());

    // write the formats for all the columns
    for (final GATKReportColumn column : columnInfo)
        out.print(SEPARATOR + column.getFormat());
    out.println(ENDLINE);

    // write the table name & description
    out.printf(GATKTABLE_HEADER_PREFIX + ":%s:%s\n", tableName, tableDescription);

    // write the column names
    boolean needsPadding = false;
    for (final GATKReportColumn column : columnInfo) {
        if (needsPadding)
            out.printf(" ");
        needsPadding = true;
        out.printf(column.getColumnFormat().getNameFormat(), column.getColumnName());
    }
    out.println();

    // write the table body
    switch (sortingWay) {
    case SORT_BY_COLUMN:
        Collections.sort(underlyingData, new Comparator<Object[]>() {
            //INVARIANT the two arrays are of the same length and corresponding elements are of the same type
            @Override
            public int compare(Object[] objectArr1, Object[] objectArr2) {
                final int EQUAL = 0;

                int result = EQUAL;

                int l = objectArr1.length;
                for (int x = 0; x < l; x++) {
                    if (objectArr1[x] instanceof Integer) {
                        result = ((Integer) objectArr1[x]).compareTo((Integer) objectArr2[x]);
                    } else if (objectArr1[x] instanceof Double) {
                        result = ((Double) objectArr1[x]).compareTo((Double) objectArr2[x]);
                    } else { // default uses String comparison
                        result = objectArr1[x].toString().compareTo(objectArr2[x].toString());
                    }
                    if (result != EQUAL) {
                        return result;
                    }
                }
                return result;
            }
        });
        for (final Object[] row : underlyingData)
            writeRow(out, row);
        break;
    case SORT_BY_ROW:
        // make sure that there are exactly the correct number of ID mappings
        if (rowIdToIndex.size() != underlyingData.size())
            throw new ReviewedGATKException(
                    "There isn't a 1-to-1 mapping from row ID to index; this can happen when rows are not created consistently");

        final TreeMap<Object, Integer> sortedMap;
        try {
            sortedMap = new TreeMap<Object, Integer>(rowIdToIndex);
        } catch (ClassCastException e) {
            throw new ReviewedGATKException(
                    "Unable to sort the rows based on the row IDs because the ID Objects are of different types");
        }
        for (final Map.Entry<Object, Integer> rowKey : sortedMap.entrySet())
            writeRow(out, underlyingData.get(rowKey.getValue()));
        break;
    case DO_NOT_SORT:
        for (final Object[] row : underlyingData)
            writeRow(out, row);
    }
    out.println();
}
From source file:org.ncic.bioinfo.sparkseq.algorithms.utils.reports.GATKReportTable.java
public String[] transIntoLines() {
    StringBuilder sBuilder = new StringBuilder();
    /*
     * Table header:
     * #:GATKTable:nColumns:nRows:(DataType for each column):;
     * #:GATKTable:TableName:Description :;
     * key   colA   colB
     * row1  xxxx   xxxxx
     */

    // write the table definition
    sBuilder.append(String.format(GATKTABLE_HEADER_PREFIX + ":%d:%d", getNumColumns(), getNumRows()));

    // write the formats for all the columns
    for (final GATKReportColumn column : columnInfo)
        sBuilder.append(SEPARATOR + column.getFormat());
    sBuilder.append(ENDLINE);
    sBuilder.append('\n');

    // write the table name & description
    sBuilder.append(String.format(GATKTABLE_HEADER_PREFIX + ":%s:%s\n", tableName, tableDescription));

    // write the column names
    boolean needsPadding = false;
    for (final GATKReportColumn column : columnInfo) {
        if (needsPadding)
            sBuilder.append(" ");
        needsPadding = true;
        sBuilder.append(String.format(column.getColumnFormat().getNameFormat(), column.getColumnName()));
    }
    sBuilder.append('\n');

    // write the table body
    switch (sortingWay) {
    case SORT_BY_COLUMN:
        Collections.sort(underlyingData, new Comparator<Object[]>() {
            //INVARIANT the two arrays are of the same length and corresponding elements are of the same type
            @Override
            public int compare(Object[] objectArr1, Object[] objectArr2) {
                final int EQUAL = 0;

                int result = EQUAL;

                int l = objectArr1.length;
                for (int x = 0; x < l; x++) {
                    if (objectArr1[x] instanceof Integer) {
                        result = ((Integer) objectArr1[x]).compareTo((Integer) objectArr2[x]);
                    } else if (objectArr1[x] instanceof Double) {
                        result = ((Double) objectArr1[x]).compareTo((Double) objectArr2[x]);
                    } else { // default uses String comparison
                        result = objectArr1[x].toString().compareTo(objectArr2[x].toString());
                    }
                    if (result != EQUAL) {
                        return result;
                    }
                }
                return result;
            }
        });
        for (final Object[] row : underlyingData)
            writeRowIntoBuffer(sBuilder, row);
        break;
    case SORT_BY_ROW:
        // make sure that there are exactly the correct number of ID mappings
        if (rowIdToIndex.size() != underlyingData.size())
            throw new ReviewedGATKException(
                    "There isn't a 1-to-1 mapping from row ID to index; this can happen when rows are not created consistently");

        final TreeMap<Object, Integer> sortedMap;
        try {
            sortedMap = new TreeMap<Object, Integer>(rowIdToIndex);
        } catch (ClassCastException e) {
            throw new ReviewedGATKException(
                    "Unable to sort the rows based on the row IDs because the ID Objects are of different types");
        }
        for (final Map.Entry<Object, Integer> rowKey : sortedMap.entrySet())
            writeRowIntoBuffer(sBuilder, underlyingData.get(rowKey.getValue()));
        break;
    case DO_NOT_SORT:
        for (final Object[] row : underlyingData)
            writeRowIntoBuffer(sBuilder, row);
    }

    return StringUtils.split(sBuilder.toString(), '\n');
}
From source file:org.waterforpeople.mapping.app.web.KMLGenerator.java
public String bindPlacemark(AccessPoint ap, String vmName, String display, StandardType standardType)
        throws Exception {
    // if (ap.getCountryCode() != null && !ap.getCountryCode().equals("MW"))
    // {
    if (display != null && display.trim().equalsIgnoreCase(GOOGLE_EARTH_DISPLAY)) {
        vmName = "placemarkGoogleEarth.vm";
    }
    if (ap.getCountryCode() == null)
        ap.setCountryCode("Unknown");
    if (ap.getCountryCode() != null) {
        VelocityContext context = new VelocityContext();
        context.put("organization", ORGANIZATION);
        if (display != null) {
            context.put("display", display);
        }
        context.put("countryCode", ap.getCountryCode());
        if (ap.getCollectionDate() != null) {
            String timestamp = DateFormatUtils.formatUTC(ap.getCollectionDate(),
                    DateFormatUtils.ISO_DATE_FORMAT.getPattern());
            String formattedDate = DateFormat.getDateInstance(DateFormat.SHORT).format(ap.getCollectionDate());
            context.put("collectionDate", formattedDate);
            context.put("timestamp", timestamp);
            String collectionYear = new SimpleDateFormat("yyyy").format(ap.getCollectionDate());
            context.put("collectionYear", collectionYear);
        } else {
            String timestamp = DateFormatUtils.formatUTC(ap.getCreatedDateTime(),
                    DateFormatUtils.ISO_DATE_FORMAT.getPattern());
            String formattedDate = DateFormat.getDateInstance(DateFormat.SHORT).format(ap.getCreatedDateTime());
            context.put("collectionDate", formattedDate);
            context.put("timestamp", timestamp);
        }
        if (ap.getCommunityCode() != null)
            context.put("communityCode", ap.getCommunityCode());
        else
            context.put("communityCode", "Unknown" + new Date());
        if (ap.getWaterForPeopleProjectFlag() != null) {
            context.put("waterForPeopleProject", encodeBooleanDisplay(ap.getWaterForPeopleProjectFlag()));
        } else {
            context.put("waterForPeopleProject", "null");
        }
        if (ap.getCurrentProblem() != null) {
            context.put("currentProblem", ap.getCurrentProblem());
        } else {
            context.put("currentProblem", ap.getCurrentProblem());
        }
        if (ap.getWaterForPeopleRole() != null) {
            context.put("waterForPeopleRole", ap.getWaterForPeopleRole());
        } else {
            context.put("waterForPeopleRole", "null");
        }
        if (ap.getPhotoURL() != null && ap.getPhotoURL().trim() != "")
            context.put("photoUrl", ap.getPhotoURL());
        else
            context.put("photoUrl", "http://waterforpeople.s3.amazonaws.com/images/wfplogo.jpg");
        if (ap.getPointType() != null) {
            if (ap.getPointType().equals(AccessPoint.AccessPointType.WATER_POINT)) {
                context.put("typeOfPoint", "Water");
                context.put("type", "water");
            } else if (ap.getPointType().equals(AccessPointType.SANITATION_POINT)) {
                context.put("typeOfPoint", "Sanitation");
                context.put("type", "sanitation");
            } else if (ap.getPointType().equals(AccessPointType.PUBLIC_INSTITUTION)) {
                context.put("typeOfPoint", "Public Institutions");
                context.put("type", "public_institutions");
            } else if (ap.getPointType().equals(AccessPointType.HEALTH_POSTS)) {
                context.put("typeOfPoint", "Health Posts");
                context.put("type", "health_posts");
            } else if (ap.getPointType().equals(AccessPointType.SCHOOL)) {
                context.put("typeOfPoint", "School");
                context.put("type", "school");
            }
        } else {
            context.put("typeOfPoint", "Water");
            context.put("type", "water");
        }
        if (ap.getTypeTechnologyString() == null) {
            context.put("primaryTypeTechnology", "Unknown");
        } else {
            context.put("primaryTypeTechnology", ap.getTypeTechnologyString());
        }
        if (ap.getHasSystemBeenDown1DayFlag() == null) {
            context.put("down1DayFlag", "Unknown");
        } else {
            context.put("down1DayFlag", encodeBooleanDisplay(ap.getHasSystemBeenDown1DayFlag()));
        }
        if (ap.getInstitutionName() == null) {
            context.put("institutionName", "Unknown");
        } else {
            context.put("institutionName", ap.getInstitutionName());
        }
        if (ap.getExtimatedPopulation() != null) {
            context.put("estimatedPopulation", ap.getExtimatedPopulation());
        } else {
            context.put("estimatedPopulation", "null");
        }
        if (ap.getConstructionDateYear() == null || ap.getConstructionDateYear().trim().equals("")) {
            context.put("constructionDateOfWaterPoint", "Unknown");
        } else {
            String constructionDateYear = ap.getConstructionDateYear();
            if (constructionDateYear.contains(".0")) {
                constructionDateYear = constructionDateYear.replace(".0", "");
            }
            context.put("constructionDateOfWaterPoint", constructionDateYear);
        }
        if (ap.getNumberOfHouseholdsUsingPoint() != null) {
            context.put("numberOfHouseholdsUsingWaterPoint", ap.getNumberOfHouseholdsUsingPoint());
        } else {
            context.put("numberOfHouseholdsUsingWaterPoint", "null");
        }
        if (ap.getCostPer() == null) {
            context.put("costPer", "N/A");
        } else {
            context.put("costPer", ap.getCostPer());
        }
        if (ap.getFarthestHouseholdfromPoint() == null || ap.getFarthestHouseholdfromPoint().trim().equals("")) {
            context.put("farthestHouseholdfromWaterPoint", "N/A");
        } else {
            context.put("farthestHouseholdfromWaterPoint", ap.getFarthestHouseholdfromPoint());
        }
        if (ap.getCurrentManagementStructurePoint() == null) {
            context.put("currMgmtStructure", "N/A");
        } else {
            context.put("currMgmtStructure", ap.getCurrentManagementStructurePoint());
        }
        if (ap.getPointPhotoCaption() == null || ap.getPointPhotoCaption().trim().equals("")) {
            context.put("waterPointPhotoCaption", defaultPhotoCaption);
        } else {
            context.put("waterPointPhotoCaption", ap.getPointPhotoCaption());
        }
        if (ap.getCommunityName() == null) {
            context.put("communityName", "Unknown");
        } else {
            context.put("communityName", ap.getCommunityName());
        }
        if (ap.getHeader() == null) {
            context.put("header", "Water For People");
        } else {
            context.put("header", ap.getHeader());
        }
        if (ap.getFooter() == null) {
            context.put("footer", "Water For People");
        } else {
            context.put("footer", ap.getFooter());
        }
        if (ap.getPhotoName() == null) {
            context.put("photoName", "Water For People");
        } else {
            context.put("photoName", ap.getPhotoName());
        }
        // if (ap.getCountryCode() == "RW") {
        if (ap.getMeetGovtQualityStandardFlag() == null) {
            context.put("meetGovtQualityStandardFlag", "N/A");
        } else {
            context.put("meetGovtQualityStandardFlag", encodeBooleanDisplay(ap.getMeetGovtQualityStandardFlag()));
        }
        // } else {
        // context.put("meetGovtQualityStandardFlag", "unknown");
        // }
        if (ap.getMeetGovtQuantityStandardFlag() == null) {
            context.put("meetGovtQuantityStandardFlag", "N/A");
        } else {
            context.put("meetGovtQuantityStandardFlag",
                    encodeBooleanDisplay(ap.getMeetGovtQuantityStandardFlag()));
        }
        if (ap.getWhoRepairsPoint() == null) {
            context.put("whoRepairsPoint", "N/A");
        } else {
            context.put("whoRepairsPoint", ap.getWhoRepairsPoint());
        }
        if (ap.getSecondaryTechnologyString() == null) {
            context.put("secondaryTypeTechnology", "N/A");
        } else {
            context.put("secondaryTypeTechnology", ap.getSecondaryTechnologyString());
        }
        if (ap.getProvideAdequateQuantity() == null) {
            context.put("provideAdequateQuantity", "N/A");
        } else {
            context.put("provideAdequateQuantity", encodeBooleanDisplay(ap.getProvideAdequateQuantity()));
        }
        if (ap.getBalloonTitle() == null) {
            context.put("title", "Water For People");
        } else {
            context.put("title", ap.getBalloonTitle());
        }
        if (ap.getProvideAdequateQuantity() == null) {
            context.put("provideAdequateQuantity", "N/A");
        } else {
            context.put("provideAdequateQuantity", encodeBooleanDisplay(ap.getProvideAdequateQuantity()));
        }
        if (ap.getQualityDescription() != null) {
            context.put("qualityDescription", ap.getQualityDescription());
        }
        if (ap.getQuantityDescription() != null) {
            context.put("quantityDescription", ap.getQuantityDescription());
        }
        if (ap.getSub1() != null) {
            context.put("sub1", ap.getSub1());
        }
        if (ap.getSub2() != null) {
            context.put("sub2", ap.getSub2());
        }
        if (ap.getSub3() != null) {
            context.put("sub3", ap.getSub3());
        }
        if (ap.getSub4() != null) {
            context.put("sub4", ap.getSub4());
        }
        if (ap.getSub5() != null) {
            context.put("sub5", ap.getSub5());
        }
        if (ap.getSub6() != null) {
            context.put("sub6", ap.getSub6());
        }
        if (ap.getAccessPointCode() != null) {
            context.put("accessPointCode", ap.getAccessPointCode());
        }
        if (ap.getAccessPointUsage() != null) {
            context.put("accessPointUsage", ap.getAccessPointUsage());
        }
        if (ap.getDescription() != null)
            context.put("description", ap.getDescription());
        else
            context.put("description", "Unknown");
        // Need to check this
        if (ap.getPointType() != null) {
            if (Boolean.parseBoolean(PropertyUtil.getProperty(DYNAMIC_SCORING_FLAG))) {
                TreeMap<String, String> combinedScore = fetchLevelOfServiceScoreStatus(ap);
                for (Map.Entry<String, String> entry : combinedScore.entrySet()) {
                    context.put(entry.getKey(), entry.getValue());
                    String style = null;
                    if (standardType != null) {
                        if (standardType.equals(StandardType.WaterPointLevelOfService) && entry.getKey()
                                .equals(StandardType.WaterPointLevelOfService.toString() + "-pinStyle")) {
                            style = entry.getValue();
                        } else if (standardType.equals(StandardType.WaterPointSustainability) && entry.getKey()
                                .equals(StandardType.WaterPointSustainability.toString() + "-pinStyle")) {
                            style = entry.getValue();
                        }
                    }
                    context.put("pinStyle", style);
                }
            } else {
                encodeStatusString(ap, context);
                context.put("pinStyle", encodePinStyle(ap.getPointType(), ap.getPointStatus()));
            }
        } else {
            context.put("pinStyle", "waterpushpinblk");
        }
        String output = mergeContext(context, vmName);
        context = null;
        return output;
    }
    return null;
}
From source file:com.sa.npopa.samples.hbase.rest.client.RemoteHTable.java
@Override
public void put(List<Put> puts) throws IOException {
    // this is a trick: The gateway accepts multiple rows in a cell set and
    // ignores the row specification in the URI

    // separate puts by row
    TreeMap<byte[], List<Cell>> map = new TreeMap<byte[], List<Cell>>(Bytes.BYTES_COMPARATOR);
    for (Put put : puts) {
        byte[] row = put.getRow();
        List<Cell> cells = map.get(row);
        if (cells == null) {
            cells = new ArrayList<Cell>();
            map.put(row, cells);
        }
        for (List<Cell> l : put.getFamilyCellMap().values()) {
            cells.addAll(l);
        }
    }

    // build the cell set
    CellSetModel model = new CellSetModel();
    for (Map.Entry<byte[], List<Cell>> e : map.entrySet()) {
        RowModel row = new RowModel(e.getKey());
        for (Cell cell : e.getValue()) {
            row.addCell(new CellModel(cell));
        }
        model.addRow(row);
    }

    // build path for multiput
    StringBuilder sb = new StringBuilder();
    sb.append('/');
    sb.append(Bytes.toStringBinary(name));
    sb.append("/$multiput"); // can be any nonexistent row

    for (int i = 0; i < maxRetries; i++) {
        Response response = client.put(sb.toString(), Constants.MIMETYPE_PROTOBUF,
                model.createProtobufOutput());
        int code = response.getCode();
        switch (code) {
        case 200:
            return;
        case 509:
            try {
                Thread.sleep(sleepTime);
            } catch (InterruptedException e) {
                throw (InterruptedIOException) new InterruptedIOException().initCause(e);
            }
            break;
        default:
            throw new IOException("multiput request failed with " + code);
        }
    }
    throw new IOException("multiput request timed out");
}