List of usage examples for java.util.TreeMap.keySet()
public Set<K> keySet()
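Before the project examples below, a minimal self-contained sketch of the method (illustrative keys and values only): keySet() returns a Set view backed by the map, and for a TreeMap that view iterates in ascending key order.

import java.util.Set;
import java.util.TreeMap;

public class TreeMapKeySetDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> map = new TreeMap<>();
        map.put("banana", 2);
        map.put("apple", 1);
        map.put("cherry", 3);

        // keySet() is a view backed by the map; for a TreeMap it iterates in ascending key order
        Set<String> keys = map.keySet();
        for (String key : keys) {
            System.out.println(key + " -> " + map.get(key)); // apple, banana, cherry
        }

        // Removing a key from the view also removes the entry from the map
        keys.remove("banana");
        System.out.println(map.containsKey("banana")); // false
    }
}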
From source file:org.apache.hadoop.hbase.TestGet.java
private void verifyGet(final HRegion r, final String expectedServer) throws IOException {
    // This should return a value because there is only one family member
    byte[] value = r.get(ROW_KEY, CONTENTS);
    assertNotNull(value);

    // This should not return a value because there are multiple family members
    value = r.get(ROW_KEY, HConstants.COLUMN_FAMILY);
    assertNull(value);

    // Find out what getFull returns
    TreeMap<Text, byte[]> values = r.getFull(ROW_KEY);
    // assertEquals(4, values.keySet().size());
    for (Iterator<Text> i = values.keySet().iterator(); i.hasNext();) {
        Text column = i.next();
        if (column.equals(HConstants.COL_SERVER)) {
            String server = Writables.bytesToString(values.get(column));
            assertEquals(expectedServer, server);
            LOG.info(server);
        }
    }
}
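The loop above pairs keySet() iteration with a values.get(column) lookup for each key. When both key and value are needed, iterating entrySet() avoids the second lookup; a minimal sketch of that pattern, with plain String keys and byte[] values standing in for the HBase types:

import java.util.Map;
import java.util.TreeMap;

static void printEntries(TreeMap<String, byte[]> values) {
    // entrySet() yields key and value together, so no values.get(key) lookup is needed
    for (Map.Entry<String, byte[]> entry : values.entrySet()) {
        System.out.println(entry.getKey() + " -> " + new String(entry.getValue()));
    }
}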
From source file:com.romeikat.datamessie.core.base.dao.impl.AbstractEntityWithIdAndVersionDaoTest.java
@Test
public void getIdsWithVersion_firstResult_maxResults() {
    TreeMap<Long, Long> idsWithVersion = dao.getIdsWithVersion(sessionProvider.getStatelessSession(), 0l, 1);
    Collection<Long> expected = Arrays.asList(1l);
    assertEquals(expected, Lists.newArrayList(idsWithVersion.keySet()));

    idsWithVersion = dao.getIdsWithVersion(sessionProvider.getStatelessSession(), 1l, 1);
    expected = Arrays.asList(1l);
    assertEquals(expected, Lists.newArrayList(idsWithVersion.keySet()));

    idsWithVersion = dao.getIdsWithVersion(sessionProvider.getStatelessSession(), 2l, 2);
    expected = Arrays.asList(2l, 3l);
    assertEquals(expected, Lists.newArrayList(idsWithVersion.keySet()));

    idsWithVersion = dao.getIdsWithVersion(sessionProvider.getStatelessSession(), 2l, 0);
    expected = Arrays.asList(2l, 3l);
    assertEquals(expected, Lists.newArrayList(idsWithVersion.keySet()));

    idsWithVersion = dao.getIdsWithVersion(sessionProvider.getStatelessSession(), 2l, 3);
    expected = Arrays.asList(2l, 3l);
    assertEquals(expected, Lists.newArrayList(idsWithVersion.keySet()));

    idsWithVersion = dao.getIdsWithVersion(sessionProvider.getStatelessSession(), 5l, 0);
    expected = Arrays.asList();
    assertEquals(expected, Lists.newArrayList(idsWithVersion.keySet()));

    dbSetupTracker.skipNextLaunch();
}
From source file:net.triptech.buildulator.model.DataGrid.java
/**
 * Gets the rows as a list.
 *
 * @return the list of row data
 */
public List<List<String>> getRows() {
    List<List<String>> data = new ArrayList<List<String>>();
    for (int i : body.keySet()) {
        TreeMap<Integer, String> row = body.get(i);
        List<String> rowData = new ArrayList<String>();
        for (int index : row.keySet()) {
            rowData.add(row.get(index));
        }
        data.add(rowData);
    }
    return data;
}
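Because a TreeMap's values() view also iterates in ascending key order, the nested keySet()/get() lookups above can be avoided; a minimal sketch of that alternative, assuming the same row layout as the DataGrid field (row index -> (column index -> cell value)):

import java.util.ArrayList;
import java.util.List;
import java.util.TreeMap;

// Assumed structure: outer map keyed by row index, inner map keyed by column index.
static List<List<String>> rowsOf(TreeMap<Integer, TreeMap<Integer, String>> body) {
    List<List<String>> data = new ArrayList<>();
    // values() of a TreeMap iterates in ascending key order, so rows and cells keep their positions
    for (TreeMap<Integer, String> row : body.values()) {
        data.add(new ArrayList<>(row.values()));
    }
    return data;
}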
From source file:org.starfishrespect.myconsumption.server.business.sensors.SensorsDataRetriever.java
/**
 * Retrieves and stores the data for one user.
 *
 * @param onlyThisSensorId retrieve only data for one sensor with this id
 * @return false if something goes wrong; true otherwise
 */
public boolean retrieve(List<Sensor> sensors, String onlyThisSensorId) {
    boolean allSuccessful = true;
    for (Sensor sensor : sensors) {
        System.out.println("Retrieve data for sensor " + sensor.getId());
        try {
            valuesRepository.setSensor(sensor.getId());
            valuesRepository.init();
            if (onlyThisSensorId != null) {
                if (!sensor.getId().equals(onlyThisSensorId)) {
                    continue;
                }
            }
            HashMap<Integer, HashMap<Integer, Integer>> sortedValues = new HashMap<Integer, HashMap<Integer, Integer>>();
            Date lastValue = sensor.getLastValue();
            SensorRetriever retriever = null;
            if (sensor instanceof FluksoSensor) {
                retriever = new FluksoRetriever((FluksoSensor) sensor);
            }
            if (retriever == null) {
                System.out.println("This sensor type has not been found!");
                continue;
            }
            TreeMap<Integer, Integer> data = retriever.getDataSince(lastValue).getData();
            if (data.size() != 0) {
                for (int key : data.keySet()) {
                    int hour = key - key % 3600;
                    HashMap<Integer, Integer> hourData = sortedValues.get(hour);
                    if (hourData == null) {
                        hourData = new HashMap<Integer, Integer>();
                        sortedValues.put(hour, hourData);
                    }
                    hourData.put(key % 3600, data.get(key));
                }
                for (int key : sortedValues.keySet()) {
                    Date dateKey = new Date(key * 1000L);
                    SensorDataset newValue = new SensorDataset(dateKey);
                    newValue.addAllValues(sortedValues.get(key));
                    valuesRepository.insertOrUpdate(newValue);
                }
                if (sensor.getLastValue().before(new Date(data.lastKey() * 1000L))) {
                    sensor.setLastValue(new Date(data.lastKey() * 1000L));
                }
                if (sensor.getFirstValue().after(new Date(data.firstKey() * 1000L))
                        || sensor.getFirstValue().getTime() == 0) {
                    sensor.setFirstValue(new Date(data.firstKey() * 1000L));
                }
                // Sync operation: this avoids inserting a sensor that was deleted
                // while its data was being retrieved.
                int currentUsageCount = sensorRepository.getUsageCount(sensor.getId());
                if (currentUsageCount > -1) {
                    // update, the field may have been incremented during retrieval
                    sensor.setUsageCount(currentUsageCount);
                    sensor.setDead(false);
                    sensorRepository.updateSensor(sensor);
                }
                System.out.println("Retrieve successful");
            } else {
                System.out.println("No values retrieved for this sensor");
                if (!sensor.isDead()) {
                    // test whether the sensor is dead
                    Calendar cal = new GregorianCalendar();
                    cal.add(Calendar.HOUR, -6);
                    if (sensor.getLastValue().before(new Date(cal.getTimeInMillis()))) {
                        System.out.println(
                                "Sensor has not sent anything in the last 6 hours! Set its status as dead.");
                        sensor.setDead(true);
                        sensorRepository.updateSensor(sensor);
                    }
                } else {
                    System.out.println("Sensor is still dead");
                }
            }
        } catch (RetrieveException | DaoException e) {
            System.err.println(e.getMessage());
            allSuccessful = false;
        }
    }
    return allSuccessful;
}
From source file:org.starfishrespect.myconsumption.server.business.repositories.repositoriesimpl.SensorRepositoryImpl.java
@Override
public List<List<Integer>> getValues(String sensorId, int startTime, int endTime) throws DaoException {
    if (endTime == 0)
        endTime = Integer.MAX_VALUE;
    if (startTime < 0 || endTime < 0 || startTime > endTime) {
        throw new BadRequestException();
    }
    if (!sensorExists(sensorId))
        throw new NotFoundException();
    int effectiveStart = startTime - startTime % 3600;
    mValuesRepository.setSensor(sensorId);
    List<SensorDataset> daoValues = mValuesRepository.getSensor(new Date(((long) effectiveStart) * 1000L),
            new Date(((long) endTime) * 1000L));
    List<List<Integer>> values = new ArrayList<>();
    for (SensorDataset value : daoValues) {
        int start = (int) (value.getTimestamp().getTime() / 1000);
        TreeMap<Integer, MinuteValues> v = value.getValues();
        if (v == null) {
            continue;
        }
        for (int key : v.keySet()) {
            for (int second : v.get(key).containedSeconds()) {
                int time = start + key * 60 + second;
                if (time < startTime || time > endTime) {
                    continue;
                }
                List<Integer> item = new ArrayList<>();
                item.add(time);
                item.add(value.getValues().get(key).getValue(second));
                values.add(item);
            }
        }
    }
    return values;
}
From source file:com.sfs.whichdoctor.formatter.PersonFormatter.java
/**
 * Calculate training summary total.
 *
 * @param summary the summary
 * @param inMonths the in months flag
 * @return the string
 */
private static String calculateTrainingSummaryTotal(final TreeMap<String, AccreditationBean[]> summary,
        final boolean inMonths) {
    StringBuffer value = new StringBuffer();
    int coreTotal = 0;
    int nonCoreTotal = 0;
    if (summary != null) {
        for (String key : summary.keySet()) {
            try {
                AccreditationBean[] details = summary.get(key);
                AccreditationBean core = details[0];
                AccreditationBean nonCore = details[1];
                coreTotal += core.getWeeksCertified();
                nonCoreTotal += nonCore.getWeeksCertified();
            } finally {
            }
        }
    }
    if (inMonths) {
        coreTotal = Formatter.getWholeMonths(coreTotal);
        nonCoreTotal = Formatter.getWholeMonths(nonCoreTotal);
    }
    value.append(coreTotal);
    value.append(" (");
    value.append(nonCoreTotal);
    value.append(")");
    return value.toString();
}
From source file:edu.ucsb.eucalyptus.transport.query.WalrusQuerySecurityHandler.java
private String getCanonicalizedAmzHeaders(CaseInsensitiveMap headers) {
    String result = "";
    TreeMap amzHeaders = headers.removeSub("x-amz-");
    Iterator iterator = amzHeaders.keySet().iterator();
    while (iterator.hasNext()) {
        Object key = iterator.next();
        String trimmedKey = key.toString().trim();
        String value = (String) amzHeaders.get(key);
        String trimmedValue = value.trim();
        result += trimmedKey + ":" + trimmedValue + "\n";
    }
    return result;
}
From source file:com.mmounirou.spotirss.spotify.tracks.SpotifyHrefQuery.java
private XTracks findBestMatchingTrack(List<XTracks> xtracks, final Track track) {
    if (xtracks.size() == 1) {
        return xtracks.get(0);
    }
    TreeMap<Integer, XTracks> sortedTrack = Maps.newTreeMap();
    for (XTracks xTrack : xtracks) {
        sortedTrack.put(getLevenshteinDistance(xTrack, track), xTrack);
    }
    Integer minDistance = Iterables.get(sortedTrack.keySet(), 0);
    XTracks choosedTrack = sortedTrack.get(minDistance);
    if (minDistance > 1) {
        SpotiRss.LOGGER.info(String.format("(%s:%s) choosed for (%s:%s) with distance %d",
                choosedTrack.getOriginalTrackName(), Joiner.on(",").join(choosedTrack.getAllArtists()),
                track.getSong(), Joiner.on(",").join(track.getArtists()), minDistance));
    } else {
        SpotiRss.LOGGER.debug(String.format("(%s:%s) choosed for (%s:%s) with distance %d",
                choosedTrack.getOriginalTrackName(), Joiner.on(",").join(choosedTrack.getAllArtists()),
                track.getSong(), Joiner.on(",").join(track.getArtists()), minDistance));
    }
    return choosedTrack;
}
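Because the TreeMap already keeps its keys sorted, the smallest distance can also be read directly from the map rather than through Iterables.get on the keySet() view; a minimal sketch of that alternative, with plain String candidates standing in for the project's XTracks type:

import java.util.Map;
import java.util.TreeMap;

// Hypothetical simplification: the distance-to-candidate map is built elsewhere.
static String bestCandidate(TreeMap<Integer, String> byDistance) {
    // firstEntry() returns the mapping with the lowest key, i.e. the smallest distance,
    // without iterating the keySet() view.
    Map.Entry<Integer, String> best = byDistance.firstEntry();
    return best == null ? null : best.getValue();
}

Note that a TreeMap keyed by distance keeps only one candidate per distance value, so ties overwrite earlier entries; the original code above shares that behaviour.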
From source file:my.mavenproject10.FileuploadController.java
@RequestMapping(method = RequestMethod.POST)
ModelAndView upload(HttpServletRequest request, HttpServletResponse response) {
    boolean isMultipart = ServletFileUpload.isMultipartContent(request);
    String fileName = "";
    int size = 0;
    ArrayList<String> result = new ArrayList<String>();
    if (isMultipart) {
        FileItemFactory factory = new DiskFileItemFactory();
        ServletFileUpload upload = new ServletFileUpload(factory);
        try {
            List items = upload.parseRequest(request);
            Iterator iterator = items.iterator();
            while (iterator.hasNext()) {
                FileItem item = (FileItem) iterator.next();
                fileName = item.getName();
                System.out.println("file name " + item.getName());
                JAXBContext jc = JAXBContext.newInstance(CustomersType.class);
                SAXParserFactory spf = SAXParserFactory.newInstance();
                XMLReader xmlReader = spf.newSAXParser().getXMLReader();
                InputSource inputSource = new InputSource(
                        new InputStreamReader(item.getInputStream(), "UTF-8"));
                SAXSource source = new SAXSource(xmlReader, inputSource);
                Unmarshaller unmarshaller = jc.createUnmarshaller();
                CustomersType data2 = (CustomersType) unmarshaller.unmarshal(source);
                //System.out.println("size " + data2.getCustomer().size());
                size = data2.getCustomer().size();
                for (CustomerType customer : data2.getCustomer()) {
                    System.out.println(customer.toString());
                }

                // total of all orders
                double summ = 0.0;
                HashMap<Integer, Float> ordersMap = new HashMap<Integer, Float>();
                for (CustomerType customer : data2.getCustomer()) {
                    for (OrderType orderType : customer.getOrders().getOrder()) {
                        Float summPerOrder = 0.0f;
                        //System.out.println(orderType);
                        for (PositionType positionType : orderType.getPositions().getPosition()) {
                            //System.out.println(positionType);
                            summPerOrder += positionType.getCount() * positionType.getPrice();
                            summ += positionType.getCount() * positionType.getPrice();
                        }
                        ordersMap.put(orderType.getId(), summPerOrder);
                    }
                }
                summ = new BigDecimal(summ).setScale(2, RoundingMode.UP).doubleValue();
                System.out.println(" " + summ);
                result.add(" " + summ);

                // total per customer
                HashMap<Integer, Float> customersMap = new HashMap<Integer, Float>();
                for (CustomerType customer : data2.getCustomer()) {
                    Float summPerCust = 0.0f;
                    customersMap.put(customer.getId(), summPerCust);
                    for (OrderType orderType : customer.getOrders().getOrder()) {
                        for (PositionType positionType : orderType.getPositions().getPosition()) {
                            summPerCust += positionType.getCount() * positionType.getPrice();
                        }
                    }
                    //System.out.println(customer.getId() + " orders " + summPerCust);
                    customersMap.put(customer.getId(), summPerCust);
                }
                TreeMap sortedMap = sortByValue(customersMap);
                System.out.println(" " + sortedMap.keySet().toArray()[0] + " : "
                        + sortedMap.get(sortedMap.firstKey()));
                result.add(" " + sortedMap.keySet().toArray()[0] + " : "
                        + sortedMap.get(sortedMap.firstKey()));

                TreeMap sortedMapOrders = sortByValue(ordersMap);
                System.out.println(" " + sortedMapOrders.keySet().toArray()[0] + " : "
                        + sortedMapOrders.get(sortedMapOrders.firstKey()));
                result.add(" " + sortedMapOrders.keySet().toArray()[0] + " : "
                        + sortedMapOrders.get(sortedMapOrders.firstKey()));

                System.out.println(" "
                        + sortedMapOrders.keySet().toArray()[sortedMapOrders.keySet().toArray().length - 1]
                        + " : " + sortedMapOrders.get(sortedMapOrders.lastKey()));
                result.add(" "
                        + sortedMapOrders.keySet().toArray()[sortedMapOrders.keySet().toArray().length - 1]
                        + " : " + sortedMapOrders.get(sortedMapOrders.lastKey()));

                System.out.println(" " + sortedMapOrders.size());
                result.add(" " + sortedMapOrders.size());

                ArrayList<Float> floats = new ArrayList<Float>(sortedMapOrders.values());
                Float summAvg = 0.0f;
                Float avg = 0.0f;
                for (Float f : floats) {
                    summAvg += f;
                }
                avg = new BigDecimal(summAvg / floats.size()).setScale(2, RoundingMode.UP).floatValue();
                System.out.println(" " + avg);
                result.add(" " + avg);
            }
        } catch (FileUploadException e) {
            System.out.println("FileUploadException:- " + e.getMessage());
        } catch (JAXBException ex) {
            //Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (UnsupportedEncodingException ex) {
            Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (IOException ex) {
            Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (ParserConfigurationException ex) {
            Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (SAXException ex) {
            Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    ModelAndView modelAndView = new ModelAndView("fileuploadsuccess");
    modelAndView.addObject("files", result);
    modelAndView.addObject("name", fileName);
    modelAndView.addObject("size", size);
    return modelAndView;
}
From source file:uk.ac.leeds.ccg.andyt.projects.fluvialglacial.SlopeAreaAnalysis.java
public void PrintDataSummary(TreeMap<Integer, Object[]> allData) {
    Iterator<Integer> ite;
    ite = allData.keySet().iterator();
    int ID;
    Object[] data;
    ArrayList<Generic_XYNumericalData> theGeneric_XYNumericalData;
    Generic_XYNumericalData point;
    BigDecimal maxx;
    BigDecimal minx;
    BigDecimal maxy;
    BigDecimal miny;
    System.out.println("N, MaxX, MinX, MaxY, MinY");
    while (ite.hasNext()) {
        ID = ite.next();
        data = allData.get(ID);
        theGeneric_XYNumericalData = (ArrayList<Generic_XYNumericalData>) data[0];
        maxx = (BigDecimal) data[1];
        minx = (BigDecimal) data[2];
        maxy = (BigDecimal) data[3];
        miny = (BigDecimal) data[4];
        System.out.println("" + theGeneric_XYNumericalData.size() + ", " + maxx + ", " + minx + ", " + maxy
                + ", " + miny);
    }
}