List of usage examples for java.util.TreeMap.firstKey()
public K firstKey()
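firstKey() returns the lowest key currently in the map, and throws NoSuchElementException if the map is empty. A minimal, self-contained sketch before the real-world examples below:

    import java.util.TreeMap;

    public class FirstKeyDemo {
        public static void main(String[] args) {
            TreeMap<Integer, String> map = new TreeMap<>();
            map.put(3, "three");
            map.put(1, "one");
            map.put(2, "two");

            // keys are kept sorted, so the first key is the smallest
            System.out.println(map.firstKey()); // prints 1

            map.clear();
            // map.firstKey(); // would now throw NoSuchElementException
        }
    }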
From source file:Main.java
/**
 * @param r
 *            the original rectangle
 * @param includeReservedInsets
 *            if taskbar and other windowing insets should be included in the
 *            returned area
 * @return if there are multiple monitors, the bounds of the monitor other
 *         than the one the rectangle mostly covers, or null if there is
 *         just one screen
 */
public static Rectangle getOppositeFullScreenBoundsFor(Rectangle r, boolean includeReservedInsets) {
    GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment();
    TreeMap<Integer, Rectangle> prioMap = new TreeMap<Integer, Rectangle>();
    for (GraphicsDevice dev : ge.getScreenDevices()) {
        Rectangle bounds;
        if ((!includeReservedInsets) && dev == ge.getDefaultScreenDevice()) {
            bounds = ge.getMaximumWindowBounds();
        } else {
            bounds = dev.getDefaultConfiguration().getBounds();
        }
        Rectangle intersection = bounds.intersection(r);
        prioMap.put(intersection.width * intersection.height, bounds);
    }
    if (prioMap.size() <= 1) {
        return null;
    } else {
        // the first key is the smallest intersection area, i.e. the screen
        // the rectangle covers least
        return prioMap.get(prioMap.firstKey());
    }
}
From source file:cit360.sandbox.BackEndMenu.java
public static void ticketPrices() {
    TreeMap<String, Double> ageGroup = new TreeMap<String, Double>();

    // Add some age groups.
    ageGroup.put("Adult", 8.75);
    ageGroup.put("Child", 5.50);
    ageGroup.put("Senior Citizen", 5.25);
    ageGroup.put("Military Veteran", 5.00);

    // Iterate over all age groups, using the keySet method.
    for (String key : ageGroup.keySet())
        System.out.println(key + " - $" + ageGroup.get(key));
    System.out.println();

    System.out.println("Highest key: " + ageGroup.lastKey());
    System.out.println("Lowest key: " + ageGroup.firstKey());

    System.out.println("\nPrinting all values: ");
    for (Double val : ageGroup.values())
        System.out.println("$" + val);
    System.out.println();

    // Clear all values.
    ageGroup.clear();

    // Size is now zero.
    System.out.println("After clear operation, size: " + ageGroup.size());
}
From source file:edu.synth.state.SyntHelperState.java
public void handleObsData() throws IOException {
    File obsFile = new File(Constants.OBS_DATA);
    TreeMap<Double, Double> obs = FileWorker.getSortedDoubleData(obsFile);
    synthSettings.setStartSynth(new BigDecimal(obs.firstKey()).setScale(0, RoundingMode.DOWN).intValue());
    synthSettings.setEndSynth(new BigDecimal(obs.lastKey()).setScale(0, RoundingMode.UP).intValue());
    List<String> strs = new ArrayList<String>();
    for (Entry<Double, Double> ent : obs.entrySet())
        strs.add(String.format(Locale.ENGLISH, "%1.4f %1.4f", ent.getKey(), ent.getValue()));
    FileWorker.write(obsFile, strs);
}
From source file:nl.rivm.cib.episim.model.disease.infection.MSEIRSTest.java
public static Observable<Entry<Double, long[]>> stochasticSellke(final SIRConfig config, final double maxDt) {
    return Observable.create(sub -> {
        final double beta = config.reproduction() / config.recovery();
        final long[] y = config.population();
        final double[] T = config.t();
        final double dt = Double.isFinite(maxDt) && maxDt > 0 ? maxDt : T[1];

        final Long seed = config.seed();
        final RandomGenerator rng = new MersenneTwister(seed == null ? System.currentTimeMillis() : seed);
        final ExponentialDistribution resistanceDist = new ExponentialDistribution(rng, 1),
                recoverDist = new ExponentialDistribution(rng, config.recovery());

        // pending infections (mapping resistance -> amount)
        final TreeMap<Double, Integer> tInfect = IntStream.range(0, (int) y[0])
                .mapToObj(i -> resistanceDist.sample())
                .collect(Collectors.toMap(r -> r, r -> 1, Integer::sum, TreeMap::new));

        // pending recoveries (mapping time -> amount)
        final TreeMap<Double, Integer> tRecover = new TreeMap<>();

        double cumPres = 0;

        // Re-initialize infectives as susceptibles with zero resistance
        tInfect.put(cumPres, (int) y[1]);
        y[0] += y[1]; // I -> S
        y[1] -= y[1]; // I -> 0

        for (double t = T[0]; t < T[1];) {
            publishCopy(sub, t, y);
            final long localPopSize = y[0] + y[1] + y[2];
            final Double ri = tInfect.isEmpty() ? null : tInfect.firstKey(),
                    ti = ri == null ? null :
                            // now + remaining resistance per relative pressure
                            t + (ri - cumPres) / (beta * Math.max(y[1], 1) / localPopSize),
                    tr = tRecover.isEmpty() ? null : tRecover.firstKey();

            // time of next infection is earliest
            if (ti != null && (tr == null || ti < tr)) {
                final int ni = tInfect.remove(ri);
                cumPres = ri;

                // publish intermediate values
                for (double t1 = Math.min(ti, t + dt), tMax = Math.min(T[1], ti); t1 < tMax; t1 += dt)
                    publishCopy(sub, t1, y);

                // infect
                t = ti;
                y[0] -= ni; // from S
                y[1] += ni; // to I

                // schedule S_t recoveries at t+Exp(1/gamma)
                for (int i = 0; i < ni; i++)
                    tRecover.compute(t + recoverDist.sample(), (k, v) -> v == null ? 1 : v + 1);
            }
            // time of next recovery is earliest
            else if (tr != null) {
                final int nr = tRecover.remove(tr);
                if (ri != null)
                    // advance cumulative pressure by dt * relative pressure
                    cumPres += (tr - t) * beta * y[1] / localPopSize;

                // publish intermediate values
                for (double t1 = Math.min(tr, t + dt), tMax = Math.min(T[1], tr); t1 < tMax; t1 += dt)
                    publishCopy(sub, t1, y);

                // recover
                t = tr;
                y[1] -= nr; // from I
                y[2] += nr; // to R
            }
            // no events remaining
            else {
                // publish intermediate values
                for (double t1 = t + dt; t1 < T[1]; t1 += dt)
                    publishCopy(sub, t1, y);

                // time ends
                break;
            }
        }
        sub.onComplete();
    });
}
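Both pending-event maps above (tInfect and tRecover) use the same idiom: a TreeMap keyed by resistance or event time acts as a priority queue, and firstKey() peeks at the earliest pending event without removing it. A minimal sketch of that idiom, independent of the simulation above (the scheduled times are made up for illustration):

    import java.util.TreeMap;

    public class EventQueueSketch {
        public static void main(String[] args) {
            // event time -> number of events scheduled at that time
            TreeMap<Double, Integer> pending = new TreeMap<>();
            pending.merge(3.5, 1, Integer::sum);
            pending.merge(1.2, 2, Integer::sum);
            pending.merge(7.0, 1, Integer::sum);

            // drain events in chronological order
            while (!pending.isEmpty()) {
                double next = pending.firstKey(); // earliest pending time
                int count = pending.remove(next); // pop it
                System.out.println(count + " event(s) at t=" + next);
            }
        }
    }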
From source file:my.mavenproject10.FileuploadController.java
@RequestMapping(method = RequestMethod.POST)
ModelAndView upload(HttpServletRequest request, HttpServletResponse response) {
    boolean isMultipart = ServletFileUpload.isMultipartContent(request);
    String fileName = "";
    int size = 0;
    ArrayList<String> result = new ArrayList<String>();
    if (isMultipart) {
        FileItemFactory factory = new DiskFileItemFactory();
        ServletFileUpload upload = new ServletFileUpload(factory);
        try {
            List items = upload.parseRequest(request);
            Iterator iterator = items.iterator();
            while (iterator.hasNext()) {
                FileItem item = (FileItem) iterator.next();
                fileName = item.getName();
                System.out.println("file name " + item.getName());

                JAXBContext jc = JAXBContext.newInstance(CustomersType.class);
                SAXParserFactory spf = SAXParserFactory.newInstance();
                XMLReader xmlReader = spf.newSAXParser().getXMLReader();
                InputSource inputSource = new InputSource(
                        new InputStreamReader(item.getInputStream(), "UTF-8"));
                SAXSource source = new SAXSource(xmlReader, inputSource);
                Unmarshaller unmarshaller = jc.createUnmarshaller();
                CustomersType data2 = (CustomersType) unmarshaller.unmarshal(source);
                //System.out.println("size " + data2.getCustomer().size());
                size = data2.getCustomer().size();
                for (CustomerType customer : data2.getCustomer()) {
                    System.out.println(customer.toString());
                }

                // total over all positions, and total per order
                double summ = 0.0;
                HashMap<Integer, Float> ordersMap = new HashMap<Integer, Float>();
                for (CustomerType customer : data2.getCustomer()) {
                    for (OrderType orderType : customer.getOrders().getOrder()) {
                        Float summPerOrder = 0.0f;
                        //System.out.println(orderType);
                        for (PositionType positionType : orderType.getPositions().getPosition()) {
                            //System.out.println(positionType);
                            summPerOrder += positionType.getCount() * positionType.getPrice();
                            summ += positionType.getCount() * positionType.getPrice();
                        }
                        ordersMap.put(orderType.getId(), summPerOrder);
                    }
                }
                summ = new BigDecimal(summ).setScale(2, RoundingMode.UP).doubleValue();
                System.out.println(" " + summ);
                result.add(" " + summ);

                // total per customer
                HashMap<Integer, Float> customersMap = new HashMap<Integer, Float>();
                for (CustomerType customer : data2.getCustomer()) {
                    Float summPerCust = 0.0f;
                    customersMap.put(customer.getId(), summPerCust);
                    for (OrderType orderType : customer.getOrders().getOrder()) {
                        for (PositionType positionType : orderType.getPositions().getPosition()) {
                            summPerCust += positionType.getCount() * positionType.getPrice();
                        }
                    }
                    //System.out.println(customer.getId() + " orders " + summPerCust);
                    customersMap.put(customer.getId(), summPerCust);
                }
                TreeMap sortedMap = sortByValue(customersMap);
                // first entry of the customer map sorted by value
                System.out.println(" " + sortedMap.keySet().toArray()[0] + " : "
                        + sortedMap.get(sortedMap.firstKey()));
                result.add(" " + sortedMap.keySet().toArray()[0] + " : "
                        + sortedMap.get(sortedMap.firstKey()));

                // first entry of the orders map sorted by value
                TreeMap sortedMapOrders = sortByValue(ordersMap);
                System.out.println(" " + sortedMapOrders.keySet().toArray()[0] + " : "
                        + sortedMapOrders.get(sortedMapOrders.firstKey()));
                result.add(" " + sortedMapOrders.keySet().toArray()[0] + " : "
                        + sortedMapOrders.get(sortedMapOrders.firstKey()));

                // last entry of the orders map sorted by value
                System.out.println(" "
                        + sortedMapOrders.keySet().toArray()[sortedMapOrders.keySet().toArray().length - 1]
                        + " : " + sortedMapOrders.get(sortedMapOrders.lastKey()));
                result.add(" "
                        + sortedMapOrders.keySet().toArray()[sortedMapOrders.keySet().toArray().length - 1]
                        + " : " + sortedMapOrders.get(sortedMapOrders.lastKey()));

                // number of orders
                System.out.println(" " + sortedMapOrders.size());
                result.add(" " + sortedMapOrders.size());

                // average order total
                ArrayList<Float> floats = new ArrayList<Float>(sortedMapOrders.values());
                Float summAvg = 0.0f;
                Float avg = 0.0f;
                for (Float f : floats) {
                    summAvg += f;
                }
                avg = new BigDecimal(summAvg / floats.size()).setScale(2, RoundingMode.UP).floatValue();
                System.out.println(" " + avg);
                result.add(" " + avg);
            }
        } catch (FileUploadException e) {
            System.out.println("FileUploadException:- " + e.getMessage());
        } catch (JAXBException ex) {
            //Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (UnsupportedEncodingException ex) {
            Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (IOException ex) {
            Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (ParserConfigurationException ex) {
            Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (SAXException ex) {
            Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    ModelAndView modelAndView = new ModelAndView("fileuploadsuccess");
    modelAndView.addObject("files", result);
    modelAndView.addObject("name", fileName);
    modelAndView.addObject("size", size);
    return modelAndView;
}
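The controller relies on a sortByValue helper that is not shown. One plausible implementation (an assumption, not the project's actual code) is a TreeMap whose comparator orders keys by their mapped value, so that firstKey() yields the key with the smallest value and lastKey() the largest:

    import java.util.Comparator;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    public class SortByValueSketch {

        // Hypothetical sortByValue: orders keys by their value in the source map,
        // tie-breaking on the key itself so equal values do not collide in the TreeMap.
        static TreeMap<Integer, Float> sortByValue(Map<Integer, Float> source) {
            Comparator<Integer> byValue = Comparator
                    .comparing((Integer k) -> source.get(k))
                    .thenComparing(k -> k);
            TreeMap<Integer, Float> sorted = new TreeMap<>(byValue);
            sorted.putAll(source);
            return sorted;
        }

        public static void main(String[] args) {
            Map<Integer, Float> orderTotals = new HashMap<>();
            orderTotals.put(101, 25.0f);
            orderTotals.put(102, 10.0f);
            orderTotals.put(103, 40.0f);

            TreeMap<Integer, Float> sorted = sortByValue(orderTotals);
            // cheapest and most expensive order
            System.out.println(sorted.firstKey() + " : " + sorted.get(sorted.firstKey())); // 102 : 10.0
            System.out.println(sorted.lastKey() + " : " + sorted.get(sorted.lastKey()));   // 103 : 40.0
        }
    }

With such a comparator the keySet iteration order matches the value order, which is why sortedMap.keySet().toArray()[0] and sortedMap.firstKey() in the controller refer to the same entry.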
From source file:edu.utexas.cs.tactex.subscriptionspredictors.LWRCustOldAppache.java
/**
 * @param candidateEval
 * @param e2n
 * @return
 */
@Override
public Double predictNumSubs(double candidateEval, TreeMap<Double, Double> e2n, CustomerInfo customer,
        int timeslot) {
    // The tree map guarantees that keys are unique, so we should be able
    // to run LWR if there are at least 3 entries (even 2).

    // LWR: run n-fold cross validation with different bandwidths
    double min = e2n.firstKey();
    double max = e2n.lastKey();
    ArrayRealVector xVec = createNormalizedXVector(e2n.keySet(), min, max);
    ArrayRealVector yVec = createYVector(e2n.values());

    double bestTau = Double.MAX_VALUE;
    double bestMSE = Double.MAX_VALUE;
    ArrayList<Double> candidateTaus = new ArrayList<Double>();
    //candidateTaus.add(0.025 * SQUEEZE);
    candidateTaus.add(0.05);// * SQUEEZE);
    candidateTaus.add(0.1);// * SQUEEZE);
    candidateTaus.add(0.2);// * SQUEEZE);
    candidateTaus.add(0.3);// * SQUEEZE);
    candidateTaus.add(0.4);// * SQUEEZE);
    candidateTaus.add(0.5);// * SQUEEZE);
    candidateTaus.add(0.6);// * SQUEEZE);
    candidateTaus.add(0.7);// * SQUEEZE);
    candidateTaus.add(0.8);// * SQUEEZE);
    candidateTaus.add(0.9);// * SQUEEZE);
    candidateTaus.add(1.0);// * SQUEEZE);
    for (Double tau : candidateTaus) {
        Double mse = CrossValidationError(tau, xVec, yVec);
        if (null == mse) {
            log.error(" cp cross-validation failed, return null");
            return null;
        }
        if (mse < bestMSE) {
            bestMSE = mse;
            bestTau = tau;
        }
    }
    log.info(" cp LWR bestTau " + bestTau);
    double x0 = candidateEval;
    Double prediction = LWRPredict(xVec, yVec, normalizeX(x0, min, max), bestTau);
    if (null == prediction) {
        log.error("LWR passed CV but cannot predict on new point. falling back to interpolateOrNN()");
        log.error("e2n: " + e2n.toString());
        log.error("candidateEval " + candidateEval);
        return null;
    }
    // truncate to int, and cannot be negative
    return Math.max(0, (double) (int) (double) prediction);
}
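Here firstKey() and lastKey() serve as O(log n) min/max lookups that anchor the normalization of the x-values. A small sketch of that idiom (normalizedKeys is a hypothetical helper, not the project's createNormalizedXVector):

    import java.util.Arrays;
    import java.util.TreeMap;

    public class NormalizeSketch {
        // scale each key of the map into [0, 1] using the sorted map's min and max
        static double[] normalizedKeys(TreeMap<Double, Double> map) {
            double min = map.firstKey();
            double max = map.lastKey();
            double[] out = new double[map.size()];
            int i = 0;
            for (double k : map.keySet()) {
                out[i++] = (k - min) / (max - min); // assumes max > min
            }
            return out;
        }

        public static void main(String[] args) {
            TreeMap<Double, Double> e2n = new TreeMap<>();
            e2n.put(10.0, 100.0);
            e2n.put(20.0, 150.0);
            e2n.put(30.0, 300.0);
            System.out.println(Arrays.toString(normalizedKeys(e2n)));
            // prints [0.0, 0.5, 1.0]
        }
    }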
From source file:com.compomics.pride_asa_pipeline.core.logic.modification.PTMMapper.java
public PtmSettings removeDuplicateMasses(PtmSettings modProfile, double precursorMassAcc) {
    TreeMap<Double, String> massToModMap = new TreeMap<>();
    for (String aModName : modProfile.getAllModifications()) {
        massToModMap.put(modProfile.getPtm(aModName).getMass(), aModName);
    }
    double previousMass = massToModMap.firstKey() - precursorMassAcc;
    for (Double aModMass : massToModMap.keySet()) {
        if (Math.abs(aModMass - previousMass) < precursorMassAcc) {
            String originalModification = massToModMap.get(previousMass);
            String duplicateModification = massToModMap.get(aModMass);
            if (originalModification != null) {
                System.out.println("Duplicate masses found : " + originalModification + "(" + previousMass
                        + ")" + " vs " + duplicateModification + "(" + aModMass + ")");
                if (modProfile.getFixedModifications().contains(duplicateModification)) {
                    modProfile.removeFixedModification(duplicateModification);
                } else {
                    modProfile.removeVariableModification(duplicateModification);
                }
            }
        }
        previousMass = aModMass;
    }
    return modProfile;
}
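The method above seeds previousMass with firstKey() minus the tolerance so the very first key is never flagged as a duplicate, then scans consecutive sorted keys for pairs closer than the tolerance. A stripped-down sketch of that consecutive-key scan (the masses and names are illustrative only, not taken from the library above):

    import java.util.TreeMap;

    public class NearDuplicateSketch {
        public static void main(String[] args) {
            TreeMap<Double, String> byMass = new TreeMap<>();
            byMass.put(15.9949, "Oxidation");
            byMass.put(15.9950, "Hydroxylation"); // within tolerance of the previous entry
            byMass.put(57.0215, "Carbamidomethyl");

            double tolerance = 0.001;
            // sentinel strictly more than one tolerance below the first key,
            // so the first iteration can never match (avoids FP rounding issues)
            double previous = byMass.firstKey() - 2 * tolerance;
            for (double mass : byMass.keySet()) {
                if (mass - previous < tolerance) {
                    System.out.println("near-duplicate: " + byMass.get(previous) + " vs " + byMass.get(mass));
                }
                previous = mass;
            }
            // prints: near-duplicate: Oxidation vs Hydroxylation
        }
    }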
From source file:org.mahasen.util.SearchUtil.java
/**
 * @param propertyTreeId
 * @param initialValue
 * @param lastValue
 * @return
 * @throws InterruptedException
 * @throws MahasenException
 */
private Vector<Id> getResourceIdVector(Id propertyTreeId, String initialValue, String lastValue)
        throws InterruptedException, MahasenException {
    Vector<Id> resultantIds = new Vector<Id>();
    TreeMap propertyTree = mahasenManager.lookupPropertyTreeDHT(propertyTreeId);

    if (propertyTree == null) {
        throw new MahasenException("Property not found");
    } else {
        if (propertyTree.firstKey() instanceof String) {
            System.out.println("this is the property tree " + propertyTree);
            NavigableMap<String, Vector<Id>> resultMap = propertyTree.subMap(initialValue.toLowerCase(), true,
                    lastValue.toLowerCase(), true);
            Iterator keys = resultMap.keySet().iterator();
            while (keys.hasNext()) {
                resultantIds.addAll(resultMap.get(keys.next()));
            }
        } else if (propertyTree.firstKey() instanceof Integer) {
            System.out.println("this is the property tree " + propertyTree);
            NavigableMap<Integer, Vector<Id>> resultMap = propertyTree.subMap(Integer.valueOf(initialValue),
                    true, Integer.valueOf(lastValue), true);
            Iterator keys = resultMap.keySet().iterator();
            while (keys.hasNext()) {
                resultantIds.addAll(resultMap.get(keys.next()));
            }
        }
    }
    return resultantIds;
}
From source file:org.mahasen.util.SearchUtil.java
/**
 * @param propertyTreeId
 * @param propertyValue
 * @return
 * @throws InterruptedException
 * @throws MahasenException
 */
private Vector<Id> getResourceIdVector(Id propertyTreeId, String propertyValue)
        throws InterruptedException, MahasenException {
    TreeMap<?, Vector<Id>> propertyTree = mahasenManager.lookupPropertyTreeDHT(propertyTreeId);
    if (propertyTree == null) {
        throw new MahasenException("Property not found");
    } else {
        if (propertyTree.firstKey() instanceof String) {
            return propertyTree.get(propertyValue.toLowerCase());
        } else if (propertyTree.firstKey() instanceof Integer) {
            return propertyTree.get(Integer.valueOf(propertyValue));
        }
    }
    System.out.println("this is the property tree " + propertyTree);
    return null;
}
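Both SearchUtil examples use firstKey() to sniff the runtime key type of an untyped map before dispatching the lookup. Note that firstKey() throws NoSuchElementException on an empty map, a case the methods above do not guard. A minimal sketch of the same dispatch pattern with that guard added (types simplified, not the Mahasen API):

    import java.util.TreeMap;

    public class KeyTypeDispatchSketch {

        // hypothetical lookup: inspect the first key to decide how to convert the query
        static Object lookup(TreeMap<Object, Object> tree, String value) {
            if (tree.isEmpty()) {
                return null; // firstKey() on an empty TreeMap would throw NoSuchElementException
            }
            if (tree.firstKey() instanceof String) {
                return tree.get(value.toLowerCase());
            } else if (tree.firstKey() instanceof Integer) {
                return tree.get(Integer.valueOf(value));
            }
            return null;
        }

        public static void main(String[] args) {
            TreeMap<Object, Object> tree = new TreeMap<>();
            tree.put(42, "answer");
            System.out.println(lookup(tree, "42")); // prints: answer
        }
    }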
From source file:gov.usgs.anss.query.MultiplexedMSOutputer.java
/**
 * This does the hard work of sorting - called as a shutdown hook.
 * TODO: consider recursion.
 * @param outputName name for the output file.
 * @param files list of MiniSEED files to multiplex.
 * @param cleanup flag indicating whether to clean up after ourselves or not.
 * @throws IOException
 */
public static void multiplexFiles(String outputName, List<File> files, boolean cleanup, boolean allowEmpty)
        throws IOException {
    ArrayList<File> cleanupFiles = new ArrayList<File>(files);
    ArrayList<File> moreFiles = new ArrayList<File>();
    File outputFile = new File(outputName);
    File tempOutputFile = new File(outputName + ".tmp");

    do {
        // Checks whether we're in a subsequent (i.e. not the first) iteration
        // with more files left to process.
        if (!moreFiles.isEmpty()) {
            logger.info("more files left to multiplex...");
            FileUtils.deleteQuietly(tempOutputFile);
            FileUtils.moveFile(outputFile, tempOutputFile);
            cleanupFiles.add(tempOutputFile);
            moreFiles.add(tempOutputFile);
            files = moreFiles;
            moreFiles = new ArrayList<File>();
        }

        logger.log(Level.FINE, "Multiplexing blocks from {0} temp files to {1}",
                new Object[] { files.size(), outputName });
        BufferedOutputStream out = new BufferedOutputStream(FileUtils.openOutputStream(outputFile));

        // The hard part, sorting the temp files...
        TreeMap<MiniSeed, FileInputStream> blks = new TreeMap<MiniSeed, FileInputStream>(
                new MiniSeedTimeOnlyComparator());

        // Prime the TreeMap
        logger.log(Level.FINEST, "Priming the TreeMap with files: {0}", files);
        for (File file : files) {
            logger.log(Level.INFO, "Reading first block from {0}", file.toString());
            try {
                FileInputStream fs = FileUtils.openInputStream(file);
                MiniSeed ms = getNextValidMiniSeed(fs, allowEmpty);
                if (ms != null) {
                    blks.put(ms, fs);
                } else {
                    logger.log(Level.WARNING, "Failed to read valid MiniSEED block from {0}", file.toString());
                }
            } catch (IOException ex) {
                // Catch "Too many open files" i.e. hitting ulimit, throw anything else.
                if (ex.getMessage().contains("Too many open files")) {
                    logger.log(Level.INFO, "Too many open files - {0} deferred.", file.toString());
                    moreFiles.add(file);
                } else
                    throw ex;
            }
        }

        while (!blks.isEmpty()) {
            MiniSeed next = blks.firstKey();
            out.write(next.getBuf(), 0, next.getBlockSize());
            FileInputStream fs = blks.remove(next);
            next = getNextValidMiniSeed(fs, allowEmpty);
            if (next != null) {
                blks.put(next, fs);
            } else {
                fs.close();
            }
        }
        out.close();
    } while (!moreFiles.isEmpty());

    if (cleanup) {
        logger.log(Level.INFO, "Cleaning up...");
        for (File file : cleanupFiles) {
            FileUtils.deleteQuietly(file);
        }
    }
}
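The example above is a k-way merge: the TreeMap holds one "head" block per open file, sorted by time, so firstKey() always yields the earliest remaining block, which is written out and replaced by the next block from the same file. A minimal sketch of the same merge pattern over in-memory iterators (a standalone illustration, not the USGS API):

    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;
    import java.util.TreeMap;

    public class KWayMergeSketch {
        public static void main(String[] args) {
            List<List<Integer>> sortedSources = Arrays.asList(
                    Arrays.asList(1, 4, 9),
                    Arrays.asList(2, 3, 8),
                    Arrays.asList(5, 6, 7));

            // head element of each source -> iterator that produced it
            // note: duplicate keys would collide; like the original, this relies
            // on the comparator distinguishing the heads (here, distinct values)
            TreeMap<Integer, Iterator<Integer>> heads = new TreeMap<>();
            for (List<Integer> source : sortedSources) {
                Iterator<Integer> it = source.iterator();
                if (it.hasNext()) {
                    heads.put(it.next(), it);
                }
            }

            // repeatedly emit the smallest head and refill from its source
            while (!heads.isEmpty()) {
                Integer smallest = heads.firstKey();
                Iterator<Integer> it = heads.remove(smallest);
                System.out.print(smallest + " ");
                if (it.hasNext()) {
                    heads.put(it.next(), it);
                }
            }
            // prints: 1 2 3 4 5 6 7 8 9
        }
    }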