List of usage examples for java.util TreeMap get
public V get(Object key)
From source file:org.apache.hadoop.hbase.TestScanner2.java
/**
 * Scans the meta rows reachable from the given table and collects the
 * {@link HRegionInfo} of every row returned, logging the serving server
 * name and start code for each region.
 *
 * @param t table whose region location is used to open the scanner
 * @return the region descriptors found by the scan
 * @throws IOException if opening or advancing the scanner fails
 */
private List<HRegionInfo> scan(final HTable t) throws IOException {
    final List<HRegionInfo> found = new ArrayList<HRegionInfo>();
    HRegionInterface server = null;
    long sid = -1L;
    try {
        final HRegionLocation location = t.getRegionLocation(t.getTableName());
        server = t.getConnection().getHRegionConnection(location.getServerAddress());
        sid = server.openScanner(location.getRegionInfo().getRegionName(), HConstants.COLUMN_FAMILY_ARRAY,
                new Text(), System.currentTimeMillis(), null);
        for (;;) {
            final MapWritable batch = server.next(sid);
            if (batch == null || batch.size() == 0) {
                break; // scanner exhausted
            }
            // Collapse the writable key/value pairs into a column -> bytes row view.
            final TreeMap<Text, byte[]> row = new TreeMap<Text, byte[]>();
            for (Map.Entry<Writable, Writable> cell : batch.entrySet()) {
                final HStoreKey storeKey = (HStoreKey) cell.getKey();
                row.put(storeKey.getColumn(), ((ImmutableBytesWritable) cell.getValue()).get());
            }
            final HRegionInfo info = (HRegionInfo) Writables.getWritable(row.get(HConstants.COL_REGIONINFO),
                    new HRegionInfo());
            final String serverName = Writables.bytesToString(row.get(HConstants.COL_SERVER));
            final long startCode = Writables.bytesToLong(row.get(HConstants.COL_STARTCODE));
            LOG.info(Thread.currentThread().getName() + " scanner: " + Long.valueOf(sid) + ": regioninfo: {"
                    + info.toString() + "}, server: " + serverName + ", startCode: " + startCode);
            found.add(info);
        }
    } finally {
        try {
            // Only close a scanner that was actually opened on a live connection.
            if (sid != -1L && server != null) {
                server.close(sid);
            }
        } catch (IOException e) {
            LOG.error(e);
        }
    }
    return found;
}
From source file:com.opengamma.analytics.financial.provider.calculator.discounting.CashFlowEquivalentCalculator.java
/**
 * Computes the cash flow equivalent of a fixed coupon bond by merging the
 * nominal and coupon cash flows into a single time-ordered fixed annuity.
 *
 * @param bond the bond, not null
 * @param multicurves the multi-curves provider, not null
 * @return the aggregated fixed cash flows, sorted by payment time
 */
@Override
public AnnuityPaymentFixed visitBondFixedSecurity(final BondFixedSecurity bond, final MulticurveProviderInterface multicurves) {
    ArgumentChecker.notNull(bond, "Bond");
    ArgumentChecker.notNull(multicurves, "Multicurves provider");
    final Currency ccy = bond.getCurrency();
    // Payment time -> aggregated amount; TreeMap keeps flows sorted by time.
    final TreeMap<Double, Double> flow = new TreeMap<>();
    final AnnuityPaymentFixed cfeNom = bond.getNominal().accept(this, multicurves);
    final AnnuityPaymentFixed cfeCpn = bond.getCoupon().accept(this, multicurves);
    for (final PaymentFixed p : cfeNom.getPayments()) {
        flow.put(p.getPaymentTime(), p.getAmount());
    }
    for (final PaymentFixed p : cfeCpn.getPayments()) {
        addcf(flow, p.getPaymentTime(), p.getAmount());
    }
    final PaymentFixed[] agregatedCfe = new PaymentFixed[flow.size()];
    int loopcf = 0;
    // Iterate entries directly instead of keySet() + get() per key.
    for (final java.util.Map.Entry<Double, Double> entry : flow.entrySet()) {
        agregatedCfe[loopcf++] = new PaymentFixed(ccy, entry.getKey(), entry.getValue());
    }
    return new AnnuityPaymentFixed(agregatedCfe);
}
From source file:it.uniroma2.sag.kelp.learningalgorithm.clustering.kernelbasedkmeans.KernelBasedKMeansEngine.java
/** * Count the reassignment as a stopping criteria for the algorithm * /* w w w . j a v a 2s . co m*/ * @param exampleIdToClusterMap * The map of assignment for the previous iteration * @param clusterList * The actual clusters * @return */ private int countReassigment(TreeMap<Long, Integer> exampleIdToClusterMap, List<Cluster> clusterList) { int reassignment = 0; TreeMap<Long, Integer> currentExampleIdToClusterMap = new TreeMap<Long, Integer>(); int clusterId = 0; for (Cluster cluster : clusterList) { for (ClusterExample clusterExample : cluster.getExamples()) { currentExampleIdToClusterMap.put(clusterExample.getExample().getId(), clusterId); } clusterId++; } for (Long currentExId : currentExampleIdToClusterMap.keySet()) { if (exampleIdToClusterMap.get(currentExId).intValue() != currentExampleIdToClusterMap.get(currentExId) .intValue()) reassignment++; } return reassignment; }
From source file:com.npower.wurfl.ListManager.java
/** * Find WurflDevice by brand//from ww w . j a v a 2 s .c om * @param manufacturer * @param modelExtID * @param lm * @return */ public WurflDevice getDeviceByBrand(String manufacturer, String modelExtID) { if (StringUtils.isEmpty(manufacturer) || StringUtils.isEmpty(modelExtID)) { return null; } // Translate manufacturer = translateBrandName(manufacturer); modelExtID = translateModelName(manufacturer, modelExtID); // Initializing if (modelBrandMap.isEmpty()) { // Generate brand Map TreeMap<String, WurflDevice> load = this.getActualDeviceElementsList(); Iterator<String> keys = load.keySet().iterator(); while (keys.hasNext()) { String key = keys.next(); WurflDevice wd = load.get(key); if (!modelBrandMap.containsKey(wd.getBrandName().toLowerCase())) { modelBrandMap.put(wd.getBrandName().toLowerCase(), new HashMap<String, WurflDevice>()); } Map<String, WurflDevice> modelOfBrandMap = modelBrandMap.get(wd.getBrandName().toLowerCase()); if (!modelOfBrandMap.containsKey(wd.getModelName().toLowerCase())) { modelOfBrandMap.put(wd.getModelName().toLowerCase(), wd); } } } // Retrieve if (modelBrandMap.containsKey(manufacturer.trim().toLowerCase())) { Map<String, WurflDevice> modelOfBrandMap = modelBrandMap.get(manufacturer.trim().toLowerCase()); WurflDevice device = modelOfBrandMap.get(modelExtID.trim().toLowerCase()); return device; } return null; /* TreeMap<String, WurflDevice> load = this.getActualDeviceElementsList(); Iterator<String> keys = load.keySet().iterator(); WurflDevice foundDevice = null; while (keys.hasNext()) { String key = keys.next(); WurflDevice wd = load.get(key); if (manufacturer.equalsIgnoreCase(wd.getBrandName()) && modelExtID.equalsIgnoreCase(wd.getModelName())) { foundDevice = wd; } } return foundDevice; */ }
From source file:com.acc.test.orders.AcceleratorTestOrderData.java
/**
 * Builds a map from product code to total ordered quantity across all order
 * entries, skipping entries without a product.
 *
 * @param order the order whose entries are aggregated
 * @return product code -> summed quantity, sorted by product code
 */
protected Map<String, Long> getEntryQuantityMap(final OrderModel order) {
    final TreeMap<String, Long> result = new TreeMap<String, Long>();
    for (final AbstractOrderEntryModel entry : order.getEntries()) {
        final ProductModel product = entry.getProduct();
        if (product != null) {
            final String productCode = product.getCode();
            // Single lookup instead of containsKey + get; first insert stores
            // the entry quantity as-is (preserving original behavior).
            final Long existing = result.get(productCode);
            final Long quantity = entry.getQuantity();
            if (existing == null) {
                result.put(productCode, quantity);
            } else {
                result.put(productCode, Long.valueOf(existing.longValue() + quantity.longValue()));
            }
        }
    }
    return result;
}
From source file:com.opengamma.analytics.financial.provider.calculator.discounting.CashFlowEquivalentCalculator.java
@Override public AnnuityPaymentFixed visitSwap(final Swap<?, ?> swap, final MulticurveProviderInterface multicurves) { ArgumentChecker.notNull(swap, "Swap"); ArgumentChecker.notNull(multicurves, "Multicurves provider"); final Currency ccy = swap.getFirstLeg().getCurrency(); Validate.isTrue(ccy.equals(swap.getSecondLeg().getCurrency()), "Cash flow equivalent available only for single currency swaps."); final TreeMap<Double, Double> flow = new TreeMap<>(); final AnnuityPaymentFixed cfeLeg1 = swap.getFirstLeg().accept(this, multicurves); final AnnuityPaymentFixed cfeLeg2 = swap.getSecondLeg().accept(this, multicurves); for (final PaymentFixed p : cfeLeg1.getPayments()) { flow.put(p.getPaymentTime(), p.getAmount()); }//from ww w . j a v a2s.c o m for (final PaymentFixed p : cfeLeg2.getPayments()) { addcf(flow, p.getPaymentTime(), p.getAmount()); } final PaymentFixed[] agregatedCfe = new PaymentFixed[flow.size()]; int loopcf = 0; for (final double time : flow.keySet()) { agregatedCfe[loopcf++] = new PaymentFixed(ccy, time, flow.get(time)); } return new AnnuityPaymentFixed(agregatedCfe); }
From source file:de.tudarmstadt.ukp.uby.integration.alignment.xml.transform.sensealignments.FnWnSenseAlignmentXml.java
/**
 * Collect UBY SenseIds for the aligned senses based on synsetId and lemma
 * for WordNet and based on lexical unit id for FrameNet, then write them
 * out as alignment XML via {@code writer} (which is closed at the end).
 *
 * @throws IOException if reading the alignment file or writing the XML fails
 */
@Override
public void toAlignmentXml(XmlMeta metadata) throws IOException {
    System.err.println("to Alignment Xml");
    // FrameNet sense id -> Source element (sorted for deterministic output).
    TreeMap<String, Source> sourceMap = new TreeMap<>();
    List<String[]> data = null;
    data = readAlignmentFile();
    int counter = 0; // input sense pairs
    int found = 0; // output sense pairs
    // iterate over alignment entries: d[0] = FrameNet lexical unit id,
    // d[1] = WordNet synset id
    for (String[] d : data) {
        counter++;
        // show progress:
        if ((counter % 1000) == 0) {
            logger.info("# processed alignments: " + counter);
        }
        // use FrameNet sense externalReference (lexical unit Id)
        String fnSenseId = d[0];
        // SOURCE: reuse an existing Source for this id, or start a new one.
        Source source = null;
        if (sourceMap.containsKey(fnSenseId)) {
            source = sourceMap.get(fnSenseId);
        } else {
            source = new Source();
        }
        source.ref = fnSenseId;
        // NOTE(review): targets is rebuilt from scratch each iteration and
        // assigned below, so a repeated fnSenseId REPLACES (not appends to)
        // the targets collected earlier — confirm this is intended.
        List<Target> targets = new LinkedList<Target>();
        // get WordNet sense by Synset Offset and Lemma
        List<Sense> wnSenses = uby.getSensesByWNSynsetId(d[1]);
        // List<Sense> wnSenses = uby.wordNetSenses(partOfSpeech, offset);
        for (Sense wnSense : wnSenses) {
            Target target = new Target();
            target.ref = wnSense.getId();
            Decision decision = new Decision();
            decision.confidence = SenseAlignmentGenericXml.DEFAULTCONFSCORE;
            decision.value = true;
            // decision.src = metadata.decisiontypes.get(0).name;
            target.decision = decision;
            targets.add(target);
            found++;
        }
        // Only emit sources that resolved to at least one WordNet sense.
        if (targets.size() > 0) {
            source.targets = targets;
            sourceMap.put(source.ref, source);
        }
    }
    writer.writeMetaData(metadata);
    Alignments alignments = new Alignments();
    alignments.source = new LinkedList<>();
    alignments.source.addAll(sourceMap.values());
    writer.writeAlignments(alignments);
    writer.close();
    System.err.println("Alignments in: " + counter + " OUT" + found);
    logger.info("Alignments in: " + counter + "Alignments out: " + found);
}
From source file:com.sec.ose.osi.report.standard.data.BillOfMaterialsRowGenerator.java
private String getFileCountForFolders(ArrayList<IdentifiedFilesRow> fileEntList) { TreeMap<String, Integer> map = new TreeMap<String, Integer>(); // parent path, value if (fileEntList == null || fileEntList.size() == 0) return "<None>"; for (IdentifiedFilesRow ent : fileEntList) { String parentPath = (new File(ent.getFullPath())).getParent(); if (parentPath == null) parentPath = ""; if (map.containsKey(parentPath) == false) { map.put(parentPath, 0);/*w ww . j a v a 2 s . c o m*/ } map.put(parentPath, map.get(parentPath) + 1); } if (map.size() == 0) return ""; if (map.size() == 1) return ("(" + map.get(map.firstKey()) + " files)\n"); String msg = ""; for (String path : map.keySet()) { msg += path; if (!path.endsWith("/")) msg += "/ "; msg += "(" + map.get(path) + " files)\n"; } msg = msg.replace("\\", "/"); if (msg.length() > 0) { return msg.substring(0, msg.length() - 1); } return ""; }
From source file:de.dfki.km.perspecting.obie.model.Document.java
/**
 * Groups the document's sentence tokens into per-sentence token sequences,
 * returned in ascending sentence-index order.
 *
 * @return one token sequence per sentence index found in the data
 */
public List<TokenSequence<Integer>> getSentences() {
    // sentence index -> its token sequence; TreeMap keeps sentences ordered.
    TreeMap<Integer, TokenSequence<Integer>> byIndex = new TreeMap<Integer, TokenSequence<Integer>>();
    for (Entry<String, Integer> entry : this.data.integerEntries(TokenSequence.SENTENCE)) {
        Integer sentenceIndex = entry.getValue();
        TokenSequence<Integer> seq = byIndex.get(sentenceIndex);
        if (seq == null) {
            seq = new TokenSequence<Integer>(sentenceIndex);
            byIndex.put(sentenceIndex, seq);
        }
        // Entry key is the token's character start offset, encoded as a string.
        int tokenStart = Integer.parseInt(entry.getKey());
        seq.addToken(new Token(tokenStart, this));
    }
    return new ArrayList<TokenSequence<Integer>>(byIndex.values());
}
From source file:nars.predict.RNNBeliefPrediction.java
/**
 * Builds a training sample from recent (non-eternal, non-future) beliefs of
 * the tracked concepts and runs one training pass of the recurrent network.
 * Sets {@code data} to null and returns early when fewer than two time
 * frames of belief data exist.
 */
@Override
protected void train() {
    // Frame index (occurrence time / downSample) -> input activation vector.
    // NOTE(review): raw TreeMap — should be TreeMap<Integer, double[]> for type safety.
    TreeMap<Integer, double[]> d = new TreeMap();
    int cc = 0; // concept index == column in the input vector
    // Half-width (in frames) of the smoothing window around each belief's time.
    int hd = Math.round(predictionTimeSpanFactor * nar.memory.getDuration() / 2f / downSample);
    for (Concept c : concepts) {
        for (Sentence s : c.beliefs) {
            if (s.isEternal()) {
                continue; // only temporal beliefs carry a usable time stamp
            }
            int o = (int) Math.round(((double) s.getOccurenceTime()) / ((double) downSample));
            if (o > nar.time()) {
                continue; //non-future beliefs
            }
            // Spread this belief's evidence over the window [o-hd, o+hd].
            for (int oc = o - hd; oc <= o + hd; oc++) {
                double[] x = d.get(oc);
                if (x == null) {
                    x = new double[inputSize];
                    d.put(oc, x);
                }
                // Map frequency [0,1] to signed [-1,1], weight by confidence.
                float freq = 2f * (s.truth.getFrequency() - 0.5f);
                float conf = s.truth.getConfidence();
                if (freq < 0) {
                    // empty on purpose in the original — presumably a leftover
                    // debug hook; TODO confirm and remove
                }
                x[cc] += freq * conf;
            }
        }
        cc++;
    }
    if (d.size() < 2) {
        data = null; // not enough frames to form an (input, target) pair
        return;
    }
    data = new SampleSet();
    // Clamp the training window to the most recent maxDataFrames frames.
    int first = d.firstKey();
    int last = (int) nar.time();
    if (last - first > maxDataFrames * downSample) {
        first = last - maxDataFrames * downSample;
    }
    int frames = (int) (last - first);
    int bsize = getInputSize() * frames;
    int isize = getPredictionSize() * frames;
    // Reuse the flat actual/ideal buffers when their size still matches.
    if (actual == null || actual.length != bsize)
        actual = new double[bsize];
    else
        Arrays.fill(actual, 0);
    if (ideal == null || ideal.length != isize)
        ideal = new double[isize];
    else
        Arrays.fill(ideal, 0);
    int idealSize = getPredictionSize();
    int ac = 0, id = 0;
    // Pair each frame's input (prevX) with the next frame's prediction target.
    double[] prevX = null;
    for (int i = first; i <= last; i++) {
        double[] x = d.get(i);
        if (x == null) {
            x = new double[inputSize]; // missing frame -> zero vector
        } else {
            if (normalizeInputVectors) {
                x = normalize(x);
            }
        }
        if (prevX != null) {
            System.arraycopy(prevX, 0, actual, ac, inputSize);
            ac += inputSize;
            System.arraycopy(getTrainedPrediction(x), 0, ideal, id, idealSize);
            id += idealSize;
        }
        prevX = x;
    }
    Sample s = new Sample(actual, ideal, inputSize, idealSize);
    data.add(s);
    //System.out.println(data);
    // Lazily construct and configure the trainer on first use only.
    if (trainer == null) {
        trainer = new GradientDescent();
        trainer.setNet(net);
        trainer.setRnd(rnd);
        trainer.setPermute(true);
        trainer.setTrainingSet(data);
        trainer.setLearningRate(learningrate);
        trainer.setMomentum(momentum);
        trainer.setEpochs(trainIterationsPerCycle);
        trainer.setEarlyStopping(false);
        trainer.setOnline(true);
        trainer.setTargetError(0);
        trainer.clearListener();
    } else {
        //trainer.reset();
    }
    trainer.train();
    //System.out.println("LSTM error: " + trainer.getTrainingError());
}