List of usage examples for java.util TreeMap get
public V get(Object key)
From source file:de.micromata.genome.gwiki.plugin.rogmp3_1_0.CsvTable.java
/**
 * Returns all rows whose value in column {@code idx} equals {@code name}.
 *
 * Uses the prebuilt per-column index when one exists; otherwise falls back
 * to a linear scan of the table. Never returns null.
 *
 * @param idx  zero-based column index to match against
 * @param name value to look up (compared with String.equals)
 * @return the matching rows, or an empty list when none match
 */
public List<String[]> findEquals(int idx, String name) {
    TreeMap<String, List<String[]>> index = indices.get(idx);
    if (index != null) {
        // Indexed lookup. NOTE(review): on a hit this returns the index's
        // internal list; callers must not mutate it — TODO confirm callers.
        List<String[]> ret = index.get(name);
        return ret != null ? ret : new ArrayList<String[]>();
    }
    // No index for this column: scan every row.
    List<String[]> ret = new ArrayList<String[]>();
    for (String[] rec : table) {
        // Guard against short rows that do not reach column idx.
        if (rec.length > idx && name.equals(rec[idx])) {
            ret.add(rec);
        }
    }
    return ret;
}
From source file:no.met.jtimeseries.netcdf.NetcdfChartProvider.java
public void getCsv(PrintStream out, Iterable<String> variables) throws ParseException, IOException { Vector<NumberPhenomenon> data = getWantedPhenomena(variables); // header// ww w . j av a 2 s.c o m out.print("# Time"); for (NumberPhenomenon p : data) out.print(",\t" + p.getPhenomenonName() + " (" + p.getPhenomenonUnit() + ")"); out.println(); TreeMap<Date, Double[]> displayData = new TreeMap<Date, Double[]>(); for (int i = 0; i < data.size(); i++) { for (NumberValueItem atom : data.get(i)) { Double[] d = displayData.get(atom.getTimeFrom()); if (d == null) { d = new Double[data.size()]; displayData.put(atom.getTimeFrom(), d); } d[i] = atom.getValue(); } } SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); for (Entry<Date, Double[]> element : displayData.entrySet()) { out.print(format.format(element.getKey())); Double[] d = element.getValue(); for (int i = 0; i < d.length; i++) out.print(",\t" + d[i]); out.println(); } }
From source file:org.starfishrespect.myconsumption.server.business.repositories.repositoriesimpl.SensorRepositoryImpl.java
/**
 * Returns [timestamp, value] pairs recorded by the given sensor between
 * startTime and endTime (epoch seconds, inclusive). An endTime of 0 means
 * "no upper bound".
 *
 * @param sensorId  id of the sensor to read
 * @param startTime range start in epoch seconds (>= 0)
 * @param endTime   range end in epoch seconds, or 0 for unbounded
 * @return list of two-element [time, value] lists
 * @throws DaoException on storage errors
 */
@Override
public List<List<Integer>> getValues(String sensorId, int startTime, int endTime) throws DaoException {
    if (endTime == 0)
        endTime = Integer.MAX_VALUE;
    if (startTime < 0 || endTime < 0 || startTime > endTime) {
        throw new BadRequestException();
    }
    if (!sensorExists(sensorId))
        throw new NotFoundException();
    // Datasets appear to be stored per hour; align the query start on the hour.
    int effectiveStart = startTime - startTime % 3600;
    mValuesRepository.setSensor(sensorId);
    List<SensorDataset> daoValues = mValuesRepository.getSensor(new Date(((long) effectiveStart) * 1000L),
            new Date(((long) endTime) * 1000L));
    List<List<Integer>> values = new ArrayList<>();
    for (SensorDataset dataset : daoValues) {
        int hourStart = (int) (dataset.getTimestamp().getTime() / 1000);
        TreeMap<Integer, MinuteValues> minutes = dataset.getValues();
        if (minutes == null) {
            continue;
        }
        // Iterate entrySet instead of keySet+get (was also re-fetching the
        // MinuteValues via value.getValues().get(key) for every second).
        for (java.util.Map.Entry<Integer, MinuteValues> entry : minutes.entrySet()) {
            MinuteValues minute = entry.getValue();
            for (int second : minute.containedSeconds()) {
                int time = hourStart + entry.getKey() * 60 + second;
                // Hour alignment can pull in samples outside the requested range.
                if (time < startTime || time > endTime) {
                    continue;
                }
                List<Integer> item = new ArrayList<>();
                item.add(time);
                item.add(minute.getValue(second));
                values.add(item);
            }
        }
    }
    return values;
}
From source file:com.datatorrent.contrib.hdht.HDHTAppTest.java
@Test public void test() throws Exception { File file = new File("target/hds2"); FileUtils.deleteDirectory(file);//from w w w .j a v a 2s. c o m LocalMode lma = LocalMode.newInstance(); Configuration conf = new Configuration(false); conf.set("dt.operator.Store.fileStore.basePath", file.toURI().toString()); //conf.set("dt.operator.Store.flushSize", "0"); conf.set("dt.operator.Store.flushIntervalCount", "1"); conf.set("dt.operator.Store.partitionCount", "2"); conf.set("dt.operator.Store.numberOfBuckets", "2"); lma.prepareDAG(new HDHTAppTest(), conf); LocalMode.Controller lc = lma.getController(); //lc.setHeartbeatMonitoringEnabled(false); lc.runAsync(); long tms = System.currentTimeMillis(); File f0 = new File(file, "0/0-0"); File f1 = new File(file, "1/1-0"); File wal0 = new File(file, "/WAL/2/_WAL-0"); File wal1 = new File(file, "/WAL/3/_WAL-0"); while (System.currentTimeMillis() - tms < 30000) { if (f0.exists() && f1.exists()) { break; } Thread.sleep(100); } lc.shutdown(); Assert.assertTrue("exists " + f0, f0.exists() && f0.isFile()); Assert.assertTrue("exists " + f1, f1.exists() && f1.isFile()); Assert.assertTrue("exists " + wal0, wal0.exists() && wal0.exists()); Assert.assertTrue("exists " + wal1, wal1.exists() && wal1.exists()); FileAccessFSImpl fs = new MockFileAccess(); fs.setBasePath(file.toURI().toString()); fs.init(); TreeMap<Slice, Slice> data = new TreeMap<Slice, Slice>(new HDHTWriterTest.SequenceComparator()); fs.getReader(0, "0-0").readFully(data); Assert.assertArrayEquals("read key=" + new String(KEY0), DATA0.getBytes(), data.get(new Slice(KEY0)).toByteArray()); data.clear(); fs.getReader(1, "1-0").readFully(data); Assert.assertArrayEquals("read key=" + new String(KEY1), DATA1.getBytes(), data.get(new Slice(KEY1)).toByteArray()); fs.close(); }
From source file:com.ebay.erl.mobius.core.model.TupleColumnComparator.java
private static int compare(TreeMap<String, String> m1, TreeMap<String, String> m2) { int _COMPARE_RESULT = Integer.MAX_VALUE; int m1_size = m1.size(); int m2_size = m2.size(); if (m1_size == 0 || m2_size == 0) { if (m1_size == m2_size) return 0; else if (m1_size != 0) return 1; else//from w w w. ja v a2 s. c om return -1; } Iterator<String> k1_it = m1.keySet().iterator(); Iterator<String> k2_it = m2.keySet().iterator(); boolean hasDiff = false; while (k1_it.hasNext()) { String k1 = k1_it.next(); if (k2_it.hasNext()) { String k2 = k2_it.next(); _COMPARE_RESULT = String.CASE_INSENSITIVE_ORDER.compare(k1, k2); if (_COMPARE_RESULT == 0) { // same key, check their value String v1 = m1.get(k1); String v2 = m2.get(k2); _COMPARE_RESULT = v1.compareTo(v2); } } else { // m1 has more keys than m2 and m1 has the same // values for all the keys in m2 _COMPARE_RESULT = 1; } if (_COMPARE_RESULT != 0 && _COMPARE_RESULT != Integer.MAX_VALUE) { hasDiff = true; break;// has result } } if (!hasDiff) { if (k2_it.hasNext()) { // m2 has more keys than m1, and m2 has the same // values for all the keys in m1 _COMPARE_RESULT = -1; } else { // m1 and m2 are the same } } return _COMPARE_RESULT; }
From source file:ark.util.CounterTable.java
/**
 * Inverts the count table: maps each count to the list of items that
 * occurred that many times, ordered by ascending count (TreeMap natural
 * ordering).
 *
 * @return sorted map from count to the items with that count
 */
public TreeMap<Integer, List<T>> getSortedCounts() {
    TreeMap<Integer, List<T>> sortedCounts = new TreeMap<Integer, List<T>>();
    for (Entry<T, Integer> entry : this.counts.entrySet()) {
        // Single lookup instead of containsKey + put + get (three lookups).
        List<T> items = sortedCounts.get(entry.getValue());
        if (items == null) {
            items = new ArrayList<T>();
            sortedCounts.put(entry.getValue(), items);
        }
        items.add(entry.getKey());
    }
    return sortedCounts;
}
From source file:com.sangupta.jerry.oauth.OAuthUtils.java
/** * Given a list of parameters (including the OAuth parameters) build the * unique parameter string that is used to generate the signable string. * /*w ww . ja v a2 s . c o m*/ * @param params * the request parameters if any * * @param oauthParams * the OAuth params * * @return the parameters string to be used to generate the signable string */ public static String buildParamString(TreeMap<String, String> params, WebForm oauthParams) { StringBuilder builder = new StringBuilder(1024); // add all to the list of params for (NameValuePair pair : oauthParams.build()) { if (pair.getName().startsWith("oauth_")) { params.put(pair.getName(), pair.getValue()); } } // build the string boolean first = true; for (String key : params.keySet()) { if (!first) { builder.append('&'); } else { first = false; } builder.append(key); builder.append('='); builder.append(UriUtils.encodeURIComponent(params.get(key))); } return builder.toString(); }
From source file:com.rackspacecloud.blueflood.io.serializers.HistogramSerializationTest.java
/**
 * Deep-compares two histogram rollups by their non-zero bins: equal when
 * both contain exactly the same bin keys with equal values.
 */
private boolean areHistogramsEqual(HistogramRollup first, HistogramRollup second) {
    final TreeMap<Double, Double> firstBins = getNonZeroBinsAsMap(first);
    final TreeMap<Double, Double> secondBins = getNonZeroBinsAsMap(second);

    if (firstBins.size() != secondBins.size()) {
        return false;
    }

    // Sizes match, so verifying every bin of the first map suffices.
    for (Map.Entry<Double, Double> bin : firstBins.entrySet()) {
        Double other = secondBins.get(bin.getKey());
        if (other == null || !bin.getValue().equals(other)) {
            return false;
        }
    }
    return true;
}
From source file:me.philnate.textmanager.updates.Updater.java
/** * checks what the actual db version is, if an old version is encountered * appropriate updates are performed to get the db to the latest version *//* w w w .j av a 2 s . co m*/ public static void checkUpdateNeeded(String packageName) { checkArgument(StringUtils.isNotBlank(packageName), "You must insert a packageName"); TreeMap<Version, Class<? extends Update>> updates = createUpdateList(packageName); Setting v = Setting.find("version"); // check that an version is set, if none was found set it to 1 LOG.info(format("Database version is %s", v.getValue())); if (StringUtils.isBlank(v.getValue())) { Version db = (Version) ComparatorUtils.min(startVersion, updates.lastKey(), null); LOG.debug(String.format("No Version set, assuming []", db.toString())); v = new Setting("version", db); ds.save(v); } LOG.info(format("Found these Database upgrades: '%s'", updates.keySet())); for (Version vers : updates.keySet()) { if (vers.compareTo(new Version(v.getValue())) < Version.AFTER) { // if version is smaller than actual db version we have nothing // todo here LOG.debug(format("Database is already newer than '%s'", vers)); continue; } try { LOG.info(format("Going to update Database to version '%s'", vers)); backUp(); // create new Instance Update up = updates.get(vers).newInstance(); // verify that everything is met for this update up.preCheck(); // do the actual update up.upgrade(); // verify that everything is as expected up.postCheck(); // update the version v.setValue(vers.toString()).save(); } catch (Exception e) { // in case of an exception stop further rollback and stop // further updates LOG.error("Update process caused an exception going to rollback", e); rollback(); return; } finally { // finally drop backup directory to avoid to get conflicting // data versions try { FileUtils.deleteDirectory(backUpPath); } catch (IOException e) { LOG.error("Could not remove file", e); } } } }
From source file:org.apache.hadoop.hbase.TestRegionServerExit.java
private Thread startVerificationThread(final String tableName, final Text row) { Runnable runnable = new Runnable() { public void run() { HScannerInterface scanner = null; try { // Verify that the client can find the data after the region has moved // to a different server scanner = table.obtainScanner(HConstants.COLUMN_FAMILY_ARRAY, new Text()); LOG.info("Obtained scanner " + scanner); HStoreKey key = new HStoreKey(); TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>(); while (scanner.next(key, results)) { assertTrue(key.getRow().equals(row)); assertEquals(1, results.size()); byte[] bytes = results.get(HConstants.COLUMN_FAMILY); assertNotNull(bytes); assertTrue(tableName.equals(new String(bytes, HConstants.UTF8_ENCODING))); }/*from w w w . j a v a2 s . co m*/ LOG.info("Success!"); } catch (IOException e) { e.printStackTrace(); } finally { if (scanner != null) { LOG.info("Closing scanner " + scanner); try { scanner.close(); } catch (IOException e) { e.printStackTrace(); } } } } }; return new Thread(runnable); }