Example usage for java.util TreeMap keySet

List of usage examples for java.util TreeMap keySet

Introduction

On this page you can find example usage for java.util TreeMap keySet.

Prototype

public Set<K> keySet() 

Source Link

Document

Returns a Set view of the keys contained in this map.

Usage

From source file:com.facebook.tsdb.tsdash.server.model.Metric.java

/**
 * Create a new metric with rows aggregated after dissolving the given tags.
 * The resulting metric will no longer accept filters on those tags.
 *
 * @param tagsName names of the tags to dissolve
 * @param aggregatorName
 *            'sum', 'max', 'min' or 'avg'
 * @return a new Metric object that contains the aggregated rows, or null
 *         when a tag is unknown or was already dissolved
 * @throws IDNotFoundException
 * @throws IOException
 */
public Metric dissolveTags(ArrayList<String> tagsName, String aggregatorName)
        throws IOException, IDNotFoundException {
    if (tagsName.isEmpty()) {
        return this;
    }
    // validation: every tag must exist and must not have been dissolved before
    HashMap<String, HashSet<String>> knownTags = getTagsSet();
    for (String tag : tagsName) {
        if (!knownTags.containsKey(tag)) {
            // TODO: throw an exception here
            logger.error("Dissolve error: tag '" + tag + "' is not part of the tag set");
            return null;
        }
        if (dissolvedTags.contains(tag)) {
            // a given tag can only be dissolved once
            // TODO: throw an exception here
            logger.error("Metric already dissolved tag " + tag);
            return null;
        }
    }
    // put every time series on a common, perfectly aligned time grid
    alignAllTimeSeries();

    Metric result = new Metric(id, name, idMap);
    Tag[] toDissolve = new Tag[tagsName.size()];
    int idx = 0;
    for (String tag : tagsName) {
        toDissolve[idx] = new Tag(tag, idMap);
        result.dissolvedTags.add(tag);
        idx++;
    }
    // group rows whose headers collapse onto the same tag combination
    TreeMap<TagsArray, ArrayList<ArrayList<DataPoint>>> grouped = new TreeMap<TagsArray, ArrayList<ArrayList<DataPoint>>>(
            Tag.arrayComparator());
    // disableTags() requires the tags sorted by key
    Arrays.sort(toDissolve, Tag.keyComparator());
    for (TagsArray rowHeader : timeSeries.keySet()) {
        TagsArray collapsed = rowHeader.copy();
        if (toDissolve.length == 1) {
            collapsed.disableTag(toDissolve[0]);
        } else {
            collapsed.disableTags(toDissolve);
        }
        ArrayList<ArrayList<DataPoint>> bucket = grouped.get(collapsed);
        if (bucket == null) {
            bucket = new ArrayList<ArrayList<DataPoint>>();
            grouped.put(collapsed, bucket);
        }
        bucket.add(timeSeries.get(rowHeader));
    }
    // fold every group into a single aggregated row of the new metric
    Aggregator aggregator = getAggregator(aggregatorName);
    result.aggregatorName = aggregatorName;
    for (TagsArray rowHeader : grouped.keySet()) {
        result.timeSeries.put(rowHeader, TimeSeries.aggregate(grouped.get(rowHeader), aggregator));
    }
    return result;
}

From source file:mrmc.chart.ROCCurvePlot.java

/**
 * Sole constructor. Creates a line plot displaying ROC curves, one series per
 * reader plus four precomputed average curves, with checkboxes to toggle the
 * visibility of each series.
 *
 * @param title Title of the chart
 * @param xaxis x-axis label
 * @param yaxis y-axis label
 * @param treeMap Mapping of readers to a set of points defining an ROC curve
 */
public ROCCurvePlot(final String title, String xaxis, String yaxis, TreeMap<String, TreeSet<XYPair>> treeMap) {
    super(title);

    // populates the seriesCollection field from the per-reader point sets
    createDataset(treeMap);
    final JFreeChart chart = ChartFactory.createScatterPlot(title, xaxis, yaxis, seriesCollection,
            PlotOrientation.VERTICAL, true, true, false);
    XYPlot xyplot = (XYPlot) chart.getPlot();
    xyplot.setDomainCrosshairVisible(true);
    xyplot.setRangeCrosshairVisible(true);
    // both axes fixed to [0, 1] — ROC coordinates (FPF/TPF) live in the unit square
    NumberAxis domain = (NumberAxis) xyplot.getDomainAxis();
    domain.setRange(0.00, 1.00);
    domain.setTickUnit(new NumberTickUnit(0.1));
    NumberAxis range = (NumberAxis) xyplot.getRangeAxis();
    range.setRange(0.00, 1.00);
    range.setTickUnit(new NumberTickUnit(0.1));
    renderer = new XYLineAndShapeRenderer();
    chart.getXYPlot().setRenderer(renderer);
    ChartPanel chartPanel = new ChartPanel(chart);

    // panel holding one visibility checkbox per reader series
    JPanel readerSelect = new JPanel(new WrapLayout());
    readerSeriesBoxes = new ArrayList<JCheckBox>();

    for (String r : treeMap.keySet()) {
        JCheckBox aBox = new JCheckBox("" + r);
        // reader curves start hidden; only the averages are shown initially
        aBox.setSelected(false);
        aBox.addItemListener(new SeriesSelectListener());
        hideSeries("" + r);
        readerSeriesBoxes.add(aBox);
        readerSelect.add(aBox);
    }

    // average curves are drawn as thick lines without point markers
    renderer.setSeriesShapesVisible(seriesCollection.getSeriesIndex("Vertical Average"), false);
    renderer.setSeriesStroke(seriesCollection.getSeriesIndex("Vertical Average"), new java.awt.BasicStroke(3f));
    renderer.setSeriesShapesVisible(seriesCollection.getSeriesIndex("Horizontal Average"), false);
    renderer.setSeriesStroke(seriesCollection.getSeriesIndex("Horizontal Average"),
            new java.awt.BasicStroke(3f));
    renderer.setSeriesShapesVisible(seriesCollection.getSeriesIndex("Diagonal Average"), false);
    renderer.setSeriesStroke(seriesCollection.getSeriesIndex("Diagonal Average"), new java.awt.BasicStroke(3f));
    // NOTE(review): unlike the other averages, "Pooled Average" keeps its point
    // shapes visible — presumably intentional; confirm before changing
    renderer.setSeriesStroke(seriesCollection.getSeriesIndex("Pooled Average"), new java.awt.BasicStroke(3f));

    // checkboxes for the four average curves, all visible by default
    vert = new JCheckBox("Vertical Average");
    vert.setSelected(true);
    vert.addItemListener(new SeriesSelectListener());
    readerSelect.add(vert);
    horiz = new JCheckBox("Horizontal Average");
    horiz.setSelected(true);
    horiz.addItemListener(new SeriesSelectListener());
    readerSelect.add(horiz);
    diag = new JCheckBox("Diagonal Average");
    diag.setSelected(true);
    diag.addItemListener(new SeriesSelectListener());
    readerSelect.add(diag);
    pooled = new JCheckBox("Pooled Average");
    pooled.setSelected(true);
    pooled.addItemListener(new SeriesSelectListener());
    readerSelect.add(pooled);

    // bulk toggles for all readers / all averages
    JCheckBox allReaders = new JCheckBox("Show Readers");
    allReaders.setSelected(false);
    allReaders.addItemListener(new ReadersSelectListener());
    readerSelect.add(allReaders);
    JCheckBox allAverages = new JCheckBox("Show Averages");
    allAverages.setSelected(true);
    allAverages.addItemListener(new AverageSelectListener());
    readerSelect.add(allAverages);

    chartPanel.setPreferredSize(new java.awt.Dimension(700, 700));
    this.add(chartPanel);
    this.add(readerSelect, BorderLayout.PAGE_END);

}

From source file:edu.isi.wings.portal.controllers.PlanController.java

/**
 * Collects the parameter-variable bindings of each template and expands every
 * set-valued binding into multiple flat configurations (one per set member),
 * deduplicating identical configurations.
 */
private ArrayList<TreeMap<String, Binding>> getParameterBindings(ArrayList<Template> cts) {
    // one map of parameter-name -> binding per template
    ArrayList<TreeMap<String, Binding>> pending = new ArrayList<TreeMap<String, Binding>>();
    for (Template template : cts) {
        TreeMap<String, Binding> paramBindings = new TreeMap<String, Binding>();
        for (Variable inputVar : template.getInputVariables()) {
            if (inputVar.isParameterVariable() && inputVar.getBinding() != null) {
                paramBindings.put(inputVar.getName(), inputVar.getBinding());
            }
        }
        pending.add(paramBindings);
    }

    // Expanding collections into multiple configurations
    // FIXME: Cannot handle parameter collections right now
    ArrayList<TreeMap<String, Binding>> expanded = new ArrayList<TreeMap<String, Binding>>();
    HashMap<String, Boolean> seen = new HashMap<String, Boolean>();
    while (!pending.isEmpty()) {
        TreeMap<String, Binding> current = pending.remove(0);
        TreeMap<String, Binding> flat = new TreeMap<String, Binding>();
        boolean expandedSets = false;

        for (String varName : current.keySet()) {
            Binding b = current.get(varName);
            if (b.isSet() && b.size() > 1) {
                // fan out: re-queue one copy of the current map per set member
                for (WingsSet member : b) {
                    TreeMap<String, Binding> copy = new TreeMap<String, Binding>();
                    for (String otherVar : current.keySet()) {
                        copy.put(otherVar, current.get(otherVar));
                    }
                    copy.put(varName, (Binding) member);
                    pending.add(copy);
                }
                expandedSets = true;
            } else if (b.isSet() && b.size() == 1) {
                // unwrap nested singleton sets down to the underlying value
                Binding inner = (Binding) b.get(0);
                while (inner.isSet() && inner.size() == 1) {
                    inner = (Binding) inner.get(0);
                }
                ValueBinding vb = (ValueBinding) inner;
                flat.put(varName, new ValueBinding(vb.getValue(), vb.getDatatype()));
            } else if (!b.isSet()) {
                ValueBinding vb = (ValueBinding) b;
                flat.put(varName, new ValueBinding(vb.getValue(), vb.getDatatype()));
            }
        }
        if (!expandedSets) {
            // fully flattened configuration: keep it unless its value
            // signature was already seen
            String signature = "";
            for (String varName : flat.keySet()) {
                signature += flat.get(varName).toString() + ",";
            }
            if (!seen.containsKey(signature)) {
                seen.put(signature, true);
                expanded.add(flat);
            }
        }
    }

    return expanded;
}

From source file:uk.ac.leeds.ccg.andyt.projects.fluvialglacial.SlopeAreaAnalysis.java

/**
 * Generates one slope/upstream-accumulation-area scatter plot per glacier ID
 * (for polynomial degree 2) and writes, for every plot detected as a "hump",
 * a line "ID, Slope" to outFile2.
 *
 * @param allData map of glacier ID to a data array whose element 0 is an
 *        ArrayList of Generic_XYNumericalData points
 * @param outDir output directory; a "degreeN" subdirectory is created per degree
 * @param outFile2 CSV output file receiving "ID, Slope" rows
 * @param minNumberOfDataPoints minimum points required to produce a plot
 */
public void run(TreeMap<Integer, Object[]> allData, File outDir, File outFile2, int minNumberOfDataPoints) {
    PrintWriter pw;
    try {
        pw = new PrintWriter(outFile2);
    } catch (FileNotFoundException ex) {
        Logger.getLogger(SlopeAreaAnalysis.class.getName()).log(Level.SEVERE, null, ex);
        // BUG FIX: the original logged and fell through with pw == null,
        // causing a NullPointerException on the first pw.println below.
        return;
    }
    ExecutorService executorService = Executors.newSingleThreadExecutor();
    try {
        //pw.println("ID, log(Slope)");
        pw.println("ID, Slope");
        int dataWidth = 500;//400;//250;
        int dataHeight = 500;//657;
        String xAxisLabel = "x = log(Slope)";
        String yAxisLabel = "y = log(Upstream Accumulation Area)";
        boolean drawOriginLinesOnPlot = false;
        int decimalPlacePrecisionForCalculations = 10;
        int decimalPlacePrecisionForDisplay = 3;
        RoundingMode aRoundingMode = RoundingMode.HALF_UP;
        SlopeAreaScatterPlot plot;
        String format = "PNG";
        String title;
        File outfile;
        for (Integer ID : allData.keySet()) {
            Object[] data = allData.get(ID);
            ArrayList<Generic_XYNumericalData> theGeneric_XYNumericalData;
            theGeneric_XYNumericalData = (ArrayList<Generic_XYNumericalData>) data[0];
            int np = theGeneric_XYNumericalData.size();
            // only degree 2 is currently generated; loop kept for easy extension
            for (int degree = 2; degree < 3; degree++) {
                title = "GlacierID " + ID + ", n = " + np;
                File outDir2 = new File(outDir, "degree" + degree);
                outDir2.mkdirs();
                outfile = new File(outDir2, "SlopeUAAScatterPlot" + ID + ".PNG");
                if (np >= minNumberOfDataPoints) {
                    plot = new SlopeAreaScatterPlot(degree, data, executorService, outfile, format, title,
                            dataWidth, dataHeight, xAxisLabel, yAxisLabel, drawOriginLinesOnPlot,
                            decimalPlacePrecisionForCalculations, decimalPlacePrecisionForDisplay, aRoundingMode);
                    plot.run();
                    if (plot.isHump) {
                        System.out.println("" + ID + ", " + plot.xAtMaxy2);
                        // xAtMaxy2 is a log10 value; report the slope itself
                        double x = Math.pow(10.0d, plot.xAtMaxy2);
                        pw.println("" + ID + ", " + x);
                    }
                }
            }
            pw.flush();
        }
    } finally {
        // the original leaked both the writer and the executor on exception
        pw.close();
        executorService.shutdown();
    }
}

From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java

/**
 * Writes a residuals.csv (expected minus observed, per output area) next to
 * observed_File, using the ISARHP_ISARCEP header.
 *
 * @param observed_File file of observed CAS optimisation constraints
 * @param expected_File file of expected CAS optimisation constraints
 * @throws Exception on any I/O or load failure
 */
public void writeOutResidualsISARHP_ISARCEP(File observed_File, File expected_File) throws Exception {
    File outputFile = new File(observed_File.getParentFile(), "residuals.csv");
    TreeMap<String, double[]> a_SAROptimistaionConstraints = loadCASOptimistaionConstraints(observed_File);
    TreeMap<String, double[]> a_CASOptimistaionConstraints = loadCASOptimistaionConstraints(expected_File);
    FileOutputStream a_FileOutputStream = new FileOutputStream(outputFile);
    // FIX: close the stream even when an exception is thrown (original leaked it)
    try {
        String header = OutputDataHandler_OptimisationConstraints.getISARHP_ISARCEPHeader();
        a_FileOutputStream.write(header.getBytes());
        a_FileOutputStream.write(StreamTokenizer.TT_EOL);
        a_FileOutputStream.flush();
        for (String oa : a_SAROptimistaionConstraints.keySet()) {
            double[] a_SARExpectedRow = a_SAROptimistaionConstraints.get(oa);
            // NOTE(review): NPE if an OA present in observed is missing from
            // expected — same behavior as the original; confirm invariant
            double[] a_CASObservedRow = a_CASOptimistaionConstraints.get(oa);
            // build the CSV row with a StringBuilder instead of += in a loop
            StringBuilder line = new StringBuilder(oa).append(',');
            for (int i = 0; i < a_SARExpectedRow.length; i++) {
                line.append(a_SARExpectedRow[i] - a_CASObservedRow[i]);
                if (i < a_SARExpectedRow.length - 1) {
                    line.append(',');
                }
            }
            a_FileOutputStream.write(line.toString().getBytes());
            a_FileOutputStream.write(StreamTokenizer.TT_EOL);
            a_FileOutputStream.flush();
        }
    } finally {
        a_FileOutputStream.close();
    }
}

From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java

/**
 * Writes a residuals.csv (expected minus observed, per output area) next to
 * observed_File, using the HSARHP_ISARCEP header.
 *
 * @param observed_File file of observed CAS optimisation constraints
 * @param expected_File file of expected CAS optimisation constraints
 * @throws Exception on any I/O or load failure
 */
public void writeOutResidualsHSARHP_ISARCEP(File observed_File, File expected_File) throws Exception {
    File outputFile = new File(observed_File.getParentFile(), "residuals.csv");
    TreeMap<String, double[]> a_SAROptimistaionConstraints = loadCASOptimistaionConstraints(observed_File);
    TreeMap<String, double[]> a_CASOptimistaionConstraints = loadCASOptimistaionConstraints(expected_File);
    FileOutputStream a_FileOutputStream = new FileOutputStream(outputFile);
    // FIX: close the stream even when an exception is thrown (original leaked it)
    try {
        String header = OutputDataHandler_OptimisationConstraints.getHSARHP_ISARCEPHeader();
        a_FileOutputStream.write(header.getBytes());
        a_FileOutputStream.write(StreamTokenizer.TT_EOL);
        a_FileOutputStream.flush();
        for (String oa : a_SAROptimistaionConstraints.keySet()) {
            double[] a_SARExpectedRow = a_SAROptimistaionConstraints.get(oa);
            // NOTE(review): NPE if an OA present in observed is missing from
            // expected — same behavior as the original; confirm invariant
            double[] a_CASObservedRow = a_CASOptimistaionConstraints.get(oa);
            // build the CSV row with a StringBuilder instead of += in a loop
            StringBuilder line = new StringBuilder(oa).append(',');
            for (int i = 0; i < a_SARExpectedRow.length; i++) {
                line.append(a_SARExpectedRow[i] - a_CASObservedRow[i]);
                if (i < a_SARExpectedRow.length - 1) {
                    line.append(',');
                }
            }
            a_FileOutputStream.write(line.toString().getBytes());
            a_FileOutputStream.write(StreamTokenizer.TT_EOL);
            a_FileOutputStream.flush();
        }
    } finally {
        a_FileOutputStream.close();
    }
}

From source file:util.DiarynetTreeCache.java

/**
 * Put the object into cache based on the id. The id will
 * be used to identify the object.<p>
 *
 * When the target node already holds {@code maxItems} entries, the
 * {@code sweepItems} least-recently-accessed entries are evicted first
 * (ordered by their stored "accessDate").
 *
 * This method is potentially called by multiple threads.
 *
 * @param fqn this is the fqn of the node
 * @param id id of the object to be put in cache
 * @param obj object to be put in the cache
 * @exception BasicCacheException run time exception in case there is a problem
 */
public synchronized void put(Fqn fqn, Object id, Object obj) throws BasicCacheException {
    if (isCachingDisabled())
        return;
    Stringifier jobj = new Stringifier();
    jobj.setObject("object", obj);
    // FIX: avoid deprecated new Long(new Date().getTime()) boxing;
    // both timestamps now refer to the same instant
    long now = System.currentTimeMillis();
    jobj.setObject("pubdate", Long.valueOf(now));
    jobj.setObject("accessDate", Long.valueOf(now));

    try {
        // TODO: this is a performance penalty!
        Node node = treeCache.get(fqn);
        if (node != null) { // check for cache overflow
            Map map = node.getData();
            if (map != null) {
                int nodeSize = map.size();
                if (nodeSize >= maxItems) { // TODO: this is expensive!
                    // index every entry by access time so iteration yields oldest first
                    TreeMap treeMap = new TreeMap();
                    Iterator iter = map.keySet().iterator();
                    while (iter.hasNext()) {
                        Stringifier stringifier = (Stringifier) map.get(iter.next());
                        if (stringifier != null) {
                            if (stringifier.getObject("accessDate") != null) {
                                ModelCacheKey cacheKey = new ModelCacheKey(
                                        ((Long) stringifier.getObject("accessDate")).longValue(), false);
                                treeMap.put((Object) cacheKey, (Object) stringifier);
                            }
                        }
                    }
                    Iterator iter1 = treeMap.keySet().iterator();

                    // sweep oldest cache entries
                    int cntr = 0;
                    logger.info(fqn.toString() + " sweeping items...");
                    while (iter1.hasNext() && (cntr < sweepItems)) {
                        Object key = iter1.next();
                        Object rbobj = treeMap.get(key);
                        if (rbobj != null) {
                            Stringifier stringifier = (Stringifier) rbobj;
                            Object rbid = stringifier.getObject("id");
                            // FIX: the original re-tested rbobj != null here (always
                            // true); the intended guard is on the extracted id
                            if (rbid != null) {
                                if (exists(fqn, rbid)) {
                                    remove(fqn, rbid); // from treeCache!
                                    cntr++;
                                }
                            }
                        }
                    }
                    logger.info("...done sweeping " + cntr + " items.");
                    Node node1 = treeCache.get(fqn);
                    if (node1 != null) {
                        Map map1 = node1.getData();
                        if (map1 != null) {
                            int nodeSize1 = map1.size();
                            if ((fqn != null) && (fqn.toString() != null)) {
                                logger.warn("Max Items = " + maxItems + ", Size = " + nodeSize1 + ", Removed "
                                        + cntr + " items from cache. sweepItems is " + sweepItems);
                            }
                        }
                    }
                }
            }
        }
        treeCache.put(fqn, id, (Object) jobj);
    } catch (LockingException e) {
        throw new BasicCacheException("Exception occurred while putting content into cache. id = " + id, e);
    } catch (TimeoutException e1) {
        throw new BasicCacheException("Exception occurred while fetching content from cache. id = " + id, e1);
    } catch (org.jboss.cache.CacheException e2) {
        throw new BasicCacheException("Exception occurred while fetching content from cache. id = " + id, e2);
    }
    // FIX: Thread.yield() is static — calling it via currentThread() was misleading
    Thread.yield();
}

From source file:com.sfs.whichdoctor.dao.MembershipDAOImpl.java

/**
 * Test if the person is applicable for a membership upgrade.
 *
 * @param person the person/*from   w  ww  . j  a  va 2  s .c  o  m*/
 * @return true, if successful
 */
private boolean testIfMembershipUpgradeApplicable(final PersonBean person) {

    boolean membershipUpgradeApplicable = false;

    dataLogger.info("Automated training check: person is a basic trainee");

    boolean passedWritten = false;
    boolean passedClinical = false;
    int totalTraining = 0;

    if (person.getExams() != null) {
        for (ExamBean exam : person.getExams()) {
            if (StringUtils.equalsIgnoreCase(exam.getStatus(), "Passed")) {
                if (StringUtils.equalsIgnoreCase(exam.getType(), "Written Exam")) {
                    passedWritten = true;
                }
                if (StringUtils.equalsIgnoreCase(exam.getType(), "Clinical Exam")) {
                    passedClinical = true;
                }
            }
        }
    }

    final TreeMap<String, AccreditationBean[]> summary = person.getTrainingSummary("Basic Training");

    if (summary != null) {
        for (String key : summary.keySet()) {
            AccreditationBean[] details = summary.get(key);
            AccreditationBean core = details[0];
            AccreditationBean nonCore = details[1];

            totalTraining += core.getWeeksCertified();
            totalTraining += nonCore.getWeeksCertified();
        }
    }

    final int basicTrainingLimit = this.getTrainingLimit("Basic Training");

    if (totalTraining >= basicTrainingLimit && passedWritten && passedClinical) {
        membershipUpgradeApplicable = true;
    }

    return membershipUpgradeApplicable;
}

From source file:org.kuali.kfs.sys.context.GenerateDataDictionaryInquirySectionFile.java

/**
 * Dumps, for every business object with an inquiry definition, a properties
 * line "className=sectionTitle1,sectionTitle2,..." into
 * boInquirySections.properties under the unit-test source tree.
 *
 * @throws Exception on data-dictionary or file I/O failure
 */
public void testGenerateInquirySections() throws Exception {
    TreeMap<String, List<InquirySectionDefinition>> boInquirySections = new TreeMap<String, List<InquirySectionDefinition>>();
    for (org.kuali.rice.krad.datadictionary.BusinessObjectEntry kradBusinessObjectEntry : dataDictionary
            .getBusinessObjectEntries().values()) {
        BusinessObjectEntry businessObjectEntry = (BusinessObjectEntry) kradBusinessObjectEntry;
        if (businessObjectEntry.getInquiryDefinition() != null) {
            InquiryDefinition inqDef = businessObjectEntry.getInquiryDefinition();
            boInquirySections.put(businessObjectEntry.getBusinessObjectClass().getName(),
                    inqDef.getInquirySections());
        }
    }
    LOG.info("Class URI: " + getClass().getProtectionDomain().getCodeSource().getLocation().toURI());
    File f = new File(new File(getClass().getProtectionDomain().getCodeSource().getLocation().toURI()),
            getClass().getPackage().getName().replace(".", File.separator) + File.separator
                    + "boInquirySections.properties");
    // redirect from the compiled-classes tree into the unit-test source tree
    f = new File(f.getAbsolutePath().replace("/classes/", "/unit/src/"));
    LOG.info("File path:" + f.getAbsolutePath());
    FileWriter fw = new FileWriter(f);
    // FIX: close the writer even when an exception is thrown (original leaked it)
    try {
        for (String className : boInquirySections.keySet()) {
            fw.write(className);
            fw.write('=');
            Iterator<InquirySectionDefinition> i = boInquirySections.get(className).iterator();
            while (i.hasNext()) {
                String title = i.next().getTitle();
                // normalize missing titles so every section yields a token
                if (title == null) {
                    title = "(null)";
                } else if (StringUtils.isBlank(title)) {
                    title = "(blank)";
                }
                fw.write(title);
                if (i.hasNext()) {
                    fw.write(',');
                }
            }
            fw.write('\n');
        }
        fw.flush();
    } finally {
        fw.close();
    }
}

From source file:ch.icclab.cyclops.support.database.influxdb.client.InfluxDBClient.java

/**
 * Runs the given query against InfluxDB, merges points that share a
 * timestamp (summing their usage/avg column), and returns the first result
 * series as a TSDBData.
 *
 * @param parameterQuery the InfluxQL query to execute
 * @return the first parsed series; an empty TSDBData when the query yields
 *         no usable results or parsing fails
 */
public TSDBData getData(String parameterQuery) {
    //TODO: check the sense of the TSDBData[] and simplify/split the code
    logger.debug("Attempting to get Data");
    InfluxDB influxDB = InfluxDBFactory.connect(this.url, this.username, this.password);
    JSONArray resultArray;
    TSDBData[] dataObj = null;
    ObjectMapper mapper = new ObjectMapper();
    Query query = new Query(parameterQuery, dbName);
    try {
        logger.debug("Attempting to execute the query: " + parameterQuery + " into the db: " + dbName);
        resultArray = new JSONArray(influxDB.query(query).getResults());
        logger.debug("Obtained results: " + resultArray.toString());
        if (!resultArray.isNull(0) && !resultArray.toString().equals("[{}]")) {
            JSONObject obj = (JSONObject) resultArray.get(0);
            JSONArray series = (JSONArray) obj.get("series");
            // rename the "values" key to "points" so Jackson can map to TSDBData
            for (int i = 0; i < series.length(); i++) {
                String response = series.get(i).toString();
                response = response.split("values")[0] + "points" + response.split("values")[1];
                series.put(i, new JSONObject(response));
            }
            dataObj = mapper.readValue(series.toString(), TSDBData[].class);

            // Filter the points for repeated timestamps and add their usage/avg value
            for (int i = 0; i < dataObj.length; i++) {
                // FIX: indices are now reset per series; the original reused
                // stale values from the previous iteration
                int timeIndex = -1;
                int usageIndex = -1;
                for (int o = 0; o < dataObj[i].getColumns().size(); o++) {
                    if (dataObj[i].getColumns().get(o).equalsIgnoreCase("time"))
                        timeIndex = o;
                    if (dataObj[i].getColumns().get(o).equalsIgnoreCase("usage")
                            || dataObj[i].getColumns().get(o).equalsIgnoreCase("avg"))
                        usageIndex = o;
                }
                if (usageIndex > -1) {
                    // meter point: sum the usage of points sharing a timestamp,
                    // keeping the last point per timestamp
                    TreeMap<String, ArrayList> points = new TreeMap<String, ArrayList>();
                    for (ArrayList point : dataObj[i].getPoints()) {
                        if (points.containsKey(point.get(timeIndex))) {
                            String time = (String) point.get(timeIndex);
                            Double usage = Double.parseDouble(points.get(time).get(usageIndex).toString());
                            usage = Double.parseDouble(point.get(usageIndex).toString()) + usage;
                            point.set(usageIndex, usage);
                        }
                        points.put((String) point.get(timeIndex), point);
                    }
                    ArrayList<ArrayList<Object>> result = new ArrayList<ArrayList<Object>>();
                    for (String key : points.keySet()) {
                        result.add(points.get(key));
                    }
                    dataObj[i].setPoints(result);
                }
            }
        }
    } catch (JSONException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    if (dataObj == null || dataObj.length == 0) {
        // FIX: the original dereferenced dataObj[0] unconditionally and threw
        // a NullPointerException for empty results or after a caught exception
        TSDBData data = new TSDBData();
        data.setColumns(new ArrayList<String>());
        data.setPoints(new ArrayList<ArrayList<Object>>());
        data.setTags(new HashMap());
        return data;
    }
    return dataObj[0];
}