Example usage for java.lang Double intValue


Introduction

On this page you can find example usage of java.lang.Double.intValue(), drawn from open-source projects.

Prototype

public int intValue() 

Document

Returns the value of this Double as an int after a narrowing primitive conversion.
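
For reference, a minimal sketch of that narrowing conversion: fractional values are truncated toward zero, out-of-range magnitudes saturate at the int bounds, and NaN becomes 0.

Double d = 3.99;
int a = d.intValue();                          // 3: truncated toward zero, same as (int) 3.99
int b = Double.valueOf(-3.99).intValue();      // -3
int c = Double.valueOf(1e18).intValue();       // Integer.MAX_VALUE: saturates when out of range
int n = Double.valueOf(Double.NaN).intValue(); // 0: NaN narrows to zero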

Usage

From source file: com.imarchuang.storm.perftest.Main.java

public void realMain(String[] args) throws Exception {
    Map clusterConf = Utils.readStormConfig();
    clusterConf.putAll(Utils.readCommandLineOpts());
    Nimbus.Client client = NimbusClient.getConfiguredClient(clusterConf).getClient();

    CmdLineParser parser = new CmdLineParser(this);
    parser.setUsageWidth(80);
    try {
        // parse the arguments.
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        // if there's a problem in the command line,
        // you'll get this exception. this will report
        // an error message.
        System.err.println(e.getMessage());
        _help = true;
    }
    if (_help) {
        parser.printUsage(System.err);
        System.err.println();
        return;
    }
    if (_numWorkers <= 0) {
        throw new IllegalArgumentException("Need at least one worker");
    }
    if (_name == null || _name.isEmpty()) {
        throw new IllegalArgumentException("name must be something");
    }
    if (!_ackEnabled) {
        _ackers = 0;
    }

    try {
        for (int topoNum = 0; topoNum < _numTopologies; topoNum++) {
            TopologyBuilder builder = new TopologyBuilder();
            LOG.info("Adding in " + _spoutParallel + " spouts");
            builder.setSpout("messageSpout", new SOLSpout(_messageSize, _ackEnabled, _spoutSleepMs),
                    _spoutParallel);
            LOG.info("Adding in " + _boltParallel + " bolts");
            builder.setBolt("messageBolt", new SOLBolt(_boltSleepMs), _boltParallel)
                    .shuffleGrouping("messageSpout");
            for (int levelNum = 2; levelNum <= _numLevels; levelNum++) {
                LOG.info("Adding in " + _boltParallel + " bolts at level " + levelNum);
                builder.setBolt("messageBolt" + levelNum, new SOLBolt(_boltSleepMs), _boltParallel)
                        .shuffleGrouping("messageBolt" + (levelNum - 1));
            }

            Config conf = new Config();
            conf.setDebug(_debug);
            conf.setNumWorkers(_numWorkers);
            conf.setNumAckers(_ackers);
            if (_maxSpoutPending > 0) {
                conf.setMaxSpoutPending(_maxSpoutPending);
            }
            conf.setStatsSampleRate(1.0);
            Double RECEIVE_BUFFER_SIZE = pow(2, _receiveBufferSize);
            Double SEND_BUFFER_SIZE = pow(2, _sendBufferSize);
            conf.put(Config.TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE, RECEIVE_BUFFER_SIZE.intValue());
            conf.put(Config.TOPOLOGY_EXECUTOR_SEND_BUFFER_SIZE, SEND_BUFFER_SIZE.intValue());

            StormSubmitter.submitTopology(_name + "_" + topoNum, conf, builder.createTopology());
        }
        metrics(client, _messageSize, _pollFreqSec, _testRunTimeSec);
    } finally {
        //Kill it right now!!!
        KillOptions killOpts = new KillOptions();
        killOpts.set_wait_secs(0);

        for (int topoNum = 0; topoNum < _numTopologies; topoNum++) {
            LOG.info("KILLING " + _name + "_" + topoNum);
            try {
                client.killTopologyWithOpts(_name + "_" + topoNum, killOpts);
            } catch (Exception e) {
                LOG.error("Error tying to kill " + _name + "_" + topoNum, e);
            }
        }
    }
}
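
A note on the buffer-size lines above: boxing the result of pow(2, n) only to call intValue() round-trips through double. Since these are small integer powers of two, a bit shift is a more direct sketch (assuming the exponent stays below 31):

int receiveBufferSize = 1 << _receiveBufferSize; // 2^n as an int, no boxing
// equivalent for small n to: Double.valueOf(Math.pow(2, _receiveBufferSize)).intValue()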

From source file: com.act.lcms.LCMSmzMLParser.java

protected LCMSSpectrum handleSpectrumEntry(Document doc) throws XPathException {
    XPath xpath = getXPathFactory().newXPath();

    Double spectrumIndexD = (Double) xpath.evaluate(SPECTRUM_PATH_INDEX, doc, XPathConstants.NUMBER);
    if (spectrumIndexD == null) {
        System.err.format("WARNING: found spectrum document without index attribute.\n");
        return null;
    }
    Integer spectrumIndex = spectrumIndexD.intValue();

    if (xpath.evaluate(SPECTRUM_PATH_EXPECTED_VERSION, doc, XPathConstants.NODE) == null) {
        // If this is not MS1 spectrum data, skip it in the output.

        // Check whether this entry is diode array data, which we expect to skip silently.
        // If even that does not match, we have a truly unexpected entry, so report it to the user.
        if (xpath.evaluate(SPECTRUM_PATH_EXPECTED_VERSION_DIODE_ARRAY, doc, XPathConstants.NODE) == null) {
            System.err.format(
                    "WARNING: found unexpected MS spectrum version in spectrum document %d.  Skipping.\n",
                    spectrumIndex);
        }

        return null;
    }

    String spectrumId = (String) xpath.evaluate(SPECTRUM_PATH_ID, doc, XPathConstants.STRING);
    if (spectrumId == null) {
        System.err.format("WARNING: no spectrum id found for documnt %d\n", spectrumIndex);
        return null;
    }

    Matcher matcher = SPECTRUM_EXTRACTION_REGEX.matcher(spectrumId);
    if (!matcher.find()) {
        System.err.format("WARNING: spectrum id for documnt %d did not match regex: %s\n", spectrumIndex,
                spectrumId);
        return null;
    }
    Integer spectrumFunction = Integer.parseInt(matcher.group(1));
    Integer spectrumScan = Integer.parseInt(matcher.group(3));

    Integer scanListCount = ((Double) xpath.evaluate(SPECTRUM_PATH_SCAN_LIST_COUNT, doc, XPathConstants.NUMBER))
            .intValue();
    if (!Integer.valueOf(1).equals(scanListCount)) {
        System.err.format("WARNING: unexpected number of scan entries in spectrum document %d: %d",
                spectrumIndex, scanListCount);
        return null;
    }

    Integer binaryDataCount = ((Double) xpath.evaluate(SPECTRUM_PATH_BINARY_DATA_ARRAY_LIST_COUNT, doc,
            XPathConstants.NUMBER)).intValue();
    if (!Integer.valueOf(2).equals(binaryDataCount)) {
        System.err.format("WARNING: unexpected number of binary data entries in spectrum document %d: %d",
                spectrumIndex, binaryDataCount);
        return null;
    }

    Double basePeakMz = (Double) xpath.evaluate(SPECTRUM_PATH_BASE_PEAK_MZ, doc, XPathConstants.NUMBER);
    if (basePeakMz == null) {
        System.err.format("WARNING: no base peak m/z found for spectrum document %d\n", spectrumIndex);
        return null;
    }

    Double basePeakIntensity = (Double) xpath.evaluate(SPECTRUM_PATH_BASE_PEAK_INTENSITY, doc,
            XPathConstants.NUMBER);
    if (basePeakIntensity == null) {
        System.err.format("WARNING: no base peak intensity found for spectrum document %d\n", spectrumIndex);
        return null;
    }

    Double scanStartTime = (Double) xpath.evaluate(SPECTRUM_PATH_SCAN_START_TIME, doc, XPathConstants.NUMBER);
    if (scanStartTime == null) {
        System.err.format("WARNING: no scan start time found for spectrum document %d\n", spectrumIndex);
        return null;
    }

    String scanStartTimeUnit = (String) xpath.evaluate(SPECTRUM_PATH_SCAN_START_TIME_UNIT, doc,
            XPathConstants.STRING);
    if (scanStartTimeUnit == null) {
        System.err.format("WARNING: no scan start time unit found for spectrum document %d\n", spectrumIndex);
        return null;
    }

    String mzData = (String) xpath.evaluate(SPECTRUM_PATH_MZ_BINARY_DATA, doc, XPathConstants.STRING);
    if (mzData == null) {
        System.err.format("WARNING: no m/z data found for spectrum document %d\n", spectrumIndex);
        return null;
    }

    String intensityData = (String) xpath.evaluate(SPECTRUM_PATH_INTENSITY_BINARY_DATA, doc,
            XPathConstants.STRING);
    if (intensityData == null) {
        System.err.format("WARNING: no intensity data found for spectrum document %d\n", spectrumIndex);
        return null;
    }

    List<Double> mzs = base64ToDoubleList(mzData);
    List<Double> intensities = base64ToDoubleList(intensityData);
    List<Pair<Double, Double>> mzIntensityPairs = zipLists(mzs, intensities);

    return new LCMSSpectrum(spectrumIndex, scanStartTime, scanStartTimeUnit, mzIntensityPairs, basePeakMz,
            basePeakIntensity, spectrumFunction, spectrumScan, null);
}
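
One caveat worth flagging for this pattern: with XPathConstants.NUMBER, a non-matching expression typically evaluates to Double.NaN rather than null, and Double.NaN.intValue() silently yields 0, so the null checks above may never fire. A defensive sketch, where expr stands in for any of the SPECTRUM_PATH_* expressions:

Double value = (Double) xpath.evaluate(expr, doc, XPathConstants.NUMBER);
if (value == null || value.isNaN()) {
    return null; // treat NaN as missing instead of letting intValue() turn it into 0
}
int index = value.intValue();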

From source file: com.imarchuang.storm.perftest.MainSimulatedBatch.java

public void realMain(String[] args) throws Exception {
    Map clusterConf = Utils.readStormConfig();
    clusterConf.putAll(Utils.readCommandLineOpts());
    Nimbus.Client client = NimbusClient.getConfiguredClient(clusterConf).getClient();

    CmdLineParser parser = new CmdLineParser(this);
    parser.setUsageWidth(80);
    try {
        // parse the arguments.
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        // if there's a problem in the command line,
        // you'll get this exception. this will report
        // an error message.
        System.err.println(e.getMessage());
        _help = true;
    }
    if (_help) {
        parser.printUsage(System.err);
        System.err.println();
        return;
    }
    if (_numWorkers <= 0) {
        throw new IllegalArgumentException("Need at least one worker");
    }
    if (_name == null || _name.isEmpty()) {
        throw new IllegalArgumentException("name must be something");
    }
    if (!_ackEnabled) {
        _ackers = 0;
    }

    try {
        for (int topoNum = 0; topoNum < _numTopologies; topoNum++) {
            TopologyBuilder builder = new TopologyBuilder();
            LOG.info("Adding in " + _spoutParallel + " batch spouts");
            builder.setSpout("messageSpout",
                    new SOLSpoutBatch(_messageSize, _ackEnabled, _spoutSleepMs, _batchSize), _spoutParallel);

            LOG.info("Adding in " + _boltParallel + " user bolts with debatcher in itself");
            builder.setBolt("messageBolt", new SOLBolt_wDeBatch(_boltSleepMs), _boltParallel)
                    .shuffleGrouping("messageSpout");
            //LOG.info("Adding in "+_boltParallel +" bolts");
            //builder.setBolt("messageBolt", new SOLBolt(), _boltParallel).shuffleGrouping("messageBoltDeBatch");

            Config conf = new Config();
            conf.setDebug(_debug);
            conf.setNumWorkers(_numWorkers);
            conf.setNumAckers(_ackers);
            if (_maxSpoutPending > 0) {
                conf.setMaxSpoutPending(_maxSpoutPending);
            }
            conf.setStatsSampleRate(1.0);
            Double RECEIVE_BUFFER_SIZE = pow(2, _receiveBufferSize);
            Double SEND_BUFFER_SIZE = pow(2, _sendBufferSize);
            conf.put(Config.TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE, RECEIVE_BUFFER_SIZE.intValue());
            conf.put(Config.TOPOLOGY_EXECUTOR_SEND_BUFFER_SIZE, SEND_BUFFER_SIZE.intValue());

            StormSubmitter.submitTopology(_name + "_" + topoNum, conf, builder.createTopology());
        }
        metrics(client, _messageSize, _pollFreqSec, _testRunTimeSec);
    } finally {
        //Kill it right now!!!
        KillOptions killOpts = new KillOptions();
        killOpts.set_wait_secs(0);

        for (int topoNum = 0; topoNum < _numTopologies; topoNum++) {
            LOG.info("KILLING " + _name + "_" + topoNum);
            try {
                client.killTopologyWithOpts(_name + "_" + topoNum, killOpts);
            } catch (Exception e) {
                LOG.error("Error tying to kill " + _name + "_" + topoNum, e);
            }
        }
    }
}

From source file: edu.uci.imbs.actor.VariablePopulationProtectionStatistics.java

private void buildPeasantProportionRecordEntries() {
    peasantProportionRecordEntries = new ArrayList<PeasantProportionRecordEntry>();
    Iterator<String> it = peasantProportionHeadings.iterator();
    Iterator<Entry<Double, Integer>> itNumbers = sortedFullDistributionEntries.iterator();
    int total = peasants.size();
    while (it.hasNext()) {
        Double number = new Double(itNumbers.next().getValue());
        double proportion = (total != 0) ? number / total : 0;
        peasantProportionRecordEntries
                .add(new PeasantProportionRecordEntry(it.next(), number.intValue(), it.next(), proportion));
    }

}
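
A side note on this example: the Double(double) constructor is deprecated since Java 9; Double.valueOf (or plain autoboxing) is the idiomatic replacement. A sketch against the same iterator:

Double number = Double.valueOf(itNumbers.next().getValue()); // instead of new Double(...)
int count = number.intValue();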

From source file: com.imarchuang.storm.perftest.MainBatch_woDebatch.java

public void realMain(String[] args) throws Exception {
    Map clusterConf = Utils.readStormConfig();
    clusterConf.putAll(Utils.readCommandLineOpts());
    Nimbus.Client client = NimbusClient.getConfiguredClient(clusterConf).getClient();

    CmdLineParser parser = new CmdLineParser(this);
    parser.setUsageWidth(80);
    try {
        // parse the arguments.
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        // if there's a problem in the command line,
        // you'll get this exception. this will report
        // an error message.
        System.err.println(e.getMessage());
        _help = true;
    }
    if (_help) {
        parser.printUsage(System.err);
        System.err.println();
        return;
    }
    if (_numWorkers <= 0) {
        throw new IllegalArgumentException("Need at least one worker");
    }
    if (_name == null || _name.isEmpty()) {
        throw new IllegalArgumentException("name must be something");
    }
    if (!_ackEnabled) {
        _ackers = 0;
    }

    try {
        for (int topoNum = 0; topoNum < _numTopologies; topoNum++) {
            TopologyBuilder builder = new TopologyBuilder();
            LOG.info("Adding in " + _spoutParallel + " spouts");
            builder.setSpout("messageSpout", new SOLSpout(_messageSize, _ackEnabled, _spoutSleepMs),
                    _spoutParallel);
            int freq = (int) (_testRunTimeSec * 1000 / 3) - 10;
            builder.setSpout("signalsSpout", new SOLSignalsSpout(freq));
            LOG.info("Adding in " + _boltParallel + " batcher bolts with _batchSize=" + _batchSize);
            builder.setBolt("messageBoltBatch", new SOLBatchBolt(_batchSize), _boltParallel)
                    .shuffleGrouping("messageSpout");
            LOG.info("Adding in " + _boltParallel + " user bolts with debatcher in itself");
            builder.setBolt("messageBolt", new SOLBolt_wDeBatch(_boltSleepMs), _boltParallel)
                    .shuffleGrouping("messageBoltBatch");
            //LOG.info("Adding in "+_boltParallel +" bolts");
            //builder.setBolt("messageBolt", new SOLBolt(), _boltParallel).shuffleGrouping("messageBoltDeBatch");

            Config conf = new Config();
            conf.setDebug(_debug);
            conf.setNumWorkers(_numWorkers);
            conf.setNumAckers(_ackers);
            if (_maxSpoutPending > 0) {
                conf.setMaxSpoutPending(_maxSpoutPending);
            }
            conf.setStatsSampleRate(1.0);
            Double RECEIVE_BUFFER_SIZE = pow(2, _receiveBufferSize);
            Double SEND_BUFFER_SIZE = pow(2, _sendBufferSize);
            conf.put(Config.TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE, RECEIVE_BUFFER_SIZE.intValue());
            conf.put(Config.TOPOLOGY_EXECUTOR_SEND_BUFFER_SIZE, SEND_BUFFER_SIZE.intValue());

            StormSubmitter.submitTopology(_name + "_" + topoNum, conf, builder.createTopology());
        }
        metrics(client, _messageSize, _pollFreqSec, _testRunTimeSec);
    } finally {
        //Kill it right now!!!
        KillOptions killOpts = new KillOptions();
        killOpts.set_wait_secs(0);

        for (int topoNum = 0; topoNum < _numTopologies; topoNum++) {
            LOG.info("KILLING " + _name + "_" + topoNum);
            try {
                client.killTopologyWithOpts(_name + "_" + topoNum, killOpts);
            } catch (Exception e) {
                LOG.error("Error tying to kill " + _name + "_" + topoNum, e);
            }
        }
    }
}

From source file: de.bitzeche.video.transcoding.zencoder.ZencoderClient.java

private Integer findIdFromOutputNode(Node output) throws XPathExpressionException {
    Double idDouble = (Double) xPath.evaluate("output/id", output, XPathConstants.NUMBER);
    return idDouble == null ? null : idDouble.intValue();
}

From source file: org.openmrs.module.pharmacyapi.api.prescription.util.AbstractPrescriptionItemGenerator.java

public boolean isOrderExpired(final PrescriptionItem item, final Date creationDate) {

    final Double drugToPickUp = item.getDrugToPickUp();

    final Date nextPickUpDate = this.getNextPickUpDate(item.getDrugOrder());

    final Calendar calendar = Calendar.getInstance();
    calendar.setTime(nextPickUpDate);
    calendar.set(Calendar.HOUR_OF_DAY, 0);
    calendar.set(Calendar.MINUTE, 0);
    calendar.set(Calendar.SECOND, 0);
    calendar.set(Calendar.MILLISECOND, 0);

    calendar.add(Calendar.DAY_OF_MONTH, drugToPickUp.intValue());

    while ((calendar.get(Calendar.DAY_OF_WEEK) == Calendar.SUNDAY)
            || (calendar.get(Calendar.DAY_OF_WEEK) == Calendar.SATURDAY)) {
        calendar.add(Calendar.DAY_OF_MONTH, -1);
    }

    return creationDate.after(calendar.getTime());
}
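
Note that drugToPickUp.intValue() truncates toward zero, so a fractional pick-up window of, say, 2.9 days adds only 2 days before the weekend adjustment. If rounding to the nearest day were the intent, Math.round would be the alternative; a sketch:

int days = (int) Math.round(drugToPickUp); // 2.9 -> 3, where drugToPickUp.intValue() gives 2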

From source file: com.imarchuang.storm.perftest.MainBatch.java

public void realMain(String[] args) throws Exception {
    Map clusterConf = Utils.readStormConfig();
    clusterConf.putAll(Utils.readCommandLineOpts());
    Nimbus.Client client = NimbusClient.getConfiguredClient(clusterConf).getClient();

    CmdLineParser parser = new CmdLineParser(this);
    parser.setUsageWidth(80);
    try {
        // parse the arguments.
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        // if there's a problem in the command line,
        // you'll get this exception. this will report
        // an error message.
        System.err.println(e.getMessage());
        _help = true;
    }
    if (_help) {
        parser.printUsage(System.err);
        System.err.println();
        return;
    }
    if (_numWorkers <= 0) {
        throw new IllegalArgumentException("Need at least one worker");
    }
    if (_name == null || _name.isEmpty()) {
        throw new IllegalArgumentException("name must be something");
    }
    if (!_ackEnabled) {
        _ackers = 0;
    }

    try {
        for (int topoNum = 0; topoNum < _numTopologies; topoNum++) {
            TopologyBuilder builder = new TopologyBuilder();
            LOG.info("Adding in " + _spoutParallel + " spouts");
            builder.setSpout("messageSpout", new SOLSpout(_messageSize, _ackEnabled, _spoutSleepMs),
                    _spoutParallel);
            int freq = (int) (_testRunTimeSec * 1000 / 3) - 10;
            builder.setSpout("signalsSpout", new SOLSignalsSpout(freq));
            LOG.info("Adding in " + _boltParallel + " batcher bolts with _batchSize=" + _batchSize);
            builder.setBolt("messageBoltBatch", new SOLBatchBolt(_batchSize), _boltParallel)
                    .shuffleGrouping("messageSpout");
            LOG.info("Adding in " + _boltParallel + " debatcher bolts");
            builder.setBolt("messageBoltDeBatch", new SOLDebatcherBolt(), _boltParallel)
                    .shuffleGrouping("messageBoltBatch");
            LOG.info("Adding in " + _boltParallel + " bolts");
            builder.setBolt("messageBolt", new SOLBolt(_boltSleepMs), _boltParallel)
                    .shuffleGrouping("messageBoltDeBatch");
            for (int levelNum = 2; levelNum <= _numLevels; levelNum++) {
                LOG.info("Adding in " + _boltParallel + " bolts at level " + levelNum);
                builder.setBolt("messageBolt" + levelNum, new SOLBolt(_boltSleepMs), _boltParallel)
                        .shuffleGrouping("messageBolt" + (levelNum - 1));
            }

            Config conf = new Config();
            conf.setDebug(_debug);
            conf.setNumWorkers(_numWorkers);
            conf.setNumAckers(_ackers);
            if (_maxSpoutPending > 0) {
                conf.setMaxSpoutPending(_maxSpoutPending);
            }
            conf.setStatsSampleRate(1.0);
            Double RECEIVE_BUFFER_SIZE = pow(2, _receiveBufferSize);
            Double SEND_BUFFER_SIZE = pow(2, _sendBufferSize);
            conf.put(Config.TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE, RECEIVE_BUFFER_SIZE.intValue());
            conf.put(Config.TOPOLOGY_EXECUTOR_SEND_BUFFER_SIZE, SEND_BUFFER_SIZE.intValue());

            StormSubmitter.submitTopology(_name + "_" + topoNum, conf, builder.createTopology());
        }
        metrics(client, _messageSize, _pollFreqSec, _testRunTimeSec);
    } finally {
        //Kill it right now!!!
        KillOptions killOpts = new KillOptions();
        killOpts.set_wait_secs(0);

        for (int topoNum = 0; topoNum < _numTopologies; topoNum++) {
            LOG.info("KILLING " + _name + "_" + topoNum);
            try {
                client.killTopologyWithOpts(_name + "_" + topoNum, killOpts);
            } catch (Exception e) {
                LOG.error("Error tying to kill " + _name + "_" + topoNum, e);
            }
        }
    }
}

From source file: org.openscience.cdk.applications.taverna.weka.classification.EvaluateClassificationResultsAsPDFActivity.java

private void createDataset(Instances dataset, Classifier classifier, DefaultCategoryDataset chartDataset,
        LinkedList<Double> setPercentage, String setname) throws Exception {
    WekaTools tools = new WekaTools();
    HashMap<UUID, Double> orgClassMap = new HashMap<UUID, Double>();
    HashMap<UUID, Double> calcClassMap = new HashMap<UUID, Double>();
    Instances trainUUIDSet = Filter.useFilter(dataset, tools.getIDGetter(dataset));
    dataset = Filter.useFilter(dataset, tools.getIDRemover(dataset));
    for (int k = 0; k < dataset.numInstances(); k++) {
        double pred = classifier.classifyInstance(dataset.instance(k));
        UUID uuid = UUID.fromString(trainUUIDSet.instance(k).stringValue(0));
        calcClassMap.put(uuid, pred);
        orgClassMap.put(uuid, dataset.instance(k).classValue());
    }
    HashMap<Double, Integer> correctPred = new HashMap<Double, Integer>();
    HashMap<Double, Integer> occurances = new HashMap<Double, Integer>();
    for (int k = 0; k < dataset.numInstances(); k++) {
        UUID uuid = UUID.fromString(trainUUIDSet.instance(k).stringValue(0));
        double pred = calcClassMap.get(uuid);
        double org = orgClassMap.get(uuid);
        Integer oc = occurances.get(org);
        if (oc == null) {
            occurances.put(org, 1);
        } else {
            occurances.put(org, ++oc);
        }
        if (pred == org) {
            Integer co = correctPred.get(org);
            if (co == null) {
                correctPred.put(org, 1);
            } else {
                correctPred.put(org, ++co);
            }
        }
    }
    double overall = 0;
    for (Entry<Double, Integer> entry : occurances.entrySet()) {
        Double key = entry.getKey();
        int occ = entry.getValue();
        Integer pred = correctPred.get(key);
        int pre = pred == null ? 0 : pred;
        double ratio = pre / (double) occ * 100;
        overall += ratio;
        chartDataset.addValue(ratio, setname, dataset.classAttribute().value(key.intValue()));
    }
    overall /= occurances.size();
    setPercentage.add(overall);
    chartDataset.addValue(overall, setname, "Overall");
}

From source file: com.evidon.areweprivateyet.Aggregator.java

private void createTSV() throws Exception {
    // The TSV only contains decrease numbers for the graph.
    StringBuilder top = new StringBuilder(), content = new StringBuilder();

    top.append("Points");

    // header
    for (String database : decrease.keySet()) {
        for (String type : decrease.get(database).keySet()) {
            top.append(",");
            top.append(type);
        }

        break;
    }

    // content
    for (String database : decrease.keySet()) {
        if (database.equals("baseline")) {
            continue;
        }

        content.append(database);

        for (String type : decrease.get(database).keySet()) {
            content.append(",");
            Double d = Double.parseDouble(decrease.get(database).get(type));
            content.append((d.intValue() >= 0) ? d.intValue() : "0");
        }

        content.append("\n");
    }

    String o = top.toString();
    o += "\n" + content.toString();
    BufferedWriter bw = new BufferedWriter(new FileWriter(path + "tsv"));
    bw.write(o);
    bw.close();
}
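
A final subtlety in the d.intValue() >= 0 check above: intValue() truncates toward zero, so any decrease value in (-1.0, 0.0) becomes 0 and passes the check. A quick sketch of that behavior:

Double d = -0.4;
System.out.println(d.intValue());                    // 0, so (d.intValue() >= 0) is true
System.out.println(Double.valueOf(-1.4).intValue()); // -1, replaced by "0" in the TSV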