Example usage for java.util TreeMap containsKey

List of usage examples for java.util TreeMap containsKey

Introduction

On this page you can find example usages of the java.util TreeMap containsKey method.

Prototype

public boolean containsKey(Object key) 

Source Link

Document

Returns true if this map contains a mapping for the specified key.

Usage

From source file:org.mahasen.node.MahasenPropertyPastContent.java

/**
 * Merges this content's property tree into the content already stored under
 * the given id, or stores this content directly when the id is unused.
 *
 * @param id the pastry identifier the content is being inserted under
 * @param existingContent the content currently stored for the id, or null
 * @return the merged existing content, or this instance when nothing was stored
 * @throws PastException on insertion failure
 */
@Override
public PastContent checkInsert(rice.p2p.commonapi.Id id, PastContent existingContent) throws PastException {
    if (existingContent != null) {

        // String-keyed property tree branch.
        // NOTE(review): this branch compares this.treeType with endsWith() while
        // the integer branch below uses equals() -- presumably both were meant to
        // be equals(); confirm intent before changing.
        if (((MahasenPropertyPastContent) existingContent).getTreeType()
                .equals(MahasenConstants.STRING_PROPERTY_TREE)
                && this.treeType.endsWith(MahasenConstants.STRING_PROPERTY_TREE)) {
            TreeMap<String, Vector<Id>> existingTree = ((MahasenPropertyPastContent) existingContent)
                    .getPropertyTree();

            log.debug("EXISTING TREE " + existingTree);
            log.debug("NEW TREE " + stringPropertyTree);

            if (existingTree != null && stringPropertyTree != null) {
                Iterator keys = stringPropertyTree.keySet().iterator();
                while (keys.hasNext()) {

                    String propertyValue = keys.next().toString();
                    log.debug("property value " + propertyValue);

                    if (existingTree.containsKey(propertyValue)) {
                        log.debug("existing tree contains the property value " + propertyValue);
                        log.debug("get node for existing property value " + existingTree.get(propertyValue));

                        log.debug("node is to delete " + (this.isToDelete));
                        // This updates the Id vector for the existing property value
                        // in the existing TreeMap node. Only the first incoming Id
                        // (index 0) is merged per property value.
                        if (!this.isToDelete) {
                            log.debug("adding resource id to " + propertyValue);
                            if (!existingTree.get(propertyValue)
                                    .contains(stringPropertyTree.get(propertyValue).get(0))) {
                                existingTree.get(propertyValue)
                                        .add(stringPropertyTree.get(propertyValue).get(0));
                            }
                        } else {
                            if (existingTree.get(propertyValue)
                                    .contains(stringPropertyTree.get(propertyValue).get(0))) {
                                log.debug("removing resource id from " + propertyValue);
                                boolean removed = existingTree.get(propertyValue)
                                        .remove(stringPropertyTree.get(propertyValue).get(0));
                                log.debug("deleted " + removed);
                                // Drop the key entirely once its Id vector is empty.
                                if (existingTree.get(propertyValue).size() == 0) {
                                    existingTree.remove(propertyValue);
                                }

                            }

                        }

                    } else {
                        log.debug("existing tree does not contain the property value " + propertyValue);
                        // Add the new property value and its resource ids to the TreeMap.
                        existingTree.put(propertyValue, stringPropertyTree.get(propertyValue));
                    }

                    log.debug("Tree after modifications " + existingTree.toString());

                }
            }

        } else if (((MahasenPropertyPastContent) existingContent).getTreeType()
                .equals(MahasenConstants.INTEGER_PROPERTY_TREE)
                && this.treeType.equals(MahasenConstants.INTEGER_PROPERTY_TREE)) {

            // Integer-keyed property tree branch: same merge/delete logic as above.
            TreeMap<Integer, Vector<Id>> existingTree = ((MahasenPropertyPastContent) existingContent)
                    .getPropertyTree();

            log.debug("EXISTING TREE " + existingTree);
            log.debug("NEW TREE " + intPropertyTree);

            if (existingTree != null && intPropertyTree != null) {
                Iterator keys = intPropertyTree.keySet().iterator();
                while (keys.hasNext()) {

                    Integer propertyValue = Integer.valueOf(keys.next().toString());
                    log.debug("property value " + propertyValue);

                    if (existingTree.containsKey(propertyValue)) {
                        log.debug("existing tree contains the property value " + propertyValue);
                        log.debug("get node for existing property value " + existingTree.get(propertyValue));
                        log.debug("node is to delete " + (this.isToDelete));

                        // This updates the Id vector for the existing property value
                        // in the existing TreeMap node.
                        if (!this.isToDelete) {
                            if (!existingTree.get(propertyValue)
                                    .contains(intPropertyTree.get(propertyValue).get(0))) {
                                existingTree.get(propertyValue).add(intPropertyTree.get(propertyValue).get(0));
                            }
                        } else {
                            if (existingTree.get(propertyValue)
                                    .contains(intPropertyTree.get(propertyValue).get(0))) {
                                existingTree.get(propertyValue)
                                        .remove(intPropertyTree.get(propertyValue).get(0));
                                // Drop the key entirely once its Id vector is empty.
                                if (existingTree.get(propertyValue).size() == 0) {
                                    existingTree.remove(propertyValue);
                                }
                            }

                        }
                    } else {
                        log.debug("existing tree does not contain the property value " + propertyValue);
                        // Add the new property value and its resource ids to the TreeMap.
                        existingTree.put(propertyValue, intPropertyTree.get(propertyValue));
                    }

                    log.debug("Existing PropertyTree" + existingTree.toString());

                }
            }
        }
        return existingContent;
    } else {
        // No content stored under this id yet -- store this object as-is.
        log.debug("===== crate a new property tree====");
        log.debug("Existing PropertyTree" + this.treeType);
        return this;
    }
}

From source file:org.opentaps.domain.container.ConstantsGeneratorContainer.java

/**
 * Builds the map of constant models declared in the generator configuration.
 *
 * A property whose value equals GENERATE_VALUE marks an entity to generate;
 * the remaining "&lt;entity&gt;.&lt;attribute&gt;" properties supply that
 * entity's settings.
 *
 * @param config the generator configuration properties
 * @return entity name mapped to its populated ConstantModel, sorted by name
 * @throws ContainerException if an entity is declared more than once
 */
private TreeMap<String, ConstantModel> readConstantConfiguration(Properties config) throws ContainerException {
    TreeMap<String, ConstantModel> models = new TreeMap<String, ConstantModel>();

    // Pass 1: register every entity flagged for generation.
    Enumeration<?> names = config.propertyNames();
    while (names.hasMoreElements()) {
        String entityName = (String) names.nextElement();
        if (!GENERATE_VALUE.equals(config.getProperty(entityName))) {
            continue;
        }
        if (models.containsKey(entityName)) {
            throw new ContainerException("Entity: [" + entityName + "] already defined in the configuration.");
        }
        models.put(entityName, new ConstantModel(entityName));
    }

    // Pass 2: populate each registered entity from its dotted sub-properties.
    for (String entityName : models.keySet()) {
        ConstantModel model = models.get(entityName);
        model.setClassName(config.getProperty(entityName + ".className"));
        model.setDescription(config.getProperty(entityName + ".description"));
        model.setTypeField(config.getProperty(entityName + ".typeField"));
        model.setNameField(config.getProperty(entityName + ".nameField"));
        model.setDescriptionField(config.getProperty(entityName + ".descriptionField"));
        model.setConstantField(config.getProperty(entityName + ".constantField"));
        model.setWhere(config.getProperty(entityName + ".where"));
    }

    return models;
}

From source file:me.oriley.crate.CrateGenerator.java

/**
 * Recursively scans an asset directory, adding a generated nested class for
 * this directory (with one field per asset file) to the parent builder.
 *
 * @param allAssets accumulates every discovered asset keyed by dotted class path
 * @param parentBuilder builder of the enclosing generated class
 * @param classPathString dotted class-path prefix for assets in this directory
 * @param directory the directory to list
 * @param variantAssetDir the variant asset root, stripped from asset paths
 * @param root true for the top-level invocation (uses the ASSETS class name)
 */
private void listFiles(@NonNull TreeMap<String, Asset> allAssets, @NonNull TypeSpec.Builder parentBuilder,
        @NonNull String classPathString, @NonNull File directory, @NonNull String variantAssetDir,
        boolean root) {

    String rootName = root ? ASSETS : directory.getName();
    TypeSpec.Builder builder = TypeSpec.classBuilder(capitalise(rootName + CLASS)).addModifiers(PUBLIC, STATIC,
            FINAL);

    List<File> files = getFileList(directory);
    TreeMap<String, Asset> assetMap = new TreeMap<>();
    boolean isFontFolder = true;
    boolean isImageFolder = true;

    for (File file : files) {
        if (file.isDirectory()) {
            listFiles(allAssets, builder, classPathString + file.getName() + ".", file, variantAssetDir, false);
        } else {
            String fileName = file.getName();
            String fieldName = sanitiseFieldName(fileName).toUpperCase(US);

            // De-duplicate colliding field names with an incrementing suffix.
            // BUG FIX: the counter was never incremented, so a collision on the
            // first suffixed name (e.g. NAME_0 already taken) looped forever.
            if (assetMap.containsKey(fieldName)) {
                String baseFieldName = fieldName + "_";
                int counter = 0;
                while (assetMap.containsKey(fieldName)) {
                    fieldName = baseFieldName + counter++;
                }
            }

            String filePath = file.getPath().replace(variantAssetDir + "/", "");

            String fileExtension = getFileExtension(fileName).toLowerCase(US);
            AssetHolder asset;
            if (FONT_EXTENSIONS.contains(fileExtension)) {
                isImageFolder = false;
                String fontName = getFontName(file.getPath());
                asset = new FontAssetHolder(fieldName, filePath, fileName,
                        fontName != null ? fontName : fileName);
                builder.addField(createFontAssetField((FontAssetHolder) asset));
            } else if (IMAGE_EXTENSIONS.contains(fileExtension)) {
                isFontFolder = false;

                // Best-effort read of image dimensions; unparseable images fall
                // back to 0x0 rather than aborting generation.
                int width = 0;
                int height = 0;
                try {
                    BufferedImage image = ImageIO.read(file);
                    if (image != null) {
                        width = image.getWidth();
                        height = image.getHeight();
                    }
                } catch (IOException e) {
                    logError("Error parsing image: " + file.getPath(), e, false);
                }

                asset = new ImageAssetHolder(fieldName, filePath, fileName, width, height);
                builder.addField(createImageAssetField((ImageAssetHolder) asset));
            } else {
                isFontFolder = false;
                isImageFolder = false;
                asset = new AssetHolder(fieldName, filePath, fileName);
                builder.addField(createAssetField(asset));
            }
            assetMap.put(fieldName, asset);
            allAssets.put(classPathString + fieldName, asset);
        }
    }

    // A folder containing only fonts (or only images) gets a specialised list type.
    if (!assetMap.isEmpty()) {
        TypeName elementType = TypeVariableName
                .get(isFontFolder ? FontAsset.class : isImageFolder ? ImageAsset.class : Asset.class);
        TypeName listType = ParameterizedTypeName.get(ClassName.get(List.class), elementType);
        builder.addField(createListField(listType, "LIST", assetMap));
    }

    if (root && !allAssets.isEmpty()) {
        TypeName listType = ParameterizedTypeName.get(ClassName.get(List.class),
                TypeVariableName.get(Asset.class));
        builder.addField(createListField(listType, "FULL_LIST", allAssets));
    }

    parentBuilder.addType(builder.build());
    parentBuilder.addField(createNonStaticClassField(rootName));
}

From source file:chatbot.Chatbot.java

/** *************************************************************************************************
 * Matches the input string against all processed lines using TF-IDF document
 * similarity. Exits the JVM when the input is null or empty (pre-existing
 * behaviour, preserved).
 *
 * @param input the sentence to match
 * @return a list of matches ranked by relevance to the input: similarity
 *         score mapped to the line indices sharing that score, in ascending
 *         score order
 */
public TreeMap<Float, ArrayList<Integer>> matchInputFull(String input) {

    if (isNullOrEmpty(input))
        System.exit(0);
    // -1 is the sentinel document id representing the query itself.
    // Integer.valueOf replaces the deprecated new Integer(int) constructor;
    // the unused local 'result' was removed.
    Integer negone = Integer.valueOf(-1);
    processDoc(input, negone);
    calcIDF(lines.size() + 1);
    calcOneTFIDF(negone);
    calcDocSim();
    TreeMap<Float, ArrayList<Integer>> sortedSim = new TreeMap<Float, ArrayList<Integer>>();
    if (docSim == null)
        return sortedSim;
    // Bucket line ids that share the same similarity score.
    Iterator<Integer> it = docSim.keySet().iterator();
    while (it.hasNext()) {
        Integer i = it.next();
        Float f = docSim.get(i);
        if (sortedSim.containsKey(f)) {
            sortedSim.get(f).add(i);
        } else {
            ArrayList<Integer> vals = new ArrayList<Integer>();
            vals.add(i);
            sortedSim.put(f, vals);
        }
    }
    return sortedSim;
}

From source file:com.sfs.whichdoctor.analysis.RevenueAnalysisDAOImpl.java

/**
 * Consolidates the grouped revenue beans into per-group summary beans and
 * overall totals.
 *
 * Each map entry is collapsed into a single summary RevenueBean that
 * accumulates values, net values, per-rate GST values and merged
 * receipts/payments; the summaries are then totalled on the result bean.
 *
 * @param revenueMap the revenue beans grouped by revenue type
 *
 * @return the revenue analysis bean holding the summaries and totals
 */
private RevenueAnalysisBean consolidateSummary(final TreeMap<Object, ArrayList<RevenueBean>> revenueMap) {

    final RevenueAnalysisBean result = new RevenueAnalysisBean();

    final Collection<RevenueBean> summary = new ArrayList<RevenueBean>();

    for (Object key : revenueMap.keySet()) {
        RevenueBean summaryRevenue = new RevenueBean();
        for (RevenueBean revenue : revenueMap.get(key)) {

            // NOTE(review): these reference fields are overwritten on every
            // iteration, so the last bean in the group wins -- confirm intended.
            summaryRevenue.setBatchReference(revenue.getBatchReference());
            summaryRevenue.setBatchNo(revenue.getBatchNo());
            summaryRevenue.setRevenueType(revenue.getRevenueType());
            summaryRevenue.setRevenueClass(revenue.getRevenueClass());

            final double summaryValue = summaryRevenue.getValue();
            final double summaryNetValue = summaryRevenue.getNetValue();

            /* Update the summary revenue totals for this batch */
            summaryRevenue.setValue(summaryValue + revenue.getValue());
            summaryRevenue.setNetValue(summaryNetValue + revenue.getNetValue());

            // Accumulate GST values per GST rate.
            for (Double gstRate : revenue.getGSTValues().keySet()) {
                final double gstValue = revenue.getGSTValues().get(gstRate);
                double gstSubtotal = 0;
                if (summaryRevenue.getGSTValues().containsKey(gstRate)) {
                    gstSubtotal = summaryRevenue.getGSTValues().get(gstRate);
                }
                summaryRevenue.setGSTValue(gstRate, gstSubtotal + gstValue);
            }

            /* Add receipts/payments to this revenue batch */
            TreeMap<Integer, ReceiptBean> receipts = summaryRevenue.getReceipts();
            if (receipts == null) {
                receipts = new TreeMap<Integer, ReceiptBean>();
            }
            for (Integer receiptId : revenue.getReceipts().keySet()) {
                ReceiptBean receipt = revenue.getReceipts().get(receiptId);

                if (receipts.containsKey(receiptId)) {
                    // Receipt already in the summary: merge this bean's payments
                    // into the existing receipt's payment list.
                    ReceiptBean summaryReceipt = receipts.get(receiptId);

                    ArrayList<PaymentBean> payments = (ArrayList<PaymentBean>) summaryReceipt.getPayments();
                    if (payments == null) {
                        payments = new ArrayList<PaymentBean>();
                    }

                    if (receipt.getPayments() != null) {
                        for (PaymentBean payment : receipt.getPayments()) {
                            payments.add(payment);
                        }
                    }
                    summaryReceipt.setPayments(payments);

                    receipts.put(receiptId, summaryReceipt);
                } else {
                    receipts.put(receiptId, receipt);
                }
            }
        }

        if (dataLogger.isDebugEnabled()) {
            dataLogger.debug("Summary value: " + summaryRevenue.getValue());
            dataLogger.debug("Summary net value: " + summaryRevenue.getNetValue());
        }
        summary.add(summaryRevenue);
    }

    // Calculate the totals for the revenue analysis

    for (RevenueBean summaryRevenue : summary) {
        /* Update the overall running revenue totals */
        result.setValue(result.getValue() + summaryRevenue.getValue());
        result.setNetValue(result.getNetValue() + summaryRevenue.getNetValue());

        /* Update the GST totals */
        for (double gstRate : summaryRevenue.getGSTValues().keySet()) {
            final double gstValue = summaryRevenue.getGSTValues().get(gstRate);

            double currentGSTValue = 0;
            if (result.getGSTValues().containsKey(gstRate)) {
                currentGSTValue = result.getGSTValues().get(gstRate);
            }
            result.setGSTValue(gstRate, currentGSTValue + gstValue);
        }
    }
    // Ensure the revenue beans have the same GST fields
    result.setRevenue(processGSTRates(summary));

    if (dataLogger.isDebugEnabled()) {
        dataLogger.debug("Total calculated value: " + result.getValue());
        dataLogger.debug("Total calculated net value: " + result.getNetValue());
    }

    return result;
}

From source file:com.sfs.whichdoctor.importer.ExamImporter.java

/**
 * Assign data./* ww  w .  j a v  a  2 s  .c om*/
 *
 * @param personDAO the person dao
 *
 * @return the hash map< object, list< object>>
 *
 * @throws WhichDoctorImporterException the which doctor importer exception
 */
protected final HashMap<Object, List<Object>> assignData(final PersonDAO personDAO)
        throws WhichDoctorImporterException {

    importLogger.debug("Getting data map");
    HashMap<String, List<String>> dataMap = this.getDataMap();

    if (dataMap == null) {
        throw new WhichDoctorImporterException("Error importing " + "- the data map cannot be null");
    }

    List<Integer> indexValues = new ArrayList<Integer>();
    TreeMap<Integer, PersonBean> keyMap = new TreeMap<Integer, PersonBean>();

    // Load the index values
    if (dataMap.containsKey("MIN") || dataMap.containsKey("Candidate No.")) {
        importLogger.debug("Datamap contains index key");

        // The submitted data has a key field, load associated people
        if (dataMap.containsKey("MIN")) {
            // Load person based on MIN
            for (String strIdentifier : dataMap.get("MIN")) {
                try {
                    Integer identifier = new Integer(strIdentifier);
                    if (!keyMap.containsKey(identifier)) {
                        PersonBean person = personDAO.loadIdentifier(identifier, new BuilderBean());
                        if (person != null) {
                            keyMap.put(identifier, person);
                        }
                    }
                    indexValues.add(identifier);
                } catch (Exception e) {
                    setImportMessage("Error loading person with MIN: " + strIdentifier);
                    importLogger.error("Error loading person with MIN: " + e.getMessage());
                }
            }
        } else {
            // dataMap has Candidate number but not MIN
            for (String strCandidateNo : dataMap.get("Candidate No.")) {
                try {
                    Integer candidateNo = new Integer(strCandidateNo);
                    if (!keyMap.containsKey(candidateNo)) {
                        PersonBean person = personDAO.loadCandidateNumber(candidateNo.intValue(),
                                new BuilderBean());
                        if (person != null) {
                            keyMap.put(candidateNo, person);
                        }
                    }
                    indexValues.add(candidateNo);
                } catch (Exception e) {
                    setImportMessage("Error loading person with Candidate " + "Number: " + strCandidateNo);
                    importLogger.error("Error loading person with " + "Candidate Number: " + e.getMessage());
                }
            }
        }
    }

    // With the index values loaded cycle through and create the actual
    // beans
    for (int i = 0; i < indexValues.size(); i++) {
        Integer index = (Integer) indexValues.get(i);

        if (keyMap.containsKey(index)) {
            PersonBean person = (PersonBean) keyMap.get(index);
            if (person == null) {
                throw new WhichDoctorImporterException("Person is null");
            }
            try {
                // Set the values of the exam object
                ExamBean exam = new ExamBean();
                exam.setReferenceGUID(person.getGUID());
                exam.setLogMessage("Created via automated import process");

                if (dataMap.containsKey("Exam Type")) {
                    List<String> values = dataMap.get("Exam Type");
                    importLogger.info("Exam Type: " + values.get(i));
                    exam.setType(values.get(i));
                }
                if (dataMap.containsKey("Exam Date")) {
                    List<String> values = dataMap.get("Exam Date");
                    importLogger.info("Exam Date: " + DataFilter.parseDate(values.get(i), true));
                    exam.setDateSat(DataFilter.parseDate(values.get(i), true));
                }
                if (dataMap.containsKey("Exam Venue")) {
                    List<String> values = dataMap.get("Exam Venue");
                    importLogger.info("Exam Venue: " + values.get(i));
                    exam.setLocation(values.get(i));
                }
                if (dataMap.containsKey("Result")) {
                    List<String> values = dataMap.get("Result");
                    importLogger.info("Result: " + values.get(i));
                    String status = checkInput("Result", values.get(i));
                    exam.setStatus(status);
                }
                if (dataMap.containsKey("Result Band")) {
                    List<String> values = dataMap.get("Result Band");
                    importLogger.info("Result Band: " + values.get(i));
                    exam.setStatusLevel(values.get(i));
                }

                setBeanArray(person, exam);

            } catch (Exception e) {
                setImportMessage("Error setting values for exam associated to: "
                        + OutputFormatter.toFormattedName(person));
                importLogger.error("Error setting values for exam: " + e.getMessage());
            }
        }
    }
    return this.getBeanMap();
}

From source file:com.sfs.whichdoctor.analysis.RevenueAnalysisDAOImpl.java

/**
 * Stream analysis./*www. j  a va  2s  .  c o m*/
 *
 * @param search the search
 *
 * @return the revenue analysis bean
 *
 * @throws WhichDoctorAnalysisDaoException the which doctor analysis dao
 *             exception
 */
@SuppressWarnings("unchecked")
public final RevenueAnalysisBean streamAnalysis(final RevenueAnalysisBean search)
        throws WhichDoctorAnalysisDaoException {

    /* Zero out values in revenueanalysis bean */
    search.setValue(0);
    search.setNetValue(0);

    /* Set ordering system of returned results */
    String sqlORDER = " ORDER BY RevenueType, receipt.ReceiptNo";
    final StringBuffer sqlWHERE = new StringBuffer();

    Collection<Object> parameters = new ArrayList<Object>();
    if (search.getSQLWhereStatement() != null) {
        if (search.getSQLWhereStatement().compareTo("") != 0) {
            sqlWHERE.append(" AND ");
            sqlWHERE.append(search.getSQLWhereStatement());
        }
    }
    if (search.getSearchParameters() != null) {
        parameters = search.getSearchParameters();
    }

    // BUILD SQL Statement
    final StringBuffer searchSQL = new StringBuffer();
    searchSQL.append(this.getSQL().getValue("revenue"));
    searchSQL.append(sqlWHERE.toString());
    searchSQL.append(" GROUP BY payment.PaymentId, RevenueType ");
    searchSQL.append(sqlORDER);

    dataLogger.info("SQL Query: " + searchSQL.toString());

    Collection<RevenueBean> results = new ArrayList<RevenueBean>();
    try {
        results = this.getJdbcTemplateReader().query(searchSQL.toString(), parameters.toArray(),
                new RowMapper() {
                    public Object mapRow(final ResultSet rs, final int rowNum) throws SQLException {
                        return loadStreamRevenue(rs);
                    }
                });

    } catch (IncorrectResultSizeDataAccessException ie) {
        // No results found for this search
        dataLogger.debug("No results found for search: " + ie.getMessage());
    }

    TreeMap<Object, ArrayList<RevenueBean>> revenueTypeMap = new TreeMap<Object, ArrayList<RevenueBean>>();

    for (RevenueBean revenue : results) {
        if (dataLogger.isDebugEnabled()) {
            dataLogger.debug("Net value: " + revenue.getNetValue());
            dataLogger.debug("Value: " + revenue.getValue());
        }
        ArrayList<RevenueBean> revenueList = new ArrayList<RevenueBean>();
        if (revenueTypeMap.containsKey(revenue.getRevenueType())) {
            revenueList = revenueTypeMap.get(revenue.getRevenueType());
        }
        revenueList.add(revenue);
        revenueTypeMap.put(revenue.getRevenueType(), revenueList);
    }

    final RevenueAnalysisBean summary = consolidateSummary(revenueTypeMap);
    search.setValue(summary.getValue());
    search.setNetValue(summary.getNetValue());
    search.setGSTValues(summary.getGSTValues());
    search.setRevenue(summary.getRevenue());

    return search;
}

From source file:org.apache.hadoop.hdfs.client.ShortCircuitCache.java

/**
 * Inserts a replica into an evictable map.
 *
 * Eviction times must be unique map keys, so if the requested time is taken
 * the time is bumped one nanosecond at a time until a free key is found.
 *
 * @param evictionTimeNs   The eviction time in absolute nanoseconds.
 * @param replica          The replica to insert.
 * @param map              The map to insert it into.
 */
private void insertEvictable(Long evictionTimeNs, ShortCircuitReplica replica,
        TreeMap<Long, ShortCircuitReplica> map) {
    Long candidateTimeNs = evictionTimeNs;
    while (map.containsKey(candidateTimeNs)) {
        candidateTimeNs = candidateTimeNs + 1;
    }
    // The replica must not already be scheduled for eviction.
    Preconditions.checkState(null == replica.getEvictableTimeNs());
    replica.setEvictableTimeNs(candidateTimeNs);
    map.put(candidateTimeNs, replica);
}

From source file:org.finra.herd.dao.helper.EmrPricingHelper.java

/**
 * Finds all the clusters whose core instance price falls within the lowest
 * core instance price range.
 * <p>
 * The range's lower bound is the cheapest core instance price seen; the upper
 * bound is lowerBound * (1 + threshold). For example, with prices 0.30, 0.32,
 * 0.34, 0.36 and a threshold of 0.1 (10%), the range is [0.30, 0.33].
 *
 * @param emrClusterPrices the list of clusters to select from
 * @param lowestCoreInstancePriceThresholdPercentage the threshold value that defines the range of lowest core instance price
 *
 * @return the list of clusters that fall in lowest core instance price range
 */
List<EmrClusterPriceDto> getEmrClusterPricesWithinLowestCoreInstancePriceThreshold(
        final List<EmrClusterPriceDto> emrClusterPrices,
        final BigDecimal lowestCoreInstancePriceThresholdPercentage) {
    // Group the candidate clusters by their core instance price. TreeMap keeps
    // keys sorted, so the cheapest price is always the first entry.
    TreeMap<BigDecimal, List<EmrClusterPriceDto>> pricesByCoreInstancePrice = new TreeMap<>();
    for (final EmrClusterPriceDto emrClusterPriceDto : emrClusterPrices) {
        final BigDecimal coreInstancePrice = getEmrClusterCoreInstancePrice(emrClusterPriceDto);
        List<EmrClusterPriceDto> bucket = pricesByCoreInstancePrice.get(coreInstancePrice);
        if (bucket == null) {
            bucket = new ArrayList<>();
            pricesByCoreInstancePrice.put(coreInstancePrice, bucket);
        }
        bucket.add(emrClusterPriceDto);
    }

    // Log all the information in the tree map
    LOGGER.info("All available EMR clusters keyed by core instance price: availableEmrClusters={}",
            jsonHelper.objectToJson(pricesByCoreInstancePrice));

    // Collect the clusters whose price falls inside the lowest-price range.
    List<EmrClusterPriceDto> lowestCoreInstancePriceEmrClusters = new ArrayList<>();
    if (!pricesByCoreInstancePrice.isEmpty()) {
        // Derive the lowest core instance price range.
        final BigDecimal lowerBound = pricesByCoreInstancePrice.firstEntry().getKey();
        final BigDecimal upperBound = lowerBound
                .multiply(BigDecimal.ONE.add(lowestCoreInstancePriceThresholdPercentage));

        LOGGER.info("emrClusterLowestCoreInstancePriceRange={}",
                jsonHelper.objectToJson(Arrays.asList(lowerBound, upperBound)));

        // The map iterates in ascending price order, so every entry is already
        // at or above the lower bound; stop at the first price past the upper
        // bound since all subsequent entries are larger.
        for (final Map.Entry<BigDecimal, List<EmrClusterPriceDto>> entry : pricesByCoreInstancePrice
                .entrySet()) {
            if (entry.getKey().compareTo(upperBound) > 0) {
                break;
            }
            lowestCoreInstancePriceEmrClusters.addAll(entry.getValue());
        }
    }
    return lowestCoreInstancePriceEmrClusters;
}

From source file:com.facebook.tsdb.tsdash.server.model.Metric.java

/**
 * Creates a new metric with rows aggregated after dissolving the given tags.
 * The resulting metric will no longer be able to accept filters on those
 * tags.
 *
 * @param tagsName the names of the tags to dissolve
 * @param aggregatorName
 *            'sum', 'max', 'min' or 'avg'
 * @return a new Metric object that contains the aggregated rows, or null when
 *         a tag is unknown or was already dissolved
 * @throws IDNotFoundException
 * @throws IOException
 */
public Metric dissolveTags(ArrayList<String> tagsName, String aggregatorName)
        throws IOException, IDNotFoundException {
    if (tagsName.size() == 0) {
        return this;
    }
    // Validate every requested tag before touching any state.
    HashMap<String, HashSet<String>> tagsSet = getTagsSet();
    for (String tagName : tagsName) {
        if (!tagsSet.containsKey(tagName)) {
            // TODO: throw an exception here
            logger.error("Dissolve error: tag '" + tagName + "' is not part of the tag set");
            return null;
        }
        // we can only dissolve a given tag once
        if (dissolvedTags.contains(tagName)) {
            // TODO: throw an exception here
            logger.error("Metric already dissolved tag " + tagName);
            return null;
        }
    }
    // this aligns the time series in a perfect grid
    alignAllTimeSeries();

    Metric newData = new Metric(id, name, idMap);
    Tag[] toDissolve = new Tag[tagsName.size()];
    for (int i = 0; i < toDissolve.length; i++) {
        toDissolve[i] = new Tag(tagsName.get(i), idMap);
        newData.dissolvedTags.add(tagsName.get(i));
    }
    // Rows whose remaining (non-dissolved) tags match collapse into the same
    // key of this map; their series are later aggregated together.
    TreeMap<TagsArray, ArrayList<ArrayList<DataPoint>>> dissolved = new TreeMap<TagsArray, ArrayList<ArrayList<DataPoint>>>(
            Tag.arrayComparator());
    // sort the tags we will dissolve for calling disableTags()
    Arrays.sort(toDissolve, Tag.keyComparator());
    for (TagsArray header : timeSeries.keySet()) {
        TagsArray dissolvedRowTags = header.copy();
        if (toDissolve.length == 1) {
            dissolvedRowTags.disableTag(toDissolve[0]);
        } else {
            dissolvedRowTags.disableTags(toDissolve);
        }
        if (!dissolved.containsKey(dissolvedRowTags)) {
            dissolved.put(dissolvedRowTags, new ArrayList<ArrayList<DataPoint>>());
        }
        dissolved.get(dissolvedRowTags).add(timeSeries.get(header));
    }
    // Aggregate each group of collapsed rows into a single time series.
    Aggregator aggregator = getAggregator(aggregatorName);
    newData.aggregatorName = aggregatorName;
    for (TagsArray header : dissolved.keySet()) {
        newData.timeSeries.put(header, TimeSeries.aggregate(dissolved.get(header), aggregator));
    }
    return newData;
}
}