Example usage for java.util LinkedHashMap containsKey

List of usage examples for java.util LinkedHashMap containsKey

Introduction

On this page you can find usage examples for java.util.LinkedHashMap.containsKey.

Prototype

boolean containsKey(Object key);

Document

Returns true if this map contains a mapping for the specified key.
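
Example

A minimal, self-contained sketch of containsKey (class and key names are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;

public class ContainsKeyDemo {
    public static void main(String[] args) {
        // LinkedHashMap preserves the insertion order of its keys.
        Map<String, Integer> counts = new LinkedHashMap<String, Integer>();
        counts.put("alpha", 1);
        counts.put("beta", 2);

        System.out.println(counts.containsKey("alpha")); // true
        System.out.println(counts.containsKey("gamma")); // false

        // containsKey is the usual guard before get() when a mapping may be absent.
        if (counts.containsKey("beta")) {
            System.out.println("beta -> " + counts.get("beta"));
        }
    }
}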

Usage

From source file:org.kuali.rice.kew.impl.document.search.DocumentSearchCriteriaTranslatorImpl.java

@Override
public DocumentSearchCriteria translateFieldsToCriteria(Map<String, String> fieldValues) {

    DocumentSearchCriteria.Builder criteria = DocumentSearchCriteria.Builder.create();
    List<String> documentAttributeFields = new ArrayList<String>();
    for (Map.Entry<String, String> field : fieldValues.entrySet()) {
        try {
            if (StringUtils.isNotBlank(field.getValue())) {
                if (DIRECT_TRANSLATE_FIELD_NAMES_SET.contains(field.getKey())) {
                    PropertyUtils.setNestedProperty(criteria, field.getKey(), field.getValue());
                } else if (DATE_RANGE_TRANSLATE_FIELD_NAMES_SET.contains(field.getKey())) {
                    applyDateRangeField(criteria, field.getKey(), field.getValue());
                } else if (field.getKey().startsWith(KewApiConstants.DOCUMENT_ATTRIBUTE_FIELD_PREFIX)) {
                    documentAttributeFields.add(field.getKey());
                }

            }
        } catch (Exception e) {
            throw new IllegalStateException("Failed to set document search criteria field: " + field.getKey(),
                    e);
        }
    }

    if (!documentAttributeFields.isEmpty()) {
        translateDocumentAttributeFieldsToCriteria(fieldValues, documentAttributeFields, criteria);
    }

    String routeNodeLookupLogic = fieldValues.get(ROUTE_NODE_LOOKUP_LOGIC);
    if (StringUtils.isNotBlank(routeNodeLookupLogic)) {
        criteria.setRouteNodeLookupLogic(RouteNodeLookupLogic.valueOf(routeNodeLookupLogic));
    }

    String documentStatusesValue = fieldValues
            .get(KEWPropertyConstants.DOC_SEARCH_RESULT_PROPERTY_NAME_STATUS_CODE);
    if (StringUtils.isNotBlank(documentStatusesValue)) {
        String[] documentStatuses = documentStatusesValue.split(",");
        for (String documentStatus : documentStatuses) {
            if (documentStatus.startsWith("category:")) {
                String categoryCode = StringUtils.remove(documentStatus, "category:");
                criteria.getDocumentStatusCategories().add(DocumentStatusCategory.fromCode(categoryCode));
            } else {
                criteria.getDocumentStatuses().add(DocumentStatus.fromCode(documentStatus));
            }
        }
    }

    LinkedHashMap<String, List<String>> applicationDocumentStatusGroupings = ApplicationDocumentStatusUtils
            .getApplicationDocumentStatusCategories(criteria.getDocumentTypeName());

    String applicationDocumentStatusesValue = fieldValues
            .get(KEWPropertyConstants.DOC_SEARCH_RESULT_PROPERTY_NAME_DOC_STATUS);
    if (StringUtils.isNotBlank(applicationDocumentStatusesValue)) {
        String[] applicationDocumentStatuses = applicationDocumentStatusesValue.split(",");
        for (String applicationDocumentStatus : applicationDocumentStatuses) {
            // KULRICE-7786: support for groups (categories) of application document statuses
            if (applicationDocumentStatus.startsWith("category:")) {
                String categoryCode = StringUtils.remove(applicationDocumentStatus, "category:");
                if (applicationDocumentStatusGroupings.containsKey(categoryCode)) {
                    criteria.getApplicationDocumentStatuses()
                            .addAll(applicationDocumentStatusGroupings.get(categoryCode));
                }
            } else {
                criteria.getApplicationDocumentStatuses().add(applicationDocumentStatus);
            }
        }
    }

    // blank out the deprecated field; it's not needed.
    criteria.setApplicationDocumentStatus(null);

    return criteria.build();
}
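
The containsKey guard before the category lookup above prevents addAll(null) when a category code has no grouping. A minimal sketch of the same guarded-lookup pattern, with getOrDefault (Java 8+) as an alternative (map contents and names are illustrative):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class GuardedLookupDemo {
    public static void main(String[] args) {
        Map<String, List<String>> groupings = new LinkedHashMap<String, List<String>>();
        groupings.put("P", Arrays.asList("PENDING", "PROCESSING"));

        List<String> statuses = new ArrayList<String>();
        // Guarded lookup, as in the example above:
        if (groupings.containsKey("P")) {
            statuses.addAll(groupings.get("P"));
        }
        // Same effect without the separate containsKey call:
        statuses.addAll(groupings.getOrDefault("X", Collections.<String>emptyList()));
        System.out.println(statuses); // [PENDING, PROCESSING]
    }
}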

From source file:ubic.gemma.datastructure.matrix.ExpressionDataMatrixColumnSort.java

/**
 * Organizes the results by the factor values (for one factor).
 *
 * @param fv2bms master map
 * @param bioMaterialChunk biomaterials to organize
 * @param factorValues factor values to consider - biomaterials will be organized in the order given
 * @param chunks map of factor values to chunks goes here
 * @param organized the results go here
 */
private static void organizeByFactorValuesVO(Map<FactorValueValueObject, List<BioMaterialValueObject>> fv2bms,
        List<BioMaterialValueObject> bioMaterialChunk, List<FactorValueValueObject> factorValues,
        LinkedHashMap<FactorValueValueObject, List<BioMaterialValueObject>> chunks,
        List<BioMaterialValueObject> organized) {
    Collection<BioMaterialValueObject> seenBioMaterials = new HashSet<BioMaterialValueObject>();
    for (FactorValueValueObject fv : factorValues) {

        if (!fv2bms.containsKey(fv)) {
            /*
             * This can happen if a factor value has been created but not yet associated with any biomaterials. It
             * can also be cruft.
             */
            continue;
        }

        // These are all the biomaterials in the entire experiment; we may not want them all if we are processing just a small chunk.
        List<BioMaterialValueObject> biomsforfv = fv2bms.get(fv);

        for (BioMaterialValueObject bioMaterial : biomsforfv) {
            if (bioMaterialChunk.contains(bioMaterial)) {
                if (!chunks.containsKey(fv)) {
                    chunks.put(fv, new ArrayList<BioMaterialValueObject>());
                }
                if (!chunks.get(fv).contains(bioMaterial)) {
                    /*
                     * shouldn't be twice, but ya never know.
                     */
                    chunks.get(fv).add(bioMaterial);
                }
            }
            seenBioMaterials.add(bioMaterial);
        }

        // If we used that fv ...
        if (chunks.containsKey(fv)) {
            organized.addAll(chunks.get(fv)); // now at least this is in order of this factor
        }
    }

    // Leftovers contains biomaterials which have no factorvalue assigned for this factor.
    Collection<BioMaterialValueObject> leftovers = new HashSet<BioMaterialValueObject>();
    for (BioMaterialValueObject bm : bioMaterialChunk) {
        if (!seenBioMaterials.contains(bm)) {
            leftovers.add(bm);
        }
    }

    if (leftovers.size() > 0) {
        organized.addAll(leftovers);
        chunks.put((FactorValueValueObject) null, new ArrayList<BioMaterialValueObject>(leftovers));
    }

}
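
Note the put with a null key in the leftovers branch above: LinkedHashMap, like HashMap, permits a single null key, used here as the bucket for biomaterials with no factor value. A tiny illustration:

import java.util.LinkedHashMap;
import java.util.Map;

public class NullKeyDemo {
    public static void main(String[] args) {
        // LinkedHashMap, like HashMap, allows one null key.
        Map<String, String> m = new LinkedHashMap<String, String>();
        m.put(null, "leftovers");
        System.out.println(m.containsKey(null)); // true
        System.out.println(m.get(null));         // leftovers
    }
}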

From source file:edu.harvard.i2b2.analysis.dataModel.ConceptTableModel.java

public void fillDataFromTable(ArrayList<ArrayList<ConceptTableRow>> list) {
    list.clear();
    ConceptTableRow row = null;
    ArrayList<ConceptTableRow> group = null;
    Integer curRow = null;
    LinkedHashMap<Integer, ArrayList<ConceptTableRow>> rowMap = new LinkedHashMap<Integer, ArrayList<ConceptTableRow>>();

    for (int i = 1; i < rowCount; i++) {
        row = new ConceptTableRow();
        curRow = Integer.valueOf((String) content.get("0/" + i));
        row.rowNumber = curRow.intValue();
        if (!rowMap.containsKey(curRow)) {
            group = new ArrayList<ConceptTableRow>();
            list.add(group);
            rowMap.put(curRow, group);
        } else {
            group = rowMap.get(curRow);
        }
        row.conceptName = (String) content.get("1/" + i);
        row.dateText = (String) content.get("2/" + i);
        row.valueText = (String) content.get("3/" + i);
        row.height = (String) content.get("4/" + i);
        row.color = (RGB) content.get("5/" + i);
        row.conceptXml = (String) content.get("6/" + i);
        row.data = (QueryModel) content.get("7/" + i);
        row.rowId = i;
        group.add(row);
    }
}
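
The containsKey-then-put sequence above is a get-or-create idiom that recurs in several examples on this page; on Java 8+ it can be collapsed into Map.computeIfAbsent. A minimal sketch (keys and values are illustrative):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class GroupingDemo {
    public static void main(String[] args) {
        // computeIfAbsent creates the group only on first sight of a key,
        // replacing the containsKey/put dance with one map operation.
        Map<Integer, List<String>> groups = new LinkedHashMap<Integer, List<String>>();
        groups.computeIfAbsent(1, k -> new ArrayList<String>()).add("first");
        groups.computeIfAbsent(1, k -> new ArrayList<String>()).add("second");
        groups.computeIfAbsent(2, k -> new ArrayList<String>()).add("other");
        System.out.println(groups); // {1=[first, second], 2=[other]}
    }
}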

From source file:org.kuali.rice.kew.docsearch.DocumentSearchCustomizationMediatorImpl.java

@Override
public DocumentSearchCriteriaConfiguration getDocumentSearchCriteriaConfiguration(DocumentType documentType) {

    List<DocumentTypeAttributeBo> searchableAttributes = documentType.getSearchableAttributes();

    // This first map is used to partition our attributes by application id.  It maps an application id to the
    // list of searchable attribute names that are associated with that application id.  Note that 'null' is a
    // valid key in this map for those attributes that have no application id.
    LinkedHashMap<String, List<String>> applicationIdToAttributeNameMap = new LinkedHashMap<String, List<String>>();

    // This second map is used to map the searchable attribute name to the List of RemotableAttributeFields
    // that are returned by invocations of its getSearchFields method.  This is a LinkedHashMap because it
    // preserves the order of the keys as they are entered.  This allows us to return attribute fields in the
    // proper order as defined by the order of searchable attributes on the doc type, despite the partitioning
    // of our attributes by application id.
    LinkedHashMap<String, AttributeFields> orderedSearchFieldMap = new LinkedHashMap<String, AttributeFields>();
    LinkedHashMap<String, AttributeFields> orderedResultSetFieldMap = new LinkedHashMap<String, AttributeFields>();

    for (DocumentTypeAttributeBo searchableAttribute : searchableAttributes) {
        RuleAttribute ruleAttribute = searchableAttribute.getRuleAttribute();
        String attributeName = ruleAttribute.getName();
        String applicationId = ruleAttribute.getApplicationId();
        if (!applicationIdToAttributeNameMap.containsKey(applicationId)) {
            applicationIdToAttributeNameMap.put(applicationId, new ArrayList<String>());
        }
        applicationIdToAttributeNameMap.get(applicationId).add(attributeName);
        // reserve a spot in the field map
        orderedSearchFieldMap.put(attributeName, null);
    }

    for (String applicationId : applicationIdToAttributeNameMap.keySet()) {
        DocumentSearchCustomizationHandlerService documentSearchCustomizationService = loadCustomizationService(
                applicationId);
        List<String> searchableAttributeNames = applicationIdToAttributeNameMap.get(applicationId);
        DocumentSearchCriteriaConfiguration documentSearchConfiguration = documentSearchCustomizationService
                .getDocumentSearchConfiguration(documentType.getName(), searchableAttributeNames);
        mergeAttributeFields(documentSearchConfiguration.getSearchAttributeFields(), orderedSearchFieldMap);
    }

    DocumentSearchCriteriaConfiguration.Builder configBuilder = DocumentSearchCriteriaConfiguration.Builder
            .create();
    configBuilder.setSearchAttributeFields(flattenOrderedFieldMap(orderedSearchFieldMap));
    return configBuilder.build();
}
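
Reserving a spot with put(attributeName, null) works because re-putting an existing key into an insertion-ordered LinkedHashMap replaces the value but keeps the key's original position. A minimal illustration:

import java.util.LinkedHashMap;
import java.util.Map;

public class ReserveOrderDemo {
    public static void main(String[] args) {
        Map<String, String> ordered = new LinkedHashMap<String, String>();
        // Reserve positions in the desired order.
        ordered.put("first", null);
        ordered.put("second", null);
        // Filling in later, even out of order, keeps the original key order.
        ordered.put("second", "B");
        ordered.put("first", "A");
        System.out.println(ordered); // {first=A, second=B}
    }
}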

From source file:com.opengamma.analytics.financial.interestrate.MultipleYieldCurveFinderDataBundle.java

public MultipleYieldCurveFinderDataBundle(final List<InstrumentDerivative> derivatives,
        final double[] marketValues, final YieldCurveBundle knownCurves,
        final LinkedHashMap<String, double[]> unknownCurveNodePoints,
        final LinkedHashMap<String, Interpolator1D> unknownCurveInterpolators,
        final boolean useFiniteDifferenceByDefault, final FXMatrix fxMatrix) {
    ArgumentChecker.notNull(derivatives, "derivatives");
    ArgumentChecker.noNulls(derivatives, "derivatives");
    ArgumentChecker.notNull(marketValues, "market values null");
    ArgumentChecker.notNull(unknownCurveNodePoints, "unknown curve node points");
    ArgumentChecker.notNull(unknownCurveInterpolators, "unknown curve interpolators");
    ArgumentChecker.notEmpty(unknownCurveNodePoints, "unknown curve node points");
    ArgumentChecker.notEmpty(unknownCurveInterpolators, "unknown curve interpolators");
    ArgumentChecker.isTrue(derivatives.size() == marketValues.length,
            "marketValues wrong length; must be one par rate per derivative (have {} values for {} derivatives",
            marketValues.length, derivatives.size());
    ArgumentChecker.notNull(fxMatrix, "FX matrix");
    if (knownCurves != null) {
        for (final String name : knownCurves.getAllNames()) {
            if (unknownCurveInterpolators.containsKey(name)) {
                throw new IllegalArgumentException("Curve name in known set matches one to be solved for");
            }
        }
        _knownCurves = knownCurves;
    } else {
        _knownCurves = null;
    }
    _derivatives = derivatives;
    _marketValues = marketValues;
    if (unknownCurveNodePoints.size() != unknownCurveInterpolators.size()) {
        throw new IllegalArgumentException("Number of unknown curves not the same as curve interpolators");
    }
    final Iterator<Entry<String, double[]>> nodePointsIterator = unknownCurveNodePoints.entrySet().iterator();
    final Iterator<Entry<String, Interpolator1D>> unknownCurvesIterator = unknownCurveInterpolators.entrySet()
            .iterator();
    _names = new ArrayList<>();
    while (nodePointsIterator.hasNext()) {
        final Entry<String, double[]> entry1 = nodePointsIterator.next();
        final Entry<String, Interpolator1D> entry2 = unknownCurvesIterator.next();
        final String name1 = entry1.getKey();
        if (!name1.equals(entry2.getKey())) {
            throw new IllegalArgumentException("Names must be the same");
        }
        ArgumentChecker.notNull(entry1.getValue(), "curve node points for " + name1);
        ArgumentChecker.notNull(entry2.getValue(), "interpolator for " + name1);
        _names.add(name1);
    }
    int nNodes = 0;
    for (final double[] nodes : unknownCurveNodePoints.values()) {
        nNodes += nodes.length;
    }
    if (nNodes > derivatives.size()) {
        throw new IllegalArgumentException("Total number of nodes (" + nNodes
                + ") is greater than the number of instruments (" + derivatives.size() + ")");
    }
    _totalNodes = nNodes;
    _unknownCurveNodePoints = unknownCurveNodePoints;
    _unknownCurveInterpolators = unknownCurveInterpolators;
    _useFiniteDifferenceByDefault = useFiniteDifferenceByDefault;
    _fxMatrix = fxMatrix;
}
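
The parallel iteration over the two maps above only works because both are LinkedHashMaps populated with the same keys in the same order; with plain HashMaps the entry order would be unspecified and the name-equality check would fail spuriously. A minimal sketch of the guarantee (names and values are illustrative):

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

public class IterationOrderDemo {
    public static void main(String[] args) {
        Map<String, double[]> nodePoints = new LinkedHashMap<String, double[]>();
        Map<String, String> interpolators = new LinkedHashMap<String, String>();
        for (String name : new String[] { "USD", "EUR", "GBP" }) {
            nodePoints.put(name, new double[] { 1.0, 2.0 });
            interpolators.put(name, "linear");
        }
        // Both maps iterate in insertion order, so the keys line up.
        Iterator<String> it1 = nodePoints.keySet().iterator();
        Iterator<String> it2 = interpolators.keySet().iterator();
        while (it1.hasNext()) {
            System.out.println(it1.next().equals(it2.next())); // true, three times
        }
    }
}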

From source file:com.DGSD.Teexter.UI.Recipient.BaseRecipientAdapter.java

private void putOneEntry(TemporaryEntry entry, boolean isAggregatedEntry,
        LinkedHashMap<Long, List<RecipientEntry>> entryMap, List<RecipientEntry> nonAggregatedEntries,
        Set<String> existingDestinations) {
    if (existingDestinations.contains(entry.destination)) {
        return;
    }

    existingDestinations.add(entry.destination);

    if (!isAggregatedEntry) {
        nonAggregatedEntries.add(RecipientEntry.constructTopLevelEntry(entry.displayName, entry.destination,
                entry.destinationType, entry.destinationLabel, entry.contactId, entry.dataId,
                entry.thumbnailUriString));
    } else if (entryMap.containsKey(entry.contactId)) {
        // We already have a section for the person.
        final List<RecipientEntry> entryList = entryMap.get(entry.contactId);
        entryList.add(RecipientEntry.constructSecondLevelEntry(entry.displayName, entry.destination,
                entry.destinationType, entry.destinationLabel, entry.contactId, entry.dataId,
                entry.thumbnailUriString));
    } else {
        final List<RecipientEntry> entryList = new ArrayList<RecipientEntry>();
        entryList.add(RecipientEntry.constructTopLevelEntry(entry.displayName, entry.destination,
                entry.destinationType, entry.destinationLabel, entry.contactId, entry.dataId,
                entry.thumbnailUriString));
        entryMap.put(entry.contactId, entryList);
    }
}

From source file:org.deegree.style.se.parser.GraphicSymbologyParser.java

private Triple<BufferedImage, String, Continuation<List<BufferedImage>>> parseExternalGraphic(
        final XMLStreamReader in) throws IOException, XMLStreamException {
    // TODO color replacement

    in.require(START_ELEMENT, null, "ExternalGraphic");

    String format = null;
    BufferedImage img = null;
    String url = null;
    Triple<InputStream, String, Continuation<StringBuffer>> pair = null;
    Continuation<List<BufferedImage>> contn = null; // needs to be a list to be updatable by reference

    while (!(in.isEndElement() && in.getLocalName().equals("ExternalGraphic"))) {
        in.nextTag();

        if (in.getLocalName().equals("Format")) {
            format = in.getElementText();
        } else if (in.getLocalName().equals("OnlineResource") || in.getLocalName().equals("InlineContent")) {
            pair = getOnlineResourceOrInlineContent(in);
        } else if (in.isStartElement()) {
            Location loc = in.getLocation();
            LOG.error("Found unknown element '{}' at line {}, column {}, skipping.",
                    new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() });
            skipElement(in);
        }
    }

    try {
        if (pair != null) {
            if (pair.first != null && format != null && !format.toLowerCase().contains("svg")) {
                img = ImageIO.read(pair.first);
            }
            url = pair.second;

            final Continuation<StringBuffer> sbcontn = pair.third;

            if (pair.third != null) {
                final LinkedHashMap<String, BufferedImage> cache = new LinkedHashMap<String, BufferedImage>(
                        256) {
                    private static final long serialVersionUID = -6847956873232942891L;

                    @Override
                    protected boolean removeEldestEntry(Map.Entry<String, BufferedImage> eldest) {
                        return size() > 256; // yeah, hardcoded max size... TODO
                    }
                };
                contn = new Continuation<List<BufferedImage>>() {
                    @Override
                    public void updateStep(List<BufferedImage> base, Feature f,
                            XPathEvaluator<Feature> evaluator) {
                        StringBuffer sb = new StringBuffer();
                        sbcontn.evaluate(sb, f, evaluator);
                        String file = sb.toString();
                        if (cache.containsKey(file)) {
                            base.add(cache.get(file));
                            return;
                        }
                        try {
                            BufferedImage i;
                            if (context.location != null) {
                                i = ImageIO.read(context.location.resolve(file));
                            } else {
                                i = ImageIO.read(resolve(file, in));
                            }
                            base.add(i);
                            cache.put(file, i);
                        } catch (MalformedURLException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        } catch (IOException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                    }
                };
            }
        }
    } finally {
        if (pair != null && pair.first != null) {
            try {
                pair.first.close();
            } catch (Exception e) {
                LOG.trace("Stack trace when closing input stream:", e);
            }
        }
    }

    return new Triple<BufferedImage, String, Continuation<List<BufferedImage>>>(img, url, contn);
}
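
The anonymous LinkedHashMap subclass above is the classic bounded-cache idiom: overriding removeEldestEntry evicts the oldest entry once the map grows past a limit. A minimal standalone sketch (the capacity is illustrative); note that the example above uses the default insertion-order mode, which gives FIFO eviction, while passing accessOrder=true to the three-argument constructor turns it into a true LRU cache:

import java.util.LinkedHashMap;
import java.util.Map;

public class BoundedCacheDemo {
    private static final int MAX_ENTRIES = 256;

    public static void main(String[] args) {
        // accessOrder=true orders entries by access, so the eldest entry is
        // the least recently used one; the default (false) evicts FIFO.
        Map<String, String> cache = new LinkedHashMap<String, String>(16, 0.75f, true) {
            @Override
            protected boolean removeEldestEntry(Map.Entry<String, String> eldest) {
                return size() > MAX_ENTRIES;
            }
        };
        cache.put("key", "value");
        System.out.println(cache.containsKey("key")); // true
    }
}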

From source file:ubic.gemma.datastructure.matrix.ExpressionDataMatrixColumnSort.java

/**
 * Divide the biomaterials up into chunks based on the experimental factor given, keeping everything in order.
 *
 * @param ef the experimental factor to chunk on
 * @param bms the biomaterials to organize
 * @return ordered map of fv->bm where fv is of ef, or null if it couldn't be done properly.
 */
private static LinkedHashMap<FactorValue, List<BioMaterial>> chunkOnFactor(ExperimentalFactor ef,
        List<BioMaterial> bms) {

    if (bms == null) {
        return null;
    }

    LinkedHashMap<FactorValue, List<BioMaterial>> chunks = new LinkedHashMap<FactorValue, List<BioMaterial>>();

    /*
     * Get the factor values in the order we have things right now
     */
    for (BioMaterial bm : bms) {
        for (FactorValue fv : bm.getFactorValues()) {
            if (!ef.getFactorValues().contains(fv)) {
                continue;
            }
            if (chunks.containsKey(fv)) {
                continue;
            }
            chunks.put(fv, new ArrayList<BioMaterial>());
        }
    }

    /*
     * A bm may have no value for this factor; such bms need a dummy value.
     */
    FactorValue dummy = FactorValue.Factory.newInstance(ef);
    dummy.setValue("");
    dummy.setId(-1L);
    chunks.put(dummy, new ArrayList<BioMaterial>());

    for (BioMaterial bm : bms) {
        boolean found = false;
        for (FactorValue fv : bm.getFactorValues()) {
            if (ef.getFactorValues().contains(fv)) {
                found = true;
                assert chunks.containsKey(fv);
                chunks.get(fv).add(bm);
            }
        }

        if (!found) {
            if (log.isDebugEnabled())
                log.debug(bm + " has no value for factor=" + ef + "; using dummy value");
            chunks.get(dummy).add(bm);
        }

    }

    if (chunks.get(dummy).size() == 0) {
        if (log.isDebugEnabled())
            log.debug("removing dummy");
        chunks.remove(dummy);
    }

    log.debug(chunks.size() + " chunks for " + ef + ", from current chunk of size " + bms.size());

    /*
     * Sanity check
     */
    int total = 0;
    for (FactorValue fv : chunks.keySet()) {
        List<BioMaterial> chunk = chunks.get(fv);
        total += chunk.size();
    }

    assert total == bms.size() : "expected " + bms.size() + ", got " + total;

    return chunks;
}

From source file:com.uber.hoodie.TestHoodieClient.java

@Test
public void testUpserts() throws Exception {
    HoodieWriteConfig cfg = getConfig();
    HoodieWriteClient client = new HoodieWriteClient(jsc, cfg);
    HoodieIndex index = HoodieIndex.createIndex(cfg, jsc);
    FileSystem fs = FSUtils.getFs();

    /*
     * Write 1 (only inserts)
     */
    String newCommitTime = "001";
    List<HoodieRecord> records = dataGen.generateInserts(newCommitTime, 200);
    JavaRDD<HoodieRecord> writeRecords = jsc.parallelize(records, 1);

    List<WriteStatus> statuses = client.upsert(writeRecords, newCommitTime).collect();
    assertNoWriteErrors(statuses);

    // check the partition metadata is written out
    assertPartitionMetadata(HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS, fs);

    // verify that there is a commit
    HoodieReadClient readClient = new HoodieReadClient(jsc, basePath, sqlContext);
    assertEquals("Expecting a single commit.", readClient.listCommitsSince("000").size(), 1);
    assertEquals("Latest commit should be 001", readClient.latestCommit(), newCommitTime);
    assertEquals("Must contain 200 records", readClient.readCommit(newCommitTime).count(), records.size());
    // Should have 200 records in table (check using Index), all in locations marked at commit
    HoodieTableMetaClient metaClient = new HoodieTableMetaClient(fs, basePath);
    HoodieTable table = HoodieTable.getHoodieTable(metaClient, getConfig());

    List<HoodieRecord> taggedRecords = index.tagLocation(jsc.parallelize(records, 1), table).collect();
    checkTaggedRecords(taggedRecords, "001");

    /**
     * Write 2 (updates)
     */
    newCommitTime = "004";
    records = dataGen.generateUpdates(newCommitTime, 100);
    LinkedHashMap<HoodieKey, HoodieRecord> recordsMap = new LinkedHashMap<>();
    for (HoodieRecord rec : records) {
        if (!recordsMap.containsKey(rec.getKey())) {
            recordsMap.put(rec.getKey(), rec);
        }
    }
    List<HoodieRecord> dedupedRecords = new ArrayList<>(recordsMap.values());

    statuses = client.upsert(jsc.parallelize(records, 1), newCommitTime).collect();
    // Verify there are no errors
    assertNoWriteErrors(statuses);

    // verify there are now 2 commits
    readClient = new HoodieReadClient(jsc, basePath, sqlContext);
    assertEquals("Expecting two commits.", readClient.listCommitsSince("000").size(), 2);
    assertEquals("Latest commit should be 004", readClient.latestCommit(), newCommitTime);

    metaClient = new HoodieTableMetaClient(fs, basePath);
    table = HoodieTable.getHoodieTable(metaClient, getConfig());

    // Index should be able to locate all updates in correct locations.
    taggedRecords = index.tagLocation(jsc.parallelize(dedupedRecords, 1), table).collect();
    checkTaggedRecords(taggedRecords, "004");

    // Check the entire dataset still has 200 records
    String[] fullPartitionPaths = new String[dataGen.getPartitionPaths().length];
    for (int i = 0; i < fullPartitionPaths.length; i++) {
        fullPartitionPaths[i] = String.format("%s/%s/*", basePath, dataGen.getPartitionPaths()[i]);
    }
    assertEquals("Must contain 200 records", readClient.read(fullPartitionPaths).count(), 200);

    // Check incremental consumption from intermediate commit times
    assertEquals("Incremental consumption from time 002, should give all records in commit 004",
            readClient.readCommit(newCommitTime).count(), readClient.readSince("002").count());
    assertEquals("Incremental consumption from time 001, should give all records in commit 004",
            readClient.readCommit(newCommitTime).count(), readClient.readSince("001").count());
}
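
The recordsMap above deduplicates records by key while keeping the first occurrence in input order, which is what LinkedHashMap plus a containsKey guard buys over a plain HashMap. On Java 8+, putIfAbsent expresses the same thing per element; a minimal standalone sketch (keys and values are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;

public class DedupDemo {
    public static void main(String[] args) {
        // putIfAbsent keeps the first value seen per key; LinkedHashMap
        // preserves the order in which keys were first encountered.
        Map<String, String> firstSeen = new LinkedHashMap<String, String>();
        for (String s : new String[] { "b:1", "a:1", "b:2" }) {
            firstSeen.putIfAbsent(s.split(":")[0], s);
        }
        System.out.println(firstSeen.values()); // [b:1, a:1]
    }
}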

From source file:com.smartitengineering.dao.impl.hbase.CommonDao.java

protected void put(Template[] states, final boolean merge) throws IllegalStateException {
    LinkedHashMap<String, List<Put>> allPuts = new LinkedHashMap<String, List<Put>>();
    for (Template state : states) {
        if (!state.isValid()) {
            throw new IllegalStateException("Entity not in valid state!");
        }
        final LinkedHashMap<String, Put> puts;
        puts = getConverter().objectToRows(state, executorService, getLockType().equals(LockType.PESSIMISTIC));
        for (Map.Entry<String, Put> put : puts.entrySet()) {
            final List<Put> putList;
            if (allPuts.containsKey(put.getKey())) {
                putList = allPuts.get(put.getKey());
            } else {
                putList = new ArrayList<Put>();
                allPuts.put(put.getKey(), putList);
            }
            putList.add(put.getValue());
        }
    }
    for (Map.Entry<String, List<Put>> puts : allPuts.entrySet()) {
        if (LockType.OPTIMISTIC.equals(getLockType()) && infoProvider.getVersionColumnFamily() != null
                && infoProvider.getVersionColumnQualifier() != null) {
            putOptimistically(puts, merge, states);
        } else {
            putNonOptimistically(puts, merge, states);
        }
    }
}