List of usage examples for com.mongodb.BasicDBList (the BasicDBList() constructor)
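BasicDBList is the legacy MongoDB Java driver's representation of a BSON array. It extends BasicBSONList and implements java.util.List, so elements are appended with plain add() calls and the list can be embedded directly in a document, as the examples below show. A minimal sketch (the field names are illustrative, not taken from the examples):

    import com.mongodb.BasicDBList;
    import com.mongodb.BasicDBObject;

    // Build a document with an embedded array: { "name" : "chr1" , "tags" : [ "small" , "big" ] }
    BasicDBList tags = new BasicDBList();
    tags.add("small");
    tags.add("big");
    BasicDBObject doc = new BasicDBObject("name", "chr1").append("tags", tags);
    System.out.println(doc); // toString() renders the document as JSON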
From source file:org.opencb.opencga.storage.mongodb.alignment.CoverageMongoDBWriter.java
License:Apache License
private void secureInsert(DBObject query, DBObject object, String chromosome, int start, int size) {
    boolean documentExists = true;
    boolean fileExists = true;

    // Check if the document exists
    {
        QueryResult countId = collection.count(query);
        if (countId.getNumResults() == 1 && countId.getResultType().equals(Long.class.getCanonicalName())) {
            if ((Long) countId.getResult().get(0) < 1) {
                DBObject document = BasicDBObjectBuilder.start()
                        .append(FILES_FIELD, new BasicDBList())
                        .append(CHR_FIELD, chromosome)
                        .append(START_FIELD, start)
                        .append(SIZE_FIELD, size)
                        .get();
                document.putAll(query);            // {_id:<chunkId>, files:[]}
                collection.insert(document, null); // Insert a document with an empty files array.
                fileExists = false;
            }
        } else {
            logger.error(countId.getErrorMsg(), countId);
        }
    }

    if (documentExists) {
        // Check if the file exists
        BasicDBObject fileQuery = new BasicDBObject(FILES_FIELD + "." + FILE_ID_FIELD, fileId);
        fileQuery.putAll(query);
        QueryResult countFile = collection.count(fileQuery);
        if (countFile.getNumResults() == 1 && countFile.getResultType().equals(Long.class.getCanonicalName())) {
            if ((Long) countFile.getResult().get(0) < 1) {
                fileExists = false;
            }
        } else {
            logger.error(countFile.getErrorMsg(), countFile);
        }
    }

    if (fileExists) {
        BasicDBObject fileQuery = new BasicDBObject(FILES_FIELD + "." + FILE_ID_FIELD, fileId);
        fileQuery.putAll(query);
        BasicDBObject fileObject = new BasicDBObject();
        for (String key : object.keySet()) {
            fileObject.put(FILES_FIELD + ".$." + key, object.get(key));
        }
        DBObject update = new BasicDBObject("$set", fileObject);
        // db.<collectionName>.update({_id:<chunkId>, "files.id":<fileId>}, {$set:{"files.$.<objKey>":<objValue>}})
        collection.update(fileQuery, update, updateOptions);
    } else {
        BasicDBObject fileObject = new BasicDBObject(FILE_ID_FIELD, fileId);
        fileObject.putAll(object);
        DBObject update = new BasicDBObject("$addToSet", new BasicDBObject(FILES_FIELD, fileObject));
        // db.<collectionName>.update({_id:<chunkId>}, {$addToSet:{files:{id:<fileId>, <object>}}})
        collection.update(query, update, updateOptions);
    }
}
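The method above keeps one document per coverage chunk and distinguishes two update shapes: the positional $ operator to modify an array element that already exists, and $addToSet to append a new one. A condensed sketch of just those two shapes, assuming the same files/id schema; the "avg" field and the coverage collection name are hypothetical:

    // Entry exists: match the array element and update it in place with the positional operator.
    // db.coverage.update({_id: <chunkId>, "files.id": <fileId>}, {$set: {"files.$.avg": 0.75}})
    DBObject setInPlace = new BasicDBObject("$set", new BasicDBObject("files.$.avg", 0.75));

    // Entry missing: append it, with $addToSet guarding against duplicates.
    // db.coverage.update({_id: <chunkId>}, {$addToSet: {files: {id: <fileId>, avg: 0.75}}})
    DBObject addNew = new BasicDBObject("$addToSet",
            new BasicDBObject("files", new BasicDBObject("id", fileId).append("avg", 0.75)));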
From source file:org.opencb.opencga.storage.mongodb.variant.adaptors.VariantMongoDBAdaptor.java
License:Apache License
@Override
public QueryResult getFrequency(Query query, Region region, int regionIntervalSize) {
    // db.variants.aggregate( { $match: { $and: [ {chr: "1"}, {start: {$gt: 251391, $lt: 2701391}} ] }},
    //                        { $group: { _id: { $subtract: [ { $divide: ["$start", 20000] },
    //                                                        { $divide: [{$mod: ["$start", 20000]}, 20000] } ] },
    //                                    totalCount: {$sum: 1}}})
    QueryOptions options = new QueryOptions();

    // If the interval is not provided, it is set to a value that yields 200 intervals
    if (regionIntervalSize <= 0) {
        regionIntervalSize = (region.getEnd() - region.getStart()) / 200;
    }

    Document start = new Document("$gt", region.getStart());
    start.append("$lt", region.getEnd());

    BasicDBList andArr = new BasicDBList();
    andArr.add(new Document(DocumentToVariantConverter.CHROMOSOME_FIELD, region.getChromosome()));
    andArr.add(new Document(DocumentToVariantConverter.START_FIELD, start));

    // Parse the rest of the query
    Document mongoQuery = parseQuery(query);
    if (!mongoQuery.isEmpty()) {
        andArr.add(mongoQuery);
    }
    Document match = new Document("$match", new Document("$and", andArr));

    BasicDBList divide1 = new BasicDBList();
    divide1.add("$start");
    divide1.add(regionIntervalSize);

    BasicDBList divide2 = new BasicDBList();
    divide2.add(new Document("$mod", divide1));
    divide2.add(regionIntervalSize);

    BasicDBList subtractList = new BasicDBList();
    subtractList.add(new Document("$divide", divide1));
    subtractList.add(new Document("$divide", divide2));
    Document subtract = new Document("$subtract", subtractList);

    Document totalCount = new Document("$sum", 1);
    Document g = new Document("_id", subtract);
    g.append("features_count", totalCount);
    Document group = new Document("$group", g);

    Document sort = new Document("$sort", new Document("_id", 1));

    long dbTimeStart = System.currentTimeMillis();
    QueryResult output = variantsCollection.aggregate(Arrays.asList(match, group, sort), options);
    long dbTimeEnd = System.currentTimeMillis();

    Map<Long, Document> ids = new HashMap<>();
    // Create a Document for each interval with features inside it
    for (Document intervalObj : (List<Document>) output.getResult()) {
        Long auxId = Math.round((Double) intervalObj.get("_id")); // is double
        Document intervalVisited = ids.get(auxId);
        if (intervalVisited == null) {
            intervalObj.put("_id", auxId);
            intervalObj.put("start", getChunkStart(auxId.intValue(), regionIntervalSize));
            intervalObj.put("end", getChunkEnd(auxId.intValue(), regionIntervalSize));
            intervalObj.put("chromosome", region.getChromosome());
            intervalObj.put("features_count", Math.log((int) intervalObj.get("features_count")));
            ids.put(auxId, intervalObj);
        } else {
            Double sum = (Double) intervalVisited.get("features_count")
                    + Math.log((int) intervalObj.get("features_count"));
            intervalVisited.put("features_count", sum.intValue());
        }
    }

    // Create a Document for each interval without features inside it
    BasicDBList resultList = new BasicDBList();
    int firstChunkId = getChunkId(region.getStart(), regionIntervalSize);
    int lastChunkId = getChunkId(region.getEnd(), regionIntervalSize);
    Document intervalObj;
    for (int chunkId = firstChunkId; chunkId <= lastChunkId; chunkId++) {
        intervalObj = ids.get((long) chunkId);
        if (intervalObj == null) {
            intervalObj = new Document();
            intervalObj.put("_id", chunkId);
            intervalObj.put("start", getChunkStart(chunkId, regionIntervalSize));
            intervalObj.put("end", getChunkEnd(chunkId, regionIntervalSize));
            intervalObj.put("chromosome", region.getChromosome());
            intervalObj.put("features_count", 0);
        }
        resultList.add(intervalObj);
    }

    return new QueryResult(region.toString(), ((Long) (dbTimeEnd - dbTimeStart)).intValue(),
            resultList.size(), resultList.size(), null, null, resultList);
}
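The $subtract/$divide/$mod expression the pipeline groups on is integer division in disguise: start/size - (start % size)/size equals floor(start/size), i.e. the chunk index of the variant. A client-side sketch with hypothetical values:

    // For start = 251391 and intervalSize = 20000:
    // 251391/20000 - (251391 % 20000)/20000 = 12.56955 - 0.56955 = 12
    int start = 251391;
    int intervalSize = 20000;
    int chunkId = start / intervalSize; // integer division: 12
    double pipelineValue = (double) start / intervalSize
            - (double) (start % intervalSize) / intervalSize; // 12.0, as the $group _id computes it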
From source file:org.opencb.opencga.storage.mongodb.variant.DBObjectToVariantConverter.java
License:Apache License
@Override
public DBObject convertToStorageType(Variant variant) {
    // Attributes easily calculated
    BasicDBObject mongoVariant = new BasicDBObject("_id", buildStorageId(variant))
            // .append(IDS_FIELD, object.getIds()) // Do not include IDs.
            .append(CHROMOSOME_FIELD, variant.getChromosome())
            .append(START_FIELD, variant.getStart())
            .append(END_FIELD, variant.getEnd())
            .append(LENGTH_FIELD, variant.getLength())
            .append(REFERENCE_FIELD, variant.getReference())
            .append(ALTERNATE_FIELD, variant.getAlternate())
            .append(TYPE_FIELD, variant.getType().name());

    // Internal fields used for query optimization (dictionary named "_at")
    BasicDBObject _at = new BasicDBObject();
    mongoVariant.append("_at", _at);

    // Two different chunk sizes are calculated for different resolution levels: 1k and 10k
    BasicDBList chunkIds = new BasicDBList();
    String chunkSmall = variant.getChromosome() + "_"
            + variant.getStart() / VariantMongoDBWriter.CHUNK_SIZE_SMALL + "_"
            + VariantMongoDBWriter.CHUNK_SIZE_SMALL / 1000 + "k";
    String chunkBig = variant.getChromosome() + "_"
            + variant.getStart() / VariantMongoDBWriter.CHUNK_SIZE_BIG + "_"
            + VariantMongoDBWriter.CHUNK_SIZE_BIG / 1000 + "k";
    chunkIds.add(chunkSmall);
    chunkIds.add(chunkBig);
    _at.append("chunkIds", chunkIds);

    // Transform HGVS: Map of lists -> List of map entries
    BasicDBList hgvs = new BasicDBList();
    for (Map.Entry<String, Set<String>> entry : variant.getHgvs().entrySet()) {
        for (String value : entry.getValue()) {
            hgvs.add(new BasicDBObject(HGVS_TYPE_FIELD, entry.getKey()).append(HGVS_NAME_FIELD, value));
        }
    }
    mongoVariant.append(HGVS_FIELD, hgvs);

    // Files
    if (variantSourceEntryConverter != null) {
        BasicDBList mongoFiles = new BasicDBList();
        for (VariantSourceEntry archiveFile : variant.getSourceEntries().values()) {
            mongoFiles.add(variantSourceEntryConverter.convertToStorageType(archiveFile));
        }
        mongoVariant.append(STUDIES_FIELD, mongoFiles);
    }

    // Annotations
    mongoVariant.append(ANNOTATION_FIELD, Collections.emptyList());

    // Statistics
    if (statsConverter != null) {
        List mongoStats = statsConverter.convertCohortsToStorageType(variant.getSourceEntries());
        mongoVariant.put(STATS_FIELD, mongoStats);
    }
    return mongoVariant;
}
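Each variant is tagged with two chunk IDs of the form <chromosome>_<start/chunkSize>_<chunkSize/1000>k, one per resolution level. A quick check of the string arithmetic, assuming CHUNK_SIZE_SMALL = 1000 and CHUNK_SIZE_BIG = 10000 (consistent with the "1k and 10k" comment above, but assumed values, not confirmed constants):

    // A variant on chromosome "1" at start 251391 would produce:
    String chunkSmall = "1" + "_" + (251391 / 1000) + "_" + (1000 / 1000) + "k";   // "1_251_1k"
    String chunkBig = "1" + "_" + (251391 / 10000) + "_" + (10000 / 1000) + "k";   // "1_25_10k"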
From source file:org.opencb.opencga.storage.mongodb.variant.VariantMongoDBAdaptor.java
License:Apache License
@Override
public QueryResult getFrequency(Query query, Region region, int regionIntervalSize) {
    // db.variants.aggregate( { $match: { $and: [ {chr: "1"}, {start: {$gt: 251391, $lt: 2701391}} ] }},
    //                        { $group: { _id: { $subtract: [ { $divide: ["$start", 20000] },
    //                                                        { $divide: [{$mod: ["$start", 20000]}, 20000] } ] },
    //                                    totalCount: {$sum: 1}}})
    QueryOptions options = new QueryOptions();

    // If the interval is not provided, it is set to a value that yields 200 intervals
    if (regionIntervalSize <= 0) {
        regionIntervalSize = (region.getEnd() - region.getStart()) / 200;
    }

    BasicDBObject start = new BasicDBObject("$gt", region.getStart());
    start.append("$lt", region.getEnd());

    BasicDBList andArr = new BasicDBList();
    andArr.add(new BasicDBObject(DBObjectToVariantConverter.CHROMOSOME_FIELD, region.getChromosome()));
    andArr.add(new BasicDBObject(DBObjectToVariantConverter.START_FIELD, start));

    // Parse the rest of the query
    QueryBuilder qb = new QueryBuilder();
    DBObject optionsMatch = parseQuery(query, qb).get();
    if (!optionsMatch.keySet().isEmpty()) {
        andArr.add(optionsMatch);
    }
    DBObject match = new BasicDBObject("$match", new BasicDBObject("$and", andArr));

    BasicDBList divide1 = new BasicDBList();
    divide1.add("$start");
    divide1.add(regionIntervalSize);

    BasicDBList divide2 = new BasicDBList();
    divide2.add(new BasicDBObject("$mod", divide1));
    divide2.add(regionIntervalSize);

    BasicDBList subtractList = new BasicDBList();
    subtractList.add(new BasicDBObject("$divide", divide1));
    subtractList.add(new BasicDBObject("$divide", divide2));
    BasicDBObject subtract = new BasicDBObject("$subtract", subtractList);

    DBObject totalCount = new BasicDBObject("$sum", 1);
    BasicDBObject g = new BasicDBObject("_id", subtract);
    g.append("features_count", totalCount);
    DBObject group = new BasicDBObject("$group", g);

    DBObject sort = new BasicDBObject("$sort", new BasicDBObject("_id", 1));

    long dbTimeStart = System.currentTimeMillis();
    QueryResult output = variantsCollection.aggregate(Arrays.asList(match, group, sort), options);
    long dbTimeEnd = System.currentTimeMillis();

    Map<Long, DBObject> ids = new HashMap<>();
    // Create a DBObject for each interval with features inside it
    for (DBObject intervalObj : (List<DBObject>) output.getResult()) {
        Long _id = Math.round((Double) intervalObj.get("_id")); // is double
        DBObject intervalVisited = ids.get(_id);
        if (intervalVisited == null) {
            intervalObj.put("_id", _id);
            intervalObj.put("start", getChunkStart(_id.intValue(), regionIntervalSize));
            intervalObj.put("end", getChunkEnd(_id.intValue(), regionIntervalSize));
            intervalObj.put("chromosome", region.getChromosome());
            intervalObj.put("features_count", Math.log((int) intervalObj.get("features_count")));
            ids.put(_id, intervalObj);
        } else {
            Double sum = (Double) intervalVisited.get("features_count")
                    + Math.log((int) intervalObj.get("features_count"));
            intervalVisited.put("features_count", sum.intValue());
        }
    }

    // Create a DBObject for each interval without features inside it
    BasicDBList resultList = new BasicDBList();
    int firstChunkId = getChunkId(region.getStart(), regionIntervalSize);
    int lastChunkId = getChunkId(region.getEnd(), regionIntervalSize);
    DBObject intervalObj;
    for (int chunkId = firstChunkId; chunkId <= lastChunkId; chunkId++) {
        intervalObj = ids.get((long) chunkId);
        if (intervalObj == null) {
            intervalObj = new BasicDBObject();
            intervalObj.put("_id", chunkId);
            intervalObj.put("start", getChunkStart(chunkId, regionIntervalSize));
            intervalObj.put("end", getChunkEnd(chunkId, regionIntervalSize));
            intervalObj.put("chromosome", region.getChromosome());
            intervalObj.put("features_count", 0);
        }
        resultList.add(intervalObj);
    }

    return new QueryResult(region.toString(), ((Long) (dbTimeEnd - dbTimeStart)).intValue(),
            resultList.size(), resultList.size(), null, null, resultList);
}
From source file:org.opencb.opencga.storage.mongodb.variant.VariantMongoDBAdaptor.java
License:Apache License
@Override
@Deprecated
public QueryResult getVariantFrequencyByRegion(Region region, QueryOptions options) {
    // db.variants.aggregate( { $match: { $and: [ {chr: "1"}, {start: {$gt: 251391, $lt: 2701391}} ] }},
    //                        { $group: { _id: { $subtract: [ { $divide: ["$start", 20000] },
    //                                                        { $divide: [{$mod: ["$start", 20000]}, 20000] } ] },
    //                                    totalCount: {$sum: 1}}})
    if (options == null) {
        options = new QueryOptions();
    }
    int interval = options.getInt("interval", 20000);

    BasicDBObject start = new BasicDBObject("$gt", region.getStart());
    start.append("$lt", region.getEnd());

    BasicDBList andArr = new BasicDBList();
    andArr.add(new BasicDBObject(DBObjectToVariantConverter.CHROMOSOME_FIELD, region.getChromosome()));
    andArr.add(new BasicDBObject(DBObjectToVariantConverter.START_FIELD, start));

    // Parse the rest of the options
    QueryBuilder qb = new QueryBuilder();
    DBObject optionsMatch = parseQueryOptions(options, qb).get();
    if (!optionsMatch.keySet().isEmpty()) {
        andArr.add(optionsMatch);
    }
    DBObject match = new BasicDBObject("$match", new BasicDBObject("$and", andArr));

    BasicDBList divide1 = new BasicDBList();
    divide1.add("$start");
    divide1.add(interval);

    BasicDBList divide2 = new BasicDBList();
    divide2.add(new BasicDBObject("$mod", divide1));
    divide2.add(interval);

    BasicDBList subtractList = new BasicDBList();
    subtractList.add(new BasicDBObject("$divide", divide1));
    subtractList.add(new BasicDBObject("$divide", divide2));
    BasicDBObject subtract = new BasicDBObject("$subtract", subtractList);

    DBObject totalCount = new BasicDBObject("$sum", 1);
    BasicDBObject g = new BasicDBObject("_id", subtract);
    g.append("features_count", totalCount);
    DBObject group = new BasicDBObject("$group", g);

    DBObject sort = new BasicDBObject("$sort", new BasicDBObject("_id", 1));

    long dbTimeStart = System.currentTimeMillis();
    QueryResult output = variantsCollection.aggregate(Arrays.asList(match, group, sort), options);
    long dbTimeEnd = System.currentTimeMillis();

    Map<Long, DBObject> ids = new HashMap<>();
    // Create a DBObject for each interval with features inside it
    for (DBObject intervalObj : (List<DBObject>) output.getResult()) {
        Long _id = Math.round((Double) intervalObj.get("_id")); // is double
        DBObject intervalVisited = ids.get(_id);
        if (intervalVisited == null) {
            intervalObj.put("_id", _id);
            intervalObj.put("start", getChunkStart(_id.intValue(), interval));
            intervalObj.put("end", getChunkEnd(_id.intValue(), interval));
            intervalObj.put("chromosome", region.getChromosome());
            intervalObj.put("features_count", Math.log((int) intervalObj.get("features_count")));
            ids.put(_id, intervalObj);
        } else {
            Double sum = (Double) intervalVisited.get("features_count")
                    + Math.log((int) intervalObj.get("features_count"));
            intervalVisited.put("features_count", sum.intValue());
        }
    }

    // Create a DBObject for each interval without features inside it
    BasicDBList resultList = new BasicDBList();
    int firstChunkId = getChunkId(region.getStart(), interval);
    int lastChunkId = getChunkId(region.getEnd(), interval);
    DBObject intervalObj;
    for (int chunkId = firstChunkId; chunkId <= lastChunkId; chunkId++) {
        intervalObj = ids.get((long) chunkId);
        if (intervalObj == null) {
            intervalObj = new BasicDBObject();
            intervalObj.put("_id", chunkId);
            intervalObj.put("start", getChunkStart(chunkId, interval));
            intervalObj.put("end", getChunkEnd(chunkId, interval));
            intervalObj.put("chromosome", region.getChromosome());
            intervalObj.put("features_count", 0);
        }
        resultList.add(intervalObj);
    }

    return new QueryResult(region.toString(), ((Long) (dbTimeEnd - dbTimeStart)).intValue(),
            resultList.size(), resultList.size(), null, null, resultList);
}
From source file:org.opendaylight.controller.samples.onftappingapp.MatchCriteria.java
License:Apache License
public BasicDBObject getAsDocument() {
    BasicDBObject document = new BasicDBObject();
    document.put("refCount", this.getReferenceCount());
    document.put("name", this.getName());
    document.put("enabled", this.getEnabled());
    document.put("reflexive", this.getReflexive());
    document.put("priority", this.getPriority());

    List<Object> matchFieldsDoc = new BasicDBList();
    for (int i = 0; i < matchFieldList.size(); i++) {
        MatchField matchField = matchFieldList.get(i);
        matchFieldsDoc.add(matchField.getAsDocument());
    }
    document.put("matchFields", matchFieldsDoc);
    return document;
}
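Documents nested inside a BasicDBList serialize as a JSON array when the enclosing document is rendered. A minimal sketch of the resulting shape; the field names and values here are hypothetical, not taken from the onftappingapp classes:

    BasicDBList matchFields = new BasicDBList();
    matchFields.add(new BasicDBObject("field", "ipSrc").append("value", "10.0.0.0/8"));
    BasicDBObject doc = new BasicDBObject("name", "criteria-1").append("matchFields", matchFields);
    // doc.toString() -> { "name" : "criteria-1" , "matchFields" : [ { "field" : "ipSrc" , "value" : "10.0.0.0/8" } ] }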
From source file:org.opendaylight.controller.samples.onftappingapp.PortRangeList.java
License:Apache License
public BasicDBObject getAsDocument() {
    BasicDBObject document = new BasicDBObject();
    List<Object> dbList = new BasicDBList();
    for (PortRange portRange : this.portList) {
        dbList.add(portRange.getAsDocument());
    }
    document.put("portRanges", dbList);
    return document;
}
From source file:org.opendaylight.controller.samples.onftappingapp.TapPolicy.java
License:Apache License
public BasicDBObject getAsDocument() {
    BasicDBObject document = new BasicDBObject();
    document.put("name", getName());
    document.put("description", getDescription());
    document.put("enabled", isEnabled());
    document.put("creationDate", new Date());

    // Add Match Criteria ID list sub-document
    BasicDBList matchCriteriaListDoc = new BasicDBList();
    for (int i = 0; i < matchCriteriaIdList.size(); i++) {
        String matchCriteriaId = matchCriteriaIdList.get(i);
        matchCriteriaListDoc.add(matchCriteriaId);
    }
    document.put("matchCriteria", matchCriteriaListDoc);

    // Add SwitchAndPort sub-document
    BasicDBList switchAndPortListDoc = new BasicDBList();
    for (SwitchAndPort switchAndPort : switchAndPortList) {
        switchAndPortListDoc.add(switchAndPort.getAsDocument());
    }
    document.put("switchAndPort", switchAndPortListDoc);

    // Add Capture Device sub-document
    BasicDBList captureDevListDoc = new BasicDBList();
    for (int i = 0; i < captureDevIdList.size(); i++) {
        String captureDevId = captureDevIdList.get(i);
        captureDevListDoc.add(captureDevId);
    }
    document.put("captureDev", captureDevListDoc);

    // Add Port Chain sub-document
    BasicDBList portChainListDoc = new BasicDBList();
    for (int i = 0; i < portChainIdList.size(); i++) {
        String portChainId = portChainIdList.get(i);
        portChainListDoc.add(portChainId);
    }
    document.put("portChain", portChainListDoc);

    return document;
}
From source file:org.opengrid.data.impl.PlenarioDataProvider.java
private String getJsonFromFilterList(String value, String key) throws ParseException, UnsupportedEncodingException {
    StringBuilder sb = new StringBuilder();
    BasicDBList q_list = (BasicDBList) JSON.parse(value);
    Iterator iter = q_list.iterator();

    if (key.equals("[$and]") || key.equals("$and")) {
        sb.append("{\"op\": \"and\", \"val\": [");
    } else {
        sb.append("{\"op\": \"or\", \"val\": [");
    }

    while (iter.hasNext()) {
        // e.g. {"$and":[{"current_activity":"1"},{"police_district":2},
        //               {"$and":[{"current_activity":"1"},{"creation_date":{"$gt":1474651260000}}]}]}
        LinkedHashMap<String, Object> q_filter = (LinkedHashMap<String, Object>) JSON
                .parse(((LinkedHashMap<String, Object>) (iter.next())).toString());
        sb.append(getSingleFilter(q_filter));
        if (iter.hasNext()) {
            sb.append(",");
        }
    }
    sb.append("]}");
    return sb.toString();
}
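The example above leans on com.mongodb.util.JSON.parse, which returns a BasicDBList when the input is a JSON array and a BasicDBObject when it is a JSON object (note that this JSON utility class was deprecated in later 3.x drivers). A minimal sketch:

    import com.mongodb.BasicDBList;
    import com.mongodb.DBObject;
    import com.mongodb.util.JSON;

    BasicDBList list = (BasicDBList) JSON.parse("[{\"id\": \"crimes\"}, {\"id\": \"permits\"}]");
    for (Object item : list) {
        System.out.println(((DBObject) item).get("id")); // crimes, permits
    }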
From source file:org.opengrid.data.impl.PlenarioDataProvider.java
private boolean IgnoreDataset(String datasetName) {
    LinkedHashMap<String, Object> q = (LinkedHashMap<String, Object>) JSON
            .parse(FileUtil.getJsonFileContents("json/plenario_datasets_to_ignore.json"));
    for (Map.Entry<String, Object> entry : q.entrySet()) {
        BasicDBList q_list = (BasicDBList) JSON.parse(entry.getValue().toString());
        Iterator iter = q_list.iterator();
        while (iter.hasNext()) {
            LinkedHashMap<String, String> q_filter = (LinkedHashMap<String, String>) JSON
                    .parse(((LinkedHashMap<String, String>) (iter.next())).toString());
            String q_filter_key = q_filter.get("id");
            if (q_filter_key.equals(datasetName)) {
                return true;
            }
        }
    }
    return false;
}