List of usage examples for com.mongodb BasicDBObject append
@Override public BasicDBObject append(final String key, final Object val)
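All of the examples below rely on the same behaviour: append puts the key/value pair into the document and returns the same BasicDBObject, so calls can be chained when building query, projection, sort, or update documents. A minimal, self-contained sketch (the class and field names are illustrative, not taken from the examples that follow):

import com.mongodb.BasicDBObject;

public class AppendDemo {
    public static void main(String[] args) {
        // append() stores the mapping and returns this, so calls chain fluently
        BasicDBObject query = new BasicDBObject()
                .append("status", "ACTIVE")
                .append("age", new BasicDBObject("$gte", 21));
        System.out.println(query); // roughly { "status" : "ACTIVE" , "age" : { "$gte" : 21 } }
    }
}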
From source file:com.ikanow.infinit.e.processing.generic.aggregation.EntityGeotagAggregationUtils.java
License:Open Source License
/**
 * Takes an entity feature and checks if it does not have a geotag but is a dimension: WHERE.
 * If these conditions are met then this attempts to find a geotag for the entity.
 *
 * 1st try: looks up in feature.geo table on entity.getIndex()
 * 2nd try:
 *   A. Tries to split entity.getIndex into 3 terms and searches feature.geo on city,region,country
 *   B. Tries to split entity.getIndex into 2 terms and searches feature.geo on region,country
 *
 * Sets the feature's geotag and ontology_type if a result is found.
 *
 * @param ent_feature The entity feature we are trying to find a geotag for
 */
public static void addEntityGeo(EntityFeaturePojo ent_feature) {
    try {
        if (ent_feature.getGeotag() == null && (Dimension.Where == ent_feature.getDimension())) {
            BasicDBObject hint = new BasicDBObject("search_field", 1);
            // Attempt 1: try to match on index
            String firsttry = ent_feature.getIndex().substring(0, ent_feature.getIndex().lastIndexOf("/"));
            BasicDBObject query1 = new BasicDBObject("geoindex", new BasicDBObject("$exists", true));
            query1.append("search_field", firsttry);
            DBCursor dbc1 = DbManager.getFeature().getGeo().find(query1).hint(hint);
            DBObject dbo = null;
            if (dbc1.hasNext())
                dbo = dbc1.next();
            // (more efficient - I think! - version of dbc1.count() == 1)
            if ((null != dbo) && !dbc1.hasNext()) // (ie "at least 1" && "not more than 1")
            {
                // Only 1 match so we can use this
                GeoFeaturePojo gfp = GeoFeaturePojo.fromDb(dbo, GeoFeaturePojo.class);
                ent_feature.setGeotag(gfp.getGeoindex());
                // We don't know what kind of point this is so we have to guess
                if (gfp.getCity() != null)
                    ent_feature.setOntology_type("city");
                else if (gfp.getRegion() != null)
                    ent_feature.setOntology_type("countrysubsidiary");
                else if (gfp.getCountry() != null)
                    ent_feature.setOntology_type("country");
                else
                    ent_feature.setOntology_type("point");
                return; // we are done
            } else {
                // On to step 2: we attempt to attack on 2 fronts.
                // The geo term can be in the form of something,something,something
                // CASE 1: city,region,country e.g. blacksburg,virginia,united states
                // CASE 2: region,country e.g. new jersey,united states
                // NOTE: this fails if something has a comma in the name, but it's the best we can hope for
                String[] secondtry = firsttry.split("\\s*,\\s*");
                if (secondtry.length > 2) // CASE 1
                {
                    StringBuffer sb22 = new StringBuffer("^").append(Pattern.quote(secondtry[1])).append("$");
                    Pattern searchterm22 = Pattern.compile(sb22.toString(), Pattern.CASE_INSENSITIVE);
                    StringBuffer sb23 = new StringBuffer("^").append(Pattern.quote(secondtry[2])).append("$");
                    Pattern searchterm23 = Pattern.compile(sb23.toString(), Pattern.CASE_INSENSITIVE);
                    BasicDBObject query2 = new BasicDBObject("geoindex", new BasicDBObject("$exists", true));
                    query2.append("search_field", secondtry[0].toLowerCase());
                    query2.append("region", searchterm22);
                    query2.append("country", searchterm23);
                    DBCursor dbc2 = DbManager.getFeature().getGeo().find(query2).hint(hint);
                    DBObject dbo2 = null;
                    if (dbc2.hasNext())
                        dbo2 = dbc2.next(); // (see dbc1)
                    if ((null != dbo2) && !dbc2.hasNext()) // (ie "at least 1" && "not more than 1")
                    {
                        ent_feature.setGeotag(GeoFeaturePojo.fromDb(dbo2, GeoFeaturePojo.class).getGeoindex());
                        ent_feature.setOntology_type("city"); // we searched for city,region,country
                        return; // we are done
                    }
                } else if (secondtry.length > 1) // CASE 2
                {
                    StringBuffer sb22 = new StringBuffer("^").append(Pattern.quote(secondtry[1])).append("$");
                    Pattern searchterm22 = Pattern.compile(sb22.toString(), Pattern.CASE_INSENSITIVE);
                    BasicDBObject query2 = new BasicDBObject("geoindex", new BasicDBObject("$exists", true));
                    query2.append("search_field", secondtry[0].toLowerCase());
                    query2.append("country", searchterm22);
                    DBCursor dbc2 = DbManager.getFeature().getGeo().find(query2).hint(hint);
                    DBObject dbo2 = null;
                    if (dbc2.hasNext())
                        dbo2 = dbc2.next(); // (see dbc1)
                    if ((null != dbo2) && !dbc2.hasNext()) // (ie "at least 1" && "not more than 1")
                    {
                        ent_feature.setGeotag(GeoFeaturePojo.fromDb(dbo2, GeoFeaturePojo.class).getGeoindex());
                        ent_feature.setOntology_type("countrysubsidiary"); // we searched for region,country
                        return; // we are done
                    }
                }
            }
        }
    } catch (Exception e) {
        // If an exception occurs log the error
        logger.error("Exception Message: " + e.getMessage(), e);
    }
}
From source file:com.ikanow.infinit.e.processing.generic.synchronization.SynchronizationManager.java
License:Open Source License
/**
 * Does the DB sync: pulls all mongo docs that occurred after the
 * cleanseStartTime for the given sources and makes sure they are in the search db.
 *
 * @param lastCleanse 1 hour before this harvester started
 * @param sources list of sources we are syncing
 * @return The number of errors fixed (docs deleted)
 */
// DON'T USE THIS UNTIL REWRITTEN - IT SHOULD TRANSFER DOCS ACROSS, NOT LEAVE THEM ALONE
@Deprecated
public int syncDB(long cleanseStartTime, Set<String> dbCache) {
    dbCache.clear();
    int fixcount = 0;
    DBCollection contentDb = DbManager.getDocument().getContent();
    DBCollection documentDb = DbManager.getDocument().getMetadata();
    StoreAndIndexManager storeManager = new StoreAndIndexManager();

    for (SourcePojo sp : sources) {
        // Don't combine the sources (apart from unusual multi-community case), because
        // that prevents you from using the compound sourceKey/_id index
        List<String> sourceKeyList = new ArrayList<String>();
        sourceKeyList.addAll(sp.getDistributedKeys());
        try {
            List<DocumentPojo> docs_to_remove = new ArrayList<DocumentPojo>();

            // FIRST DO ALL NEW FEEDS
            BasicDBObject query = new BasicDBObject();
            query.put(DocumentPojo._id_, new BasicDBObject(MongoDbManager.gt_,
                    new ObjectId((int) (cleanseStartTime / 1000), 0, 0))); // time aspect
            query.put(DocumentPojo.sourceKey_, new BasicDBObject(MongoDbManager.in_, sourceKeyList)); // source aspect
            BasicDBObject queryFields = new BasicDBObject();
            queryFields.append(DocumentPojo.url_, 1);
            queryFields.append(DocumentPojo.index_, 1);
            queryFields.append(DocumentPojo.sourceKey_, 1);

            DBCursor cur = documentDb.find(query, queryFields).batchSize(100);
            ElasticSearchManager esm = null;
            ElasticSearchManager esm_base = ElasticSearchManager.getIndex("document_index");
            String sIndex = null;

            while (cur.hasNext()) {
                if (bKillMeNow) {
                    return fixcount;
                }
                DocumentPojo doc = DocumentPojo.fromDb(cur.next(), DocumentPojo.class);
                if (null != doc.getId()) {
                    dbCache.add(doc.getId().toString());
                }
                // Get index of doc to check in:
                String sNewIndex = doc.getIndex();
                if (null == sNewIndex) {
                    sIndex = null;
                    esm = esm_base;
                } else if ((null == sIndex) || (!sNewIndex.equals(sIndex))) {
                    sIndex = sNewIndex;
                    if (sNewIndex.equals("document_index")) {
                        esm = esm_base;
                    } else {
                        esm = ElasticSearchManager.getIndex(sNewIndex + "/document_index");
                    }
                }
                // Compare mongo doc to search doc
                Map<String, GetField> results = esm.getDocument(doc.getId().toString(), DocumentPojo.url_);
                if (null == results || results.isEmpty()) {
                    // Either too many entries (duplicates) or no entry - delete this doc from both
                    logger.info("db sync removing doc: " + doc.getId() + "/" + doc.getSourceKey()
                            + " not found in search (or duplicate)");
                    docs_to_remove.add(doc);
                    documentDb.remove(new BasicDBObject(DocumentPojo._id_, doc.getId()));
                    BasicDBObject contentQ = new BasicDBObject(CompressedFullTextPojo.url_, doc.getUrl());
                    contentQ.put(CompressedFullTextPojo.sourceKey_,
                            new BasicDBObject(MongoDbManager.in_, Arrays.asList(null, doc.getSourceKey())));
                    contentDb.remove(contentQ);
                    fixcount++;
                }
            } // end loop over new docs for this source
            storeManager.removeFromSearch(docs_to_remove);

            // NOW VERIFY ALL OLD FEEDS
            int iteration = 1;
            boolean removedAll = true;
            docs_to_remove.clear();
            while (removedAll) {
                int rows = iteration * iteration * 10; // 10x^2 exponentially check more docs
                int oldfixes = 0;
                BasicDBObject queryOLD = new BasicDBObject();
                queryOLD.put(DocumentPojo.sourceKey_, new BasicDBObject(MongoDbManager.in_, sourceKeyList)); // source aspect
                BasicDBObject sortOLD = new BasicDBObject(DocumentPojo._id_, 1);
                DBCursor curOLD = documentDb.find(queryOLD, queryFields).sort(sortOLD).limit(rows);
                while (curOLD.hasNext()) {
                    DocumentPojo doc = DocumentPojo.fromDb(curOLD.next(), DocumentPojo.class);
                    if (null != doc.getId()) {
                        dbCache.add(doc.getId().toString());
                    }
                    // Get index of doc to check in:
                    String sNewIndex = doc.getIndex();
                    if (null == sNewIndex) {
                        sIndex = null;
                        esm = esm_base;
                    } else if ((null == sIndex) || (!sNewIndex.equals(sIndex))) {
                        sIndex = sNewIndex;
                        if (sNewIndex.equals("document_index")) {
                            esm = esm_base;
                        } else {
                            esm = ElasticSearchManager.getIndex(sNewIndex + "/document_index");
                        }
                    }
                    // Compare mongo doc to search doc
                    Map<String, GetField> results = esm.getDocument(doc.getId().toString(), DocumentPojo.url_);
                    if (null == results || results.isEmpty()) {
                        // Either too many entries (duplicates) or no entry - delete this doc from both
                        logger.info("db sync removing doc: " + doc.getId() + "/" + doc.getSourceKey()
                                + " not found in search (or duplicate)");
                        docs_to_remove.add(doc);
                        documentDb.remove(new BasicDBObject(DocumentPojo._id_, doc.getId()));
                        contentDb.remove(new BasicDBObject(DocumentPojo.url_, doc.getUrl()));
                        fixcount++;
                        oldfixes++;
                    }
                }
                if (oldfixes != rows)
                    removedAll = false;
            } // (end loop over old docs for this source)
            storeManager.removeFromSearch(docs_to_remove);
        } catch (Exception e) {
            // If an exception occurs log the error
            logger.error("Exception Message: " + e.getMessage(), e);
        }
    }
    return fixcount;
}
From source file:com.images3.data.impl.ImageMetricsServiceImplMongoDB.java
License:Apache License
private BasicDBObject getImageIncrements(ImageMetricsOS metrics) {
    BasicDBObject increase = new BasicDBObject();
    Map<ImageMetricsType, Long> numbers = metrics.getNumbers();
    for (Iterator<ImageMetricsType> iter = numbers.keySet().iterator(); iter.hasNext();) {
        ImageMetricsType type = iter.next();
        Long number = metrics.getNumbers().get(type);
        increase.append(type.toString(), number);
    }
    return increase;
}
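The flat counter document built here is shaped for MongoDB's $inc operator. A hedged sketch of how such a document might be applied in an update; the metric names, selector field, collection name, and upsert flags are assumptions for illustration, not taken from ImageMetricsServiceImplMongoDB:

// Hypothetical usage of an increments document like the one built above
BasicDBObject increments = new BasicDBObject()
        .append("COUNTS_OF_INBOUND", 5L)     // metric-type keys are illustrative
        .append("COUNTS_OF_OUTBOUND", 3L);
BasicDBObject selector = new BasicDBObject("imagePlantId", "plant-001");   // assumed key field
DBCollection coll = getDatabase().getCollection("ImageMetrics");           // assumed collection name
coll.update(selector, new BasicDBObject("$inc", increments), true, false); // upsert, single document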
From source file:com.images3.data.impl.TemplateAccessImplMongoDB.java
License:Apache License
private List<TemplateOS> getTemplatesByImagePlantId(String imagePlantId, Boolean isArchived, Page pageCursor) {
    DBCollection coll = getDatabase().getCollection("Template");
    int skipRecords = (pageCursor.getStart() - 1) * pageCursor.getSize();
    BasicDBObject criteria = new BasicDBObject().append("imagePlantId", imagePlantId);
    if (null != isArchived) {
        criteria.append("isArchived", isArchived);
    }
    List<DBObject> objects = coll.find(criteria).skip(skipRecords).limit(pageCursor.getSize()).toArray();
    List<TemplateOS> templates = new ArrayList<TemplateOS>(objects.size());
    for (DBObject obj : objects) {
        templates.add(getObjectMapper().mapToTemplateOS((BasicDBObject) obj));
    }
    return templates;
}
From source file:com.images3.data.impl.TemplateAccessImplMongoDB.java
License:Apache License
private List<TemplateOS> getTemplatesByImagePlantId(String imagePlantId, Boolean isArchived) {
    DBCollection coll = getDatabase().getCollection("Template");
    BasicDBObject criteria = new BasicDBObject().append("imagePlantId", imagePlantId);
    if (null != isArchived) {
        criteria.append("isArchived", isArchived);
    }
    List<DBObject> objects = coll.find(criteria).toArray();
    List<TemplateOS> templates = new ArrayList<TemplateOS>(objects.size());
    for (DBObject obj : objects) {
        templates.add(getObjectMapper().mapToTemplateOS((BasicDBObject) obj));
    }
    return templates;
}
From source file:com.impetus.client.mongodb.query.MongoDBQuery.java
License:Apache License
/**
 * Creates MongoDB Query object from filterClauseQueue.
 *
 * @param m
 *            the entity metadata
 * @param filterClauseQueue
 *            the filter clause queue
 * @return the basic db object
 */
public BasicDBObject createSubMongoQuery(EntityMetadata m, Queue filterClauseQueue) {
    BasicDBObject query = new BasicDBObject();
    BasicDBObject compositeColumns = new BasicDBObject();
    MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata()
            .getMetamodel(m.getPersistenceUnit());
    AbstractManagedType managedType = (AbstractManagedType) metaModel.entity(m.getEntityClazz());

    for (Object object : filterClauseQueue) {
        boolean isCompositeColumn = false;
        boolean isSubCondition = false;
        if (object instanceof FilterClause) {
            FilterClause filter = (FilterClause) object;
            String property = filter.getProperty();
            String condition = filter.getCondition();
            Object value = filter.getValue().get(0);

            // If value is a string but field.getType is different, convert it via the field accessor
            Field f = null;
            // If alias is still present, it means it is an enclosing document search
            if (managedType.hasLobAttribute()) {
                EntityType entity = metaModel.entity(m.getEntityClazz());
                String fieldName = m.getFieldName(property);
                f = (Field) entity.getAttribute(fieldName).getJavaMember();
                if (value.getClass().isAssignableFrom(String.class) && f != null
                        && !f.getType().equals(value.getClass())) {
                    value = PropertyAccessorFactory.getPropertyAccessor(f).fromString(f.getType().getClass(),
                            value.toString());
                }
                value = MongoDBUtils.populateValue(value, value.getClass());
                property = "metadata." + property;
            } else {
                if (((AbstractAttribute) m.getIdAttribute()).getJPAColumnName().equalsIgnoreCase(property)) {
                    property = "_id";
                    f = (Field) m.getIdAttribute().getJavaMember();
                    if (metaModel.isEmbeddable(m.getIdAttribute().getBindableJavaType())
                            && value.getClass().isAssignableFrom(f.getType())) {
                        EmbeddableType compoundKey = metaModel
                                .embeddable(m.getIdAttribute().getBindableJavaType());
                        compositeColumns = MongoDBUtils.getCompoundKeyColumns(m, value, compoundKey);
                        isCompositeColumn = true;
                        continue;
                    }
                } else if (metaModel.isEmbeddable(m.getIdAttribute().getBindableJavaType())
                        && StringUtils.contains(property, '.')) {
                    // Means it is a case of composite column.
                    property = property.substring(property.indexOf(".") + 1);
                    isCompositeColumn = true;
                }
                /*
                 * If a composite key: "." is assumed to be part of the property only in case of embeddables
                 */
                else if (StringUtils.contains(property, '.')) {
                    EntityType entity = metaModel.entity(m.getEntityClazz());
                    StringTokenizer tokenizer = new StringTokenizer(property, ".");
                    String embeddedAttributeAsStr = tokenizer.nextToken();
                    String embeddableAttributeAsStr = tokenizer.nextToken();
                    Attribute embeddedAttribute = entity.getAttribute(embeddedAttributeAsStr);
                    EmbeddableType embeddableEntity = metaModel
                            .embeddable(((AbstractAttribute) embeddedAttribute).getBindableJavaType());
                    f = (Field) embeddableEntity.getAttribute(embeddableAttributeAsStr).getJavaMember();
                    property = ((AbstractAttribute) embeddedAttribute).getJPAColumnName() + "."
                            + ((AbstractAttribute) embeddableEntity.getAttribute(embeddableAttributeAsStr))
                                    .getJPAColumnName();
                } else {
                    EntityType entity = metaModel.entity(m.getEntityClazz());
                    String discriminatorColumn = ((AbstractManagedType) entity).getDiscriminatorColumn();
                    if (!property.equals(discriminatorColumn)) {
                        String fieldName = m.getFieldName(property);
                        f = (Field) entity.getAttribute(fieldName).getJavaMember();
                    }
                }
                if (value.getClass().isAssignableFrom(String.class) && f != null
                        && !f.getType().equals(value.getClass())) {
                    value = PropertyAccessorFactory.getPropertyAccessor(f).fromString(f.getType().getClass(),
                            value.toString());
                }
                value = MongoDBUtils.populateValue(value, value.getClass());
            }

            // Property, if it doesn't exist in the entity, may be there in a document embedded within it,
            // so we have to check that.
            // TODO: Query should actually be in a format documentName.embeddedDocumentName.column;
            // remove the block below once this is decided.
            // Query could be geospatial in nature
            if (f != null && f.getType().equals(Point.class)) {
                GeospatialQuery geospatialQueryimpl = GeospatialQueryFactory
                        .getGeospatialQueryImplementor(condition, value);
                query = (BasicDBObject) geospatialQueryimpl.createGeospatialQuery(property, value, query);
            } else {
                if (isCompositeColumn) {
                    EmbeddableType embeddableType = metaModel
                            .embeddable(m.getIdAttribute().getBindableJavaType());
                    AbstractAttribute attribute = (AbstractAttribute) embeddableType.getAttribute(property);
                    property = new StringBuffer("_id.").append(attribute.getJPAColumnName()).toString();
                }
                if (condition.equals("=")) {
                    query.append(property, value);
                } else if (condition.equalsIgnoreCase("like")) {
                    if (query.containsField(property)) {
                        query.put(property, ((BasicDBObject) query.get(property)).append("$regex",
                                createLikeRegex((String) value)));
                    } else {
                        query.append(property, new BasicDBObject("$regex", createLikeRegex((String) value)));
                    }
                } else if (condition.equalsIgnoreCase(">")) {
                    if (query.containsField(property)) {
                        query.put(property, ((BasicDBObject) query.get(property)).append("$gt", value));
                    } else {
                        query.append(property, new BasicDBObject("$gt", value));
                    }
                } else if (condition.equalsIgnoreCase(">=")) {
                    if (query.containsField(property)) {
                        query.put(property, ((BasicDBObject) query.get(property)).append("$gte", value));
                    } else {
                        query.append(property, new BasicDBObject("$gte", value));
                    }
                } else if (condition.equalsIgnoreCase("<")) {
                    if (query.containsField(property)) {
                        query.put(property, ((BasicDBObject) query.get(property)).append("$lt", value));
                    } else {
                        query.append(property, new BasicDBObject("$lt", value));
                    }
                } else if (condition.equalsIgnoreCase("<=")) {
                    if (query.containsField(property)) {
                        query.put(property, ((BasicDBObject) query.get(property)).append("$lte", value));
                    } else {
                        query.append(property, new BasicDBObject("$lte", value));
                    }
                } else if (condition.equalsIgnoreCase("in")) {
                    if (query.containsField(property)) {
                        query.put(property, ((BasicDBObject) query.get(property)).append("$in", filter.getValue()));
                    } else {
                        query.append(property, new BasicDBObject("$in", filter.getValue()));
                    }
                } else if (condition.equalsIgnoreCase("not in")) {
                    if (query.containsField(property)) {
                        query.put(property, ((BasicDBObject) query.get(property)).append("$nin", filter.getValue()));
                    } else {
                        query.append(property, new BasicDBObject("$nin", filter.getValue()));
                    }
                } else if (condition.equalsIgnoreCase("<>")) {
                    if (query.containsField(property)) {
                        query.put(property, ((BasicDBObject) query.get(property)).append("$ne", value));
                    } else {
                        query.append(property, new BasicDBObject("$ne", value));
                    }
                }
            }
            // TODO: Add support for other operators like order by asc/desc, limit, skip, count etc.
        }
    }
    if (!compositeColumns.isEmpty()) {
        query.append("_id", compositeColumns);
    }
    return query;
}
From source file:com.impetus.client.mongodb.query.MongoDBQuery.java
License:Apache License
/**
 * Prepare order by clause.
 *
 * @param metadata
 *            the metadata
 * @return order by clause
 */
private BasicDBObject getOrderByClause(final EntityMetadata metadata) {
    BasicDBObject orderByClause = null;
    Metamodel metaModel = kunderaMetadata.getApplicationMetadata().getMetamodel(metadata.getPersistenceUnit());
    EntityType entityType = metaModel.entity(metadata.getEntityClazz());
    AbstractManagedType managedType = (AbstractManagedType) metaModel.entity(metadata.getEntityClazz());
    List<SortOrdering> orders = kunderaQuery.getOrdering();
    if (orders != null) {
        orderByClause = new BasicDBObject();
        if (!managedType.hasLobAttribute()) {
            for (SortOrdering order : orders) {
                orderByClause.append(getColumnName(metadata, entityType, order.getColumnName()),
                        order.getOrder().equals(SortOrder.ASC) ? 1 : -1);
            }
        } else {
            for (SortOrdering order : orders) {
                orderByClause.append("metadata." + getColumnName(metadata, entityType, order.getColumnName()),
                        order.getOrder().equals(SortOrder.ASC) ? 1 : -1);
            }
        }
    }
    return orderByClause;
}
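The resulting document maps each column to 1 (ascending) or -1 (descending), which is the shape the legacy driver's DBCursor.sort() expects. A sketch under that assumption; the collection, query, and column names are illustrative, not from MongoDBQuery:

BasicDBObject orderBy = new BasicDBObject()
        .append("lastName", 1)     // ascending
        .append("createdAt", -1);  // descending
DBCursor cursor = collection.find(query).sort(orderBy); // 'collection' and 'query' assumed to exist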
From source file:com.itfsw.query.builder.support.parser.mongodb.BetweenRuleParser.java
License:Apache License
public DBObject parse(IRule rule, JsonRuleParser parser) {
    List<Object> values = (List<Object>) rule.getValue();
    BasicDBObject operate = new BasicDBObject();
    operate.append("$gte", values.get(0));
    operate.append("$lte", values.get(1));
    return new BasicDBObject(rule.getField(), operate);
}
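For a rule on a field such as "price" with values [10, 20], this parser yields a range document; a small illustration of the shape it builds (the field name and values are made up):

BasicDBObject operate = new BasicDBObject()
        .append("$gte", 10)
        .append("$lte", 20);
BasicDBObject betweenQuery = new BasicDBObject("price", operate);
// betweenQuery is roughly { "price" : { "$gte" : 10 , "$lte" : 20 } }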
From source file:com.itfsw.query.builder.support.parser.mongodb.DefaultGroupParser.java
License:Apache License
/**
 * Parses a rule group into an $and/$or query (wrapped in $nor when the group is negated).
 *
 * @param group
 * @param parser
 * @return
 */
@Override
public Object parse(IGroup group, JsonRuleParser parser) {
    // rules
    BasicDBList operates = new BasicDBList();
    for (JsonRule jsonRule : group.getRules()) {
        operates.add(parser.parse(jsonRule));
    }
    // AND or OR
    BasicDBObject andOrObj = new BasicDBObject();
    andOrObj.append(EnumCondition.AND.equals(group.getCondition()) ? "$and" : "$or", operates);
    // Not
    if (group.getNot() != null && group.getNot()) {
        BasicDBList list = new BasicDBList();
        list.add(andOrObj);
        return new BasicDBObject("$nor", list);
    }
    return andOrObj;
}
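The group parser nests the already-parsed rule documents under $and or $or, and wraps the result in $nor when the group is negated. An illustrative reconstruction of the negated-AND shape (the two rule documents are made up):

BasicDBList rules = new BasicDBList();
rules.add(new BasicDBObject("status", "ACTIVE"));
rules.add(new BasicDBObject("age", new BasicDBObject("$gte", 21)));
BasicDBObject andObj = new BasicDBObject("$and", rules);
BasicDBList wrapper = new BasicDBList();
wrapper.add(andObj);
BasicDBObject negated = new BasicDBObject("$nor", wrapper);
// negated is roughly { "$nor" : [ { "$and" : [ { "status" : "ACTIVE" } , { "age" : { "$gte" : 21 } } ] } ] }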
From source file:com.itfsw.query.builder.support.parser.mongodb.NotBetweenRuleParser.java
License:Apache License
public BasicDBObject parse(IRule rule, JsonRuleParser parser) {
    List<Object> values = (List<Object>) rule.getValue();
    BasicDBObject operate = new BasicDBObject();
    operate.append("$lt", values.get(0));
    operate.append("$gt", values.get(1));
    return new BasicDBObject(rule.getField(), operate);
}