List of usage examples for com.mongodb.BasicDBObjectBuilder#add
public BasicDBObjectBuilder add(final String key, final Object val)
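A minimal sketch of the method before the real-world examples below: each add(key, value) call appends one field and returns the builder, so calls can be chained before get() materializes the accumulated fields as a DBObject. The class name and field values here are made up purely for illustration.

import com.mongodb.BasicDBObjectBuilder;
import com.mongodb.DBObject;

public class BasicDBObjectBuilderAddExample {
    public static void main(String[] args) {
        // Hypothetical field names; add(key, value) appends a field and returns the builder.
        DBObject doc = BasicDBObjectBuilder.start()
                .add("manufacturer", "Acme")
                .add("price", 9.99)
                .get(); // get() returns the built DBObject
        System.out.println(doc); // prints something like { "manufacturer" : "Acme" , "price" : 9.99 }
    }
}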
From source file: co.cask.hydrator.plugin.batch.sink.MongoDBBatchSink.java
License: Apache License

@Override
public void transform(StructuredRecord input, Emitter<KeyValue<NullWritable, BSONWritable>> emitter)
        throws Exception {
    // Copy every field of the structured record into a BSON document.
    BasicDBObjectBuilder bsonBuilder = BasicDBObjectBuilder.start();
    for (Schema.Field field : input.getSchema().getFields()) {
        bsonBuilder.add(field.getName(), input.get(field.getName()));
    }
    emitter.emit(new KeyValue<>(NullWritable.get(), new BSONWritable(bsonBuilder.get())));
}
From source file: com.bbc.remarc.util.ResourceManager.java
License: Apache License

private static void createDocumentsFromFileMap(HashMap<String, List<File>> fileMap,
        HashMap<String, ResourceType> typeMap, File properties, String resourcesDir) {
    DB db = MongoClient.getDB();

    Properties documentProps = processPropertiesFile(properties);
    if (documentProps == null) {
        log.error("could not create properties file. Abort directory.");
        return;
    }

    String theme = documentProps.getProperty("theme");
    String decade = documentProps.getProperty("decade");

    if (theme == null && decade == null) {
        log.error("ERROR! Properties file contained neither THEME nor DECADE. Abort directory.");
        return;
    }

    // now we process each key (document) in the hashmap, copying the
    // resources (file array) into the correct folder
    Set<String> keys = fileMap.keySet();
    for (String key : keys) {

        log.debug("processing [" + key + "]");

        // create document with id, theme and decade
        BasicDBObjectBuilder documentBuilder = BasicDBObjectBuilder.start();
        documentBuilder.add("id", key);
        documentBuilder.add("theme", theme);
        documentBuilder.add("decade", decade);

        // based upon the documentType, we can determine all our urls and
        // storage variables
        ResourceType documentType = typeMap.get(key);
        File fileDestDirectory = null;

        // Get the relative base URL from an environment variable if it has been set
        String relativefileBaseUrl = System.getenv(Configuration.ENV_BASE_URL);
        if (relativefileBaseUrl == null || "".equals(relativefileBaseUrl)) {
            relativefileBaseUrl = Configuration.DEFAULT_RELATIVE_BASE_URL;
        } else {
            relativefileBaseUrl += Configuration.CONTENT_DIR;
        }

        String mongoCollection = "";

        switch (documentType) {
        case IMAGE:
            mongoCollection = "images";
            fileDestDirectory = new File(resourcesDir + Configuration.IMAGE_DIR_NAME);
            relativefileBaseUrl += Configuration.IMAGE_DIR;
            break;
        case AUDIO:
            mongoCollection = "audio";
            fileDestDirectory = new File(resourcesDir + Configuration.AUDIO_DIR_NAME);
            relativefileBaseUrl += Configuration.AUDIO_DIR;
            break;
        case VIDEO:
            mongoCollection = "video";
            fileDestDirectory = new File(resourcesDir + Configuration.VIDEO_DIR_NAME);
            relativefileBaseUrl += Configuration.VIDEO_DIR;
            break;
        default:
            break;
        }

        List<File> files = fileMap.get(key);
        for (File resource : files) {

            log.debug("--- processing [" + resource.getName() + "]");

            String resourceLocation = relativefileBaseUrl + resource.getName();
            String extension = FilenameUtils.getExtension(resource.getName());
            ResourceType fileType = getTypeFromExtension(extension);

            // now determine the key to store the resource under in MongoDB,
            // which differs for images and metadata
            String urlKey;
            switch (fileType) {
            case IMAGE:
                urlKey = "imageUrl";
                break;
            case INFORMATION:
                urlKey = "metadata";
                break;
            default:
                urlKey = (extension + "ContentUrl");
                break;
            }

            // If the file is a metadata file, read its contents; otherwise just add the location to the db
            if (fileType == ResourceType.INFORMATION) {
                String metadata = processMetadata(resource.getPath());
                documentBuilder.add(urlKey, metadata);
            } else {
                documentBuilder.add(urlKey, resourceLocation);
            }
        }

        // insert the document into the database
        try {
            DBObject obj = documentBuilder.get();
            log.debug("writing document to collection (" + mongoCollection + "): " + obj);
            db.requestStart();
            DBCollection collection = db.getCollection(mongoCollection);
            collection.insert(documentBuilder.get());
        } finally {
            db.requestDone();
        }

        // write all the resource files to the correct directory
        log.debug("copying resources into " + fileDestDirectory.getPath());
        for (File resource : files) {
            // We don't want to copy the metadata into the directory, so skip it here
            String extension = FilenameUtils.getExtension(resource.getName());
            ResourceType fileType = getTypeFromExtension(extension);
            if (fileType != ResourceType.INFORMATION) {
                try {
                    FileUtils.copyFileToDirectory(resource, fileDestDirectory);
                } catch (IOException e) {
                    log.error("ERROR! Couldn't copy resource to directory: " + e);
                }
            }
        }
    }
}
From source file: com.continuent.tungsten.replicator.applier.MongoApplier.java
License: Open Source License

private void ensureIndexes(DBCollection coll, OneRowChange orc) {
    // If we have not seen this table before, check whether it
    // needs an index.
    if (autoIndex) {
        String schema = orc.getSchemaName();
        String table = orc.getTableName();
        Table t = tableMetadataCache.retrieve(schema, table);
        if (t == null) {
            if (logger.isDebugEnabled()) {
                logger.debug("Ensuring index exists on collection: db=" + schema + " collection=" + table);
            }

            // Compute required index keys and ensure they
            // exist in MongoDB.
            List<ColumnSpec> keySpecs = orc.getKeySpec();
            if (keySpecs.size() > 0) {
                BasicDBObjectBuilder builder = BasicDBObjectBuilder.start();
                for (ColumnSpec keySpec : keySpecs) {
                    builder.add(keySpec.getName(), 1);
                }
                coll.ensureIndex(builder.get());
            }

            // Note that we have processed the table.
            t = new Table(schema, table);
            tableMetadataCache.store(t);
        }
    }
}
From source file: com.englishtown.vertx.GridFSModule.java
License: Open Source License

public void saveFile(Message<JsonObject> message, JsonObject jsonObject) {
    ObjectId id = getObjectId(message, jsonObject, "id");
    if (id == null) {
        return;
    }
    Integer length = getRequiredInt("length", message, jsonObject, 1);
    if (length == null) {
        return;
    }
    Integer chunkSize = getRequiredInt("chunkSize", message, jsonObject, 1);
    if (chunkSize == null) {
        return;
    }
    long uploadDate = jsonObject.getLong("uploadDate", 0);
    if (uploadDate <= 0) {
        uploadDate = System.currentTimeMillis();
    }
    String filename = jsonObject.getString("filename");
    String contentType = jsonObject.getString("contentType");
    JsonObject metadata = jsonObject.getObject("metadata");

    try {
        BasicDBObjectBuilder builder = BasicDBObjectBuilder.start()
                .add("_id", id)
                .add("length", length)
                .add("chunkSize", chunkSize)
                .add("uploadDate", new Date(uploadDate));

        if (filename != null) builder.add("filename", filename);
        if (contentType != null) builder.add("contentType", contentType);
        if (metadata != null) builder.add("metadata", JSON.parse(metadata.encode()));

        DBObject dbObject = builder.get();

        String bucket = jsonObject.getString("bucket", GridFS.DEFAULT_BUCKET);
        DBCollection collection = db.getCollection(bucket + ".files");

        // Ensure standard indexes as long as collection is small
        if (collection.count() < 1000) {
            collection.ensureIndex(BasicDBObjectBuilder.start().add("filename", 1).add("uploadDate", 1).get());
        }

        collection.save(dbObject);
        sendOK(message);
    } catch (Exception e) {
        sendError(message, "Error saving file", e);
    }
}
From source file: com.examples.aggregation.AggregationExample.java
License: Open Source License

public Iterator<DBObject> simpleAggregation() {
    // Builds: { $group: { _id: "$manufacturer", num_products: { $sum: 1 } } }
    BasicDBObjectBuilder builder = new BasicDBObjectBuilder();
    builder.push("$group");
    builder.add("_id", "$manufacturer");
    builder.push("num_products");
    builder.add("$sum", 1);
    builder.pop();
    builder.pop();
    return col.aggregate(builder.get()).results().iterator();
}
From source file: com.examples.aggregation.AggregationExample.java
License: Open Source License

public Iterator<DBObject> compoundAggregation() {
    // Builds: { $group: { _id: { manufacturer: "$manufacturer", category: "$category" },
    //                     num_products: { $sum: 1 } } }
    BasicDBObjectBuilder builder = new BasicDBObjectBuilder();
    builder.push("$group");
    builder.push("_id");
    builder.add("manufacturer", "$manufacturer");
    builder.add("category", "$category");
    builder.pop();
    builder.push("num_products");
    builder.add("$sum", 1);
    builder.pop();
    builder.pop();
    return col.aggregate(builder.get()).results().iterator();
}
From source file: com.examples.aggregation.AggregationExample.java
License: Open Source License

public Iterator<DBObject> sumPrices() {
    // Builds: { $group: { _id: "$manufacturer", sum_prices: { $sum: "$price" } } }
    BasicDBObjectBuilder builder = new BasicDBObjectBuilder();
    builder.push("$group");
    builder.add("_id", "$manufacturer");
    builder.push("sum_prices");
    builder.add("$sum", "$price");
    builder.pop();
    builder.pop();
    return col.aggregate(builder.get()).results().iterator();
}
From source file: com.examples.aggregation.AggregationExample.java
License: Open Source License

public Iterator<DBObject> averagePrices() {
    // Builds: { $group: { _id: "$category", sum_prices: { $avg: "$price" } } }
    BasicDBObjectBuilder builder = new BasicDBObjectBuilder();
    builder.push("$group");
    builder.add("_id", "$category");
    builder.push("sum_prices");
    builder.add("$avg", "$price");
    builder.pop();
    builder.pop();
    return col.aggregate(builder.get()).results().iterator();
}
From source file: com.examples.aggregation.AggregationExample.java
License: Open Source License

public Iterator<DBObject> addToSet() {
    // Builds: { $group: { _id: { maker: "$manufacturer" }, categories: { $addToSet: "$category" } } }
    BasicDBObjectBuilder builder = new BasicDBObjectBuilder();
    builder.push("$group");
    builder.push("_id");
    builder.add("maker", "$manufacturer");
    builder.pop();
    builder.push("categories");
    builder.add("$addToSet", "$category");
    builder.pop();
    builder.pop();
    return col.aggregate(builder.get()).results().iterator();
}
From source file: com.examples.aggregation.AggregationExample.java
License: Open Source License

public Iterator<DBObject> push() {
    // Builds: { $group: { _id: { maker: "$manufacturer" }, categories: { $push: "$category" } } }
    BasicDBObjectBuilder builder = new BasicDBObjectBuilder();
    builder.push("$group");
    builder.push("_id");
    builder.add("maker", "$manufacturer");
    builder.pop();
    builder.push("categories");
    builder.add("$push", "$category");
    builder.pop();
    builder.pop();
    return col.aggregate(builder.get()).results().iterator();
}