List of usage examples for com.mongodb.client.MongoCollection.updateOne
UpdateResult updateOne(Bson filter, Bson update);
UpdateResult updateOne(Bson filter, Bson update, UpdateOptions updateOptions);
UpdateResult updateOne(ClientSession clientSession, Bson filter, List<? extends Bson> update);
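None of the examples below uses the ClientSession/aggregation-pipeline overload. A minimal sketch of it, assuming a MongoDB 4.2+ replica set (sessions and pipeline updates are unavailable on older standalone servers), a 3.11+ Java driver, and a hypothetical "users" collection:

MongoClient mongoClient = MongoClients.create("mongodb://localhost:27017");
MongoCollection<Document> users = mongoClient.getDatabase("test").getCollection("users");
try (ClientSession session = mongoClient.startSession()) {
    // A one-stage pipeline update; $$NOW is the server-side current timestamp
    List<Bson> pipeline = Arrays.asList(new Document("$set", new Document("lastSeen", "$$NOW")));
    UpdateResult result = users.updateOne(session, Filters.eq("_id", "alice"), pipeline);
    System.out.println("matched=" + result.getMatchedCount() + " modified=" + result.getModifiedCount());
}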
From source file:es.dheraspi.sums.model.DAOMongoDB.java
@Override
public void insertSummoner(Summoner summoner, String region) {
    MongoCredential credential = MongoCredential.createCredential(user, dbname, password.toCharArray());
    try (MongoClient mongoClient = new MongoClient(new ServerAddress(host), Arrays.asList(credential))) {
        db = mongoClient.getDatabase(dbname);
        MongoCollection<Document> coll = db.getCollection("summoners");
        int profileIconID = summoner.getProfileIconID();
        // All summoner fields belong inside the $set sub-document; updateOne rejects
        // update documents that mix operators with plain top-level fields.
        Bson doc = new Document("$set",
                new Document("_id", summoner.getID())
                        .append("name", summoner.getName())
                        .append("level", summoner.getLevel())
                        .append("profileIconID", profileIconID < 0 ? 0 : profileIconID));
        Bson filter = Filters.eq("_id", region);
        // All known regions fall through without any region-specific handling
        switch (region) {
        case "EUW": case "EUNE": case "NA": case "LAN": case "LAS":
        case "BR": case "TR": case "RU": case "OCE": case "KR": case "JP":
            break;
        }
        UpdateOptions options = new UpdateOptions().upsert(true);
        coll.updateOne(filter, doc, options);
    } catch (APIException ex) {
        // Some unknown error when trying to get matchList
    }
}
From source file:net.netzgut.integral.mongo.internal.services.MongoODMImplementation.java
License:Apache License
@Override
public <T extends Serializable> UpdateResult upsert(Bson filter, T entity) {
    Class<? extends Serializable> entityClass = entity.getClass();
    MongoCollection<Document> collection = this.mongo.getCollection(entityClass);
    Document document = this.converter.documentFrom(entity);
    Bson update = Filters.eq("$set", document);
    return collection.updateOne(filter, update, new UpdateOptions().upsert(true));
}
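In the upsert above, Filters.eq is used only because it happens to render the BSON document {"$set": document}; it is not acting as a query filter. A sketch of the same update written explicitly, reusing the names from the method above:

Document document = this.converter.documentFrom(entity);
Bson update = new Document("$set", document);   // same BSON that Filters.eq("$set", document) produces
return collection.updateOne(filter, update, new UpdateOptions().upsert(true));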
From source file:org.apache.nifi.processors.mongodb.PutMongo.java
License:Apache License
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final ComponentLog logger = getLogger();
    final Charset charset = Charset.forName(context.getProperty(CHARACTER_SET).getValue());
    final String mode = context.getProperty(MODE).getValue();
    final String updateMode = context.getProperty(UPDATE_MODE).getValue();
    final WriteConcern writeConcern = getWriteConcern(context);
    final MongoCollection<Document> collection = getCollection(context, flowFile).withWriteConcern(writeConcern);
    try {
        // Read the contents of the FlowFile into a byte array
        final byte[] content = new byte[(int) flowFile.getSize()];
        session.read(flowFile, in -> StreamUtils.fillBuffer(in, content, true));

        // parse
        final Object doc = (mode.equals(MODE_INSERT)
                || (mode.equals(MODE_UPDATE) && updateMode.equals(UPDATE_WITH_DOC.getValue())))
                        ? Document.parse(new String(content, charset))
                        : JSON.parse(new String(content, charset));

        if (MODE_INSERT.equalsIgnoreCase(mode)) {
            collection.insertOne((Document) doc);
            logger.info("inserted {} into MongoDB", new Object[] { flowFile });
        } else {
            // update
            final boolean upsert = context.getProperty(UPSERT).asBoolean();
            final String updateKey = context.getProperty(UPDATE_QUERY_KEY).getValue();
            Object keyVal = ((Map) doc).get(updateKey);
            if (updateKey.equals("_id") && ObjectId.isValid((String) keyVal)) {
                keyVal = new ObjectId((String) keyVal);
            }
            final Document query = new Document(updateKey, keyVal);

            if (updateMode.equals(UPDATE_WITH_DOC.getValue())) {
                collection.replaceOne(query, (Document) doc, new UpdateOptions().upsert(upsert));
            } else {
                BasicDBObject update = (BasicDBObject) doc;
                update.remove(updateKey);
                collection.updateOne(query, update, new UpdateOptions().upsert(upsert));
            }
            logger.info("updated {} into MongoDB", new Object[] { flowFile });
        }
        session.getProvenanceReporter().send(flowFile, getURI(context));
        session.transfer(flowFile, REL_SUCCESS);
    } catch (Exception e) {
        logger.error("Failed to insert {} into MongoDB due to {}", new Object[] { flowFile, e }, e);
        session.transfer(flowFile, REL_FAILURE);
        context.yield();
    }
}
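The update branch above shows the two update styles side by side: replaceOne swaps in the whole parsed document, while updateOne expects an operator document such as {"$set": ...}. A minimal sketch of the difference, assuming a hypothetical collection and ObjectId:

Document query = new Document("_id", new ObjectId("507f1f77bcf86cd799439011"));  // hypothetical id

// replaceOne: the stored document becomes exactly this replacement (apart from _id)
collection.replaceOne(query, new Document("name", "Alice").append("age", 30),
        new UpdateOptions().upsert(true));

// updateOne: only the fields named in the operator document change
collection.updateOne(query, new Document("$set", new Document("age", 31)),
        new UpdateOptions().upsert(true));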
From source file:rapture.lock.mongodb.MongoLockHandler.java
License:Open Source License
@Override
public LockHandle acquireLock(String lockHolder, String lockName, long secondsToWait, long secondsToHold) {
    log.trace("Mongo acquire lock");
    // create a random marker for each acquisition -- that way double-grabs
    // from the same session are rejected
    String random = makeRandom();
    Document query = getLockQuery(lockName);
    Document val = createLockVal(lockHolder, secondsToHold, random);
    Document update = createAddValObject(val);
    // First see if this exists
    MongoCollection<Document> coll = getLockCollection();
    long bailTime = System.currentTimeMillis() + secondsToWait * 1000;
    boolean gotLock = false;
    coll.updateOne(query, update, new UpdateOptions().upsert(true));
    while (!gotLock) {
        Document obj = coll.find(query).first();
        // Look for the locks field, and see if we are top
        if (obj != null) {
            log.trace("Locks are present");
            List<Object> locks = (List<Object>) obj.get(LOCKS);
            if (locks.size() > 0) {
                Document first = (Document) locks.get(0);
                log.trace("First lock is " + first.get(CTX));
                if (first.get(CTX).toString().equals(lockHolder)
                        && first.get(RANDOM).toString().equals(random)) {
                    log.trace(String.format("We have the lock with name '%s'", lockName));
                    gotLock = true;
                } else {
                    if (log.isTraceEnabled()) {
                        log.trace(String.format(
                                "name: [%s]\n" + "ctx: [%s], rnd: [%s]\n" + "ctx: [%s], rnd: [%s]",
                                lockName, first.get(CTX).toString(), first.get(RANDOM).toString(),
                                lockHolder, random));
                    }
                    if (expired(first)) {
                        releaseLockWithRandom(first.get(CTX).toString(), lockName, first.get(RANDOM).toString());
                    }
                    if (waitAndShouldBail(bailTime)) {
                        break;
                    }
                }
            } else {
                log.trace("Locks return list was zero size");
                if (waitAndShouldBail(bailTime)) {
                    break;
                }
            }
        } else {
            log.trace("No update, bailing with no lock");
            break;
        }
    }
    if (gotLock) {
        LockHandle lockHandle = new LockHandle();
        lockHandle.setLockName(lockName);
        lockHandle.setHandle(random);
        lockHandle.setLockHolder(lockHolder);
        return lockHandle;
    } else {
        log.info(String.format("Could not get lock for %s, bailing", lockName));
        // could not get the lock, so we should remove the lock with the random we saved earlier
        // in this method, since the random is never accessible again
        releaseLockWithRandom(lockHolder, lockName, random);
        return null;
    }
}
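The helpers getLockQuery, createLockVal, and createAddValObject are not shown here; the upsert-then-poll loop above only works as a queue if the update appends the caller's entry to the LOCKS array. A sketch of what such an update could look like (the field names "lockName" and "expires" are assumptions, not taken from the Rapture source):

Document query = new Document("lockName", lockName);
Document val = new Document(CTX, lockHolder)
        .append(RANDOM, random)
        .append("expires", System.currentTimeMillis() + secondsToHold * 1000);
// $push appends the entry; with upsert(true) the lock document is created on first use
Document update = new Document("$push", new Document(LOCKS, val));
coll.updateOne(query, update, new UpdateOptions().upsert(true));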
From source file:soatreefinder.SOATreeFinder.java
public static void loadMongoFromSQL(String sqlQuery, String mongoDBName, String mongoCollName,
        String mongoCollPrimaryKey) throws SQLException, IOException, FileNotFoundException {
    propsFile.load(new FileInputStream(System.getProperty("user.dir") + "\\context.properties"));
    Statement stmt = null;
    String blobString;
    Map<String, Object> sqlMap = new HashMap<>();

    MongoClient mongoClient = new MongoClient(Arrays.asList(new ServerAddress(
            propsFile.getProperty("mongoHost"), Integer.parseInt(propsFile.getProperty("mongoPort")))));
    MongoDatabase db = mongoClient.getDatabase(mongoDBName);
    MongoCollection<Document> coll = db.getCollection(mongoCollName);
    UpdateOptions upsertOption = new UpdateOptions();
    upsertOption.upsert(true);

    Connection soadb_Connection = DriverManager.getConnection("jdbc:oracle:thin:"
            + propsFile.getProperty("user") + "/" + propsFile.getProperty("password") + "@//"
            + propsFile.getProperty("host") + ":" + propsFile.getProperty("port") + "/"
            + propsFile.getProperty("service"));
    stmt = soadb_Connection.createStatement();
    ResultSet rs = stmt.executeQuery(sqlQuery);
    ResultSetMetaData rsmd = rs.getMetaData();

    while (rs.next()) {
        for (int i = 1; i <= rsmd.getColumnCount(); i++) {
            /* The following switch checks the SQL data type of a given column (java.sql.Types)
               and puts the appropriate object into the hash map */
            switch (rsmd.getColumnType(i)) {
            case Types.VARCHAR:
            case Types.NVARCHAR:
            case Types.NCHAR:
            case Types.CHAR:
                sqlMap.put(rsmd.getColumnName(i), rs.getString(i));
                break;
            case Types.BOOLEAN:
                sqlMap.put(rsmd.getColumnName(i), rs.getBoolean(i));
                break;
            case Types.BIT:
                sqlMap.put(rsmd.getColumnName(i), rs.getByte(i));
                break;
            case Types.NUMERIC:
            case Types.INTEGER:
                sqlMap.put(rsmd.getColumnName(i), rs.getInt(i));
                break;
            case Types.FLOAT:
                sqlMap.put(rsmd.getColumnName(i), rs.getFloat(i));
                break;
            case Types.DOUBLE:
                sqlMap.put(rsmd.getColumnName(i), rs.getDouble(i));
                break;
            case Types.DECIMAL:
                sqlMap.put(rsmd.getColumnName(i), rs.getBigDecimal(i));
                break;
            case Types.DATE:
                sqlMap.put(rsmd.getColumnName(i), rs.getDate(i).toString());
                break;
            case Types.TIME:
                sqlMap.put(rsmd.getColumnName(i), rs.getTime(i).toString());
                break;
            case Types.TIMESTAMP:
                sqlMap.put(rsmd.getColumnName(i), rs.getTimestamp(i).toString());
                break;
            case Types.ARRAY:
                sqlMap.put(rsmd.getColumnName(i), rs.getArray(i));
                break;
            case Types.BLOB: {
                Blob blob = rs.getBlob(i);
                byte[] bdata = blob.getBytes(1, (int) blob.length());
                blobString = new String(bdata);
                sqlMap.put(rsmd.getColumnName(i), blobString);
                break;
            }
            case Types.CLOB:
                sqlMap.put(rsmd.getColumnName(i), rs.getClob(i));
                break;
            default:
                sqlMap.put(rsmd.getColumnName(i), rs.getString(i));
            }
        }
        // Upsert sql record into mongo collection
        coll.updateOne(eq(mongoCollPrimaryKey, sqlMap.get(mongoCollPrimaryKey)),
                new Document("$set", new Document(sqlMap)), upsertOption);
    }
}