List of usage examples for com.mongodb.client MongoDatabase getCollection
MongoCollection<Document> getCollection(String collectionName);
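Before the project examples, here is a minimal, self-contained sketch of the basic call pattern. It is an illustration only: the host, port, database name "exampleDb", and collection name "events" are placeholder assumptions, and it targets the synchronous com.mongodb.client API (3.x Java driver) that the examples below use.

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import org.bson.Document;

public class GetCollectionExample {
    public static void main(String[] args) {
        // Placeholder host/port; adjust for your deployment.
        try (MongoClient mongoClient = new MongoClient("localhost", 27017)) {
            MongoDatabase database = mongoClient.getDatabase("exampleDb");
            // getCollection returns a handle immediately; the collection is created
            // on the server only when the first document is written to it.
            MongoCollection<Document> collection = database.getCollection("events");
            collection.insertOne(new Document("greeting", "hello"));
            System.out.println(collection.find().first().toJson());
        }
    }
}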
From source file: org.bananaforscale.cormac.dao.document.DocumentDataServiceImpl.java
License: Apache License
/**
 * Updates a document in the collection. If the document exists in the
 * collection it will be updated. If the document doesn't exist an error
 * will be thrown. If the specified database and collection do not exist
 * they will be created.
 *
 * @param databaseName the database
 * @param collectionName the collection
 * @param documentId the document identifier
 * @param content the JSON payload
 * @return true if the document was replaced
 * @throws DatasourceException
 * @throws DeserializeException
 * @throws IllegalArgumentException
 * @throws NotFoundException
 */
@Override
public boolean replaceById(String databaseName, String collectionName, String documentId, String content)
        throws DatasourceException, DeserializeException, IllegalArgumentException, NotFoundException {
    try {
        if (!validInputForAddOrUpdate(databaseName, collectionName, documentId, content)) {
            throw new IllegalArgumentException();
        }
        MongoDatabase mongoDatabase = mongoClient.getDatabase(databaseName);
        MongoCollection<Document> collection = mongoDatabase.getCollection(collectionName);
        Document query = new Document("_id", new ObjectId(documentId));
        Document document = Document.parse(content);
        if (collection.count(query) == 0) {
            throw new NotFoundException("The document doesn't exist in the collection");
        }
        collection.replaceOne(query, document);
        return true;
    } catch (IllegalArgumentException | ClassCastException | JSONParseException ex) {
        logger.error("The JSON payload is invalid", ex);
        throw new DeserializeException("The JSON payload is invalid");
    } catch (MongoException ex) {
        logger.error("An error occurred while updating the document", ex);
        throw new DatasourceException("An error occurred while updating the document");
    }
}

From source file: org.bananaforscale.cormac.dao.document.DocumentDataServiceImpl.java
License: Apache License
/**
 * Removes a document in the collection.
 *
 * @param databaseName the database
 * @param collectionName the collection
 * @param documentId the document identifier to delete
 * @return the result of the operation
 * @throws DatasourceException
 * @throws NotFoundException
 */
@Override
public boolean deleteById(String databaseName, String collectionName, String documentId)
        throws DatasourceException, NotFoundException {
    try {
        if (!databaseExists(databaseName)) {
            throw new NotFoundException("The database doesn't exist in the datasource");
        }
        if (!collectionExists(databaseName, collectionName)) {
            throw new NotFoundException("The collection doesn't exist in the datasource");
        }
        MongoDatabase mongoDatabase = mongoClient.getDatabase(databaseName);
        MongoCollection<Document> collection = mongoDatabase.getCollection(collectionName);
        Document query = new Document("_id", new ObjectId(documentId));
        if (collection.count(query) == 0) {
            throw new NotFoundException("The document doesn't exist in the datasource");
        }
        collection.deleteOne(query);
        return true;
    } catch (MongoException ex) {
        logger.error("An error occurred while deleting the document", ex);
        throw new DatasourceException("An error occurred while deleting the document");
    }
}

From source file: org.bananaforscale.cormac.dao.gridfs.GridFsDataServiceImpl.java
License: Apache License
/**
 * Deletes a bucket from the database.
 *
 * @param databaseName the database
 * @param bucketName the bucket
 * @return the result of the operation
 * @throws DatasourceException
 * @throws NotFoundException
 */
@Override
public boolean removeBucket(String databaseName, String bucketName)
        throws DatasourceException, NotFoundException {
    try {
        if (!databaseExists(databaseName)) {
            throw new NotFoundException("The database doesn't exist in the datasource");
        }
        if (!bucketExists(databaseName, bucketName)) {
            throw new NotFoundException("The bucket doesn't exist in the database");
        }
        MongoDatabase mongoDatabase = mongoClient.getDatabase(databaseName);
        String chunks = bucketName + ".chunks";
        String files = bucketName + ".files";
        mongoDatabase.getCollection(chunks).drop();
        mongoDatabase.getCollection(files).drop();
        return true;
    } catch (MongoException ex) {
        logger.error("An error occurred while deleting a bucket", ex);
        throw new DatasourceException("An error occurred while deleting a bucket");
    }
}

From source file: org.cleaner.main.GridFSDupeFinder.java
License: Open Source License
public static void main(String[] args) {
    try (MongoClient mc = new MongoClient()) {
        MongoDatabase database = mc.getDatabase("gridfs");
        FindDuplicatesByMd5 fd = new FindDuplicatesByMd5(new DuplicateLoggerStrategy(),
                new GridFS(mc.getDB("gridfs")));
        fd.find(database.getCollection("fs.files"));
    }
}

From source file: org.codinjutsu.tools.nosql.mongo.logic.SingleMongoClient.java
License: Apache License
public void update(ServerConfiguration configuration, SingleMongoCollection singleMongoCollection,
        Document mongoDocument) {
    MongoClient mongo = null;
    try {
        String databaseName = singleMongoCollection.getDatabaseName();
        mongo = createMongoClient(configuration);
        MongoDatabase database = mongo.getDatabase(databaseName);
        MongoCollection<Document> collection = database.getCollection(singleMongoCollection.getName());
        final Object id = mongoDocument.get("_id");
        if (id == null) {
            collection.insertOne(mongoDocument);
        } else {
            collection.replaceOne(Filters.eq("_id", id), mongoDocument);
        }
    } catch (UnknownHostException ex) {
        throw new ConfigurationException(ex);
    } finally {
        if (mongo != null) {
            mongo.close();
        }
    }
}

From source file: org.codinjutsu.tools.nosql.mongo.logic.SingleMongoClient.java
License: Apache License
public void delete(ServerConfiguration configuration, SingleMongoCollection singleMongoCollection, Object _id) {
    MongoClient mongo = null;
    try {
        String databaseName = singleMongoCollection.getDatabaseName();
        mongo = createMongoClient(configuration);
        MongoDatabase database = mongo.getDatabase(databaseName);
        MongoCollection<Document> collection = database.getCollection(singleMongoCollection.getName());
        collection.deleteOne(Filters.eq("_id", _id));
    } catch (UnknownHostException ex) {
        throw new ConfigurationException(ex);
    } finally {
        if (mongo != null) {
            mongo.close();
        }
    }
}

From source file: org.codinjutsu.tools.nosql.mongo.logic.SingleMongoClient.java
License: Apache License
public MongoResult loadCollectionValues(ServerConfiguration configuration,
        SingleMongoCollection singleMongoCollection, MongoQueryOptions mongoQueryOptions) {
    MongoClient mongo = null;
    try {
        String databaseName = singleMongoCollection.getDatabaseName();
        mongo = createMongoClient(configuration);
        MongoDatabase database = mongo.getDatabase(databaseName);
        MongoCollection<Document> collection = database.getCollection(singleMongoCollection.getName());
        MongoResult mongoResult = new MongoResult(singleMongoCollection.getName());
        if (mongoQueryOptions.isAggregate()) {
            return aggregate(mongoQueryOptions, mongoResult, collection);
        }
        return find(mongoQueryOptions, mongoResult, collection);
    } catch (UnknownHostException ex) {
        throw new ConfigurationException(ex);
    } finally {
        if (mongo != null) {
            mongo.close();
        }
    }
}

From source file: org.codinjutsu.tools.nosql.mongo.logic.SingleMongoClient.java
License: Apache License
public Document findMongoDocument(ServerConfiguration configuration,
        SingleMongoCollection singleMongoCollection, Object _id) {
    MongoClient mongo = null;
    try {
        String databaseName = singleMongoCollection.getDatabaseName();
        mongo = createMongoClient(configuration);
        MongoDatabase database = mongo.getDatabase(databaseName);
        com.mongodb.client.MongoCollection<Document> collection =
                database.getCollection(singleMongoCollection.getName());
        return collection.find(Filters.eq("_id", _id)).first();
    } catch (UnknownHostException ex) {
        throw new ConfigurationException(ex);
    } finally {
        if (mongo != null) {
            mongo.close();
        }
    }
}

From source file: org.eclipse.leshan.server.californium.impl.LeshanServer.java
License: Open Source License
private void observeResource(final Client client) {
    // ObserveRequest request = new ObserveRequest("2050/0/0");
    String contentFormatParam = "TLV";
    ContentFormat contentFormat = contentFormatParam != null
            ? ContentFormat.fromName(contentFormatParam.toUpperCase())
            : null;
    ObserveRequest request = new ObserveRequest(contentFormat, "/3/0/13");
    // ObserveRequest request = new ObserveRequest(contentFormat, "/2050/0/0");
    ObserveResponse cResponse = null;
    try {
        long i = 50000L;
        cResponse = this.send(client, request, i);
    } catch (InterruptedException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    LOG.debug("cResponse : " + cResponse);
    Observation observation = cResponse.getObservation();
    observationRegistry.addListener(new ObservationRegistryListener() {

        @Override
        public void newObservation(Observation observation) {
            // TODO Auto-generated method stub
        }

        @Override
        public void cancelled(Observation observation) {
            LOG.debug("Observation Cancelled ....");
        }

        @Override
        public void newValue(Observation observation, ObserveResponse response) {
            // writeToFile(observation, mostRecentValue, timestampedValues);
            // TODO Auto-generated method stub
            if (client.getRegistrationId().equals(observation.getRegistrationId())) {
                // initialize("document");
                /*
                 * try { publishMesssage(); } catch (Exception e1) { // TODO Auto-generated catch block
                 * e1.printStackTrace(); }
                 */
                // ********Saving into database**************************
                Gson gson = new Gson();
                // List<TimestampedLwM2mNode> obresp = response.getTimestampedLwM2mNode();
                JsonObject jsonObject = new JsonParser().parse(gson.toJson(response.getContent()))
                        .getAsJsonObject();
                DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                java.util.Date date = new java.util.Date();
                String timestamp = dateFormat.format(date);
                try {
                    MongoClient mongoClient = new MongoClient(mongoDBAdd, mongoDBPort);
                    MongoDatabase database = mongoClient.getDatabase("qolsys");
                    MongoCollection<Document> collection = database.getCollection("events");
                    String event = jsonObject.get("value").getAsString().trim();
                    Document document = new Document();
                    document.put("client_ep", client.getEndpoint());
                    document.put("event", event);
                    document.put("timestamp", timestamp);
                    collection.insertOne(document);
                    json = document.toJson();
                    sendToBroker(topic, json);
                    mongoClient.close();
                    producer.close();
                } catch (Exception e) {
                    e.printStackTrace();
                    System.err.println(e.getClass().getName() + ": " + e.getMessage());
                    System.exit(0);
                }
                // ******************************************************
                LOG.debug("recent observation ...." + observation);
            }
        }

        // *************************************************************
        /*
         * private static Producer<Integer, String> producer; private static final String topic = "mytopic";
         */
        @SuppressWarnings("deprecation")
        public void sendToBroker(String topic, String msg) {
            Properties producerProps = new Properties();
            producerProps.put("metadata.broker.list", kafkaBroker1Add + ":" + kafkaBroker1Port);
            producerProps.put("serializer.class", "kafka.serializer.StringEncoder");
            producerProps.put("request.required.acks", "1");
            ProducerConfig producerConfig = new ProducerConfig(producerProps);
            producer = new Producer<Integer, String>(producerConfig);
            KeyedMessage<Integer, String> keyedMsg = new KeyedMessage<Integer, String>(topic, msg);
            producer.send(keyedMsg);
        }
    });
}

From source file: org.eclipse.leshan.server.demo.servlet.FetchServlet.java
License: Open Source License
/**
 * {@inheritDoc}
 */
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    Properties props = new Properties();
    props.put("zookeeper.connect", "localhost:2181");
    props.put("group.id", "testgroup");
    props.put("zookeeper.session.timeout.ms", "400");
    props.put("zookeeper.sync.time.ms", "300");
    props.put("auto.commit.interval.ms", "1000");
    ConsumerConfig conConfig = new ConsumerConfig(props);
    consumerConnector = Consumer.createJavaConsumerConnector(conConfig);
    resp.setContentType("application/json");
    MongoClient client = new MongoClient(mongoDBAdd, mongoDBPort);
    // MongoClient client = new MongoClient("54.161.178.113", 27017);
    MongoDatabase database = client.getDatabase(mongoDBName);
    MongoCollection<Document> collection = database.getCollection("events");
    Gson gson = new Gson();
    ArrayList<ClientDao> clientDaoList = new ArrayList<ClientDao>();
    if (req.getPathInfo() == null) {
        try {
            MongoCursor<String> mongoCursor = database.getCollection("events")
                    .distinct("client_ep", String.class).iterator();
            while (mongoCursor.hasNext()) {
                String clientEp = mongoCursor.next();
                ClientDao clientDao = new ClientDao();
                clientDao.setClientEP(clientEp);
                clientDao.setTimestamp(null);
                clientDaoList.add(clientDao);
            }
            String json = gson.toJson(clientDaoList);
            resp.getWriter().write(json.toString());
            resp.setStatus(HttpServletResponse.SC_OK);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            client.close();
        }
    }
    if (req.getPathInfo() != null) {
        String[] path = StringUtils.split(req.getPathInfo(), '/');
        if (path.length == 1) {
            try {
                BasicDBObject query1 = new BasicDBObject();
                BasicDBObject sort = new BasicDBObject();
                sort.put("timestamp", -1);
                query1.put("client_ep", path[0].toString());
                Iterable<Document> cur = collection.find(query1).sort(sort);
                Iterator<Document> itr = cur.iterator();
                while (itr.hasNext()) {
                    Document document = itr.next();
                    ClientDao clientDao = new ClientDao();
                    clientDao.setClientEP(document.getString("client_ep"));
                    clientDao.setEvent(document.getString("event"));
                    clientDao.setTimestamp(document.getString("timestamp"));
                    clientDaoList.add(clientDao);
                }
                // String json = gson.toJson(clientDaoList);
                // resp.getWriter().write(json.toString());
                // resp.setStatus(HttpServletResponse.SC_OK);
                Map<String, Integer> topicCount = new HashMap<String, Integer>();
                topicCount.put(topic, new Integer(1));
                // ConsumerConnector creates the message stream for each topic
                Map<String, List<KafkaStream<byte[], byte[]>>> consumerStreams = consumerConnector
                        .createMessageStreams(topicCount);
                // Get Kafka stream for topic 'mytopic'
                List<KafkaStream<byte[], byte[]>> kStreamList = consumerStreams.get(topic);
                // Iterate stream using ConsumerIterator
                for (final KafkaStream<byte[], byte[]> kStreams : kStreamList) {
                    ConsumerIterator<byte[], byte[]> consumerIte = kStreams.iterator();
                    int count = 0;
                    while (consumerIte.hasNext()) {
                        count++;
                        // Shutdown the consumer connector
                        if (consumerConnector != null && count == 10)
                            consumerConnector.shutdown();
                        Message msg = new Message(consumerIte.next().message());
                        ByteBuffer buffer = msg.payload();
                        byte[] bytes = new byte[buffer.remaining()];
                        buffer.get(bytes);
                        String result = new String(bytes);
                        JSONObject jsonObj = new JSONObject(result);
                        ClientDao clientDao = new ClientDao();
                        clientDao.setClientEP(jsonObj.getString("client_ep"));
                        clientDao.setEvent(jsonObj.getString("event"));
                        clientDao.setTimestamp(jsonObj.getString("timestamp"));
                        clientDaoList.add(clientDao);
                    }
                }
                String json = gson.toJson(clientDaoList);
                resp.getWriter().write(json.toString());
                resp.setStatus(HttpServletResponse.SC_OK);
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                client.close();
            }
        }
    }
}