Example usage for com.mongodb DBCollection remove

List of usage examples for com.mongodb DBCollection remove

Introduction

On this page you can find example usage of com.mongodb DBCollection.remove.

Prototype

public WriteResult remove(final DBObject query) 

Document

Remove documents from a collection.
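
Before the project-specific examples below, here is a minimal, self-contained sketch of the call. The host, database, collection, and field names ("localhost", "test", "users", "status") are placeholders chosen for illustration, not taken from any of the listed projects.

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.MongoClient;
import com.mongodb.WriteResult;

public class RemoveExample {
    public static void main(String[] args) {
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        try {
            DB db = mongoClient.getDB("test");
            DBCollection users = db.getCollection("users");
            // remove every document whose "status" field equals "inactive"
            WriteResult result = users.remove(new BasicDBObject("status", "inactive"));
            System.out.println("Documents removed: " + result.getN());
        } finally {
            mongoClient.close();
        }
    }
}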

Usage

From source file:com.mythesis.userbehaviouranalysis.ProfileAnalysis.java

License:Apache License

/**
 * Finds the profiles that match the user's interests given their web history.
 * @param userID the user's id
 * @param history the user's web history
 * @param input a txt file that contains the necessary parameters
 */
public void perform(String userID, String[] history, File input) {

    System.out.println("total urls = " + history.length);
    //default parameters
    //number of random queries for each profile
    int numQueriesSuggestion = 5;
    //number of random webpages per query to suggest - total number of suggestions = 
    // numQueriesSuggestion*pagesPerQuerySuggestion
    int pagesPerQuerySuggestion = 1;
    //number of random queries to return as examples for alternatives profiles
    int numQueriesExample = 2;

    //we get the current date/time
    DateTime current = new DateTime();
    DateTimeFormatter fmt = DateTimeFormat.forPattern("dd/MM/yyyy HH:mm");
    String timestamp = fmt.print(current);

    //update user info - record when the latest analysis was performed
    Mongo mongo = new Mongo("localhost", 27017);
    DB db = mongo.getDB("profileAnalysis");
    DBCollection userinfo = db.getCollection("userinfo");
    BasicDBObject newDocument = new BasicDBObject();
    newDocument.put("$set", new BasicDBObject().append("timestamp", timestamp));
    BasicDBObject searchQuery = new BasicDBObject();
    searchQuery.put("userID", userID);
    userinfo.update(searchQuery, newDocument, true, false);

    //read the necessary parameters
    Utils utils = new Utils();
    utils.readInput(input);
    HashMap<String, ArrayList<String>> wordvectors = utils.wordvectors;
    HashMap<String, String> crawlerOutputPaths = utils.crawlerOutputPaths;

    //get the urls' content
    ArrayList<String> webpages = new ArrayList<>();
    ArrayList<String> urls = new ArrayList<>();
    for (int i = 0; i < history.length; i++) {
        WebParser pageParser = new WebParser(history[i]);
        pageParser.parse();
        String content = pageParser.getContent();
        if ("".equals(content) || content == null)
            continue;
        webpages.add(content);
        urls.add(history[i]);
    }

    //calculate the urls' scores
    HashMap<String, double[]> historyScores = new HashMap<>();
    String[] webpagesArr = new String[webpages.size()];
    webpagesArr = webpages.toArray(webpagesArr);
    String[] urlsArr = new String[urls.size()];
    urlsArr = urls.toArray(urlsArr);
    for (String profile : wordvectors.keySet()) {
        Scorer scorer = new Scorer(webpagesArr, urlsArr, wordvectors.get(profile));
        double[] semanticScores = scorer.getSemanticScores();
        double[] relevanceScores = scorer.getRelevanceScores();
        double[] confidenceScores = scorer.getConfidenceScores();
        double[] scores = scoreFormula(semanticScores, relevanceScores, confidenceScores);
        historyScores.put(profile, scores);
    }

    //find the maximum score of every url and sum the scores for each profile
    HashMap<String, Double> userProfilesScore = new HashMap<>();
    for (int i = 0; i < webpages.size(); i++) {
        double max = 0.0;
        String info = "undefined";
        for (String profile : historyScores.keySet()) {
            if (historyScores.get(profile)[i] > max) {
                max = historyScores.get(profile)[i];
                info = profile;
            }
        }
        if (!"undefined".equals(info)) {
            Double prevscore = userProfilesScore.get(info);
            userProfilesScore.put(info, (prevscore == null) ? max : prevscore + max);
        }
    }

    //find which profile level has maximum score e.g. if football/level=0 score is greater
    //than football/level=1 score then the user is better described as a football/level=0 user
    HashMap<String, Double> userProfileScores = new HashMap<>();
    HashMap<String, String> userProfileLevels = new HashMap<>();
    for (String s : userProfilesScore.keySet()) {
        String[] info = s.split("/");
        Double prevscore = userProfileScores.get(info[0] + "/" + info[1] + "/");
        if (prevscore == null) {
            userProfileScores.put(info[0] + "/" + info[1] + "/", userProfilesScore.get(s));
            userProfileLevels.put(info[0] + "/" + info[1] + "/", info[2]);
        } else if (userProfilesScore.get(s) > prevscore) {
            userProfileScores.put(info[0] + "/" + info[1] + "/", userProfilesScore.get(s));
            userProfileLevels.put(info[0] + "/" + info[1] + "/", info[2]);
        }
    }

    //put the final profiles together in this simple form: domain/profile/level of expertise and rank them
    Double totalScore = 0.0;
    for (String s : userProfileScores.keySet())
        totalScore += userProfileScores.get(s);

    Map<String, Double> userProfiles = new HashMap<>();
    for (String s : userProfileLevels.keySet())
        userProfiles.put(s + userProfileLevels.get(s), round(userProfileScores.get(s) * 100 / totalScore, 2));

    userProfiles = sortByValue(userProfiles);

    //find page suggestions for every profile
    HashMap<String, ArrayList<String>> pageSuggestions = new HashMap<>();
    for (String profile : userProfiles.keySet()) {
        String path = crawlerOutputPaths.get(profile);
        ArrayList<String> suggestions = getSuggestions(path, numQueriesSuggestion, pagesPerQuerySuggestion,
                history);
        pageSuggestions.put(profile, suggestions);
    }

    //find alternative profiles for every profile and representative queries
    HashMap<String, HashMap<String, ArrayList<String>>> alternativeProfiles = new HashMap<>();
    for (String userProfile : userProfiles.keySet()) {
        String[] userProfileInfo = userProfile.split("/");
        HashMap<String, ArrayList<String>> profileQueries = new HashMap<>();
        for (String profile : wordvectors.keySet()) {
            String[] profileInfo = profile.split("/");
            if (profileInfo[0].equals(userProfileInfo[0]) && profileInfo[1].equals(userProfileInfo[1])
                    && !profileInfo[2].equals(userProfileInfo[2])) {
                String path = crawlerOutputPaths.get(profile);
                ArrayList<String> queries = getQueries(path, numQueriesExample);
                for (int i = 0; i < queries.size(); i++) {
                    String query = queries.get(i);
                    queries.set(i, query.substring(query.lastIndexOf("\\") + 1).replace("-query", "")
                            .replace("+", " "));
                }
                profileQueries.put(profile, queries);
            }
        }
        alternativeProfiles.put(userProfile, profileQueries);
    }

    //prepare JSON response
    JSONObject response = new JSONObject();
    response.put("userID", userID);
    response.put("timestamp", timestamp);
    JSONArray list = new JSONArray();

    for (String profile : userProfiles.keySet()) {
        JSONObject profileInfo = new JSONObject();
        profileInfo.put("profile", profile);
        profileInfo.put("score", userProfiles.get(profile));

        JSONArray temp = new JSONArray();
        ArrayList<String> suggestions = pageSuggestions.get(profile);
        for (String s : suggestions)
            temp.add(s);
        profileInfo.put("suggestions", temp);

        JSONArray alternativesArray = new JSONArray();
        for (String s : alternativeProfiles.get(profile).keySet()) {
            JSONObject alternativeInfo = new JSONObject();
            alternativeInfo.put("alternative", s);
            ArrayList<String> queries = alternativeProfiles.get(profile).get(s);
            JSONArray queriesArray = new JSONArray();
            for (String str : queries) {
                queriesArray.add(str);
            }
            alternativeInfo.put("queries", queriesArray);
            alternativesArray.add(alternativeInfo);
        }

        profileInfo.put("alternatives", alternativesArray);
        list.add(profileInfo);
    }
    response.put("profiles", list);
    System.out.println("JSON response is ready: " + response);

    //delete previous analysis and store results
    DBCollection collection = db.getCollection("history");
    BasicDBObject previous = new BasicDBObject();
    previous.put("userID", userID);
    collection.remove(previous);
    DBObject dbObject = (DBObject) JSON.parse(response.toString());
    collection.insert(dbObject);
    System.out.println("I saved the analysis...");

}

From source file:com.nlp.twitterstream.MongoUtil.java

License:Open Source License

/**
 * Remove documents matching the given criteria.
 *
 * @param collection
 *            the collection to remove from
 * @param query
 *            the query to match
 */
public WriteResult removeDocuments(DBCollection collection, BasicDBObject query) {

    WriteResult writeRes = collection.remove(query);

    return writeRes;
}
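
The WriteResult returned here exposes getN(), so a caller can check or log how many documents matched the query and were removed.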

From source file:com.norconex.collector.core.data.store.impl.mongo.MongoCrawlDataStore.java

License:Apache License

private void deleteAllDocuments(DBCollection coll) {
    coll.remove(new BasicDBObject());
}
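
Passing an empty BasicDBObject as the query matches every document, so this helper empties the collection while leaving the collection itself and its indexes in place, unlike drop().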

From source file:com.nowellpoint.mongodb.persistence.impl.DocumentManagerImpl.java

License:Apache License

@Override
public void remove(Object document) {
    String collectionName = doumentManagerFactory.resolveDocumentName(document.getClass());
    DBCollection collection = getDB().getCollection(collectionName);
    Object documentId = doumentManagerFactory.resolveId(document);
    WriteResult wr = collection.remove(new BasicDBObject(DocumentManagerFactoryImpl.ID, documentId));
    if (wr.getError() != null) {
        throw new MongoException(wr.getLastError());
    }
}

From source file:com.petpet.c3po.dao.mongo.MongoPersistenceLayer.java

License:Apache License

/**
 * {@inheritDoc}
 */
@Override
public <T extends Model> void remove(T object) {
    if (object == null) {
        return;
    }

    DBCollection dbCollection = this.getCollection(object.getClass());
    MongoModelSerializer serializer = this.getSerializer(object.getClass());

    WriteResult remove = dbCollection.remove(serializer.serialize(object));
    setResult(remove);

}

From source file:com.petpet.c3po.dao.mongo.MongoPersistenceLayer.java

License:Apache License

/**
 * {@inheritDoc}
 */
@Override
public <T extends Model> void remove(Class<T> clazz, Filter filter) {

    DBObject query = this.getCachedFilter(filter);
    DBCollection dbCollection = this.getCollection(clazz);
    WriteResult remove = dbCollection.remove(query);
    clearCache();
    setResult(remove);

}

From source file:com.redhat.lightblue.mongo.crud.BasicDocDeleter.java

License:Open Source License

@Override
public void delete(CRUDOperationContext ctx, DBCollection collection, DBObject mongoQuery,
        CRUDDeleteResponse response) {
    LOGGER.debug("Removing docs with {}", mongoQuery);

    int numDeleted = 0;

    if (!hookOptimization || ctx.getHookManager().hasHooks(ctx, CRUDOperation.DELETE)) {
        LOGGER.debug("There are hooks, retrieve-delete");
        try (DBCursor cursor = collection.find(mongoQuery, null)) {
            // Set read preference to primary for read-for-update operations
            cursor.setReadPreference(ReadPreference.primary());

            // All docs, to be put into the context
            ArrayList<DocCtx> contextDocs = new ArrayList<>();
            // ids to delete from the db
            List<Object> idsToDelete = new ArrayList<>(batchSize);
            while (cursor.hasNext()) {

                // We will use this index to access the documents deleted in this batch
                int thisBatchIndex = contextDocs.size();
                if (idsToDelete.size() < batchSize) {
                    // build batch
                    DBObject doc = cursor.next();
                    DocTranslator.TranslatedDoc tdoc = translator.toJson(doc);
                    DocCtx docCtx = new DocCtx(tdoc.doc, tdoc.rmd);
                    docCtx.setOriginalDocument(docCtx);
                    docCtx.setCRUDOperationPerformed(CRUDOperation.DELETE);
                    contextDocs.add(docCtx);
                    idsToDelete.add(doc.get(MongoCRUDController.ID_STR));
                }

                if (idsToDelete.size() == batchSize || !cursor.hasNext()) {
                    // batch built or run out of documents                        
                    BulkWriteOperation bw = collection.initializeUnorderedBulkOperation();

                    for (Object id : idsToDelete) {
                        // doing a bulk of single operations instead of removing by initial query
                        // that way we know which documents were not removed
                        bw.find(new BasicDBObject("_id", id)).remove();
                    }

                    BulkWriteResult result = null;
                    try {
                        if (writeConcern == null) {
                            LOGGER.debug("Bulk deleting docs");
                            result = bw.execute();
                        } else {
                            LOGGER.debug("Bulk deleting docs with writeConcern={} from execution",
                                    writeConcern);
                            result = bw.execute(writeConcern);
                        }
                        LOGGER.debug("Bulk deleted docs - attempted {}, deleted {}", idsToDelete.size(),
                                result.getRemovedCount());
                    } catch (BulkWriteException bwe) {
                        LOGGER.error("Bulk write exception", bwe);
                        handleBulkWriteError(bwe.getWriteErrors(),
                                contextDocs.subList(thisBatchIndex, contextDocs.size()));
                        result = bwe.getWriteResult();
                    } catch (RuntimeException e) {
                        LOGGER.error("Exception", e);
                        throw e;
                    } finally {
                        // result stays null if a RuntimeException other than BulkWriteException was thrown
                        if (result != null) {
                            numDeleted += result.getRemovedCount();
                        }
                        // clear list before processing next batch
                        idsToDelete.clear();
                    }
                }
            }
            ctx.setDocumentStream(new ListDocumentStream<DocCtx>(contextDocs));
        }
    } else {
        LOGGER.debug("There are no hooks, deleting in bulk");
        try {
            if (writeConcern == null) {
                numDeleted = collection.remove(mongoQuery).getN();
            } else {
                numDeleted = collection.remove(mongoQuery, writeConcern).getN();
            }
        } catch (MongoException e) {
            LOGGER.error("Deletion error", e);
            throw e;
        }
        ctx.setDocumentStream(new ListDocumentStream<DocCtx>(new ArrayList<DocCtx>()));
    }

    response.setNumDeleted(numDeleted);
}
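
Note the two deletion paths above: when delete hooks are registered, matching documents are first read and then removed in _id-keyed batches through a bulk write, so the hook machinery receives exactly the documents that were deleted; when there are no hooks, a single collection.remove(mongoQuery) is issued and its getN() value becomes the reported count.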

From source file:com.restfeel.controller.rest.EntityDataController.java

License:Apache License

@RequestMapping(value = "/api/{projectId}/entities/{name}/{uuid}", method = RequestMethod.DELETE, headers = "Accept=application/json")
public @ResponseBody StatusResponse deleteEntityData(@PathVariable("projectId") String projectId,
        @PathVariable("name") String entityName, @PathVariable("uuid") String uuid,
        @RequestHeader(value = "authToken", required = false) String authToken) {

    StatusResponse res = new StatusResponse();

    JSONObject authRes = authService.authorize(projectId, authToken, "USER");
    if (!authRes.getBoolean(SUCCESS)) {
        res.setStatus("Unauthorized");
        return res;
    }

    DBCollection dbCollection = mongoTemplate.getCollection(projectId + "_" + entityName);
    BasicDBObject queryObject = new BasicDBObject();
    queryObject.append("_id", new ObjectId(uuid));
    dbCollection.remove(queryObject);

    res.setStatus("DELETED");

    return res;
}

From source file:com.restfiddle.controller.rest.EntityDataController.java

License:Apache License

@RequestMapping(value = "/api/{projectId}/entities/{name}/{uuid}", method = RequestMethod.DELETE, headers = "Accept=application/json")
public @ResponseBody StatusResponse deleteEntityData(@PathVariable("projectId") String projectId,
        @PathVariable("name") String entityName, @PathVariable("uuid") String uuid) {
    DBCollection dbCollection = mongoTemplate.getCollection(entityName);
    BasicDBObject queryObject = new BasicDBObject();
    queryObject.append("_id", new ObjectId(uuid));
    dbCollection.remove(queryObject);

    StatusResponse res = new StatusResponse();
    res.setStatus("DELETED");

    return res;
}

From source file:com.ricardolorenzo.identity.user.impl.UserIdentityManagerMongoDB.java

License:Open Source License

/**
 * All the scripts should have the following format:
 *
 * {
 *    database.collection: {
 *        operation: insert|update|find|aggregate|delete
 *        query: {}
 *    }
 * }
 *
 * For update operations, you should specify the following:
 *
 * query: {
 *     find: {}
 *     update: {}
 * }
 */
private List<DBObject> runQueryScript(final String scriptType, final Map<String, Object[]> attributes)
        throws IdentityException {
    List<DBObject> results = new ArrayList<>();
    try {
        DB database = mongoClient.getDB(this.properties.getProperty("mongodb.database"));
        final ScriptCollection sc = getScriptCollection();
        if (sc.hasScript(scriptType)) {
            final String scriptContent = sc.getScript(scriptType);
            String query = createQueryFromScript(scriptContent, attributes);
            DBObject collectionOperation = DBObject.class.cast(JSON.parse(query));

            for (String collection : collectionOperation.keySet()) {
                if (!database.collectionExists(collection)) {
                    throw new IdentityException("collection [" + collection + "] does not exists");
                }

                DBObject dbObject = DBObject.class.cast(collectionOperation.get(collection));
                if (!dbObject.containsField("operation")) {
                    throw new IdentityException("operation field not specified");
                }

                String dbOperation = String.class.cast(dbObject.get("operation")).toLowerCase();
                if (!OPERATIONS.contains(dbOperation)) {
                    throw new IdentityException("operation [" + dbOperation + "] not supported");
                }

                DBObject dbQuery = DBObject.class.cast(dbObject.get("query"));
                if (dbQuery == null) {
                    throw new IdentityException("query field not specified");
                }

                DBCollection coll = database.getCollection(collection);
                switch (dbOperation) {
                case "insert": {
                    coll.insert(dbQuery);
                    break;
                }
                case "update": {
                    // find/update are nested inside the query object, per the script format above
                    if (!dbQuery.containsField("find")) {
                        throw new IdentityException("find field not found inside the update operation");
                    }
                    if (!dbQuery.containsField("update")) {
                        throw new IdentityException("update field not found inside the update operation");
                    }
                    DBObject dbUpdateFind = DBObject.class.cast(dbQuery.get("find"));
                    DBObject dbUpdateFields = DBObject.class.cast(dbQuery.get("update"));
                    coll.update(dbUpdateFind, dbUpdateFields, false, false);
                    break;
                }
                case "delete": {
                    coll.remove(dbQuery);
                    break;
                }
                case "find": {
                    DBCursor cursor = coll.find(dbQuery);
                    while (cursor.hasNext()) {
                        results.add(cursor.next());
                    }
                    break;
                }
                case "aggregate": {
                    List<DBObject> aggregate = new ArrayList<DBObject>();
                    aggregate.add(dbQuery);
                    for (DBObject o : coll.aggregate(aggregate).results()) {
                        results.add(o);
                    }
                    break;
                }
                }
            }
            return results;
        }
    } catch (final NoSuchAlgorithmException e) {
        throw new IdentityException(e.getMessage());
    } finally {
        /**
         * TODO close cursors
         */
    }
    return null;
}
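
For orientation, a script in the format described by the javadoc above could look like the following for a delete operation. The collection name "users" and the query field are illustrative assumptions, not taken from the project:

{
    "users": {
        "operation": "delete",
        "query": { "accountStatus": "disabled" }
    }
}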