Example usage for com.mongodb DBCollection update

List of usage examples for com.mongodb DBCollection update

Introduction

On this page you can find example usage of com.mongodb DBCollection.update.

Prototype

public WriteResult update(final DBObject query, final DBObject update, final boolean upsert,
        final boolean multi) 

Document

Modify an existing document or documents in a collection.
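
For orientation, here is a minimal, self-contained sketch of the four-argument call using the legacy driver API. It is only an illustration: the host, database, collection, and field names are made up, and a mongod instance is assumed to be running locally.

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.MongoClient;
import com.mongodb.WriteResult;

public class UpdateExample {
    public static void main(String[] args) {
        // Connect to a local mongod (assumed to be running on the default port).
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        DB db = mongoClient.getDB("exampleDb");
        DBCollection collection = db.getCollection("exampleCollection");

        // Match every document whose "status" field is "pending".
        BasicDBObject query = new BasicDBObject("status", "pending");
        // $set the "status" field to "processed" on the matched documents.
        BasicDBObject update = new BasicDBObject("$set", new BasicDBObject("status", "processed"));

        // upsert = false: do not insert if nothing matches.
        // multi = true: update every matching document, not just the first one.
        WriteResult result = collection.update(query, update, false, true);
        System.out.println("Documents affected: " + result.getN());

        mongoClient.close();
    }
}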

Usage

From source file:com.liferay.mongodb.hook.service.impl.MongoExpandoValueLocalServiceImpl.java

License:Open Source License

@Override
public void deleteTableValues(long tableId) {
    try {
        ExpandoTable expandoTable = ExpandoTableLocalServiceUtil.getTable(tableId);

        DBCollection dbCollection = MongoDBUtil.getCollection(expandoTable);

        DBObject valueDBObject = new BasicDBObject();

        List<ExpandoColumn> expandoColumns = ExpandoColumnLocalServiceUtil.getColumns(tableId);

        for (ExpandoColumn expandoColumn : expandoColumns) {
            valueDBObject.put(expandoColumn.getName(), null);
        }

        DBObject operatorDBObject = new BasicDBObject(MongoOperator.SET, valueDBObject);

        dbCollection.update(new BasicDBObject(), operatorDBObject, false, true);
    } catch (PortalException pe) {
        throw new SystemException(pe);
    }
}
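
The Liferay snippet above clears every Expando column by $set-ting it to null in all documents (empty query, multi = true). As a hedged variant, not part of the Liferay source, the sketch below removes the fields entirely with $unset instead; the class name and the caller-supplied field list are hypothetical.

import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.WriteResult;

import java.util.List;

public class ExpandoFieldCleaner {

    /**
     * Removes the given fields from every document in the collection with $unset,
     * instead of $set-ting them to null as in the snippet above.
     */
    public static WriteResult unsetFields(DBCollection dbCollection, List<String> fieldNames) {
        DBObject fields = new BasicDBObject();
        for (String fieldName : fieldNames) {
            fields.put(fieldName, "");
        }
        DBObject operator = new BasicDBObject("$unset", fields);

        // Empty query matches every document; upsert = false, multi = true.
        return dbCollection.update(new BasicDBObject(), operator, false, true);
    }
}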

From source file:com.mobileman.moments.core.services.user.impl.UserServiceImpl.java

License:Apache License

@Override
@ServiceActivator(inputChannel = MomentsESBConstants.QUESTION_DELETED_CHANNEL)
public void questionDeleted(final Question question) {

    mongoTemplate.execute(User.class, new CollectionCallback<Boolean>() {

        @Override
        public Boolean doInCollection(DBCollection collection) throws MongoException, DataAccessException {

            final Query query = Query
                    .query(Criteria.where("followed_questions._id").is(new ObjectId(question.getId())));
            final Update update = new Update().pull("followed_questions",
                    new BasicDBObject("_id", new ObjectId(question.getId())));
            WriteResult result = collection.update(query.getQueryObject(), update.getUpdateObject(), false,
                    true);

            return result.getN() > 0;
        }
    });

}
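
The same $pull update can be issued directly with the legacy driver, without Spring Data's Query and Update builders. The sketch below is a hedged rewrite of the callback above, not part of the original source; the collection handle and the questionId parameter are supplied by a hypothetical caller.

import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.WriteResult;

import org.bson.types.ObjectId;

public class QuestionCleanup {

    /**
     * Removes the deleted question from every user's followed_questions array.
     * "users" is assumed to be the collection backing the User documents.
     */
    public static boolean pullFollowedQuestion(DBCollection users, String questionId) {
        ObjectId id = new ObjectId(questionId);

        // Select users that still follow the deleted question.
        DBObject query = new BasicDBObject("followed_questions._id", id);

        // $pull the embedded question document out of the array.
        DBObject update = new BasicDBObject("$pull",
                new BasicDBObject("followed_questions", new BasicDBObject("_id", id)));

        // upsert = false, multi = true: update every matching user, insert nothing.
        WriteResult result = users.update(query, update, false, true);
        return result.getN() > 0;
    }
}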

From source file:com.mythesis.userbehaviouranalysis.ProfileAnalysis.java

License:Apache License

/**
 * Finds the profiles that match the user's interests given their web history
 * @param userID the user's id
 * @param history the user's web history
 * @param input a txt file that contains the necessary parameters
 */
public void perform(String userID, String[] history, File input) {

    System.out.println("total urls = " + history.length);
    //default parameters
    //number of random queries for each profile
    int numQueriesSuggestion = 5;
    //number of random webpages per query to suggest - total number of suggestions = 
    // numQueriesSuggestion*pagesPerQuerySuggestion
    int pagesPerQuerySuggestion = 1;
    //number of random queries to return as examples for alternative profiles
    int numQueriesExample = 2;

    //we get the current date/time
    DateTime current = new DateTime();
    DateTimeFormatter fmt = DateTimeFormat.forPattern("dd/MM/yyyy HH:mm");
    String timestamp = fmt.print(current);

    //update user info - I'll store the results when I perform the last analysis
    Mongo mongo = new Mongo("localhost", 27017);
    DB db = mongo.getDB("profileAnalysis");
    DBCollection userinfo = db.getCollection("userinfo");
    BasicDBObject newDocument = new BasicDBObject();
    newDocument.put("$set", new BasicDBObject().append("timestamp", timestamp));
    BasicDBObject searchQuery = new BasicDBObject();
    searchQuery.put("userID", userID);
    userinfo.update(searchQuery, newDocument, true, false);

    //read the necessary parameters
    Utils utils = new Utils();
    utils.readInput(input);
    HashMap<String, ArrayList<String>> wordvectors = utils.wordvectors;
    HashMap<String, String> crawlerOutputPaths = utils.crawlerOutputPaths;

    //get the urls' content
    ArrayList<String> webpages = new ArrayList<>();
    ArrayList<String> urls = new ArrayList<>();
    for (int i = 0; i < history.length; i++) {
        WebParser pageParser = new WebParser(history[i]);
        pageParser.parse();
        String content = pageParser.getContent();
        if (content == null || content.isEmpty())
            continue;
        webpages.add(content);
        urls.add(history[i]);
    }

    //calculate the urls' scores
    HashMap<String, double[]> historyScores = new HashMap<>();
    String[] webpagesArr = new String[webpages.size()];
    webpagesArr = webpages.toArray(webpagesArr);
    String[] urlsArr = new String[urls.size()];
    urlsArr = urls.toArray(urlsArr);
    for (String profile : wordvectors.keySet()) {
        Scorer scorer = new Scorer(webpagesArr, urlsArr, wordvectors.get(profile));
        double[] semanticScores = scorer.getSemanticScores();
        double[] relevanceScores = scorer.getRelevanceScores();
        double[] confidenceScores = scorer.getConfidenceScores();
        double[] scores = scoreFormula(semanticScores, relevanceScores, confidenceScores);
        historyScores.put(profile, scores);
    }

    //find the maximum score of every url and get summation of the scores for each profile
    HashMap<String, Double> userProfilesScore = new HashMap<>();
    for (int i = 0; i < webpages.size(); i++) {
        double max = 0.0;
        String info = "undefined";
        for (String profile : historyScores.keySet()) {
            if (historyScores.get(profile)[i] > max) {
                max = historyScores.get(profile)[i];
                info = profile;
            }
        }
        if (!"undefined".equals(info)) {
            Double prevscore = userProfilesScore.get(info);
            userProfilesScore.put(info, (prevscore == null) ? max : prevscore + max);
        }
    }

    //find which profile level has maximum score e.g. if football/level=0 score is greater
    //than football/level=1 score then the user is better described as a football/level=0 user
    HashMap<String, Double> userProfileScores = new HashMap<>();
    HashMap<String, String> userProfileLevels = new HashMap<>();
    for (String s : userProfilesScore.keySet()) {
        String[] info = s.split("/");
        Double prevscore = userProfileScores.get(info[0] + "/" + info[1] + "/");
        if (prevscore == null) {
            userProfileScores.put(info[0] + "/" + info[1] + "/", userProfilesScore.get(s));
            userProfileLevels.put(info[0] + "/" + info[1] + "/", info[2]);
        } else if (userProfilesScore.get(s) > prevscore) {
            userProfileScores.put(info[0] + "/" + info[1] + "/", userProfilesScore.get(s));
            userProfileLevels.put(info[0] + "/" + info[1] + "/", info[2]);
        }
    }

    //put the final profiles together in this simple form: domain/profile/level of expertise and rank them
    Double totalScore = 0.0;
    for (String s : userProfileScores.keySet())
        totalScore += userProfileScores.get(s);

    Map<String, Double> userProfiles = new HashMap<>();
    for (String s : userProfileLevels.keySet())
        userProfiles.put(s + userProfileLevels.get(s), round(userProfileScores.get(s) * 100 / totalScore, 2));

    userProfiles = sortByValue(userProfiles);

    //find page suggestions for every profile
    HashMap<String, ArrayList<String>> pageSuggestions = new HashMap<>();
    for (String profile : userProfiles.keySet()) {
        String path = crawlerOutputPaths.get(profile);
        ArrayList<String> suggestions = getSuggestions(path, numQueriesSuggestion, pagesPerQuerySuggestion,
                history);
        pageSuggestions.put(profile, suggestions);
    }

    //find alternative profiles for every profile and representative queries
    HashMap<String, HashMap<String, ArrayList<String>>> alternativeProfiles = new HashMap<>();
    for (String userProfile : userProfiles.keySet()) {
        String[] userProfileInfo = userProfile.split("/");
        HashMap<String, ArrayList<String>> profileQueries = new HashMap<>();
        for (String profile : wordvectors.keySet()) {
            String[] profileInfo = profile.split("/");
            if (profileInfo[0].equals(userProfileInfo[0]) && profileInfo[1].equals(userProfileInfo[1])
                    && !profileInfo[2].equals(userProfileInfo[2])) {
                String path = crawlerOutputPaths.get(profile);
                ArrayList<String> queries = getQueries(path, numQueriesExample);
                for (int i = 0; i < queries.size(); i++) {
                    String query = queries.get(i);
                    queries.set(i, query.substring(query.lastIndexOf("\\") + 1).replace("-query", "")
                            .replace("+", " "));
                }
                profileQueries.put(profile, queries);
            }
        }
        alternativeProfiles.put(userProfile, profileQueries);
    }

    //prepare JSON response
    JSONObject response = new JSONObject();
    response.put("userID", userID);
    response.put("timestamp", timestamp);
    JSONArray list = new JSONArray();

    for (String profile : userProfiles.keySet()) {
        JSONObject profileInfo = new JSONObject();
        profileInfo.put("profile", profile);
        profileInfo.put("score", userProfiles.get(profile));

        JSONArray temp = new JSONArray();
        ArrayList<String> suggestions = pageSuggestions.get(profile);
        for (String s : suggestions)
            temp.add(s);
        profileInfo.put("suggestions", temp);

        JSONArray alternativesArray = new JSONArray();
        for (String s : alternativeProfiles.get(profile).keySet()) {
            JSONObject alternativeInfo = new JSONObject();
            alternativeInfo.put("alternative", s);
            ArrayList<String> queries = alternativeProfiles.get(profile).get(s);
            JSONArray queriesArray = new JSONArray();
            for (String str : queries) {
                queriesArray.add(str);
            }
            alternativeInfo.put("queries", queriesArray);
            alternativesArray.add(alternativeInfo);
        }

        profileInfo.put("alternatives", alternativesArray);
        list.add(profileInfo);
    }
    response.put("profiles", list);
    System.out.println("JSON response is ready: " + response);

    //delete previous analysis and store results
    DBCollection collection = db.getCollection("history");
    BasicDBObject previous = new BasicDBObject();
    previous.put("userID", userID);
    collection.remove(previous);
    DBObject dbObject = (DBObject) JSON.parse(response.toString());
    collection.insert(dbObject);
    System.out.println("I saved the analysis...");

}

From source file:com.nlp.twitterstream.MongoUtil.java

License:Open Source License

/**
 * Update documents, with all parameters required
 *
 * @param collection
 * @param updateCriteria
 * @param updateAction
 * @param upsertStatus
 *            When true, inserts if the document does not exist
 * @param multiStatus
 *            When true, affects multiple documents
 * @return the WriteResult of the update
 */
public WriteResult updateDocuments(DBCollection collection, BasicDBObject updateCriteria,
        BasicDBObject updateAction, boolean upsertStatus, boolean multiStatus) {

    WriteResult writeRes = collection.update(updateCriteria, updateAction, upsertStatus, multiStatus);

    return writeRes;
}
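
A hypothetical call to the helper above might look like the following; the collection, field names, and values are invented for illustration and are not part of the MongoUtil source.

import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.WriteResult;

import com.nlp.twitterstream.MongoUtil;

public class MongoUtilUsageExample {

    /**
     * Increments a counter field on a single tweet document, creating it if necessary.
     * "mongoUtil" is an instance of the MongoUtil class above and "tweets" an open DBCollection.
     */
    public static void incrementRetweetCount(MongoUtil mongoUtil, DBCollection tweets, String screenName) {
        BasicDBObject updateCriteria = new BasicDBObject("screen_name", screenName);
        BasicDBObject updateAction = new BasicDBObject("$inc", new BasicDBObject("retweet_count", 1));

        // upsert = true: insert the document if it does not exist yet.
        // multi = false: modify at most one matching document.
        WriteResult result = mongoUtil.updateDocuments(tweets, updateCriteria, updateAction, true, false);
        System.out.println("Documents affected: " + result.getN());
    }
}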

From source file:com.petpet.c3po.dao.mongo.MongoPersistenceLayer.java

License:Apache License

/**
 * Inserts or updates all objects that correspond to the given filter. Note,
 * however, that if the object or the passed filter is null, nothing will
 * happen. Note also that the updated document is not replaced; instead, $set is
 * used on the changed fields. This implies that the caller has to make sure
 * the passed object has only the fields that will be updated. All other
 * fields should be null or empty.
 *
 * @param object the object to update.
 * @param f      the filter to apply in order to select the objects that will be
 *               updated.
 */
@Override
public <T extends Model> void update(T object, Filter f) {
    DBObject filter = this.getCachedFilter(f);
    String filterString = filter.toString();
    // if (filter.keySet().isEmpty()) {
    //     LOG.warn("Cannot update an object without a filter");
    //     return;
    // }

    if (object == null) {
        LOG.warn("Cannot update a null object");
        return;
    }

    DBCollection dbCollection = this.getCollection(object.getClass());
    MongoModelSerializer serializer = this.getSerializer(object.getClass());
    DBObject objectToUpdate = serializer.serialize(object);
    BasicDBObject set = new BasicDBObject("$set", objectToUpdate);
    String setString = set.toString();
    WriteResult update = dbCollection.update(filter, set, false, true);
    setResult(update);
}

From source file:com.petpet.c3po.dao.mongo.MongoPersistenceLayer.java

License:Apache License

public DBObject mapReduce(int key, String property, Filter filter, List<Integer> bins) {
    LOG.debug("Starting mapReduce for the following property: {}", property);
    long start = System.currentTimeMillis();
    Property prop = getCache().getProperty(property);
    String propType = prop.getType();
    String map = "";
    String map2 = "";
    String reduce = "";
    if (propType.equals(PropertyType.STRING.toString()) || propType.equals(PropertyType.BOOL.toString())) {
        map = "function() {\n" + "    property = '" + property + "';\n" + "    for (mr in this.metadata){\n"
                + "        metadataRecord=this.metadata[mr];\n"
                + "        if(metadataRecord.property == property)\n" + "        {\n"
                + "            if (metadataRecord.status == 'CONFLICT'){\n" + "                emit({\n"
                + "                    property: property,\n" + "                    value: 'CONFLICT'\n"
                + "                }, 1);\n" + "            } else {\n" + "                emit({\n"
                + "                    property: property,\n"
                + "                    value: metadataRecord.sourcedValues[0].value\n"
                + "                }, 1);\n" + "\n" + "            }\n" + "            return;\n"
                + "        }\n" + "    }\n" + "    emit({\n" + "        property: property,\n"
                + "        value: 'Unknown'\n" + "        }, 1);\n" + "}";

        reduce = "function reduce(key, values) {\n" + "    var res = 0;\n"
                + "    values.forEach(function(v) {\n" + "        res += v;\n" + "    });\n"
                + "    return res;\n" + "}";

    } else if (propType.equals(PropertyType.INTEGER.toString())
            || propType.equals(PropertyType.FLOAT.toString())) {
        map = "function() {\n" + "    property = '" + property + "';\n" + "    thresholds = "
                + getBinThresholds(bins) + ";\n" + "    for (mr in this.metadata){\n"
                + "        metadataRecord=this.metadata[mr];\n"
                + "        if(metadataRecord.property == property){\n"
                + "            if (metadataRecord.status == 'CONFLICT'){\n" + "                emit({\n"
                + "                    property: property,\n" + "                    value: 'CONFLICT'\n"
                + "                }, 1);\n" + "            } else {\n"
                + "                var val=metadataRecord.sourcedValues[0].value;\n"
                + "                var skipped=false;\n" + "                if (thresholds.length > 0)\n"
                + "                    for (t in thresholds){\n"
                + "                        threshold = thresholds[t];  \n"
                + "                        if (val>=threshold[0] && val<=threshold[1]){\n"
                + "                             emit({\n"
                + "                                property: property,\n"
                + "                                value: threshold[0]+'-'+threshold[1]\n"
                + "                            }, 1);\n" + "                             skipped=true;\n"
                + "                             break;\n" + "                         }\n"
                + "                    }\n" + "            }\n" + "            return;\n" + "        }\n"
                + "    }\n" + "    emit({\n" + "        property: property,\n" + "        value: 'Unknown'\n"
                + "        }, 1);\n" + "}";
        reduce = "function reduce(key, values) {\n" + "    var res = 0;\n"
                + "    values.forEach(function(v) {\n" + "        res += v;\n" + "    });\n"
                + "    return res;\n" + "}";

    } else if (propType.equals(PropertyType.DATE.toString())) {
        map = "function() {\n" + "    property = '" + property + "';\n" + "    for (mr in this.metadata){\n"
                + "        metadataRecord=this.metadata[mr];\n"
                + "        if(metadataRecord.property == property){\n"
                + "            if (metadataRecord.status == 'CONFLICT'){\n" + "                emit({\n"
                + "                    property: property,\n" + "                    value: 'CONFLICT'\n"
                + "                }, 1);\n" + "            } else {\n"
                + "                var date = new Date(metadataRecord.sourcedValues[0].value);\n"
                + "                var val=date.getFullYear();\n" + "                emit({\n"
                + "                    property: property,\n" + "                    value: val\n"
                + "                }, 1);\n" + "            }\n" + "            return;\n" + "        }\n"
                + "    }\n" + "    emit({\n" + "        property: property,\n" + "        value: 'Unknown'\n"
                + "        }, 1);\n" + "}";

        reduce = "function reduce(key, values) {\n" + "    var res = 0;\n"
                + "    values.forEach(function(v) {\n" + "        res += v;\n" + "    });\n"
                + "    return res;\n" + "}";

    }
    DBObject query = this.getCachedFilter(filter);
    LOG.debug("Filter query is:\n{}", query);
    String queryString = query.toString();
    DBCollection elmnts = getCollection(Element.class);
    MapReduceCommand cmd = new MapReduceCommand(elmnts, map, reduce, null, INLINE, query);
    MapReduceOutput output = elmnts.mapReduce(cmd);
    // List<BasicDBObject> results = (List<BasicDBObject>) output.getCommandResult().get( "results" );
    Iterator<DBObject> iterator = output.results().iterator();
    List<BasicDBObject> results = new ArrayList<BasicDBObject>();
    while (iterator.hasNext()) {
        results.add((BasicDBObject) iterator.next());

    }

    LOG.debug("MapReduce produced {} results", results.size());
    DBCollection histCollection = this.db.getCollection(TBL_HISTOGRAMS);
    BasicDBObject old = new BasicDBObject("_id", key);
    BasicDBObject res = new BasicDBObject(old.toMap());
    res.put("results", results);
    histCollection.update(old, res, true, false);

    DBCursor cursor = histCollection.find(new BasicDBObject("_id", key));

    if (cursor.count() == 0) {
        return null;
    }
    long end = System.currentTimeMillis();
    LOG.debug("MapReduce took {} seconds", (end - start) / 1000);
    return (DBObject) cursor.next().get("results");
}

From source file:com.petpet.c3po.dao.mongo.MongoPersistenceLayer.java

License:Apache License

public DBObject mapReduceAllValues(int key, String property, Filter filter, List<Integer> bins) {
    LOG.debug("Starting mapReduce for the following property: {}", property);
    long start = System.currentTimeMillis();
    Property prop = getCache().getProperty(property);
    String propType = prop.getType();
    String map = "";
    String map2 = "";
    String reduce = "";
    if (propType.equals(PropertyType.STRING.toString()) || propType.equals(PropertyType.BOOL.toString())) {
        map = "function() {\n" + "    property = '" + property + "';\n" + "    for (mr in this.metadata){\n"
                + "        metadataRecord=this.metadata[mr];\n"
                + "        if(metadataRecord.property == property)\n" + "        {\n"
                + "            for (i in metadataRecord.sourcedValues)\n" + "            {\n"
                + "                sv=metadataRecord.sourcedValues[i];\n" + "                emit({\n"
                + "                    property: property,\n" + "                    value: sv.value\n"
                + "                }, 1);\n" + "\n" + "            }\n" + "            return;\n"
                + "        }\n" + "    }\n" + "    emit({\n" + "        property: property,\n"
                + "        value: 'Unknown'\n" + "        }, 1);\n" + "}";

        reduce = "function reduce(key, values) {\n" + "    var res = 0;\n"
                + "    values.forEach(function(v) {\n" + "        res += v;\n" + "    });\n"
                + "    return res;\n" + "}";

    } else if (propType.equals(PropertyType.INTEGER.toString())
            || propType.equals(PropertyType.FLOAT.toString())) {
        map = "function() {\n" + "    property = '" + property + "';\n" + "    thresholds = "
                + getBinThresholds(bins) + ";\n" + "    for (mr in this.metadata)" + "    {\n"
                + "        metadataRecord=this.metadata[mr];\n"
                + "        if(metadataRecord.property == property)" + "        {\n"
                + "           for (i in metadataRecord.sourcedValues)" + "           {\n"
                + "                sv=metadataRecord.sourcedValues[i];\n"
                + "                var val=sv.value;\n" + "                if (thresholds.length > 0)\n"
                + "                    for (t in thresholds){\n"
                + "                        threshold = thresholds[t];  \n"
                + "                        if (val>=threshold[0] && val<=threshold[1]){\n"
                + "                             emit({\n"
                + "                                property: property,\n"
                + "                                value: threshold[0]+'-'+threshold[1]\n"
                + "                            }, 1);\n" + "                         }\n"
                + "                    }\n" + "            }\n" + "            return;\n" + "         }\n"
                + "    }\n" + "    emit({\n" + "        property: property,\n" + "        value: 'Unknown'\n"
                + "        }, 1);\n" + "}";
        reduce = "function reduce(key, values) {\n" + "    var res = 0;\n"
                + "    values.forEach(function(v) {\n" + "        res += v;\n" + "    });\n"
                + "    return res;\n" + "}";

    } else if (propType.equals(PropertyType.DATE.toString())) {
        map = "function() {\n" + "    property = '" + property + "';\n" + "    for (mr in this.metadata){\n"
                + "        metadataRecord=this.metadata[mr];\n"
                + "        if(metadataRecord.property == property){\n"
                + "           for (i in metadataRecord.sourcedValues){\n"
                + "               sv=metadataRecord.sourcedValues[i];\n"
                + "               var date = new Date(sv.value);\n"
                + "               var val=date.getFullYear();\n" + "               emit({\n"
                + "                    property: property,\n" + "                    value: val\n"
                + "               }, 1);\n" + "            }\n" + "            return;\n" + "        }\n"
                + "    }\n" + "    emit({\n" + "        property: property,\n" + "        value: 'Unknown'\n"
                + "        }, 1);\n" + "}";

        reduce = "function reduce(key, values) {\n" + "    var res = 0;\n"
                + "    values.forEach(function(v) {\n" + "        res += v;\n" + "    });\n"
                + "    return res;\n" + "}";

    }
    DBObject query = this.getCachedFilter(filter);
    LOG.debug("Filter query is:\n{}", query);
    String queryString = query.toString();
    DBCollection elmnts = getCollection(Element.class);
    MapReduceCommand cmd = new MapReduceCommand(elmnts, map, reduce, null, INLINE, query);
    MapReduceOutput output = elmnts.mapReduce(cmd);
    // List<BasicDBObject> results = (List<BasicDBObject>) output.getCommandResult().get( "results" );
    Iterator<DBObject> iterator = output.results().iterator();
    List<BasicDBObject> results = new ArrayList<BasicDBObject>();
    while (iterator.hasNext()) {
        results.add((BasicDBObject) iterator.next());

    }

    LOG.debug("MapReduce produced {} results", results.size());
    DBCollection histCollection = this.db.getCollection(TBL_HISTOGRAMS);
    BasicDBObject old = new BasicDBObject("_id", key);
    BasicDBObject res = new BasicDBObject(old.toMap());
    res.put("results", results);
    histCollection.update(old, res, true, false);

    DBCursor cursor = histCollection.find(new BasicDBObject("_id", key));

    if (cursor.count() == 0) {
        return null;
    }
    long end = System.currentTimeMillis();
    LOG.debug("MapReduce took {} seconds", (end - start) / 1000);
    return (DBObject) cursor.next().get("results");
}

From source file:com.petpet.c3po.dao.mongo.MongoPersistenceLayer.java

License:Apache License

public DBObject mapReduceStats(int key, String property, Filter filter) {
    LOG.debug("Starting mapReduceStats for the following property: {}", property);
    long start = System.currentTimeMillis();
    Property prop = getCache().getProperty(property);
    String propType = prop.getType();
    String map = "";
    String reduce = "";
    String finalize = "";
    if (propType.equals(PropertyType.INTEGER.toString()) || propType.equals(PropertyType.FLOAT.toString())) {
        map = "function() {\n" + "    property = '" + property + "';\n" + "    for (mr in this.metadata){\n"
                + "        metadataRecord=this.metadata[mr];\n"
                + "        if(metadataRecord.property == property){\n" + "            {\n"
                + "                emit({\n" + "                    property: property,\n"
                + "                    value: property\n" + "                }, \n" + "                {\n"
                + "                    sum: metadataRecord.sourcedValues[0].value,\n"
                + "                    min: metadataRecord.sourcedValues[0].value,\n"
                + "                    max: metadataRecord.sourcedValues[0].value,\n"
                + "                    count: 1,\n" + "                    diff: 0\n" + "                }\n"
                + "                )\n" + "            }\n" + "            return;\n" + "        }\n"
                + "    }\n" + "    emit({\n" + "        property: property,\n" + "        value: 'Unknown'\n"
                + "        }, 1);\n" + "}\n";
        reduce = "function reduce(key, values) {\n" + "var a = values[0];\n"
                + "        for (var i = 1; i < values.length; i++) {\n" + "            var b = values[i];\n"
                + "            var delta = a.sum / a.count - b.sum / b.count;\n"
                + "            var weight = (a.count * b.count) / (a.count + b.count);\n"
                + "            a.diff += b.diff + delta * delta * weight;\n"
                + "            a.sum = b.sum*1+ a.sum*1;\n" + "            a.count += b.count;\n"
                + "            a.min = Math.min(a.min, b.min);\n"
                + "            a.max = Math.max(a.max, b.max);\n" + "        }\n" + "return a;" + "}";
        finalize = "function finalize(key, value) {\n" + "    value.avg = value.sum / value.count;\n"
                + "    value.variance = value.diff / value.count;\n"
                + "    value.stddev = Math.sqrt(value.variance);\n" + "    return value;\n" + "}";

    }
    DBObject query = this.getCachedFilter(filter);
    LOG.debug("filter query is:\n{}", query);
    DBCollection elmnts = getCollection(Element.class);
    MapReduceCommand cmd = new MapReduceCommand(elmnts, map, reduce, null, INLINE, query);
    cmd.setFinalize(finalize);
    MapReduceOutput output = elmnts.mapReduce(cmd);

    //List<BasicDBObject> results = (List<BasicDBObject>) output.getCommandResult().get( "results" );
    Iterator<DBObject> iterator = output.results().iterator();
    List<BasicDBObject> results = new ArrayList<BasicDBObject>();
    while (iterator.hasNext()) {
        results.add((BasicDBObject) iterator.next());

    }

    LOG.debug("MapReduce produced {} results", results.size());
    DBCollection histCollection = this.db.getCollection(TBL_HISTOGRAMS);
    BasicDBObject old = new BasicDBObject("_id", key);
    BasicDBObject res = new BasicDBObject(old.toMap());
    res.put("results", results);
    histCollection.update(old, res, true, false);

    DBCursor cursor = histCollection.find(new BasicDBObject("_id", key));

    if (cursor.count() == 0) {
        return null;
    }
    long end = System.currentTimeMillis();
    LOG.debug("The map-reduce job took {} seconds", (end - start) / 1000);
    return (DBObject) cursor.next().get("results");
}

From source file:com.ricardolorenzo.identity.user.impl.UserIdentityManagerMongoDB.java

License:Open Source License

/**
 * All the scripts should have the following format:
 *
 * {
 *    database.collection: {
 *        operation: insert|update|find|aggregate|delete
 *        query: {}
 *    }
 * }
 *
 * For update operations, you should specify the following:
 *
 * query: {
 *     find: {}
 *     update: {}
 * }
 */
private List<DBObject> runQueryScript(final String scriptType, final Map<String, Object[]> attributes)
        throws IdentityException {
    List<DBObject> results = new ArrayList<>();
    try {
        DB database = mongoClient.getDB(this.properties.getProperty("mongodb.database"));
        final ScriptCollection sc = getScriptCollection();
        if (sc.hasScript(scriptType)) {
            final String scriptContent = sc.getScript(scriptType);
            String query = createQueryFromScript(scriptContent, attributes);
            DBObject collectionOperation = DBObject.class.cast(JSON.parse(query));

            for (String collection : collectionOperation.keySet()) {
                if (!database.collectionExists(collection)) {
                    throw new IdentityException("collection [" + collection + "] does not exist");
                }

                DBObject dbObject = DBObject.class.cast(collectionOperation.get(collection));
                if (!dbObject.containsField("operation")) {
                    throw new IdentityException("operation field not specified");
                }

                String dbOperation = String.class.cast(dbObject.get("operation")).toLowerCase();
                if (!OPERATIONS.contains(dbOperation)) {
                    throw new IdentityException("operation [" + dbOperation + "] not supported");
                }

                DBObject dbQuery = DBObject.class.cast(dbObject.get("query"));
                if (dbQuery == null) {
                    throw new IdentityException("query field not specified");
                }

                DBCollection coll = database.getCollection(collection);
                switch (dbOperation) {
                case "insert": {
                    coll.insert(dbQuery);
                    break;
                }
                case "update": {
                    if (!dbObject.containsField("find")) {
                        throw new IdentityException("find field not found inside the update operation");
                    }
                    if (!dbObject.containsField("update")) {
                        throw new IdentityException("update field not found inside the update operation");
                    }
                    DBObject dbUpdateFind = DBObject.class.cast(dbQuery.get("find"));
                    DBObject dbUpdateFields = DBObject.class.cast(dbQuery.get("update"));
                    coll.update(dbUpdateFind, dbUpdateFields, false, false);
                    break;
                }
                case "delete": {
                    coll.remove(dbQuery);
                    break;
                }
                case "find": {
                    DBCursor cursor = coll.find(dbQuery);
                    while (cursor.hasNext()) {
                        results.add(cursor.next());
                    }
                    break;
                }
                case "aggregate": {
                    List<DBObject> aggregate = new ArrayList<DBObject>();
                    aggregate.add(dbQuery);
                    for (DBObject o : coll.aggregate(aggregate).results()) {
                        results.add(o);
                    }
                    break;
                }
                }
            }
            return results;
        }
    } catch (final NoSuchAlgorithmException e) {
        throw new IdentityException(e.getMessage());
    } finally {
        /**
         * TODO close cursors
         */
    }
    return null;
}

From source file:com.softinstigate.restheart.db.CollectionDAO.java

License:Open Source License

/**
 * Upsert the collection properties.
 *
 * @param dbName the database name of the collection
 * @param collName the collection name
 * @param content the new collection properties
 * @param etag the entity tag; it must match to allow the actual write (otherwise an
 * HTTP error code is returned)
 * @param updating true if updating an existing document
 * @param patching true if using patch semantics (update only the specified fields)
 * @return the HttpStatus code to set in the HTTP response
 */
public static int upsertCollection(String dbName, String collName, DBObject content, ObjectId etag,
        boolean updating, boolean patching) {
    DB db = DBDAO.getDB(dbName);

    DBCollection coll = db.getCollection(collName);

    if (patching && !updating) {
        return HttpStatus.SC_NOT_FOUND;
    }

    if (updating) {
        if (etag == null) {
            return HttpStatus.SC_CONFLICT;
        }

        BasicDBObject idAndEtagQuery = new BasicDBObject("_id", "_properties");
        idAndEtagQuery.append("_etag", etag);

        if (coll.count(idAndEtagQuery) < 1) {
            return HttpStatus.SC_PRECONDITION_FAILED;
        }
    }

    ObjectId timestamp = new ObjectId();
    Instant now = Instant.ofEpochSecond(timestamp.getTimestamp());

    if (content == null) {
        content = new BasicDBObject();
    }

    content.removeField("_id"); // make sure we don't change this field

    if (updating) {
        content.removeField("_created_on"); // don't allow updating this field
        content.put("_etag", timestamp);
    } else {
        content.put("_id", "_properties");
        content.put("_created_on", now.toString());
        content.put("_etag", timestamp);
    }

    if (patching) {
        coll.update(PROPS_QUERY, new BasicDBObject("$set", content), true, false);
        return HttpStatus.SC_OK;
    } else {
        // we use findAndModify to get the @created_on field value from the existing properties document
        // we need to put this field back using a second update 
        // it is not possible in a single update even using $setOnInsert update operator
        // in this case we need to provide the other data using $set operator and this makes it a partial update (patch semantic) 

        DBObject old = coll.findAndModify(PROPS_QUERY, fieldsToReturn, null, false, content, false, true);

        if (old != null) {
            Object oldTimestamp = old.get("_created_on");

            if (oldTimestamp == null) {
                oldTimestamp = now.toString();
                logger.warn("properties of collection {} had no _created_on field; set to now",
                        coll.getFullName());
            }

            // need to re-add the _created_on field
            BasicDBObject createdContent = new BasicDBObject("_created_on", "" + oldTimestamp);
            createdContent.markAsPartialObject();
            coll.update(PROPS_QUERY, new BasicDBObject("$set", createdContent), true, false);

            return HttpStatus.SC_OK;
        } else {
            // need to re-add the _created_on field
            BasicDBObject createdContent = new BasicDBObject("_created_on", now.toString());
            createdContent.markAsPartialObject();
            coll.update(PROPS_QUERY, new BasicDBObject("$set", createdContent), true, false);

            initDefaultIndexes(coll);

            return HttpStatus.SC_CREATED;
        }
    }
}
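
Finally, a hypothetical caller of upsertCollection above; the database name, collection name, and properties are made up, and passing updating = false with a null etag takes the create path shown in the source.

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

import com.softinstigate.restheart.db.CollectionDAO;

public class CollectionDAOUsageExample {

    /**
     * Creates the properties document of a collection that does not have one yet.
     * Database name, collection name, and properties are invented for illustration.
     */
    public static void createProperties() {
        DBObject content = new BasicDBObject("description", "a test collection");

        // updating = false and patching = false: plain create; the etag is not checked on this path.
        int status = CollectionDAO.upsertCollection("testdb", "testcoll", content, null, false, false);

        // 201 (Created) is returned when a new properties document is inserted.
        System.out.println("HTTP status returned: " + status);
    }
}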