Example usage for com.mongodb.util JSON.parse

List of usage examples for com.mongodb.util JSON.parse

Introduction

On this page you can find example usages of com.mongodb.util JSON.parse.

Prototype

public static Object parse(final String jsonString) 

Document

Parses a JSON string and returns a corresponding Java object.
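
As a quick illustrative sketch (not taken from the projects listed below; the JSON literals are made up), parse returns a DBObject for a JSON object and a BasicDBList for a JSON array:

import com.mongodb.BasicDBList;
import com.mongodb.DBObject;
import com.mongodb.util.JSON;

public class JsonParseExample {
    public static void main(String[] args) {
        // A JSON object is parsed into a DBObject (concretely a BasicDBObject).
        DBObject document = (DBObject) JSON.parse("{ \"name\" : \"Alice\", \"age\" : 30 }");
        System.out.println(document.get("name")); // prints: Alice

        // A JSON array is parsed into a BasicDBList.
        BasicDBList numbers = (BasicDBList) JSON.parse("[ 1, 2, 3 ]");
        System.out.println(numbers.size()); // prints: 3
    }
}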

Usage

From source file:com.jaspersoft.mongodb.importer.MongoDbSimpleImporter.java

License:Open Source License

private void populate(MongoDbConnection connection, String collectionName, Resource scriptResource)
        throws JRException {
    DBCollection collection = null;
    DB mongoDatabase = null;
    try {
        mongoDatabase = connection.getMongoDatabase();
        if (!mongoDatabase.collectionExists(collectionName)) {
            logger.info("Collection \"" + collectionName + "\" doesn't exist");
            DBObject options = new BasicDBObject("capped", false);
            collection = mongoDatabase.createCollection(collectionName, options);
        } else {
            logger.info("Collection \"" + collectionName + "\" exists");
            collection = mongoDatabase.getCollectionFromString(collectionName);
            collection.drop();
            logger.info("Collection \"" + collectionName + "\" was cleaned up");
        }
    } catch (MongoException e) {
        logger.error(e);
    }

    if (mongoDatabase == null) {
        throw new JRException(
                "Failed connection to mongoDB database: " + connection.getMongoURIObject().getDatabase());
    }

    FileInputStream fileInputStream = null;
    InputStreamReader inputStreamReader = null;
    BufferedReader reader = null;
    try {
        inputStreamReader = new InputStreamReader(scriptResource.getInputStream());
        reader = new BufferedReader(inputStreamReader);
        StringBuilder stringBuilder = new StringBuilder();
        String currentLine;
        while ((currentLine = reader.readLine()) != null) {
            stringBuilder.append(currentLine);
        }
        Object parseResult = JSON.parse(stringBuilder.toString());
        if (!(parseResult instanceof BasicDBList)) {
            throw new JRException(
                    "Unsupported type: " + parseResult.getClass().getName() + ". It must be a list");
        }
        BasicDBList list = (BasicDBList) parseResult;
        List<DBObject> objectsList = new ArrayList<DBObject>();
        for (int index = 0; index < list.size(); index++) {
            objectsList.add((DBObject) list.get(index));
        }
        collection.insert(objectsList);
        logger.info("Collection count: " + collection.count() + "\nSuccessfully populated collection: "
                + collectionName);
    } catch (UnsupportedEncodingException e) {
        logger.error(e);
    } catch (IOException e) {
        logger.error(e);
    } finally {
        if (fileInputStream != null) {
            try {
                fileInputStream.close();
            } catch (IOException e) {
                logger.error(e);
            }
        }
        if (inputStreamReader != null) {
            try {
                inputStreamReader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

From source file:com.jaspersoft.mongodb.query.MongoDbQueryWrapper.java

License:Open Source License

public void processQuery(String queryString) throws JRException {
    logger.info("Processing mongoDB query");
    if (queryString.startsWith("\"")) {
        queryString = queryString.substring(1, queryString.length());
    }
    if (queryString.endsWith("\"")) {
        queryString = queryString.substring(0, queryString.length() - 1);
    }
    Object parseResult = JSON.parse(queryString);
    if (logger.isDebugEnabled()) {
        logger.debug("Query: " + queryString);
    }
    if (!(parseResult instanceof DBObject)) {
        throw new JRException("Unsupported type: " + parseResult.getClass().getName());
    }
    queryObject = (DBObject) parseResult;
    fixQueryObject(queryObject, parameters);

    if (queryObject.containsField(RUN_COMMAND_KEY)) {
        runCommand(queryObject.removeField(RUN_COMMAND_KEY));
    } else {
        createIterator();
    }

    if (queryObject.containsField(ROWS_TO_PROCESS_KEY)) {
        Integer value = processInteger(queryObject.get(ROWS_TO_PROCESS_KEY));
        if (value != null) {
            rowsToProcess = value.intValue();
        }
    }
    if (rowsToProcess == 0) {
        rowsToProcess = Integer.MAX_VALUE;
    }
}

From source file:com.jaspersoft.mongodb.query.MongoDbQueryWrapper.java

License:Open Source License

public static void main(String[] args) {
    MongoDbConnection c = null;
    ReportTest reports = new ReportTest();
    try {
        c = new MongoDbConnection("mongodb://localhost:27017/test", null, null);
        reports.test();
        Object cmd = JSON.parse("{\n" + "    aggregate : \"accounts\",\n" + "    pipeline : [\n" + "      {\n"
                + "        $project : {\n" + "          billing_address_street : 1,\n"
                + "          billing_address_country : 1\n" + "        }\n" + "      }\n" + "    ]\n" + "  }");
        CommandResult result = c.getMongoDatabase().command((DBObject) cmd);
        System.out.println(result.keySet());
        System.out.println(result.get("ok") + " - " + result.ok());
        System.out.println(result.get("result").getClass().getName());
    } catch (Exception e) {
        logger.error(e);
    } finally {
        if (c != null) {
            c.close();
        }
    }
}

From source file:com.kurento.kmf.repository.internal.repoimpl.filesystem.ItemsMetadata.java

License:Open Source License

private void loadItemsMetadata() throws IOException {
    itemsMetadata = new ConcurrentHashMap<>();
    DBObject contents = (DBObject) JSON.parse(loadFileAsString());
    if (contents != null) {
        for (String key : contents.keySet()) {
            try {
                DBObject metadata = (DBObject) contents.get(key);
                Map<String, String> map = new HashMap<>();
                for (String metadataKey : metadata.keySet()) {
                    map.put(metadataKey, metadata.get(metadataKey).toString());
                }
                itemsMetadata.put(key, map);
            } catch (ClassCastException e) {
                log.warn("Attribute '{}' should be an object", key);
            }
        }
    }
}

From source file:com.kurento.kmf.repository.internal.repoimpl.mongo.MongoRepository.java

License:Open Source License

private List<RepositoryItem> findRepositoryItemsByQuery(String query) {
    List<GridFSDBFile> files = gridFS.find((DBObject) JSON.parse(query));

    List<RepositoryItem> repositoryItems = new ArrayList<>();
    for (GridFSDBFile file : files) {
        repositoryItems.add(createRepositoryItem(file));
    }

    return repositoryItems;
}

From source file:com.kurento.kmf.repository.internal.repoimpl.mongo.MongoRepositoryItem.java

License:Open Source License

protected void refreshAttributesOnClose() {
    DBObject query = (DBObject) JSON.parse("{ _id : '" + getId() + "' }");
    dbFile = ((MongoRepository) repository).getGridFS().findOne(query);
    state = State.STORED;
    attributes.setContentLength(dbFile.getLength());
}

From source file:com.machinelinking.storage.mongodb.MongoJSONStorageConnection.java

License:Apache License

@Override
public MongoDocument createDocument(WikiPage page, TokenBuffer buffer) throws JSONStorageConnectionException {
    final DBObject dbNode = (DBObject) JSON.parse(JSONUtils.bufferToJSONString(buffer, false)); //TODO: improve this serialization
    return new MongoDocument(page.getId(), page.getRevId(), page.getTitle(), dbNode);
}

From source file:com.mingo.query.util.QueryUtils.java

License:Apache License

/**
 * Checks whether the query has a valid JSON format.
 *
 * @param query the query string
 * @return true if the query is valid, otherwise false
 */
public static boolean validate(String query) {
    boolean valid = true;
    try {
        JSON.parse(query);
    } catch (JSONParseException e) {
        valid = false;
    }
    return valid;
}

From source file:com.mycompany.data.utils.Utils.java

public static DBObject convertPOJOtoDBObject(Object pojo) {
    String json = new Gson().toJson(pojo);
    return (DBObject) JSON.parse(json);
}

From source file:com.mythesis.userbehaviouranalysis.ProfileAnalysis.java

License:Apache License

/**
 * Finds the profiles that match the user's interests given his web history.
 * @param userID the user's id
 * @param history the user's web history
 * @param input a txt file that contains the necessary parameters
 */
public void perform(String userID, String[] history, File input) {

    System.out.println("total urls = " + history.length);
    //default parameters
    //number of random queries for each profile
    int numQueriesSuggestion = 5;
    //number of random webpages per query to suggest - total number of suggestions = 
    // numQueriesSuggestion*pagesPerQuerySuggestion
    int pagesPerQuerySuggestion = 1;
    //number of random queries to return as examples for alternatives profiles
    int numQueriesExample = 2;

    //we get the current date/time
    DateTime current = new DateTime();
    DateTimeFormatter fmt = DateTimeFormat.forPattern("dd/MM/yyyy HH:mm");
    String timestamp = fmt.print(current);

    //update user info - the results will be stored when the last analysis is performed
    Mongo mongo = new Mongo("localhost", 27017);
    DB db = mongo.getDB("profileAnalysis");
    DBCollection userinfo = db.getCollection("userinfo");
    BasicDBObject newDocument = new BasicDBObject();
    newDocument.put("$set", new BasicDBObject().append("timestamp", timestamp));
    BasicDBObject searchQuery = new BasicDBObject();
    searchQuery.put("userID", userID);
    userinfo.update(searchQuery, newDocument, true, false);

    //read the necessary parameters
    Utils utils = new Utils();
    utils.readInput(input);
    HashMap<String, ArrayList<String>> wordvectors = utils.wordvectors;
    HashMap<String, String> crawlerOutputPaths = utils.crawlerOutputPaths;

    //get the urls' content
    ArrayList<String> webpages = new ArrayList<>();
    ArrayList<String> urls = new ArrayList<>();
    for (int i = 0; i < history.length; i++) {
        WebParser pageParser = new WebParser(history[i]);
        pageParser.parse();
        String content = pageParser.getContent();
        if ("".equals(content) || content == null)
            continue;
        webpages.add(content);
        urls.add(history[i]);
    }

    //calculate the urls' scores
    HashMap<String, double[]> historyScores = new HashMap<>();
    String[] webpagesArr = new String[webpages.size()];
    webpagesArr = webpages.toArray(webpagesArr);
    String[] urlsArr = new String[urls.size()];
    urlsArr = urls.toArray(urlsArr);
    for (String profile : wordvectors.keySet()) {
        Scorer scorer = new Scorer(webpagesArr, urlsArr, wordvectors.get(profile));
        double[] semanticScores = scorer.getSemanticScores();
        double[] relevanceScores = scorer.getRelevanceScores();
        double[] confidenceScores = scorer.getConfidenceScores();
        double[] scores = scoreFormula(semanticScores, relevanceScores, confidenceScores);
        historyScores.put(profile, scores);
    }

    //find the maximum score of every url and sum the scores for each profile
    HashMap<String, Double> userProfilesScore = new HashMap<>();
    for (int i = 0; i < webpages.size(); i++) {
        double max = 0.0;
        String info = "undefined";
        for (String profile : historyScores.keySet()) {
            if (historyScores.get(profile)[i] > max) {
                max = historyScores.get(profile)[i];
                info = profile;
            }
        }
        if (!"undefined".equals(info)) {
            Double prevscore = userProfilesScore.get(info);
            userProfilesScore.put(info, (prevscore == null) ? max : prevscore + max);
        }
    }

    //find which profile level has maximum score e.g. if football/level=0 score is greater
    //than football/level=1 score then the user is better described as a football/level=0 user
    HashMap<String, Double> userProfileScores = new HashMap<>();
    HashMap<String, String> userProfileLevels = new HashMap<>();
    for (String s : userProfilesScore.keySet()) {
        String[] info = s.split("/");
        Double prevscore = userProfileScores.get(info[0] + "/" + info[1] + "/");
        if (prevscore == null) {
            userProfileScores.put(info[0] + "/" + info[1] + "/", userProfilesScore.get(s));
            userProfileLevels.put(info[0] + "/" + info[1] + "/", info[2]);
        } else if (userProfilesScore.get(s) > prevscore) {
            userProfileScores.put(info[0] + "/" + info[1] + "/", userProfilesScore.get(s));
            userProfileLevels.put(info[0] + "/" + info[1] + "/", info[2]);
        }
    }

    //put the final profiles together in this simple form: domain/profile/level of expertise and rank them
    Double totalScore = 0.0;
    for (String s : userProfileScores.keySet())
        totalScore += userProfileScores.get(s);

    Map<String, Double> userProfiles = new HashMap<>();
    for (String s : userProfileLevels.keySet())
        userProfiles.put(s + userProfileLevels.get(s), round(userProfileScores.get(s) * 100 / totalScore, 2));

    userProfiles = sortByValue(userProfiles);

    //find page suggestions for every profile
    HashMap<String, ArrayList<String>> pageSuggestions = new HashMap<>();
    for (String profile : userProfiles.keySet()) {
        String path = crawlerOutputPaths.get(profile);
        ArrayList<String> suggestions = getSuggestions(path, numQueriesSuggestion, pagesPerQuerySuggestion,
                history);
        pageSuggestions.put(profile, suggestions);
    }

    //find alternative profiles for every profile and representative queries
    HashMap<String, HashMap<String, ArrayList<String>>> alternativeProfiles = new HashMap<>();
    for (String userProfile : userProfiles.keySet()) {
        String[] userProfileInfo = userProfile.split("/");
        HashMap<String, ArrayList<String>> profileQueries = new HashMap<>();
        for (String profile : wordvectors.keySet()) {
            String[] profileInfo = profile.split("/");
            if (profileInfo[0].equals(userProfileInfo[0]) && profileInfo[1].equals(userProfileInfo[1])
                    && !profileInfo[2].equals(userProfileInfo[2])) {
                String path = crawlerOutputPaths.get(profile);
                ArrayList<String> queries = getQueries(path, numQueriesExample);
                for (int i = 0; i < queries.size(); i++) {
                    String query = queries.get(i);
                    queries.set(i, query.substring(query.lastIndexOf("\\") + 1).replace("-query", "")
                            .replace("+", " "));
                }
                profileQueries.put(profile, queries);
            }
        }
        alternativeProfiles.put(userProfile, profileQueries);
    }

    //prepare JSON response
    JSONObject response = new JSONObject();
    response.put("userID", userID);
    response.put("timestamp", timestamp);
    JSONArray list = new JSONArray();

    for (String profile : userProfiles.keySet()) {
        JSONObject profileInfo = new JSONObject();
        profileInfo.put("profile", profile);
        profileInfo.put("score", userProfiles.get(profile));

        JSONArray temp = new JSONArray();
        ArrayList<String> suggestions = pageSuggestions.get(profile);
        for (String s : suggestions)
            temp.add(s);
        profileInfo.put("suggestions", temp);

        JSONArray alternativesArray = new JSONArray();
        for (String s : alternativeProfiles.get(profile).keySet()) {
            JSONObject alternativeInfo = new JSONObject();
            alternativeInfo.put("alternative", s);
            ArrayList<String> queries = alternativeProfiles.get(profile).get(s);
            JSONArray queriesArray = new JSONArray();
            for (String str : queries) {
                queriesArray.add(str);
            }
            alternativeInfo.put("queries", queriesArray);
            alternativesArray.add(alternativeInfo);
        }

        profileInfo.put("alternatives", alternativesArray);
        list.add(profileInfo);
    }
    response.put("profiles", list);
    System.out.println("JSON response is ready: " + response);

    //delete previous analysis and store results
    DBCollection collection = db.getCollection("history");
    BasicDBObject previous = new BasicDBObject();
    previous.put("userID", userID);
    collection.remove(previous);
    DBObject dbObject = (DBObject) JSON.parse(response.toString());
    collection.insert(dbObject);
    System.out.println("I saved the analysis...");

}