Example usage for com.mongodb Mongo Mongo

List of usage examples for com.mongodb Mongo Mongo

Introduction

On this page you can find example usage for the com.mongodb Mongo constructor.

Prototype

Mongo(final MongoClientURI mongoURI, @Nullable final MongoDriverInformation mongoDriverInformation) 
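The prototype above is one overload; every snippet in the Usage section below calls the simpler Mongo(String host, int port) overload instead. As a point of reference, here is a minimal, self-contained sketch of connecting and reading a collection, assuming a local mongod on the default port and the legacy driver API in which MongoClient extends Mongo; the database and collection names are illustrative only:

import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;

public class MongoUsageSketch {
    public static void main(String[] args) {
        // Build the client from a connection string; MongoClient extends Mongo,
        // so the legacy getDB/getCollection calls used in the snippets below apply here as well.
        MongoClientURI uri = new MongoClientURI("mongodb://localhost:27017");
        MongoClient mongo = new MongoClient(uri);
        try {
            DB db = mongo.getDB("exampleDb");                                  // illustrative database name
            DBCollection collection = db.getCollection("exampleCollection");  // illustrative collection name
            System.out.println("documents: " + collection.count());
        } finally {
            mongo.close(); // releases the underlying connection pool
        }
    }
}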

Usage

From source file:com.mycompany.Farmerama.contactWithOther.java

public contactWithOther(String collectionName) {
    Mongo mongo = new Mongo("localhost", 27017);
    db = mongo.getDB("PrivateMessages");
    PrivateMessages = db.getCollection(collectionName);

}

From source file:com.mycompany.Farmerama.contactWithOther.java

public contactWithOther(String thisname, String othername) {
    Mongo mongo = new Mongo("localhost", 27017);
    db = mongo.getDB("PrivateMessages");
    PrivateMessages = db.getCollection(thisname + "with" + othername);

}

From source file:com.mycompany.Farmerama.CreateOfferPage.java

public CreateOfferPage() {
    Mongo mongo = new Mongo("localhost", 27017);
    dbS = mongo.getDB("Sell");
    offerS = dbS.getCollection("offer");
    dbB = mongo.getDB("Buy");
    offerB = dbB.getCollection("offer");
}

From source file:com.mycompany.Farmerama.getAllAccounts.java

public getAllAccounts() {
    Mongo mongo = new Mongo("localhost", 27017);
    db = mongo.getDB("accounts");
    account = db.getCollection("account");

}

From source file:com.mycompany.Farmerama.LoginCheckUserToDb.java

public LoginCheckUserToDb() {
    Mongo mongo = new Mongo("localhost", 27017);
    db = mongo.getDB("accounts");
    account = db.getCollection("account");
}

From source file:com.mycompany.Farmerama.PhotoAlbum.java

public PhotoAlbum(String username) {
    Mongo mongo = new Mongo("localhost", 27017);
    db = mongo.getDB("album");
    photo = db.getCollection(username);
    //initComponents();
    this.userName = username;
}

From source file:com.mycompany.Farmerama.SignUpVal.java

public SignUpVal() {
    Mongo mongo = new Mongo("localhost", 27017);
    db = mongo.getDB("accounts");
    account = db.getCollection("account");
}

From source file:com.mythesis.userbehaviouranalysis.ProfileAnalysis.java

License:Apache License

/**
 * finds the profiles that match user's interests given his web history
 * @param userID the user's id
 * @param history the user's web history
 * @param input a txt file that contains the necessary parameters
 */
public void perform(String userID, String[] history, File input) {

    System.out.println("total urls = " + history.length);
    //default parameters
    //number of random queries for each profile
    int numQueriesSuggestion = 5;
    //number of random webpages per query to suggest - total number of suggestions = 
    // numQueriesSuggestion*pagesPerQuerySuggestion
    int pagesPerQuerySuggestion = 1;
    //number of random queries to return as examples for alternative profiles
    int numQueriesExample = 2;

    //we get the current date/time
    DateTime current = new DateTime();
    DateTimeFormatter fmt = DateTimeFormat.forPattern("dd/MM/yyyy HH:mm");
    String timestamp = fmt.print(current);

    //update user info - record the timestamp of the latest analysis
    Mongo mongo = new Mongo("localhost", 27017);
    DB db = mongo.getDB("profileAnalysis");
    DBCollection userinfo = db.getCollection("userinfo");
    BasicDBObject newDocument = new BasicDBObject();
    newDocument.put("$set", new BasicDBObject().append("timestamp", timestamp));
    BasicDBObject searchQuery = new BasicDBObject();
    searchQuery.put("userID", userID);
    userinfo.update(searchQuery, newDocument, true, false);

    //read the necessary parameters
    Utils utils = new Utils();
    utils.readInput(input);
    HashMap<String, ArrayList<String>> wordvectors = utils.wordvectors;
    HashMap<String, String> crawlerOutputPaths = utils.crawlerOutputPaths;

    //get the urls' content
    ArrayList<String> webpages = new ArrayList<>();
    ArrayList<String> urls = new ArrayList<>();
    for (int i = 0; i < history.length; i++) {
        WebParser pageParser = new WebParser(history[i]);
        pageParser.parse();
        String content = pageParser.getContent();
        if ("".equals(content) || content == null)
            continue;
        webpages.add(content);
        urls.add(history[i]);
    }

    //calculate the urls' scores
    HashMap<String, double[]> historyScores = new HashMap<>();
    String[] webpagesArr = new String[webpages.size()];
    webpagesArr = webpages.toArray(webpagesArr);
    String[] urlsArr = new String[urls.size()];
    urlsArr = urls.toArray(urlsArr);
    for (String profile : wordvectors.keySet()) {
        Scorer scorer = new Scorer(webpagesArr, urlsArr, wordvectors.get(profile));
        double[] semanticScores = scorer.getSemanticScores();
        double[] relevanceScores = scorer.getRelevanceScores();
        double[] confidenceScores = scorer.getConfidenceScores();
        double[] scores = scoreFormula(semanticScores, relevanceScores, confidenceScores);
        historyScores.put(profile, scores);
    }

    //find the maximum score of every url and get summation of the scores for each profile
    HashMap<String, Double> userProfilesScore = new HashMap<>();
    for (int i = 0; i < webpages.size(); i++) {
        double max = 0.0;
        String info = "undefined";
        for (String profile : historyScores.keySet()) {
            if (historyScores.get(profile)[i] > max) {
                max = historyScores.get(profile)[i];
                info = profile;
            }
        }
        if (!"undefined".equals(info)) {
            Double prevscore = userProfilesScore.get(info);
            userProfilesScore.put(info, (prevscore == null) ? max : prevscore + max);
        }
    }

    //find which profile level has maximum score e.g. if football/level=0 score is greater
    //than football/level=1 score then the user is better described as a football/level=0 user
    HashMap<String, Double> userProfileScores = new HashMap<>();
    HashMap<String, String> userProfileLevels = new HashMap<>();
    for (String s : userProfilesScore.keySet()) {
        String[] info = s.split("/");
        Double prevscore = userProfileScores.get(info[0] + "/" + info[1] + "/");
        if (prevscore == null) {
            userProfileScores.put(info[0] + "/" + info[1] + "/", userProfilesScore.get(s));
            userProfileLevels.put(info[0] + "/" + info[1] + "/", info[2]);
        } else if (userProfilesScore.get(s) > prevscore) {
            userProfileScores.put(info[0] + "/" + info[1] + "/", userProfilesScore.get(s));
            userProfileLevels.put(info[0] + "/" + info[1] + "/", info[2]);
        }
    }

    //put the final profiles together in this simple form: domain/profile/level of expertise and rank them
    Double totalScore = 0.0;
    for (String s : userProfileScores.keySet())
        totalScore += userProfileScores.get(s);

    Map<String, Double> userProfiles = new HashMap<>();
    for (String s : userProfileLevels.keySet())
        userProfiles.put(s + userProfileLevels.get(s), round(userProfileScores.get(s) * 100 / totalScore, 2));

    userProfiles = sortByValue(userProfiles);

    //find page suggestions for every profile
    HashMap<String, ArrayList<String>> pageSuggestions = new HashMap<>();
    for (String profile : userProfiles.keySet()) {
        String path = crawlerOutputPaths.get(profile);
        ArrayList<String> suggestions = getSuggestions(path, numQueriesSuggestion, pagesPerQuerySuggestion,
                history);
        pageSuggestions.put(profile, suggestions);
    }

    //find alternative profiles for every profile and representative queries
    HashMap<String, HashMap<String, ArrayList<String>>> alternativeProfiles = new HashMap<>();
    for (String userProfile : userProfiles.keySet()) {
        String[] userProfileInfo = userProfile.split("/");
        HashMap<String, ArrayList<String>> profileQueries = new HashMap<>();
        for (String profile : wordvectors.keySet()) {
            String[] profileInfo = profile.split("/");
            if (profileInfo[0].equals(userProfileInfo[0]) && profileInfo[1].equals(userProfileInfo[1])
                    && !profileInfo[2].equals(userProfileInfo[2])) {
                String path = crawlerOutputPaths.get(profile);
                ArrayList<String> queries = getQueries(path, numQueriesExample);
                for (int i = 0; i < queries.size(); i++) {
                    String query = queries.get(i);
                    queries.set(i, query.substring(query.lastIndexOf("\\") + 1).replace("-query", "")
                            .replace("+", " "));
                }
                profileQueries.put(profile, queries);
            }
        }
        alternativeProfiles.put(userProfile, profileQueries);
    }

    //prepare JSON response
    JSONObject response = new JSONObject();
    response.put("userID", userID);
    response.put("timestamp", timestamp);
    JSONArray list = new JSONArray();

    for (String profile : userProfiles.keySet()) {
        JSONObject profileInfo = new JSONObject();
        profileInfo.put("profile", profile);
        profileInfo.put("score", userProfiles.get(profile));

        JSONArray temp = new JSONArray();
        ArrayList<String> suggestions = pageSuggestions.get(profile);
        for (String s : suggestions)
            temp.add(s);
        profileInfo.put("suggestions", temp);

        JSONArray alternativesArray = new JSONArray();
        for (String s : alternativeProfiles.get(profile).keySet()) {
            JSONObject alternativeInfo = new JSONObject();
            alternativeInfo.put("alternative", s);
            ArrayList<String> queries = alternativeProfiles.get(profile).get(s);
            JSONArray queriesArray = new JSONArray();
            for (String str : queries) {
                queriesArray.add(str);
            }
            alternativeInfo.put("queries", queriesArray);
            alternativesArray.add(alternativeInfo);
        }

        profileInfo.put("alternatives", alternativesArray);
        list.add(profileInfo);
    }
    response.put("profiles", list);
    System.out.println("JSON response is ready: " + response);

    //delete previous analysis and store results
    DBCollection collection = db.getCollection("history");
    BasicDBObject previous = new BasicDBObject();
    previous.put("userID", userID);
    collection.remove(previous);
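    // com.mongodb.util.JSON.parse converts the JSON string into a DBObject so the analysis can be stored as a document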
    DBObject dbObject = (DBObject) JSON.parse(response.toString());
    collection.insert(dbObject);
    System.out.println("I saved the analysis...");

}
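A minimal call sketch for perform; the user ID, history URLs, and parameter file below are hypothetical placeholders:

    ProfileAnalysis analysis = new ProfileAnalysis();
    String[] history = { "http://example.com/article-1", "http://example.com/article-2" };
    // input.txt is assumed to hold the word vectors and crawler output paths read by Utils.readInput
    analysis.perform("user-42", history, new File("input.txt"));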

From source file:com.mythesis.userbehaviouranalysis.ProfileAnalysis.java

License:Apache License

/**
 * a method that stores the query that has been suggested by the user
 * @param crawlerOutputPath SWebRank output directory used to check if a relevant query already exists
 * @param profile the query's relevant profile
 * @param query the given query
 */
public void storeQuery(String crawlerOutputPath, String profile, String query) {

    System.out.println(crawlerOutputPath);
    System.out.println(profile);
    System.out.println(query);
    //Find output paths
    File root = new File(crawlerOutputPath);
    File[] contents = root.listFiles();
    List<String> sWebRanklevels = new ArrayList<>();
    for (File f : contents) {
        if (f.getAbsolutePath().contains("level"))
            sWebRanklevels.add(f.getAbsolutePath());
    }

    //Find all query paths
    List<String> queries = new ArrayList<>();
    for (String s : sWebRanklevels) {
        File level = new File(s);
        File[] queriesFiles = level.listFiles();
        for (File f : queriesFiles) {
            if (!f.getAbsolutePath().contains("txt")) {
                String str = f.getAbsolutePath();
                queries.add(str.substring(str.lastIndexOf("\\") + 1).replace("-query", "").replace("+", " "));
            }
        }
    }

    //check if a relevant query already exists - I use Jaro-Winkler distance
    query = query.trim().replaceAll(" +", " ");
    for (String q : queries) {
        JaroWinklerDistance jwd = new JaroWinklerDistance();
        double distance = jwd.getDistance(q, query);
        if (distance > 0.9) { // threshold = 0.9
            return;
        }
    }

    Mongo mongo = new Mongo("localhost", 27017);
    DB db = mongo.getDB("profileAnalysis");

    DBCollection DBqueries = db.getCollection("newQueries");
    BasicDBObject searchQuery = new BasicDBObject();
    searchQuery.put("profile", profile);
    DBObject document = DBqueries.findOne(searchQuery);
    boolean flag = false;

    //check if a relevant query exists in the database - I use Jaro-Winkler distance
    if (document != null) {
        flag = true;
        BasicDBList storedQueries = (BasicDBList) document.get("queries");
        for (Object quer : storedQueries) {
            JaroWinklerDistance jwd = new JaroWinklerDistance();
            double distance = jwd.getDistance((String) quer, query);
            if (distance > 0.9) { // threshold = 0.9
                return;
            }
        }
    }

    //if document already exists add the new query
    if (flag) {
        DBqueries.update(searchQuery, new BasicDBObject("$push", new BasicDBObject("queries", query)));
    } else { //otherwise create a new document
        BasicDBList dbl = new BasicDBList();
        dbl.add(query);
        BasicDBObject entry = new BasicDBObject("profile", profile).append("queries", dbl);
        DBqueries.insert(entry);
    }
}
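A minimal call sketch for storeQuery; the SWebRank output path, profile name, and query are hypothetical placeholders following the domain/profile/level form used above:

    ProfileAnalysis analysis = new ProfileAnalysis();
    analysis.storeQuery("C:\\swebrank\\output", "sports/football/level=0", "premier league fixtures");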

From source file:com.mythesis.userbehaviouranalysis.RequestServlet.java

License:Apache License

/**
 * Handles the HTTP <code>GET</code> method.
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {

    String userPath = request.getServletPath();
    String params = request.getQueryString().split("=")[1];
    System.out.println("I'm going to send the analysis to " + request.getQueryString());

    if (userPath.equals("/analysis")) {
        Mongo mongo = new Mongo("localhost", 27017);
        DB db = mongo.getDB("profileAnalysis");
        DBCollection userinfo = db.getCollection("history");
        BasicDBObject searchQuery = new BasicDBObject();
        searchQuery.put("userID", params);
        DBCursor cursor = userinfo.find(searchQuery);
        if (cursor.hasNext()) {
            String entry = cursor.next().toString();
            System.out.println(entry);
            response.setHeader("Access-Control-Request-Method", "GET");
            response.setContentType("application/json");
            response.getWriter().write(entry);
        }
    }

}