List of usage examples for com.mongodb DBCollection insert
public WriteResult insert(final List<? extends DBObject> documents)
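Before the harvested examples below, here is a minimal sketch of the list-taking overload documented above, since most of the snippets on this page insert a single document rather than a batch. It assumes a mongod instance on localhost:27017 and uses hypothetical names (database "exampleDb", collection "people", sample fields "name"/"age") purely for illustration; the legacy MongoClient/DB API is the same one the examples below rely on.

import java.util.Arrays;
import java.util.List;

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;
import com.mongodb.WriteResult;

public class InsertListExample {
    public static void main(String[] args) {
        // assumes a mongod instance is listening on localhost:27017
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        try {
            DB db = mongoClient.getDB("exampleDb");               // hypothetical database name
            DBCollection collection = db.getCollection("people"); // hypothetical collection name

            // build a list of documents and insert them in a single call
            List<DBObject> documents = Arrays.<DBObject>asList(
                    new BasicDBObject("name", "Alice").append("age", 30),
                    new BasicDBObject("name", "Bob").append("age", 25));
            WriteResult result = collection.insert(documents);

            System.out.println("inserted " + documents.size() + " documents: " + result);
        } finally {
            mongoClient.close();
        }
    }
}

Passing the whole list to a single insert call sends the batch in one operation, which generally means fewer round trips than inserting each document in a loop, as several of the examples below do.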
From source file:com.maoyan.pf.webcollector.spider.ShowrateCrawler.java
License:Open Source License
public static void main(String[] args) throws Exception {
    Executor executor = new Executor() {
        @Override
        public void execute(CrawlDatum datum, CrawlDatums next) throws Exception {
            MongoClient mongoClient = new MongoClient("localhost", 27017);
            // DBCollection dbCollection = mongoClient.getDB("maoyan_crawler").getCollection("rankings_am");
            DB db = mongoClient.getDB("maoyan_crawler");
            Set<String> colls = db.getCollectionNames();
            for (String s : colls) {
                if (s.equals("show_rate")) {
                    db.getCollection(s).drop();
                }
            }
            DBCollection dbCollection = db.getCollection("show_rate");
            // ProfilesIni pi = new ProfilesIni();
            // FirefoxProfile profile = pi.getProfile("default");
            WebDriver driver = new FirefoxDriver();
            driver.manage().window().maximize();
            driver.manage().timeouts().pageLoadTimeout(3, TimeUnit.SECONDS);
            // driver.setJavascriptEnabled(false);
            System.out.println("??\n");
            driver.get(datum.getUrl());
            // System.out.println(driver.getPageSource());
            List<WebElement> movie_name = driver
                    .findElements(By.xpath("//div[@id='playPlan_table']/ul/li[@class='c1 lineDot']"));
            List<WebElement> boxoffice_rate = driver
                    .findElements(By.xpath("//div[@id='playPlan_table']/ul/li[@class='c2 red']"));
            List<WebElement> visit_pershow = driver
                    .findElements(By.xpath("//div[@id='playPlan_table']/ul/li[@class='c3 gray']"));
            WebElement title = driver.findElement(By.xpath("//p[@id='pieTip']"));
            for (int i = 0; i < movie_name.size(); i++) {
                String movie_name_val = movie_name.get(i).getText();
                String boxofficerate_val = boxoffice_rate.get(i).getText();
                String visit_pershow_val = visit_pershow.get(i).getText();
                BasicDBObject dbObject = new BasicDBObject();
                dbObject.append("title", title.getText()).append("is_gold", "?")
                        .append("show_type", "?").append("movie_name", movie_name_val)
                        .append("boxoffice_rate", boxofficerate_val).append("visit_pershow", visit_pershow_val);
                dbCollection.insert(dbObject);
            }
            System.out.println("?\n");
            WebElement click_gold = driver.findElement(By.id("playPlan_time"));
            click_gold.click();
            String gold_seat = driver.getWindowHandle();
            driver.switchTo().window(gold_seat);
            List<WebElement> movie_name_gold = driver
                    .findElements(By.xpath("//div[@id='playPlan_table']/ul/li[@class='c1 lineDot']"));
            List<WebElement> boxoffice_rate_gold = driver
                    .findElements(By.xpath("//div[@id='playPlan_table']/ul/li[@class='c2 red']"));
            List<WebElement> visit_pershow_gold = driver
                    .findElements(By.xpath("//div[@id='playPlan_table']/ul/li[@class='c3 gray']"));
            WebElement title_gold = driver.findElement(By.xpath("//p[@id='pieTip']"));
            for (int i = 0; i < movie_name_gold.size(); i++) {
                String movie_name_val = movie_name_gold.get(i).getText();
                String boxofficerate_val = boxoffice_rate_gold.get(i).getText();
                String visit_pershow_val = visit_pershow_gold.get(i).getText();
                BasicDBObject dbObject = new BasicDBObject();
                dbObject.append("title", title_gold.getText()).append("is_gold", "")
                        .append("show_type", "?").append("movie_name", movie_name_val)
                        .append("boxoffice_rate", boxofficerate_val).append("visit_pershow", visit_pershow_val);
                dbCollection.insert(dbObject);
            }
            System.out.println("?\n");
            WebElement click_vist = driver.findElement(By.xpath("//*[@id='show--type']"));
            click_vist.click();
            String gold_vist = driver.getWindowHandle();
            driver.switchTo().window(gold_vist);
            List<WebElement> movie_name_gold_visit = driver
                    .findElements(By.xpath("//div[@id='playPlan_table']/ul/li[@class='c1 lineDot']"));
            List<WebElement> boxoffice_rate_gold_visit = driver
                    .findElements(By.xpath("//div[@id='playPlan_table']/ul/li[@class='c2 red']"));
            List<WebElement> visit_pershow_gold_visit = driver
                    .findElements(By.xpath("//div[@id='playPlan_table']/ul/li[@class='c3 gray']"));
            WebElement title_gold_visit = driver.findElement(By.xpath("//p[@id='pieTip']"));
            for (int i = 0; i < movie_name_gold_visit.size(); i++) {
                String movie_name_val = movie_name_gold_visit.get(i).getText();
                String boxofficerate_val = boxoffice_rate_gold_visit.get(i).getText();
                String visit_pershow_val = visit_pershow_gold_visit.get(i).getText();
                BasicDBObject dbObject = new BasicDBObject();
                dbObject.append("title", title_gold_visit.getText()).append("is_gold", "")
                        .append("show_type", "").append("movie_name", movie_name_val)
                        .append("boxoffice_rate", boxofficerate_val).append("visit_pershow", visit_pershow_val);
                dbCollection.insert(dbObject);
            }
            System.out.println("?\n");
            click_gold.click();
            String normal_seat = driver.getWindowHandle();
            driver.switchTo().window(normal_seat);
            List<WebElement> movie_name_normal_seat = driver
                    .findElements(By.xpath("//div[@id='playPlan_table']/ul/li[@class='c1 lineDot']"));
            List<WebElement> boxoffice_rate_normal_seat = driver
                    .findElements(By.xpath("//div[@id='playPlan_table']/ul/li[@class='c2 red']"));
            List<WebElement> visit_pershow_normal_seat = driver
                    .findElements(By.xpath("//div[@id='playPlan_table']/ul/li[@class='c3 gray']"));
            WebElement title_normal_seat = driver.findElement(By.xpath("//p[@id='pieTip']"));
            for (int i = 0; i < movie_name_normal_seat.size(); i++) {
                String movie_name_val = movie_name_normal_seat.get(i).getText();
                String boxofficerate_val = boxoffice_rate_normal_seat.get(i).getText();
                String visit_pershow_val = visit_pershow_normal_seat.get(i).getText();
                BasicDBObject dbObject = new BasicDBObject();
                dbObject.append("title", title_normal_seat.getText()).append("is_gold", "?")
                        .append("show_type", "").append("movie_name", movie_name_val)
                        .append("boxoffice_rate", boxofficerate_val).append("visit_pershow", visit_pershow_val);
                dbCollection.insert(dbObject);
            }
            driver.close();
            driver.quit();
            mongoClient.close();
        }
    };
    DBManager manager = new BerkeleyDBManager("maoyan");
    Crawler crawler = new Crawler(manager, executor);
    crawler.addSeed("http://pf.maoyan.com/show/rate");
    crawler.start(1);
}
From source file:com.mebigfatguy.mongobrowser.actions.NewObjectAction.java
License:Apache License
@Override
public void actionPerformed(ActionEvent e) {
    JTree tree = context.getTree();
    TreePath path = tree.getSelectionPath();
    MongoTreeNode collectionNode = (MongoTreeNode) path.getLastPathComponent();
    DBCollection dbCollection = (DBCollection) collectionNode.getUserObject();
    BasicDBObject dbObj = new BasicDBObject();
    dbCollection.insert(dbObj);
    DefaultTreeModel model = (DefaultTreeModel) tree.getModel();
    MongoTreeNode objectNode = new MongoTreeNode(dbObj, false);
    collectionNode.add(objectNode);
    MongoTreeNode slug = new MongoTreeNode();
    objectNode.add(slug);
    model.nodeStructureChanged((MongoTreeNode) model.getRoot());
    TreePath selection = new TreePath(objectNode.getPath());
    tree.scrollPathToVisible(selection);
    tree.setSelectionPath(selection);
}
From source file:com.miya.twit.mongodb.DBConnectSentiment.java
public void insertWord(List<TweetDBEntity> list) {
    DBCollection collection = dbConnection();
    if (collection != null) {
        BasicDBObject document;
        for (TweetDBEntity twit : list) {
            document = new BasicDBObject();
            document.append("text", twit.getText());
            document.append("rootType", twit.getRootType());
            document.append("polarity", twit.getPolarity());
            collection.insert(document);
            System.out.println(collection.getCount() + " ---" + twit.getText() + " tweet'i yazıldı");
        }
    }
}
From source file:com.miya.twit.mongodb.DBConnectSentiment.java
public void insertExpression(List<Expressions> list) {
    DBCollection collection = dbConnectionForExpression();
    if (collection != null) {
        BasicDBObject document;
        for (Expressions exp : list) {
            document = new BasicDBObject();
            document.append("text", exp.getExpression());
            document.append("polarity", Polarity.getPolarityValue(exp.getPolarity()));
            collection.insert(document);
            System.out.println(
                    collection.getCount() + "--------- " + Polarity.getPolarityValue(exp.getPolarity())
                            + " ---" + exp.getExpression() + " Deyim'i yazıldı");
        }
    }
}
From source file:com.mycompany.bean.PlytaService.java
public String insertDocument(String DBName, String CollectionName, Plyta PlytaInstance) throws Exception {
    try {
        DBCollection collection = getConnection(DBName, CollectionName);
        BasicDBObject utwor;
        List<BasicDBObject> utwory = new ArrayList<BasicDBObject>();
        if (PlytaInstance.getUtwory() != null) {
            for (int i = 0; i < PlytaInstance.getUtwory().size(); i++) {
                utwor = new BasicDBObject();
                utwor.put("nazwa", PlytaInstance.getUtwory().get(i).getNazwa());
                utwor.put("dlugosc", PlytaInstance.getUtwory().get(i).getDlugosc());
                utwory.add(utwor);
            }
        }
        BasicDBObject document = new BasicDBObject("tytul", PlytaInstance.getTytul())
                .append("autor", PlytaInstance.getAutor())
                .append("liczbaUtworow", PlytaInstance.getLiczbaUtworow())
                .append("wytwornia", PlytaInstance.getWytwornia())
                .append("rokWydania", PlytaInstance.getRokWydania())
                .append("producent", PlytaInstance.getProducent())
                .append("gatunek", PlytaInstance.getGatunek())
                .append("dlugosc", PlytaInstance.getDlugosc())
                .append("single", PlytaInstance.getSingle())
                .append("nagrody", PlytaInstance.getNagrody())
                .append("rodzajAlbumu", PlytaInstance.getRodzajAlbumu())
                .append("utwory", utwory);
        collection.insert(document);
        log.log(Level.INFO, "Document inserted");
        return "Pomyslnie dodano dokument: " + collection.count();
    } catch (Exception e) {
        log.log(Level.SEVERE, "Document inserting error! Exception thrown");
        System.out.println("Blad podczas dodawania dokumentu:" + e.getClass().getName());
        System.out.println("wiadomosc: " + e.getMessage());
        e.printStackTrace();
        return null;
    }
}
From source file:com.mycompany.model.MarkerDAOImpl.java
@Override
public void saveOrUpdate(Marker marker) {
    DBCollection markerCollection = MongoConfig.getMongoConnection().getCollection("markers");
    BasicDBObject document = new BasicDBObject();
    document.put("lat", marker.getLat());
    document.put("lng", marker.getLng());
    document.put("title", marker.getTitle());
    document.put("icon", marker.getIcon());
    document.put("content", marker.getContent());
    if (marker.getId().length() > 0) {
        BasicDBObject query = new BasicDBObject();
        query.append("_id", new ObjectId(marker.getId()));
        markerCollection.update(query, document);
    } else {
        markerCollection.insert(document);
    }
}
From source file:com.mycompany.mongodemo.MongoDemoClass.java
public static void main(String[] args) {
    MongoClient mongo = new MongoClient("localhost", 27017);
    /*
     * first create database and collection (table) in mongodb code:
     *   use demodb                     // to create db
     *   db.createCollection("users")   // create table users in db
     */
    DB db = mongo.getDB("demodb");
    DBCollection col = db.getCollection("users");
    // creating list of POST
    BasicDBList postList = new BasicDBList();
    postList.add("a1"); // add post reference in list
    postList.add("a2");
    postList.add("a3");
    // create nested element : posts
    BasicDBObject posts = new BasicDBObject("publicPost", postList).append("privatePost", postList)
            .append("exclusivePost", postList);
    // create insert document
    BasicDBObject doc = new BasicDBObject("firstName", "vishal").append("lastName", "patel")
            .append("email", "vishal.6794@gmail.com").append("userName", "im_vishal")
            .append("password", "admin").append("userType", 1).append("lastAccessTime", new Date())
            .append("posts", posts).append("rating", 10).append("verified", true);
    // insert into collection using insert method
    col.insert(doc);
    /*
     * display first document from collection
     * in this case our collection is users
     */
    DBObject mydoc = col.findOne();
    System.out.println(mydoc);
}
From source file:com.mythesis.userbehaviouranalysis.ProfileAnalysis.java
License:Apache License
/**
 * finds the profiles that match user's interests given his web history
 * @param userID the user's id
 * @param history the user's web history
 * @param input a txt file that contains the necessary parameters
 */
public void perform(String userID, String[] history, File input) {
    System.out.println("total urls = " + history.length);
    //default parameters
    //number of random queries for each profile
    int numQueriesSuggestion = 5;
    //number of random webpages per query to suggest - total number of suggestions =
    //numQueriesSuggestion*pagesPerQuerySuggestion
    int pagesPerQuerySuggestion = 1;
    //number of random queries to return as examples for alternative profiles
    int numQueriesExample = 2;
    //we get the current date/time
    DateTime current = new DateTime();
    DateTimeFormatter fmt = DateTimeFormat.forPattern("dd/MM/yyyy HH:mm");
    String timestamp = fmt.print(current);
    //update user info - i'll store the results when i perform the last analysis
    Mongo mongo = new Mongo("localhost", 27017);
    DB db = mongo.getDB("profileAnalysis");
    DBCollection userinfo = db.getCollection("userinfo");
    BasicDBObject newDocument = new BasicDBObject();
    newDocument.put("$set", new BasicDBObject().append("timestamp", timestamp));
    BasicDBObject searchQuery = new BasicDBObject();
    searchQuery.put("userID", userID);
    userinfo.update(searchQuery, newDocument, true, false);
    //read the necessary parameters
    Utils utils = new Utils();
    utils.readInput(input);
    HashMap<String, ArrayList<String>> wordvectors = utils.wordvectors;
    HashMap<String, String> crawlerOutputPaths = utils.crawlerOutputPaths;
    //get the urls' content
    ArrayList<String> webpages = new ArrayList<>();
    ArrayList<String> urls = new ArrayList<>();
    for (int i = 0; i < history.length; i++) {
        WebParser pageParser = new WebParser(history[i]);
        pageParser.parse();
        String content = pageParser.getContent();
        if ("".equals(content) || content == null)
            continue;
        webpages.add(content);
        urls.add(history[i]);
    }
    //calculate the urls' scores
    HashMap<String, double[]> historyScores = new HashMap<>();
    String[] webpagesArr = new String[webpages.size()];
    webpagesArr = webpages.toArray(webpagesArr);
    String[] urlsArr = new String[urls.size()];
    urlsArr = urls.toArray(urlsArr);
    for (String profile : wordvectors.keySet()) {
        Scorer scorer = new Scorer(webpagesArr, urlsArr, wordvectors.get(profile));
        double[] semanticScores = scorer.getSemanticScores();
        double[] relevanceScores = scorer.getRelevanceScores();
        double[] confidenceScores = scorer.getConfidenceScores();
        double[] scores = scoreFormula(semanticScores, relevanceScores, confidenceScores);
        historyScores.put(profile, scores);
    }
    //find the maximum score of every url and get the summation of the scores for each profile
    HashMap<String, Double> userProfilesScore = new HashMap<>();
    for (int i = 0; i < webpages.size(); i++) {
        double max = 0.0;
        String info = "undefined";
        for (String profile : historyScores.keySet()) {
            if (historyScores.get(profile)[i] > max) {
                max = historyScores.get(profile)[i];
                info = profile;
            }
        }
        if (!"undefined".equals(info)) {
            Double prevscore = userProfilesScore.get(info);
            userProfilesScore.put(info, (prevscore == null) ? max : prevscore + max);
        }
    }
    //find which profile level has the maximum score, e.g. if football/level=0 score is greater
    //than football/level=1 score then the user is better described as a football/level=0 user
    HashMap<String, Double> userProfileScores = new HashMap<>();
    HashMap<String, String> userProfileLevels = new HashMap<>();
    for (String s : userProfilesScore.keySet()) {
        String[] info = s.split("/");
        Double prevscore = userProfileScores.get(info[0] + "/" + info[1] + "/");
        if (prevscore == null) {
            userProfileScores.put(info[0] + "/" + info[1] + "/", userProfilesScore.get(s));
            userProfileLevels.put(info[0] + "/" + info[1] + "/", info[2]);
        } else if (userProfilesScore.get(s) > prevscore) {
            userProfileScores.put(info[0] + "/" + info[1] + "/", userProfilesScore.get(s));
            userProfileLevels.put(info[0] + "/" + info[1] + "/", info[2]);
        }
    }
    //put the final profiles together in this simple form: domain/profile/level of expertise and rank them
    Double totalScore = 0.0;
    for (String s : userProfileScores.keySet())
        totalScore += userProfileScores.get(s);
    Map<String, Double> userProfiles = new HashMap<>();
    for (String s : userProfileLevels.keySet())
        userProfiles.put(s + userProfileLevels.get(s), round(userProfileScores.get(s) * 100 / totalScore, 2));
    userProfiles = sortByValue(userProfiles);
    //find page suggestions for every profile
    HashMap<String, ArrayList<String>> pageSuggestions = new HashMap<>();
    for (String profile : userProfiles.keySet()) {
        String path = crawlerOutputPaths.get(profile);
        ArrayList<String> suggestions = getSuggestions(path, numQueriesSuggestion, pagesPerQuerySuggestion, history);
        pageSuggestions.put(profile, suggestions);
    }
    //find alternative profiles for every profile and representative queries
    HashMap<String, HashMap<String, ArrayList<String>>> alternativeProfiles = new HashMap<>();
    for (String userProfile : userProfiles.keySet()) {
        String[] userProfileInfo = userProfile.split("/");
        HashMap<String, ArrayList<String>> profileQueries = new HashMap<>();
        for (String profile : wordvectors.keySet()) {
            String[] profileInfo = profile.split("/");
            if (profileInfo[0].equals(userProfileInfo[0]) && profileInfo[1].equals(userProfileInfo[1])
                    && !profileInfo[2].equals(userProfileInfo[2])) {
                String path = crawlerOutputPaths.get(profile);
                ArrayList<String> queries = getQueries(path, numQueriesExample);
                for (int i = 0; i < queries.size(); i++) {
                    String query = queries.get(i);
                    queries.set(i, query.substring(query.lastIndexOf("\\") + 1).replace("-query", "")
                            .replace("+", " "));
                }
                profileQueries.put(profile, queries);
            }
        }
        alternativeProfiles.put(userProfile, profileQueries);
    }
    //prepare JSON response
    JSONObject response = new JSONObject();
    response.put("userID", userID);
    response.put("timestamp", timestamp);
    JSONArray list = new JSONArray();
    for (String profile : userProfiles.keySet()) {
        JSONObject profileInfo = new JSONObject();
        profileInfo.put("profile", profile);
        profileInfo.put("score", userProfiles.get(profile));
        JSONArray temp = new JSONArray();
        ArrayList<String> suggestions = pageSuggestions.get(profile);
        for (String s : suggestions)
            temp.add(s);
        profileInfo.put("suggestions", temp);
        JSONArray alternativesArray = new JSONArray();
        for (String s : alternativeProfiles.get(profile).keySet()) {
            JSONObject alternativeInfo = new JSONObject();
            alternativeInfo.put("alternative", s);
            ArrayList<String> queries = alternativeProfiles.get(profile).get(s);
            JSONArray queriesArray = new JSONArray();
            for (String str : queries) {
                queriesArray.add(str);
            }
            alternativeInfo.put("queries", queriesArray);
            alternativesArray.add(alternativeInfo);
        }
        profileInfo.put("alternatives", alternativesArray);
        list.add(profileInfo);
    }
    response.put("profiles", list);
    System.out.println("JSON response is ready: " + response);
    //delete previous analysis and store results
    DBCollection collection = db.getCollection("history");
    BasicDBObject previous = new BasicDBObject();
    previous.put("userID", userID);
    collection.remove(previous);
    DBObject dbObject = (DBObject) JSON.parse(response.toString());
    collection.insert(dbObject);
    System.out.println("I saved the analysis...");
}
From source file:com.mythesis.userbehaviouranalysis.ProfileAnalysis.java
License:Apache License
/**
 * a method that stores the query that has been suggested by the user
 * @param crawlerOutputPath SWebRank output directory used to check if a relevant query already exists
 * @param profile the query's relevant profile
 * @param query the given query
 */
public void storeQuery(String crawlerOutputPath, String profile, String query) {
    System.out.println(crawlerOutputPath);
    System.out.println(profile);
    System.out.println(query);
    //Find output paths
    File root = new File(crawlerOutputPath);
    File[] contents = root.listFiles();
    List<String> sWebRanklevels = new ArrayList<>();
    for (File f : contents) {
        if (f.getAbsolutePath().contains("level"))
            sWebRanklevels.add(f.getAbsolutePath());
    }
    //Find all query paths
    List<String> queries = new ArrayList<>();
    for (String s : sWebRanklevels) {
        File level = new File(s);
        File[] queriesFiles = level.listFiles();
        for (File f : queriesFiles) {
            if (!f.getAbsolutePath().contains("txt")) {
                String str = f.getAbsolutePath();
                queries.add(str.substring(str.lastIndexOf("\\") + 1).replace("-query", "").replace("+", " "));
            }
        }
    }
    //check if a relevant query already exists - I use Jaro-Winkler distance
    query = query.trim().replaceAll(" +", " ");
    for (String q : queries) {
        JaroWinklerDistance jwd = new JaroWinklerDistance();
        double distance = jwd.getDistance(q, query);
        if (distance > 0.9) { // threshold = 0.9
            return;
        }
    }
    Mongo mongo = new Mongo("localhost", 27017);
    DB db = mongo.getDB("profileAnalysis");
    DBCollection DBqueries = db.getCollection("newQueries");
    BasicDBObject searchQuery = new BasicDBObject();
    searchQuery.put("profile", profile);
    DBObject document = DBqueries.findOne(searchQuery);
    boolean flag = false;
    //check if a relevant query exists in the database - I use Jaro-Winkler distance
    if (document != null) {
        flag = true;
        BasicDBList storedQueries = (BasicDBList) document.get("queries");
        for (Object quer : storedQueries) {
            JaroWinklerDistance jwd = new JaroWinklerDistance();
            double distance = jwd.getDistance((String) quer, query);
            if (distance > 0.9) { // threshold = 0.9
                return;
            }
        }
    }
    //if the document already exists, add the new query
    if (flag) {
        DBqueries.update(searchQuery, new BasicDBObject("$push", new BasicDBObject("queries", query)));
    } else {
        //otherwise create a new document
        BasicDBList dbl = new BasicDBList();
        dbl.add(query);
        BasicDBObject entry = new BasicDBObject("profile", profile).append("queries", dbl);
        DBqueries.insert(entry);
    }
}
From source file:com.nlp.twitterstream.MongoUtil.java
License:Open Source License
/**
 * Insert document into database
 *
 * @param collection the DBCollection to insert into
 * @param basicDBObj the document to insert
 * @return the WriteResult returned by the insert
 */
public WriteResult insertDoc(DBCollection collection, BasicDBObject basicDBObj) {
    WriteResult writeRes = collection.insert(basicDBObj);
    return writeRes;
}