List of usage examples for the twitter4j `QueryResult.getTweets()` method:
/** Returns the tweets matched by the executed query, one {@link Status} per tweet. */
List<Status> getTweets();
From source file:crawltweets2mongo.MonoThread.java
void getNewTweets(GeoLocation myLoc, double radius) { try {// w w w . j av a 2s .com Query query = new Query(); Query.Unit unit = Query.KILOMETERS; // or Query.MILES; query.setGeoCode(myLoc, radius, unit); if (radius > 200) query.setCount(20000); else query.setCount(20000); QueryResult result; result = this.twitter.search(query); //System.out.println("Getting Tweets. by Geo.."); List<Status> tweets = result.getTweets(); for (Status tweet : tweets) { BasicDBObject basicObj = new BasicDBObject(); basicObj.put("user_Rname", tweet.getUser().getName()); basicObj.put("user_name", tweet.getUser().getScreenName()); basicObj.put("retweet_count", tweet.getRetweetCount()); basicObj.put("tweet_followers_count", tweet.getUser().getFollowersCount()); UserMentionEntity[] mentioned = tweet.getUserMentionEntities(); basicObj.put("tweet_mentioned_count", mentioned.length); basicObj.put("tweet_ID", tweet.getId()); basicObj.put("tweet_text", tweet.getText()); Status temp1 = tweet.getRetweetedStatus(); if (temp1 != null) basicObj.put("Re_tweet_ID", temp1.getUser().getId()); GeoLocation loc = tweet.getGeoLocation(); if (loc != null) { basicObj.put("Latitude", loc.getLatitude()); basicObj.put("Longitude", loc.getLongitude()); } basicObj.put("CreateTime", tweet.getCreatedAt()); basicObj.put("FavoriteCount", tweet.getFavoriteCount()); basicObj.put("user_Id", tweet.getUser().getId()); if (tweet.getUser().getTimeZone() != null) basicObj.put("UsertimeZone", tweet.getUser().getTimeZone()); if (tweet.getUser().getStatus() != null) basicObj.put("UserStatus", tweet.getUser().getStatus()); //basicObj.put("tweetLocation", tweet.getPlace().getGeometryCoordinates()); String U_Loc = tweet.getUser().getLocation(); if (U_Loc != null) basicObj.put("userLocation", U_Loc); basicObj.put("number_of_rt", tweet.getRetweetCount()); //basicObj.put("isRetweet", tweet.getPlace().getGeometryCoordinates()); //basicObj.put("POS", tweet.getWithheldInCountries()); if (mentioned.length > 0) { basicObj.append("mentions", 
pickMentions(mentioned)); } try { //items.insert(basicObj); collection.insert(basicObj); } catch (Exception e) { // System.out.println("MongoDB Connection Error : " + e.getMessage()); // loadMenu(); } } collection.ensureIndex(new BasicDBObject("tweet_ID", 1), new BasicDBObject("unique", true)); } catch (TwitterException ex) { java.util.logging.Logger.getLogger(MonoThread.class.getName()).log(Level.SEVERE, null, ex); } }
From source file:DataCollections.TweetCollections_HashTags.java
public void collectTweetsFromHashtag_popterm(Hashtag_dbo hashtag) throws InterruptedException { long max_id = 0; boolean midused = false, sidused = false; if (hashtag.values[Hashtag_dbo.map.get("max_id")].used) { max_id = hashtag.values[Hashtag_dbo.map.get("")].lnumber; midused = true;/* w w w . j a v a2 s. c o m*/ } long since_id = 0; if (hashtag.values[Hashtag_dbo.map.get("since_id")].used) { since_id = hashtag.values[Hashtag_dbo.map.get("since_id")].lnumber; sidused = true; } Query q = new Query(hashtag.values[Hashtag_dbo.map.get("hashtag_popterm")].string); //LogPrinter.printLog("Collection Tweets for hashtag_searchterm"+hashtag.values[Hashtag_dbo.map.get("hashtag_popterm")].string); q.setCount(100); if (midused) { q.setMaxId(max_id); } if (sidused) { q.setSinceId(since_id); } QueryResult result = null; try { result = searchres.search(q); } catch (Exception e) { LogPrinter.printLog("Tweet Search Resources Rate Limit reached "); if (e instanceof TwitterException) { } if (e instanceof InterruptedException) { //Thread.sleep(((TwitterException)e).getRetryAfter()*1000+5000); } } int count = 0; for (Status s : result.getTweets()) { Tweet_dbo tweet = thelper.convertStatusToTweet_dbo(s); String whereclause = "tweet_id = " + Long.toString(tweet.values[Tweet_dbo.map.get("tweet_id")].lnumber); tweet.values[Tweet_dbo.map.get("processed")].setValue("true"); tweet.values[Tweet_dbo.map.get("f_search")].setValue("true"); tweet.values[Tweet_dbo.map.get("searchterm")] .setValue(hashtag.values[Hashtag_dbo.map.get("hashtag_popterm")].string); if (TweetsTable.select(whereclause, 0, 2).length == 0) { //LogPrinter.printLog(" Inserting tweet "+count+tweet.values[Tweet_dbo.map.get("tweet_id")].lnumber); TweetsTable.insert(tweet); users_edgescollections.extract_InsertUsers_EdgesFromTweet(s); count++; } } }
From source file:de.dev.eth0.retweeter.Distributor.java
License:BEER-WARE LICENSE
/**
 * Distributes the tweets to all target accounts.
 *
 * @return the number of status updates actually sent, or
 *         {@code Integer.MIN_VALUE} as a sentinel when the distributor
 *         configuration is invalid (not a count)
 * @throws TwitterException on search or update failures outside the inner retry guard
 */
public int distribute() throws TwitterException {
    // Invalid configuration is signalled with a sentinel rather than an exception.
    if (!getConfig().isDistributorConfigValid()) {
        return Integer.MIN_VALUE;
    }
    Twitter twitter = getTwitter();
    String search = buildSearchString();
    Query query = new Query(search);
    QueryResult result = twitter.search(query);
    int count = 0;
    // Iterate through results
    for (Tweet tweet : result.getTweets()) {
        // Check if already distributed (in-memory set) and verify the hashtag
        // really appears in the text, since the search can over-match.
        if (!distributedTweets.contains(tweet.getId())
                && StringUtils.contains(tweet.getText(), getConfig().getDistributorConfig().getHashtag())) {
            // Retweet each result to all targetaccounts
            for (String targetaccount : getConfig().getDistributorConfig().getTargetaccounts()) {
                // Dont distribute to yourself
                if (!StringUtils.equalsIgnoreCase(tweet.getFromUser(), targetaccount)) {
                    // Build "@target: RT @author text" manually.
                    StringBuilder sb = new StringBuilder();
                    sb.append("@");
                    sb.append(targetaccount);
                    sb.append(": RT @");
                    sb.append(tweet.getFromUser());
                    sb.append(" ");
                    sb.append(tweet.getText());
                    String text = sb.toString();
                    // try to tweet, might cause an exception (status duplicate) if already tweeted
                    logger.debug("Distributing tweet {} to {}", tweet.toString(), targetaccount);
                    try {
                        // Truncate to the 140-character status limit.
                        twitter.updateStatus(text.length() < 140 ? text : text.substring(0, 140));
                        count++;
                    } catch (TwitterException te) {
                        // Deliberate best-effort: a duplicate-status rejection
                        // must not abort distribution to the other accounts.
                        logger.warn("distribute of tweet " + tweet.toString() + "failed " + text, te);
                    }
                    // Marked as distributed even when the update failed, so it
                    // is not retried on the next run.
                    distributedTweets.add(tweet.getId());
                }
            }
        }
    }
    return count;
}
From source file:de.dev.eth0.retweeter.Retweeter.java
License:BEER-WARE LICENSE
/**
 * Performs the retweet-action and returns the number of tweets found with the
 * configured hashtag.
 *
 * @return number of retweeted tweets, or {@code Integer.MIN_VALUE} as a
 *         sentinel when the retweeter configuration is invalid
 * @throws TwitterException on search or timeline failures outside the inner retry guard
 */
public int retweet() throws TwitterException {
    if (!getConfig().isRetweeterConfigValid()) {
        return Integer.MIN_VALUE;
    }
    Twitter twitter = getTwitter();
    String search = buildSearchString();
    Query query = new Query(search);
    QueryResult result = twitter.search(query);
    // Own recent retweets, used below to avoid retweeting the same status twice.
    List<Status> lastretweets = twitter.getRetweetedByMe();
    int count = 0;
    for (Tweet tweet : result.getTweets()) {
        // ignore retweets and check if the hashtag is really in the tweet's text
        if (!StringUtils.startsWith(tweet.getText(), "RT @")
                && StringUtils.contains(tweet.getText(), getConfig().getRetweeterConfig().getHashtag())
                && !ALREADY_RETWEETED.contains(tweet.getId())) {
            // Cross-check against the account's own retweet timeline.
            boolean retweeted = false;
            for (Status retweet : lastretweets) {
                if (tweet.getId() == retweet.getRetweetedStatus().getId()) {
                    retweeted = true;
                    break;
                }
            }
            if (!retweeted) {
                // try to retweet, might fail
                logger.debug("found new tweet to retweet: {}", tweet.toString());
                try {
                    twitter.retweetStatus(tweet.getId());
                    count++;
                } catch (TwitterException te) {
                    // Deliberate best-effort: one failed retweet must not stop the batch.
                    logger.debug("retweet failed", te);
                } finally {
                    // Remember the id either way so it is never attempted again.
                    ALREADY_RETWEETED.add(tweet.getId());
                }
            }
        }
    }
    return count;
}
From source file:de.jetsli.twitter.TwitterSearch.java
License:Apache License
/**
 * Runs a search for {@code term} and appends matching tweets to {@code result},
 * registering each tweet's author in {@code userMap} keyed by lower-cased
 * screen name. A tweet created more than ~1 second before
 * {@code lastMaxCreateTime} stops further paging.
 *
 * @param term              query string
 * @param result            sink for accepted tweets (side effect)
 * @param userMap           lower-cased screen name -> user, filled as a side effect
 * @param tweets            requested hits per page
 * @param lastMaxCreateTime creation-time watermark (millis) from the previous call
 * @return the newest tweet creation time (millis) seen in this batch, 0 if none
 * @throws TwitterException on API errors
 */
long search(String term, Collection<Status> result, Map<String, User> userMap, int tweets,
        long lastMaxCreateTime) throws TwitterException {
    long maxId = 0L;
    long maxMillis = 0L;
    // TODO it looks like only one page is possible with 4.0.0
    int maxPages = 1;
    int hitsPerPage = tweets;
    boolean breakPaging = false;
    for (int page = 0; page < maxPages; page++) {
        Query query = new Query(term);
        // RECENT or POPULAR
        query.setResultType(Query.MIXED);
        // avoid that more recent results disturb our paging!
        if (page > 0)
            query.setMaxId(maxId);
        query.setCount(hitsPerPage);
        QueryResult res = twitter.search(query);
        // is res.getTweets() sorted?
        for (Status twe : res.getTweets()) {
            // determine maxId in the first page
            if (page == 0 && maxId < twe.getId())
                maxId = twe.getId();
            // Track the newest creation time seen across all pages.
            if (maxMillis < twe.getCreatedAt().getTime())
                maxMillis = twe.getCreatedAt().getTime();
            // 1s slack against clock skew between batches.
            if (twe.getCreatedAt().getTime() + 1000 < lastMaxCreateTime)
                breakPaging = true;
            else {
                String userName = twe.getUser().getScreenName().toLowerCase();
                User user = userMap.get(userName);
                if (user == null)
                    userMap.put(userName, twe.getUser());
                result.add(twe);
            }
        }
        // minMillis could force us to leave earlier than defined by maxPages
        // or if resulting tweets are less then request (but -10 because of twitter strangeness)
        if (breakPaging || res.getTweets().size() < hitsPerPage - 10)
            break;
    }
    return maxMillis;
}
From source file:de.jetwick.tw.NewClass.java
License:Apache License
/** A thread using the search API */ public Thread search() { return new Thread() { @Override//from w ww . ja v a 2 s .com public void run() { int MINUTES = 2; Twitter twitter = new TwitterFactory().getInstance(); try { while (!isInterrupted()) { Query query = new Query(queryTerms); // RECENT or POPULAR or MIXED // doesn't make a difference if MIXED or RECENT query.setResultType(Query.MIXED); query.setPage(1); query.setRpp(100); QueryResult res = twitter.search(query); for (Tweet tw : res.getTweets()) { searchMap.put(tw.getId(), tw.getText()); } Thread.sleep(MINUTES * 60 * 1000L); } } catch (Exception ex) { ex.printStackTrace(); } } }; }
From source file:de.jetwick.tw.TwitterSearch.java
License:Apache License
/**
 * Runs a paged search for {@code term}, converting results to {@link JTweet}
 * and collecting authors in {@code userMap} keyed by lower-cased screen name.
 * Paging stops early when a tweet is older than {@code lastMaxCreateTime}
 * (minus ~1s slack) or a page comes back clearly short.
 *
 * @param term              query string
 * @param result            sink for accepted tweets (side effect)
 * @param userMap           lower-cased screen name -> JUser, filled as a side effect
 * @param tweets            total number of tweets requested; paged in chunks of 100
 * @param lastMaxCreateTime creation-time watermark (millis) from the previous call
 * @return the newest tweet creation time (millis) seen in this batch, 0 if none
 * @throws TwitterException on API errors
 */
long search(String term, Collection<JTweet> result, Map<String, JUser> userMap, int tweets,
        long lastMaxCreateTime) throws TwitterException {
    long maxId = 0L;
    long maxMillis = 0L;
    int hitsPerPage;
    int maxPages;
    // Split the requested total into pages of at most 100 (API page limit),
    // rounding the page count up.
    if (tweets < 100) {
        hitsPerPage = tweets;
        maxPages = 1;
    } else {
        hitsPerPage = 100;
        maxPages = tweets / hitsPerPage;
        if (tweets % hitsPerPage > 0)
            maxPages++;
    }
    boolean breakPaging = false;
    for (int page = 0; page < maxPages; page++) {
        Query query = new Query(term);
        // RECENT or POPULAR
        query.setResultType(Query.MIXED);
        // avoid that more recent results disturb our paging!
        if (page > 0)
            query.setMaxId(maxId);
        query.setPage(page + 1);
        query.setRpp(hitsPerPage);
        QueryResult res = twitter.search(query);
        // is res.getTweets() sorted?
        for (Object o : res.getTweets()) {
            Tweet twe = (Tweet) o;
            // determine maxId in the first page
            if (page == 0 && maxId < twe.getId())
                maxId = twe.getId();
            if (maxMillis < twe.getCreatedAt().getTime())
                maxMillis = twe.getCreatedAt().getTime();
            // 1s slack against clock skew between batches.
            if (twe.getCreatedAt().getTime() + 1000 < lastMaxCreateTime)
                breakPaging = true;
            else {
                String userName = twe.getFromUser().toLowerCase();
                JUser user = userMap.get(userName);
                if (user == null) {
                    user = new JUser(userName).init(twe);
                    userMap.put(userName, user);
                }
                result.add(new JTweet(twe, user));
            }
        }
        // minMillis could force us to leave earlier than defined by maxPages
        // or if resulting tweets are less then request (but -10 because of twitter strangeness)
        if (breakPaging || res.getTweets().size() < hitsPerPage - 10)
            break;
    }
    return maxMillis;
}
From source file:dhbw.clippinggorilla.external.twitter.TwitterUtils.java
/** * @param includedTagsSet/*from w ww . j a v a2 s . c o m*/ * @param excludedTagsSet * @param sinceDate * @return Gets Tweets for Profile */ public static LinkedHashSet<Article> getArticlesFromTwitter(Set<String> includedTagsSet, Set<String> excludedTagsSet, LocalDateTime sinceDate) { Query query = queryBuilder(includedTagsSet, sinceDate); LinkedHashSet<Article> result = new LinkedHashSet<>(); QueryResult tweets = searchTweets(query); for (Status status : tweets.getTweets()) { result.add(fillArticle(status)); } return result; }
From source file:dhbw.clippinggorilla.external.twitter.TwitterUtils.java
/**
 * Ad-hoc smoke test: builds a query from a few tag groups, runs the search
 * and prints each matching tweet together with the types of its media entities.
 *
 * @param args unused
 */
public static void main(String[] args) {
    // BUGFIX: removed the unused locals excludedTagsSet and date.
    Set<String> includedTagsSet = new HashSet<>();
    // Each entry is a comma-separated tag group consumed by queryBuilder.
    includedTagsSet.add("bmw, mercedes");
    includedTagsSet.add("Audi, toyota");
    includedTagsSet.add("merkel");
    includedTagsSet.add("dat boi, pepe");
    includedTagsSet.add("dhbw");
    includedTagsSet.add("VW Golf");
    Query query = queryBuilder(includedTagsSet, LocalDateTime.of(2017, 5, 1, 0, 0));
    QueryResult result = searchTweets(query);
    for (Status status : result.getTweets()) {
        System.out.println("@" + status.getUser().getScreenName() + ":" + status.getText());
        for (MediaEntity mediaEntity : status.getMediaEntities()) {
            System.out.println(mediaEntity.getType());
        }
        System.out.println("_________________________________________________");
    }
}
From source file:dk.netarkivet.harvester.tools.TwitterDecidingScope.java
License:Open Source License
/**
 * This routine makes any necessary Twitter API calls and queues the content discovered.
 *
 * @param controller The controller for this crawl.
 */
@Override
public void initialize(CrawlController controller) {
    super.initialize(controller);
    twitter = (new TwitterFactory()).getInstance();
    keywords = null;
    // Read the scope's configuration attributes; any lookup failure is fatal.
    try {
        keywords = (StringList) super.getAttribute(ATTR_KEYWORDS);
        pages = ((Integer) super.getAttribute(ATTR_PAGES)).intValue();
        geoLocations = (StringList) super.getAttribute(ATTR_GEOLOCATIONS);
        language = (String) super.getAttribute(ATTR_LANG);
        if (language == null) {
            language = "all";
        }
        resultsPerPage = (Integer) super.getAttribute(ATTR_RESULTS_PER_PAGE);
        queueLinks = (Boolean) super.getAttribute(ATTR_QUEUE_LINKS);
        queueUserStatus = (Boolean) super.getAttribute(ATTR_QUEUE_USER_STATUS);
        queueUserStatusLinks = (Boolean) super.getAttribute(ATTR_QUEUE_USER_STATUS_LINKS);
        queueKeywordLinks = (Boolean) super.getAttribute(ATTR_QUEUE_KEYWORD_LINKS);
    } catch (AttributeNotFoundException e1) {
        e1.printStackTrace();
        throw new RuntimeException(e1);
    } catch (MBeanException e1) {
        e1.printStackTrace();
        throw new RuntimeException(e1);
    } catch (ReflectionException e1) {
        e1.printStackTrace();
        throw new RuntimeException(e1);
    }
    for (Object keyword : keywords) {
        log.info("Twitter Scope keyword: {}", keyword);
    }
    // If keywords or geoLocations is missing, add a list with a single empty string so that the main loop is
    // executed at least once.
    if (keywords == null || keywords.isEmpty()) {
        keywords = new StringList("keywords", "empty keyword list", new String[] { "" });
    }
    if (geoLocations == null || geoLocations.isEmpty()) {
        geoLocations = new StringList("geolocations", "empty geolocation list", new String[] { "" });
    }
    log.info("Twitter Scope will queue {} page(s) of results.", pages);
    // Nested loop over keywords, geo_locations and pages.
    for (Object keyword : keywords) {
        String keywordString = (String) keyword;
        for (Object geoLocation : geoLocations) {
            String urlQuery = (String) keyword;
            Query query = new Query();
            query.setRpp(resultsPerPage);
            if (language != null && !language.equals("")) {
                query.setLang(language);
                urlQuery += " lang:" + language;
                // NOTE(review): keywordString gains another " lang:" suffix on
                // EVERY geoLocation iteration because it is declared in the
                // outer loop — looks unintended; verify.
                keywordString += " lang:" + language;
            }
            // NOTE(review): single-argument URLEncoder.encode uses the platform
            // charset and is deprecated — confirm UTF-8 is intended.
            urlQuery = "http://twitter.com/search/" + URLEncoder.encode(urlQuery);
            if (queueKeywordLinks) {
                addSeedIfLegal(urlQuery);
            }
            for (int page = 1; page <= pages; page++) {
                query.setPage(page);
                if (!keyword.equals("")) {
                    query.setQuery(keywordString);
                }
                // Geolocation entries are "lat,lon,radius,unit" strings.
                if (!geoLocation.equals("")) {
                    String[] locationArray = ((String) geoLocation).split(",");
                    try {
                        GeoLocation location = new GeoLocation(Double.parseDouble(locationArray[0]),
                                Double.parseDouble(locationArray[1]));
                        query.setGeoCode(location, Double.parseDouble(locationArray[2]), locationArray[3]);
                    } catch (NumberFormatException e) {
                        // Malformed geolocation entry: skip geo filtering for this query.
                        e.printStackTrace();
                    }
                }
                try {
                    final QueryResult result = twitter.search(query);
                    List<Tweet> tweets = result.getTweets();
                    for (Tweet tweet : tweets) {
                        long id = tweet.getId();
                        String fromUser = tweet.getFromUser();
                        // Seed the tweet's permalink itself.
                        String tweetUrl = "http://www.twitter.com/" + fromUser + "/status/" + id;
                        addSeedIfLegal(tweetUrl);
                        tweetCount++;
                        if (queueLinks) {
                            extractEmbeddedLinks(tweet);
                        }
                        if (queueUserStatus) {
                            // Also seed the author's status page, and optionally the
                            // links found in their recent statuses.
                            String statusUrl = "http://twitter.com/" + tweet.getFromUser() + "/";
                            addSeedIfLegal(statusUrl);
                            linkCount++;
                            if (queueUserStatusLinks) {
                                queueUserStatusLinks(tweet.getFromUser());
                            }
                        }
                    }
                } catch (TwitterException e1) {
                    // One failed search must not abort the remaining keyword/geo/page combinations.
                    log.error(e1.getMessage());
                }
            }
        }
    }
    System.out.println(
            TwitterDecidingScope.class + " added " + tweetCount + " tweets and " + linkCount + " other links.");
}