List of usage examples for the twitter4j `Query.setMaxId(long maxId)` method.
public void setMaxId(long maxId)
From source file:com.twitter.TwitterCrawler.java
/** * * @return @throws TwitterException// w w w . j a v a 2 s . c om * @throws InterruptedException returns the number of tweets */ public TwitterApiResponse stream() throws TwitterException, InterruptedException { //************************ Variables ************************* TwitterApiResponse response = new TwitterApiResponse(); long timeNow = System.currentTimeMillis(); long afterInterval = timeNow + (interval * 1000); int countTweets = 0; //define the tweet id that the application should read relative to the IDs of Tweets it has already processed long maxID = 0; ArrayList<Status> tweets = new ArrayList<Status>(); Query query = new Query(keywords); //************************ Action ************************* //max 450 calls in 15 mins -->30/min, 5 calls/10 secs while (timeNow < afterInterval) { try { if (sinceID != 0) { query.setSinceId(sinceID); } if (maxID != 0) { query.setMaxId(maxID); } query.setResultType(Query.ResultType.recent); query.setCount(100); QueryResult result = twitter.search(query); tweets.addAll(result.getTweets()); if (tweets.size() == 100) { countTweets += tweets.size(); maxID = tweets.get(tweets.size() - 1).getId() - 1; } else { countTweets += tweets.size(); break; } timeNow = System.currentTimeMillis(); } catch (TwitterException te) { System.out.println("Couldn't connect: " + te); } } sinceID = tweets.get(0).getId(); response.setCount(countTweets); response.setSinceID(sinceID); return response; }
From source file:crawler.TwitterFeed.java
/**
 * Fetches up to 500 recent English tweets per company hashtag (companies resolved from
 * ticker symbols via {@code CSVReader}) and inserts each tweet into the local MySQL
 * table {@code TwitterFeed}.
 *
 * SECURITY NOTE(review): OAuth consumer/access credentials are hard-coded below —
 * they should be moved to external configuration and rotated.
 *
 * @throws SQLException if the database connection or insert fails outside the
 *         per-row try/catch
 */
public void get_tweets() throws SQLException {
    CSVReader cr = new CSVReader();
    ArrayList<String> names = new ArrayList<>();
    Map<String, String> name_ticker; // declared but never used in this method
    names = cr.get_company_names(tickers);
    System.out.println(names);
    // Build an authenticated twitter4j client. NOTE(review): credentials in source.
    ConfigurationBuilder cb = new ConfigurationBuilder();
    cb.setOAuthConsumerKey("WNSycI2GS33ZAwHJ9Fb4A");
    cb.setOAuthConsumerSecret("BBWeSThT3ZV2g9c5BDuUduDyNAWyzouMI0XjQy7KUc");
    cb.setOAuthAccessToken("1852271029-SLfE061bImfcRxWQZpy1pAgpEkfFhg3757Q9JRf");
    cb.setOAuthAccessTokenSecret("1uPSfYPbaENtXFYWsryBIAHnUsmG3pT6lGH0NzxTCGW7O");
    Twitter twitter = new TwitterFactory(cb.build()).getInstance();
    for (String s : names) {
        // Use only the first token of the company name as the hashtag term.
        String[] temp;
        if (s.contains(",")) {
            temp = s.split(",");
        } else {
            temp = s.split(" ");
        }
        System.out.println(
                "--------------------------------------------------------------------------------------"
                        + temp[0]);
        Query query = new Query("#" + temp[0]);
        query.lang("en");
        int numberOfTweets = 500;
        // Smallest tweet id seen so far; used as the max_id cursor to page backwards.
        long lastID = Long.MAX_VALUE;
        ArrayList<Status> tweets = new ArrayList<>();
        // Page until we have numberOfTweets.
        // NOTE(review): if twitter.search keeps throwing (rate limit, network), the
        // catch below swallows the error and this loop never terminates — confirm.
        while (tweets.size() < numberOfTweets) {
            // The search API caps a single request at 100 tweets.
            if (numberOfTweets - tweets.size() > 100) {
                query.setCount(100);
            } else {
                query.setCount(numberOfTweets - tweets.size());
            }
            try {
                QueryResult result = twitter.search(query);
                tweets.addAll(result.getTweets());
                System.out.println("Gathered " + tweets.size() + " tweets");
                // NOTE(review): rescans the whole accumulated list each pass (O(n^2));
                // scanning only result.getTweets() would give the same lastID.
                for (Status t : tweets) {
                    if (t.getId() < lastID) {
                        lastID = t.getId();
                    }
                }
            } catch (TwitterException te) {
                System.out.println("Couldn't connect: " + te);
            }
            // Continue the next page strictly below the oldest id seen.
            query.setMaxId(lastID - 1);
        }
        // One connection per company; closed after the insert loop below.
        Connection con = DriverManager.getConnection("jdbc:mysql://localhost:3306/CrawledData", "root", "");
        for (int i = 0; i < tweets.size(); i++) {
            Status t = (Status) tweets.get(i);
            //GeoLocation loc = t.getGeoLocation();
            String user = t.getUser().getScreenName();
            String msg = t.getText();
            String time = t.getCreatedAt().toString();
            //if (loc!=null) {
            // Double lat = t.getGeoLocation().getLatitude();
            // Double lon = t.getGeoLocation().getLongitude();
            // System.out.println(i + " USER: " + user + " wrote: " + msg + " located at " + lat + ", " + lon);
            //}
            //else{
            System.out.println(i + " USER: " + user + " wrote: " + msg + " at: " + time);
            //}
            try {
                // NOTE(review): Class.forName inside the loop is redundant — loading the
                // JDBC driver once (or relying on JDBC 4 auto-loading) would suffice.
                Class.forName("com.mysql.jdbc.Driver");
                // INSERT IGNORE silently skips rows violating a unique key.
                PreparedStatement ps = con
                        .prepareStatement("INSERT IGNORE INTO TwitterFeed VALUES (?,?,?,?,?)");
                ps.setString(1, s);
                ps.setString(2, cr.get_ticker_from_company(s));
                ps.setString(3, user);
                ps.setString(4, msg);
                ps.setString(5, time);
                int k = ps.executeUpdate();
                if (k <= 0) {
                    System.out.println("Entry Unsuccessful");
                } else {
                    System.out.println("Entry Successful");
                }
            } catch (ClassNotFoundException | SQLException e) {
                System.out.println(e);
            }
        }
        con.close();
    }
}
From source file:DataCollections.TweetCollections_HashTags.java
public void collectTweetsFromHashtag_popterm(Hashtag_dbo hashtag) throws InterruptedException { long max_id = 0; boolean midused = false, sidused = false; if (hashtag.values[Hashtag_dbo.map.get("max_id")].used) { max_id = hashtag.values[Hashtag_dbo.map.get("")].lnumber; midused = true;/*from w w w . ja v a2 s . c o m*/ } long since_id = 0; if (hashtag.values[Hashtag_dbo.map.get("since_id")].used) { since_id = hashtag.values[Hashtag_dbo.map.get("since_id")].lnumber; sidused = true; } Query q = new Query(hashtag.values[Hashtag_dbo.map.get("hashtag_popterm")].string); //LogPrinter.printLog("Collection Tweets for hashtag_searchterm"+hashtag.values[Hashtag_dbo.map.get("hashtag_popterm")].string); q.setCount(100); if (midused) { q.setMaxId(max_id); } if (sidused) { q.setSinceId(since_id); } QueryResult result = null; try { result = searchres.search(q); } catch (Exception e) { LogPrinter.printLog("Tweet Search Resources Rate Limit reached "); if (e instanceof TwitterException) { } if (e instanceof InterruptedException) { //Thread.sleep(((TwitterException)e).getRetryAfter()*1000+5000); } } int count = 0; for (Status s : result.getTweets()) { Tweet_dbo tweet = thelper.convertStatusToTweet_dbo(s); String whereclause = "tweet_id = " + Long.toString(tweet.values[Tweet_dbo.map.get("tweet_id")].lnumber); tweet.values[Tweet_dbo.map.get("processed")].setValue("true"); tweet.values[Tweet_dbo.map.get("f_search")].setValue("true"); tweet.values[Tweet_dbo.map.get("searchterm")] .setValue(hashtag.values[Hashtag_dbo.map.get("hashtag_popterm")].string); if (TweetsTable.select(whereclause, 0, 2).length == 0) { //LogPrinter.printLog(" Inserting tweet "+count+tweet.values[Tweet_dbo.map.get("tweet_id")].lnumber); TweetsTable.insert(tweet); users_edgescollections.extract_InsertUsers_EdgesFromTweet(s); count++; } } }
From source file:de.jetsli.twitter.TwitterSearch.java
License:Apache License
long search(String term, Collection<Status> result, Map<String, User> userMap, int tweets, long lastMaxCreateTime) throws TwitterException { long maxId = 0L; long maxMillis = 0L; // TODO it looks like only one page is possible with 4.0.0 int maxPages = 1; int hitsPerPage = tweets; boolean breakPaging = false; for (int page = 0; page < maxPages; page++) { Query query = new Query(term); // RECENT or POPULAR query.setResultType(Query.MIXED); // avoid that more recent results disturb our paging! if (page > 0) query.setMaxId(maxId); query.setCount(hitsPerPage);/*from w ww .ja v a 2s.c o m*/ QueryResult res = twitter.search(query); // is res.getTweets() sorted? for (Status twe : res.getTweets()) { // determine maxId in the first page if (page == 0 && maxId < twe.getId()) maxId = twe.getId(); if (maxMillis < twe.getCreatedAt().getTime()) maxMillis = twe.getCreatedAt().getTime(); if (twe.getCreatedAt().getTime() + 1000 < lastMaxCreateTime) breakPaging = true; else { String userName = twe.getUser().getScreenName().toLowerCase(); User user = userMap.get(userName); if (user == null) userMap.put(userName, twe.getUser()); result.add(twe); } } // minMillis could force us to leave earlier than defined by maxPages // or if resulting tweets are less then request (but -10 because of twitter strangeness) if (breakPaging || res.getTweets().size() < hitsPerPage - 10) break; } return maxMillis; }
From source file:de.jetwick.tw.TwitterSearch.java
License:Apache License
long search(String term, Collection<JTweet> result, Map<String, JUser> userMap, int tweets, long lastMaxCreateTime) throws TwitterException { long maxId = 0L; long maxMillis = 0L; int hitsPerPage; int maxPages; if (tweets < 100) { hitsPerPage = tweets;//from w w w . j a v a 2 s .c om maxPages = 1; } else { hitsPerPage = 100; maxPages = tweets / hitsPerPage; if (tweets % hitsPerPage > 0) maxPages++; } boolean breakPaging = false; for (int page = 0; page < maxPages; page++) { Query query = new Query(term); // RECENT or POPULAR query.setResultType(Query.MIXED); // avoid that more recent results disturb our paging! if (page > 0) query.setMaxId(maxId); query.setPage(page + 1); query.setRpp(hitsPerPage); QueryResult res = twitter.search(query); // is res.getTweets() sorted? for (Object o : res.getTweets()) { Tweet twe = (Tweet) o; // determine maxId in the first page if (page == 0 && maxId < twe.getId()) maxId = twe.getId(); if (maxMillis < twe.getCreatedAt().getTime()) maxMillis = twe.getCreatedAt().getTime(); if (twe.getCreatedAt().getTime() + 1000 < lastMaxCreateTime) breakPaging = true; else { String userName = twe.getFromUser().toLowerCase(); JUser user = userMap.get(userName); if (user == null) { user = new JUser(userName).init(twe); userMap.put(userName, user); } result.add(new JTweet(twe, user)); } } // minMillis could force us to leave earlier than defined by maxPages // or if resulting tweets are less then request (but -10 because of twitter strangeness) if (breakPaging || res.getTweets().size() < hitsPerPage - 10) break; } return maxMillis; }
From source file:de.vanita5.twittnuker.loader.support.TweetSearchLoader.java
License:Open Source License
@Override public List<Status> getStatuses(final Twitter twitter, final Paging paging) throws TwitterException { if (twitter == null) return null; final Query query = new Query(processQuery(mQuery)); query.setRpp(paging.getCount());//from w w w . j a va 2 s. c o m if (paging.getMaxId() > 0) { query.setMaxId(paging.getMaxId()); } return Arrays.asList(twitter.search(query).getStatuses()); }
From source file:it.greenvulcano.gvesb.social.twitter.directcall.TwitterOperationSearch.java
License:Open Source License
/**
 * Executes a Twitter search built from this operation's string fields (query, sinceId,
 * maxId, since, until, count), follows all result pages via {@code nextQuery()}, and
 * accumulates each tweet into the {@code doc} XML document (root "TwitterQuery").
 *
 * @param account the social account whose proxy object is the twitter4j client
 * @throws SocialAdapterException if the numeric fields fail to parse or the search /
 *         XML formatting fails
 */
@Override
public void execute(SocialAdapterAccount account) throws SocialAdapterException {
    try {
        Twitter twitter = (Twitter) account.getProxyObject();
        Query q = new Query();
        // Each optional field is applied only when non-null and non-empty.
        if ((query != null) && !"".equals(query)) {
            q.setQuery(query);
        }
        if ((sinceId != null) && !"".equals(sinceId)) {
            q.setSinceId(Long.parseLong(sinceId));
        }
        if ((maxId != null) && !"".equals(maxId)) {
            q.setMaxId(Long.parseLong(maxId));
        }
        if ((since != null) && !"".equals(since)) {
            q.setSince(since);
        }
        if ((until != null) && !"".equals(until)) {
            q.setUntil(until);
        }
        if ((count != null) && !"".equals(count)) {
            q.setCount(Integer.parseInt(count));
        }
        XMLUtils parser = null;
        try {
            parser = XMLUtils.getParserInstance();
            // Root element records who ran the query and when (ISO UTC).
            doc = parser.newDocument("TwitterQuery");
            Element root = doc.getDocumentElement();
            parser.setAttribute(root, "user", twitter.getScreenName());
            parser.setAttribute(root, "userId", String.valueOf(twitter.getId()));
            parser.setAttribute(root, "createdAt", DateUtils.nowToString(DateUtils.FORMAT_ISO_DATETIME_UTC));
            QueryResult result;
            // Page through every result set; nextQuery() returns null on the last page.
            do {
                result = twitter.search(q);
                List<Status> tweets = result.getTweets();
                for (Status tweet : tweets) {
                    dumpTweet(parser, root, tweet);
                }
            } while ((q = result.nextQuery()) != null);
        } catch (Exception exc) {
            logger.error("Error formatting TwitterOperationSearch query[" + query + "], sinceId[" + sinceId
                    + "], maxId[" + maxId + "], since[" + since + "], until[" + until + "] and count[" + count
                    + "] response.", exc);
            throw new SocialAdapterException("Error formatting TwitterOperationSearch query[" + query
                    + "], sinceId[" + sinceId + "], maxId[" + maxId + "], since[" + since + "], until[" + until
                    + "] and count[" + count + "] response.", exc);
        } finally {
            // Always return the pooled parser, even on failure.
            XMLUtils.releaseParserInstance(parser);
        }
    } catch (NumberFormatException exc) {
        // Raised by Long.parseLong / Integer.parseInt on malformed field values.
        logger.error("Call to TwitterOperationSearch failed. Check query[" + query + "], sinceId[" + sinceId
                + "], maxId[" + maxId + "], since[" + since + "], until[" + until + "] and count[" + count
                + "] format.", exc);
        throw new SocialAdapterException("Call to TwitterOperationSearch failed. Check query[" + query
                + "], sinceId[" + sinceId + "], maxId[" + maxId + "], since[" + since + "], until[" + until
                + "] and count[" + count + "] format.", exc);
    }
}
From source file:net.lacolaco.smileessence.view.SearchFragment.java
License:Open Source License
/**
 * Pull-up-to-refresh handler: loads the next (older) page of search results and
 * appends non-retweet statuses to the bottom of the list.
 *
 * If the adapter's query is empty, notifies the user on the UI thread and completes
 * the refresh without searching. Otherwise pages backwards by setting max_id to
 * (last shown id - 1).
 *
 * @param refreshView the pull-to-refresh list container to complete when done
 */
@Override
public void onPullUpToRefresh(final PullToRefreshBase<ListView> refreshView) {
    final MainActivity activity = getMainActivity();
    final Account currentAccount = activity.getCurrentAccount();
    Twitter twitter = TwitterApi.getTwitter(currentAccount);
    final SearchListAdapter adapter = getListAdapter(activity);
    String queryString = adapter.getQuery();
    if (TextUtils.isEmpty(queryString)) {
        // No query to run: surface the "empty text" notice on the UI thread and
        // stop the refresh spinner.
        new UIHandler() {
            @Override
            public void run() {
                notifyTextEmpty(activity);
                refreshView.onRefreshComplete();
            }
        }.post();
        return;
    }
    final Query query = SearchTask.getBaseQuery(activity, queryString);
    if (adapter.getCount() > 0) {
        // Fetch strictly older results than what is already displayed.
        query.setMaxId(adapter.getLastID() - 1);
    }
    new SearchTask(twitter, query, activity) {
        @Override
        protected void onPostExecute(QueryResult queryResult) {
            super.onPostExecute(queryResult);
            // NOTE(review): when queryResult is null (failed search) the spinner is
            // never completed here — presumably handled elsewhere; confirm.
            if (queryResult != null) {
                List<twitter4j.Status> tweets = queryResult.getTweets();
                for (twitter4j.Status status : tweets) {
                    // Skip retweets; everything else is wrapped, appended, and
                    // passed through the status filter.
                    if (!status.isRetweet()) {
                        StatusViewModel viewModel = new StatusViewModel(status, currentAccount);
                        adapter.addToBottom(viewModel);
                        StatusFilter.filter(activity, viewModel);
                    }
                }
                updateListViewWithNotice(refreshView.getRefreshableView(), adapter, false);
                refreshView.onRefreshComplete();
            }
        }
    }.execute();
}
From source file:ontoSentiment.Busca.java
public void buscarPorAssunto(String busca, String lang) throws TwitterException { int totalTweets = 0; long maxID = -1; Query q = new Query(busca + " -filter:retweets -filter:links -filter:replies -filter:images"); q.setCount(Util.TWEETS_PER_QUERY); // How many tweets, max, to retrieve q.resultType(Query.ResultType.recent); // Get all tweets q.setLang(lang);/*from ww w . j ava2 s .c om*/ QueryResult r = Util.getTwitter().search(q); do { for (Status s : r.getTweets()) { totalTweets++; if (maxID == -1 || s.getId() < maxID) { maxID = s.getId(); } //System.out.printf("O tweet de id %s disse as %s, @%-20s disse: %s\n", new Long(s.getId()).toString(), s.getCreatedAt().toString(), s.getUser().getScreenName(), Util.cleanText(s.getText())); System.out.println(Util.cleanText(s.getText())); } q = r.nextQuery(); if (q != null) { q.setMaxId(maxID); r = Util.getTwitter().search(q); System.out.println("Total tweets: " + totalTweets); System.out.println("Maximo ID: " + maxID); Util.imprimirRateLimit(Util.RATE_LIMIT_OPTION_SEARCH_TWEETS); } } while (q != null); }
From source file:org.anc.lapps.datasource.twitter.TwitterDatasource.java
/** Contacts the Twitter API and gets any number of tweets corresponding to a certain query. The main * purpose of this function is to avoid the limit of 100 tweets that can be extracted at once. * * @param numberOfTweets the number of tweets to be printed * @param query the query to be searched by the twitter client * @param twitter the twitter client//from w w w . j av a 2s. com * * @return A JSON string containing a Data object with either a list containing the tweets as a payload * (when successful) or a String payload (for errors). */ private Data getTweetsByCount(int numberOfTweets, Query query, Twitter twitter) { ArrayList<Status> tweets = new ArrayList<>(); if (!(numberOfTweets > 0)) { // Default of 15 tweets numberOfTweets = 15; } // Set the last ID to the maximum possible value as a default long lastID = Long.MAX_VALUE; int original; try { while (tweets.size() < numberOfTweets) { // Keep number of original to avoid infinite looping when not getting enough tweets original = tweets.size(); // If there are more than 100 tweets left to be extracted, extract // 100 during the next query, since 100 is the limit to retrieve at once if (numberOfTweets - tweets.size() > 100) query.setCount(100); else query.setCount(numberOfTweets - tweets.size()); // Extract tweets corresponding to the query then add them to the list QueryResult result = twitter.search(query); tweets.addAll(result.getTweets()); // Iterate through the list and get the lastID to know where to start from // if there are more tweets to be extracted for (Status status : tweets) if (status.getId() < lastID) lastID = status.getId(); query.setMaxId(lastID - 1); // Break the loop if the tweet count didn't change. This would prevent an infinite loop when // tweets for the specified query are not available if (tweets.size() == original) break; } } catch (TwitterException te) { // Put the list of tweets in Data format then output as JSon String. 
// Since we checked earlier for errors, we assume that an error occuring at this point due // to Rate Limits is caused by a too high request. Thus, we output the retrieved tweets and log // the error String errorDataJson = generateError(te.getMessage()); logger.error(errorDataJson); if (te.exceededRateLimitation() && tweets.size() > 0) { Data<ArrayList<Status>> tweetsData = new Data<>(); tweetsData.setDiscriminator(Discriminators.Uri.LIST); tweetsData.setPayload(tweets); return tweetsData; } else { return Serializer.parse(errorDataJson, Data.class); } } // Return a special error message if no tweets are found if (tweets.size() == 0) { String noTweetsMessage = "No tweets found for the following query. " + "Note: Twitter's REST API only retrieves tweets from the past week."; String errorDataJson = generateError(noTweetsMessage); return Serializer.parse(errorDataJson, Data.class); } else { // Put the list of tweets in Data format then output as JSon String. Data<ArrayList<Status>> tweetsData = new Data<>(); tweetsData.setDiscriminator(Discriminators.Uri.LIST); tweetsData.setPayload(tweets); return tweetsData; } }