Example usage for twitter4j TwitterObjectFactory getRawJSON

List of usage examples for twitter4j TwitterObjectFactory getRawJSON

Introduction

On this page you can find example usage of twitter4j's TwitterObjectFactory.getRawJSON.

Prototype

public static String getRawJSON(Object obj) 

Document

Returns a raw JSON form of the provided object.
Note that a raw JSON form can be retrieved only from the thread that invoked the last method call, and it becomes inaccessible once another method call is made.
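
Because of this restriction, getRawJSON only works when the JSON store is enabled (the jsonStoreEnabled configuration property) and when it is called right after the API call, on the same thread. A minimal sketch, assuming credentials are supplied via twitter4j.properties:

import java.util.List;

import twitter4j.Status;
import twitter4j.Twitter;
import twitter4j.TwitterException;
import twitter4j.TwitterFactory;
import twitter4j.TwitterObjectFactory;
import twitter4j.conf.ConfigurationBuilder;

public class RawJsonExample {
    public static void main(String[] args) throws TwitterException {
        // Without jsonStoreEnabled=true, getRawJSON returns null.
        ConfigurationBuilder cb = new ConfigurationBuilder().setJSONStoreEnabled(true);
        Twitter twitter = new TwitterFactory(cb.build()).getInstance();

        List<Status> statuses = twitter.getHomeTimeline();
        for (Status status : statuses) {
            // Call getRawJSON on the same thread, before the next Twitter method call.
            String rawJSON = TwitterObjectFactory.getRawJSON(status);
            System.out.println(rawJSON);
        }
    }
}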

Usage

From source file:ac.simons.tweetarchive.tweets.UserStreamAdapterImpl.java

License:Apache License

@Override
public void onStatus(final Status status) {
    this.tweetStorageService.store(status, TwitterObjectFactory.getRawJSON(status));
}
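
TwitterObjectFactory.getRawJSON can return null when the JSON store is not enabled or the raw form has already been discarded, so a defensive variant of the same handler (a sketch, not part of the original source) might guard the call:

@Override
public void onStatus(final Status status) {
    final String rawJSON = TwitterObjectFactory.getRawJSON(status); // null if jsonStoreEnabled is off
    if (rawJSON != null) {
        this.tweetStorageService.store(status, rawJSON);
    }
}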

From source file:at.aictopic1.twitter.AICStream.java

private StatusListener getListener() {
    return new StatusListener() {
        public void onStatus(Status status) {
            String rawJSON = TwitterObjectFactory.getRawJSON(status);

            File file = new File(AICStream.filePath);

            BufferedWriter writer = null;
            try {
                writer = new BufferedWriter(new FileWriter(file, true));
                writer.append(rawJSON);
                writer.newLine();
                writer.flush();
            } catch (FileNotFoundException ex) {
                Logger.getLogger(AICStream.class.getName()).log(Level.SEVERE, null, ex);
            } catch (IOException ex) {
                Logger.getLogger(AICStream.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (writer != null) {
                    try {
                        writer.close();
                    } catch (IOException ex) {
                        Logger.getLogger(AICStream.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
        }

        public void onDeletionNotice(StatusDeletionNotice statusDeletionNotice) {
            System.out.println("Got a status deletion notice id:" + statusDeletionNotice.getStatusId());
        }

        public void onTrackLimitationNotice(int numberOfLimitedStatuses) {
            System.out.println("Got track limitation notice:" + numberOfLimitedStatuses);
        }

        public void onException(Exception ex) {
            ex.printStackTrace();
        }

        public void onScrubGeo(long userId, long upToStatusId) {
            System.out.println("Got scrub_geo event userId:" + userId + " upToStatusId:" + upToStatusId);
        }

        public void onStallWarning(StallWarning warning) {
            System.out.println("Got stall warning:" + warning);
        }
    };
}
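
The onStatus body above can be written more compactly with try-with-resources; the following is an equivalent sketch of just that handler (assuming Java 7 or later):

public void onStatus(Status status) {
    String rawJSON = TwitterObjectFactory.getRawJSON(status);

    // Appends one line of raw JSON per status; the writer is closed automatically.
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(AICStream.filePath, true))) {
        writer.append(rawJSON);
        writer.newLine();
    } catch (IOException ex) {
        Logger.getLogger(AICStream.class.getName()).log(Level.SEVERE, null, ex);
    }
}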

From source file:cc.twittertools.util.ExtractSubcollection.java

License:Apache License

@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(OptionBuilder.withArgName("dir").hasArg().withDescription("source collection directory")
            .create(COLLECTION_OPTION));
    options.addOption(
            OptionBuilder.withArgName("file").hasArg().withDescription("list of tweetids").create(ID_OPTION));
    options.addOption(
            OptionBuilder.withArgName("file").hasArg().withDescription("output JSON").create(OUTPUT_OPTION));
    options.addOption(OptionBuilder.withArgName("file").hasArg()
            .withDescription("file to store missing tweeids").create(MISSING_OPTION));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (!cmdline.hasOption(COLLECTION_OPTION) || !cmdline.hasOption(ID_OPTION)
            || !cmdline.hasOption(OUTPUT_OPTION) || !cmdline.hasOption(MISSING_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(ExtractSubcollection.class.getName(), options);
        System.exit(-1);
    }

    String outputFile = cmdline.getOptionValue(OUTPUT_OPTION);
    String missingFile = cmdline.getOptionValue(MISSING_OPTION);
    String collectionPath = cmdline.getOptionValue(COLLECTION_OPTION);

    LongOpenHashSet tweetids = new LongOpenHashSet();
    File tweetidsFile = new File(cmdline.getOptionValue(ID_OPTION));
    if (!tweetidsFile.exists()) {
        System.err.println("Error: " + tweetidsFile + " does not exist!");
        System.exit(-1);
    }
    LOG.info("Reading tweetids from " + tweetidsFile);

    FileInputStream fin = new FileInputStream(tweetidsFile);
    BufferedReader br = new BufferedReader(new InputStreamReader(fin));

    String s;
    while ((s = br.readLine()) != null) {
        tweetids.add(Long.parseLong(s));
    }
    br.close();
    fin.close();
    LOG.info("Read " + tweetids.size() + " tweetids.");

    File file = new File(collectionPath);
    if (!file.exists()) {
        System.err.println("Error: " + file + " does not exist!");
        System.exit(-1);
    }

    // Store tweet ids we've already seen to dedup.
    LongOpenHashSet seen = new LongOpenHashSet();

    Writer out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8"));

    StatusStream stream = new JsonStatusCorpusReader(file);
    Status status;
    while ((status = stream.next()) != null) {
        if (tweetids.contains(status.getId()) && !seen.contains(status.getId())) {
            out.write(TwitterObjectFactory.getRawJSON(status) + "\n");
            seen.add(status.getId());
        }
    }
    stream.close();
    out.close();

    LOG.info("Extracted " + seen.size() + " tweetids.");
    LOG.info("Storing missing tweetids...");

    out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(missingFile), "UTF-8"));
    LongIterator iter = tweetids.iterator();
    while (iter.hasNext()) {
        long t = iter.nextLong();
        if (!seen.contains(t)) {
            out.write(t + "\n");
        }
    }
    out.close();

    LOG.info("Done!");
}

From source file:cc.twittertools.util.VerifySubcollection.java

License:Apache License

@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(OptionBuilder.withArgName("dir").hasArg().withDescription("source collection directory")
            .create(COLLECTION_OPTION));
    options.addOption(
            OptionBuilder.withArgName("file").hasArg().withDescription("list of tweetids").create(ID_OPTION));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (!cmdline.hasOption(COLLECTION_OPTION) || !cmdline.hasOption(ID_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(ExtractSubcollection.class.getName(), options);
        System.exit(-1);
    }

    String collectionPath = cmdline.getOptionValue(COLLECTION_OPTION);

    LongOpenHashSet tweetids = new LongOpenHashSet();
    File tweetidsFile = new File(cmdline.getOptionValue(ID_OPTION));
    if (!tweetidsFile.exists()) {
        System.err.println("Error: " + tweetidsFile + " does not exist!");
        System.exit(-1);
    }
    LOG.info("Reading tweetids from " + tweetidsFile);

    FileInputStream fin = new FileInputStream(tweetidsFile);
    BufferedReader br = new BufferedReader(new InputStreamReader(fin));

    String s;
    while ((s = br.readLine()) != null) {
        tweetids.add(Long.parseLong(s));
    }
    br.close();
    fin.close();
    LOG.info("Read " + tweetids.size() + " tweetids.");

    File file = new File(collectionPath);
    if (!file.exists()) {
        System.err.println("Error: " + file + " does not exist!");
        System.exit(-1);
    }

    LongOpenHashSet seen = new LongOpenHashSet();
    TreeMap<Long, String> tweets = Maps.newTreeMap();

    PrintStream out = new PrintStream(System.out, true, "UTF-8");
    StatusStream stream = new JsonStatusCorpusReader(file);
    Status status;
    int cnt = 0;
    while ((status = stream.next()) != null) {
        if (!tweetids.contains(status.getId())) {
            LOG.error("tweetid " + status.getId() + " doesn't belong in collection");
            continue;
        }
        if (seen.contains(status.getId())) {
            LOG.error("tweetid " + status.getId() + " already seen!");
            continue;
        }

        tweets.put(status.getId(), TwitterObjectFactory.getRawJSON(status));
        seen.add(status.getId());
        cnt++;
    }
    LOG.info("total of " + cnt + " tweets in subcollection.");

    for (Map.Entry<Long, String> entry : tweets.entrySet()) {
        out.println(entry.getValue());
    }

    stream.close();
    out.close();
}

From source file:CollectionOfMentions.ControllerCollectionOfMentions.java

License:Open Source License

public void run() {

    startDateTime = new DateTime();

    //checks on dates to make sure the duration is not above 7 days
    if (numberOfMinutes < 0) {
        numberOfMinutes = 0;
    }
    if (numberOfMinutes > 59) {
        numberOfMinutes = 59;
    }
    if (numberOfHours > 24) {
        numberOfHours = 24;
    }
    if (numberOfHours < 0) {
        numberOfHours = 0;
    }
    if (numberOfDays > 7) {
        numberOfDays = 7;
    }
    if (numberOfDays < 0) {
        numberOfDays = 0;
    }

    stopTime = startDateTime.getMillis() + numberOfMinutes * 60000 + numberOfHours * 3600000
            + numberOfDays * 3600000 * 24;
    if (stopTime - startDateTime.getMillis() > 3600000 * 24 * 7) {
        stopTime = startDateTime.getMillis() + 3600000 * 24 * 7;
    }

    //registers actual start time in the status field
    opsJobInfo = dsJobsInfo.createUpdateOperations(JobInfo.class).set("status",
            String.valueOf(startDateTime.getMillis()));
    dsJobsInfo.update(updateQueryJobInfo, opsJobInfo, false, WriteConcern.UNACKNOWLEDGED);

    //registers actual end time in the end field
    opsJobInfo = dsJobsInfo.createUpdateOperations(JobInfo.class).set("end", String.valueOf(stopTime));
    dsJobsInfo.update(updateQueryJobInfo, opsJobInfo, false, WriteConcern.UNACKNOWLEDGED);

    final Object lock = new Object();

    StatusListener listener;
    listener = new StatusListener() {
        @Override
        public void onStatus(Status status) {
            nbTweets++;

            if (System.currentTimeMillis() > stopTime) {

                //updating the job one last time
                //**************************************
                //saving statuses to the db.
                //                    if (!twitterStatuses.isEmpty()) {
                //                        opsJob = dsJobs.createUpdateOperations(Job.class).addAll("statuses", statusesIds, true);
                //                        dsJobs.update(updateQueryJob, opsJob);
                //
                //                        dsTweets.save(twitterStatuses);
                //                    }
                // 91 is the status code indicating the twitter stream has stopped collecting.
                progress = 91;

                //recording the progress, nbTweets and end time of the job
                opsJobInfo = dsJobsInfo.createUpdateOperations(JobInfo.class).set("progress", progress)
                        .set("nbTweets", nbTweets).set("end", System.currentTimeMillis());
                dsJobsInfo.update(updateQueryJobInfo, opsJobInfo);

                synchronized (lock) {
                    lock.notify();
                }

            } else {

                tweet = new Tweet();
                tweet.setStatus(TwitterObjectFactory.getRawJSON(status));
                tweet.setIdTweet(nbTweets);
                tweet.setJobId(jobUUID);
                ////                    System.out.println("@" + status.getUser().getScreenName() + " - " + status.getText());
                //                    twitterStatus = convertStatus.convertOneToTwitterStatus(status);
                //                    twitterStatus.setJobId(jobUUID);
                //                    twitterStatuses.add(twitterStatus);
                //                    
                //                    
                //
                //                    statusesIds.add(status.getId());
                //                    timeSinceLastStatus = System.currentTimeMillis() - timeLastStatus;
                //
                //                    //**************************************
                //                    //adjusting the frequency of saves to DB, function of number of statuses received per second
                //                    if (timeSinceLastStatus < 200) {
                //                        sizeBatch = 100;
                //                    } else {
                //                        sizeBatch = 25;
                //                    }
                //                    timeLastStatus = System.currentTimeMillis();
                //                    progressLong = (Long) ((System.currentTimeMillis() - startDateTime.getMillis()) * 98 / (stopTime - startDateTime.getMillis()));

                //                    if (statusesIds.size() > sizeBatch || progressLong.intValue() > progress) {
                //**************************************
                //saving statuses to the db.
                try {
                    dsTweets.save(tweet, WriteConcern.UNACKNOWLEDGED);
                    opsJobInfo = dsJobsInfo.createUpdateOperations(JobInfo.class).set("nbTweets", nbTweets);
                    dsJobsInfo.update(updateQueryJobInfo, opsJobInfo, false, WriteConcern.UNACKNOWLEDGED);
                } catch (MongoException m) {
                    System.out.println("saving of statuses to the db failed");
                }
                //                        twitterStatuses = new ArrayList();
                //
                //                        //**************************************
                //                        //updating list of status ids of the job.
                //                        opsJob = dsJobs.createUpdateOperations(Job.class).addAll("statuses", statusesIds, true);
                //                        dsJobs.update(updateQueryJob, opsJob);
                //                        statusesIds = new ArrayList();
                //
                //                        //updating progress.
                //                        System.out.println("progress: " + progressLong);
                //                        progress = progressLong.intValue();
                //                        opsJobInfo = dsJobsInfo.createUpdateOperations(JobInfo.class).set("progress", progress).set("nbTweets", nbTweets);
                //                        dsJobsInfo.update(updateQueryJobInfo, opsJobInfo);

                //**************************************
                //                    }
            }
        }

        @Override
        public void onDeletionNotice(StatusDeletionNotice statusDeletionNotice) {
            System.out.println("Got a status deletion notice id:" + statusDeletionNotice.getStatusId());
        }

        @Override
        public void onTrackLimitationNotice(int numberOfLimitedStatuses) {
            System.out.println("Got track limitation notice:" + numberOfLimitedStatuses);
        }

        @Override
        public void onScrubGeo(long userId, long upToStatusId) {
            System.out.println("Got scrub_geo event userId:" + userId + " upToStatusId:" + upToStatusId);
        }

        @Override
        public void onException(Exception ex) {
            System.out.println("Exception: " + ex);
        }

        @Override
        public void onStallWarning(StallWarning sw) {
            System.out.println("Got stall warning:" + sw.getMessage());

        }
    };
    twitterStream.addListener(listener);

    FilterQuery fq = new FilterQuery();
    String[] mentions = mention.split(",");
    fq.track(mentions);

    //        twitterStream.filter(new FilterQuery(0, users, keywords));
    twitterStream.filter(fq);

    try {
        synchronized (lock) {
            lock.wait();
        }
    } catch (InterruptedException e) {
    }
    try {
        twitterStream.shutdown();
        System.out.println("shutdown of twitter stream was successful");
    } catch (Exception e) {
        System.out.println("exception during shutdown of twitter stream");
        System.out.println("error: " + e.getMessage());
    }

}

From source file:com.marklogic.tweetdeck.SaveRawJSON.java

License:Apache License

/**
 * Usage: java twitter4j.examples.json.SaveRawJSON
 *
 * @param args
 *            String[]
 */
public static void main(String[] args) {
    Twitter twitter = new TwitterFactory().getInstance();
    System.out.println("Saving public timeline.");
    try {
        new File("statuses").mkdir();
        List<Status> statuses = twitter.getHomeTimeline();
        for (Status status : statuses) {
            String rawJSON = TwitterObjectFactory.getRawJSON(status);
            String fileName = "statuses/" + status.getId() + ".json";
            storeJSON(rawJSON, fileName);
            System.out.println(fileName + " - " + status.getText());
        }
        System.out.print("\ndone.");
        System.exit(0);
    } catch (IOException ioe) {
        ioe.printStackTrace();
        System.out.println("Failed to store tweets: " + ioe.getMessage());
    } catch (TwitterException te) {
        te.printStackTrace();
        System.out.println("Failed to get timeline: " + te.getMessage());
        System.exit(-1);
    }
}
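
The storeJSON helper is not included in the excerpt above; a plausible implementation (an assumption, modeled on the rest of the method) simply writes the JSON string to the given file in UTF-8:

// Hypothetical helper, not part of the excerpt: writes one tweet's raw JSON to fileName.
private static void storeJSON(String rawJSON, String fileName) throws IOException {
    try (BufferedWriter writer = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(fileName), "UTF-8"))) {
        writer.write(rawJSON);
        writer.flush();
    }
}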

From source file:com.ontotext.s4.TwitterVisualization.downloadTweets.SearchTweets.java

License:Open Source License

/**
 * Saves a Tweet into a file. The name of the file will be the id of the Tweet.
 *
 * @param tweet
 *            Tweet message to save.
 */
private void saveTweetIntoFile(Status tweet) {
    /*
     * checks if the data folder exists; if not, creates it.
     */
    File files = new File(dataFolder);
    if (!files.exists()) {
        files.mkdirs();
    }

    BufferedWriter writer = null;
    try {
        /*
         * create a file in the data folder, named after the tweet id,
         * with a .json extension and UTF-8 encoding
         */
        writer = new BufferedWriter(new OutputStreamWriter(
                new FileOutputStream(dataFolder + "/" + tweet.getId() + ".json"), "UTF-8"));

        /*
         * appends the raw JSON of the tweet, followed by a newline
         */
        writer.append(TwitterObjectFactory.getRawJSON(tweet) + "\n");
    } catch (IOException e) {
        logger.debug(e);
    } finally {
        if (writer != null) {
            try {
                writer.close();
            } catch (IOException e) {
                logger.debug("Something went wrong when closing tweet file.", e);
            }
        }
    }
}

From source file:com.pulzitinc.flume.source.TwitterSource.java

License:Apache License

/**
 * Start processing events. This uses the Twitter Streaming API to sample
 * Twitter, and process tweets.
 */
@Override
public void start() {
    // The channel is the piece of Flume that sits between the Source and Sink,
    // and is used to process events.
    final ChannelProcessor channel = getChannelProcessor();

    final Map<String, String> headers = new HashMap<String, String>();

    // The StatusListener is a twitter4j API, which can be added to a Twitter
    // stream, and will execute methods every time a message comes in through
    // the stream.
    UserStreamListener listener = new UserStreamAdapter() {
        // The onStatus method is executed every time a new tweet comes in.
        public void onStatus(Status status) {
            // The EventBuilder is used to build an event using the headers and
            // the raw JSON of a tweet
            logger.debug(status.getUser().getScreenName() + ": " + status.getText() + " - "
                    + TwitterObjectFactory.getRawJSON(status));

            headers.put("timestamp", String.valueOf(status.getCreatedAt().getTime()));
            Event event = EventBuilder.withBody(DataObjectFactory.getRawJSON(status).getBytes(), headers);

            channel.processEvent(event);
        }

        public void onException(Exception ex) {
            ex.printStackTrace();
        }
    };

    logger.debug("Setting up Twitter stream using consumer key {} and access token {}",
            new String[] { consumerKey, accessToken });

    // Set up the stream's listener (defined above),
    twitterStream.addListener(listener);

    logger.debug("Starting up Twitter consuming...");

    twitterStream.user();

    super.start();
}
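
Note that the listener above logs the tweet via TwitterObjectFactory but builds the Flume event from DataObjectFactory.getRawJSON; DataObjectFactory is the older, deprecated name for the same factory in twitter4j 4.x, and both calls return the same raw JSON string, so the event could equally be built as:

// Equivalent event construction using the non-deprecated factory throughout:
Event event = EventBuilder.withBody(TwitterObjectFactory.getRawJSON(status).getBytes(), headers);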

From source file:com.sinfonier.spouts.Twitter.java

License:Apache License

@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
    xml = new XMLProperties(spoutName, ComponentType.SPOUT, xmlPath);
    _collector = collector;
    SinfonierUtils.broadcastWorker((String) conf.get(Config.TOPOLOGY_NAME), context);

    twitterStream = createTwitterStream();
    queue = new LinkedBlockingQueue<String>(1000);

    StatusListener listener = new StatusListener() {

        @Override
        public void onStatus(Status status) {

            queue.offer(TwitterObjectFactory.getRawJSON(status));
        }

        @Override
        public void onDeletionNotice(StatusDeletionNotice sdn) {
        }

        @Override
        public void onTrackLimitationNotice(int i) {
        }

        @Override
        public void onScrubGeo(long l, long l1) {
        }

        @Override
        public void onException(Exception e) {
        }

        @Override
        public void onStallWarning(StallWarning warning) {
        }

    };

    twitterStream.addListener(listener);

    List<Object> rawKeywords = xml.getList("keyword");
    // String rawKeywords = xml.get("keywords");
    if (rawKeywords.isEmpty()) {
        twitterStream.sample();
    } else {
        List<String> keywords = new ArrayList<String>();
        for (Object field : rawKeywords) {
            keywords.add(((String) field).trim());
        }
        FilterQuery query = new FilterQuery().track(keywords.toArray(new String[keywords.size()]));
        twitterStream.filter(query);
    }
}
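
The open() method only fills the queue; the spout's nextTuple (not shown in this excerpt) would typically poll the queue and emit the raw JSON. A sketch of that pattern, assuming the spout declares a single output field (the field declaration and Values usage are assumptions, not from the source):

@Override
public void nextTuple() {
    String json = queue.poll();            // non-blocking; null when no tweet is waiting
    if (json != null) {
        _collector.emit(new Values(json)); // emit the raw JSON as a one-field tuple
    }
}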

From source file:com.telefonica.iot.cygnus.sources.TwitterSource.java

License:Open Source License

private String getStringJSONTweet(Status status) {
    String jsonTweet = TwitterObjectFactory.getRawJSON(status);
    totalTextIndexed += jsonTweet.length();

    return jsonTweet;
}