Example usage for org.apache.commons.cli BasicParser BasicParser

Introduction

This page shows example usages of the org.apache.commons.cli.BasicParser constructor, BasicParser(), drawn from open-source projects.

Prototype

public BasicParser()
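
BasicParser is one of the concrete CommandLineParser implementations shipped with Commons CLI; note that it is deprecated since Commons CLI 1.3 in favor of DefaultParser. A minimal, self-contained sketch of typical use (the option names here are purely illustrative):

import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class BasicParserDemo {
    public static void main(String[] args) throws ParseException {
        // define the options the application accepts
        Options options = new Options();
        options.addOption("v", "verbose", false, "enable verbose output");
        options.addOption("f", "file", true, "input file to process");

        // parse the command line
        CommandLineParser parser = new BasicParser();
        CommandLine cmd = parser.parse(options, args);

        if (cmd.hasOption("v")) {
            System.out.println("verbose mode enabled");
        }
        // the two-argument form supplies a default value
        System.out.println("file: " + cmd.getOptionValue("f", "stdin"));
    }
}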

Usage

From source file:benchmarkio.controlcenter.LaunchRocket.java

public static void main(final String[] args) throws Exception {
    // create the parser
    final CommandLineParser parser = new BasicParser();

    // parse the command line arguments
    final CommandLine cmd = parser.parse(options, args);

    if (cmd.hasOption("u")) {
        displayHelp();
    }

    final String host = cmd.getOptionValue("host");
    final int port = Integer.parseInt(cmd.getOptionValue("port"));
    final BrokerType brokerType = BrokerType.valueOf(cmd.getOptionValue("broker-type"));
    final int numConsumers = Integer.parseInt(cmd.getOptionValue("num-consumers"));
    final int numProducers = Integer.parseInt(cmd.getOptionValue("num-producers"));
    final int totalNumberOfMessages = Integer.parseInt(cmd.getOptionValue("total-number-of-messages"));
    final double msgSizeInKB = Double.parseDouble(cmd.getOptionValue("msg-size-in-kb"));

    // Optional options
    final Optional<String> optionalBenchmarkType = Optional.fromNullable(cmd.getOptionValue("benchmark-type"));
    final Optional<String> optionalDurable = Optional.fromNullable(cmd.getOptionValue("durable"));
    // Kafka Specific
    final Optional<String> optionalZookeeper = Optional.fromNullable(cmd.getOptionValue("zookeeper"));
    Optional<String> optionalKafkaProducerType = Optional
            .fromNullable(cmd.getOptionValue("kafka-producer-type"));

    BenchmarkType benchmarkType;
    if (optionalBenchmarkType.isPresent()) {
        benchmarkType = BenchmarkType.valueOf(optionalBenchmarkType.get());
    } else {
        log.info("Benchmark type was not specified, defaulting to: {}", BenchmarkType.PRODUCER_AND_CONSUMER);

        benchmarkType = BenchmarkType.PRODUCER_AND_CONSUMER;
    }

    boolean durable = false;
    if (optionalDurable.isPresent()) {
        durable = Boolean.valueOf(optionalDurable.get());
    } else {
        log.info("Durable parameter was not specified, defaulting to: FALSE");
    }

    if (brokerType == BrokerType.KAFKA) {
        if (!optionalZookeeper.isPresent()) {
            log.error("zookeeper is missing, it is a required property for KAFKA broker");

            System.exit(0);
        }

        if (!optionalKafkaProducerType.isPresent()) {
            log.info("kafka-producer-type is not specified, defaulting to sync");

            optionalKafkaProducerType = Optional.of("sync");
        } else if (!optionalKafkaProducerType.get().equals("sync")
                && !optionalKafkaProducerType.get().equals("async")) {
            log.warn("kafka-producer-type is not one of the accepted sync | async values, defaulting to sync");

            optionalKafkaProducerType = Optional.of("sync");
        }
    }

    log.info("destination (topic or queue): {}", Consts.DESTINATION_NAME);
    log.info("host: {}", host);
    log.info("port: {}", port);
    log.info("broker-type: {}", brokerType);
    log.info("benchmark-type: {}", benchmarkType);
    log.info("durable: {}", durable);
    log.info("num-consumers: {}", numConsumers);
    log.info("num-producers: {}", numProducers);
    log.info("total-number-of-messages: {}", totalNumberOfMessages);
    log.info("msg-size-in-kb: {}", msgSizeInKB);

    if (brokerType == BrokerType.KAFKA) {
        log.info("zookeeper: {}", optionalZookeeper.get());
        log.info("kafka-producer-type: {}", optionalKafkaProducerType.get());
    }

    LaunchRocket.start(brokerType, benchmarkType, durable, host, port, numConsumers, numProducers,
            totalNumberOfMessages, msgSizeInKB, optionalZookeeper, optionalKafkaProducerType);

    System.exit(0);
}
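
The static options field read by main() above is defined elsewhere in LaunchRocket and is not part of this listing. A hypothetical sketch of how a comparable Options object could be assembled; the option names mirror the getOptionValue calls above, but the real project's definitions may differ:

// Hypothetical reconstruction for illustration only.
private static Options buildOptions() {
    Options opts = new Options();
    opts.addOption("u", false, "print usage and exit");
    // long-only options, each taking a single argument
    for (String name : new String[] { "host", "port", "broker-type", "num-consumers",
            "num-producers", "total-number-of-messages", "msg-size-in-kb",
            "benchmark-type", "durable", "zookeeper", "kafka-producer-type" }) {
        opts.addOption(new Option(null, name, true, name.replace('-', ' ')));
    }
    return opts;
}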

From source file:com.enioka.jqm.tools.Main.java

/**
 * Startup method for the packaged JAR.
 * 
 * @param args
 *            args[0] is the node name
 */
@SuppressWarnings("static-access")
public static void main(String[] args) {
    Helpers.setLogFileName("cli");
    Option o00 = OptionBuilder.withArgName("nodeName").hasArg().withDescription("name of the JQM node to start")
            .isRequired().create("startnode");
    Option o01 = OptionBuilder.withDescription("display help").withLongOpt("help").create("h");
    Option o11 = OptionBuilder.withArgName("applicationname").hasArg()
            .withDescription("name of the application to launch").isRequired().create("enqueue");
    Option o21 = OptionBuilder.withArgName("xmlpath").hasArg()
            .withDescription("path of the XML configuration file to import").isRequired()
            .create("importjobdef");
    Option o31 = OptionBuilder.withArgName("xmlpath").hasArg()
            .withDescription("export all queue definitions into an XML file").isRequired()
            .create("exportallqueues");
    OptionBuilder.withArgName("xmlpath").hasArg()
            .withDescription("export some queue definitions into an XML file").isRequired()
            .create("exportqueuefile");
    OptionBuilder.withArgName("queues").hasArg().withDescription("queues to export").withValueSeparator(',')
            .isRequired().create("queue");
    Option o51 = OptionBuilder.withArgName("xmlpath").hasArg()
            .withDescription("import all queue definitions from an XML file").isRequired()
            .create("importqueuefile");
    Option o61 = OptionBuilder.withArgName("nodeName").hasArg()
            .withDescription("creates a JQM node of this name, or updates it if it exists. Implies -u.")
            .isRequired().create("createnode");
    Option o71 = OptionBuilder.withDescription("display JQM engine version").withLongOpt("version").create("v");
    Option o81 = OptionBuilder.withDescription("upgrade JQM database").withLongOpt("upgrade").create("u");
    Option o91 = OptionBuilder.withArgName("jobInstanceId").hasArg()
            .withDescription("get job instance status by ID").isRequired().withLongOpt("getstatus").create("g");
    Option o101 = OptionBuilder.withArgName("password").hasArg()
            .withDescription("creates or resets root admin account password").isRequired().withLongOpt("root")
            .create("r");
    Option o111 = OptionBuilder.withArgName("option").hasArg()
            .withDescription(
                    "ws handling. Possible values are: enable, disable, ssl, nossl, internalpki, externalapi")
            .isRequired().withLongOpt("gui").create("w");
    Option o121 = OptionBuilder.withArgName("id[,logfilepath]").hasArg().withDescription("single launch mode")
            .isRequired().withLongOpt("gui").create("s");
    Option o131 = OptionBuilder.withArgName("resourcefile").hasArg()
            .withDescription("resource parameter file to use. Default is resources.xml")
            .withLongOpt("resources").create("p");
    Option o141 = OptionBuilder.withArgName("login,password,role1,role2,...").hasArgs(Option.UNLIMITED_VALUES)
            .withValueSeparator(',')
            .withDescription("Create or update a JQM account. Roles must exist beforehand.").create("U");

    Options options = new Options();
    OptionGroup og1 = new OptionGroup();
    og1.setRequired(true);
    og1.addOption(o00);
    og1.addOption(o01);
    og1.addOption(o11);
    og1.addOption(o21);
    og1.addOption(o31);
    og1.addOption(o51);
    og1.addOption(o61);
    og1.addOption(o71);
    og1.addOption(o81);
    og1.addOption(o91);
    og1.addOption(o101);
    og1.addOption(o111);
    og1.addOption(o121);
    og1.addOption(o141);
    options.addOptionGroup(og1);
    OptionGroup og2 = new OptionGroup();
    og2.addOption(o131);
    options.addOptionGroup(og2);

    HelpFormatter formatter = new HelpFormatter();
    formatter.setWidth(160);

    try {
        // Parse arguments
        CommandLineParser parser = new BasicParser();
        CommandLine line = parser.parse(options, args);

        // Other db connection?
        if (line.getOptionValue(o131.getOpt()) != null) {
            jqmlogger.info("Using resource XML file " + line.getOptionValue(o131.getOpt()));
            Helpers.resourceFile = line.getOptionValue(o131.getOpt());
        }

        // Set db connection
        Helpers.registerJndiIfNeeded();

        // Enqueue
        if (line.getOptionValue(o11.getOpt()) != null) {
            enqueue(line.getOptionValue(o11.getOpt()));
        }
        // Get status
        if (line.getOptionValue(o91.getOpt()) != null) {
            getStatus(Integer.parseInt(line.getOptionValue(o91.getOpt())));
        }
        // Import XML
        else if (line.getOptionValue(o21.getOpt()) != null) {
            importJobDef(line.getOptionValue(o21.getOpt()));
        }
        // Start engine
        else if (line.getOptionValue(o00.getOpt()) != null) {
            startEngine(line.getOptionValue(o00.getOpt()));
        }
        // Export all Queues
        else if (line.getOptionValue(o31.getOpt()) != null) {
            exportAllQueues(line.getOptionValue(o31.getOpt()));
        }
        // Import queues
        else if (line.getOptionValue(o51.getOpt()) != null) {
            importQueues(line.getOptionValue(o51.getOpt()));
        }
        // Create node
        else if (line.getOptionValue(o61.getOpt()) != null) {
            createEngine(line.getOptionValue(o61.getOpt()));
        }
        // Upgrade
        else if (line.hasOption(o81.getOpt())) {
            upgrade();
        }
        // Help
        else if (line.hasOption(o01.getOpt())) {
            formatter.printHelp("java -jar jqm-engine.jar", options, true);
        }
        // Version
        else if (line.hasOption(o71.getOpt())) {
            jqmlogger.info("Engine version: " + Helpers.getMavenVersion());
        }
        // Root password
        else if (line.hasOption(o101.getOpt())) {
            root(line.getOptionValue(o101.getOpt()));
        }
        // Web options
        else if (line.hasOption(o111.getOpt())) {
            ws(line.getOptionValue(o111.getOpt()));
        }
        // Single launch mode
        else if (line.hasOption(o121.getOpt())) {
            single(line.getOptionValue(o121.getOpt()));
        }
        // User handling
        else if (line.hasOption(o141.getOpt())) {
            user(line.getOptionValues(o141.getOpt()));
        }
    } catch (ParseException exp) {
        jqmlogger.fatal("Could not read command line: " + exp.getMessage());
        formatter.printHelp("java -jar jqm-engine.jar", options, true);
        return;
    }
}
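
OptionBuilder, like BasicParser, is deprecated as of Commons CLI 1.3. For comparison, a sketch of the first option above ("startnode") rewritten against the replacement Option.Builder and DefaultParser APIs:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class BuilderStyle {
    public static void main(String[] args) throws ParseException {
        Option startNode = Option.builder("startnode")
                .argName("nodeName")
                .hasArg()
                .desc("name of the JQM node to start")
                .required()
                .build();
        Options options = new Options();
        options.addOption(startNode);
        CommandLineParser parser = new DefaultParser(); // replaces BasicParser
        CommandLine line = parser.parse(options, args);
        System.out.println("node: " + line.getOptionValue("startnode"));
    }
}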

From source file:junkProducer.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer KafkaDataStore object
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    //        dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "junk";
    final String sftSchema = "trainStatus:String,trainCode:String,publicMessage:String,direction:String,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);

    // get the feature store through which the producer writes features
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer on an interval
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    addSimpleFeatures(sft, producerFS);

    System.exit(0);
}
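
The getCommonRequiredOptions() and getKafkaDataStoreConf(cmd) helpers are defined elsewhere in this project. A hypothetical sketch of what the former might look like; the actual option names used by the GeoMesa quick starts may differ, so "brokers" and "zookeepers" here are assumptions:

// Hypothetical reconstruction; option names are assumptions for illustration.
private static Options getCommonRequiredOptions() {
    Options options = new Options();
    Option brokers = new Option(null, "brokers", true, "comma-separated list of Kafka brokers");
    brokers.setRequired(true);
    options.addOption(brokers);
    Option zookeepers = new Option(null, "zookeepers", true, "Zookeeper connection string");
    zookeepers.setRequired(true);
    options.addOption(zookeepers);
    return options;
}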

From source file:com.genentech.retrival.SDFExport.SDFExporter.java

public static void main(String[] args) throws ParseException, JDOMException, IOException {
    // create command line Options object
    Options options = new Options();
    Option opt = new Option("sqlFile", true, "sql-xml file");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option("sqlName", true, "name of SQL element in xml file");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option("molSqlName", true,
            "name of SQL element in xml file returning molfile for first column in sqlName");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("removeSalt", false, "remove any known salts from structure.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("o", "out", true, "output file");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("i", "in", true,
            "input file, tab separated, each line executes the query once. Use '.tab' to read from stdin");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("newLineReplacement", true,
            "If given newlines in fields will be replaced by this string.");
    options.addOption(opt);

    opt = new Option("ignoreMismatches", false, "If not given each input must return at least one hit hits.");
    options.addOption(opt);

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }

    String outFile = cmd.getOptionValue("o");
    String inFile = cmd.getOptionValue("i");
    String sqlFile = cmd.getOptionValue("sqlFile");
    String sqlName = cmd.getOptionValue("sqlName");
    String molSqlName = cmd.getOptionValue("molSqlName");
    String newLineReplacement = cmd.getOptionValue("newLineReplacement");

    boolean removeSalt = cmd.hasOption("removeSalt");
    boolean ignoreMismatches = cmd.hasOption("ignoreMismatches");

    SDFExporter exporter = null;
    try {
        exporter = new SDFExporter(sqlFile, sqlName, molSqlName, outFile, newLineReplacement, removeSalt,
                ignoreMismatches);
    } catch (Exception e) {
        e.printStackTrace();
        System.err.println();
        exitWithHelp(options);
    }

    args = cmd.getArgs();

    if (inFile != null && args.length > 0) {
        System.err.println("inFile and arguments may not be mixed!\n");
        exitWithHelp(options);
    }

    if (inFile == null) {
        if (exporter.getParamTypes().length != args.length) {
            System.err.printf("sql statement (%s) needs %d parameters but got only %d.\n", sqlName,
                    exporter.getParamTypes().length, args.length);
            exitWithHelp(options);
        }

        exporter.export(args);
    } else {
        BufferedReader in;
        if (".tab".equalsIgnoreCase(inFile))
            in = new BufferedReader(new InputStreamReader(System.in));
        else
            in = new BufferedReader(new FileReader(inFile));

        exporter.export(in);
        in.close();
    }

    exporter.close();
}
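
exitWithHelp(options) is a helper defined elsewhere in SDFExporter; a typical (hypothetical) implementation prints the usage text with HelpFormatter and terminates with a non-zero status:

// Hypothetical helper; the real SDFExporter defines its own version.
private static void exitWithHelp(Options options) {
    new HelpFormatter().printHelp("SDFExporter", options, true);
    System.exit(1);
}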

From source file:ed.util.LicenseHeaderCheck.java

public static void main(String[] args) throws Exception {

    Options o = new Options();
    o.addOption("r", false, "recursive");
    o.addOption("skip", true, "substrings not to match");

    CommandLine cl = (new BasicParser()).parse(o, args);

    if (cl.getArgList().size() < 2) {
        System.err.println("usage: LicenseHeaderCheck [-r] <header file> <dir or files>");
        return;
    }

    LicenseHeaderCheck checker = new LicenseHeaderCheck(new File(cl.getArgList().get(0).toString()),
            cl.hasOption("r"));

    if (cl.getOptionValues("skip") != null)
        for (String skip : cl.getOptionValues("skip"))
            checker.addSkip(skip);

    for (int i = 1; i < cl.getArgList().size(); i++) {
        checker.go(new File(cl.getArgList().get(i).toString()));
    }
}

From source file:com.spotify.cassandra.opstools.autobalance.Main.java

public static void main(String[] args) throws IOException, InterruptedException, ParseException {
    final Options options = new Options();
    options.addOption("f", "force", false, "Force auto balance");
    options.addOption("d", "dryrun", false, "Dry run");
    options.addOption("r", "noresolve", false, "Don't resolve host names");
    options.addOption("h", "host", true, "Host to connect to (default: localhost)");
    options.addOption("p", "port", true, "Port to connect to (default: 7199)");

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);
    new Main().run(cmd);
}
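
Since host and port are declared optional, run(cmd) presumably falls back to the defaults named in the descriptions. Commons CLI supports this directly through the two-argument getOptionValue; a sketch of how run(cmd) might read them:

// Inside run(cmd) (hypothetical), falling back to the documented defaults:
String host = cmd.getOptionValue("host", "localhost");
int port = Integer.parseInt(cmd.getOptionValue("port", "7199"));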

From source file:knowledgeMiner.InformationDripBootstrapping.java

public static void main(String[] args) {
    Options options = new Options();
    options.addOption("r", true, "The number of ripples (-1 for unlimited).");
    options.addOption("c", true, "The concept to begin with (\"article\" or #concept).");
    options.addOption("N", true, "The initial hashmap size for the nodes.");
    options.addOption("i", true, "Initial run number.");

    CommandLineParser parser = new BasicParser();
    try {
        CommandLine parse = parser.parse(options, args);
        InformationDripBootstrapping rb = new InformationDripBootstrapping(parse.getOptionValue("c"),
                parse.getOptionValue("r"), parse.getOptionValue("N"), parse.getOptionValue("i"));
        rb.run();
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }
}

From source file:com.example.geomesa.kafka08.KafkaQuickStart.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart08";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    if (!cmd.hasOption("automated")) {
        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();
    }

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();

    String vis = cmd.getOptionValue(VISIBILITY);
    if (vis != null)
        System.out.println("Writing features with " + vis);
    addSimpleFeatures(sft, producerFS, vis);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // the state of the two SimpleFeatures is real time here
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // Run Java command with -Dclear=true
        // This will cause a 'clear'
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}

From source file:com.example.geomesa.kafka09.KafkaQuickStart.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart09";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    if (!cmd.hasOption("automated")) {
        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();
    }

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();

    String vis = cmd.getOptionValue(VISIBILITY);
    if (vis != null)
        System.out.println("Writing features with " + vis);
    addSimpleFeatures(sft, producerFS, vis);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // the state of the two SimpleFeatures is real time here
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // Run Java command with -Dclear=true
        // This will cause a 'clear'
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}

From source file:com.example.geomesa.kafka10.KafkaQuickStart.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart10";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    if (!cmd.hasOption("automated")) {
        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();
    }

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();

    String vis = cmd.getOptionValue(VISIBILITY);
    if (vis != null)
        System.out.println("Writing features with " + vis);
    addSimpleFeatures(sft, producerFS, vis);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // the state of the two SimpleFeatures is real time here
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // Run Java command with -Dclear=true
        // This will cause a 'clear'
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}