Example usage for java.lang Exception Exception

List of usage examples for java.lang Exception Exception

Introduction

On this page you can find usage examples for the java.lang Exception(Throwable cause) constructor.

Prototype

public Exception(Throwable cause) 

Document

Constructs a new exception with the specified cause and a detail message of (cause==null ? null : cause.toString()) (which typically contains the class and detail message of cause).
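
The usage examples below wrap failure conditions with the Exception(String message) overload; the cause-taking constructor shown in the prototype is most useful for rethrowing a lower-level exception while preserving its stack trace. A minimal, self-contained sketch (the class name and input value are illustrative, not taken from the examples below):

public class CauseExample {
    public static void main(String[] args) {
        try {
            try {
                Integer.parseInt("not a number"); // throws NumberFormatException
            } catch (NumberFormatException nfe) {
                // wrap and rethrow; the detail message defaults to nfe.toString()
                throw new Exception(nfe);
            }
        } catch (Exception e) {
            // prints: java.lang.NumberFormatException: For input string: "not a number"
            System.out.println(e.getMessage());
            System.out.println(e.getCause() instanceof NumberFormatException); // true
        }
    }
}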

Usage

From source file:iarnrodProducer.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "iarnrod";
    final String sftSchema = "trainStatus:String,trainCode:String,publicMessage:String,direction:String,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);

    // get the feature store used by the producer to write features to Kafka
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer on an interval
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    addSimpleFeatures(sft, producerFS);

    System.exit(0);
}

From source file:com.example.geomesa.kafka08.KafkaQuickStart.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart08";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    if (!cmd.hasOption("automated")) {
        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();
    }

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();

    String vis = cmd.getOptionValue(VISIBILITY);
    if (vis != null)
        System.out.println("Writing features with " + vis);
    addSimpleFeatures(sft, producerFS, vis);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // at this point the two SimpleFeatures reflect their current, real-time state
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // run the Java command with -Dclear=true to remove all features
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}

From source file:com.example.geomesa.kafka09.KafkaQuickStart.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart09";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    if (!cmd.hasOption("automated")) {
        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();
    }

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();

    String vis = cmd.getOptionValue(VISIBILITY);
    if (vis != null)
        System.out.println("Writing features with " + vis);
    addSimpleFeatures(sft, producerFS, vis);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // at this point the two SimpleFeatures reflect their current, real-time state
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // run the Java command with -Dclear=true to remove all features
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}

From source file:com.example.geomesa.kafka10.KafkaQuickStart.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart10";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    if (!cmd.hasOption("automated")) {
        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();
    }

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();

    String vis = cmd.getOptionValue(VISIBILITY);
    if (vis != null)
        System.out.println("Writing features with " + vis);
    addSimpleFeatures(sft, producerFS, vis);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // at this point the two SimpleFeatures reflect their current, real-time state
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // run the Java command with -Dclear=true to remove all features
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}

From source file:com.example.geomesa.kafka.KafkaQuickStart.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("kafka.consumer.count", "0");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("kafka.consumer.count", "1");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    try {
        // create the schema which creates a topic in Kafka
        // (only needs to be done once)
        final String sftName = "KafkaQuickStart";
        final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
        SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
        producerDS.createSchema(sft);

        if (!cmd.hasOption("automated")) {
            System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
            System.in.read();
        }

        // the live consumer must be created before the producer writes features
        // in order to read streaming data.
        // i.e. the live consumer will only read data written after its instantiation
        SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
        SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

        // creates and adds SimpleFeatures to the producer every 1/5th of a second
        System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
        long replayStart = System.currentTimeMillis();

        String vis = cmd.getOptionValue("visibility");
        if (vis != null)
            System.out.println("Writing features with " + vis);
        addSimpleFeatures(sft, producerFS, vis);
        long replayEnd = System.currentTimeMillis();

        // read from Kafka after writing all the features.
        // LIVE CONSUMER - will obtain the current state of SimpleFeatures
        System.out.println("\nConsuming with the live consumer...");
        SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
        System.out.println(featureCollection.size() + " features were written to Kafka");

        addDeleteNewFeature(sft, producerFS);

        // read from Kafka after writing all the features.
        // LIVE CONSUMER - will obtain the current state of SimpleFeatures
        System.out.println("\nConsuming with the live consumer...");
        featureCollection = consumerFS.getFeatures();
        System.out.println(featureCollection.size() + " features were written to Kafka");

        // at this point the two SimpleFeatures reflect their current, real-time state
        System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
        SimpleFeatureIterator featureIterator = featureCollection.features();
        SimpleFeature feature1 = featureIterator.next();
        SimpleFeature feature2 = featureIterator.next();
        featureIterator.close();
        printFeature(feature1);
        printFeature(feature2);

        if (System.getProperty("clear") != null) {
            // run the Java command with -Dclear=true to remove all features
            producerFS.removeFeatures(Filter.INCLUDE);
        }
    } finally {
        producerDS.dispose();
        consumerDS.dispose();
    }

    System.exit(0);
}

From source file:edu.cmu.lti.oaqa.knn4qa.apps.ExtractDataAndQueryAsSparseVectors.java

public static void main(String[] args) {
    String optKeys[] = { CommonParams.MAX_NUM_QUERY_PARAM, MAX_NUM_DATA_PARAM, CommonParams.MEMINDEX_PARAM,
            IN_QUERIES_PARAM, OUT_QUERIES_PARAM, OUT_DATA_PARAM, TEXT_FIELD_PARAM, TEST_QTY_PARAM, };
    String optDescs[] = { CommonParams.MAX_NUM_QUERY_DESC, MAX_NUM_DATA_DESC, CommonParams.MEMINDEX_DESC,
            IN_QUERIES_DESC, OUT_QUERIES_DESC, OUT_DATA_DESC, TEXT_FIELD_DESC, TEST_QTY_DESC };
    boolean hasArg[] = { true, true, true, true, true, true, true, true };

    ParamHelper prmHlp = null;

    try {

        prmHlp = new ParamHelper(args, optKeys, optDescs, hasArg);

        CommandLine cmd = prmHlp.getCommandLine();
        Options opt = prmHlp.getOptions();

        int maxNumQuery = Integer.MAX_VALUE;

        String tmpn = cmd.getOptionValue(CommonParams.MAX_NUM_QUERY_PARAM);
        if (tmpn != null) {
            try {
                maxNumQuery = Integer.parseInt(tmpn);
            } catch (NumberFormatException e) {
                UsageSpecify(CommonParams.MAX_NUM_QUERY_PARAM, opt);
            }
        }

        int maxNumData = Integer.MAX_VALUE;
        tmpn = cmd.getOptionValue(MAX_NUM_DATA_PARAM);
        if (tmpn != null) {
            try {
                maxNumData = Integer.parseInt(tmpn);
            } catch (NumberFormatException e) {
                UsageSpecify(MAX_NUM_DATA_PARAM, opt);
            }
        }
        String memIndexPref = cmd.getOptionValue(CommonParams.MEMINDEX_PARAM);
        if (null == memIndexPref) {
            UsageSpecify(CommonParams.MEMINDEX_PARAM, opt);
        }
        String textField = cmd.getOptionValue(TEXT_FIELD_PARAM);
        if (null == textField) {
            UsageSpecify(TEXT_FIELD_PARAM, opt);
        }

        textField = textField.toLowerCase();
        int fieldId = -1;
        for (int i = 0; i < FeatureExtractor.mFieldNames.length; ++i)
            if (FeatureExtractor.mFieldNames[i].compareToIgnoreCase(textField) == 0) {
                fieldId = i;
                break;
            }
        if (-1 == fieldId) {
            Usage("Wrong field index, should be one of the following: "
                    + String.join(",", FeatureExtractor.mFieldNames), opt);
        }

        InMemForwardIndex indx = new InMemForwardIndex(
                FeatureExtractor.indexFileName(memIndexPref, FeatureExtractor.mFieldNames[fieldId]));

        BM25SimilarityLucene bm25simil = new BM25SimilarityLucene(FeatureExtractor.BM25_K1,
                FeatureExtractor.BM25_B, indx);

        String inQueryFile = cmd.getOptionValue(IN_QUERIES_PARAM);
        String outQueryFile = cmd.getOptionValue(OUT_QUERIES_PARAM);
        if ((inQueryFile == null) != (outQueryFile == null)) {
            Usage("You should either specify both " + IN_QUERIES_PARAM + " and " + OUT_QUERIES_PARAM
                    + " or none of them", opt);
        }
        String outDataFile = cmd.getOptionValue(OUT_DATA_PARAM);

        tmpn = cmd.getOptionValue(TEST_QTY_PARAM);
        int testQty = 0;
        if (tmpn != null) {
            try {
                testQty = Integer.parseInt(tmpn);
            } catch (NumberFormatException e) {
                UsageSpecify(TEST_QTY_PARAM, opt);
            }
        }

        ArrayList<DocEntry> testDocEntries = new ArrayList<DocEntry>();
        ArrayList<DocEntry> testQueryEntries = new ArrayList<DocEntry>();
        ArrayList<TrulySparseVector> testDocVectors = new ArrayList<TrulySparseVector>();
        ArrayList<TrulySparseVector> testQueryVectors = new ArrayList<TrulySparseVector>();

        if (outDataFile != null) {
            BufferedWriter out = new BufferedWriter(
                    new OutputStreamWriter(CompressUtils.createOutputStream(outDataFile)));

            ArrayList<DocEntryExt> docEntries = indx.getDocEntries();

            for (int id = 0; id < Math.min(maxNumData, docEntries.size()); ++id) {
                DocEntry e = docEntries.get(id).mDocEntry;
                TrulySparseVector v = bm25simil.getDocSparseVector(e, false);
                if (id < testQty) {
                    testDocEntries.add(e);
                    testDocVectors.add(v);
                }
                outputVector(out, v);
            }

            out.close();

        }

        Splitter splitOnSpace = Splitter.on(' ').trimResults().omitEmptyStrings();

        if (outQueryFile != null) {
            BufferedReader inpText = new BufferedReader(
                    new InputStreamReader(CompressUtils.createInputStream(inQueryFile)));
            BufferedWriter out = new BufferedWriter(
                    new OutputStreamWriter(CompressUtils.createOutputStream(outQueryFile)));

            String queryText = XmlHelper.readNextXMLIndexEntry(inpText);

            for (int queryQty = 0; queryText != null && queryQty < maxNumQuery; queryText = XmlHelper
                    .readNextXMLIndexEntry(inpText), queryQty++) {
                Map<String, String> queryFields = null;
                // 1. Parse a query

                try {
                    queryFields = XmlHelper.parseXMLIndexEntry(queryText);
                } catch (Exception e) {
                    System.err.println("Parsing error, offending QUERY:\n" + queryText);
                    throw new Exception("Parsing error.");
                }

                String fieldText = queryFields.get(FeatureExtractor.mFieldsSOLR[fieldId]);

                if (fieldText == null) {
                    fieldText = "";
                }

                ArrayList<String> tmpa = new ArrayList<String>();
                for (String s : splitOnSpace.split(fieldText))
                    tmpa.add(s);

                DocEntry e = indx.createDocEntry(tmpa.toArray(new String[tmpa.size()]));

                TrulySparseVector v = bm25simil.getDocSparseVector(e, true);
                if (queryQty < testQty) {
                    testQueryEntries.add(e);
                    testQueryVectors.add(v);
                }
                outputVector(out, v);
            }

            out.close();
        }

        int testedQty = 0, diffQty = 0;
        // Now let's do some testing
        for (int iq = 0; iq < testQueryEntries.size(); ++iq) {
            DocEntry queryEntry = testQueryEntries.get(iq);
            TrulySparseVector queryVector = testQueryVectors.get(iq);
            for (int id = 0; id < testDocEntries.size(); ++id) {
                DocEntry docEntry = testDocEntries.get(id);
                TrulySparseVector docVector = testDocVectors.get(id);
                float val1 = bm25simil.compute(queryEntry, docEntry);
                float val2 = TrulySparseVector.scalarProduct(queryVector, docVector);
                ++testedQty;
                if (Math.abs(val1 - val2) > 1e-5) { // allow for small floating-point differences
                    System.err.println(
                            String.format("Potential mismatch BM25=%f <-> scalar product=%f", val1, val2));
                    ++diffQty;
                }
            }
        }
        if (testedQty > 0)
            System.out.println(String.format("Tested %d Mismatched %d", testedQty, diffQty));

    } catch (ParseException e) {
        Usage("Cannot parse arguments: " + e, prmHlp != null ? prmHlp.getOptions() : null);
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
        System.err.println("Terminating due to an exception: " + e);
        System.exit(1);
    }
}

From source file:edu.msu.cme.rdp.alignment.errorcheck.RmPartialSeqs.java

/**
* This program detects partial sequences based on the best pairwise alignment for each query sequence.
* @param args
* @throws Exception
*/
public static void main(String[] args) throws Exception {

    String trainseqFile = null;
    String queryFile = null;
    PrintStream seqOutStream = null;
    PrintStream alignOutStream = null;
    AlignmentMode mode = AlignmentMode.overlap;
    int k = 10;
    int min_gaps = 50;

    try {
        CommandLine line = new PosixParser().parse(options, args);

        if (line.hasOption("alignment-mode")) {
            String m = line.getOptionValue("alignment-mode").toLowerCase();
            mode = AlignmentMode.valueOf(m);
        }

        if (line.hasOption("min_gaps")) {
            min_gaps = Integer.parseInt(line.getOptionValue("min_gaps"));
        }
        if (line.hasOption("knn")) {
            k = Integer.parseInt(line.getOptionValue("knn"));
        }
        if (line.hasOption("alignment-out")) {
            alignOutStream = new PrintStream(new File(line.getOptionValue("alignment-out")));
        }
        args = line.getArgs();
        if (args.length != 3) {
            throw new Exception("wrong number of arguments");
        }

        trainseqFile = args[0];
        queryFile = args[1];
        seqOutStream = new PrintStream(new File(args[2]));
    } catch (Exception e) {
        System.err.println("Error: " + e.getMessage());
        new HelpFormatter().printHelp(80,
                " [options] fulllengthSeqFile queryFile passedSeqOutFile\n  sequences can be either protein or nucleotide",
                "", options, "");
        return;
    }

    RmPartialSeqs theObj = new RmPartialSeqs(trainseqFile, queryFile, mode, k, min_gaps);

    theObj.checkPartial(seqOutStream, alignOutStream);
}

From source file:com.adito.server.Main.java

/**
 * Entry point
 * 
 * @param args
 * @throws Throwable
 */
public static void main(String[] args) throws Throwable {

    // This is a hack to allow the Install4J installer to get the java
    // runtime that will be used
    if (args.length > 0 && args[0].equals("--jvmdir")) {
        System.out.println(SystemProperties.get("java.home"));
        System.exit(0);
    }
    useWrapper = System.getProperty("wrapper.key") != null;
    final Main main = new Main();
    ContextHolder.setContext(main);

    if (useWrapper) {
        WrapperManager.start(main, args);
    } else {
        Integer returnCode = main.start(args);
        if (returnCode != null) {
            if (main.gui) {
                if (main.startupException == null) {
                    main.startupException = new Exception("An exit code of " + returnCode + " was returned.");
                }
                try {
                    if (SystemProperties.get("os.name").toLowerCase().startsWith("windows")) {
                        UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
                    }
                } catch (Exception e) {
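                    // ignore: fall back to the default look and feel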
                }
                String mesg = main.startupException.getMessage() == null ? "No message supplied."
                        : main.startupException.getMessage();
                StringBuffer buf = new StringBuffer();
                int l = 0;
                char ch = ' ';
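                // naive word wrap: break the message at the first space once a line exceeds 50 characters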
                for (int i = 0; i < mesg.length(); i++) {
                    ch = mesg.charAt(i);
                    if (l > 50 && ch == ' ') {
                        buf.append("\n");
                        l = 0;
                    } else {
                        if (ch == '\n') {
                            l = 0;
                        } else {
                            l++;
                        }
                        buf.append(ch);
                    }
                }
                mesg = buf.toString();
                final String fMesg = mesg;
                SwingUtilities.invokeAndWait(new Runnable() {
                    public void run() {
                        JOptionPane.showMessageDialog(null, fMesg, "Startup Error", JOptionPane.ERROR_MESSAGE);
                    }
                });
            }
            System.exit(returnCode.intValue());
        } else {
            Runtime.getRuntime().addShutdownHook(new Thread() {
                public void run() {
                    if (!main.shuttingDown) {
                        main.stop(0);
                    }
                }
            });
        }
    }
}

From source file:edu.msu.cme.rdp.classifier.train.validation.distance.PairwiseSeqDistance.java

/**
* This program does the pairwise alignment between each pair of sequences and
* reports a summary of the average distances and the standard deviation at each rank.
* @param args
* @throws Exception 
*/
public static void main(String[] args) throws Exception {

    String trainseqFile = null;
    String taxFile = null;
    PrintStream outStream = null;
    AlignmentMode mode = AlignmentMode.overlap;
    boolean show_alignment = false;

    try {
        CommandLine line = new PosixParser().parse(options, args);

        if (line.hasOption("show_alignment")) {
            show_alignment = true;
        }
        if (line.hasOption("alignment-mode")) {
            String m = line.getOptionValue("alignment-mode").toLowerCase();
            mode = AlignmentMode.valueOf(m);

        }

        args = line.getArgs();
        if (args.length != 3) {
            throw new Exception("wrong number of arguments");
        }
        trainseqFile = args[0];
        taxFile = args[1];
        outStream = new PrintStream(new File(args[2]));
    } catch (Exception e) {
        System.err.println("Command Error: " + e.getMessage());
        new HelpFormatter().printHelp(80, " [options] trainseqFile taxonFile outFile", "", options, "");
        return;
    }

    PairwiseSeqDistance theObj = new PairwiseSeqDistance(trainseqFile, taxFile, mode, show_alignment);

    theObj.printSummary(outStream);
}

From source file:com.hpe.nv.samples.advanced.AdvAllTestManagerClassMethods.java

public static void main(String[] args) throws Exception {
    try {
        // program arguments
        Options options = new Options();
        options.addOption("i", "server-ip", true, "[mandatory] NV Test Manager IP");
        options.addOption("o", "server-port", true, "[mandatory] NV Test Manager port");
        options.addOption("u", "username", true, "[mandatory] NV username");
        options.addOption("w", "password", true, "[mandatory] NV password");
        options.addOption("e", "ssl", true, "[optional] Pass true to use SSL. Default: false");
        options.addOption("y", "proxy", true, "[optional] Proxy server host:port");
        options.addOption("t", "site-url", true,
                "[optional] Site under test URL. Default: HPE Network Virtualization site URL. If you change this value, make sure to change the --xpath argument too");
        options.addOption("x", "xpath", true,
                "[optional] Parameter for ExpectedConditions.visibilityOfElementLocated(By.xpath(...)) method. Use an xpath expression of some element in the site. Default: //div[@id='content']");
        options.addOption("a", "active-adapter-ip", true,
                "[optional] Active adapter IP. Default: --server-ip argument");
        options.addOption("s", "shunra-file-path", true,
                "[optional] File path for the .shunra file to analyze");
        options.addOption("z", "zip-result-file-path", true,
                "[optional] File path to store the analysis results as a .zip file");
        options.addOption("k", "analysis-ports", true,
                "[optional] A comma-separated list of ports for test analysis");
        options.addOption("f", "first-test-flow-tcp-port", true,
                "[optional] TCP port for the flow of the first test");
        options.addOption("g", "second-test-flow-tcp-port", true,
                "[optional] TCP port for the flow of the second test");
        options.addOption("b", "browser", true,
                "[optional] The browser for which the Selenium WebDriver is built. Possible values: Chrome and Firefox. Default: Firefox");
        options.addOption("d", "debug", true,
                "[optional] Pass true to view console debug messages during execution. Default: false");
        options.addOption("h", "help", false, "[optional] Generates and prints help information");

        // parse and validate the command line arguments
        CommandLineParser parser = new DefaultParser();
        CommandLine line = parser.parse(options, args);

        if (line.hasOption("help")) {
            // print help if help argument is passed
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("AdvAllTestManagerClassMethods.java", options);
            return;
        }

        if (line.hasOption("server-ip")) {
            serverIp = line.getOptionValue("server-ip");
            if (serverIp.equals("0.0.0.0")) {
                throw new Exception(
                        "Please replace the server IP argument value (0.0.0.0) with your NV Test Manager IP");
            }
        } else {
            throw new Exception("Missing argument -i/--server-ip <serverIp>");
        }

        if (line.hasOption("server-port")) {
            serverPort = Integer.parseInt(line.getOptionValue("server-port"));
        } else {
            throw new Exception("Missing argument -o/--server-port <serverPort>");
        }

        if (line.hasOption("username")) {
            username = line.getOptionValue("username");
        } else {
            throw new Exception("Missing argument -u/--username <username>");
        }

        if (line.hasOption("password")) {
            password = line.getOptionValue("password");
        } else {
            throw new Exception("Missing argument -w/--password <password>");
        }

        if (line.hasOption("ssl")) {
            ssl = Boolean.parseBoolean(line.getOptionValue("ssl"));
        }

        if (line.hasOption("site-url")) {
            siteUrl = line.getOptionValue("site-url");
        } else {
            siteUrl = "http://www8.hp.com/us/en/software-solutions/network-virtualization/index.html";
        }

        if (line.hasOption("xpath")) {
            xpath = line.getOptionValue("xpath");
        } else {
            xpath = "//div[@id='content']";
        }

        if (line.hasOption("zip-result-file-path")) {
            zipResultFilePath = line.getOptionValue("zip-result-file-path");
        }

        if (line.hasOption("proxy")) {
            proxySetting = line.getOptionValue("proxy");
        }

        if (line.hasOption("active-adapter-ip")) {
            activeAdapterIp = line.getOptionValue("active-adapter-ip");
        } else {
            activeAdapterIp = serverIp;
        }

        if (line.hasOption("analysis-ports")) {
            String analysisPortsStr = line.getOptionValue("analysis-ports");
            analysisPorts = analysisPortsStr.split(",");
        } else {
            analysisPorts = new String[] { "80", "8080" };
        }

        if (line.hasOption("shunra-file-path")) {
            shunraFilePath = line.getOptionValue("shunra-file-path");
        }

        if (line.hasOption("firstTestFlowTcpPort")) {
            firstTestFlowTcpPort = Integer.parseInt(line.getOptionValue("firstTestFlowTcpPort"));
        } else {
            firstTestFlowTcpPort = 8080;
        }

        if (line.hasOption("secondTestFlowTcpPort")) {
            secondTestFlowTcpPort = Integer.parseInt(line.getOptionValue("secondTestFlowTcpPort"));
        } else {
            secondTestFlowTcpPort = 80;
        }

        if (line.hasOption("browser")) {
            browser = line.getOptionValue("browser");
        } else {
            browser = "Firefox";
        }

        if (line.hasOption("debug")) {
            debug = Boolean.parseBoolean(line.getOptionValue("debug"));
        }

        String newLine = System.getProperty("line.separator");
        String testDescription = "***   This sample demonstrates all of the TestManager class APIs. These APIs let you:                                 ***"
                + newLine
                + "***   * initialize the TestManager object to pass logon credentials, the NV Test Manager IP, the port, and so on      ***"
                + newLine
                + "***   * set/get the NV configuration and active adapter                                                               ***"
                + newLine
                + "***   * get the running tests tokens                                                                                  ***"
                + newLine
                + "***   * start/stop packet list capture                                                                                ***"
                + newLine
                + "***   * get packet list information                                                                                   ***"
                + newLine
                + "***   * stop a specified array of tests or all of the running tests                                                   ***"
                + newLine
                + "***   * analyze a .shunra file, which is a compressed file that includes an events file, metadata, and packet lists   ***"
                + newLine
                + "***                                                                                                                   ***"
                + newLine
                + "***   You can view the actual steps of this sample in the AdvAllTestManagerClassMethods.java file.                    ***"
                + newLine;

        // print the sample's description
        System.out.println(testDescription);

        // start console spinner
        if (!debug) {
            spinner = new Thread(new Spinner());
            spinner.start();
        }

        // sample execution steps
        /*****    Part 1 - Initialize the TestManager object to pass logon credentials, the NV Test Manager IP, the port, and so on            *****/
        printPartDescription(
                "\b------    Part 1 - Initialize the TestManager object to pass logon credentials, the NV Test Manager IP, the port, and so on");
        initTestManager();
        printPartSeparator();
        /*****    Part 2 - Set/get the NV configuration and active adapter                                                                     *****/
        printPartDescription("------    Part 2 - Set/get the NV configuration and active adapter");
        setActiveAdapter();
        getActiveAdapter();
        setConfiguration();
        getConfiguration();
        printPartSeparator();
        /*****    Part 3 - Start tests and get the NV tests' tokens                                                                             *****/
        printPartDescription("------    Part 3 - Start tests and get the NV tests' tokens");
        startTest1();
        connectTest1ToTransactionManager();
        startTest2();
        connectTest2ToTransactionManager();
        getTestTokens();
        printPartSeparator();
        /*****    Part 4 - Start NV transactions, navigate to the site and start capturing the packet lists                                     *****/
        printPartDescription(
                "------    Part 4 - Start NV transactions, navigate to the site and start capturing the packet lists");
        startTransaction1();
        startTransaction2();
        startPacketListCapture();
        buildSeleniumWebDriver();
        seleniumNavigateToPage();
        printPartSeparator();
        /*****    Part 5 - Get the packet list information and print it to the console (if the --debug argument is set to true)                 *****/
        printPartDescription(
                "------    Part 5 - Get the packet list information and print it to the console (if the --debug argument is set to true)");
        getPacketListInfo();
        printPartSeparator();
        /*****    Part 6 - Stop capturing packet lists and stop the NV transactions                                                             *****/
        printPartDescription("------    Part 6 - Stop capturing packet lists and stop the NV transactions");
        stopPacketListCapture();
        stopTransaction1();
        siteTransaction1 = null;
        stopTransaction2();
        siteTransaction2 = null;
        printPartSeparator();
        /*****    Part 7 - Stop the first NV test using the "stopTests" method and then stop the second test using the "stopAllTests" method    *****/
        printPartDescription(
                "------    Part 7 - Stop the first NV test using the \"stopTests\" method and then stop the second test using the \"stopAllTests\" method");
        stopTest1();
        siteTest1 = null;
        driver.manage().timeouts().implicitlyWait(30, TimeUnit.SECONDS);
        stopAllTests();
        siteTest2 = null;
        printPartSeparator();
        /*****    Part 8 - Analyze the specified .shunra file and print the results to the console                                              *****/
        printPartDescription(
                "------    Part 8 - Analyze the specified .shunra file and print the results to the console");
        analyzeShunra();
        printPartSeparator();
        doneCallback();
    } catch (Exception e) {
        try {
            handleError(e.getMessage());
        } catch (Exception e2) {
            System.out.println("Error occurred: " + e2.getMessage());
        }
    }
}