Example usage for org.apache.commons.cli BasicParser BasicParser

List of usage examples for org.apache.commons.cli BasicParser BasicParser

Introduction

On this page you can find example usage of the org.apache.commons.cli BasicParser constructor.

Prototype

BasicParser

Source Link

Usage

From source file:iarnrodProducer.java

/**
 * Entry point: parses Kafka connection options from the command line, creates a
 * producer KafkaDataStore, registers the "iarnrod" feature type (topic) if it
 * does not already exist, and then writes SimpleFeatures to Kafka on an interval.
 *
 * @param args command line arguments describing the Kafka connection
 * @throws Exception if the KafkaDataStore cannot be created
 */
public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer KafkaDataStore object
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore object properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }

    try {
        // create the schema which creates a topic in Kafka
        // (only needs to be done once)
        final String sftName = "iarnrod";
        final String sftSchema = "trainStatus:String,trainCode:String,publicMessage:String,direction:String,dtg:Date,*geom:Point:srid=4326";
        SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
        // set zkPath to default if not specified on the command line
        String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
        SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
        // only create the schema if it hasn't been created already
        if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName)) {
            producerDS.createSchema(preppedOutputSft);
        }

        // the live consumer must be created before the producer writes features
        // in order to read streaming data.
        // i.e. the live consumer will only read data written after its instantiation
        SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

        // creates and adds SimpleFeatures to the producer on an interval
        System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
        addSimpleFeatures(sft, producerFS);
    } finally {
        // fix: release the connections held by the data store — previously leaked
        // (the KafkaQuickStart example disposes its stores the same way)
        producerDS.dispose();
    }

    System.exit(0);
}

From source file:fr.inria.edelweiss.kgdqp.core.FedInferrencingCLI.java

/**
 * Entry point for the federated (distributed) inferencing CLI.
 *
 * Parses command-line options, registers one or more remote SPARQL endpoints,
 * loads inference rules (and, optionally, ontologies used for rule selection)
 * into a local graph, selects the applicable rules via a SPARQL query, and then
 * runs the rule engine on a background thread while printing cost/size
 * statistics every 10 seconds until the engine terminates.
 *
 * @param args program arguments; see the options declared below
 * @throws ParseException       if the command line cannot be parsed
 * @throws EngineException      if rule-selection query execution fails
 * @throws InterruptedException if the monitoring sleep is interrupted
 */
public static void main(String args[]) throws ParseException, EngineException, InterruptedException {

    List<String> endpoints = new ArrayList<String>();
    String queryPath = null; // unused here: the query-file processing below is commented out
    boolean rulesSelection = false;
    File rulesDir = null;
    File ontDir = null;

    // Declare the supported command-line options.
    Options options = new Options();
    Option helpOpt = new Option("h", "help", false, "print this message");
    Option queryOpt = new Option("q", "query", true, "specify the sparql query file");
    Option endpointOpt = new Option("e", "endpoint", true, "a federated sparql endpoint URL");
    Option versionOpt = new Option("v", "version", false, "print the version information and exit");
    Option rulesOpt = new Option("r", "rulesDir", true, "directory containing the inference rules");
    Option ontOpt = new Option("o", "ontologiesDir", true,
            "directory containing the ontologies for rules selection");
    //        Option selOpt = new Option("s", "rulesSelection", false, "if set to true, only the applicable rules are run");
    options.addOption(queryOpt);
    options.addOption(endpointOpt);
    options.addOption(helpOpt);
    options.addOption(versionOpt);
    options.addOption(rulesOpt);
    options.addOption(ontOpt);
    //        options.addOption(selOpt);

    String header = "Corese/KGRAM distributed rule engine command line interface";
    String footer = "\nPlease report any issue to alban.gaignard@cnrs.fr, olivier.corby@inria.fr";

    // Parse and validate the command line; most failure paths print a message and exit.
    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);
    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("kgdqp", header, options, footer, true);
        System.exit(0);
    }
    if (!cmd.hasOption("e")) {
        logger.info("You must specify at least the URL of one sparql endpoint !");
        System.exit(0);
    } else {
        endpoints = new ArrayList<String>(Arrays.asList(cmd.getOptionValues("e")));
    }
    if (cmd.hasOption("o")) {
        // An ontologies directory enables "rules selection" mode further down.
        rulesSelection = true;
        String ontDirPath = cmd.getOptionValue("o");
        ontDir = new File(ontDirPath);
        if (!ontDir.isDirectory()) {
            logger.warn(ontDirPath + " is not a valid directory path.");
            System.exit(0);
        }
    }
    if (!cmd.hasOption("r")) {
        logger.info("You must specify a path for inference rules directory !");
        System.exit(0);
    } else if (rulesSelection) {
        // NOTE(review): intentionally-empty branch? Rule filtering actually happens
        // later via the pertinentRulesQuery — confirm this stub can be removed.
    }

    if (cmd.hasOption("v")) {
        logger.info("version 3.0.4-SNAPSHOT");
        System.exit(0);
    }

    String rulesDirPath = cmd.getOptionValue("r");
    rulesDir = new File(rulesDirPath);
    if (!rulesDir.isDirectory()) {
        logger.warn(rulesDirPath + " is not a valid directory path.");
        System.exit(0);
    }

    ///////////////// Federated query processor setup: one remote per endpoint URL.
    Graph graph = Graph.create();
    QueryProcessDQP execDQP = QueryProcessDQP.create(graph);
    for (String url : endpoints) {
        try {
            execDQP.addRemote(new URL(url), WSImplem.REST);
        } catch (MalformedURLException ex) {
            logger.error(url + " is not a well-formed URL");
            System.exit(1);
        }
    }

    // Local rules graph initialization
    Graph rulesG = Graph.create();
    Load ld = Load.create(rulesG);

    if (rulesSelection) {
        // Ontology loading (used by the rule-selection query below)
        if (ontDir.isDirectory()) {
            for (File o : ontDir.listFiles()) {
                logger.info("Loading " + o.getAbsolutePath());
                ld.load(o.getAbsolutePath());
            }
        }
    }

    // Rules loading
    if (rulesDir.isDirectory()) {
        for (File r : rulesDir.listFiles()) {
            logger.info("Loading " + r.getAbsolutePath());
            ld.load(r.getAbsolutePath());
        }
    }

    // Rule engine initialization: the engine runs over the federated graph
    // and delegates query evaluation to the distributed processor.
    RuleEngine ruleEngine = RuleEngine.create(graph);
    ruleEngine.set(execDQP);

    StopWatch sw = new StopWatch();
    logger.info("Federated graph size : " + graph.size());
    logger.info("Rules graph size : " + rulesG.size());

    // Rule selection: query the local rules graph for either the pertinent
    // rules (when ontologies were supplied) or all rules.
    logger.info("Rules selection");
    QueryProcess localKgram = QueryProcess.create(rulesG);
    ArrayList<String> applicableRules = new ArrayList<String>();
    sw.start();
    String rulesSelQuery = "";
    if (rulesSelection) {
        rulesSelQuery = pertinentRulesQuery;
    } else {
        rulesSelQuery = allRulesQuery;
    }
    Mappings maps = localKgram.query(rulesSelQuery);
    logger.info("Rules selected in " + sw.getTime() + " ms");
    logger.info("Applicable rules : " + maps.size());

    // Selected rule loading: each ?res binding holds the text of one rule.
    for (Mapping map : maps) {
        IDatatype dt = (IDatatype) map.getValue("?res");
        String rule = dt.getLabel();
        //loading rule in the rule engine
        //            logger.info("Adding rule : " + rule);
        applicableRules.add(rule);
        ruleEngine.addRule(rule);
    }

    // Rules application on distributed sparql endpoints, run asynchronously
    // so the main thread can print monitoring statistics.
    logger.info("Rules application (" + applicableRules.size() + " rules)");
    ExecutorService threadPool = Executors.newCachedThreadPool();
    RuleEngineThread ruleThread = new RuleEngineThread(ruleEngine);
    sw.reset();
    sw.start();

    //        ruleEngine.process();
    threadPool.execute(ruleThread);
    threadPool.shutdown();

    // Monitoring loop: print query/source cost counters and the growing
    // federated graph size every 10 s until the engine thread finishes.
    while (!threadPool.isTerminated()) {
        System.out.println("******************************");
        System.out.println(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter,
                QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));
        System.out.println("Rule engine running for " + sw.getTime() + " ms");
        System.out.println("Federated graph size : " + graph.size());
        Thread.sleep(10000);
    }

    logger.info("Federated graph size : " + graph.size());
    logger.info(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter,
            QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));

    ///////////// Query file processing (disabled)
    //        StringBuffer fileData = new StringBuffer(1000);
    //        BufferedReader reader = null;
    //        try {
    //            reader = new BufferedReader(new FileReader(queryPath));
    //        } catch (FileNotFoundException ex) {
    //             logger.error("Query file "+queryPath+" not found !");
    //             System.exit(1);
    //        }
    //        char[] buf = new char[1024];
    //        int numRead = 0;
    //        try {
    //            while ((numRead = reader.read(buf)) != -1) {
    //                String readData = String.valueOf(buf, 0, numRead);
    //                fileData.append(readData);
    //                buf = new char[1024];
    //            }
    //            reader.close();
    //        } catch (IOException ex) {
    //           logger.error("Error while reading query file "+queryPath);
    //           System.exit(1);
    //        }
    //
    //        String sparqlQuery = fileData.toString();
    //
    //        Query q = exec.compile(sparqlQuery,null);
    //        System.out.println(q);
    //        
    //        StopWatch sw = new StopWatch();
    //        sw.start();
    //        Mappings map = exec.query(sparqlQuery);
    //        int dqpSize = map.size();
    //        System.out.println("--------");
    //        long time = sw.getTime();
    //        System.out.println(time + " " + dqpSize);
}

From source file:com.example.geomesa.kafka.KafkaQuickStart.java

/**
 * Entry point for the Kafka quick start: creates a producer and a consumer
 * KafkaDataStore pair from the same configuration, registers the
 * "KafkaQuickStart" feature type (topic), writes SimpleFeatures through the
 * producer, and reads the current state back through the live consumer.
 *
 * @param args command line arguments describing the Kafka connection;
 *             supports "automated" (skip the interactive pause) and
 *             "visibility" (write features with the given visibility)
 * @throws Exception if either KafkaDataStore cannot be created
 */
public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    // (same config, differing only in consumer count: 0 = producer-only)
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("kafka.consumer.count", "0");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("kafka.consumer.count", "1");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    try {
        // create the schema which creates a topic in Kafka
        // (only needs to be done once)
        final String sftName = "KafkaQuickStart";
        final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
        SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
        producerDS.createSchema(sft);

        if (!cmd.hasOption("automated")) {
            // Interactive pause so the user can register the layer in GeoServer first.
            System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
            System.in.read();
        }

        // the live consumer must be created before the producer writes features
        // in order to read streaming data.
        // i.e. the live consumer will only read data written after its instantiation
        SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
        SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

        // creates and adds SimpleFeatures to the producer every 1/5th of a second
        System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
        long replayStart = System.currentTimeMillis();

        String vis = cmd.getOptionValue("visibility");
        if (vis != null)
            System.out.println("Writing features with " + vis);
        addSimpleFeatures(sft, producerFS, vis);
        long replayEnd = System.currentTimeMillis();

        // read from Kafka after writing all the features.
        // LIVE CONSUMER - will obtain the current state of SimpleFeatures
        System.out.println("\nConsuming with the live consumer...");
        SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
        System.out.println(featureCollection.size() + " features were written to Kafka");

        addDeleteNewFeature(sft, producerFS);

        // read from Kafka after writing all the features.
        // LIVE CONSUMER - will obtain the current state of SimpleFeatures
        System.out.println("\nConsuming with the live consumer...");
        featureCollection = consumerFS.getFeatures();
        System.out.println(featureCollection.size() + " features were written to Kafka");

        // the state of the two SimpleFeatures is real time here
        System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
        SimpleFeatureIterator featureIterator = featureCollection.features();
        SimpleFeature feature1 = featureIterator.next();
        SimpleFeature feature2 = featureIterator.next();
        featureIterator.close();
        printFeature(feature1);
        printFeature(feature2);

        if (System.getProperty("clear") != null) {
            // Run Java command with -Dclear=true
            // This will cause a 'clear'
            producerFS.removeFeatures(Filter.INCLUDE);
        }
    } finally {
        // release the connections held by both data stores
        producerDS.dispose();
        consumerDS.dispose();
    }

    System.exit(0);
}

From source file:ch.fork.AdHocRailway.ui.AdHocRailway.java

/**
 * Entry point: parses the command line (-c/--clean starts the application with
 * a clean configuration) and bootstraps the AdHocRailway application.
 *
 * @param args program arguments
 * @throws ParseException if the command line cannot be parsed
 */
public static void main(final String[] args) throws ParseException {

    Options options = new Options();
    options.addOption("c", "clean", false, "start with a clean config");
    CommandLineParser parser = new BasicParser();

    org.apache.commons.cli.CommandLine parsedCommandLine = parser.parse(options, args);

    // fix: drop the unused local — the constructor is invoked for its side effect
    // of starting the application
    new AdHocRailway(parsedCommandLine);
}

From source file:bogdanrechi.xmlo.Xmlo.java

/**
 * Main method: declares the command-line interface of the xmlo tool, parses the
 * arguments, applies the flag options to the static configuration fields, and
 * dispatches to exactly one XML operation (XPath, XSLT, XQuery, XSD verify, or
 * DTD verify) before printing the total duration and exiting.
 *
 * @param args
 *          Program arguments.
 */
@SuppressWarnings("static-access")
public static void main(String[] args) {
    long timeStart = System.currentTimeMillis();

    _log = Logger.getLogger(Xmlo.class);

    Options argOptions = new Options();

    // Mutually-exclusive operation types: exactly one of these selects what the tool does.
    OptionGroup operationTypes = new OptionGroup();

    operationTypes.addOption(
            OptionBuilder.withDescription("XPath query").hasArg().withArgName("file").create("xpath"));
    operationTypes.addOption(
            OptionBuilder.withDescription("XSLT transformation").hasArg().withArgName("file").create("xslt"));
    operationTypes.addOption(
            OptionBuilder.withDescription("XQuery (with $sourceFilePath, see XmlOperations for details)")
                    .hasArg().withArgName("file").create("xquery"));
    operationTypes
            .addOption(OptionBuilder.withDescription("XSD verify").hasArgs().withArgName("file").create("xsd"));
    operationTypes
            .addOption(OptionBuilder.withDescription("DTD verify").hasArgs().withArgName("file").create("dtd"));

    argOptions.addOptionGroup(operationTypes);

    // Behavior flags.
    argOptions.addOption(OptionBuilder.withDescription("on screen information while performing")
            .withLongOpt("verbose").create("v"));

    argOptions.addOption(OptionBuilder.withDescription("output non-void or invalid-type results only")
            .withLongOpt("results-only").create("o"));

    argOptions.addOption(OptionBuilder.withDescription("replicate input structure on the destination side")
            .withLongOpt("keep-structure").create("k"));

    // NOTE(review): the long option is spelled "resursive" (typo for "recursive");
    // fixing it would break existing invocations, so it is left as-is here.
    argOptions.addOption(OptionBuilder.withDescription("recursive browsing of the target structure")
            .withLongOpt("resursive").create("r"));

    argOptions.addOption(OptionBuilder.withDescription("XPath and XQuery results not numbered")
            .withLongOpt("not-numbered").create("nn"));

    argOptions.addOption(OptionBuilder.withDescription("destination files extension").hasArg()
            .withArgName("ext").withLongOpt("extension").create("x"));

    argOptions.addOption(OptionBuilder.withDescription("target files mask").hasArg().withArgName("files mask")
            .withLongOpt("target").create("t"));

    // Mutually-exclusive destination: file, directory, or terminal.
    OptionGroup destinationGroup = new OptionGroup();

    destinationGroup.addOption(OptionBuilder.withDescription("destination file").hasArg().withArgName("file")
            .withLongOpt("destination-file").create("df"));
    destinationGroup.addOption(OptionBuilder.withDescription("destination folder").hasArg()
            .withArgName("folder").withLongOpt("destination-directory").create("dd"));
    destinationGroup.addOption(OptionBuilder.withDescription("destination terminal (and verbose)")
            .withLongOpt("destination-terminal").create("dt"));

    argOptions.addOptionGroup(destinationGroup);

    argOptions.addOption(OptionBuilder.withDescription("file containing namespaces aliases").hasArg()
            .withArgName("file").withLongOpt("namespaces").create("n"));

    argOptions.addOption(OptionBuilder.withDescription("usage information").withLongOpt("help").create("h"));

    argOptions.addOption(OptionBuilder.withDescription("show examples").withLongOpt("examples").create("e"));

    argOptions.addOption(OptionBuilder.withDescription("keep blank nodes while printing")
            .withLongOpt("keep-blanks").create("b"));

    argOptions.addOption(
            OptionBuilder.withDescription("show duration for each file when the verbose option is activated")
                    .withLongOpt("show-duration").create("d"));

    CommandLineParser parser = new BasicParser();
    try {
        CommandLine cmd = parser.parse(argOptions, args);

        // Help/examples (or no arguments at all): print the requested information and exit.
        if (cmd.hasOption('h') || cmd.hasOption('e') || cmd.getOptions().length == 0) {
            Format.println("\n" + General.getAboutInformation("resources/xmlo/metadata.properties"));

            if (cmd.hasOption('h') || cmd.getOptions().length == 0) {
                HelpFormatter formatter = new HelpFormatter();
                formatter.printHelp("xmlo", "where:", argOptions, null, true);
                Format.println();
            }

            if (cmd.hasOption("e")) {
                String examples = Files.readTextFileFromResources("resources/xmlo/examples.txt", _errorMessage);
                if (examples != null)
                    Format.println(examples);
                else {
                    // NOTE(review): "detalis" is a typo for "details" in this user-facing
                    // message; left unchanged to keep this a comments-only pass.
                    Format.println("Internal error! Please see the log file for detalis.");
                    _log.error(_errorMessage.get());

                    System.exit(1);
                }
            }

            System.exit(0);
            return;
        }

        Format.println();

        // options: copy each parsed flag/value into the static configuration fields

        if (cmd.hasOption('r'))
            _recursive = true;

        if (cmd.hasOption('k'))
            _keepStructure = true;

        if (cmd.hasOption('o'))
            _resultsOnly = true;

        if (cmd.hasOption('v'))
            _verbose = true;

        if (cmd.hasOption('b'))
            _keepBlanks = true;

        if (cmd.hasOption('d'))
            _showDuration = true;

        if (cmd.hasOption('x'))
            _extension = "." + cmd.getOptionValue('x');

        if (cmd.hasOption("nn"))
            _notNumbered = true;

        if (cmd.hasOption("df")) {
            _destination = cmd.getOptionValue("df");

            if (Files.isFolder(_destination))
                printErrorAndExit("The destination is a folder!");

            _destinationIsFile = true;
        }

        if (cmd.hasOption("dd")) {
            _destination = cmd.getOptionValue("dd");

            if (!Files.exists(_destination))
                printErrorAndExit("The destination folder does not exist!");

            if (!Files.isFolder(_destination))
                printErrorAndExit("The destination is not a folder!");
        }

        if (cmd.hasOption("dt"))
            // terminal output implies verbose
            _destinationIsTerminal = _verbose = true;

        if (cmd.hasOption('t'))
            _target = cmd.getOptionValue('t');

        if (cmd.hasOption('n')) {
            _namespaces = cmd.getOptionValue('n');
            extractNamespacesAliases();
        }

        // operations: each branch defaults the target mask to the current
        // directory with the operation's extension, then dispatches

        if (cmd.hasOption("xpath")) {
            if (_target == null)
                _target = Files.CURRENT_DIRECTORY + Files.FILE_SEPARATOR + "*" + EXTENSION_XPATH;

            doXPath(cmd.getOptionValue("xpath"));
        }

        if (cmd.hasOption("xslt")) {
            if (_target == null)
                _target = Files.CURRENT_DIRECTORY + Files.FILE_SEPARATOR + "*" + EXTENSION_XSLT;

            doXslt(cmd.getOptionValue("xslt"));
        }

        if (cmd.hasOption("xquery")) {
            if (_target == null)
                _target = Files.CURRENT_DIRECTORY + Files.FILE_SEPARATOR + "*" + EXTENSION_XQUERY;

            doXQuery(cmd.getOptionValue("xquery"));
        }

        if (cmd.hasOption("xsd")) {
            if (_target == null)
                _target = Files.CURRENT_DIRECTORY + Files.FILE_SEPARATOR + "*" + EXTENSION_XSD;

            doXsd(cmd.getOptionValues("xsd"));
        }

        if (cmd.hasOption("dtd")) {
            if (_target == null)
                _target = Files.CURRENT_DIRECTORY + Files.FILE_SEPARATOR + "*" + EXTENSION_DTD;

            doDtd(cmd.getOptionValues("dtd"));
        }
    } catch (ParseException e) {
        printErrorAndExit(e.getMessage());
    }

    Format.println("Finished%s.", _showDuration ? " in " + TimeMeasure.printDuration(timeStart) : "");

    if (Platform.SYSTEM_IS_LINUX)
        Format.println();

    System.exit(0);
}

From source file:fr.inria.edelweiss.kgdqp.core.CentralizedInferrencing.java

/**
 * Entry point for the centralized inferencing experiment.
 *
 * Unlike the federated variant, this runs the rule engine over a single local
 * graph: it parses command-line options, loads data files into the graph,
 * loads inference rules (and optionally ontologies for rule selection), then
 * runs the rule engine on a background thread while printing elapsed time and
 * graph size every 5 seconds until the engine terminates.
 *
 * @param args program arguments; see the options declared below
 * @throws ParseException       if the command line cannot be parsed
 * @throws EngineException      if rule-selection query execution fails
 * @throws InterruptedException if the monitoring sleep is interrupted
 * @throws IOException          if loading data or rule files fails
 */
public static void main(String args[])
        throws ParseException, EngineException, InterruptedException, IOException {

    List<String> endpoints = new ArrayList<String>();
    String queryPath = null; // unused in this centralized variant
    boolean rulesSelection = false;
    File rulesDir = null;
    File ontDir = null;

    ///////////////// Local graph and query processor (no remote endpoints here).
    Graph graph = Graph.create();
    QueryProcess exec = QueryProcess.create(graph);

    // Declare the supported command-line options.
    Options options = new Options();
    Option helpOpt = new Option("h", "help", false, "print this message");
    //        Option queryOpt = new Option("q", "query", true, "specify the sparql query file");
    //        Option endpointOpt = new Option("e", "endpoint", true, "a federated sparql endpoint URL");
    Option versionOpt = new Option("v", "version", false, "print the version information and exit");
    Option rulesOpt = new Option("r", "rulesDir", true, "directory containing the inference rules");
    Option ontOpt = new Option("o", "ontologiesDir", true,
            "directory containing the ontologies for rules selection");
    //        Option locOpt = new Option("c", "centralized", false, "performs centralized inferences");
    Option dataOpt = new Option("l", "load", true, "data file or directory to be loaded");
    //        Option selOpt = new Option("s", "rulesSelection", false, "if set to true, only the applicable rules are run");
    //        options.addOption(queryOpt);
    //        options.addOption(endpointOpt);
    options.addOption(helpOpt);
    options.addOption(versionOpt);
    options.addOption(rulesOpt);
    options.addOption(ontOpt);
    //        options.addOption(selOpt);
    //        options.addOption(locOpt);
    options.addOption(dataOpt);

    String header = "Corese/KGRAM rule engine experiment command line interface";
    String footer = "\nPlease report any issue to alban.gaignard@cnrs.fr, olivier.corby@inria.fr";

    // Parse and validate the command line; most failure paths print a message and exit.
    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);
    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("kgdqp", header, options, footer, true);
        System.exit(0);
    }
    if (cmd.hasOption("o")) {
        // An ontologies directory enables "rules selection" mode further down.
        rulesSelection = true;
        String ontDirPath = cmd.getOptionValue("o");
        ontDir = new File(ontDirPath);
        if (!ontDir.isDirectory()) {
            logger.warn(ontDirPath + " is not a valid directory path.");
            System.exit(0);
        }
    }
    if (!cmd.hasOption("r")) {
        logger.info("You must specify a path for inference rules directory !");
        System.exit(0);
    }

    if (cmd.hasOption("l")) {
        // Load each data file/directory into the local graph.
        String[] dataPaths = cmd.getOptionValues("l");
        for (String path : dataPaths) {
            Load ld = Load.create(graph);
            ld.load(path);
            logger.info("Loaded " + path);
        }
    }

    if (cmd.hasOption("v")) {
        logger.info("version 3.0.4-SNAPSHOT");
        System.exit(0);
    }

    String rulesDirPath = cmd.getOptionValue("r");
    rulesDir = new File(rulesDirPath);
    if (!rulesDir.isDirectory()) {
        logger.warn(rulesDirPath + " is not a valid directory path.");
        System.exit(0);
    }

    // Local rules graph initialization
    Graph rulesG = Graph.create();
    Load ld = Load.create(rulesG);

    if (rulesSelection) {
        // Ontology loading (used by the rule-selection query below)
        if (ontDir.isDirectory()) {
            for (File o : ontDir.listFiles()) {
                logger.info("Loading " + o.getAbsolutePath());
                ld.load(o.getAbsolutePath());
            }
        }
    }

    // Rules loading
    if (rulesDir.isDirectory()) {
        for (File r : rulesDir.listFiles()) {
            logger.info("Loading " + r.getAbsolutePath());
            ld.load(r.getAbsolutePath());
        }
    }

    // Rule engine initialization over the local graph, with optimization,
    // construct-result mode, and tracing enabled.
    RuleEngine ruleEngine = RuleEngine.create(graph);
    ruleEngine.set(exec);
    ruleEngine.setOptimize(true);
    ruleEngine.setConstructResult(true);
    ruleEngine.setTrace(true);

    StopWatch sw = new StopWatch();
    logger.info("Federated graph size : " + graph.size());
    logger.info("Rules graph size : " + rulesG.size());

    // Rule selection: query the local rules graph for either the pertinent
    // rules (when ontologies were supplied) or all rules.
    logger.info("Rules selection");
    QueryProcess localKgram = QueryProcess.create(rulesG);
    ArrayList<String> applicableRules = new ArrayList<String>();
    sw.start();
    String rulesSelQuery = "";
    if (rulesSelection) {
        rulesSelQuery = pertinentRulesQuery;
    } else {
        rulesSelQuery = allRulesQuery;
    }
    Mappings maps = localKgram.query(rulesSelQuery);
    logger.info("Rules selected in " + sw.getTime() + " ms");
    logger.info("Applicable rules : " + maps.size());

    // Selected rule loading: each ?res binding holds the text of one rule.
    for (Mapping map : maps) {
        IDatatype dt = (IDatatype) map.getValue("?res");
        String rule = dt.getLabel();
        //loading rule in the rule engine
        //            logger.info("Adding rule : ");
        //            System.out.println("-------");
        //            System.out.println(rule);
        //            System.out.println("");
        //            if (! rule.toLowerCase().contains("sameas")) {
        applicableRules.add(rule);
        ruleEngine.addRule(rule);
        //            }
    }

    // Rules application, run asynchronously so the main thread can monitor progress.
    logger.info("Rules application (" + applicableRules.size() + " rules)");
    ExecutorService threadPool = Executors.newCachedThreadPool();
    RuleEngineThread ruleThread = new RuleEngineThread(ruleEngine);
    sw.reset();
    sw.start();

    //        ruleEngine.process();
    threadPool.execute(ruleThread);
    threadPool.shutdown();

    // Monitoring loop: print "elapsed-ms , graph-size" every 5 s until done.
    while (!threadPool.isTerminated()) {
        //            System.out.println("******************************");
        //            System.out.println(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter, QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));
        //            System.out.println("Rule engine running for " + sw.getTime() + " ms");
        //            System.out.println("Federated graph size : " + graph.size());
        System.out.println(sw.getTime() + " , " + graph.size());
        Thread.sleep(5000);
    }

    logger.info("Federated graph size : " + graph.size());
    //        logger.info(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter, QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));

    //        TripleFormat f = TripleFormat.create(graph, true);
    //        f.write("/tmp/gAll.ttl");

}

From source file:main.java.RMDupper.java

/**
 * Command-line entry point for DeDup duplicate removal.
 *
 * Reads a SAM/BAM stream either from stdin (piping mode, the default) or
 * from a file given with -i, removes duplicate reads, and reports
 * statistics. In file mode, -o must name an existing output directory;
 * a deduplicated BAM plus .log and .hist files are written there.
 *
 * @param args command-line options (see -h output)
 * @throws IOException if reading the input stream fails
 */
public static void main(String[] args) throws IOException {
    System.err.println("DeDup v" + VERSION);
    // The command-line options. A separate help-only option set lets "-h"
    // succeed even when the full option set would reject the arguments.
    Options helpOptions = new Options();
    helpOptions.addOption("h", "help", false, "show this help page");
    Options options = new Options();
    options.addOption("h", "help", false, "show this help page");
    options.addOption("i", "input", true,
            "the input file if this option is not specified,\nthe input is expected to be piped in");
    options.addOption("o", "output", true, "the output folder. Has to be specified if input is set.");
    options.addOption("m", "merged", false,
            "the input only contains merged reads.\n If this option is specified read names are not examined for prefixes.\n Both the start and end of the aligment are considered for all reads.");
    options.addOption("v", "version", false, "the version of DeDup.");
    HelpFormatter helpformatter = new HelpFormatter();
    CommandLineParser parser = new BasicParser();
    try {
        CommandLine cmd = parser.parse(helpOptions, args);
        if (cmd.hasOption('h')) {
            helpformatter.printHelp(CLASS_NAME, options);
            System.exit(0);
        }
    } catch (ParseException ignored) {
        // Intentionally ignored: any non-help invocation fails this
        // help-only parse; real argument errors are reported by the full
        // parse below.
    }

    String input = "";
    String outputpath = "";
    Boolean merged = Boolean.FALSE;
    try {
        CommandLine cmd = parser.parse(options, args);

        if (cmd.hasOption('i')) {
            input = cmd.getOptionValue('i');
            piped = false;
        }
        if (cmd.hasOption('o')) {
            outputpath = cmd.getOptionValue('o');
        }
        if (cmd.hasOption('m')) {
            merged = Boolean.TRUE;
        }
        if (cmd.hasOption('v')) {
            System.out.println("DeDup v" + VERSION);
            System.exit(0);
        }
    } catch (ParseException e) {
        helpformatter.printHelp(CLASS_NAME, options);
        System.err.println(e.getMessage());
        // Exit non-zero so calling scripts can detect the argument error
        // (previously this path exited 0).
        System.exit(1);
    }
    DecimalFormat df = new DecimalFormat("##.##");

    if (piped) {
        RMDupper rmdup = new RMDupper(System.in, System.out, merged);
        rmdup.readSAMFile();

        System.err.println("We are in piping mode!");
        System.err.println("Total reads: " + rmdup.dupStats.total + "\n");
        System.err.println("Reverse removed: " + rmdup.dupStats.removed_reverse + "\n");
        System.err.println("Forward removed: " + rmdup.dupStats.removed_forward + "\n");
        System.err.println("Merged removed: " + rmdup.dupStats.removed_merged + "\n");
        System.err.println("Total removed: " + (rmdup.dupStats.removed_forward + rmdup.dupStats.removed_merged
                + rmdup.dupStats.removed_reverse) + "\n");
        System.err.println("Duplication Rate: "
                + formatDuplicationRate(df, rmdup.dupStats.removed_merged + rmdup.dupStats.removed_reverse
                        + rmdup.dupStats.removed_forward, rmdup.dupStats.total));

    } else {
        if (outputpath.length() == 0) {
            System.err.println("The output folder has to be specified");
            helpformatter.printHelp(CLASS_NAME, options);
            // Non-zero exit: missing required argument is an error.
            System.exit(1);
        }

        //Check whether we have a directory as output path, else produce error message and quit!

        File f = new File(outputpath);
        if (!f.isDirectory()) {
            System.err.println("The output folder should be a folder and not a file!");
            // Non-zero exit: invalid output path is an error.
            System.exit(1);
        }

        File inputFile = new File(input);
        File outputFile = new File(
                outputpath + "/" + Files.getNameWithoutExtension(inputFile.getAbsolutePath()) + "_rmdup.bam");
        File outputlog = new File(
                outputpath + "/" + Files.getNameWithoutExtension(inputFile.getAbsolutePath()) + ".log");
        File outputhist = new File(
                outputpath + "/" + Files.getNameWithoutExtension(inputFile.getAbsolutePath()) + ".hist");

        // try-with-resources guarantees the log/histogram writers are
        // flushed and closed even if deduplication throws (the original
        // leaked them on any exception before close()).
        try (BufferedWriter bfw = new BufferedWriter(new FileWriter(outputlog));
                BufferedWriter histbfw = new BufferedWriter(new FileWriter(outputhist))) {

            RMDupper rmdup = new RMDupper(inputFile, outputFile, merged);
            rmdup.readSAMFile();
            rmdup.inputSam.close();
            rmdup.outputSam.close();

            bfw.write("Total reads: " + rmdup.dupStats.total + "\n");
            bfw.write("Reverse removed: " + rmdup.dupStats.removed_reverse + "\n");
            bfw.write("Forward removed: " + rmdup.dupStats.removed_forward + "\n");
            bfw.write("Merged removed: " + rmdup.dupStats.removed_merged + "\n");
            bfw.write("Total removed: " + (rmdup.dupStats.removed_forward + rmdup.dupStats.removed_merged
                    + rmdup.dupStats.removed_reverse) + "\n");
            bfw.write("Duplication Rate: "
                    + formatDuplicationRate(df, rmdup.dupStats.removed_merged + rmdup.dupStats.removed_reverse
                            + rmdup.dupStats.removed_forward, rmdup.dupStats.total));

            histbfw.write(rmdup.oc.getHistogram());

            System.out.println("Total reads: " + rmdup.dupStats.total + "\n");
            System.out.println("Unmerged removed: "
                    + (rmdup.dupStats.removed_forward + rmdup.dupStats.removed_reverse) + "\n");
            System.out.println("Merged removed: " + rmdup.dupStats.removed_merged + "\n");
            System.out.println("Total removed: " + (rmdup.dupStats.removed_forward
                    + rmdup.dupStats.removed_merged + rmdup.dupStats.removed_reverse) + "\n");
            System.out.println("Duplication Rate: "
                    + formatDuplicationRate(df, rmdup.dupStats.removed_merged + rmdup.dupStats.removed_reverse
                            + rmdup.dupStats.removed_forward, rmdup.dupStats.total));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

/**
 * Formats the duplication rate {@code removed / total}.
 *
 * Returns the formatted constant 0.00 when nothing was removed, which also
 * guards against the 0/0 = NaN case (the original log-file branch lacked
 * this guard and could write "NaN" for empty inputs).
 *
 * @param df      the shared two-decimal formatter
 * @param removed total number of removed reads
 * @param total   total number of reads processed
 * @return the formatted rate
 */
private static String formatDuplicationRate(DecimalFormat df, long removed, long total) {
    if (removed == 0) {
        return df.format(0.00);
    }
    return df.format((double) removed / (double) total);
}

From source file:edu.vt.cs.cnd2xsd.Cnd2XsdConverter.java

/**
 * Usage: Cnd2Xsd [path to source cnd] [path to write the xsd]
 * @param args/*from  ww  w  .  j a  va2s  .  c o  m*/
 * @throws LoginException
 * @throws RepositoryException
 * @throws IOException
 * @throws JAXBException
 */
@SuppressWarnings("static-access")
public static void main(String[] args) throws LoginException, RepositoryException, IOException, JAXBException,
        org.apache.commons.cli.ParseException {

    Session session = null;
    Cnd2XsdConverter converter = new Cnd2XsdConverter();

    try {
        Options opt = new Options();

        opt.addOption(OptionBuilder.hasArg(true).isRequired(false)
                .withDescription("Path for the input cnd file").create("fc"));
        opt.addOption(OptionBuilder.hasArg(true).isRequired(false).withDescription("Path for properties map.")
                .create("fp"));
        opt.addOption(OptionBuilder.hasArg(true).isRequired(false)
                .withDescription("Path for generating XML schema.").create("fx"));
        opt.addOption(OptionBuilder.hasArg(false).isRequired(false).withDescription("Prints this list.")
                .create("help"));
        opt.addOption(OptionBuilder.hasArg(true).isRequired(false).withDescription("The namespace for the XSD.")
                .create("ns"));
        opt.addOption(OptionBuilder.hasArg(true).isRequired(false).withDescription("The namespace prefix.")
                .create("nsp"));
        opt.addOption(OptionBuilder.hasArg(true).isRequired(false)
                .withDescription("The root element in the XSD.").create("r"));
        opt.addOption(OptionBuilder.hasArg(true).isRequired(false).withDescription("The root element type.")
                .create("rtype"));

        //create the basic parser
        BasicParser parser = new BasicParser();
        CommandLine cl = parser.parse(opt, args);
        HelpFormatter f = new HelpFormatter();
        //check if we have any leftover args
        if (cl.getArgs().length != 0 || args.length == 0) {
            f.printHelp(MAINCLI, opt);
            return;
        }

        if (cl.hasOption("help")) {
            f.printHelp(MAINCLI, opt);
            return;
        }

        String cndFilePath = cl.getOptionValue("fc");
        String xsdFilePath = cl.getOptionValue("fx");
        String propmapPath = cl.getOptionValue("fp");
        String ns = cl.getOptionValue("ns");
        String nsPrefix = cl.getOptionValue("nsp");
        String rt = cl.getOptionValue("r");
        String rtype = cl.getOptionValue("rtype");

        converter.init(cndFilePath, propmapPath, ns, nsPrefix, rt, rtype);
        FileOutputStream fout = new FileOutputStream(xsdFilePath);
        converter.convert(fout);

    } finally {
        if (session != null) {
            session.save();
            session.logout();
        }
    }
}

From source file:com.example.geomesa.hbase.HBaseQuickStart.java

/**
 * GeoMesa HBase quick-start: creates a feature schema in HBase, inserts
 * 1000 generated features, and runs a sample spatio-temporal query.
 *
 * @param args command-line options naming the HBase destination
 * @throws Exception if the data store cannot be created or any step fails
 */
public static void main(String[] args) throws Exception {
    // find out where -- in HBase -- the user wants to store data
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // verify that we can see this HBase destination in a GeoTools manner
    Map<String, Serializable> dsConf = getHBaseDataStoreConf(cmd);
    DataStore dataStore = DataStoreFinder.getDataStore(dsConf);
    // Fail fast with a clear message: a bare `assert` is a no-op unless the
    // JVM runs with -ea, which would let a null store NPE later instead.
    if (dataStore == null) {
        throw new IllegalStateException("Could not create a DataStore for the supplied HBase configuration");
    }

    // establish specifics concerning the SimpleFeatureType to store
    String simpleFeatureTypeName = "QuickStart";
    SimpleFeatureType simpleFeatureType = createSimpleFeatureType(simpleFeatureTypeName);

    // write Feature-specific metadata to the destination table in HBase
    // (first creating the table if it does not already exist); you only need
    // to create the FeatureType schema the *first* time you write any Features
    // of this type to the table
    System.out.println("Creating feature-type (schema):  " + simpleFeatureTypeName);
    dataStore.createSchema(simpleFeatureType);

    // create new features locally, and add them to this table
    System.out.println("Creating new features");
    FeatureCollection featureCollection = createNewFeatures(simpleFeatureType, 1000);
    System.out.println("Inserting new features");
    insertFeatures(simpleFeatureTypeName, dataStore, featureCollection);

    // query a few Features from this table
    System.out.println("Submitting query");
    queryFeatures(simpleFeatureTypeName, dataStore, "Where", -78.5, 37.5, -78.0, 38.0, "When",
            "2014-07-01T00:00:00.000Z", "2014-09-30T23:59:59.999Z", "(Who = 'Bierce')");
}

From source file:com.jakev.genaidl.App.java

/**
 * Entry point: parses command-line options, loads the given DEX/ODEX file,
 * and generates AIDL files into the output directory.
 *
 * Exit codes: 0 success, -1 argument error, -2 bad API level,
 * -3 missing input file, -4 DEX load failure, otherwise the result of
 * {@code processDex(...)}.
 *
 * @param args command-line options (run with -h for the list)
 */
public static void main(String[] args) {

    int rtn = 0;

    // Defaults: write AIDL files to the current directory, target API 23.
    String outputDirectory = ".";
    int sdkVersion = 23;

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = null;

    gOptions.addOption("a", true, "Android API level to use.");
    gOptions.addOption("d", false, "Show debugging information.");
    gOptions.addOption("h", false, "Show help screen.");
    gOptions.addOption("i", true, "Input DEX/ODEX file.");
    gOptions.addOption("o", true, "Output directory for AIDL files.");

    try {
        cmd = parser.parse(gOptions, args);

        if (cmd.hasOption("h")) {
            usage();
            System.exit(0);
        }

        if (cmd.hasOption("d"))
            gDebug = true;

        if (cmd.hasOption("a")) {
            try {
                sdkVersion = Integer.parseInt(cmd.getOptionValue("a"));
            } catch (NumberFormatException e) {
                System.err.println("[ERROR] Numeric API level required!");
                System.exit(-2);
            }
        }

        if (cmd.hasOption("o")) {
            outputDirectory = cmd.getOptionValue("o");
        }

        if (!cmd.hasOption("i")) {
            System.err.println("[ERROR] Input (-i) parameter is required!");
            usage();
            System.exit(-1);
        }

    } catch (ParseException e) {
        System.err.println("[ERROR] Unable to parse command line properties: " + e);
        System.exit(-1);
    }

    // cmd is non-null here: a ParseException exits above, and -i is
    // guaranteed present by the required-parameter check.
    String dexFileName = cmd.getOptionValue("i");

    if (!isFile(dexFileName)) {
        System.err.println("[ERROR] File '" + dexFileName + "' does not exist!");
        System.exit(-3);
    }

    if (gDebug) {
        System.out.println("Loading DEX into object.");
    }
    try {
        gDexFile = DexFileFactory.loadDexFile(dexFileName, sdkVersion, true);
    } catch (IOException e) {
        System.err.println("[ERROR] Unable to load DEX file!");
        System.exit(-4);
    }

    if (gDebug) {
        System.out.println("About to process DEX...");
    }
    rtn = processDex(outputDirectory);
    if (rtn != 0) {
        System.err.println("[ERROR] Error processing DEX!");
    }

    /* Close it down. */
    System.exit(rtn);
}