Example usage for java.io.File.exists()

A list of usage examples for java.io.File.exists()

Introduction

This page collects example usages of java.io.File.exists().

Prototype

public boolean exists() 

Document

Tests whether the file or directory denoted by this abstract pathname exists.
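
As a quick reference before the full examples, here is a minimal, self-contained sketch of the typical check-before-use pattern; the path /tmp/example.txt is a hypothetical placeholder, not taken from the examples below.

import java.io.File;

public class FileExistsExample {
    public static void main(String[] args) {
        // Hypothetical path used only for illustration.
        File file = new File("/tmp/example.txt");

        if (file.exists()) {
            // exists() returns true for both regular files and directories,
            // so a follow-up isDirectory()/isFile() check is often useful.
            System.out.println(file.getAbsolutePath() + " exists"
                    + (file.isDirectory() ? " (directory)" : " (file)"));
        } else {
            System.out.println(file.getAbsolutePath() + " does not exist");
        }
    }
}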

Usage

From source file: it.jnrpe.server.JNRPEServer.java

/**
 * The main method.
 * 
 * @param args
 *            The command line
 */
public static void main(final String[] args) {
    CommandLine cl = parseCommandLine(args);
    if (cl.hasOption("--help")) {
        if (!cl.hasOption("--conf")) {
            printUsage(null);
        }
    }

    if (cl.hasOption("--version")) {
        printVersion();
        System.exit(0);
    }

    JNRPEConfiguration conf = null;
    try {
        conf = loadConfiguration((String) cl.getValue("--conf"));
    } catch (Exception e) {
        System.out.println("Unable to parse the configuration at " + cl.getValue("--conf") + ". The error is : "
                + e.getMessage());
        e.printStackTrace();
        System.exit(-1);
    }

    String sPluginPath = conf.getServerSection().getPluginPath();
    if (sPluginPath == null) {
        System.out.println("Plugin path has not been specified");
        System.exit(-1);
    }
    File fPluginPath = new File(sPluginPath);

    if (fPluginPath.exists()) {
        if (!fPluginPath.isDirectory()) {
            System.out.println("Specified plugin path ('" + sPluginPath + "') must be a directory");
            System.exit(-1);
        }
    } else {
        System.out.println("Specified plugin path ('" + sPluginPath + "') do not exist");
        System.exit(-1);
    }

    IPluginRepository pr = null;
    try {
        pr = loadPluginDefinitions(conf.getServerSection().getPluginPath());
    } catch (PluginConfigurationException e) {
        System.out.println("An error has occurred while parsing " + "the plugin packages : " + e.getMessage());
        System.exit(-1);
    }

    if (cl.hasOption("--help") && cl.getValue("--help") != null) {
        printHelp(pr, (String) cl.getValue("--help"));
    }

    if (cl.hasOption("--list")) {
        printPluginList(pr);
    }

    CommandRepository cr = conf.createCommandRepository();

    JNRPEBuilder builder = JNRPEBuilder.forRepositories(pr, cr)
            .acceptParams(conf.getServerSection().acceptParams())
            .withMaxAcceptedConnections(conf.getServerSection().getBacklogSize())
            .withReadTimeout(conf.getServerSection().getReadTimeout())
            .withWriteTimeout(conf.getServerSection().getWriteTimeout())
            .withListener(new EventLoggerListener());

    for (String sAcceptedAddress : conf.getServerSection().getAllowedAddresses()) {
        builder.acceptHost(sAcceptedAddress);
    }

    JNRPE jnrpe = builder.build();

    for (BindAddress bindAddress : conf.getServerSection().getBindAddresses()) {
        int iPort = DEFAULT_PORT;
        String[] vsParts = bindAddress.getBindingAddress().split(":");
        String sIp = vsParts[0];
        if (vsParts.length > 1) {
            iPort = Integer.parseInt(vsParts[1]);
        }

        try {
            jnrpe.listen(sIp, iPort, bindAddress.isSSL());
        } catch (UnknownHostException e) {
            System.out.println(
                    String.format("Error binding the server to %s:%d : %s", sIp, iPort, e.getMessage()));
        }
    }

    if (cl.hasOption("--interactive")) {
        new JNRPEConsole(jnrpe, pr, cr).start();
        System.exit(0);
    }
}

From source file: com.github.fritaly.svngraph.SvnGraph.java

public static void main(String[] args) throws Exception {
    if (args.length != 2) {
        System.out.println(String.format("%s <input-file> <output-file>", SvnGraph.class.getSimpleName()));
        System.exit(1);
    }

    final File input = new File(args[0]);

    if (!input.exists()) {
        throw new IllegalArgumentException(
                String.format("The given file '%s' doesn't exist", input.getAbsolutePath()));
    }

    final File output = new File(args[1]);

    final Document document = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(input);

    final History history = new History(document);

    final Set<String> rootPaths = history.getRootPaths();

    System.out.println(rootPaths);

    for (String path : rootPaths) {
        System.out.println(path);
        System.out.println(history.getHistory(path).getRevisions());
        System.out.println();
    }

    int count = 0;

    FileWriter fileWriter = null;
    GraphMLWriter graphWriter = null;

    try {
        fileWriter = new FileWriter(output);

        graphWriter = new GraphMLWriter(fileWriter);

        final NodeStyle tagStyle = graphWriter.getNodeStyle();
        tagStyle.setFillColor(Color.WHITE);

        graphWriter.graph();

        // map associating node labels to their corresponding node id in the graph
        final Map<String, String> nodeIdsPerLabel = new TreeMap<>();

        // the node style associated to each branch
        final Map<String, NodeStyle> nodeStyles = new TreeMap<>();

        for (Revision revision : history.getSignificantRevisions()) {
            System.out.println(revision.getNumber() + " - " + revision.getMessage());

            // TODO Render also the deletion of branches
            // there should be only 1 significant update per revision (the one with action ADD)
            for (Update update : revision.getSignificantUpdates()) {
                if (update.isCopy()) {
                    // a merge is also considered a copy
                    final RevisionPath source = update.getCopySource();

                    System.out.println(String.format("  > %s %s from %s@%d", update.getAction(),
                            update.getPath(), source.getPath(), source.getRevision()));

                    final String sourceRoot = Utils.getRootName(source.getPath());

                    if (sourceRoot == null) {
                        // skip the revisions whose associated root is
                        // null (happens when a branch was created
                        // outside the 'branches' directory, for
                        // instance)
                        System.err.println(String.format("Skipped revision %d because of a null root",
                                source.getRevision()));
                        continue;
                    }

                    final String sourceLabel = computeNodeLabel(sourceRoot, source.getRevision());

                    // create a node for the source (path, revision)
                    final String sourceId;

                    if (nodeIdsPerLabel.containsKey(sourceLabel)) {
                        // retrieve the id of the existing node
                        sourceId = nodeIdsPerLabel.get(sourceLabel);
                    } else {
                        // create the new node
                        if (Utils.isTagPath(source.getPath())) {
                            graphWriter.setNodeStyle(tagStyle);
                        } else {
                            if (!nodeStyles.containsKey(sourceRoot)) {
                                final NodeStyle style = new NodeStyle();
                                style.setFillColor(randomColor());

                                nodeStyles.put(sourceRoot, style);
                            }

                            graphWriter.setNodeStyle(nodeStyles.get(sourceRoot));
                        }

                        sourceId = graphWriter.node(sourceLabel);

                        nodeIdsPerLabel.put(sourceLabel, sourceId);
                    }

                    // and another for the newly created directory
                    final String targetRoot = Utils.getRootName(update.getPath());

                    if (targetRoot == null) {
                        System.err.println(String.format("Skipped revision %d because of a null root",
                                revision.getNumber()));
                        continue;
                    }

                    final String targetLabel = computeNodeLabel(targetRoot, revision.getNumber());

                    if (Utils.isTagPath(update.getPath())) {
                        graphWriter.setNodeStyle(tagStyle);
                    } else {
                        if (!nodeStyles.containsKey(targetRoot)) {
                            final NodeStyle style = new NodeStyle();
                            style.setFillColor(randomColor());

                            nodeStyles.put(targetRoot, style);
                        }

                        graphWriter.setNodeStyle(nodeStyles.get(targetRoot));
                    }

                    final String targetId;

                    if (nodeIdsPerLabel.containsKey(targetLabel)) {
                        // retrieve the id of the existing node
                        targetId = nodeIdsPerLabel.get(targetLabel);
                    } else {
                        // create the new node
                        if (Utils.isTagPath(update.getPath())) {
                            graphWriter.setNodeStyle(tagStyle);
                        } else {
                            if (!nodeStyles.containsKey(targetRoot)) {
                                final NodeStyle style = new NodeStyle();
                                style.setFillColor(randomColor());

                                nodeStyles.put(targetRoot, style);
                            }

                            graphWriter.setNodeStyle(nodeStyles.get(targetRoot));
                        }

                        targetId = graphWriter.node(targetLabel);

                        nodeIdsPerLabel.put(targetLabel, targetId);
                    }

                    // create an edge between the 2 nodes
                    graphWriter.edge(sourceId, targetId);
                } else {
                    System.out.println(String.format("  > %s %s", update.getAction(), update.getPath()));
                }
            }

            System.out.println();

            count++;
        }

        // Dispatch the revisions per corresponding branch
        final Map<String, Set<Long>> revisionsPerBranch = new TreeMap<>();

        for (String nodeLabel : nodeIdsPerLabel.keySet()) {
            if (nodeLabel.contains("@")) {
                final String branchName = StringUtils.substringBefore(nodeLabel, "@");
                final long revision = Long.parseLong(StringUtils.substringAfter(nodeLabel, "@"));

                if (!revisionsPerBranch.containsKey(branchName)) {
                    revisionsPerBranch.put(branchName, new TreeSet<Long>());
                }

                revisionsPerBranch.get(branchName).add(revision);
            } else {
                throw new IllegalStateException(nodeLabel);
            }
        }

        // Recreate the missing edges between revisions from a same branch
        for (String branchName : revisionsPerBranch.keySet()) {
            final List<Long> branchRevisions = new ArrayList<>(revisionsPerBranch.get(branchName));

            for (int i = 0; i < branchRevisions.size() - 1; i++) {
                final String nodeLabel1 = String.format("%s@%d", branchName, branchRevisions.get(i));
                final String nodeLabel2 = String.format("%s@%d", branchName, branchRevisions.get(i + 1));

                graphWriter.edge(nodeIdsPerLabel.get(nodeLabel1), nodeIdsPerLabel.get(nodeLabel2));
            }
        }

        graphWriter.closeGraph();

        System.out.println(String.format("Found %d significant revisions", count));
    } finally {
        if (graphWriter != null) {
            graphWriter.close();
        }
        if (fileWriter != null) {
            fileWriter.close();
        }
    }

    System.out.println("Done");
}

From source file: cc.twittertools.index.IndexStatuses.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(new Option(HELP_OPTION, "show help"));
    options.addOption(new Option(OPTIMIZE_OPTION, "merge indexes into a single segment"));
    options.addOption(new Option(STORE_TERM_VECTORS_OPTION, "store term vectors"));

    options.addOption(OptionBuilder.withArgName("dir").hasArg().withDescription("source collection directory")
            .create(COLLECTION_OPTION));
    options.addOption(
            OptionBuilder.withArgName("dir").hasArg().withDescription("index location").create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("file").hasArg().withDescription("file with deleted tweetids")
            .create(DELETES_OPTION));
    options.addOption(OptionBuilder.withArgName("id").hasArg().withDescription("max id").create(MAX_ID_OPTION));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (cmdline.hasOption(HELP_OPTION) || !cmdline.hasOption(COLLECTION_OPTION)
            || !cmdline.hasOption(INDEX_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(IndexStatuses.class.getName(), options);
        System.exit(-1);
    }

    String collectionPath = cmdline.getOptionValue(COLLECTION_OPTION);
    String indexPath = cmdline.getOptionValue(INDEX_OPTION);

    final FieldType textOptions = new FieldType();
    textOptions.setIndexed(true);
    textOptions.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
    textOptions.setStored(true);
    textOptions.setTokenized(true);
    if (cmdline.hasOption(STORE_TERM_VECTORS_OPTION)) {
        textOptions.setStoreTermVectors(true);
    }

    LOG.info("collection: " + collectionPath);
    LOG.info("index: " + indexPath);

    LongOpenHashSet deletes = null;
    if (cmdline.hasOption(DELETES_OPTION)) {
        deletes = new LongOpenHashSet();
        File deletesFile = new File(cmdline.getOptionValue(DELETES_OPTION));
        if (!deletesFile.exists()) {
            System.err.println("Error: " + deletesFile + " does not exist!");
            System.exit(-1);
        }
        LOG.info("Reading deletes from " + deletesFile);

        FileInputStream fin = new FileInputStream(deletesFile);
        byte[] ignoreBytes = new byte[2];
        fin.read(ignoreBytes); // "B", "Z" bytes from commandline tools
        BufferedReader br = new BufferedReader(new InputStreamReader(new CBZip2InputStream(fin)));

        String s;
        while ((s = br.readLine()) != null) {
            if (s.contains("\t")) {
                deletes.add(Long.parseLong(s.split("\t")[0]));
            } else {
                deletes.add(Long.parseLong(s));
            }
        }
        br.close();
        fin.close();
        LOG.info("Read " + deletes.size() + " tweetids from deletes file.");
    }

    long maxId = Long.MAX_VALUE;
    if (cmdline.hasOption(MAX_ID_OPTION)) {
        maxId = Long.parseLong(cmdline.getOptionValue(MAX_ID_OPTION));
        LOG.info("index: " + maxId);
    }

    long startTime = System.currentTimeMillis();
    File file = new File(collectionPath);
    if (!file.exists()) {
        System.err.println("Error: " + file + " does not exist!");
        System.exit(-1);
    }

    StatusStream stream = new JsonStatusCorpusReader(file);

    Directory dir = FSDirectory.open(new File(indexPath));
    IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_43, IndexStatuses.ANALYZER);
    config.setOpenMode(OpenMode.CREATE);

    IndexWriter writer = new IndexWriter(dir, config);
    int cnt = 0;
    Status status;
    try {
        while ((status = stream.next()) != null) {
            if (status.getText() == null) {
                continue;
            }

            // Skip deleted tweetids.
            if (deletes != null && deletes.contains(status.getId())) {
                continue;
            }

            if (status.getId() > maxId) {
                continue;
            }

            cnt++;
            Document doc = new Document();
            doc.add(new LongField(StatusField.ID.name, status.getId(), Field.Store.YES));
            doc.add(new LongField(StatusField.EPOCH.name, status.getEpoch(), Field.Store.YES));
            doc.add(new TextField(StatusField.SCREEN_NAME.name, status.getScreenname(), Store.YES));

            doc.add(new Field(StatusField.TEXT.name, status.getText(), textOptions));

            doc.add(new IntField(StatusField.FRIENDS_COUNT.name, status.getFollowersCount(), Store.YES));
            doc.add(new IntField(StatusField.FOLLOWERS_COUNT.name, status.getFriendsCount(), Store.YES));
            doc.add(new IntField(StatusField.STATUSES_COUNT.name, status.getStatusesCount(), Store.YES));

            long inReplyToStatusId = status.getInReplyToStatusId();
            if (inReplyToStatusId > 0) {
                doc.add(new LongField(StatusField.IN_REPLY_TO_STATUS_ID.name, inReplyToStatusId,
                        Field.Store.YES));
                doc.add(new LongField(StatusField.IN_REPLY_TO_USER_ID.name, status.getInReplyToUserId(),
                        Field.Store.YES));
            }

            String lang = status.getLang();
            if (!lang.equals("unknown")) {
                doc.add(new TextField(StatusField.LANG.name, status.getLang(), Store.YES));
            }

            long retweetStatusId = status.getRetweetedStatusId();
            if (retweetStatusId > 0) {
                doc.add(new LongField(StatusField.RETWEETED_STATUS_ID.name, retweetStatusId, Field.Store.YES));
                doc.add(new LongField(StatusField.RETWEETED_USER_ID.name, status.getRetweetedUserId(),
                        Field.Store.YES));
                doc.add(new IntField(StatusField.RETWEET_COUNT.name, status.getRetweetCount(), Store.YES));
                if (status.getRetweetCount() < 0 || status.getRetweetedStatusId() < 0) {
                    LOG.warn("Error parsing retweet fields of " + status.getId());
                }
            }

            writer.addDocument(doc);
            if (cnt % 100000 == 0) {
                LOG.info(cnt + " statuses indexed");
            }
        }

        LOG.info(String.format("Total of %s statuses added", cnt));

        if (cmdline.hasOption(OPTIMIZE_OPTION)) {
            LOG.info("Merging segments...");
            writer.forceMerge(1);
            LOG.info("Done!");
        }

        LOG.info("Total elapsed time: " + (System.currentTimeMillis() - startTime) + "ms");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        writer.close();
        dir.close();
        stream.close();
    }
}

From source file: com.act.biointerpretation.l2expansion.L2ExpansionDriver.java

public static void main(String[] args) throws Exception {

    // Build command line parser.
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        LOGGER.error("Argument parsing failed: %s", e.getMessage());
        HELP_FORMATTER.printHelp(L2ExpansionDriver.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    // Print help.
    if (cl.hasOption(OPTION_HELP)) {
        HELP_FORMATTER.printHelp(L2ExpansionDriver.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    // Get output files.
    String outputPath = cl.getOptionValue(OPTION_OUTPUT_PATH);
    File outputFile = new File(outputPath);
    if (outputFile.isDirectory() || outputFile.exists()) {
        LOGGER.error("Supplied output file is a directory or already exists.");
        System.exit(1);
    }
    outputFile.createNewFile();
    File inchiOutputFile = new File(outputPath + ".inchis");
    if (inchiOutputFile.isDirectory() || inchiOutputFile.exists()) {
        LOGGER.error("Supplied inchi output file is a directory or already exists.");
        System.exit(1);
    }
    inchiOutputFile.createNewFile();

    Optional<OutputStream> maybeProgressStream = Optional.empty();
    if (cl.hasOption(OPTION_PROGRESS_PATH)) {
        String progressPath = cl.getOptionValue(OPTION_PROGRESS_PATH);
        File progressFile = new File(progressPath);
        LOGGER.info("Writing incremental results to file at %s", progressFile.getAbsolutePath());
        if (progressFile.isDirectory() || progressFile.exists()) {
            LOGGER.error("Supplied progress file is a directory or already exists.");
            System.exit(1);
        }
        maybeProgressStream = Optional.of(new FileOutputStream(progressFile));
    }

    // Get metabolite list
    L2InchiCorpus inchiCorpus = getInchiCorpus(cl, OPTION_METABOLITES);
    LOGGER.info("%d substrate inchis.", inchiCorpus.getInchiList().size());

    Integer maxMass = NO_MASS_THRESHOLD;
    if (cl.hasOption(OPTION_MASS_THRESHOLD)) {
        maxMass = Integer.parseInt(cl.getOptionValue(OPTION_MASS_THRESHOLD));
        LOGGER.info("Filtering out substrates with mass more than %d daltons.", maxMass);
    }
    inchiCorpus.filterByMass(maxMass);
    LOGGER.info("%d substrate inchis that are importable as molecules.", inchiCorpus.getInchiList().size());

    PredictionGenerator generator = new AllPredictionsGenerator(new ReactionProjector());

    L2Expander expander = buildExpander(cl, inchiCorpus, generator);
    L2PredictionCorpus predictionCorpus = expander.getPredictions(maybeProgressStream);

    LOGGER.info("Done with L2 expansion. Produced %d predictions.", predictionCorpus.getCorpus().size());

    LOGGER.info("Writing corpus to file.");
    predictionCorpus.writePredictionsToJsonFile(outputFile);
    L2InchiCorpus productInchis = new L2InchiCorpus(predictionCorpus.getUniqueProductInchis());
    productInchis.writeToFile(inchiOutputFile);
    LOGGER.info("L2ExpansionDriver complete!");
}

From source file: de.prozesskraft.pkraft.Wrap.java

public static void main(String[] args) throws org.apache.commons.cli.ParseException, IOException {

    //      try
    //      {
    //         if (args.length != 3)
    //         {
    //            System.out.println("Please specify processdefinition file (xml) and an outputfilename");
    //         }
    //         
    //      }
    //      catch (ArrayIndexOutOfBoundsException e)
    //      {
    //         System.out.println("***ArrayIndexOutOfBoundsException: Please specify processdefinition.xml, openoffice_template.od*, newfile_for_processdefinitions.odt\n" + e.toString());
    //      }

    /*----------------------------
      get options from ini-file
    ----------------------------*/
    File inifile = new java.io.File(
            WhereAmI.getInstallDirectoryAbsolutePath(Wrap.class) + "/" + "../etc/pkraft-wrap.ini");

    if (inifile.exists()) {
        try {
            ini = new Ini(inifile);
        } catch (InvalidFileFormatException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
    } else {
        System.err.println("ini file does not exist: " + inifile.getAbsolutePath());
        System.exit(1);
    }

    /*----------------------------
      create boolean options
    ----------------------------*/
    Option ohelp = new Option("help", "print this message");
    Option ov = new Option("v", "prints version and build-date");

    /*----------------------------
      create argument options
    ----------------------------*/
    Option ooutput = OptionBuilder.withArgName("FILE").hasArg()
            .withDescription("[mandatory; default: .] file for generated wrapper process.")
            //            .isRequired()
            .create("output");

    Option odefinition = OptionBuilder.withArgName("FILE").hasArg()
            .withDescription("[mandatory] process definition file.")
            //            .isRequired()
            .create("definition");

    /*----------------------------
      create options object
    ----------------------------*/
    Options options = new Options();

    options.addOption(ohelp);
    options.addOption(ov);
    options.addOption(ooutput);
    options.addOption(odefinition);

    /*----------------------------
      create the parser
    ----------------------------*/
    CommandLineParser parser = new GnuParser();
    try {
        // parse the command line arguments
        commandline = parser.parse(options, args);

    } catch (Exception exp) {
        // oops, something went wrong
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
        exiter();
    }

    /*----------------------------
      usage/help
    ----------------------------*/
    if (commandline.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("wrap", options);
        System.exit(0);
    }

    else if (commandline.hasOption("v")) {
        System.out.println("web:     www.prozesskraft.de");
        System.out.println("version: [% version %]");
        System.out.println("date:    [% date %]");
        System.exit(0);
    }

    /*----------------------------
      check whether a bad combination of parameters was specified
    ----------------------------*/
    if (!(commandline.hasOption("definition"))) {
        System.err.println("option -definition is mandatory.");
        exiter();
    }

    if (!(commandline.hasOption("output"))) {
        System.err.println("option -output is mandatory.");
        exiter();
    }

    /*----------------------------
      check the license and abort if necessary
    ----------------------------*/

    // check for valid license
    ArrayList<String> allPortAtHost = new ArrayList<String>();
    allPortAtHost.add(ini.get("license-server", "license-server-1"));
    allPortAtHost.add(ini.get("license-server", "license-server-2"));
    allPortAtHost.add(ini.get("license-server", "license-server-3"));

    MyLicense lic = new MyLicense(allPortAtHost, "1", "user-edition", "0.1");

    // print the license log
    for (String actLine : (ArrayList<String>) lic.getLog()) {
        System.err.println(actLine);
    }

    // abort if the license is not valid
    if (!lic.isValid()) {
        System.exit(1);
    }

    /*----------------------------
      the actual business logic
    ----------------------------*/
    Process p1 = new Process();
    java.io.File output = new java.io.File(commandline.getOptionValue("output"));

    if (output.exists()) {
        System.err.println("warn: already exists: " + output.getCanonicalPath());
        exiter();
    }

    p1.setInfilexml(commandline.getOptionValue("definition"));
    System.err.println("info: reading process definition " + commandline.getOptionValue("definition"));

    // dummy process
    Process p2 = null;

    try {
        p2 = p1.readXml();
    } catch (JAXBException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        System.err.println("error");
        exiter();
    }

    // generate the wrapper process
    Process p3 = p2.getProcessAsWrapper();
    p3.setOutfilexml(output.getAbsolutePath());

    // write out the new wrap process
    p3.writeXml();
}

From source file: edu.msu.cme.rdp.multicompare.Reprocess.java

/**
 * This class reprocesses the classification results (allrank output) and prints a hierarchy output file based on the confidence cutoff;
 * it prints only the detailed classification results with an assignment at a certain rank whose confidence is above the cutoff and/or that matches a given taxon.
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {

    PrintWriter assign_out = new PrintWriter(new NullWriter());
    float conf = 0.8f;
    PrintStream heir_out = null;
    String hier_out_filename = null;
    ClassificationResultFormatter.FORMAT format = ClassificationResultFormatter.FORMAT.allRank;
    String rank = null;
    String taxonFilterFile = null;
    String train_propfile = null;
    String gene = null;
    List<MCSample> samples = new ArrayList();

    try {
        CommandLine line = new PosixParser().parse(options, args);
        if (line.hasOption(CmdOptions.HIER_OUTFILE_SHORT_OPT)) {
            hier_out_filename = line.getOptionValue(CmdOptions.HIER_OUTFILE_SHORT_OPT);
            heir_out = new PrintStream(hier_out_filename);
        } else {
            throw new Exception(
                    "It make sense to provide output filename for " + CmdOptions.HIER_OUTFILE_LONG_OPT);
        }
        if (line.hasOption(CmdOptions.OUTFILE_SHORT_OPT)) {
            assign_out = new PrintWriter(line.getOptionValue(CmdOptions.OUTFILE_SHORT_OPT));
        }

        if (line.hasOption(CmdOptions.RANK_SHORT_OPT)) {
            rank = line.getOptionValue(CmdOptions.RANK_SHORT_OPT);
        }
        if (line.hasOption(CmdOptions.TAXON_SHORT_OPT)) {
            taxonFilterFile = line.getOptionValue(CmdOptions.TAXON_SHORT_OPT);
        }

        if (line.hasOption(CmdOptions.BOOTSTRAP_SHORT_OPT)) {
            conf = Float.parseFloat(line.getOptionValue(CmdOptions.BOOTSTRAP_SHORT_OPT));
            if (conf < 0 || conf > 1) {
                throw new IllegalArgumentException("Confidence must be in the range [0,1]");
            }
        }
        if (line.hasOption(CmdOptions.FORMAT_SHORT_OPT)) {
            String f = line.getOptionValue(CmdOptions.FORMAT_SHORT_OPT);
            if (f.equalsIgnoreCase("allrank")) {
                format = ClassificationResultFormatter.FORMAT.allRank;
            } else if (f.equalsIgnoreCase("fixrank")) {
                format = ClassificationResultFormatter.FORMAT.fixRank;
            } else if (f.equalsIgnoreCase("db")) {
                format = ClassificationResultFormatter.FORMAT.dbformat;
            } else if (f.equalsIgnoreCase("filterbyconf")) {
                format = ClassificationResultFormatter.FORMAT.filterbyconf;
            } else {
                throw new IllegalArgumentException(
                        "Not valid output format, only allrank, fixrank, filterbyconf and db allowed");
            }
        }
        if (line.hasOption(CmdOptions.TRAINPROPFILE_SHORT_OPT)) {
            if (gene != null) {
                throw new IllegalArgumentException(
                        "Already specified the gene from the default location. Can not specify train_propfile");
            } else {
                train_propfile = line.getOptionValue(CmdOptions.TRAINPROPFILE_SHORT_OPT);
            }
        }
        if (line.hasOption(CmdOptions.GENE_SHORT_OPT)) {
            if (train_propfile != null) {
                throw new IllegalArgumentException(
                        "Already specified train_propfile. Can not specify gene any more");
            }
            gene = line.getOptionValue(CmdOptions.GENE_SHORT_OPT).toLowerCase();

            if (!gene.equals(ClassifierFactory.RRNA_16S_GENE) && !gene.equals(ClassifierFactory.FUNGALLSU_GENE)
                    && !gene.equals(ClassifierFactory.FUNGALITS_warcup_GENE)
                    && !gene.equals(ClassifierFactory.FUNGALITS_unite_GENE)) {
                throw new IllegalArgumentException(gene + " is NOT valid, only allows "
                        + ClassifierFactory.RRNA_16S_GENE + ", " + ClassifierFactory.FUNGALLSU_GENE + ", "
                        + ClassifierFactory.FUNGALITS_warcup_GENE + " and "
                        + ClassifierFactory.FUNGALITS_unite_GENE);
            }
        }
        args = line.getArgs();
        if (args.length < 1) {
            throw new Exception("Incorrect number of command line arguments");
        }

        for (String arg : args) {
            String[] inFileNames = arg.split(",");
            String inputFile = inFileNames[0];
            File idmappingFile = null;

            if (inFileNames.length == 2) {
                idmappingFile = new File(inFileNames[1]);
                if (!idmappingFile.exists()) {
                    System.err.println("Failed to find input file \"" + inFileNames[1] + "\"");
                    return;
                }
            }

            MCSample nextSample = new MCSampleResult(inputFile, idmappingFile);
            samples.add(nextSample);

        }
    } catch (Exception e) {
        System.out.println("Command Error: " + e.getMessage());
        new HelpFormatter().printHelp(120,
                "Reprocess [options] <Classification_allrank_result>[,idmappingfile] ...", "", options, "");
        return;
    }

    if (train_propfile == null && gene == null) {
        gene = ClassifierFactory.RRNA_16S_GENE;
    }

    HashSet<String> taxonFilter = null;
    if (taxonFilterFile != null) {
        taxonFilter = readTaxonFilterFile(taxonFilterFile);
    }

    MultiClassifier multiClassifier = new MultiClassifier(train_propfile, gene);
    DefaultPrintVisitor printVisitor = new DefaultPrintVisitor(heir_out, samples);
    MultiClassifierResult result = multiClassifier.multiClassificationParser(samples, conf, assign_out, format,
            rank, taxonFilter);

    result.getRoot().topDownVisit(printVisitor);

    assign_out.close();
    heir_out.close();
    if (multiClassifier.hasCopyNumber()) {
        // print copy number corrected counts
        File cn_corrected_s = new File(new File(hier_out_filename).getParentFile(),
                "cncorrected_" + hier_out_filename);
        PrintStream cn_corrected_hier_out = new PrintStream(cn_corrected_s);
        printVisitor = new DefaultPrintVisitor(cn_corrected_hier_out, samples, true);
        result.getRoot().topDownVisit(printVisitor);
        cn_corrected_hier_out.close();
    }

}

From source file: io.anserini.index.IndexTweets.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(new Option(HELP_OPTION, "show help"));
    options.addOption(new Option(OPTIMIZE_OPTION, "merge indexes into a single segment"));
    options.addOption(new Option(STORE_TERM_VECTORS_OPTION, "store term vectors"));

    options.addOption(OptionBuilder.withArgName("dir").hasArg().withDescription("source collection directory")
            .create(COLLECTION_OPTION));
    options.addOption(
            OptionBuilder.withArgName("dir").hasArg().withDescription("index location").create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("file").hasArg().withDescription("file with deleted tweetids")
            .create(DELETES_OPTION));
    options.addOption(OptionBuilder.withArgName("id").hasArg().withDescription("max id").create(MAX_ID_OPTION));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (cmdline.hasOption(HELP_OPTION) || !cmdline.hasOption(COLLECTION_OPTION)
            || !cmdline.hasOption(INDEX_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(IndexTweets.class.getName(), options);
        System.exit(-1);
    }

    String collectionPath = cmdline.getOptionValue(COLLECTION_OPTION);
    String indexPath = cmdline.getOptionValue(INDEX_OPTION);

    final FieldType textOptions = new FieldType();
    textOptions.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
    textOptions.setStored(true);
    textOptions.setTokenized(true);
    if (cmdline.hasOption(STORE_TERM_VECTORS_OPTION)) {
        textOptions.setStoreTermVectors(true);
    }

    LOG.info("collection: " + collectionPath);
    LOG.info("index: " + indexPath);

    LongOpenHashSet deletes = null;
    if (cmdline.hasOption(DELETES_OPTION)) {
        deletes = new LongOpenHashSet();
        File deletesFile = new File(cmdline.getOptionValue(DELETES_OPTION));
        if (!deletesFile.exists()) {
            System.err.println("Error: " + deletesFile + " does not exist!");
            System.exit(-1);
        }
        LOG.info("Reading deletes from " + deletesFile);

        FileInputStream fin = new FileInputStream(deletesFile);
        byte[] ignoreBytes = new byte[2];
        fin.read(ignoreBytes); // "B", "Z" bytes from commandline tools
        BufferedReader br = new BufferedReader(new InputStreamReader(new CBZip2InputStream(fin)));

        String s;
        while ((s = br.readLine()) != null) {
            if (s.contains("\t")) {
                deletes.add(Long.parseLong(s.split("\t")[0]));
            } else {
                deletes.add(Long.parseLong(s));
            }
        }
        br.close();
        fin.close();
        LOG.info("Read " + deletes.size() + " tweetids from deletes file.");
    }

    long maxId = Long.MAX_VALUE;
    if (cmdline.hasOption(MAX_ID_OPTION)) {
        maxId = Long.parseLong(cmdline.getOptionValue(MAX_ID_OPTION));
        LOG.info("index: " + maxId);
    }

    long startTime = System.currentTimeMillis();
    File file = new File(collectionPath);
    if (!file.exists()) {
        System.err.println("Error: " + file + " does not exist!");
        System.exit(-1);
    }

    StatusStream stream = new JsonStatusCorpusReader(file);

    Directory dir = FSDirectory.open(Paths.get(indexPath));
    final IndexWriterConfig config = new IndexWriterConfig(ANALYZER);

    config.setOpenMode(IndexWriterConfig.OpenMode.CREATE);

    IndexWriter writer = new IndexWriter(dir, config);
    int cnt = 0;
    Status status;
    try {
        while ((status = stream.next()) != null) {
            if (status.getText() == null) {
                continue;
            }

            // Skip deleted tweetids.
            if (deletes != null && deletes.contains(status.getId())) {
                continue;
            }

            if (status.getId() > maxId) {
                continue;
            }

            cnt++;
            Document doc = new Document();
            doc.add(new LongPoint(StatusField.ID.name, status.getId()));
            doc.add(new StoredField(StatusField.ID.name, status.getId()));
            doc.add(new LongPoint(StatusField.EPOCH.name, status.getEpoch()));
            doc.add(new StoredField(StatusField.EPOCH.name, status.getEpoch()));
            doc.add(new TextField(StatusField.SCREEN_NAME.name, status.getScreenname(), Store.YES));

            doc.add(new Field(StatusField.TEXT.name, status.getText(), textOptions));

            doc.add(new IntPoint(StatusField.FRIENDS_COUNT.name, status.getFollowersCount()));
            doc.add(new StoredField(StatusField.FRIENDS_COUNT.name, status.getFollowersCount()));
            doc.add(new IntPoint(StatusField.FOLLOWERS_COUNT.name, status.getFriendsCount()));
            doc.add(new StoredField(StatusField.FOLLOWERS_COUNT.name, status.getFriendsCount()));
            doc.add(new IntPoint(StatusField.STATUSES_COUNT.name, status.getStatusesCount()));
            doc.add(new StoredField(StatusField.STATUSES_COUNT.name, status.getStatusesCount()));

            long inReplyToStatusId = status.getInReplyToStatusId();
            if (inReplyToStatusId > 0) {
                doc.add(new LongPoint(StatusField.IN_REPLY_TO_STATUS_ID.name, inReplyToStatusId));
                doc.add(new StoredField(StatusField.IN_REPLY_TO_STATUS_ID.name, inReplyToStatusId));
                doc.add(new LongPoint(StatusField.IN_REPLY_TO_USER_ID.name, status.getInReplyToUserId()));
                doc.add(new StoredField(StatusField.IN_REPLY_TO_USER_ID.name, status.getInReplyToUserId()));
            }

            String lang = status.getLang();
            if (!lang.equals("unknown")) {
                doc.add(new TextField(StatusField.LANG.name, status.getLang(), Store.YES));
            }

            long retweetStatusId = status.getRetweetedStatusId();
            if (retweetStatusId > 0) {
                doc.add(new LongPoint(StatusField.RETWEETED_STATUS_ID.name, retweetStatusId));
                doc.add(new StoredField(StatusField.RETWEETED_STATUS_ID.name, retweetStatusId));
                doc.add(new LongPoint(StatusField.RETWEETED_USER_ID.name, status.getRetweetedUserId()));
                doc.add(new StoredField(StatusField.RETWEETED_USER_ID.name, status.getRetweetedUserId()));
                doc.add(new IntPoint(StatusField.RETWEET_COUNT.name, status.getRetweetCount()));
                doc.add(new StoredField(StatusField.RETWEET_COUNT.name, status.getRetweetCount()));
                if (status.getRetweetCount() < 0 || status.getRetweetedStatusId() < 0) {
                    LOG.warn("Error parsing retweet fields of " + status.getId());
                }
            }

            writer.addDocument(doc);
            if (cnt % 100000 == 0) {
                LOG.info(cnt + " statuses indexed");
            }
        }

        LOG.info(String.format("Total of %s statuses added", cnt));

        if (cmdline.hasOption(OPTIMIZE_OPTION)) {
            LOG.info("Merging segments...");
            writer.forceMerge(1);
            LOG.info("Done!");
        }

        LOG.info("Total elapsed time: " + (System.currentTimeMillis() - startTime) + "ms");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        writer.close();
        dir.close();
        stream.close();
    }
}

From source file: mase.stat.MasterTournament.java

public static void main(String[] args) throws Exception {
    List<File> sampleFolders = new ArrayList<>();
    List<File> testFolders = new ArrayList<>();
    int freq = 0;
    String name = "";
    boolean self = false;
    String individuals = null;
    int elite = 0;

    for (int x = 0; x < args.length; x++) {
        if (args[x].equalsIgnoreCase(TEST_FOLDER)) {
            File folder = new File(args[1 + x++]);
            if (!folder.exists()) {
                throw new Exception("Folder does not exist: " + folder.getAbsolutePath());
            }
            testFolders.add(folder);
        } else if (args[x].equalsIgnoreCase(SAMPLE_FOLDER)) {
            File folder = new File(args[1 + x++]);
            if (!folder.exists()) {
                throw new Exception("Folder does not exist: " + folder.getAbsolutePath());
            }
            sampleFolders.add(folder);
        } else if (args[x].equalsIgnoreCase(BOTH_FOLDER)) {
            File folder = new File(args[1 + x++]);
            if (!folder.exists()) {
                throw new Exception("Folder does not exist: " + folder.getAbsolutePath());
            }
            sampleFolders.add(folder);
            testFolders.add(folder);
        } else if (args[x].equalsIgnoreCase(FREQUENCY)) {
            freq = Integer.parseInt(args[1 + x++]);
        } else if (args[x].equalsIgnoreCase(ELITE)) {
            elite = Integer.parseInt(args[1 + x++]);
        } else if (args[x].equalsIgnoreCase(OUTNAME)) {
            name = args[1 + x++];
        } else if (args[x].equalsIgnoreCase(SELF)) {
            self = true;
        } else if (args[x].equalsIgnoreCase(INDIVIDUALS)) {
            individuals = args[1 + x++];
        }
    }

    if (testFolders.isEmpty() || sampleFolders.isEmpty()) {
        System.out.println("Nothing to evaluate!");
        return;
    }

    MaseProblem sim = Reevaluate.createSimulator(args);
    MasterTournament mt = new MasterTournament(sampleFolders, testFolders, sim, name);

    if (individuals != null) {
        mt.makeIndsTournaments(individuals);
    } else if (self) {
        mt.makeSelfTournaments(freq);
    } else {
        mt.makeSampleTournaments(freq, elite);
    }

    mt.executor.shutdown();
}

From source file: com.act.lcms.v2.MZCollisionCounter.java

public static void main(String[] args) throws Exception {
    CLIUtil cliUtil = new CLIUtil(MassChargeCalculator.class, HELP_MESSAGE, OPTION_BUILDERS);
    CommandLine cl = cliUtil.parseCommandLine(args);

    File inputFile = new File(cl.getOptionValue(OPTION_INPUT_INCHI_LIST));
    if (!inputFile.exists()) {
        cliUtil.failWithMessage("Input file at does not exist at %s", inputFile.getAbsolutePath());
    }

    List<MassChargeCalculator.MZSource> sources = new ArrayList<>();
    try (BufferedReader reader = new BufferedReader(new FileReader(inputFile))) {
        String line;
        while ((line = reader.readLine()) != null) {
            line = line.trim();
            sources.add(new MassChargeCalculator.MZSource(line));
            if (sources.size() % 1000 == 0) {
                LOGGER.info("Loaded %d sources from input file", sources.size());
            }
        }
    }

    Set<String> considerIons = Collections.emptySet();
    if (cl.hasOption(OPTION_ONLY_CONSIDER_IONS)) {
        List<String> ions = Arrays.asList(cl.getOptionValues(OPTION_ONLY_CONSIDER_IONS));
        LOGGER.info("Only considering ions for m/z calculation: %s", StringUtils.join(ions, ", "));
        considerIons = new HashSet<>(ions);
    }

    TSVWriter<String, Long> tsvWriter = new TSVWriter<>(Arrays.asList("collisions", "count"));
    tsvWriter.open(new File(cl.getOptionValue(OPTION_OUTPUT_FILE)));

    try {
        LOGGER.info("Loaded %d sources in total from input file", sources.size());

        MassChargeCalculator.MassChargeMap mzMap = MassChargeCalculator.makeMassChargeMap(sources,
                considerIons);

        if (!cl.hasOption(OPTION_COUNT_WINDOW_INTERSECTIONS)) {
            // Do an exact analysis of the m/z collisions if windowing is not specified.

            LOGGER.info("Computing precise collision histogram.");
            Iterable<Double> mzs = mzMap.ionMZIter();
            Map<Integer, Long> collisionHistogram = histogram(
                    StreamSupport.stream(mzs.spliterator(), false).map(mz -> { // See comment about Iterable below.
                        try {
                            return mzMap.ionMZToMZSources(mz).size();
                        } catch (NoSuchElementException e) {
                            LOGGER.error("Caught no such element exception for mz %f: %s", mz, e.getMessage());
                            throw e;
                        }
                    }));
            List<Integer> sortedCollisions = new ArrayList<>(collisionHistogram.keySet());
            Collections.sort(sortedCollisions);
            for (Integer collision : sortedCollisions) {
                tsvWriter.append(new HashMap<String, Long>() {
                    {
                        put("collisions", collision.longValue());
                        put("count", collisionHistogram.get(collision));
                    }
                });
            }
        } else {
            /* After some deliberation (thanks Gil!), the windowed variant of this calculation counts the number of
             * structures whose 0.01 Da m/z windows (for some set of ions) overlap with each other.
             *
             * For example, let's assume we have five total input structures, and are only searching for one ion.  Let's
             * also assume that three of those structures have m/z A and the remaining two have m/z B.  The windows might
             * look like this in the m/z domain:
             * |----A----|
             *        |----B----|
             * Because A represents three structures and overlaps with B, which represents two, we assign A a count of 5--
             * this is the number of structures we believe could fall into the range of A given our current peak calling
             * approach.  Similarly, B is assigned a count of 5, as the possibility for collision/confusion is symmetric.
             *
             * Note that this is an over-approximation of collisions, as we could more precisely only consider intersections
             * when the exact m/z of B falls within the window around A and vice versa.  However, because we have observed
             * cases where the MS sensor doesn't report structures at exactly the m/z we predict, we employ this weaker
             * definition of intersection to give a slightly pessimistic view of what confusions might be possible. */
            // Compute windows for every m/z.  We don't care about the original mz values since we just want the count.
            List<Double> mzs = mzMap.ionMZsSorted();

            final Double windowHalfWidth;
            if (cl.hasOption(OPTION_WINDOW_HALFWIDTH)) {
                // Don't use get with default for this option, as we want the exact FP value of the default tolerance.
                windowHalfWidth = Double.valueOf(cl.getOptionValue(OPTION_WINDOW_HALFWIDTH));
            } else {
                windowHalfWidth = DEFAULT_WINDOW_TOLERANCE;
            }

            /* Window = (lower bound, upper bound), counter of represented m/z's that collide with this window, and number
             * of representative structures (which will be used in counting collisions). */
            LinkedList<CollisionWindow> allWindows = new LinkedList<CollisionWindow>() {
                {
                    for (Double mz : mzs) {
                        // CPU for memory trade-off: don't re-compute the window bounds over and over and over and over and over.
                        try {
                            add(new CollisionWindow(mz, windowHalfWidth, mzMap.ionMZToMZSources(mz).size()));
                        } catch (NoSuchElementException e) {
                            LOGGER.error("Caught no such element exception for mz %f: %s", mz, e.getMessage());
                            throw e;
                        }
                    }
                }
            };

            // Sweep line time!  The window ranges are the interesting points.  We just accumulate overlap counts as we go.
            LinkedList<CollisionWindow> workingSet = new LinkedList<>();
            List<CollisionWindow> finished = new LinkedList<>();

            while (allWindows.size() > 0) {
                CollisionWindow thisWindow = allWindows.pop();
                // Remove any windows from the working set that don't overlap with the next window.
                while (workingSet.size() > 0 && workingSet.peekFirst().getMaxMZ() < thisWindow.getMinMZ()) {
                    finished.add(workingSet.pop());
                }

                for (CollisionWindow w : workingSet) {
                    /* Add the size of the new overlapping window's structure count to each of the windows in the working set,
                     * which represents the number of possible confused structures that fall within the overlapping region.
                     * We exclude the window itself as it should already have counted the colliding structures it represents. */
                    w.getAccumulator().add(thisWindow.getStructureCount());

                    /* Reciprocally, add the structure counts of all windows with which the current window overlaps to it. */
                    thisWindow.getAccumulator().add(w.getStructureCount());
                }

                // Now that accumulation is complete, we can safely add the current window.
                workingSet.add(thisWindow);
            }

            // All the interesting events are done, so drop the remaining windows into the finished set.
            finished.addAll(workingSet);

            Map<Long, Long> collisionHistogram = histogram(
                    finished.stream().map(w -> w.getAccumulator().longValue()));
            List<Long> sortedCollisions = new ArrayList<>(collisionHistogram.keySet());
            Collections.sort(sortedCollisions);
            for (Long collision : sortedCollisions) {
                tsvWriter.append(new HashMap<String, Long>() {
                    {
                        put("collisions", collision);
                        put("count", collisionHistogram.get(collision));
                    }
                });
            }
        }
    } finally {
        if (tsvWriter != null) {
            tsvWriter.close();
        }
    }
}

From source file: ar.com.qbe.siniestros.model.utils.MimeMagic.Magic.java

/**
 * DOCUMENT ME!
 *
 * @param args DOCUMENT ME!
 */
public static void main(String[] args) {
    try {
        if (args.length == 0) {
            System.err.println("usage: test <file>");
            System.exit(1);
        }
        File f = new File(args[0]);

        if (f.exists()) {
            MagicMatch match = Magic.getMagicMatch(f, true, false);

            System.out.println("filename: " + args[0]);
            printMagicMatch(System.out, match, "");
        } else {
            System.err.println("file '" + f.getCanonicalPath() + "' not found");
        }
    } catch (MagicMatchNotFoundException e) {
        System.out.println("no match found");
    } catch (Exception e) {
        System.err.println("error: " + e);
        e.printStackTrace(System.err);
    }
}