Example usage for java.io InputStreamReader InputStreamReader

Introduction

On this page you can find example usages of the java.io InputStreamReader(InputStream) constructor.

Prototype

public InputStreamReader(InputStream in) 

Document

Creates an InputStreamReader that uses the default charset.
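
For orientation, here is a minimal, self-contained sketch (not taken from the examples below) of wrapping a byte stream with this constructor; the file name "data.txt" is only a placeholder. Because the one-argument constructor decodes with the platform's default charset, the same bytes may decode differently on different machines.

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

public class InputStreamReaderExample {
    public static void main(String[] args) throws IOException {
        // Wrap the byte stream in a reader that decodes using the default charset.
        // "data.txt" is a placeholder file name for this sketch.
        try (InputStream in = new FileInputStream("data.txt");
                BufferedReader reader = new BufferedReader(new InputStreamReader(in))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}

When the encoding matters, prefer the two-argument overload, for example new InputStreamReader(in, java.nio.charset.StandardCharsets.UTF_8), so the result does not depend on the platform default. The examples below apply the same one-argument pattern to System.in and to Hadoop file streams.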

Usage

From source file:com.genentech.chemistry.openEye.apps.SDFTorsionScanner.java

/**
 * @param args
 */
public static void main(String... args) throws IOException {
    // create command line Options object
    Options options = new Options();
    Option opt = new Option(OPT_INFILE, true,
            "input file oe-supported Use .sdf|.smi to specify the file type.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_OUTFILE, true, "Output filename.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_STARTTorsion, true,
            "The torsion in your inMol will be rotated by this value for the first job");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_TORSIONIncrement, true, "Increment each subsequent conformation by this step size");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_NSTEPS, true, "Number of conformations to create");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_BONDFILE, true, "The file containing the bond atoms that define the torsion.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_MINIMIZE, false,
            "Minimize conformer at each step using MMFFs.  If maxConfsPerStep is > 1, "
                    + "all confs will be mimimized and the lowest E will be output.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_CONSTRIANT, true,
            "One of strong (90), medium (45), weak(20), none or a floating point number"
                    + " to specify the tethered constraint strength for -minimize (def=strong)");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_MAXCONFS_PER_STEP, true,
            "While holding the torsion fixed, maximum number of conformations of free atoms to generat.  default=1");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_COREFILE, true, "Output file to store the guessed core.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_TORSIONS_ATOM_TAG, true,
            "Name of sdf tag which will contain the indices of atoms that define the torsion.");
    opt.setRequired(true);
    options.addOption(opt);

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }
    args = cmd.getArgs();

    if (args.length != 0) {
        System.err.println("Unknown arguments" + args);
        exitWithHelp(options);
    }

    if (cmd.hasOption("d")) {
        System.err.println("Start debugger and press return:");
        new BufferedReader(new InputStreamReader(System.in)).readLine();
    }

    String inFile = cmd.getOptionValue(OPT_INFILE);
    String outFile = cmd.getOptionValue(OPT_OUTFILE);

    String bondFile = cmd.getOptionValue(OPT_BONDFILE);
    String coreFilename = cmd.getOptionValue(OPT_COREFILE);
    String torsionAtomsTag = cmd.getOptionValue(OPT_TORSIONS_ATOM_TAG);

    int nSteps = Integer.parseInt(cmd.getOptionValue(OPT_NSTEPS));
    int nStartTor = Integer.parseInt(cmd.getOptionValue(OPT_STARTTorsion));
    int nTorIncr = Integer.parseInt(cmd.getOptionValue(OPT_TORSIONIncrement));

    int nMaxConfsPerStep = 1;
    if (cmd.hasOption(OPT_MAXCONFS_PER_STEP)) {
        nMaxConfsPerStep = Integer.parseInt(cmd.getOptionValue(OPT_MAXCONFS_PER_STEP));
    }

    String constraintStrength = cmd.getOptionValue(OPT_CONSTRIANT);

    boolean doMinimize = cmd.hasOption(OPT_MINIMIZE);

    SDFTorsionScanner torGenerator = new SDFTorsionScanner(bondFile, coreFilename, torsionAtomsTag, nSteps,
            nStartTor, nTorIncr, nMaxConfsPerStep, doMinimize, constraintStrength);

    torGenerator.run(inFile, outFile, coreFilename);
}

From source file:com.genentech.retrival.SDFExport.SDFExporter.java

public static void main(String[] args) throws ParseException, JDOMException, IOException { // create command line Options object
    Options options = new Options();
    Option opt = new Option("sqlFile", true, "sql-xml file");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option("sqlName", true, "name of SQL element in xml file");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option("molSqlName", true,
            "name of SQL element in xml file returning molfile for first column in sqlName");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("removeSalt", false, "remove any known salts from structure.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("o", "out", true, "output file");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("i", "in", true,
            "input file, tab separated, each line executes the query once. Use '.tab' to read from stdin");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("newLineReplacement", true,
            "If given newlines in fields will be replaced by this string.");
    options.addOption(opt);

    opt = new Option("ignoreMismatches", false, "If not given each input must return at least one hit hits.");
    options.addOption(opt);

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }

    String outFile = cmd.getOptionValue("o");
    String inFile = cmd.getOptionValue("i");
    String sqlFile = cmd.getOptionValue("sqlFile");
    String sqlName = cmd.getOptionValue("sqlName");
    String molSqlName = cmd.getOptionValue("molSqlName");
    String newLineReplacement = cmd.getOptionValue("newLineReplacement");

    boolean removeSalt = cmd.hasOption("removeSalt");
    boolean ignoreMismatches = cmd.hasOption("ignoreMismatches");

    SDFExporter exporter = null;
    try {
        exporter = new SDFExporter(sqlFile, sqlName, molSqlName, outFile, newLineReplacement, removeSalt,
                ignoreMismatches);
    } catch (Exception e) {
        e.printStackTrace();
        System.err.println();
        exitWithHelp(options);
    }

    args = cmd.getArgs();

    if (inFile != null && args.length > 0) {
        System.err.println("inFile and arguments may not be mixed!\n");
        exitWithHelp(options);
    }

    if (inFile == null) {
        if (exporter.getParamTypes().length != args.length) {
            System.err.printf("sql statement (%s) needs %d parameters but got only %d.\n", sqlName,
                    exporter.getParamTypes().length, args.length);
            exitWithHelp(options);
        }

        exporter.export(args);
    } else {
        BufferedReader in;
        if (".tab".equalsIgnoreCase(inFile))
            in = new BufferedReader(new InputStreamReader(System.in));
        else
            in = new BufferedReader(new FileReader(inFile));

        exporter.export(in);
        in.close();
    }

    exporter.close();
}

From source file:edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation.java

/**
 * @param args
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the aggregate functions " + "even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option gOption = new Option("g", "group", true, "set group of datasets for which the aggregate functions"
            + " will be computed, followed by their temporal and spatial attribute indices");
    gOption.setRequired(true);
    gOption.setArgName("GROUP");
    gOption.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(gOption);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; " + "this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-key", true,
            "aws secret access key; " + "this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; " + "this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation", options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation", options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp(
                    "hadoop jar data-polygamy.jar "
                            + "edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation",
                    options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    String datasetNames = "";
    String datasetIds = "";
    String preProcessingDatasets = "";

    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> shortDatasetAggregation = new ArrayList<String>();
    HashMap<String, String> datasetTempAtt = new HashMap<String, String>();
    HashMap<String, String> datasetSpatialAtt = new HashMap<String, String>();
    HashMap<String, String> preProcessingDataset = new HashMap<String, String>();
    HashMap<String, String> datasetId = new HashMap<String, String>();

    boolean removeExistingFiles = cmd.hasOption("f");
    String[] datasetArgs = cmd.getOptionValues("g");

    for (int i = 0; i < datasetArgs.length; i += 3) {
        String dataset = datasetArgs[i];

        // getting pre-processing
        String tempPreProcessing = FrameworkUtils.searchPreProcessing(dataset, s3conf, s3);
        if (tempPreProcessing == null) {
            System.out.println("No pre-processing available for " + dataset);
            continue;
        }
        preProcessingDataset.put(dataset, tempPreProcessing);

        shortDataset.add(dataset);
        datasetTempAtt.put(dataset, ("null".equals(datasetArgs[i + 1]) ? null : datasetArgs[i + 1]));
        datasetSpatialAtt.put(dataset, ("null".equals(datasetArgs[i + 2]) ? null : datasetArgs[i + 2]));

        datasetId.put(dataset, null);
    }

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    // getting dataset id

    Path path = null;
    FileSystem fs = null;

    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        fs = FileSystem.get(new Configuration());
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();
    if (s3)
        fs.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);
    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    FrameworkUtils.createDir(s3bucket + FrameworkUtils.aggregatesDir, s3conf, s3);

    // getting smallest resolution

    HashMap<String, String> tempResMap = new HashMap<String, String>();
    HashMap<String, String> spatialResMap = new HashMap<String, String>();

    HashMap<String, String> datasetTemporalStrMap = new HashMap<String, String>();
    HashMap<String, String> datasetSpatialStrMap = new HashMap<String, String>();

    HashSet<String> input = new HashSet<String>();

    for (String dataset : shortDataset) {

        String[] datasetArray = preProcessingDataset.get(dataset).split("-");

        String datasetTemporalStr = datasetArray[datasetArray.length - 2];
        int datasetTemporal = utils.temporalResolution(datasetTemporalStr);

        String datasetSpatialStr = datasetArray[datasetArray.length - 1];
        int datasetSpatial = utils.spatialResolution(datasetSpatialStr);

        // finding all possible resolutions

        String[] temporalResolutions = FrameworkUtils.getAggTempResolutions(datasetTemporal);
        String[] spatialResolutions = FrameworkUtils.getAggSpatialResolutions(datasetSpatial);

        String temporalResolution = "";
        String spatialResolution = "";

        String tempRes = "";
        String spatialRes = "";

        boolean dataAdded = false;

        for (int i = 0; i < temporalResolutions.length; i++) {
            for (int j = 0; j < spatialResolutions.length; j++) {

                temporalResolution = temporalResolutions[i];
                spatialResolution = spatialResolutions[j];

                String aggregatesOutputFileName = s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset + "/";

                if (removeExistingFiles) {
                    FrameworkUtils.removeFile(aggregatesOutputFileName, s3conf, s3);
                }

                if (!FrameworkUtils.fileExists(aggregatesOutputFileName, s3conf, s3)) {

                    dataAdded = true;

                    tempRes += temporalResolution + "-";
                    spatialRes += spatialResolution + "-";
                }
            }
        }

        if (dataAdded) {
            input.add(s3bucket + FrameworkUtils.preProcessingDir + "/" + preProcessingDataset.get(dataset));
            shortDatasetAggregation.add(dataset);

            tempResMap.put(dataset, tempRes.substring(0, tempRes.length() - 1));
            spatialResMap.put(dataset, spatialRes.substring(0, spatialRes.length() - 1));

            datasetTemporalStrMap.put(dataset, datasetTemporalStr);
            datasetSpatialStrMap.put(dataset, datasetSpatialStr);
        }
    }

    if (input.isEmpty()) {
        System.out.println("All the input datasets have aggregates.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    it = input.iterator();
    while (it.hasNext()) {
        preProcessingDatasets += it.next() + ",";
    }

    Job aggJob = null;
    String aggregatesOutputDir = s3bucket + FrameworkUtils.aggregatesDir + "/tmp/";
    String jobName = "aggregates";

    FrameworkUtils.removeFile(aggregatesOutputDir, s3conf, s3);

    Configuration aggConf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    aggConf.set("dataset-name", datasetNames);
    aggConf.set("dataset-id", datasetIds);

    for (int i = 0; i < shortDatasetAggregation.size(); i++) {
        String dataset = shortDatasetAggregation.get(i);
        String id = datasetId.get(dataset);
        aggConf.set("dataset-" + id + "-temporal-resolutions", tempResMap.get(dataset));
        aggConf.set("dataset-" + id + "-spatial-resolutions", spatialResMap.get(dataset));
        aggConf.set("dataset-" + id + "-temporal-att", datasetTempAtt.get(dataset));
        aggConf.set("dataset-" + id + "-spatial-att", datasetSpatialAtt.get(dataset));
        aggConf.set("dataset-" + id + "-temporal", datasetTemporalStrMap.get(dataset));
        aggConf.set("dataset-" + id + "-spatial", datasetSpatialStrMap.get(dataset));

        if (s3)
            aggConf.set("dataset-" + id,
                    s3bucket + FrameworkUtils.preProcessingDir + "/" + preProcessingDataset.get(dataset));
        else
            aggConf.set("dataset-" + id, FileSystem.get(new Configuration()).getHomeDirectory() + "/"
                    + FrameworkUtils.preProcessingDir + "/" + preProcessingDataset.get(dataset));
    }

    aggConf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    aggConf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    aggConf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    aggConf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    aggConf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    aggConf.set("mapreduce.task.io.sort.mb", "200");
    aggConf.set("mapreduce.task.io.sort.factor", "100");
    machineConf.setMachineConfiguration(aggConf);

    if (s3) {
        machineConf.setMachineConfiguration(aggConf);
        aggConf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        aggConf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
    }

    if (snappyCompression) {
        aggConf.set("mapreduce.map.output.compress", "true");
        aggConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        //aggConf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        aggConf.set("mapreduce.map.output.compress", "true");
        aggConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        //aggConf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    aggJob = new Job(aggConf);
    aggJob.setJobName(jobName);

    aggJob.setMapOutputKeyClass(SpatioTemporalWritable.class);
    aggJob.setMapOutputValueClass(AggregationArrayWritable.class);
    aggJob.setOutputKeyClass(SpatioTemporalWritable.class);
    aggJob.setOutputValueClass(FloatArrayWritable.class);
    //aggJob.setOutputKeyClass(Text.class);
    //aggJob.setOutputValueClass(Text.class);

    aggJob.setMapperClass(AggregationMapper.class);
    aggJob.setCombinerClass(AggregationCombiner.class);
    aggJob.setReducerClass(AggregationReducer.class);
    aggJob.setNumReduceTasks(machineConf.getNumberReduces());

    aggJob.setInputFormatClass(SequenceFileInputFormat.class);
    //aggJob.setOutputFormatClass(SequenceFileOutputFormat.class);
    LazyOutputFormat.setOutputFormatClass(aggJob, SequenceFileOutputFormat.class);
    //LazyOutputFormat.setOutputFormatClass(aggJob, TextOutputFormat.class);
    SequenceFileOutputFormat.setCompressOutput(aggJob, true);
    SequenceFileOutputFormat.setOutputCompressionType(aggJob, CompressionType.BLOCK);

    FileInputFormat.setInputDirRecursive(aggJob, true);
    FileInputFormat.setInputPaths(aggJob,
            preProcessingDatasets.substring(0, preProcessingDatasets.length() - 1));
    FileOutputFormat.setOutputPath(aggJob, new Path(aggregatesOutputDir));

    aggJob.setJarByClass(Aggregation.class);

    long start = System.currentTimeMillis();
    aggJob.submit();
    aggJob.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (String dataset : shortDatasetAggregation) {
        String from = s3bucket + FrameworkUtils.aggregatesDir + "/tmp/" + dataset + "/";
        String to = s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset + "/";
        FrameworkUtils.renameFile(from, to, s3conf, s3);
    }

}

From source file:it.nibbles.javacoin.BlockTool.java

public static void main(String[] args) throws Exception {
    OptionParser parser = new OptionParser();
    parser.accepts("help");
    parser.accepts("import");
    parser.accepts("export");
    parser.accepts("testnet2");
    parser.accepts("testnet3");
    parser.accepts("prodnet");
    parser.accepts("first").withRequiredArg().ofType(Integer.class);
    parser.accepts("last").withRequiredArg().ofType(Integer.class);
    parser.accepts("hash").withRequiredArg();
    parser.accepts("port").withRequiredArg().ofType(Integer.class);
    optBdbPath = parser.accepts("bdbPath").withRequiredArg().defaultsTo("data");
    //optJdbcDriver = parser.accepts("driver").withRequiredArg().defaultsTo("com.mysql.jdbc.Driver");
    optJdbcUrl = parser.accepts("url").withRequiredArg().defaultsTo("jdbc:mysql://localhost/javacoin_testnet3");
    optJdbcUser = parser.accepts("dbuser").withRequiredArg().defaultsTo("javacoin");
    optJdbcPassword = parser.accepts("dbpass").withRequiredArg().defaultsTo("pw");
    inputfile = parser.accepts("inputfile").withRequiredArg();
    outputfile = parser.accepts("outputfile").withRequiredArg();
    //    String[] args = {
    //      "--inputfile", "blocks-0-100000.txt", "--prodnet", "--load", "--url", "jdbc:mysql://localhost/javacoin_test"
    //    };
    OptionSet options = parser.parse(args);
    if (args.length == 0 || options.has("help") || options.nonOptionArguments().size() > 0
            || (options.has("export") && options.has("import"))
            || (options.has("export") && !options.has("outputfile"))
            || (options.has("import") && !options.has("inputfile"))
            || (options.has("testnet2") && options.has("testnet3"))
            || (options.has("testnet2") && options.has("prodnet"))
            || (options.has("testnet3") && options.has("prodnet"))) {
        println(HELP_TEXT);
        return;
    }
    if (options.hasArgument("port")) {
        //listenPort = ((Integer) options.valueOf("port")).intValue();
    }
    cmdExportBlockchain = options.has("export");
    cmdImportBlockchain = options.has("import");
    isProdnet = options.has("prodnet");
    isTestNet2 = options.has("testnet2");
    isTestNet3 = options.has("testnet3");
    if (!isProdnet && !isTestNet2 && !isTestNet3)
        isTestNet3 = true;
    if (options.hasArgument("first")) {
        firstBlock = ((Integer) options.valueOf("first")).intValue();
        if (!options.hasArgument("last"))
            lastBlock = firstBlock;
    }
    if (options.hasArgument("last")) {
        lastBlock = ((Integer) options.valueOf("last")).intValue();
        if (!options.hasArgument("first"))
            firstBlock = lastBlock;
    }
    if (options.hasArgument("hash"))
        blockHash = (String) options.valueOf("hash");
    if (cmdExportBlockchain && blockHash == null && firstBlock == 0 && lastBlock == 0) {
        println("To save blocks you have to specify a range or an hash");
        return;
    }

    //println("save: " + cmdSaveBlockchain + " load: " + cmdLoadBlockchain + " prodnet: " + isProdnet + " testnet2: " + isTestNet2 + " testnet3: " + isTestNet3);
    //println("FirstBlock: " + firstBlock + " lastBlock: " + lastBlock + " inputfile: " + inputfile.value(options) + " outputfile: " + outputfile.value(options));
    BlockTool app = new BlockTool();
    app.init(options);
    if (cmdImportBlockchain) {
        //System.out.println("Press return to start import blocks to blockchain");
        //System.in.read();
        BufferedReader reader;
        if ("-".equals(inputfile.value(options)))
            reader = new BufferedReader(new InputStreamReader(System.in));
        else
            reader = new BufferedReader(new FileReader(inputfile.value(options)));
        int numBlocks = 0;
        Block block = app.readBlock(reader, false);
        while (block != null) {
            numBlocks++;
            long startTime = System.currentTimeMillis();
            blockChain.addBlock(block);
            long insertTime = System.currentTimeMillis() - startTime;
            System.out.printf(
                    "%6d Block " + BtcUtil.hexOut(block.getHash()) + " #txs: %4d insertTime(ms): %d%n",
                    numBlocks, block.getTransactions().size(), insertTime);
            block = app.readBlock(reader, false);
        }
        System.out.println("Numero blocchi letti: " + numBlocks);
    } else if (cmdExportBlockchain) {
        BlockChainLink blockLink;
        try (PrintWriter writer = new PrintWriter(new File(outputfile.value(options)))) {
            if (blockHash != null) {
                blockLink = storage.getLink(BtcUtil.hexIn(blockHash));
                app.writeBlock(writer, blockLink.getBlock());
            } else {
                for (int i = firstBlock; i <= lastBlock; i++) {
                    blockLink = storage.getLinkAtHeight(i);
                    app.writeBlock(writer, blockLink.getBlock());
                }
            }
        }
    }
    app.close();
}

From source file:com.genentech.chemistry.openEye.apps.SdfRMSDSphereExclusion.java

/**
 * @param args
 */
public static void main(String... args) throws IOException { // create command line Options object
    Options options = new Options();
    Option opt = new Option(OPT_INFILE, true,
            "input file oe-supported Use .sdf|.smi to specify the file type.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_OUTFILE, true, "output file (OE-supported). Use .sdf|.smi to specify the file type.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_REFFILE, true,
            "Reference file of molecules which define pre-existign exclusion spheres.");
    options.addOption(opt);

    opt = new Option(OPT_RADIUS, true, "Radius of exclusion sphere in A2.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_GROUPBY, true,
            "Group by fieldname, run sphere exclusion for consecutive groups of records with same value for this field.");
    options.addOption(opt);

    opt = new Option(OPT_DONotOpt, false,
            "If specified the RMSD is computed without trying to optimize the alignment.");
    options.addOption(opt);

    opt = new Option(OPT_PRINT_All, false, "print all molecules, check includeIdx tag");
    options.addOption(opt);

    opt = new Option(OPT_MIRROR, false, "For non-chiral molecules also try mirror image");
    options.addOption(opt);

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }
    args = cmd.getArgs();

    if (cmd.hasOption("d")) {
        System.err.println("Start debugger and press return:");
        new BufferedReader(new InputStreamReader(System.in)).readLine();
    }

    String inFile = cmd.getOptionValue(OPT_INFILE);
    String outFile = cmd.getOptionValue(OPT_OUTFILE);
    String refFile = cmd.getOptionValue(OPT_REFFILE);
    String groupBy = cmd.getOptionValue(OPT_GROUPBY);
    boolean doOptimize = !cmd.hasOption(OPT_DONotOpt);
    double radius = Double.parseDouble(cmd.getOptionValue(OPT_RADIUS));
    boolean printAll = cmd.hasOption(OPT_PRINT_All);
    boolean doMirror = cmd.hasOption(OPT_MIRROR);

    SdfRMSDSphereExclusion sphereExclusion = new SdfRMSDSphereExclusion(refFile, outFile, radius, printAll,
            doMirror, doOptimize, groupBy);

    sphereExclusion.run(inFile);
    sphereExclusion.close();
}

From source file:edu.nyu.vida.data_polygamy.feature_identification.IndexCreation.java

/**
 * @param args
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the index and events " + "even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option thresholdOption = new Option("t", "use-custom-thresholds", false,
            "use custom thresholds for regular and rare events, defined in HDFS_HOME/"
                    + FrameworkUtils.thresholdDir + " file");
    thresholdOption.setRequired(false);
    options.addOption(thresholdOption);

    Option gOption = new Option("g", "group", true,
            "set group of datasets for which the indices and events" + " will be computed");
    gOption.setRequired(true);
    gOption.setArgName("GROUP");
    gOption.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(gOption);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; " + "this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-key", true,
            "aws secret access key; " + "this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; " + "this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.feature_identification.IndexCreation", options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.feature_identification.IndexCreation", options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp("hadoop jar data-polygamy.jar "
                    + "edu.nyu.vida.data_polygamy.feature_identification.IndexCreation", options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    String datasetNames = "";
    String datasetIds = "";

    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> shortDatasetIndex = new ArrayList<String>();
    HashMap<String, String> datasetAgg = new HashMap<String, String>();
    HashMap<String, String> datasetId = new HashMap<String, String>();
    HashMap<String, HashMap<Integer, Double>> datasetRegThreshold = new HashMap<String, HashMap<Integer, Double>>();
    HashMap<String, HashMap<Integer, Double>> datasetRareThreshold = new HashMap<String, HashMap<Integer, Double>>();

    Path path = null;
    FileSystem fs = FileSystem.get(new Configuration());
    BufferedReader br;

    boolean removeExistingFiles = cmd.hasOption("f");
    boolean isThresholdUserDefined = cmd.hasOption("t");

    for (String dataset : cmd.getOptionValues("g")) {

        // getting aggregates
        String[] aggregate = FrameworkUtils.searchAggregates(dataset, s3conf, s3);
        if (aggregate.length == 0) {
            System.out.println("No aggregates found for " + dataset + ".");
            continue;
        }

        // getting aggregates header
        String aggregatesHeaderFileName = FrameworkUtils.searchAggregatesHeader(dataset, s3conf, s3);
        if (aggregatesHeaderFileName == null) {
            System.out.println("No aggregate header for " + dataset);
            continue;
        }

        String aggregatesHeader = s3bucket + FrameworkUtils.preProcessingDir + "/" + aggregatesHeaderFileName;

        shortDataset.add(dataset);
        datasetId.put(dataset, null);

        if (s3) {
            path = new Path(aggregatesHeader);
            fs = FileSystem.get(path.toUri(), s3conf);
        } else {
            path = new Path(fs.getHomeDirectory() + "/" + aggregatesHeader);
        }

        br = new BufferedReader(new InputStreamReader(fs.open(path)));
        datasetAgg.put(dataset, br.readLine().split("\t")[1]);
        br.close();
        if (s3)
            fs.close();
    }

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    // getting dataset id

    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);
    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    // getting user defined thresholds

    if (isThresholdUserDefined) {
        if (s3) {
            path = new Path(s3bucket + FrameworkUtils.thresholdDir);
            fs = FileSystem.get(path.toUri(), s3conf);
        } else {
            path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.thresholdDir);
        }
        br = new BufferedReader(new InputStreamReader(fs.open(path)));
        line = br.readLine();
        while (line != null) {
            // getting dataset name
            String dataset = line.trim();
            HashMap<Integer, Double> regThresholds = new HashMap<Integer, Double>();
            HashMap<Integer, Double> rareThresholds = new HashMap<Integer, Double>();
            line = br.readLine();
            while ((line != null) && (line.split("\t").length > 1)) {
                // getting attribute ids and thresholds
                String[] keyVals = line.trim().split("\t");
                int att = Integer.parseInt(keyVals[0].trim());
                regThresholds.put(att, Double.parseDouble(keyVals[1].trim()));
                rareThresholds.put(att, Double.parseDouble(keyVals[2].trim()));
                line = br.readLine();
            }
            datasetRegThreshold.put(dataset, regThresholds);
            datasetRareThreshold.put(dataset, rareThresholds);
        }
        br.close();
    }
    if (s3)
        fs.close();

    // datasets that will use existing merge tree
    ArrayList<String> useMergeTree = new ArrayList<String>();

    // creating index for each spatio-temporal resolution

    FrameworkUtils.createDir(s3bucket + FrameworkUtils.indexDir, s3conf, s3);

    HashSet<String> input = new HashSet<String>();

    for (String dataset : shortDataset) {

        String indexCreationOutputFileName = s3bucket + FrameworkUtils.indexDir + "/" + dataset + "/";
        String mergeTreeFileName = s3bucket + FrameworkUtils.mergeTreeDir + "/" + dataset + "/";

        if (removeExistingFiles) {
            FrameworkUtils.removeFile(indexCreationOutputFileName, s3conf, s3);
            FrameworkUtils.removeFile(mergeTreeFileName, s3conf, s3);
            FrameworkUtils.createDir(mergeTreeFileName, s3conf, s3);
        } else if (datasetRegThreshold.containsKey(dataset)) {
            FrameworkUtils.removeFile(indexCreationOutputFileName, s3conf, s3);
            if (FrameworkUtils.fileExists(mergeTreeFileName, s3conf, s3)) {
                useMergeTree.add(dataset);
            }
        }

        if (!FrameworkUtils.fileExists(indexCreationOutputFileName, s3conf, s3)) {
            input.add(s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset);
            shortDatasetIndex.add(dataset);
        }

    }

    if (input.isEmpty()) {
        System.out.println("All the input datasets have indices.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    String aggregateDatasets = "";
    it = input.iterator();
    while (it.hasNext()) {
        aggregateDatasets += it.next() + ",";
    }

    Job icJob = null;
    Configuration icConf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    String jobName = "index";
    String indexOutputDir = s3bucket + FrameworkUtils.indexDir + "/tmp/";

    FrameworkUtils.removeFile(indexOutputDir, s3conf, s3);

    icConf.set("dataset-name", datasetNames);
    icConf.set("dataset-id", datasetIds);

    if (!useMergeTree.isEmpty()) {
        String useMergeTreeStr = "";
        for (String dt : useMergeTree) {
            useMergeTreeStr += dt + ",";
        }
        icConf.set("use-merge-tree", useMergeTreeStr.substring(0, useMergeTreeStr.length() - 1));
    }

    for (int i = 0; i < shortDataset.size(); i++) {
        String dataset = shortDataset.get(i);
        String id = datasetId.get(dataset);
        icConf.set("dataset-" + id + "-aggregates", datasetAgg.get(dataset));
        if (datasetRegThreshold.containsKey(dataset)) {
            HashMap<Integer, Double> regThresholds = datasetRegThreshold.get(dataset);
            String thresholds = "";
            for (int att : regThresholds.keySet()) {
                thresholds += String.valueOf(att) + "-" + String.valueOf(regThresholds.get(att)) + ",";
            }
            icConf.set("regular-" + id, thresholds.substring(0, thresholds.length() - 1));
        }

        if (datasetRareThreshold.containsKey(dataset)) {
            HashMap<Integer, Double> rareThresholds = datasetRareThreshold.get(dataset);
            String thresholds = "";
            for (int att : rareThresholds.keySet()) {
                thresholds += String.valueOf(att) + "-" + String.valueOf(rareThresholds.get(att)) + ",";
            }
            icConf.set("rare-" + id, thresholds.substring(0, thresholds.length() - 1));
        }
    }

    icConf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    icConf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    icConf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    icConf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    icConf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    icConf.set("mapreduce.task.io.sort.mb", "200");
    icConf.set("mapreduce.task.io.sort.factor", "100");
    //icConf.set("mapreduce.task.timeout", "1800000");
    machineConf.setMachineConfiguration(icConf);

    if (s3) {
        machineConf.setMachineConfiguration(icConf);
        icConf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        icConf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        icConf.set("bucket", s3bucket);
    }

    if (snappyCompression) {
        icConf.set("mapreduce.map.output.compress", "true");
        icConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        //icConf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        icConf.set("mapreduce.map.output.compress", "true");
        icConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        //icConf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    icJob = new Job(icConf);
    icJob.setJobName(jobName);

    icJob.setMapOutputKeyClass(AttributeResolutionWritable.class);
    icJob.setMapOutputValueClass(SpatioTemporalFloatWritable.class);
    icJob.setOutputKeyClass(AttributeResolutionWritable.class);
    icJob.setOutputValueClass(TopologyTimeSeriesWritable.class);
    //icJob.setOutputKeyClass(Text.class);
    //icJob.setOutputValueClass(Text.class);

    icJob.setMapperClass(IndexCreationMapper.class);
    icJob.setReducerClass(IndexCreationReducer.class);
    icJob.setNumReduceTasks(machineConf.getNumberReduces());

    icJob.setInputFormatClass(SequenceFileInputFormat.class);
    //icJob.setOutputFormatClass(SequenceFileOutputFormat.class);
    LazyOutputFormat.setOutputFormatClass(icJob, SequenceFileOutputFormat.class);
    //LazyOutputFormat.setOutputFormatClass(icJob, TextOutputFormat.class);
    SequenceFileOutputFormat.setCompressOutput(icJob, true);
    SequenceFileOutputFormat.setOutputCompressionType(icJob, CompressionType.BLOCK);

    FileInputFormat.setInputDirRecursive(icJob, true);
    FileInputFormat.setInputPaths(icJob, aggregateDatasets.substring(0, aggregateDatasets.length() - 1));
    FileOutputFormat.setOutputPath(icJob, new Path(indexOutputDir));

    icJob.setJarByClass(IndexCreation.class);

    long start = System.currentTimeMillis();
    icJob.submit();
    icJob.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (String dataset : shortDatasetIndex) {
        String from = s3bucket + FrameworkUtils.indexDir + "/tmp/" + dataset + "/";
        String to = s3bucket + FrameworkUtils.indexDir + "/" + dataset + "/";
        FrameworkUtils.renameFile(from, to, s3conf, s3);
    }

}

From source file:edu.asu.bscs.csiebler.waypointapplication.WaypointServerStub.java

/**
 *
 * @param args
 */
public static void main(String args[]) {
    try {
        String url = "http://127.0.0.1:8080/";

        if (args.length > 0) {
            url = args[0];
        }

        WaypointServerStub cjc = new WaypointServerStub(url);
        BufferedReader stdin = new BufferedReader(new InputStreamReader(System.in));
        System.out.print(
                "Enter end or {remove|get|getNames|getCategoryNames|getNamesInCategory|dist|bear} arg1 arg2 eg  remove Toreros >");
        String inStr = stdin.readLine();
        StringTokenizer st = new StringTokenizer(inStr);
        String opn = st.nextToken();

        while (!opn.equalsIgnoreCase("end")) {
            switch (opn) {
            case "remove": {
                boolean result = cjc.remove(st.nextToken());
                System.out.println("response: " + result);
                break;
            }
            case "get": {
                Waypoint result = cjc.get(st.nextToken());
                System.out.println("response: " + result.toJsonString());
                break;
            }
            case "getNames": {
                String[] result = cjc.getNames();
                for (int i = 0; i < result.length; i++) {
                    System.out.println("response[" + i + "] is " + result[i]);
                }
                break;
            }
            case "getCategoryNames": {
                String[] result = cjc.getCategoryNames();
                for (int i = 0; i < result.length; i++) {
                    System.out.println("response[" + i + "] is " + result[i]);
                }
                break;
            }
            case "getNamesInCategory": {
                String[] result = cjc.getNamesInCategory(st.nextToken());
                for (int i = 0; i < result.length; i++) {
                    System.out.println("response[" + i + "] is " + result[i]);
                }
                break;
            }
            case "dist": {
                double result = cjc.distanceGCTo(st.nextToken(), st.nextToken());
                System.out.println("response: " + result);
                break;
            }
            case "bear": {
                double result = cjc.bearingGCInitTo(st.nextToken(), st.nextToken());
                System.out.println("response: " + result);
                break;
            }
            default: {
                break;
            }
            }

            System.out.print(
                    "Enter end or {remove|get|getNames|getCategoryNames|getNamesInCategory|dist|bear} arg1 arg2 eg  remove Toreros >");
            inStr = stdin.readLine();
            st = new StringTokenizer(inStr);
            opn = st.nextToken();
        }
    } catch (Exception e) {
        System.out.println("Oops, you didn't enter the right stuff");
    }
}

From source file:com.github.chrbayer84.keybits.KeyBits.java

/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    int number_of_addresses = 1;
    int depth = 1;

    String usage = "java -jar KeyBits.jar [options]";
    // create parameters which can be chosen
    Option help = new Option("h", "print this message");
    Option verbose = new Option("v", "verbose");
    Option exprt = new Option("e", "export public key to blockchain");
    Option imprt = OptionBuilder.withArgName("string").hasArg()
            .withDescription("import public key from blockchain").create("i");

    Option blockchain_address = OptionBuilder.withArgName("string").hasArg().withDescription("bitcoin address")
            .create("a");
    Option create_wallet = OptionBuilder.withArgName("file name").hasArg().withDescription("create wallet")
            .create("c");
    Option update_wallet = OptionBuilder.withArgName("file name").hasArg().withDescription("update wallet")
            .create("u");
    Option balance_wallet = OptionBuilder.withArgName("file name").hasArg()
            .withDescription("return balance of wallet").create("b");
    Option show_wallet = OptionBuilder.withArgName("file name").hasArg()
            .withDescription("show content of wallet").create("w");
    Option send_coins = OptionBuilder.withArgName("file name").hasArg().withDescription("send satoshis")
            .create("s");
    Option monitor_pending = OptionBuilder.withArgName("file name").hasArg()
            .withDescription("monitor pending transactions of wallet").create("p");
    Option monitor_depth = OptionBuilder.withArgName("file name").hasArg()
            .withDescription("monitor transaction depths of wallet").create("d");
    Option number = OptionBuilder.withArgName("integer").hasArg()
            .withDescription("in combination with -c, -d, -r or -s").create("n");
    Option reset = OptionBuilder.withArgName("file name").hasArg().withDescription("reset wallet").create("r");

    Options options = new Options();

    options.addOption(help);
    options.addOption(verbose);
    options.addOption(imprt);
    options.addOption(exprt);

    options.addOption(blockchain_address);
    options.addOption(create_wallet);
    options.addOption(update_wallet);
    options.addOption(balance_wallet);
    options.addOption(show_wallet);
    options.addOption(send_coins);
    options.addOption(monitor_pending);
    options.addOption(monitor_depth);
    options.addOption(number);
    options.addOption(reset);

    BasicParser parser = new BasicParser();
    CommandLine cmd = null;

    String header = "This is KeyBits v0.01b.1412953962" + System.getProperty("line.separator");
    // show help if wrong usage
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        printHelp(usage, options, header);
    }

    if (cmd.getOptions().length == 0)
        printHelp(usage, options, header);

    if (cmd.hasOption("h"))
        printHelp(usage, options, header);

    if (cmd.hasOption("v"))
        System.out.println(header);

    if (cmd.hasOption("c") && cmd.hasOption("n"))
        number_of_addresses = new Integer(cmd.getOptionValue("n")).intValue();

    if (cmd.hasOption("d") && cmd.hasOption("n"))
        depth = new Integer(cmd.getOptionValue("n")).intValue();

    String checkpoints_file_name = "checkpoints";
    if (!new File(checkpoints_file_name).exists())
        checkpoints_file_name = null;

    // ---------------------------------------------------------------------

    if (cmd.hasOption("c")) {
        String wallet_file_name = cmd.getOptionValue("c");

        String passphrase = HelpfulStuff.insertPassphrase("enter password for " + wallet_file_name);
        if (!new File(wallet_file_name).exists()) {
            String passphrase_ = HelpfulStuff.reInsertPassphrase("enter password for " + wallet_file_name);

            if (!passphrase.equals(passphrase_)) {
                System.out.println("passwords do not match");
                System.exit(0);
            }
        }

        MyWallet.createWallet(wallet_file_name, wallet_file_name + ".chain", number_of_addresses, passphrase);
        System.exit(0);
    }

    if (cmd.hasOption("u")) {
        String wallet_file_name = cmd.getOptionValue("u");
        MyWallet.updateWallet(wallet_file_name, wallet_file_name + ".chain", checkpoints_file_name);
        System.exit(0);
    }

    if (cmd.hasOption("b")) {
        String wallet_file_name = cmd.getOptionValue("b");
        System.out.println(
                MyWallet.getBalanceOfWallet(wallet_file_name, wallet_file_name + ".chain").longValue());
        System.exit(0);
    }

    if (cmd.hasOption("w")) {
        String wallet_file_name = cmd.getOptionValue("w");
        System.out.println(MyWallet.showContentOfWallet(wallet_file_name, wallet_file_name + ".chain"));
        System.exit(0);
    }

    if (cmd.hasOption("p")) {
        System.out.println("monitoring of pending transactions ... ");
        String wallet_file_name = cmd.getOptionValue("p");
        MyWallet.monitorPendingTransactions(wallet_file_name, wallet_file_name + ".chain",
                checkpoints_file_name);
        System.exit(0);
    }

    if (cmd.hasOption("d")) {
        System.out.println("monitoring of transaction depth ... ");
        String wallet_file_name = cmd.getOptionValue("d");
        MyWallet.monitorTransactionDepth(wallet_file_name, wallet_file_name + ".chain", checkpoints_file_name,
                depth);
        System.exit(0);
    }

    if (cmd.hasOption("r") && cmd.hasOption("n")) {
        long epoch = Long.parseLong(cmd.getOptionValue("n"));
        System.out.println("resetting wallet ... ");
        String wallet_file_name = cmd.getOptionValue("r");

        File chain_file = (new File(wallet_file_name + ".chain"));
        if (chain_file.exists())
            chain_file.delete();

        MyWallet.setCreationTime(wallet_file_name, epoch);
        MyWallet.updateWallet(wallet_file_name, wallet_file_name + ".chain", checkpoints_file_name);

        System.exit(0);
    }

    if (cmd.hasOption("s") && cmd.hasOption("a") && cmd.hasOption("n")) {
        String wallet_file_name = cmd.getOptionValue("s");
        String address = cmd.getOptionValue("a");
        int amount = Integer.parseInt(cmd.getOptionValue("n"));

        String wallet_passphrase = HelpfulStuff.insertPassphrase("enter password for " + wallet_file_name);
        MyWallet.sendCoins(wallet_file_name, wallet_file_name + ".chain", checkpoints_file_name, address,
                BigInteger.valueOf(amount), wallet_passphrase);

        System.out.println("waiting ...");
        Thread.sleep(10000);
        System.out.println("monitoring of transaction depth ... ");
        MyWallet.monitorTransactionDepth(wallet_file_name, wallet_file_name + ".chain", checkpoints_file_name,
                1);
        System.out.println("transaction fixed in blockchain with depth " + depth);
        System.exit(0);
    }

    // ----------------------------------------------------------------------------------------
    //                                  creates public key
    // ----------------------------------------------------------------------------------------

    GnuPGP gpg = new GnuPGP();

    if (cmd.hasOption("e")) {
        BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
        String line = null;
        StringBuffer sb = new StringBuffer();
        while ((line = input.readLine()) != null)
            sb.append(line + "\n");

        PGPPublicKeyRing public_key_ring = gpg.getDearmored(sb.toString());
        //System.out.println(gpg.showPublicKeys(public_key_ring));

        byte[] public_key_ring_encoded = gpg.getEncoded(public_key_ring);

        String[] addresses = (new Encoding()).encodePublicKey(public_key_ring_encoded);
        //         System.out.println(gpg.showPublicKey(gpg.getDecoded(encoding.decodePublicKey(addresses))));

        // file names for message
        String public_key_file_name = Long.toHexString(public_key_ring.getPublicKey().getKeyID()) + ".wallet";
        String public_key_wallet_file_name = public_key_file_name;
        String public_key_chain_file_name = public_key_wallet_file_name + ".chain";

        // TODO: the password still needs to be passed on to encodeAddresses here, otherwise it is requested twice
        String public_key_wallet_passphrase = HelpfulStuff
                .insertPassphrase("enter password for " + public_key_wallet_file_name);
        if (!new File(public_key_wallet_file_name).exists()) {
            String public_key_wallet_passphrase_ = HelpfulStuff
                    .reInsertPassphrase("enter password for " + public_key_wallet_file_name);

            if (!public_key_wallet_passphrase.equals(public_key_wallet_passphrase_)) {
                System.out.println("passwords do not match");
                System.exit(0);
            }
        }

        MyWallet.createWallet(public_key_wallet_file_name, public_key_chain_file_name, 1,
                public_key_wallet_passphrase);
        MyWallet.updateWallet(public_key_wallet_file_name, public_key_chain_file_name, checkpoints_file_name);
        String public_key_address = MyWallet.getAddress(public_key_wallet_file_name, public_key_chain_file_name,
                0);
        System.out.println("address of public key: " + public_key_address);

        // 10000 additional satoshis for sending transaction to address of recipient of message and 10000 for fees
        KeyBits.encodeAddresses(public_key_wallet_file_name, public_key_chain_file_name, checkpoints_file_name,
                addresses, 2 * SendRequest.DEFAULT_FEE_PER_KB.intValue(), depth, public_key_wallet_passphrase);
    }

    if (cmd.hasOption("i")) {
        String location = cmd.getOptionValue("i");

        String[] addresses = null;
        if (location.indexOf(",") > -1) {
            String[] locations = location.split(",");
            addresses = MyWallet.getAddressesFromBlockAndTransaction("main.wallet", "main.wallet.chain",
                    checkpoints_file_name, locations[0], locations[1]);
        } else {
            addresses = BlockchainDotInfo.getKeys(location);
        }

        byte[] encoded = (new Encoding()).decodePublicKey(addresses);
        PGPPublicKeyRing public_key_ring = gpg.getDecoded(encoded);

        System.out.println(gpg.getArmored(public_key_ring));

        System.exit(0);
    }
}

From source file:Main.java

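// Read the entire stream into a String using the platform default charset (the reader is not closed here).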
private static String parseInputStream(InputStream is) {
    InputStreamReader isr = new InputStreamReader(is);
    BufferedReader br = new BufferedReader(isr);
    String line = null;
    StringBuilder sb = new StringBuilder();
    try {
        while ((line = br.readLine()) != null) {
            sb.append(line).append("\n");
        }
    } catch (IOException e) {
        e.printStackTrace();
    }

    return sb.toString();
}

From source file:Main.java

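// Read the entire stream into a String using the default charset; closes the stream and returns null on I/O errors.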
public static String convertStreamToString(InputStream is) {
    BufferedReader reader = new BufferedReader(new InputStreamReader(is));
    StringBuilder sb = new StringBuilder();

    String line = null;
    try {
        while ((line = reader.readLine()) != null) {
            sb.append(line + "\n");
        }
    } catch (IOException e) {
        return null;
    } finally {
        try {
            is.close();
        } catch (IOException e) {
            return null;
        }
    }
    return sb.toString();
}