Example usage for java.util.Arrays.asList

List of usage examples for java.util.Arrays.asList

Introduction

This page collects example usages of java.util.Arrays.asList.

Prototype

@SafeVarargs
@SuppressWarnings("varargs")
public static <T> List<T> asList(T... a) 

Document

Returns a fixed-size list backed by the specified array.
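
In practice, "fixed-size" means the returned list supports set but not add or remove, and "backed by" means writes to the list write through to the array. A minimal sketch:

import java.util.Arrays;
import java.util.List;

public class AsListDemo {
    public static void main(String[] args) {
        String[] array = { "a", "b", "c" };
        List<String> list = Arrays.asList(array);

        list.set(0, "z");                 // allowed: writes through to the backing array
        System.out.println(array[0]);     // prints "z"

        // list.add("d");                 // throws UnsupportedOperationException
        // list.remove(0);                // throws UnsupportedOperationException
    }
}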

Usage

From source file:com.verizon.Main.java

public static void main(String[] args) throws Exception {

    String warehouseLocation = "file:" + System.getProperty("user.dir") + "/spark-warehouse";

    SparkSession spark = SparkSession.builder().appName("Verizon").config("spark.master", "local[2]")
            .config("spark.sql.warehouse.dir", warehouseLocation).enableHiveSupport().getOrCreate();

    Configuration configuration = new Configuration();
    configuration.addResource(new Path(System.getProperty("HADOOP_INSTALL") + "/conf/core-site.xml"));
    configuration.addResource(new Path(System.getProperty("HADOOP_INSTALL") + "/conf/hdfs-site.xml"));
    configuration.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
    configuration.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());

    FileSystem hdfs = FileSystem.get(new URI("hdfs://localhost:9000"), configuration);

    SQLContext context = new SQLContext(spark);
    String schemaString = " Device,Title,ReviewText,SubmissionTime,UserNickname";
    //spark.read().textFile(schemaString)
    Dataset<Row> df = spark.read().csv("hdfs://localhost:9000/data.csv");
    //df.show();
    //df.printSchema();
    df = df.select("_c2");

    Path file = new Path("hdfs://localhost:9000/tempFile.txt");
    if (hdfs.exists(file)) {
        hdfs.delete(file, true);
    }

    df.write().csv("hdfs://localhost:9000/tempFile.txt");

    JavaRDD<String> lines = spark.read().textFile("hdfs://localhost:9000/tempFile.txt").javaRDD();
    JavaRDD<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
        @Override
        public Iterator<String> call(String s) {
            return Arrays.asList(SPACE.split(s)).iterator();
        }
    });

    JavaPairRDD<String, Integer> ones = words.mapToPair(new PairFunction<String, String, Integer>() {
        @Override
        public Tuple2<String, Integer> call(String s) {
            s = s.replaceAll("[^a-zA-Z0-9]+", "");
            s = s.toLowerCase().trim();
            return new Tuple2<>(s, 1);
        }
    });

    JavaPairRDD<String, Integer> counts = ones.reduceByKey(new Function2<Integer, Integer, Integer>() {
        @Override
        public Integer call(Integer i1, Integer i2) {
            return i1 + i2;
        }
    });

    JavaPairRDD<Integer, String> frequencies = counts
            .mapToPair(new PairFunction<Tuple2<String, Integer>, Integer, String>() {
                @Override
                public Tuple2<Integer, String> call(Tuple2<String, Integer> s) {
                    return new Tuple2<Integer, String>(s._2, s._1);
                }
            });

    frequencies = frequencies.sortByKey(false);

    JavaPairRDD<String, Integer> result = frequencies
            .mapToPair(new PairFunction<Tuple2<Integer, String>, String, Integer>() {
                @Override
                public Tuple2<String, Integer> call(Tuple2<Integer, String> s) throws Exception {
                    return new Tuple2<String, Integer>(s._2, s._1);
                }

            });

    //JavaPairRDD<Integer,String> sortedByFreq = sort(frequencies, "descending"); 
    file = new Path("hdfs://localhost:9000/allresult.csv");
    if (hdfs.exists(file)) {
        hdfs.delete(file, true);
    }

    //FileUtils.deleteDirectory(new File("allresult.csv"));

    result.saveAsTextFile("hdfs://localhost:9000/allresult.csv");

    List<Tuple2<String, Integer>> output = result.take(250);

    ExportToHive hiveExport = new ExportToHive();
    String rows = "";
    for (Tuple2<String, Integer> tuple : output) {
        String date = new Date().toString();
        String keyword = tuple._1();
        Integer count = tuple._2();
        //System.out.println( keyword+ "," +count);
        rows += date + "," + "Samsung Galaxy s7," + keyword + "," + count + System.lineSeparator();

    }
    //System.out.println(rows);
    /*
    file = new Path("hdfs://localhost:9000/result.csv");
            
    if ( hdfs.exists( file )) { hdfs.delete( file, true ); } 
    OutputStream os = hdfs.create(file);
    BufferedWriter br = new BufferedWriter( new OutputStreamWriter( os, "UTF-8" ) );
    br.write(rows);
    br.close();
    */
    hdfs.close();

    FileUtils.deleteQuietly(new File("result.csv"));
    FileUtils.writeStringToFile(new File("result.csv"), rows);

    hiveExport.writeToHive(spark);
    ExportDataToServer exportServer = new ExportDataToServer();
    exportServer.sendDataToRESTService(rows);
    spark.stop();
}
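
For comparison, the tokenize-and-count steps above can be written with Java 8 lambdas; a sketch assuming Spark 2.x and the same SPACE pattern field (e.g. Pattern.compile(" ")) implied by the example:

    // Same pipeline in lambda form; SPACE is assumed to be a java.util.regex.Pattern field.
    JavaRDD<String> words = lines.flatMap(s -> Arrays.asList(SPACE.split(s)).iterator());
    JavaPairRDD<String, Integer> counts = words
            .mapToPair(w -> new Tuple2<>(w.replaceAll("[^a-zA-Z0-9]+", "").toLowerCase().trim(), 1))
            .reduceByKey(Integer::sum);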

From source file:com.revo.deployr.client.example.data.io.anon.discrete.exec.EncodedDataInBinaryFileOut.java

public static void main(String args[]) throws Exception {

    RClient rClient = null;

    try {

        /*
         * Determine DeployR server endpoint.
         */
        String endpoint = System.getProperty("endpoint");
        log.info("[ CONFIGURATION  ] Using endpoint=" + endpoint);

        /*
         * Establish RClient connection to DeployR server.
         *
         * An RClient connection is the mandatory starting
         * point for any application using the client library.
         */
        rClient = RClientFactory.createClient(endpoint);

        log.info("[   CONNECTION   ] Established anonymous " + "connection [ RClient ].");

        /*
         * Create the AnonymousProjectExecutionOptions object
         * to specify data inputs and output to the script.
         *
         * This options object can be used to pass standard
         * execution model parameters on execution calls. All
         * fields are optional.
         *
         * See the Standard Execution Model chapter in the
         * Client Library Tutorial on the DeployR website for
         * further details.
         */
        AnonymousProjectExecutionOptions options = new AnonymousProjectExecutionOptions();

        /* 
         * Simulate application generated data. This data
         * is first encoded using the RDataFactory before
         * being passed as an input on the execution.
         *
         * This encoded R input is automatically converted
         * into a workspace object before script execution.
         */
        RData generatedData = simulateGeneratedData();
        if (generatedData != null) {
            List<RData> rinputs = Arrays.asList(generatedData);
            options.rinputs = rinputs;
        }

        log.info("[   DATA INPUT   ] DeployR-encoded R input "
                + "set on execution, [ ProjectExecutionOptions.rinputs ].");

        /*
         * Execute a public analytics Web service as an anonymous
         * user based on a repository-managed R script:
         * /testuser/example-data-io/dataIO.R
         */
        RScriptExecution exec = rClient.executeScript("dataIO.R", "example-data-io", "testuser", null, options);

        log.info("[   EXECUTION    ] Discrete R script " + "execution completed [ RScriptExecution ].");

        /*
         * Retrieve the working directory file (artifact) called
         * hip.rData that was generated by the execution.
         *
         * Outputs generated by an execution can be used in any
         * number of ways by client applications, including:
         *
         * 1. Use output data to perform further calculations.
         * 2. Display output data to an end-user.
         * 3. Write output data to a database.
         * 4. Pass output data along to another Web service.
         * 5. etc.
         */
        List<RProjectFile> wdFiles = exec.about().artifacts;

        for (RProjectFile wdFile : wdFiles) {
            if (wdFile.about().filename.equals("hip.rData")) {
                log.info("[  DATA OUTPUT   ] Retrieved working directory " + "file output "
                        + wdFile.about().filename + " [ RProjectFile ].");
                InputStream fis = null;
                try {
                    fis = wdFile.download();
                } catch (Exception ex) {
                    log.warn("Working directory binary file " + ex);
                } finally {
                    IOUtils.closeQuietly(fis);
                }
            }
        }

    } catch (Exception ex) {
        log.warn("Unexpected runtime exception=" + ex);
    } finally {
        try {
            if (rClient != null) {
                /*
                 * Release rClient connection before application exits.
                 */
                rClient.release();
            }
        } catch (Exception fex) {
        }
    }

}
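
Since exactly one RData input is passed above, Collections.singletonList is an equivalent alternative to Arrays.asList here, assuming the client library only reads the rinputs list:

    // Equivalent single-input form (java.util.Collections):
    options.rinputs = Collections.singletonList(generatedData);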

From source file:com.metamx.druid.utils.ExposeS3DataSource.java

public static void main(String[] args) throws ServiceException, IOException, NoSuchAlgorithmException {
    CLI cli = new CLI();
    cli.addOption(new RequiredOption(null, "s3Bucket", true, "s3 bucket to pull data from"));
    cli.addOption(new RequiredOption(null, "s3Path", true,
            "base input path in s3 bucket.  Everything until the date strings."));
    cli.addOption(new RequiredOption(null, "timeInterval", true, "ISO8601 interval of dates to index"));
    cli.addOption(new RequiredOption(null, "granularity", true, String.format(
            "granularity of index, supported granularities: [%s]", Arrays.asList(Granularity.values()))));
    cli.addOption(new RequiredOption(null, "zkCluster", true, "Cluster string to connect to ZK with."));
    cli.addOption(new RequiredOption(null, "zkBasePath", true, "The base path to register index changes to."));

    CommandLine commandLine = cli.parse(args);

    if (commandLine == null) {
        return;
    }

    String s3Bucket = commandLine.getOptionValue("s3Bucket");
    String s3Path = commandLine.getOptionValue("s3Path");
    String timeIntervalString = commandLine.getOptionValue("timeInterval");
    String granularity = commandLine.getOptionValue("granularity");
    String zkCluster = commandLine.getOptionValue("zkCluster");
    String zkBasePath = commandLine.getOptionValue("zkBasePath");

    Interval timeInterval = new Interval(timeIntervalString);
    Granularity gran = Granularity.valueOf(granularity.toUpperCase());
    final RestS3Service s3Client = new RestS3Service(new AWSCredentials(
            System.getProperty("com.metamx.aws.accessKey"), System.getProperty("com.metamx.aws.secretKey")));
    ZkClient zkClient = new ZkClient(new ZkConnection(zkCluster), Integer.MAX_VALUE, new StringZkSerializer());

    zkClient.waitUntilConnected();

    for (Interval interval : gran.getIterable(timeInterval)) {
        log.info("Processing interval[%s]", interval);
        String s3DatePath = JOINER.join(s3Path, gran.toPath(interval.getStart()));
        if (!s3DatePath.endsWith("/")) {
            s3DatePath += "/";
        }

        StorageObjectsChunk chunk = s3Client.listObjectsChunked(s3Bucket, s3DatePath, "/", 2000, null, true);
        TreeSet<String> commonPrefixes = Sets.newTreeSet();
        commonPrefixes.addAll(Arrays.asList(chunk.getCommonPrefixes()));

        if (commonPrefixes.isEmpty()) {
            log.info("Nothing at s3://%s/%s", s3Bucket, s3DatePath);
            continue;
        }

        String latestPrefix = commonPrefixes.last();

        log.info("Latest segments at [s3://%s/%s]", s3Bucket, latestPrefix);

        chunk = s3Client.listObjectsChunked(s3Bucket, latestPrefix, "/", 2000, null, true);
        Integer partitionNumber;
        if (chunk.getCommonPrefixes().length == 0) {
            partitionNumber = null;
        } else {
            partitionNumber = -1;
            for (String partitionPrefix : chunk.getCommonPrefixes()) {
                String[] splits = partitionPrefix.split("/");
                partitionNumber = Math.max(partitionNumber, Integer.parseInt(splits[splits.length - 1]));
            }
        }

        log.info("Highest segment partition[%,d]", partitionNumber);

        if (partitionNumber == null) {
            final S3Object s3Obj = new S3Object(new S3Bucket(s3Bucket),
                    String.format("%sdescriptor.json", latestPrefix));
            updateWithS3Object(zkBasePath, s3Client, zkClient, s3Obj);
        } else {
            for (int i = partitionNumber; i >= 0; --i) {
                final S3Object partitionObject = new S3Object(new S3Bucket(s3Bucket),
                        String.format("%s%s/descriptor.json", latestPrefix, i));

                updateWithS3Object(zkBasePath, s3Client, zkClient, partitionObject);
            }
        }
    }
}

From source file:com.revo.deployr.client.example.data.io.anon.discrete.exec.ExternalDataInDataFileOut.java

public static void main(String args[]) throws Exception {

    RClient rClient = null;

    try {

        /*
         * Determine DeployR server endpoint.
         */
        String endpoint = System.getProperty("endpoint");
        log.info("[ CONFIGURATION  ] Using endpoint=" + endpoint);

        /*
         * Establish RClient connection to DeployR server.
         *
         * An RClient connection is the mandatory starting
         * point for any application using the client library.
         */
        rClient = RClientFactory.createClient(endpoint);

        log.info("[   CONNECTION   ] Established anonymous " + "connection [ RClient ].");

        /*
         * Create the AnonymousProjectExecutionOptions object
         * to specify data inputs and output to the script.
         *
         * This options object can be used to pass standard
         * execution model parameters on execution calls. All
         * fields are optional.
         *
         * See the Standard Execution Model chapter in the
         * Client Library Tutorial on the DeployR website for
         * further details.
         */
        AnonymousProjectExecutionOptions options = new AnonymousProjectExecutionOptions();

        /* 
         * Load an R object literal "hipStarUrl" into the
         * workspace prior to script execution.
         *
         * The R script checks for the existence of "hipStarUrl"
         * in the workspace and if present uses the URL path
         * to load the Hipparcos star dataset from the DAT file
         * at that location.
         */
        RData hipStarUrl = RDataFactory.createString("hipStarUrl", HIP_DAT_URL);
        List<RData> rinputs = Arrays.asList(hipStarUrl);
        options.rinputs = rinputs;

        log.info("[   DATA INPUT   ] External data source input "
                + "set on execution, [ ProjectPreloadOptions.rinputs ].");

        /*
         * Execute a public analytics Web service as an anonymous
         * user based on a repository-managed R script:
         * /testuser/example-data-io/dataIO.R
         */
        RScriptExecution exec = rClient.executeScript("dataIO.R", "example-data-io", "testuser", null, options);

        log.info("[   EXECUTION    ] Discrete R script " + "execution completed [ RScriptExecution ].");

        /*
         * Retrieve the working directory file (artifact) called
         * hip.csv that was generated by the execution.
         *
         * Outputs generated by an execution can be used in any
         * number of ways by client applications, including:
         *
         * 1. Use output data to perform further calculations.
         * 2. Display output data to an end-user.
         * 3. Write output data to a database.
         * 4. Pass output data along to another Web service.
         * 5. etc.
         */
        List<RProjectFile> wdFiles = exec.about().artifacts;

        for (RProjectFile wdFile : wdFiles) {
            if (wdFile.about().filename.equals("hip.csv")) {
                log.info("[  DATA OUTPUT   ] Retrieved working directory " + "file output "
                        + wdFile.about().filename + " [ RProjectFile ].");
                InputStream fis = null;
                try {
                    fis = wdFile.download();
                } catch (Exception ex) {
                    log.warn("Working directory data file " + ex);
                } finally {
                    IOUtils.closeQuietly(fis);
                }
            }
        }

    } catch (Exception ex) {
        log.warn("Unexpected runtime exception=" + ex);
    } finally {
        try {
            if (rClient != null) {
                /*
                 * Release rClient connection before application exits.
                 */
                rClient.release();
            }
        } catch (Exception fex) {
        }
    }

}

From source file:com.act.lcms.db.io.LoadPlateCompositionIntoDB.java

public static void main(String[] args) throws Exception {
    Options opts = new Options();
    opts.addOption(Option.builder("t").argName("type")
            .desc("The type of plate composition in this file, valid options are: "
                    + StringUtils.join(Arrays.asList(Plate.CONTENT_TYPE.values()), ", "))
            .hasArg().longOpt("plate-type").required().build());
    opts.addOption(Option.builder("i").argName("path").desc("The plate composition file to read").hasArg()
            .longOpt("input-file").required().build());

    // DB connection options.
    opts.addOption(Option.builder().argName("database url")
            .desc("The url to use when connecting to the LCMS db").hasArg().longOpt("db-url").build());
    opts.addOption(Option.builder("u").argName("database user").desc("The LCMS DB user").hasArg()
            .longOpt("db-user").build());
    opts.addOption(Option.builder("p").argName("database password").desc("The LCMS DB password").hasArg()
            .longOpt("db-pass").build());
    opts.addOption(Option.builder("H").argName("database host")
            .desc(String.format("The LCMS DB host (default = %s)", DB.DEFAULT_HOST)).hasArg().longOpt("db-host")
            .build());
    opts.addOption(Option.builder("P").argName("database port")
            .desc(String.format("The LCMS DB port (default = %d)", DB.DEFAULT_PORT)).hasArg().longOpt("db-port")
            .build());
    opts.addOption(Option.builder("N").argName("database name")
            .desc(String.format("The LCMS DB name (default = %s)", DB.DEFAULT_DB_NAME)).hasArg()
            .longOpt("db-name").build());

    // Everybody needs a little help from their friends.
    opts.addOption(
            Option.builder("h").argName("help").desc("Prints this help message").longOpt("help").build());

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HelpFormatter fmt = new HelpFormatter();
        fmt.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        return;
    }

    File inputFile = new File(cl.getOptionValue("input-file"));
    if (!inputFile.exists()) {
        System.err.format("Unable to find input file at %s\n", cl.getOptionValue("input-file"));
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    PlateCompositionParser parser = new PlateCompositionParser();
    parser.processFile(inputFile);

    Plate.CONTENT_TYPE contentType = null;
    try {
        contentType = Plate.CONTENT_TYPE.valueOf(cl.getOptionValue("plate-type"));
    } catch (IllegalArgumentException e) {
        System.err.format("Unrecognized plate type '%s'\n", cl.getOptionValue("plate-type"));
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    DB db;
    if (cl.hasOption("db-url")) {
        db = new DB().connectToDB(cl.getOptionValue("db-url"));
    } else {
        Integer port = null;
        if (cl.getOptionValue("P") != null) {
            port = Integer.parseInt(cl.getOptionValue("P"));
        }
        db = new DB().connectToDB(cl.getOptionValue("H"), port, cl.getOptionValue("N"), cl.getOptionValue("u"),
                cl.getOptionValue("p"));
    }

    try {
        db.getConn().setAutoCommit(false);

        Plate p = Plate.getOrInsertFromPlateComposition(db, parser, contentType);

        switch (contentType) {
        case LCMS:
            List<LCMSWell> LCMSWells = LCMSWell.getInstance().insertFromPlateComposition(db, parser, p);
            for (LCMSWell LCMSWell : LCMSWells) {
                System.out.format("%d: %d x %d  %s  %s\n", LCMSWell.getId(), LCMSWell.getPlateColumn(),
                        LCMSWell.getPlateRow(), LCMSWell.getMsid(), LCMSWell.getComposition());
            }
            break;
        case STANDARD:
            List<StandardWell> standardWells = StandardWell.getInstance().insertFromPlateComposition(db, parser,
                    p);
            for (StandardWell standardWell : standardWells) {
                System.out.format("%d: %d x %d  %s\n", standardWell.getId(), standardWell.getPlateColumn(),
                        standardWell.getPlateRow(), standardWell.getChemical());
            }
            break;
        case DELIVERED_STRAIN:
            List<DeliveredStrainWell> deliveredStrainWells = DeliveredStrainWell.getInstance()
                    .insertFromPlateComposition(db, parser, p);
            for (DeliveredStrainWell deliveredStrainWell : deliveredStrainWells) {
                System.out.format("%d: %d x %d (%s) %s %s \n", deliveredStrainWell.getId(),
                        deliveredStrainWell.getPlateColumn(), deliveredStrainWell.getPlateRow(),
                        deliveredStrainWell.getWell(), deliveredStrainWell.getMsid(),
                        deliveredStrainWell.getComposition());
            }
            break;
        case INDUCTION:
            List<InductionWell> inductionWells = InductionWell.getInstance().insertFromPlateComposition(db,
                    parser, p);
            for (InductionWell inductionWell : inductionWells) {
                System.out.format("%d: %d x %d %s %s %s %d\n", inductionWell.getId(),
                        inductionWell.getPlateColumn(), inductionWell.getPlateRow(), inductionWell.getMsid(),
                        inductionWell.getComposition(), inductionWell.getChemical(), inductionWell.getGrowth());
            }
            break;
        case PREGROWTH:
            List<PregrowthWell> pregrowthWells = PregrowthWell.getInstance().insertFromPlateComposition(db,
                    parser, p);
            for (PregrowthWell pregrowthWell : pregrowthWells) {
                System.out.format("%d: %d x %d (%s @ %s) %s %s %d\n", pregrowthWell.getId(),
                        pregrowthWell.getPlateColumn(), pregrowthWell.getPlateRow(),
                        pregrowthWell.getSourcePlate(), pregrowthWell.getSourceWell(), pregrowthWell.getMsid(),
                        pregrowthWell.getComposition(), pregrowthWell.getGrowth());
            }
            break;
        case FEEDING_LCMS:
            List<FeedingLCMSWell> feedingLCMSWells = FeedingLCMSWell.getInstance()
                    .insertFromPlateComposition(db, parser, p);
            for (FeedingLCMSWell feedingLCMSWell : feedingLCMSWells) {
                System.out.format("%d: %d x %d (%s @ %s) %s %s %f\n", feedingLCMSWell.getId(),
                        feedingLCMSWell.getPlateColumn(), feedingLCMSWell.getPlateRow(),
                        feedingLCMSWell.getMsid(), feedingLCMSWell.getComposition(),
                        feedingLCMSWell.getExtract(), feedingLCMSWell.getChemical(),
                        feedingLCMSWell.getConcentration());
            }
            break;
        default:
            System.err.format("Unrecognized/unimplemented data type '%s'\n", contentType);
            break;
        }
        // If we didn't encounter an exception, commit the transaction.
        db.getConn().commit();
    } catch (Exception e) {
        System.err.format("Caught exception when trying to load plate composition, rolling back. %s\n",
                e.getMessage());
        db.getConn().rollback();
        throw (e);
    } finally {
        db.getConn().close();
    }

}

From source file:com.twentyn.chemicalClassifier.Runner.java

public static void main(String[] args) throws Exception {
    BufferedReader reader = new BufferedReader(new FileReader(args[0]));
    BufferedWriter writer = new BufferedWriter(new FileWriter(args[1]));

    try {
        Oscar oscar = new Oscar();

        String line = null;
        /* NOTE: this is exactly the wrong way to write a TSV reader.  Caveat emptor.
         * See http://tburette.github.io/blog/2014/05/25/so-you-want-to-write-your-own-CSV-code/
         * and then use org.apache.commons.csv.CSVParser instead.
         */
        while ((line = reader.readLine()) != null) {
            // TSV means split on tabs!  Nothing else will do.
            List<String> fields = Arrays.asList(line.split("\t"));
            // Choke if our invariants aren't satisfied.  We expect every line to have a name and an InChI.
            if (fields.size() != 2) {
                throw new RuntimeException(
                        String.format("Found malformed line (all lines must have two fields): %s", line));
            }
            String name = fields.get(1);
            List<ResolvedNamedEntity> entities = oscar.findAndResolveNamedEntities(name);

            System.out.println("**********");
            System.out.println("Name: " + name);
            List<String> outputFields = new ArrayList<>(fields.size() + 1);
            outputFields.addAll(fields);
            if (entities.size() == 0) {
                System.out.println("No match");
                outputFields.add("noMatch");
            } else if (entities.size() == 1) {
                ResolvedNamedEntity entity = entities.get(0);
                NamedEntity ne = entity.getNamedEntity();
                if (ne.getStart() != 0 || ne.getEnd() != name.length()) {
                    System.out.println("Partial match");
                    printEntity(entity);
                    outputFields.add("partialMatch");
                } else {
                    System.out.println("Exact match");
                    printEntity(entity);
                    outputFields.add("exactMatch");
                    List<ChemicalStructure> structures = entity.getChemicalStructures(FormatType.STD_INCHI);
                    for (ChemicalStructure s : structures) {
                        outputFields.add(s.getValue());
                    }
                }
            } else { // Multiple matches found!
                System.out.println("Multiple matches");
                for (ResolvedNamedEntity e : entities) {
                    printEntity(e);
                }
                outputFields.add("multipleMatches");
            }

            writer.write(String.join("\t", outputFields));
            writer.newLine();
        }
    } finally {
        writer.flush();
        writer.close();
    }
}
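
A minimal sketch of the commons-csv alternative suggested in the NOTE above, assuming org.apache.commons.csv is on the classpath (CSVFormat.TDF is the library's tab-delimited format):

    try (CSVParser parser = CSVFormat.TDF.withQuote(null).parse(new FileReader(args[0]))) {
        for (CSVRecord record : parser) {
            String name = record.get(1);
            // ... same per-line processing as above ...
        }
    }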

From source file:Tester.java

public static void main(String[] args) throws Exception {

    final String filename = "fcl/generated.fcl";
    final String[] linguisticTermNames = { "muuuuuuypeque", "muypeque", "peque", "normal", "grande",
            "muygrande", "muuuuygrande" };
    final RegionDistributionInfo[] linguisticTerms = new RegionDistributionInfo[linguisticTermNames.length];
    for (int i = 0; i < linguisticTermNames.length; ++i)
        linguisticTerms[i] = new RegionDistributionInfo(linguisticTermNames[i],
                1.0 / (linguisticTerms.length - 1));

    final boolean database = true;

    System.out.println("Creating dataset " + System.currentTimeMillis());

    final MobileDevices mobileDevices = createDataset(database);
    System.out.println(mobileDevices.getMobileDevices().size());

    final Map<DeviceCapability, Variable> inputVariables = new HashMap<DeviceCapability, Variable>();

    final Variable realSizeVar = new Variable("real_size", Arrays.asList(linguisticTerms));
    final Variable resoSizeVar = new Variable("reso_size", Arrays.asList(linguisticTerms));

    inputVariables.put(DeviceCapability.real_size, realSizeVar);
    inputVariables.put(DeviceCapability.reso_size, resoSizeVar);

    final Map<String, Variable> outputVariables = new HashMap<String, Variable>();
    outputVariables.put("hey", new Variable("hey", Arrays.asList(new RegionDistributionInfo("ho", 1.0 / 2),
            new RegionDistributionInfo("lets", 1.0 / 2), new RegionDistributionInfo("go", 1.0 / 2))));
    final String rules = "// the rules \n";

    final FclCreator creator = new FclCreator();
    System.out.println("Creating rule file " + System.currentTimeMillis());

    final WarningStore warningStore = new WarningStore();
    final String fileContent = creator.createRuleFile("prueba", inputVariables,
            new HashMap<UserCapability, Variable>(), outputVariables, mobileDevices, rules, warningStore);
    warningStore.print();
    final File file = new File(filename);
    file.createNewFile();
    System.out.println("Dumping the rule file " + System.currentTimeMillis());
    FileUtils.writeStringToFile(file, fileContent);

    System.out.println("Processing the file " + System.currentTimeMillis());
    final FIS fis = FIS.load(filename, true);
    net.sourceforge.jFuzzyLogic.rule.Variable realSize = fis.getVariable("real_size");

    JFreeChart theChart = realSize.chart(false);
    @SuppressWarnings("unused")
    BufferedImage img = theChart.createBufferedImage(1000, 1000);

    /*
    FileOutputStream fos = new FileOutputStream("imagen.png");
    ImageEncoder myEncoder = ImageEncoderFactory.newInstance("png");
    myEncoder.encode(img, fos);
    fos.flush();
    fos.close();
    */

    fis.chart();

}

From source file:com.example.java.collections.ArrayExample.java

public static void main(String[] args) {

    /* ########################################################### */
    // Initializing an Array
    String[] creatures = { "goldfish", "oscar", "guppy", "minnow" };
    int[] numbers = new int[10];
    int counter = 0;
    while (counter < numbers.length) {
        numbers[counter] = counter;
        System.out.println("number[" + counter + "]: " + counter);
        counter++;
    }
    for (int theInt : numbers) {
        System.out.println(theInt);
    }
    //System.out.println(numbers[numbers.length]);
    /* ########################################################### */

    /* ########################################################### */
    // Using a character array
    String name = "Michael";
    // Convert the String to a char[]
    char[] charName = name.toCharArray();
    System.out.println(charName);

    // Array functions
    char[] html = new char[] { 'M', 'i', 'c', 'h', 'a', 'e', 'l' };
    char[] lastFour = new char[4];
    System.arraycopy(html, 3, lastFour, 0, lastFour.length);
    System.out.println(lastFour);
    /* ########################################################### */

    /* ########################################################### */
    // Using arrays of other types
    Object[] person = new Object[] { "Michael", new Integer(94), new Integer(1), new Date() };

    String fname = (String) person[0]; // ok
    Integer age = (Integer) person[1]; // ok
    Date start = (Date) person[2]; // oops! person[2] is an Integer, not the Date at
                                   // person[3], so this throws ClassCastException at runtime
    /* ########################################################### */

    /* ########################################################### */
    // Multi-dimensional array
    String[][] bits = { { "Michael", "Ernest", "MFE" }, { "Ernest", "Friedman-Hill", "EFH" },
            { "Kathi", "Duggan", "KD" }, { "Jeff", "Kellum", "JK" } };

    bits[0] = new String[] { "Rudy", "Polanski", "RP" };
    bits[1] = new String[] { "Rudy", "Washington", "RW" };
    bits[2] = new String[] { "Rudy", "O'Reilly", "RO" };
    /* ########################################################### */

    /* ########################################################### */
    //Create ArrayList from array
    String[] stringArray = { "a", "b", "c", "d", "e" };
    ArrayList<String> arrayList = new ArrayList<String>(Arrays.asList(stringArray));
    System.out.println(arrayList);
    // [a, b, c, d, e]
    /* ########################################################### */

    /* ########################################################### */
    //Check if an array contains a certain value
    String[] stringArray1 = { "a", "b", "c", "d", "e" };
    boolean b = Arrays.asList(stringArray1).contains("a");
    System.out.println(b);
    // true
    /* ########################################################### */

    /* ########################################################### */
    //Concatenate two arrays
    int[] intArray = { 1, 2, 3, 4, 5 };
    int[] intArray2 = { 6, 7, 8, 9, 10 };
    // Apache Commons Lang library
    int[] combinedIntArray = ArrayUtils.addAll(intArray, intArray2);
    /* ########################################################### */

    /* ########################################################### */
    //Joins the elements of the provided array into a single String
    // Apache common lang
    String j = StringUtils.join(new String[] { "a", "b", "c" }, ", ");
    System.out.println(j);
    // a, b, c
    /* ########################################################### */

    /* ########################################################### */
    //Convert ArrayList to Array
    String[] stringArray3 = { "a", "b", "c", "d", "e" };
    ArrayList<String> arrayList1 = new ArrayList<String>(Arrays.asList(stringArray3));
    String[] stringArr = new String[arrayList1.size()];
    arrayList1.toArray(stringArr);
    for (String s : stringArr) {
        System.out.println(s);
    }
    /* ########################################################### */

    /* ########################################################### */
    //Convert Array to Set
    Set<String> set = new HashSet<String>(Arrays.asList(stringArray));
    System.out.println(set);
    //[d, e, b, c, a]
    /* ########################################################### */

    /* ########################################################### */
    //Reverse an array
    int[] intArray1 = { 1, 2, 3, 4, 5 };
    ArrayUtils.reverse(intArray1);
    System.out.println(Arrays.toString(intArray1));
    //[5, 4, 3, 2, 1]
    /* ########################################################### */

    /* ########################################################### */
    // Remove element of an array
    int[] intArray3 = { 1, 2, 3, 4, 5 };
    int[] removed = ArrayUtils.removeElement(intArray3, 3);//create a new array
    System.out.println(Arrays.toString(removed));
    /* ########################################################### */

    /* ########################################################### */
    byte[] bytes = ByteBuffer.allocate(4).putInt(8).array();
    for (byte t : bytes) {
        System.out.format("0x%x ", t);
    }
    /* ########################################################### */

}
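
Note that the int[] snippets above use ArrayUtils rather than Arrays.asList. Applied to a primitive array, asList treats the whole array as a single element, a common pitfall:

    int[] primitives = { 1, 2, 3 };
    Integer[] boxed = { 1, 2, 3 };
    System.out.println(Arrays.asList(primitives).size()); // 1: a List<int[]> with one element
    System.out.println(Arrays.asList(boxed).size());      // 3: a List<Integer>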

From source file:cz.muni.fi.mir.mathmlcanonicalization.MathMLCanonicalizerCommandLineTool.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws FileNotFoundException, XMLStreamException {
    final Options options = new Options();
    options.addOption("c", true, "load configuration file");
    options.addOption("dtd", false,
            "enforce injection of XHTML + MathML 1.1 DTD reference into input documents");
    options.addOption("w", false, "overwrite input files by canonical outputs");
    options.addOption("h", false, "print help");

    final CommandLineParser parser = new PosixParser();
    CommandLine line = null;
    try {
        line = parser.parse(options, args);
    } catch (ParseException ex) {
        printHelp(options);
        System.exit(1);
    }

    File config = null;
    boolean overwrite = false;
    boolean dtdInjectionMode = false;
    if (line != null) {
        if (line.hasOption('c')) {
            config = new File(line.getOptionValue('c'));
        }

        if (line.hasOption("dtd")) {
            dtdInjectionMode = true;
        }

        if (line.hasOption('w')) {
            overwrite = true;
        }

        if (line.hasOption('h')) {
            printHelp(options);
            System.exit(0);
        }

        final List<String> arguments = Arrays.asList(line.getArgs());
        if (arguments.size() > 0) {
            for (String arg : arguments) {
                try {
                    List<File> files = getFiles(new File(arg));
                    for (File file : files) {
                        canonicalize(file, config, dtdInjectionMode, overwrite);
                    }
                } catch (IOException | ConfigException | JDOMException | ModuleException ex) {
                    Logger.getLogger(MathMLCanonicalizerCommandLineTool.class.getName()).log(Level.SEVERE,
                            ex.getMessage(), ex);
                }
            }
        } else {
            printHelp(options);
            System.exit(0);
        }
    }
}

From source file:fr.inria.edelweiss.kgdqp.core.FedQueryingCLI.java

@SuppressWarnings("unchecked")
public static void main(String args[]) throws ParseException, EngineException {

    List<String> endpoints = new ArrayList<String>();
    String queryPath = null;
    int slice = -1;

    Options options = new Options();
    Option helpOpt = new Option("h", "help", false, "print this message");
    Option queryOpt = new Option("q", "query", true, "specify the sparql query file");
    Option endpointOpt = new Option("e", "endpoints", true, "the list of federated sparql endpoint URLs");
    Option groupingOpt = new Option("g", "grouping", true, "triple pattern optimisation");
    Option slicingOpt = new Option("s", "slicing", true, "size of the slicing parameter");
    Option versionOpt = new Option("v", "version", false, "print the version information and exit");
    options.addOption(queryOpt);
    options.addOption(endpointOpt);
    options.addOption(helpOpt);
    options.addOption(versionOpt);
    options.addOption(groupingOpt);
    options.addOption(slicingOpt);

    String header = "Corese/KGRAM DQP command line interface";
    String footer = "\nPlease report any issue to alban.gaignard@cnrs.fr";

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);
    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("kgdqp", header, options, footer, true);
        System.exit(0);
    }
    if (!cmd.hasOption("e")) {
        logger.info("You must specify the URL of at least one SPARQL endpoint!");
        System.exit(0);
    } else {
        endpoints = new ArrayList<String>(Arrays.asList(cmd.getOptionValues("e")));
    }
    if (!cmd.hasOption("q")) {
        logger.info("You must specify a path to a SPARQL query file!");
        System.exit(0);
    } else {
        queryPath = cmd.getOptionValue("q");
    }
    if (cmd.hasOption("s")) {
        try {
            slice = Integer.parseInt(cmd.getOptionValue("s"));
        } catch (NumberFormatException ex) {
            logger.warn(cmd.getOptionValue("s") + " is not a valid number for the slicing parameter");
            logger.warn("Slicing disabled");
        }
    }
    if (cmd.hasOption("v")) {
        logger.info("version 3.0.4-SNAPSHOT");
        System.exit(0);
    }

    /////////////////
    Graph graph = Graph.create();
    QueryProcessDQP exec = QueryProcessDQP.create(graph);
    exec.setGroupingEnabled(cmd.hasOption("g"));
    if (slice > 0) {
        exec.setSlice(slice);
    }
    Provider sProv = ProviderImplCostMonitoring.create();
    exec.set(sProv);

    for (String url : endpoints) {
        try {
            exec.addRemote(new URL(url), WSImplem.REST);
        } catch (MalformedURLException ex) {
            logger.error(url + " is not a well-formed URL");
            System.exit(1);
        }
    }

    StringBuffer fileData = new StringBuffer(1000);
    BufferedReader reader = null;
    try {
        reader = new BufferedReader(new FileReader(queryPath));
    } catch (FileNotFoundException ex) {
        logger.error("Query file " + queryPath + " not found!");
        System.exit(1);
    }
    char[] buf = new char[1024];
    int numRead = 0;
    try {
        while ((numRead = reader.read(buf)) != -1) {
            String readData = String.valueOf(buf, 0, numRead);
            fileData.append(readData);
            buf = new char[1024];
        }
        reader.close();
    } catch (IOException ex) {
        logger.error("Error while reading query file " + queryPath);
        System.exit(1);
    }

    String sparqlQuery = fileData.toString();

    //        Query q = exec.compile(sparqlQuery, null);
    //        System.out.println(q);

    StopWatch sw = new StopWatch();
    sw.start();
    Mappings map = exec.query(sparqlQuery);
    int dqpSize = map.size();
    System.out.println("--------");
    long time = sw.getTime();
    System.out.println(time + " " + dqpSize);
}