Example usage for org.apache.hadoop.conf Configuration set

Introduction

This page lists usage examples for org.apache.hadoop.conf.Configuration.set.

Prototype

public void set(String name, String value) 

Document

Set the value of the name property.
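
Before the project-specific examples below, here is a minimal, self-contained sketch of set used in isolation. The property name app.greeting is invented for illustration; any string key and string value behave the same way.

import org.apache.hadoop.conf.Configuration;

public class ConfigurationSetSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // store a string value under a made-up property name
        conf.set("app.greeting", "hello");
        // read it back; the second argument is the default returned when the key is absent
        System.out.println(conf.get("app.greeting", "none"));
    }
}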

Usage

From source file:DataHBase.java

License:Open Source License

public void run(HashMap<String, String> config) throws Exception {

    //clean the former output if it exists
    Path p = new Path(config.get("hdfs_output_dir"));
    FileSystem fs = FileSystem.get(new Configuration());
    if (fs.exists(p)) {
        fs.delete(p, true);
    }

    String junction = config.get("what_to_find"); // the name of the junction
    String date1 = config.get("date1");
    String date2 = config.get("date2");
    //date1 and date2 can be of a format YYYY-MM-DD
    if (date1.length() == 10)
        date1 = date1 + " 00:00:00";
    if (date2.length() == 10)
        date2 = date2 + " 23:59:59";
    System.out.println("Looking for data of " + junction + ": " + date1 + " - " + date2);

    //create timestamps (considering time zone!) to limit data
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    sdf.setTimeZone(TimeZone.getDefault());
    Long time1 = sdf.parse(date1).getTime();
    Long time2 = sdf.parse(date2).getTime();

    //run a job
    Configuration conf = HBaseConfiguration.create();
    conf.set("mapreduce.output.textoutputformat.separator", ","); //set comma as a delimiter

    Job job = new Job(conf, "Retrieve data from hbase");
    job.setJarByClass(DataHBase.class);

    Scan scan = new Scan();
    scan.setCaching(500); // 1 is the default in Scan, which will be bad for MapReduce jobs
    scan.setCacheBlocks(false); // don't set to true for MR jobs
    scan.setMaxVersions(1);
    scan.setTimeRange(time1, time2); //take a day we are interested in
    //set a filter for a junction name
    if (!junction.equals("")) {
        SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes.toBytes("data"),
                Bytes.toBytes("location_name"), CompareOp.EQUAL, Bytes.toBytes(junction));
        scan.setFilter(filter);
    }
    //add the specific columns to the output to limit the amount of data
    scan.addFamily(Bytes.toBytes("data"));

    TableMapReduceUtil.initTableMapperJob(config.get("hbase_table"), // input HBase table name
            scan, // Scan instance to control CF and attribute selection
            TableMap.class, // mapper
            Text.class, // mapper output key
            Text.class, // mapper output value
            job);

    job.setReducerClass(Reduce.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    FileOutputFormat.setOutputPath(job, new Path(config.get("hdfs_output_dir")));

    job.waitForCompletion(true);

}

From source file:CustomAuthenticator.java

License:Apache License

public static char[] getPassword(String credentialProvider, String alias) throws IOException {
    Configuration conf = new Configuration();
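    // register the credential provider path so that getPassword can resolve the alias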
    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, credentialProvider);
    return conf.getPassword(alias);
}

From source file:TestIndexServer.java

License:Open Source License

public void testServerMergeIndex() throws Exception {
    Configuration conf = new Configuration();
    conf.set("se.indexer.IPM_merge_interval", "5000");
    IndexServer server = new IndexServer(conf);
    server.testmode = true;
    server.start();
    ArrayList<IndexItemStatus> itemstatuss = new ArrayList<IndexItemStatus>();
    for (int k = 0; k < 0; k++) {
        while (true) {
            itemstatuss.clear();
            File file = new File("indexconf");
            DataInputStream dis = new DataInputStream(new FileInputStream(file));
            int num = dis.readInt();
            for (int i = 0; i < num; i++) {
                IndexItemStatus status = new IndexItemStatus();
                status.read(dis);
                itemstatuss.add(status);
            }
            dis.close();
            if (itemstatuss.get(0).status == 2) {
                break;
            }
            if (itemstatuss.get(0).status < 0) {
                server.close();
                assertTrue(false);
            }
            Thread.sleep(3000);
        }
        UtilIndexStorage.writeFDF(itemstatuss.get(0).gettablelocation(), 3, 10000, (short) -1, false, false, true, false);
        while (true) {
            itemstatuss.clear();
            File file = new File("indexconf");
            DataInputStream dis = new DataInputStream(new FileInputStream(file));
            int num = dis.readInt();
            for (int i = 0; i < num; i++) {
                IndexItemStatus status = new IndexItemStatus();
                status.read(dis);
                itemstatuss.add(status);
            }
            dis.close();
            if (itemstatuss.get(0).status == 1) {
                break;
            }
            Thread.sleep(3000);
        }
    }

    Thread.sleep(15000);
    while (true) {
        itemstatuss.clear();
        File file = new File("indexconf");
        DataInputStream dis = new DataInputStream(new FileInputStream(file));
        int num = dis.readInt();
        for (int i = 0; i < num; i++) {
            IndexItemStatus status = new IndexItemStatus();
            status.read(dis);
            itemstatuss.add(status);
        }
        dis.close();
        if (itemstatuss.get(0).status == 2) {
            break;
        }
        Thread.sleep(3000);
    }

    // fs is presumably a FileSystem field of the surrounding test class (not shown in this excerpt)
    FileStatus[] fss = fs.listStatus(new Path(itemstatuss.get(0).indexlocation + "/nopart"));
    assertEquals(fss.length, 1);

    server.close();
}

From source file:WordLines.java

License:Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 3) {
        System.err.println("Usage: wordlines <in> [<in>...] <SearchTerm> <out>");
        System.exit(2);
    }
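    // make the search term available to the mappers via the job configuration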
    conf.set("searchWord", otherArgs[otherArgs.length - 2]);
    Job job = new Job(conf, "word lines");
    job.setJarByClass(WordLines.class);
    job.setMapperClass(TokenizerMapper.class);
    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    for (int i = 0; i < otherArgs.length - 2; ++i) {
        FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
    }
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:RyaDirectExample.java

License:Apache License

public static void main(final String[] args) throws Exception {
    final Configuration conf = getConf();
    conf.set(PrecomputedJoinIndexerConfig.PCJ_STORAGE_TYPE, PrecomputedJoinStorageType.ACCUMULO.name());
    conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);

    log.info("Creating the tables as root.");

    SailRepository repository = null;
    SailRepositoryConnection conn = null;

    try {
        log.info("Connecting to Indexing Sail Repository.");
        final Sail extSail = RyaSailFactory.getInstance(conf);
        repository = new SailRepository(extSail);
        conn = repository.getConnection();

        createPCJ(conf);

        final long start = System.currentTimeMillis();
        log.info("Running SPARQL Example: Add and Delete");
        testAddAndDelete(conn);
        log.info("Running SAIL/SPARQL Example: PCJ Search");
        testPCJSearch(conn);
        log.info("Running SAIL/SPARQL Example: Add and Temporal Search");
        testAddAndTemporalSearchWithPCJ(conn);
        log.info("Running SAIL/SPARQL Example: Add and Free Text Search with PCJ");
        testAddAndFreeTextSearchWithPCJ(conn);
        //         log.info("Running SPARQL Example: Add Point and Geo Search with PCJ");
        ////         testAddPointAndWithinSearchWithPCJ(conn);
        //         log.info("Running SPARQL Example: Temporal, Freetext, and Geo Search");
        //         testTemporalFreeGeoSearch(conn);
        //         log.info("Running SPARQL Example: Geo, Freetext, and PCJ Search");
        //         testGeoFreetextWithPCJSearch(conn);
        log.info("Running SPARQL Example: Delete Temporal Data");
        testDeleteTemporalData(conn);
        log.info("Running SPARQL Example: Delete Free Text Data");
        testDeleteFreeTextData(conn);
        //         log.info("Running SPARQL Example: Delete Geo Data");
        //         testDeleteGeoData(conn);

        log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
    } finally {
        log.info("Shutting down");
        closeQuietly(conn);
        closeQuietly(repository);
    }
}

From source file:RyaDirectExample.java

License:Apache License

private static void createPCJ(final Configuration conf) throws RepositoryException, AccumuloException,
        AccumuloSecurityException, TableExistsException, PcjException, InferenceEngineException,
        NumberFormatException, UnknownHostException, SailException, TableNotFoundException {

    final Configuration config = new AccumuloRdfConfiguration(conf);
    config.set(ConfigUtils.USE_PCJ, "false");
    SailRepository repository = null;
    SailRepositoryConnection conn = null;

    try {
        final Sail extSail = RyaSailFactory.getInstance(config);

        repository = new SailRepository(extSail);
        conn = repository.getConnection();

        final String queryString1 = ""//
                + "SELECT ?e ?c ?l ?o " //
                + "{" //
                + "  ?c a ?e . "//
                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
                + "  ?e <uri:talksTo> ?o . "//
                + "}";//

        final String queryString2 = ""//
                + "SELECT ?e ?c ?l ?o " //
                + "{" //
                + "  ?e a ?c . "//
                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
                + "  ?e <uri:talksTo> ?o . "//
                + "}";//

        URI obj, subclass, talksTo;
        final URI person = new URIImpl("urn:people:alice");
        final URI feature = new URIImpl("urn:feature");
        final URI sub = new URIImpl("uri:entity");
        subclass = new URIImpl("uri:class");
        obj = new URIImpl("uri:obj");
        talksTo = new URIImpl("uri:talksTo");

        conn.add(person, RDF.TYPE, sub);
        conn.add(feature, RDF.TYPE, sub);
        conn.add(sub, RDF.TYPE, subclass);
        conn.add(sub, RDFS.LABEL, new LiteralImpl("label"));
        conn.add(sub, talksTo, obj);

        final String tablename1 = RYA_TABLE_PREFIX + "INDEX_1";
        final String tablename2 = RYA_TABLE_PREFIX + "INDEX_2";

        final Connector accCon = new MockInstance(INSTANCE).getConnector("root",
                new PasswordToken("".getBytes(StandardCharsets.UTF_8)));

        new PcjTables().createAndPopulatePcj(conn, accCon, tablename1, queryString1,
                new String[] { "e", "c", "l", "o" }, Optional.<PcjVarOrderFactory>absent());

        new PcjTables().createAndPopulatePcj(conn, accCon, tablename2, queryString2,
                new String[] { "e", "c", "l", "o" }, Optional.<PcjVarOrderFactory>absent());

    } catch (final RyaDAOException e) {
        throw new Error("While creating PCJ tables.", e);
    } finally {
        closeQuietly(conn);
        closeQuietly(repository);
    }

}

From source file:MapReduce3.java

public static void main(String[] args) throws Exception {

    String dst = "hdfs://localhost:9000/data/2006a.csv";

    // output paths
    //  String dstOut = "hdfs://localhost:9000/mapreduce/result3/1";
    String dstOut = "/Users/wendyzhuo/NetBeansProjects/final_Hadoop/src/output3/1";
    String outFiles = "/Users/wendyzhuo/NetBeansProjects/final_Hadoop/src/output3/2";
    Configuration hadoopConfig = new Configuration();

    // explicitly bind the hdfs:// and file:// schemes to their FileSystem implementation classes
    hadoopConfig.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());

    hadoopConfig.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());

    Job job = new Job(hadoopConfig);
    Job job2 = new Job(hadoopConfig);

    FileInputFormat.addInputPath(job, new Path(dst));
    FileOutputFormat.setOutputPath(job, new Path(dstOut));
    FileInputFormat.addInputPath(job2, new Path(dstOut));
    FileOutputFormat.setOutputPath(job2, new Path(outFiles));

    JobConf map1Conf = new JobConf(false);
    ChainMapper.addMapper(job, TempMapper.class, LongWritable.class, Text.class, CompositeKey_wd.class,
            IntWritable.class, map1Conf);
    JobConf reduceConf = new JobConf(false);
    ChainReducer.setReducer(job, TempReducer.class, CompositeKey_wd.class, IntWritable.class,
            CompositeKey_wd.class, IntWritable.class, reduceConf);

    JobConf map2Conf = new JobConf(false);
    ChainMapper.addMapper(job2, TempMapper2.class, LongWritable.class, Text.class, IntWritable.class,
            CompositeKey_wd.class, map2Conf);
    JobConf map3Conf = new JobConf(false);
    ChainReducer.setReducer(job2, TempReduce2.class, IntWritable.class, CompositeKey_wd.class, Text.class,
            IntWritable.class, map3Conf);
    //       
    //  JobClient.runJob(job);

    // Mapper and Reducer classes (handled above via ChainMapper/ChainReducer; direct setters left commented out)
    //        job.setMapperClass(TempMapper.class);
    //
    //        job.setReducerClass(TempReducer.class);

    // set the output Key and Value classes
    job.setOutputKeyClass(CompositeKey_wd.class);

    job.setOutputValueClass(IntWritable.class);

    job2.setMapOutputKeyClass(IntWritable.class);
    job2.setMapOutputValueClass(CompositeKey_wd.class);

    //  job2.setSortComparatorClass(LongWritable.DecreasingComparator.class);

    // run the jobs
    job.waitForCompletion(true);
    System.out.println("Finished1");
    job2.waitForCompletion(true);
    System.out.println("Finished2");

}

From source file:ConnectHBase124ClusterExample.java

@Test
public void test() throws Exception {
    final String tableName = "test_table";
    final String family = "family";
    final String columnQualifier = "qual";
    final String rowValue = "foo";
    final String cellValue = "bar";

    Configuration baseDefaults = new Configuration();
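    // declare the HBase version these defaults target (1.2.4 here); presumably used to satisfy the client's defaults-version check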
    baseDefaults.set("hbase.defaults.for.version", "1.2.4");
    Configuration configuration = HBaseConfiguration.create(baseDefaults);
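    // point the client at the remote cluster's ZooKeeper quorum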
    configuration.set("hbase.zookeeper.quorum", "new-hbase-server:2181");
    UserGroupInformation.setConfiguration(configuration);
    try (HBaseAdmin admin = new HBaseAdmin(configuration);) {
        HColumnDescriptor col = new HColumnDescriptor(family);
        if (!admin.isTableAvailable(tableName)) {
            System.out.println("Table " + tableName + " does not exist. Creating");
            HTableDescriptor desc = new HTableDescriptor(tableName);
            desc.addFamily(col);
            admin.createTable(desc);
            System.out.println("Table " + tableName + " created");
        }
        if (!admin.isTableEnabled(tableName)) {
            System.out.println("table " + tableName + " is not enabled. enabling");
            admin.enableTable(tableName);
        }
        try (HTablePool pool = new HTablePool(configuration, 1);
                HTableInterface table = pool.getTable(tableName)) {
            //                Put put = new Put(rowValue.getBytes());
            //                put.add(family.getBytes(), columnQualifier.getBytes(), cellValue.getBytes());
            //                table.put(put);

            Scan scan = new Scan();
            scan.setCacheBlocks(false);
            scan.setCaching(1000);
            scan.setBatch(1000);
            scan.setMaxVersions(1);
            try (ResultScanner scanner = table.getScanner(scan);) {
                Result result = scanner.next();
                while (result != null) {
                    KeyValue cell = result.getColumnLatest(family.getBytes(), columnQualifier.getBytes());
                    System.out.println("row:" + new String(cell.getRow()));
                    System.out.println("value:" + new String(cell.getValue()));
                    result = scanner.next();
                }
            }
        }

    }

}

From source file:GetUserInfoGivenMovieId.java

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
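    // pass the requested movie id to the mappers via the job configuration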
    conf.set("movieId", args[2]);
    Job job = new Job(conf, "GetUserInfoGivenMovieId");

    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    job.setJarByClass(GetUserInfoGivenMovieId.class);
    job.setMapperClass(Map.class);
    job.setCombinerClass(Reduce.class);
    job.setReducerClass(Reduce.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[3]));

    boolean flag1 = job.waitForCompletion(true);

    if (flag1) {
        Configuration conf2 = new Configuration();
        //FileSystem fs = FileSystem.get(conf2);
        //Path Intermediate = new Path(args[1]);
        //DistributedCache.addCacheFile(Intermediate.toUri(), conf2);
        //DistributedCache.addCacheFile(new URI(args[1]),conf2);

        Job job2 = new Job(conf2, "UserInfo");
        /*Job job2 = new Job(new Configuration());
        Configuration conf2 = job.getConfiguration();
        job2.setJobName("Join with Cache");
        DistributedCache.addCacheFile(new URI(args[1]), conf2);*/
        job2.addCacheFile(new URI(args[1]));
        job2.setOutputKeyClass(Text.class);
        job2.setOutputValueClass(Text.class);
        job2.setJarByClass(GetUserInfoGivenMovieId.class);
        job2.setMapperClass(MapWithJoin.class);
        //job2.setCombinerClass(Reduce.class);
        job2.setReducerClass(ReduceFinal.class);

        job2.setInputFormatClass(TextInputFormat.class);
        job2.setOutputFormatClass(TextOutputFormat.class);

        FileInputFormat.addInputPath(job2, new Path(args[3]));
        FileOutputFormat.setOutputPath(job2, new Path(args[4]));
        job2.waitForCompletion(true);
    }
}

From source file:ParascaleFsTestCase.java

License:Apache License

/**
 * Creates a new Hadoop Configuration object.
 *
 * @return a new Hadoop configuration object
 *
 * @see Configuration
 */
protected Configuration getConf() {
    final Configuration conf = new Configuration();
    if (setDefaultBlockSize) {
        conf.setLong(RawParascaleFileSystem.PS_DEFAULT_BLOCKSIZE, defaultBlockSize);
    }
    if (setDefaultReplication) {
        conf.setLong(RawParascaleFileSystem.PS_DEFAULT_REPLICATION, defaultReplication);
    }
    if (setMountPoint) {
        conf.set(RawParascaleFileSystem.PS_MOUNT_POINT, String.format("%s/%s", getTempDir(), mountPoint));
    }
    if (setDefaultFsName) {
        conf.set(FS_DEFAULT_NAME, String.format("%s%s@%s", fsScheme, virtualFs, controlNode));
    }

    return conf;
}