List of usage examples for org.apache.hadoop.conf.Configuration addResource
The examples below exercise several overloads of Configuration.addResource:

public void addResource(String name)      // classpath resource
public void addResource(Path file)        // local filesystem path
public void addResource(URL url)
public void addResource(InputStream in)
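As a quick orientation, here is a minimal, self-contained sketch (not taken from any of the projects below; my-site.xml is a hypothetical resource name) that exercises the same overloads:

import java.io.InputStream;
import java.net.URL;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class AddResourceSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // By name: the resource is looked up on the classpath.
        conf.addResource("my-site.xml");

        // By Path: the local filesystem is examined directly, bypassing the classpath.
        conf.addResource(new Path("/etc/hadoop/conf/core-site.xml"));

        // By URL or InputStream, e.g. for a file bundled with the application.
        URL url = AddResourceSketch.class.getResource("/my-site.xml");
        if (url != null) {
            conf.addResource(url);
        }
        InputStream in = AddResourceSketch.class.getResourceAsStream("/my-site.xml");
        if (in != null) {
            conf.addResource(in);
        }

        System.out.println(conf.get("fs.defaultFS"));
    }
}

Resources added later override values from earlier ones, unless a property was marked final in an earlier resource.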
From source file:com.intropro.prairie.unit.hdfs.HdfsUnit.java
License:Apache License
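A test-harness override that layers the classpath resource hdfs-site.prairie.xml on top of the inherited configuration before setting mini-DFS and proxy-user properties: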
@Override
protected Configuration gatherConfigs() {
    Configuration conf = super.gatherConfigs();
    conf.addResource("hdfs-site.prairie.xml");
    conf.set("hdfs.minidfs.basedir", getTmpDir().toString());
    conf.set("hadoop.proxyuser." + user + ".hosts", "*");
    conf.set("hadoop.proxyuser." + user + ".groups", "*");
    return conf;
}
From source file:com.intropro.prairie.unit.oozie.OozieUnit.java
License:Apache License
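The Oozie unit follows the same pattern, extending the inherited configuration with oozie-site.prairie.xml from the classpath: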
@Override
protected Configuration gatherConfigs() {
    Configuration configuration = super.gatherConfigs();
    configuration.addResource("oozie-site.prairie.xml");
    return configuration;
}
From source file:com.intropro.prairie.unit.pig.PigUnit.java
License:Apache License
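Here the YARN unit's settings are first copied entry by entry into a fresh Configuration, after which pig-site.prairie.xml is added as a classpath resource: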
@Override
public Configuration gatherConfigs() {
    Configuration configuration = new Configuration(super.gatherConfigs());
    Iterator<Map.Entry<String, String>> iterator = yarnUnit.getConfig().iterator();
    while (iterator.hasNext()) {
        Map.Entry<String, String> next = iterator.next();
        configuration.set(next.getKey(), next.getValue());
    }
    configuration.addResource("pig-site.prairie.xml");
    return configuration;
}
From source file:com.kappaware.hbtools.common.Utils.java
License:Apache License
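This helper loads each user-supplied configuration file through the Path overload, verifying readability first, and then optionally performs a Kerberos keytab login against the resulting configuration: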
public static Configuration buildHBaseConfiguration(HBaseParameters parameters)
        throws ConfigurationException, IOException {
    Configuration config = HBaseConfiguration.create();
    for (String cf : parameters.getConfigFiles()) {
        File f = new File(cf);
        if (!f.canRead()) {
            throw new ConfigurationException(String.format("Unable to read file '%s'", cf));
        }
        log.debug(String.format("Will load '%s'", cf));
        config.addResource(new Path(cf));
    }
    config.set("hbase.client.retries.number", Integer.toString(parameters.getClientRetries()));
    //config.reloadConfiguration();
    if (Utils.hasText(parameters.getDumpConfigFile())) {
        Utils.dumpConfiguration(config, parameters.getDumpConfigFile());
    }
    if (Utils.hasText(parameters.getKeytab()) && Utils.hasText(parameters.getPrincipal())) {
        // Check if keytab file exists and is readable
        File f = new File(parameters.getKeytab());
        if (!f.canRead()) {
            throw new ConfigurationException(
                    String.format("Unable to read keytab file: '%s'", parameters.getKeytab()));
        }
        UserGroupInformation.setConfiguration(config);
        if (!UserGroupInformation.isSecurityEnabled()) {
            throw new ConfigurationException(
                    "Security is not enabled in core-site.xml while Kerberos principal and keytab are provided.");
        }
        try {
            UserGroupInformation userGroupInformation = UserGroupInformation
                    .loginUserFromKeytabAndReturnUGI(parameters.getPrincipal(), parameters.getKeytab());
            UserGroupInformation.setLoginUser(userGroupInformation);
        } catch (Exception e) {
            throw new ConfigurationException(
                    String.format("Kerberos: Unable to authenticate with principal='%s' and keytab='%s': %s.",
                            parameters.getPrincipal(), parameters.getKeytab(), e.getMessage()));
        }
    }
    return config;
}
From source file:com.knewton.mapreduce.example.SSTableMRExample.java
License:Apache License
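This job setup resolves knewton-site.xml via the class loader and adds it by URL. Note that Job.getInstance(conf) copies the Configuration, so resources added to conf afterwards do not automatically reach the Job's own copy: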
private static Job getJobConf(CommandLine cli) throws URISyntaxException, IOException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf);
    ClassLoader loader = SSTableMRExample.class.getClassLoader();
    URL url = loader.getResource("knewton-site.xml");
    conf.addResource(url);
    SSTableInputFormat.setPartitionerClass(RandomPartitioner.class.getName(), job);
    SSTableInputFormat.setComparatorClass(LongType.class.getName(), job);
    SSTableInputFormat.setColumnFamilyName("StudentEvents", job);
    SSTableInputFormat.setKeyspaceName("demoKeyspace", job);
    if (cli.hasOption('s')) {
        conf.set(PropertyConstants.START_DATE.txt, cli.getOptionValue('s'));
    }
    if (cli.hasOption('e')) {
        conf.set(PropertyConstants.END_DATE.txt, cli.getOptionValue('e'));
    }
    return job;
}
From source file:com.linkedin.pinot.common.segment.fetcher.HdfsSegmentFetcher.java
License:Apache License
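Pinot's HDFS segment fetcher adds core-site.xml and hdfs-site.xml from a caller-supplied directory using the Path overload, falling back to Hadoop defaults when no path is given: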
private Configuration getConf(String hadoopConfPath) {
    Configuration hadoopConf = new Configuration();
    if (Strings.isNullOrEmpty(hadoopConfPath)) {
        LOGGER.warn("no hadoop conf path is provided, will rely on default config");
    } else {
        hadoopConf.addResource(new Path(hadoopConfPath, "core-site.xml"));
        hadoopConf.addResource(new Path(hadoopConfPath, "hdfs-site.xml"));
    }
    return hadoopConf;
}
From source file:com.main.MRSearchMain.java
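An HBase scan job that loads core-site.xml, hdfs-site.xml, and mapred-site.xml from a hard-coded CDH configuration directory before submitting a map-only search: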
public void searchHBase(int numOfDays) throws IOException, InterruptedException, ClassNotFoundException {
    long startTime;
    long endTime;
    String path = "/home/hadoop/app/hadoop-2.0.0-cdh4.3.0/etc/hadoop/";
    Configuration conf = HBaseConfiguration.create();
    // conf.set("hbase.zookeeper.quorum", "streamslab.localdomain");
    // conf.set("fs.default.name", "hdfs://streamslab.localdomain:8020");
    // conf.set("mapred.job.tracker", "hdfs://streamslab.localdomain:50300");
    // conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
    // Needed so FileSystem can resolve the local filesystem implementation.
    conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
    conf.addResource(new Path(path + "core-site.xml"));
    conf.addResource(new Path(path + "hdfs-site.xml"));
    conf.addResource(new Path(path + "mapred-site.xml"));
    // Search criteria passed to the map tasks via the configuration.
    conf.set("search.license", "C87310");
    conf.set("search.color", "10");
    conf.set("search.direction", "2");
    Job job = new Job(conf, "MRSearchHBase");
    System.out.println("search.license: " + conf.get("search.license"));
    job.setNumReduceTasks(0);
    job.setJarByClass(MRSearchMain.class);
    Scan scan = new Scan();
    scan.addFamily(FAMILY_NAME);
    byte[] startRow = Bytes.toBytes("2011010100000");
    byte[] stopRow;
    switch (numOfDays) {
    case 1:
        stopRow = Bytes.toBytes("2011010200000");
        break;
    case 10:
        stopRow = Bytes.toBytes("2011011100000");
        break;
    case 30:
        stopRow = Bytes.toBytes("2011020100000");
        break;
    case 365:
        stopRow = Bytes.toBytes("2012010100000");
        break;
    default:
        stopRow = Bytes.toBytes("2011010101000");
    }
    // Restrict the scan to the row-key range.
    scan.setStartRow(startRow);
    scan.setStopRow(stopRow);
    TableMapReduceUtil.initTableMapperJob(TABLE_NAME, scan, SearchMapper.class,
            ImmutableBytesWritable.class, Text.class, job);
    Path outPath = new Path("searchresult");
    LOG.info("outPath:" + outPath.toString());
    // Delete the output directory on HDFS if it already exists.
    FileSystem file = null;
    try {
        file = FileSystem.get(conf);
    } catch (IOException e) {
        e.printStackTrace();
    }
    // HDFS_File file = new HDFS_File();
    // file.DelFile(conf, outPath.getName(), true);
    if (file.exists(outPath)) {
        file.delete(outPath, true);
        LOG.info("=====delPath " + outPath.toString() + "=====");
    }
    FileOutputFormat.setOutputPath(job, outPath);
    startTime = System.currentTimeMillis();
    job.waitForCompletion(true);
    endTime = System.currentTimeMillis();
    LOG.info("Time used: " + (endTime - startTime));
    LOG.info("startRow:" + Text.decode(startRow));
    LOG.info("stopRow: " + Text.decode(stopRow));
}
From source file:com.mammothdata.apex.example.ApplicationTest.java
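An Apache Apex test that starts from an empty configuration (new Configuration(false)) and populates it from a properties.xml resource stream: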
@Test
public void testApplication() throws Exception {
    try {
        // Pull in sample data and shovel it into kafka
        KafkaTestProducer p = new KafkaTestProducer(TOPIC);
        List<String> lines = IOUtils.readLines(this.getClass().getResourceAsStream("/sample.short.json"),
                "UTF-8");
        p.setMessages(lines);
        new Thread(p).start();
        LocalMode lma = LocalMode.newInstance();
        Configuration conf = new Configuration(false);
        conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties.xml"));
        conf.set("dt.operator.KafkaInput.prop.topic", TOPIC);
        conf.set("dt.operator.KafkaInput.prop.zookeeper",
                "localhost:" + KafkaOperatorTestBase.TEST_ZOOKEEPER_PORT[0]);
        conf.set("dt.operator.KafkaInput.prop.maxTuplesPerWindow", "1"); // consume one string per window
        lma.prepareDAG(new Application(), conf);
        LocalMode.Controller lc = lma.getController();
        LOG.info("Application run started");
        lc.run(10000);
        LOG.info("Application run finished");
        lc.shutdown();
    } catch (ConstraintViolationException e) {
        Assert.fail("constraint violations: " + e.getConstraintViolations());
    }
}
From source file:com.mammothdata.samplepipeline.ApplicationTest.java
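A near-identical Apex test; again the InputStream overload, with operator properties overridden after the resource has been added: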
@Test
public void testApplication() throws Exception {
    try {
        // Pull in sample data and shovel it into kafka
        KafkaTestProducer p = new KafkaTestProducer(TOPIC);
        List<String> lines = IOUtils.readLines(this.getClass().getResourceAsStream("/data/sample.short.json"),
                "UTF-8");
        p.setMessages(lines);
        LOG.info("Kafka data loaded");
        new Thread(p).start();
        LocalMode lma = LocalMode.newInstance();
        Configuration conf = new Configuration(false);
        conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties.xml"));
        conf.set("dt.operator.kafkaInput.prop.topic", TOPIC);
        conf.set("dt.operator.kafkaInput.prop.zookeeper",
                "localhost:" + KafkaOperatorTestBase.TEST_ZOOKEEPER_PORT[0]);
        conf.set("dt.operator.kafkaInput.prop.maxTuplesPerWindow", "1"); // consume one word per window
        lma.prepareDAG(new Application(), conf);
        LocalMode.Controller lc = lma.getController();
        LOG.info("Application run started");
        lc.run(10000);
        LOG.info("Application run finished");
        lc.shutdown();
    } catch (ConstraintViolationException e) {
        Assert.fail("constraint violations: " + e.getConstraintViolations());
    }
}
From source file:com.marklogic.hadoop.csvtoxml.CsvToXML.java
License:Apache License
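Here a MarkLogic connector configuration file, passed as the first command-line argument, is added by name to the job's configuration after the job is created: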
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    @SuppressWarnings("deprecation")
    Job job = new Job(conf);
    job.setJobName("ProcessCSVtoXML_job");
    System.out.println("After the JobName Updates");
    job.setJarByClass(CsvToXML.class);
    job.setInputFormatClass(TextInputFormat.class);
    job.setMapperClass(ContentMapper.class);
    job.setMapOutputKeyClass(DocumentURI.class);
    job.setMapOutputValueClass(Text.class);
    job.setOutputFormatClass(ContentOutputFormat.class);
    System.out.println("Made it past external jar dependencies nodes");
    FileInputFormat.setInputPaths(job, new Path(otherArgs[1]));
    conf = job.getConfiguration();
    conf.addResource(otherArgs[0]);
    System.out.println("After the conf.set");
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}