Example usage for org.apache.hadoop.conf Configuration addResource

Introduction

On this page you can find example usages of org.apache.hadoop.conf.Configuration#addResource.

Prototype

public void addResource(Configuration conf) 

Document

Add a configuration resource.
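
The overload shown in the prototype merges the properties of another Configuration object in as a resource; the class also provides addResource overloads that take a resource name (String, resolved against the classpath), a Path (read from the local filesystem), and an InputStream, and several of the examples below use those variants. The following is a minimal sketch of the Configuration overload; the class name, property names, and values are hypothetical and only illustrate that values set directly on a configuration take precedence over values supplied by an added resource.

import org.apache.hadoop.conf.Configuration;

public class AddResourceSketch {
    public static void main(String[] args) {
        // Base configuration acting as the added resource (names and values are made up).
        Configuration base = new Configuration(false);
        base.set("example.key.a", "base-a");
        base.set("example.key.c", "base-c");

        // Add the base configuration as a resource, then set one key directly.
        Configuration conf = new Configuration(false);
        conf.addResource(base);
        conf.set("example.key.a", "direct-a");

        // Directly set values win; untouched keys fall through to the added resource.
        System.out.println(conf.get("example.key.a")); // prints "direct-a"
        System.out.println(conf.get("example.key.c")); // prints "base-c"
    }
}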

Usage

From source file:org.apache.beam.sdk.io.hdfs.HadoopFileSystemModuleTest.java

License:Apache License

@SuppressWarnings("unchecked")
@Test
public void testConfigurationSerializationDeserialization() throws Exception {
    Configuration baseConfiguration = new Configuration(false);
    baseConfiguration.set("testPropertyA", "baseA");
    baseConfiguration.set("testPropertyC", "baseC");
    Configuration configuration = new Configuration(false);
    configuration.addResource(baseConfiguration);
    configuration.set("testPropertyA", "A");
    configuration.set("testPropertyB", "B");
    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.registerModule(new HadoopFileSystemModule());
    String serializedConfiguration = objectMapper.writeValueAsString(configuration);
    Configuration deserializedConfiguration = objectMapper.readValue(serializedConfiguration,
            Configuration.class);
    assertThat(deserializedConfiguration,
            Matchers.<Map.Entry<String, String>>containsInAnyOrder(
                    new AbstractMap.SimpleEntry("testPropertyA", "A"),
                    new AbstractMap.SimpleEntry("testPropertyB", "B"),
                    new AbstractMap.SimpleEntry("testPropertyC", "baseC")));
}
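
In this test, the value "A" set directly on the configuration wins over "baseA" from the added base configuration, while "testPropertyC" exists only in the base resource and comes through unchanged; the assertions confirm that the Jackson round trip via HadoopFileSystemModule preserves that merged view.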

From source file:org.apache.blur.hive.BlurHiveMRLoaderOutputCommitter.java

License:Apache License

private void finishBulkJob(JobContext context, final boolean apply) throws IOException {
    final Configuration configuration = context.getConfiguration();
    PrivilegedExceptionAction<Void> action = new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            String workingPathStr = configuration.get(BlurConstants.BLUR_BULK_UPDATE_WORKING_PATH);
            Path workingPath = new Path(workingPathStr);
            Path tmpDir = new Path(workingPath, "tmp");
            FileSystem fileSystem = tmpDir.getFileSystem(configuration);
            String loadId = configuration.get(BlurSerDe.BLUR_MR_LOAD_ID);
            Path loadPath = new Path(tmpDir, loadId);

            if (apply) {
                Path newDataPath = new Path(workingPath, "new");
                Path dst = new Path(newDataPath, loadId);
                if (!fileSystem.rename(loadPath, dst)) {
                    LOG.error("Could not move data from src [" + loadPath + "] to dst [" + dst + "]");
                    throw new IOException(
                            "Could not move data from src [" + loadPath + "] to dst [" + dst + "]");
                }

                TableDescriptor tableDescriptor = BlurOutputFormat.getTableDescriptor(configuration);
                String connectionStr = configuration.get(BlurSerDe.BLUR_CONTROLLER_CONNECTION_STR);
                BulkTableUpdateCommand bulkTableUpdateCommand = new BulkTableUpdateCommand();
                bulkTableUpdateCommand.setAutoLoad(true);
                bulkTableUpdateCommand.setTable(tableDescriptor.getName());
                bulkTableUpdateCommand.setWaitForDataBeVisible(true);

                Configuration config = new Configuration(false);
                config.addResource(HDFS_SITE_XML);
                config.addResource(YARN_SITE_XML);
                config.addResource(MAPRED_SITE_XML);

                bulkTableUpdateCommand.addExtraConfig(config);
                if (bulkTableUpdateCommand.run(BlurClient.getClient(connectionStr)) != 0) {
                    throw new IOException("Unknown error occured duing load.");
                }
            } else {
                fileSystem.delete(loadPath, true);
            }
            return null;
        }
    };
    UserGroupInformation userGroupInformation = BlurHiveOutputFormat.getUGI(configuration);
    try {
        userGroupInformation.doAs(action);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
}

From source file:org.apache.blur.mapreduce.lib.update.BulkTableUpdateCommand.java

License:Apache License

@Override
public Integer clusterExecute(ClusterContext context) throws IOException, InterruptedException {
    String table = getTable();
    BlurConfiguration blurConfiguration = context.getBlurConfiguration(table);
    String blurZkConnection = blurConfiguration.get(BlurConstants.BLUR_ZOOKEEPER_CONNECTION);
    TableContext tableContext = context.getTableContext(table);
    TableDescriptor descriptor = tableContext.getDescriptor();
    String tableUri = descriptor.getTableUri();
    String mrIncWorkingPathStr = blurConfiguration.get(BlurConstants.BLUR_BULK_UPDATE_WORKING_PATH);
    Path mrIncWorkingPath = new Path(mrIncWorkingPathStr);
    String outputPathStr = new Path(new Path(new Path(tableUri), IMPORT),
            Long.toString(System.currentTimeMillis())).toString();
    Configuration configuration = new Configuration();
    configuration.addResource(HDFS_SITE_XML);
    configuration.addResource(YARN_SITE_XML);
    configuration.addResource(MAPRED_SITE_XML);
    for (String s : extraConfigs) {
        if (s != null) {
            InputStream inputStream = IOUtils.toInputStream(s);
            configuration.addResource(inputStream);
            inputStream.close();
        }
    }
    int run;
    try {
        run = ToolRunner.run(configuration, new Driver(), new String[] { table, mrIncWorkingPath.toString(),
                outputPathStr, blurZkConnection, Integer.toString(reducerMultipler) });
    } catch (Exception e) {
        e.printStackTrace();
        throw new IOException(e);
    }

    if (run == 0 && autoLoad) {
        Iface client = BlurClient.getClientFromZooKeeperConnectionStr(blurZkConnection);
        try {
            client.loadData(table, outputPathStr);
            if (waitForDataBeVisible) {
                waitForDataToBeVisible(client, table);
            }
        } catch (BlurException e) {
            throw new IOException(e);
        } catch (TException e) {
            throw new IOException(e);
        }
    }
    return run;
}
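
Besides pulling in the site XML files, this example also uses the addResource(InputStream) overload: each extra configuration string is wrapped in an InputStream, added as a resource, and the stream is then closed.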

From source file:org.apache.blur.utils.BlurUtil.java

License:Apache License

public static Configuration addHdfsConfig(Configuration configuration, BlurConfiguration blurConfiguration) {
    configuration.addResource("hdfs-default.xml");
    configuration.addResource("hdfs-site.xml");
    if (blurConfiguration != null) {
        Map<String, String> properties = blurConfiguration.getProperties();
        for (Entry<String, String> e : properties.entrySet()) {
            String key = e.getKey();
            if (key.startsWith(HADOOP_CONF)) {
                String hadoopKey = key.substring(HADOOP_CONF.length());
                String hadoopValue = e.getValue();
                LOG.info("Adding hadoop configuration item [{0}] [{1}]", hadoopKey, hadoopValue);
                configuration.set(hadoopKey, hadoopValue);
            }
        }
    }
    return configuration;
}
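
Here the String overload is used: "hdfs-default.xml" and "hdfs-site.xml" are resource names that Hadoop resolves against the classpath, after which individual properties from the BlurConfiguration are copied in with set().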

From source file:org.apache.camel.component.hdfs2.integration.HdfsAppendTest.java

License:Apache License

@Override
public void setUp() throws Exception {
    super.setUp();

    Configuration conf = new Configuration();
    conf.addResource("hdfs-test.xml");
    Path file = new Path("hdfs://localhost:9000/tmp/test/test-camel-simple-write-file1");
    FileSystem fs = FileSystem.get(file.toUri(), conf);
    if (fs.exists(file)) {
        fs.delete(file, true);
    }
    FSDataOutputStream out = fs.create(file);
    for (int i = 0; i < 10; ++i) {
        out.write("PIPPO".getBytes("UTF-8"));
    }
    out.close();
}

From source file:org.apache.carbondata.core.carbon.datastorage.filesystem.HDFSCarbonFileTest.java

License:Apache License

@BeforeClass
static public void setUp() throws IOException {
    Configuration config = new Configuration();
    //adding local hadoop configuration
    config.addResource(new Path("core-site.xml"));
    config.addResource(new Path("hdfs-site.xml"));
    fileName = "Test.carbondata"; //this path is HDFS path
    pt = new Path(fileName);
    fs = FileSystem.get(new Configuration(config));
    fs.create(pt);
    if (fs.exists(pt)) {
        OutputStream os = fs.create(pt, new Progressable() {
            public void progress() {
                LOGGER.info("Started Writing to File===");
            }
        });
        BufferedWriter br = new BufferedWriter(new OutputStreamWriter(os, "UTF-8"));
        br.write("Hello World");
        br.close();
        fs.close();

        fileStatus = new FileStatus(12L, true, 60, 120l, 180L, new Path(fileName));
        fileStatusWithOutDirectoryPermission = new FileStatus(12L, false, 60, 120l, 180L, new Path(fileName));
        hdfsCarbonFile = new HDFSCarbonFile(fileStatus);

    }
}
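
This example uses the Path overload, which reads the named files directly from the local filesystem rather than resolving them against the classpath.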

From source file:org.apache.eagle.jpm.mr.history.crawler.AbstractJobHistoryDAO.java

License:Apache License

@Override
public void readFileContent(int year, int month, int day, int serialNumber, String jobHistoryFileName,
        JHFInputStreamCallback reader) throws Exception {
    InputStream downloadIs;
    try {
        downloadIs = getJHFFileContentAsStream(year, month, day, serialNumber, jobHistoryFileName);
    } catch (FileNotFoundException ex) {
        LOG.error(
                "job history file not found " + jobHistoryFileName + ", ignore and will NOT process any more");
        return;
    }

    InputStream downloadJobConfIs = null;
    try {
        downloadJobConfIs = getJHFConfContentAsStream(year, month, day, serialNumber, jobHistoryFileName);
    } catch (FileNotFoundException ex) {
        LOG.warn("job configuration file of " + jobHistoryFileName
                + " not found , ignore and use empty configuration");
    }

    org.apache.hadoop.conf.Configuration conf = null;

    if (downloadJobConfIs != null) {
        conf = new org.apache.hadoop.conf.Configuration();
        conf.addResource(downloadJobConfIs);
    }

    try {
        if (downloadIs != null) {
            reader.onInputStream(downloadIs, conf);
        }
    } catch (Exception ex) {
        LOG.error("fail reading job history file", ex);
        throw ex;
    } catch (Throwable t) {
        LOG.error("fail reading job history file", t);
        throw new Exception(t);
    } finally {
        try {
            if (downloadJobConfIs != null) {
                downloadJobConfIs.close();
            }
            if (downloadIs != null) {
                downloadIs.close();
            }
        } catch (IOException e) {
            LOG.error(e.getMessage(), e);
        }
    }
}

From source file:org.apache.eagle.jpm.mr.history.JHFEventReaderBaseTest.java

License:Apache License

@Test
public void testParseConfiguration() throws Exception {
    Configuration conf = new org.apache.hadoop.conf.Configuration();
    conf.addResource("job_1479206441898_508949_conf.xml");

    final JobHistoryContentFilterBuilder builder = JobHistoryContentFilterBuilder.newBuilder().acceptJobFile()
            .acceptJobConfFile();
    List<String> confKeyPatterns = new ArrayList<>();
    confKeyPatterns.add(Constants.JobConfiguration.CASCADING_JOB);
    confKeyPatterns.add(Constants.JobConfiguration.HIVE_JOB);
    confKeyPatterns.add(Constants.JobConfiguration.PIG_JOB);
    confKeyPatterns.add(Constants.JobConfiguration.SCOOBI_JOB);
    for (String key : confKeyPatterns) {
        builder.includeJobKeyPatterns(Pattern.compile(key));
    }
    JobHistoryContentFilter filter = builder.build();

    MRHistoryJobConfig appConfig = MRHistoryJobConfig.newInstance(ConfigFactory.load());
    Map<String, String> tags = new HashMap<>();
    tags.put("site", "sandbox");
    tags.put("jobId", "job_1490593856016_152289");
    tags.put("jobType", "HIVE");
    tags.put("jobDefId", "INSERT OVERWRITE TABLE kyl...'2017-04-06')))(Stage-1)");
    JHFMRVer2EventReader reader = new JHFMRVer2EventReader(tags, conf, filter, appConfig);
    reader.addListener(new JobConfigurationCreationServiceListener(appConfig.getEagleServiceConfig()) {
        @Override
        public void jobEntityCreated(JobBaseAPIEntity entity) throws Exception {
            Assert.assertTrue(null != entity);
            Assert.assertTrue(entity instanceof JobConfigurationAPIEntity);
            JobConfigurationAPIEntity configurationAPIEntity = (JobConfigurationAPIEntity) entity;
            Assert.assertTrue(configurationAPIEntity.getJobConfig().getConfig().size() == 1);
        }
    });
    reader.parseConfiguration();
}

From source file:org.apache.eagle.service.hbase.EmbeddedHbase.java

License:Apache License

public void start(Configuration confMap) {
    try {
        util = new HBaseTestingUtility();
        Configuration conf = util.getConfiguration();
        if (confMap != null) {
            conf.addResource(confMap);
        }
        conf.setInt("test.hbase.zookeeper.property.clientPort", port);
        conf.set("zookeeper.znode.parent", znode);
        conf.setInt("hbase.zookeeper.property.maxClientCnxns", 200);

        conf.setInt("hbase.master.info.port", -1);//avoid port clobbering
        // start mini hbase cluster
        hbaseCluster = util.startMiniCluster();
        Configuration config = hbaseCluster.getConf();

        config.set("zookeeper.session.timeout", "120000");
        config.set("hbase.zookeeper.property.tickTime", "6000");
        config.set(HConstants.HBASE_CLIENT_PAUSE, "3000");
        config.set(HConstants.HBASE_CLIENT_RETRIES_NUMBER, "1");
        config.set(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, "60000");

        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                shutdown();
            }
        });
    } catch (Throwable t) {
        LOG.error("Got an exception: ", t);
    }
}

From source file:org.apache.falcon.hive.util.HiveDRUtils.java

License:Apache License

public static Configuration getDefaultConf() throws IOException {
    Configuration conf = new Configuration();

    if (System.getProperty("oozie.action.conf.xml") != null) {
        Path confPath = new Path("file:///", System.getProperty("oozie.action.conf.xml"));

        final boolean actionConfExists = confPath.getFileSystem(conf).exists(confPath);
        LOG.info("Oozie Action conf {} found ? {}", confPath, actionConfExists);
        if (actionConfExists) {
            LOG.info("Oozie Action conf found, adding path={}, conf={}", confPath, conf.toString());
            conf.addResource(confPath);
        }
    }

    String tokenFile = System.getenv("HADOOP_TOKEN_FILE_LOCATION");
    if (StringUtils.isNotBlank(tokenFile)) {
        if (Shell.WINDOWS) {
            if (tokenFile.charAt(0) == '"') {
                tokenFile = tokenFile.substring(1);
            }
            if (tokenFile.charAt(tokenFile.length() - 1) == '"') {
                tokenFile = tokenFile.substring(0, tokenFile.length() - 1);
            }
        }

        conf.set("mapreduce.job.credentials.binary", tokenFile);
        System.setProperty("mapreduce.job.credentials.binary", tokenFile);
        conf.set("tez.credentials.path", tokenFile);
        System.setProperty("tez.credentials.path", tokenFile);
    }

    return conf;
}