Example usage for org.apache.hadoop.conf Configuration addResource

Introduction

This page collects usage examples of org.apache.hadoop.conf.Configuration#addResource, drawn from open-source projects.

Prototype

public void addResource(Configuration conf) 

Document

Add a configuration resource.
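
Note that addResource is overloaded: besides the Configuration overload shown in the prototype, the examples below also use the String (classpath resource name), org.apache.hadoop.fs.Path, and InputStream variants. Below is a minimal, self-contained sketch of the prototype's Configuration overload; the resource name my-overrides.xml is hypothetical:

import org.apache.hadoop.conf.Configuration;

public class AddResourceExample {
    public static void main(String[] args) {
        // Passing "false" skips loading the default resources
        // (core-default.xml and core-site.xml).
        Configuration overrides = new Configuration(false);
        overrides.addResource("my-overrides.xml"); // hypothetical classpath resource

        Configuration base = new Configuration();

        // addResource(Configuration) registers the other configuration's
        // resources with this one; properties from resources added later
        // override those added earlier (unless marked final).
        base.addResource(overrides);

        System.out.println(base.get("fs.defaultFS"));
    }
}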

Usage

From source file:co.cask.cdap.test.ConfigurableTestBase.java

License:Apache License

private static void initialize(@Nullable Map<String, String> additionalConfiguration) throws Exception {
    if (startCount++ > 0) {
        return;
    }
    File localDataDir = tmpFolder.newFolder();

    cConf = createCConf(localDataDir, additionalConfiguration);

    org.apache.hadoop.conf.Configuration hConf = new org.apache.hadoop.conf.Configuration();
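    // addResource(String) resolves the named resource from the classpath;
    // reloadConfiguration() below forces properties to be re-read from the
    // updated resource list.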
    hConf.addResource("mapred-site-local.xml");
    hConf.reloadConfiguration();
    hConf.set(Constants.CFG_LOCAL_DATA_DIR, localDataDir.getAbsolutePath());
    hConf.set(Constants.AppFabric.OUTPUT_DIR, cConf.get(Constants.AppFabric.OUTPUT_DIR));
    hConf.set("hadoop.tmp.dir",
            new File(localDataDir, cConf.get(Constants.AppFabric.TEMP_DIR)).getAbsolutePath());

    // Windows specific requirements
    if (OSDetector.isWindows()) {
        File tmpDir = tmpFolder.newFolder();
        File binDir = new File(tmpDir, "bin");
        Assert.assertTrue(binDir.mkdirs());

        copyTempFile("hadoop.dll", tmpDir);
        copyTempFile("winutils.exe", binDir);
        System.setProperty("hadoop.home.dir", tmpDir.getAbsolutePath());
        System.load(new File(tmpDir, "hadoop.dll").getAbsolutePath());
    }

    Injector injector = Guice.createInjector(createDataFabricModule(),
            new DataSetsModules().getStandaloneModules(), new DataSetServiceModules().getInMemoryModules(),
            new ConfigModule(cConf, hConf), new IOModule(), new LocationRuntimeModule().getInMemoryModules(),
            new DiscoveryRuntimeModule().getInMemoryModules(),
            new AppFabricServiceRuntimeModule().getInMemoryModules(),
            new ServiceStoreModules().getInMemoryModules(),
            new InMemoryProgramRunnerModule(LocalStreamWriter.class), new AbstractModule() {
                @Override
                protected void configure() {
                    bind(StreamHandler.class).in(Scopes.SINGLETON);
                    bind(StreamFetchHandler.class).in(Scopes.SINGLETON);
                    bind(AbstractNamespaceClient.class).to(LocalNamespaceClient.class).in(Scopes.SINGLETON);
                    bind(StreamFileJanitorService.class).to(LocalStreamFileJanitorService.class)
                            .in(Scopes.SINGLETON);
                    bind(StreamWriterSizeCollector.class).to(BasicStreamWriterSizeCollector.class)
                            .in(Scopes.SINGLETON);
                    bind(StreamCoordinatorClient.class).to(InMemoryStreamCoordinatorClient.class)
                            .in(Scopes.SINGLETON);
                }
            },
            // todo: do we need handler?
            new MetricsHandlerModule(), new MetricsClientRuntimeModule().getInMemoryModules(),
            new LoggingModules().getInMemoryModules(), new ExploreRuntimeModule().getInMemoryModules(),
            new ExploreClientModule(), new NotificationFeedServiceRuntimeModule().getInMemoryModules(),
            new NotificationServiceRuntimeModule().getInMemoryModules(), new AbstractModule() {
                @Override
                @SuppressWarnings("deprecation")
                protected void configure() {
                    install(new FactoryModuleBuilder()
                            .implement(ApplicationManager.class, DefaultApplicationManager.class)
                            .build(ApplicationManagerFactory.class));
                    install(new FactoryModuleBuilder().implement(StreamWriter.class, DefaultStreamWriter.class)
                            .build(StreamWriterFactory.class));
                    install(new FactoryModuleBuilder()
                            .implement(StreamManager.class, DefaultStreamManager.class)
                            .build(StreamManagerFactory.class));
                    bind(TemporaryFolder.class).toInstance(tmpFolder);
                }
            });

    txService = injector.getInstance(TransactionManager.class);
    txService.startAndWait();
    dsOpService = injector.getInstance(DatasetOpExecutor.class);
    dsOpService.startAndWait();
    datasetService = injector.getInstance(DatasetService.class);
    datasetService.startAndWait();
    metricsQueryService = injector.getInstance(MetricsQueryService.class);
    metricsQueryService.startAndWait();
    metricsCollectionService = injector.getInstance(MetricsCollectionService.class);
    metricsCollectionService.startAndWait();
    schedulerService = injector.getInstance(SchedulerService.class);
    schedulerService.startAndWait();
    if (cConf.getBoolean(Constants.Explore.EXPLORE_ENABLED)) {
        exploreExecutorService = injector.getInstance(ExploreExecutorService.class);
        exploreExecutorService.startAndWait();
        exploreClient = injector.getInstance(ExploreClient.class);
    }
    streamCoordinatorClient = injector.getInstance(StreamCoordinatorClient.class);
    streamCoordinatorClient.startAndWait();
    testManager = injector.getInstance(UnitTestManager.class);
    namespaceAdmin = injector.getInstance(NamespaceAdmin.class);
    // we use MetricStore directly, until RuntimeStats API changes
    RuntimeStats.metricStore = injector.getInstance(MetricStore.class);
    namespaceAdmin.createNamespace(Constants.DEFAULT_NAMESPACE_META);
}

From source file:co.cask.cdap.test.TestBase.java

License:Apache License

@BeforeClass
public static void initialize() throws Exception {
    if (startCount++ > 0) {
        return;
    }
    File localDataDir = TMP_FOLDER.newFolder();

    cConf = createCConf(localDataDir);

    org.apache.hadoop.conf.Configuration hConf = new org.apache.hadoop.conf.Configuration();
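    // Same pattern as ConfigurableTestBase above: merge mapred-site-local.xml
    // from the classpath before applying local overrides.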
    hConf.addResource("mapred-site-local.xml");
    hConf.reloadConfiguration();
    hConf.set(Constants.CFG_LOCAL_DATA_DIR, localDataDir.getAbsolutePath());
    hConf.set(Constants.AppFabric.OUTPUT_DIR, cConf.get(Constants.AppFabric.OUTPUT_DIR));
    hConf.set("hadoop.tmp.dir",
            new File(localDataDir, cConf.get(Constants.AppFabric.TEMP_DIR)).getAbsolutePath());

    // Windows specific requirements
    if (OSDetector.isWindows()) {
        File tmpDir = TMP_FOLDER.newFolder();
        File binDir = new File(tmpDir, "bin");
        Assert.assertTrue(binDir.mkdirs());

        copyTempFile("hadoop.dll", tmpDir);
        copyTempFile("winutils.exe", binDir);
        System.setProperty("hadoop.home.dir", tmpDir.getAbsolutePath());
        System.load(new File(tmpDir, "hadoop.dll").getAbsolutePath());
    }

    Injector injector = Guice.createInjector(createDataFabricModule(), new TransactionExecutorModule(),
            new DataSetsModules().getStandaloneModules(), new DataSetServiceModules().getInMemoryModules(),
            new ConfigModule(cConf, hConf), new IOModule(), new LocationRuntimeModule().getInMemoryModules(),
            new DiscoveryRuntimeModule().getInMemoryModules(),
            new AppFabricServiceRuntimeModule().getInMemoryModules(),
            new ServiceStoreModules().getInMemoryModules(),
            new InMemoryProgramRunnerModule(LocalStreamWriter.class), new AbstractModule() {
                @Override
                protected void configure() {
                    bind(StreamHandler.class).in(Scopes.SINGLETON);
                    bind(StreamFetchHandler.class).in(Scopes.SINGLETON);
                    bind(StreamViewHttpHandler.class).in(Scopes.SINGLETON);
                    bind(StreamFileJanitorService.class).to(LocalStreamFileJanitorService.class)
                            .in(Scopes.SINGLETON);
                    bind(StreamWriterSizeCollector.class).to(BasicStreamWriterSizeCollector.class)
                            .in(Scopes.SINGLETON);
                    bind(StreamCoordinatorClient.class).to(InMemoryStreamCoordinatorClient.class)
                            .in(Scopes.SINGLETON);
                    bind(MetricsManager.class).toProvider(MetricsManagerProvider.class);
                }
            },
            // todo: do we need handler?
            new MetricsHandlerModule(), new MetricsClientRuntimeModule().getInMemoryModules(),
            new LoggingModules().getInMemoryModules(), new ExploreRuntimeModule().getInMemoryModules(),
            new ExploreClientModule(), new NotificationFeedServiceRuntimeModule().getInMemoryModules(),
            new NotificationServiceRuntimeModule().getInMemoryModules(),
            new NamespaceClientRuntimeModule().getStandaloneModules(),
            new NamespaceStoreModule().getStandaloneModules(), new AuthorizationModule(), new AbstractModule() {
                @Override
                @SuppressWarnings("deprecation")
                protected void configure() {
                    install(new FactoryModuleBuilder()
                            .implement(ApplicationManager.class, DefaultApplicationManager.class)
                            .build(ApplicationManagerFactory.class));
                    install(new FactoryModuleBuilder()
                            .implement(ArtifactManager.class, DefaultArtifactManager.class)
                            .build(ArtifactManagerFactory.class));
                    install(new FactoryModuleBuilder()
                            .implement(StreamManager.class, DefaultStreamManager.class)
                            .build(StreamManagerFactory.class));
                    bind(TemporaryFolder.class).toInstance(TMP_FOLDER);
                    bind(AuthorizationHandler.class).in(Scopes.SINGLETON);
                }
            });

    txService = injector.getInstance(TransactionManager.class);
    txService.startAndWait();
    dsOpService = injector.getInstance(DatasetOpExecutor.class);
    dsOpService.startAndWait();
    datasetService = injector.getInstance(DatasetService.class);
    datasetService.startAndWait();
    metricsQueryService = injector.getInstance(MetricsQueryService.class);
    metricsQueryService.startAndWait();
    metricsCollectionService = injector.getInstance(MetricsCollectionService.class);
    metricsCollectionService.startAndWait();
    schedulerService = injector.getInstance(SchedulerService.class);
    schedulerService.startAndWait();
    if (cConf.getBoolean(Constants.Explore.EXPLORE_ENABLED)) {
        exploreExecutorService = injector.getInstance(ExploreExecutorService.class);
        exploreExecutorService.startAndWait();
        exploreClient = injector.getInstance(ExploreClient.class);
    }
    streamCoordinatorClient = injector.getInstance(StreamCoordinatorClient.class);
    streamCoordinatorClient.startAndWait();
    testManager = injector.getInstance(UnitTestManager.class);
    metricsManager = injector.getInstance(MetricsManager.class);
    authorizerInstantiatorService = injector.getInstance(AuthorizerInstantiatorService.class);
    authorizerInstantiatorService.startAndWait();
    // This is needed so the logged-in user can successfully create the default namespace
    if (cConf.getBoolean(Constants.Security.Authorization.ENABLED)) {
        InstanceId instance = new InstanceId(cConf.get(Constants.INSTANCE_NAME));
        Principal principal = new Principal(SecurityRequestContext.getUserId(), Principal.PrincipalType.USER);
        authorizerInstantiatorService.get().grant(instance, principal, ImmutableSet.of(Action.ADMIN));
    }
    namespaceAdmin = injector.getInstance(NamespaceAdmin.class);
    namespaceAdmin.create(NamespaceMeta.DEFAULT);
}

From source file:com.adr.dataclient.ui.ClientConfig.java

License:Open Source License

public Configuration getConfiguration() {
    Configuration config = HBaseConfiguration.create();
    if (configproperties != null && !configproperties.equals("")) {
        try {
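            // addResource(InputStream) parses the configuration XML directly
            // from the supplied stream.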
            config.addResource(new ByteArrayInputStream(configproperties.getBytes("UTF-8")));
            config.reloadConfiguration();
        } catch (UnsupportedEncodingException ex) {
            logger.log(Level.SEVERE, null, ex);
        }
    }
    return config;
}

From source file:com.alibaba.wasp.conf.WaspConfiguration.java

License:Apache License

public static Configuration addWaspResources(Configuration conf) {
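    // Resources are applied in order, so properties in wasp-site.xml
    // override the defaults from wasp-default.xml.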
    conf.addResource("wasp-default.xml");
    conf.addResource("wasp-site.xml");

    checkDefaultsVersion(conf);
    return conf;
}

From source file:com.aliyun.odps.fs.VolumeFileSystem.java

License:Apache License

@Override
public void initialize(URI uri, Configuration conf) throws IOException {
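    // Register the volume filesystem's configuration file before the
    // superclass initialization reads the merged configuration.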
    conf.addResource(VolumeFSConstants.VOLUME_FS_CONFIG_FILE);
    super.initialize(uri, conf);
    setConf(conf);
    checkURI(uri);
    this.project = resolveProject(uri);
    this.volumeClient = createVolumeClient(conf);
    this.uri = URI.create(uri.getScheme() + VolumeFSConstants.SCHEME_SEPARATOR + uri.getAuthority());
    this.homeVolume = getHomeVolume(conf);
    this.workingDir = getHomeDirectory();
    this.defaultReplication = (short) conf.getInt(VolumeFileSystemConfigKeys.DFS_REPLICATION_KEY,
            VolumeFSConstants.DFS_REPLICATION_DEFAULT);
}

From source file:com.amintor.hdfs.client.kerberizedhdfsclient.KerberizedHDFSClient.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {

    try {
        Configuration conf = new Configuration();
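        // Load hdfs-site.xml and core-site.xml from explicit file paths via
        // the InputStream overload.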
        conf.addResource(new FileInputStream(HDFS_SITE_LOCATION));
        conf.addResource(new FileInputStream(CORE_SITE_LOCATION));
        String authType = conf.get("hadoop.security.authentication");
        System.out.println("Authentication Type:" + authType);
        if (authType.trim().equalsIgnoreCase("kerberos")) {
            // Login through UGI keytab
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab("vijay", "/Users/vsingh/Software/vijay.keytab");
            FileSystem hdFS = FileSystem.get(conf);
            FileStatus[] listStatus = hdFS.listStatus(new Path(args[0]));
            for (FileStatus statusFile : listStatus) {
                System.out.print("Replication:" + statusFile.getReplication() + "\t");
                System.out.print("Owner:" + statusFile.getOwner() + "\t");
                System.out.print("Group:" + statusFile.getGroup() + "\t");
                System.out.println("Path:" + statusFile.getPath() + "\t");
            }

        }
    } catch (IOException ex) {
        Logger.getLogger(KerberizedHDFSClient.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:com.antsdb.saltedfish.storage.HBaseUtilMain.java

License:Open Source License

private void connectUseConfig(String optionValue) throws IOException {
    Configuration conf = HBaseConfiguration.create();
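    // addResource(Path) loads a configuration XML file from the filesystem
    // path supplied on the command line.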
    conf.addResource(new Path(optionValue));
    println("Connecting to server %s ...", conf.get("hbase.zookeeper.quorum"));
    this.conn = ConnectionFactory.createConnection(conf);
}

From source file:com.appDataPushAgent.bug.ApplicationTest.java

@Test
public void testApplication() throws IOException, Exception {
    try {
        LocalMode lma = LocalMode.newInstance();
        Configuration conf = new Configuration(false);
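        // Passing "false" skips the Hadoop default resources; all settings
        // come from META-INF/properties.xml on the test classpath.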
        conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties.xml"));
        lma.prepareDAG(new Application(), conf);
        LocalMode.Controller lc = lma.getController();
        lc.run(10000); // runs for 10 seconds and quits
    } catch (ConstraintViolationException e) {
        Assert.fail("constraint violations: " + e.getConstraintViolations());
    }
}

From source file:com.armon.test.quartz.QuartzConfiguration.java

License:Apache License

public static Configuration addQuartzResources(Configuration conf) {
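    // quartz-site.xml is resolved from the classpath by name.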
    conf.addResource("quartz-site.xml");

    return conf;
}

From source file:com.asakusafw.bridge.launch.HadoopPropertiesOption.java

License:Apache License

@Override
protected Map<String, String> extract(File file) throws LaunchConfigurationException {
    LOG.debug("loading file: {} ({})", file, COMMAND); //$NON-NLS-1$
    Configuration configuration = new Configuration(false);
    try (InputStream in = new FileInputStream(file)) {
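        // Parse the file as Hadoop configuration XML via the stream overload;
        // iterating the Configuration yields every key/value pair it defines.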
        configuration.addResource(in);
        Map<String, String> results = new TreeMap<>();
        for (Map.Entry<String, String> entry : configuration) {
            results.put(entry.getKey(), entry.getValue());
        }
        return results;
    } catch (IOException e) {
        throw new LaunchConfigurationException(MessageFormat
                .format("error occurred while loading configuration file: {1} ({0})", COMMAND, file), e);
    }
}