List of usage examples for org.apache.hadoop.conf Configuration reloadConfiguration
public synchronized void reloadConfiguration()
From source file: co.cask.cdap.StandaloneMain.java
License: Apache License
public static StandaloneMain create(CConfiguration cConf, Configuration hConf) { // This is needed to use LocalJobRunner with fixes (we have it in app-fabric). // For the modified local job runner hConf.addResource("mapred-site-local.xml"); hConf.reloadConfiguration(); // Due to incredibly stupid design of Limits class, once it is initialized, it keeps its settings. We // want to make sure it uses our settings in this hConf, so we have to force it initialize here before // someone else initializes it. Limits.init(hConf);/*from w w w . j av a 2 s. c o m*/ File localDataDir = new File(cConf.get(Constants.CFG_LOCAL_DATA_DIR)); hConf.set(Constants.CFG_LOCAL_DATA_DIR, localDataDir.getAbsolutePath()); hConf.set(Constants.AppFabric.OUTPUT_DIR, cConf.get(Constants.AppFabric.OUTPUT_DIR)); hConf.set("hadoop.tmp.dir", new File(localDataDir, cConf.get(Constants.AppFabric.TEMP_DIR)).getAbsolutePath()); // Windows specific requirements if (OSDetector.isWindows()) { // not set anywhere by the project, expected to be set from IDEs if running from the project instead of sdk // hadoop.dll is at cdap-unit-test\src\main\resources\hadoop.dll for some reason String hadoopDLLPath = System.getProperty("hadoop.dll.path"); if (hadoopDLLPath != null) { System.load(hadoopDLLPath); } else { // this is where it is when the standalone sdk is built String userDir = System.getProperty("user.dir"); System.load(Joiner.on(File.separator).join(userDir, "lib", "native", "hadoop.dll")); } } //Run dataset service on random port List<Module> modules = createPersistentModules(cConf, hConf); return new StandaloneMain(modules, cConf); }
From source file: co.cask.cdap.test.ConfigurableTestBase.java
License: Apache License
private static void initialize(@Nullable Map<String, String> additionalConfiguration) throws Exception { if (startCount++ > 0) { return;//from www . j av a2 s. c o m } File localDataDir = tmpFolder.newFolder(); cConf = createCConf(localDataDir, additionalConfiguration); org.apache.hadoop.conf.Configuration hConf = new org.apache.hadoop.conf.Configuration(); hConf.addResource("mapred-site-local.xml"); hConf.reloadConfiguration(); hConf.set(Constants.CFG_LOCAL_DATA_DIR, localDataDir.getAbsolutePath()); hConf.set(Constants.AppFabric.OUTPUT_DIR, cConf.get(Constants.AppFabric.OUTPUT_DIR)); hConf.set("hadoop.tmp.dir", new File(localDataDir, cConf.get(Constants.AppFabric.TEMP_DIR)).getAbsolutePath()); // Windows specific requirements if (OSDetector.isWindows()) { File tmpDir = tmpFolder.newFolder(); File binDir = new File(tmpDir, "bin"); Assert.assertTrue(binDir.mkdirs()); copyTempFile("hadoop.dll", tmpDir); copyTempFile("winutils.exe", binDir); System.setProperty("hadoop.home.dir", tmpDir.getAbsolutePath()); System.load(new File(tmpDir, "hadoop.dll").getAbsolutePath()); } Injector injector = Guice.createInjector(createDataFabricModule(), new DataSetsModules().getStandaloneModules(), new DataSetServiceModules().getInMemoryModules(), new ConfigModule(cConf, hConf), new IOModule(), new LocationRuntimeModule().getInMemoryModules(), new DiscoveryRuntimeModule().getInMemoryModules(), new AppFabricServiceRuntimeModule().getInMemoryModules(), new ServiceStoreModules().getInMemoryModules(), new InMemoryProgramRunnerModule(LocalStreamWriter.class), new AbstractModule() { @Override protected void configure() { bind(StreamHandler.class).in(Scopes.SINGLETON); bind(StreamFetchHandler.class).in(Scopes.SINGLETON); bind(AbstractNamespaceClient.class).to(LocalNamespaceClient.class).in(Scopes.SINGLETON); bind(StreamFileJanitorService.class).to(LocalStreamFileJanitorService.class) .in(Scopes.SINGLETON); bind(StreamWriterSizeCollector.class).to(BasicStreamWriterSizeCollector.class) 
.in(Scopes.SINGLETON); bind(StreamCoordinatorClient.class).to(InMemoryStreamCoordinatorClient.class) .in(Scopes.SINGLETON); } }, // todo: do we need handler? new MetricsHandlerModule(), new MetricsClientRuntimeModule().getInMemoryModules(), new LoggingModules().getInMemoryModules(), new ExploreRuntimeModule().getInMemoryModules(), new ExploreClientModule(), new NotificationFeedServiceRuntimeModule().getInMemoryModules(), new NotificationServiceRuntimeModule().getInMemoryModules(), new AbstractModule() { @Override @SuppressWarnings("deprecation") protected void configure() { install(new FactoryModuleBuilder() .implement(ApplicationManager.class, DefaultApplicationManager.class) .build(ApplicationManagerFactory.class)); install(new FactoryModuleBuilder().implement(StreamWriter.class, DefaultStreamWriter.class) .build(StreamWriterFactory.class)); install(new FactoryModuleBuilder() .implement(StreamManager.class, DefaultStreamManager.class) .build(StreamManagerFactory.class)); bind(TemporaryFolder.class).toInstance(tmpFolder); } }); txService = injector.getInstance(TransactionManager.class); txService.startAndWait(); dsOpService = injector.getInstance(DatasetOpExecutor.class); dsOpService.startAndWait(); datasetService = injector.getInstance(DatasetService.class); datasetService.startAndWait(); metricsQueryService = injector.getInstance(MetricsQueryService.class); metricsQueryService.startAndWait(); metricsCollectionService = injector.getInstance(MetricsCollectionService.class); metricsCollectionService.startAndWait(); schedulerService = injector.getInstance(SchedulerService.class); schedulerService.startAndWait(); if (cConf.getBoolean(Constants.Explore.EXPLORE_ENABLED)) { exploreExecutorService = injector.getInstance(ExploreExecutorService.class); exploreExecutorService.startAndWait(); exploreClient = injector.getInstance(ExploreClient.class); } streamCoordinatorClient = injector.getInstance(StreamCoordinatorClient.class); streamCoordinatorClient.startAndWait(); 
testManager = injector.getInstance(UnitTestManager.class); namespaceAdmin = injector.getInstance(NamespaceAdmin.class); // we use MetricStore directly, until RuntimeStats API changes RuntimeStats.metricStore = injector.getInstance(MetricStore.class); namespaceAdmin = injector.getInstance(NamespaceAdmin.class); namespaceAdmin.createNamespace(Constants.DEFAULT_NAMESPACE_META); }
From source file: co.cask.cdap.test.TestBase.java
License: Apache License
/**
 * One-time JUnit class setup: builds the local CDAP and Hadoop configurations,
 * wires the in-memory Guice modules, starts every backing service, and creates
 * the default namespace. Guarded by {@code startCount} so only the first of
 * possibly-nested suites performs the work.
 *
 * @throws Exception if folder creation, injection, or service startup fails
 */
@BeforeClass
public static void initialize() throws Exception {
    // Only the first caller initializes; later calls just bump the counter.
    if (startCount++ > 0) {
        return;
    }
    File localDataDir = TMP_FOLDER.newFolder();
    cConf = createCConf(localDataDir);
    org.apache.hadoop.conf.Configuration hConf = new org.apache.hadoop.conf.Configuration();
    // Pick up the modified LocalJobRunner settings, then force a re-parse so
    // they take effect.
    hConf.addResource("mapred-site-local.xml");
    hConf.reloadConfiguration();
    hConf.set(Constants.CFG_LOCAL_DATA_DIR, localDataDir.getAbsolutePath());
    hConf.set(Constants.AppFabric.OUTPUT_DIR, cConf.get(Constants.AppFabric.OUTPUT_DIR));
    hConf.set("hadoop.tmp.dir",
            new File(localDataDir, cConf.get(Constants.AppFabric.TEMP_DIR)).getAbsolutePath());
    // Windows specific requirements: stage hadoop.dll/winutils.exe, point
    // hadoop.home.dir at the staging folder, and load the native library.
    if (OSDetector.isWindows()) {
        File tmpDir = TMP_FOLDER.newFolder();
        File binDir = new File(tmpDir, "bin");
        Assert.assertTrue(binDir.mkdirs());
        copyTempFile("hadoop.dll", tmpDir);
        copyTempFile("winutils.exe", binDir);
        System.setProperty("hadoop.home.dir", tmpDir.getAbsolutePath());
        System.load(new File(tmpDir, "hadoop.dll").getAbsolutePath());
    }
    // Wire all in-memory/standalone modules for the test environment.
    Injector injector = Guice.createInjector(createDataFabricModule(), new TransactionExecutorModule(),
            new DataSetsModules().getStandaloneModules(), new DataSetServiceModules().getInMemoryModules(),
            new ConfigModule(cConf, hConf), new IOModule(), new LocationRuntimeModule().getInMemoryModules(),
            new DiscoveryRuntimeModule().getInMemoryModules(),
            new AppFabricServiceRuntimeModule().getInMemoryModules(),
            new ServiceStoreModules().getInMemoryModules(),
            new InMemoryProgramRunnerModule(LocalStreamWriter.class), new AbstractModule() {
                @Override
                protected void configure() {
                    bind(StreamHandler.class).in(Scopes.SINGLETON);
                    bind(StreamFetchHandler.class).in(Scopes.SINGLETON);
                    bind(StreamViewHttpHandler.class).in(Scopes.SINGLETON);
                    bind(StreamFileJanitorService.class).to(LocalStreamFileJanitorService.class)
                            .in(Scopes.SINGLETON);
                    bind(StreamWriterSizeCollector.class).to(BasicStreamWriterSizeCollector.class)
                            .in(Scopes.SINGLETON);
                    bind(StreamCoordinatorClient.class).to(InMemoryStreamCoordinatorClient.class)
                            .in(Scopes.SINGLETON);
                    bind(MetricsManager.class).toProvider(MetricsManagerProvider.class);
                }
            },
            // todo: do we need handler?
            new MetricsHandlerModule(), new MetricsClientRuntimeModule().getInMemoryModules(),
            new LoggingModules().getInMemoryModules(), new ExploreRuntimeModule().getInMemoryModules(),
            new ExploreClientModule(), new NotificationFeedServiceRuntimeModule().getInMemoryModules(),
            new NotificationServiceRuntimeModule().getInMemoryModules(),
            new NamespaceClientRuntimeModule().getStandaloneModules(),
            new NamespaceStoreModule().getStandaloneModules(), new AuthorizationModule(),
            new AbstractModule() {
                @Override
                @SuppressWarnings("deprecation")
                protected void configure() {
                    install(new FactoryModuleBuilder()
                            .implement(ApplicationManager.class, DefaultApplicationManager.class)
                            .build(ApplicationManagerFactory.class));
                    install(new FactoryModuleBuilder()
                            .implement(ArtifactManager.class, DefaultArtifactManager.class)
                            .build(ArtifactManagerFactory.class));
                    install(new FactoryModuleBuilder()
                            .implement(StreamManager.class, DefaultStreamManager.class)
                            .build(StreamManagerFactory.class));
                    bind(TemporaryFolder.class).toInstance(TMP_FOLDER);
                    bind(AuthorizationHandler.class).in(Scopes.SINGLETON);
                }
            });
    // Start the backing services in dependency order.
    txService = injector.getInstance(TransactionManager.class);
    txService.startAndWait();
    dsOpService = injector.getInstance(DatasetOpExecutor.class);
    dsOpService.startAndWait();
    datasetService = injector.getInstance(DatasetService.class);
    datasetService.startAndWait();
    metricsQueryService = injector.getInstance(MetricsQueryService.class);
    metricsQueryService.startAndWait();
    metricsCollectionService = injector.getInstance(MetricsCollectionService.class);
    metricsCollectionService.startAndWait();
    schedulerService = injector.getInstance(SchedulerService.class);
    schedulerService.startAndWait();
    // Explore services are only started when explicitly enabled in cConf.
    if (cConf.getBoolean(Constants.Explore.EXPLORE_ENABLED)) {
        exploreExecutorService = injector.getInstance(ExploreExecutorService.class);
        exploreExecutorService.startAndWait();
        exploreClient = injector.getInstance(ExploreClient.class);
    }
    streamCoordinatorClient = injector.getInstance(StreamCoordinatorClient.class);
    streamCoordinatorClient.startAndWait();
    testManager = injector.getInstance(UnitTestManager.class);
    metricsManager = injector.getInstance(MetricsManager.class);
    authorizerInstantiatorService = injector.getInstance(AuthorizerInstantiatorService.class);
    authorizerInstantiatorService.startAndWait();
    // This is needed so the logged-in user can successfully create the default namespace
    if (cConf.getBoolean(Constants.Security.Authorization.ENABLED)) {
        InstanceId instance = new InstanceId(cConf.get(Constants.INSTANCE_NAME));
        Principal principal = new Principal(SecurityRequestContext.getUserId(), Principal.PrincipalType.USER);
        authorizerInstantiatorService.get().grant(instance, principal, ImmutableSet.of(Action.ADMIN));
    }
    namespaceAdmin = injector.getInstance(NamespaceAdmin.class);
    namespaceAdmin.create(NamespaceMeta.DEFAULT);
}
From source file: com.adr.dataclient.ui.ClientConfig.java
License: Open Source License
public Configuration getConfiguration() { Configuration config = HBaseConfiguration.create(); if (configproperties != null && !configproperties.equals("")) { try {//from w ww . ja v a 2s . c o m config.addResource(new ByteArrayInputStream(configproperties.getBytes("UTF-8"))); config.reloadConfiguration(); } catch (UnsupportedEncodingException ex) { logger.log(Level.SEVERE, null, ex); } } return config; }
From source file: com.twitter.elephanttwin.util.HdfsUtils.java
License: Apache License
public static Configuration getHdfsConfiguration(String hdfsConfigPath) throws IOException { Configuration conf = new Configuration(true); conf.addResource(new Path(hdfsConfigPath)); conf.reloadConfiguration(); return conf;// w w w . j a va 2 s. c om }
From source file: de.bitocean.mm.MMAppRunner.java
public static void main(String[] args) throws Exception { Configuration cfg = new Configuration(); File cfgFile = EtoshaContextLogger.getCFGFile(); System.out.println(">>> CFG: " + cfgFile.getAbsolutePath()); System.out.println(">>> exists : " + cfgFile.exists()); /**// w w w . ja v a 2 s. c o m * according to: * * http://stackoverflow.com/questions/11478036/hadoop-configuration-property-returns-null * * we add the resource as a URI */ cfg.addResource(cfgFile.getAbsoluteFile().toURI().toURL()); cfg.reloadConfiguration(); System.out.println(cfg); System.out.println(cfg.getRaw("smw.url")); System.out.println(cfg.get("smw.pw")); System.out.println(cfg.get("smw.user")); // for SMW account System.out.println(cfg.get("smw.env")); SemanticContextBridge.overWriteEnvForLocaltest = false; int exitCode = ToolRunner.run(cfg, new MMAppRunner(), args); }
From source file: edu.berkeley.confspell.HSlurper.java
public void slurp(File f, OptionSet res) { Configuration c = new Configuration(false); c.addResource(new Path(f.getAbsolutePath())); // to search filesystem, not // classpath c.reloadConfiguration(); fromHConf(res, c);//from w w w . ja v a 2s . c o m }
From source file: org.elasticsearch.repositories.hdfs.HdfsBlobStoreContainerTests.java
License: Apache License
/**
 * Creates a Hadoop FileContext for the given URI, running as a synthetic
 * Hadoop user built via reflection (mirrors HdfsRepository.java behaviour).
 * Reflection is used because org.apache.hadoop.security.User is not public API.
 */
@SuppressForbidden(reason = "lesser of two evils (the other being a bunch of JNI/classloader nightmares)")
private FileContext createContext(URI uri) {
    // mirrors HdfsRepository.java behaviour
    Configuration cfg = new Configuration(true);
    cfg.setClassLoader(HdfsRepository.class.getClassLoader());
    cfg.reloadConfiguration();
    Constructor<?> ctor;
    Subject subject;
    // Obtain the package-private Hadoop User(String) constructor reflectively.
    try {
        Class<?> clazz = Class.forName("org.apache.hadoop.security.User");
        ctor = clazz.getConstructor(String.class);
        ctor.setAccessible(true);
    } catch (ClassNotFoundException | NoSuchMethodException e) {
        throw new RuntimeException(e);
    }
    // Wrap the current OS user in a read-write JAAS Subject containing that principal.
    try {
        Principal principal = (Principal) ctor.newInstance(System.getProperty("user.name"));
        subject = new Subject(false, Collections.singleton(principal), Collections.emptySet(),
                Collections.emptySet());
    } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
        throw new RuntimeException(e);
    }
    // disable file system cache
    cfg.setBoolean("fs.hdfs.impl.disable.cache", true);
    // set file system to TestingFs to avoid a bunch of security
    // checks, similar to what is done in HdfsTests.java
    cfg.set("fs.AbstractFileSystem." + uri.getScheme() + ".impl", TestingFs.class.getName());
    // create the FileContext with our user
    return Subject.doAs(subject, new PrivilegedAction<FileContext>() {
        @Override
        public FileContext run() {
            try {
                TestingFs fs = (TestingFs) AbstractFileSystem.get(uri, cfg);
                return FileContext.getFileContext(fs, cfg);
            } catch (UnsupportedFileSystemException e) {
                throw new RuntimeException(e);
            }
        }
    });
}