List of usage examples for org.apache.hadoop.conf.Configuration.toString()
@Override
public String toString()
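Before the project examples, here is a minimal self-contained sketch of the method in isolation (assuming only hadoop-common on the classpath; the class name is illustrative):

import org.apache.hadoop.conf.Configuration;

public class ConfigurationToStringDemo {
    public static void main(String[] args) {
        // The default constructor loads core-default.xml and, if present, core-site.xml
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        // toString() reports the list of loaded resources, not the key/value pairs,
        // e.g. "Configuration: core-default.xml, core-site.xml"
        System.out.println(conf.toString());
    }
}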
From source file:com.uber.hoodie.common.util.FSUtils.java
License:Apache License
public static FileSystem getFs(String path, Configuration conf) {
    FileSystem fs;
    conf = prepareHadoopConf(conf);
    try {
        fs = new Path(path).getFileSystem(conf);
    } catch (IOException e) {
        throw new HoodieIOException("Failed to get instance of " + FileSystem.class.getName(), e);
    }
    LOG.info(String.format("Hadoop Configuration: fs.defaultFS: [%s], Config:[%s], FileSystem: [%s]",
            conf.getRaw("fs.defaultFS"), conf.toString(), fs.toString()));
    return fs;
}
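Because Configuration.toString() only lists the loaded resources, the Config:[%s] field in the log line above records which configuration files were in effect rather than their contents; logging actual values would require something like the static Configuration.dumpConfiguration(conf, writer) helper.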
From source file:fi.aalto.hacid.HAcidClient.java
License:Apache License
/**
 * Tells whether or not HAcid is installed at the HBase site referred to by config.
 *
 * @param config
 * @return Whether or not HAcid is installed at the HBase site
 * @throws Exception
 */
public static boolean isInstalled(Configuration config) throws Exception {
    if (config == null) {
        // Note: this only logs; a null config still reaches the HBaseAdmin
        // constructor below and triggers a NullPointerException there.
        LOG.error("No connection to HBase. Configuration is null");
    }
    HBaseAdmin admin = new HBaseAdmin(config);
    LOG.trace("Checking whether HAcid is installed in " + config.toString());
    return admin.tableExists(Schema.TABLE_TIMESTAMP_LOG);
}
From source file:fi.aalto.hacid.HAcidClient.java
License:Apache License
/**
 * Sets up an HBase data store to use HAcid. Creates the HAcid metadata tables,
 * and prepares each user table to be used by HAcid.
 *
 * @param config
 *            The configuration data of the HBase site where the HAcid metadata
 *            tables will stay.
 * @param allUserTables
 *            A Collection of user tables to be prepared for use in HAcid.
 * @throws Exception
 */
private static void install(Configuration config, Collection<HTable> allUserTables) throws Exception {
    if (config == null) {
        LOG.error("No connection to HBase. Configuration is null");
    }
    HBaseAdmin admin = new HBaseAdmin(config);
    LOG.info("Installing HAcid in " + config.toString() + " ...");

    // Opens the Timestamp Log table, first initializing it if necessary
    if (!admin.tableExists(Schema.TABLE_TIMESTAMP_LOG)) {
        LOG.debug("Creating HAcid Timestamp Log table");
        HTableDescriptor logDescriptor = new HTableDescriptor(Schema.TABLE_TIMESTAMP_LOG);
        HColumnDescriptor hacidFamily = new HColumnDescriptor(Schema.FAMILY_HACID);
        HColumnDescriptor writesetFamily = new HColumnDescriptor(Schema.FAMILY_WRITESET);
        HColumnDescriptor readsetFamily = new HColumnDescriptor(Schema.FAMILY_READSET);
        hacidFamily.setMaxVersions(1);
        writesetFamily.setMaxVersions(1);
        readsetFamily.setMaxVersions(1);
        logDescriptor.addFamily(hacidFamily);
        logDescriptor.addFamily(writesetFamily);
        logDescriptor.addFamily(readsetFamily);
        admin.createTable(logDescriptor);
        HTable logTable = new HTable(config, Schema.TABLE_TIMESTAMP_LOG);

        // Marks the first timestamp as used (because user tables might be
        // initialized with this)
        Put firstPut = new Put(timestampToKey(Schema.TIMESTAMP_INITIAL_LONG));
        firstPut.add(Schema.FAMILY_HACID, Schema.QUALIFIER_TS_TYPE, Schema.TYPE_END);
        firstPut.add(Schema.FAMILY_HACID, Schema.QUALIFIER_START_TIMESTAMP, Schema.TIMESTAMP_INITIAL);
        firstPut.add(Schema.FAMILY_HACID, Schema.QUALIFIER_END_TIMESTAMP, Schema.TIMESTAMP_INITIAL);
        firstPut.add(Schema.FAMILY_HACID, Schema.QUALIFIER_TXN_STATE, Schema.STATE_COMMITTED);
        logTable.put(firstPut);
    }

    // Prepares all user tables
    if (allUserTables != null) {
        LOG.debug("Preparing a collection of user tables for use in HAcid");
        for (HTable usertable : allUserTables) {
            prepareUserTable(usertable);
        }
    }
}
From source file:fi.aalto.hacid.HAcidClient.java
License:Apache License
/**
 * Removes the HAcid metadata tables from HBase.
 *
 * @param config
 *            The configuration data of the HBase site where the HAcid metadata
 *            tables are.
 * @throws Exception
 */
public static void uninstall(Configuration config) throws Exception {
    if (config == null) {
        LOG.error("No connection to HBase. Configuration is null");
    }
    LOG.debug("Uninstalling HAcid from " + config.toString() + " ...");
    HBaseAdmin admin = new HBaseAdmin(config);
    admin.disableTable(Schema.TABLE_TIMESTAMP_LOG);
    admin.deleteTable(Schema.TABLE_TIMESTAMP_LOG);
}
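Note: the three HAcid snippets above target the pre-1.0 HBase client API. In HBase 1.0 and later, the new HBaseAdmin(Configuration) and new HTable(Configuration, ...) constructors are deprecated in favor of obtaining an Admin or Table from ConnectionFactory.createConnection(conf).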
From source file:gr.demokritos.iit.utilities.configuration.PersonalAIzHBaseConfiguration.java
License:Apache License
/**
 * Get HBase configuration based on stored settings.
 *
 * @return The HBase Configuration. The default values are
 *         hbase.zookeeper.quorum=localhost and
 *         hbase.zookeeper.property.clientPort=2181.
 */
public Configuration getHBaseConfig() {
    Configuration hbaseConfig = HBaseConfiguration.create();
    hbaseConfig.set("hbase.zookeeper.quorum", properties.getProperty("zookeeper.host", "localhost"));
    hbaseConfig.set("hbase.zookeeper.property.clientPort",
            properties.getProperty("hbase.zookeeper.property.clientPort", "2181"));
    LOGGER.debug(hbaseConfig.toString());
    return hbaseConfig;
}
From source file:hitune.analysis.mapreduce.processor.AnalysisProcessorManager.java
License:Apache License
public AnalysisProcessorManager(Map<String, List<Configuration>> conflist) {
    for (String sourcedata : conflist.keySet()) {
        log.info("Data source: " + sourcedata);
        List<Configuration> confs = conflist.get(sourcedata);
        for (Object item : confs.toArray()) {
            Configuration conf = (Configuration) item;
            log.debug(conf.toString());
            String processor = HiTuneAnalysis + "." + conf.get(AnalysisProcessorConfiguration.reportengine);
            log.info("AnalysisProcessor: " + processor);
            try {
                // Instantiates the processor reflectively via its Configuration constructor
                AnalysisProcessor engine = (AnalysisProcessor) Class.forName(processor)
                        .getConstructor(new Class[] { Configuration.class })
                        .newInstance(new Object[] { conf });
                engineList.add(engine);
            } catch (IllegalArgumentException e) {
                e.printStackTrace();
            } catch (SecurityException e) {
                e.printStackTrace();
            } catch (InstantiationException e) {
                e.printStackTrace();
            } catch (IllegalAccessException e) {
                e.printStackTrace();
            } catch (InvocationTargetException e) {
                e.printStackTrace();
            } catch (NoSuchMethodException e) {
                e.printStackTrace();
            } catch (ClassNotFoundException e) {
                e.printStackTrace();
            }
        }
    }
}
From source file:info.shelfunit.hbase.HBaseBuilder.java
License:Apache License
protected HBaseBuilder(Configuration conf) {
    // Despite the message, this logs the configuration's resource list, not a hostname
    log.info("Connecting to host: {}", conf.toString());
    this.conf = conf;
}
From source file:org.apache.ambari.view.slider.SliderAppsViewControllerImpl.java
License:Apache License
/**
 * Creates a new {@link SliderClient} initialized with appropriate
 * configuration and started. This slider client can be used to invoke
 * individual APIs.
 *
 * When work with this client is done,
 * {@link #destroySliderClient(SliderClient)} must be called.
 *
 * @return created {@link SliderClient}
 * @see #destroySliderClient(SliderClient)
 * @see #runSliderCommand(String...)
 */
protected SliderClient createSliderClient() {
    Configuration sliderClientConfiguration = getSliderClientConfiguration();
    SliderClient client = new SliderClient() {
        @Override
        public void init(Configuration conf) {
            super.init(conf);
            try {
                initHadoopBinding();
            } catch (SliderException e) {
                throw new RuntimeException("Unable to automatically init Hadoop binding", e);
            } catch (IOException e) {
                throw new RuntimeException("Unable to automatically init Hadoop binding", e);
            }
        }
    };
    try {
        if (logger.isDebugEnabled()) {
            logger.debug("Slider Client configuration: " + sliderClientConfiguration.toString());
        }
        sliderClientConfiguration = client.bindArgs(sliderClientConfiguration, new String[] { "help" });
        client.init(sliderClientConfiguration);
        client.start();
    } catch (Exception e) {
        logger.warn("Unable to create SliderClient", e);
        throw new RuntimeException(e.getMessage(), e);
    } catch (Throwable e) {
        logger.warn("Unable to create SliderClient", e);
        throw new RuntimeException(e.getMessage(), e);
    }
    return client;
}
From source file:org.apache.falcon.catalog.HiveCatalogService.java
License:Apache License
/**
 * This is used from within an Oozie job.
 *
 * @param conf conf object
 * @param metastoreUrl metastore uri
 * @return hive metastore client handle
 * @throws FalconException
 */
private static HiveMetaStoreClient createClient(Configuration conf, String metastoreUrl) throws FalconException {
    try {
        LOG.info("Creating HCatalog client object for metastore {} using conf {}", metastoreUrl, conf.toString());
        final Credentials credentials = getCredentials(conf);
        Configuration jobConf = credentials != null ? copyCredentialsToConf(conf, credentials) : conf;
        HiveConf hcatConf = createHiveConf(jobConf, metastoreUrl);
        if (UserGroupInformation.isSecurityEnabled()) {
            hcatConf.set(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname,
                    conf.get(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname));
            hcatConf.set(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname, "true");

            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            ugi.addCredentials(credentials); // credentials cannot be null
        }
        return new HiveMetaStoreClient(hcatConf);
    } catch (Exception e) {
        throw new FalconException("Exception creating HiveMetaStoreClient: " + e.getMessage(), e);
    }
}
From source file:org.apache.falcon.hive.util.HiveDRUtils.java
License:Apache License
public static Configuration getDefaultConf() throws IOException {
    Configuration conf = new Configuration();

    if (System.getProperty("oozie.action.conf.xml") != null) {
        Path confPath = new Path("file:///", System.getProperty("oozie.action.conf.xml"));
        final boolean actionConfExists = confPath.getFileSystem(conf).exists(confPath);
        LOG.info("Oozie Action conf {} found ? {}", confPath, actionConfExists);
        if (actionConfExists) {
            LOG.info("Oozie Action conf found, adding path={}, conf={}", confPath, conf.toString());
            conf.addResource(confPath);
        }
    }

    String tokenFile = System.getenv("HADOOP_TOKEN_FILE_LOCATION");
    if (StringUtils.isNotBlank(tokenFile)) {
        if (Shell.WINDOWS) {
            // Strips surrounding quotes that Windows may add to the path
            if (tokenFile.charAt(0) == '"') {
                tokenFile = tokenFile.substring(1);
            }
            if (tokenFile.charAt(tokenFile.length() - 1) == '"') {
                tokenFile = tokenFile.substring(0, tokenFile.length() - 1);
            }
        }
        conf.set("mapreduce.job.credentials.binary", tokenFile);
        System.setProperty("mapreduce.job.credentials.binary", tokenFile);
        conf.set("tez.credentials.path", tokenFile);
        System.setProperty("tez.credentials.path", tokenFile);
    }
    return conf;
}