List of usage examples for org.apache.hadoop.conf.Configuration.getResource
public URL getResource(String name)
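Before the project examples, a minimal sketch of the method in isolation: getResource looks the named resource up on the classpath and returns its URL, or null if it cannot be found. The resource name and surrounding class below are illustrative assumptions, not taken from any of the source files that follow.

import java.net.URL;

import org.apache.hadoop.conf.Configuration;

public class GetResourceExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Resolve a configuration resource on the classpath; returns null when absent.
        URL url = conf.getResource("core-site.xml");
        System.out.println(url == null
                ? "core-site.xml is not on the classpath"
                : "core-site.xml found at " + url);
    }
}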
From source file:com.cloudera.oryx.lambda.batch.BatchUpdateFunction.java
License:Open Source License
@Override
public void call(JavaPairRDD<K, M> newData, Time timestamp) throws IOException, InterruptedException {
    if (newData.isEmpty()) {
        log.info("No data in current generation's RDD; nothing to do");
        return;
    }

    log.info("Beginning update at {}", timestamp);

    Configuration hadoopConf = sparkContext.hadoopConfiguration();
    if (hadoopConf.getResource("core-site.xml") == null) {
        log.warn("Hadoop config like core-site.xml was not found; "
                + "is the Hadoop config directory on the classpath?");
    }

    JavaPairRDD<K, M> pastData;
    Path inputPathPattern = new Path(dataDirString + "/*/part-*");
    FileSystem fs = FileSystem.get(inputPathPattern.toUri(), hadoopConf);
    FileStatus[] inputPathStatuses = fs.globStatus(inputPathPattern);
    if (inputPathStatuses == null || inputPathStatuses.length == 0) {
        log.info("No past data at path(s) {}", inputPathPattern);
        pastData = null;
    } else {
        log.info("Found past data at path(s) like {}", inputPathStatuses[0].getPath());
        Configuration updatedConf = new Configuration(hadoopConf);
        updatedConf.set(FileInputFormat.INPUT_DIR, joinFSPaths(fs, inputPathStatuses));
        @SuppressWarnings("unchecked")
        JavaPairRDD<Writable, Writable> pastWritableData = (JavaPairRDD<Writable, Writable>) sparkContext
                .newAPIHadoopRDD(updatedConf, SequenceFileInputFormat.class, keyWritableClass,
                        messageWritableClass);
        pastData = pastWritableData.mapToPair(
                new WritableToValueFunction<>(keyClass, messageClass, keyWritableClass, messageWritableClass));
    }

    if (updateTopic == null || updateBroker == null) {
        log.info("Not producing updates to update topic since none was configured");
        updateInstance.runUpdate(sparkContext, timestamp.milliseconds(), newData, pastData, modelDirString,
                null);
    } else {
        // This TopicProducer should not be async; sends one big model generally and
        // needs to occur before other updates reliably rather than be buffered
        try (TopicProducer<String, U> producer = new TopicProducerImpl<>(updateBroker, updateTopic, false)) {
            updateInstance.runUpdate(sparkContext, timestamp.milliseconds(), newData, pastData, modelDirString,
                    producer);
        }
    }
}
From source file:com.cloudera.recordservice.mr.PlanUtil.java
License:Apache License
/**
 * Return all configuration properties info (name, value, and source).
 * This is useful for debugging.
 * If `dumpAll` is false, only dump properties that start with 'recordservice'.
 * Otherwise, it dumps all properties in the `conf`.
 */
public static String dumpConfiguration(Configuration conf, boolean dumpAll) {
    // TODO: how do we handle SparkConf and SQLConf? Seems like they didn't offer
    // facility to track a property to its source.
    StringBuilder sb = new StringBuilder();
    sb.append('\n');
    sb.append("=============== Begin of Configuration Properties Info ===============");
    for (Map.Entry<String, String> e : conf) {
        if (!dumpAll && !e.getKey().startsWith("recordservice")) continue;
        String[] sources = conf.getPropertySources(e.getKey());
        String source;
        if (sources == null || sources.length == 0) {
            source = "Not Found";
        } else {
            // Only get the newest source that this property comes from.
            source = sources[sources.length - 1];
            URL url = conf.getResource(source);
            // If there's a URL with this resource, use that.
            if (url != null) source = url.toString();
        }
        sb.append('\n');
        sb.append(String.format("Property Name: %s\tValue: %s\tSource: %s", e.getKey(), e.getValue(), source));
    }
    sb.append('\n');
    sb.append("================ End of Configuration Properties Info ================");
    return sb.toString();
}
From source file:com.google.mr4c.hadoop.HadoopUtils.java
License:Open Source License
/**
 * Generates a human-readable string with the resource name and URL.
 */
public static String describeResource(Configuration conf, String name) {
    URL url = conf.getResource(name);
    return String.format("Resource %s found at %s", name, url);
}
From source file:com.inmobi.conduit.distcp.tools.DistCp.java
License:Apache License
/**
 * Setup ssl configuration on the job configuration to enable hsftp access
 * from map job. Also copy the ssl configuration file to Distributed cache
 *
 * @param configuration - Reference to job's configuration handle
 * @throws java.io.IOException - Exception if unable to locate ssl config file
 */
private void setupSSLConfig(Configuration configuration) throws IOException {
    Path sslConfigPath = new Path(
            configuration.getResource(inputOptions.getSslConfigurationFile()).toString());
    addSSLFilesToDistCache(configuration, sslConfigPath);
    configuration.set(DistCpConstants.CONF_LABEL_SSL_CONF, sslConfigPath.getName());
    configuration.set(DistCpConstants.CONF_LABEL_SSL_KEYSTORE, sslConfigPath.getName());
}
From source file:com.rockstor.util.RockConfiguration.java
License:Apache License
public static Configuration addRockStorResources(Configuration conf) {
    conf.addResource("rockstor-default.xml");
    conf.addResource("rockstor-site.xml");

    LOG.debug("rockstor.bin.home: " + conf.getResource("./"));
    LOG.debug("rockstor.default.config.path: " + conf.getResource("rockstor-default.xml"));
    LOG.debug("rockstor.user.config.path: " + conf.getResource("rockstor-site.xml"));

    checkDefaultsVersion(conf);
    return conf;
}
From source file:com.thinkbiganalytics.datalake.authorization.hdfs.HDFSUtil.java
License:Apache License
/**
 * @param configResources : Hadoop configuration resource
 */
public static Configuration getConfigurationFromResources(String configResources) throws IOException {
    boolean foundResources = false;
    final Configuration config = new Configuration();

    if (null != configResources) {
        String[] resources = configResources.split(",");
        for (String resource : resources) {
            config.addResource(new Path(resource.trim()));
            foundResources = true;
        }
    }

    if (!foundResources) {
        // check that at least 1 non-default resource is available on the classpath
        String configStr = config.toString();
        for (String resource : configStr.substring(configStr.indexOf(':') + 1).split(",")) {
            if (!resource.contains("default") && config.getResource(resource.trim()) != null) {
                foundResources = true;
                break;
            }
        }
    }

    if (!foundResources) {
        throw new IOException("Could not find any of the " + "hadoop conf" + " on the classpath");
    }

    return config;
}
From source file:com.thinkbiganalytics.nifi.security.ApplySecurityPolicy.java
License:Apache License
public static Configuration getConfigurationFromResources(String configResources) throws IOException {
    boolean foundResources = false;
    final Configuration config = new Configuration();

    if (null != configResources) {
        String[] resources = configResources.split(",");
        for (String resource : resources) {
            config.addResource(new Path(resource.trim()));
            foundResources = true;
        }
    }

    if (!foundResources) {
        // check that at least 1 non-default resource is available on the classpath
        String configStr = config.toString();
        for (String resource : configStr.substring(configStr.indexOf(":") + 1).split(",")) {
            if (!resource.contains("default") && config.getResource(resource.trim()) != null) {
                foundResources = true;
                break;
            }
        }
    }

    if (!foundResources) {
        throw new IOException("Could not find any of the " + "hadoop conf" + " on the classpath");
    }

    return config;
}
From source file:com.thinkbiganalytics.nifi.v2.hdfs.AbstractHadoopProcessor.java
License:Apache License
private static Configuration getConfigurationFromResources(String configResources) throws IOException {
    boolean foundResources = false;
    final Configuration config = new Configuration();

    if (null != configResources) {
        String[] resources = configResources.split(",");
        for (String resource : resources) {
            config.addResource(new Path(resource.trim()));
            foundResources = true;
        }
    }

    if (!foundResources) {
        // check that at least 1 non-default resource is available on the classpath
        String configStr = config.toString();
        for (String resource : configStr.substring(configStr.indexOf(":") + 1).split(",")) {
            if (!resource.contains("default") && config.getResource(resource.trim()) != null) {
                foundResources = true;
                break;
            }
        }
    }

    if (!foundResources) {
        throw new IOException(
                "Could not find any of the " + HADOOP_CONFIGURATION_RESOURCES.getName() + " on the classpath");
    }

    return config;
}
From source file:org.apache.nifi.processors.hadoop.AbstractHadoopProcessor.java
License:Apache License
private static Configuration getConfigurationFromResources(final Configuration config, String configResources)
        throws IOException {
    boolean foundResources = false;

    if (null != configResources) {
        String[] resources = configResources.split(",");
        for (String resource : resources) {
            config.addResource(new Path(resource.trim()));
            foundResources = true;
        }
    }

    if (!foundResources) {
        // check that at least 1 non-default resource is available on the classpath
        String configStr = config.toString();
        for (String resource : configStr.substring(configStr.indexOf(":") + 1).split(",")) {
            if (!resource.contains("default") && config.getResource(resource.trim()) != null) {
                foundResources = true;
                break;
            }
        }
    }

    if (!foundResources) {
        throw new IOException(
                "Could not find any of the " + HADOOP_CONFIGURATION_RESOURCES.getName() + " on the classpath");
    }

    return config;
}
From source file:org.apache.nifi.processors.kite.AbstractKiteProcessor.java
License:Apache License
protected static Configuration getConfiguration(String configFiles) {
    Configuration conf = DefaultConfiguration.get();

    if (configFiles == null || configFiles.isEmpty()) {
        return conf;
    }

    for (String file : COMMA.split(configFiles)) {
        // process each resource only once
        if (conf.getResource(file) == null) {
            // use Path instead of String to get the file from the FS
            conf.addResource(new Path(file));
        }
    }

    return conf;
}