List of usage examples for org.apache.hadoop.conf.Configuration.get(String, String)

public String get(String name, String defaultValue)

Gets the value of the name property. If no such property exists, then defaultValue is returned.
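Before the examples below, here is a minimal, self-contained sketch of the call. The property key "my.sample.property" is a made-up placeholder, not a real Hadoop setting:

import org.apache.hadoop.conf.Configuration;

public class ConfigurationGetExample {
  public static void main(String[] args) {
    // A fresh Configuration; the default resources do not define this made-up key.
    Configuration conf = new Configuration();

    // "my.sample.property" is a hypothetical key; since it is not set anywhere,
    // get(name, defaultValue) returns the supplied default.
    String value = conf.get("my.sample.property", "default-value");
    System.out.println(value); // prints "default-value"

    // Once the property is set, get() returns the stored value instead of the default.
    conf.set("my.sample.property", "configured-value");
    System.out.println(conf.get("my.sample.property", "default-value")); // prints "configured-value"
  }
}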
From source file:co.cask.cdap.data2.util.hbase.HBaseTableUtil.java
License:Apache License
private void setDefaultConfiguration(HTableDescriptor tableDescriptor, Configuration conf) {
  String compression = conf.get(CFG_HBASE_TABLE_COMPRESSION, DEFAULT_COMPRESSION_TYPE.name());
  CompressionType compressionAlgo = CompressionType.valueOf(compression);
  for (HColumnDescriptor hcd : tableDescriptor.getColumnFamilies()) {
    setCompression(hcd, compressionAlgo);
    setBloomFilter(hcd, BloomType.ROW);
  }
}
From source file:co.cask.cdap.explore.service.ExploreServiceUtils.java
License:Apache License
/**
 * Changes the yarn-site.xml file, and returns a temp copy of it to which the necessary options have been added.
 */
private static File updateYarnConfFile(File confFile, File tempDir) {
  Configuration conf = new Configuration(false);
  try {
    conf.addResource(confFile.toURI().toURL());
  } catch (MalformedURLException e) {
    LOG.error("File {} is malformed.", confFile, e);
    throw Throwables.propagate(e);
  }

  String yarnAppClassPath = conf.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
      Joiner.on(",").join(YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH));

  // Add $PWD/* at the beginning of the classpath so the user's jar takes precedence.
  // Without this change, job.jar would be at the beginning of the classpath, and since
  // job.jar has old guava version classes, we want $PWD/* to come before it.
  yarnAppClassPath = "$PWD/*," + yarnAppClassPath;

  conf.set(YarnConfiguration.YARN_APPLICATION_CLASSPATH, yarnAppClassPath);

  File newYarnConfFile = new File(tempDir, "yarn-site.xml");
  try (FileOutputStream os = new FileOutputStream(newYarnConfFile)) {
    conf.writeXml(os);
  } catch (IOException e) {
    LOG.error("Problem creating and writing to temporary yarn-conf.xml conf file at {}", newYarnConfFile, e);
    throw Throwables.propagate(e);
  }

  return newYarnConfFile;
}
From source file:co.cask.cdap.explore.service.ExploreServiceUtils.java
License:Apache License
/**
 * Changes the mapred-site.xml file, and returns a temp copy of it to which the necessary options have been added.
 */
private static File updateMapredConfFile(File confFile, File tempDir) {
  Configuration conf = new Configuration(false);
  try {
    conf.addResource(confFile.toURI().toURL());
  } catch (MalformedURLException e) {
    LOG.error("File {} is malformed.", confFile, e);
    throw Throwables.propagate(e);
  }

  String mrAppClassPath = conf.get(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH,
      MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH);

  // Add $PWD/* at the beginning of the classpath. Without this change, old jars from the
  // MR framework classpath get into the classpath.
  mrAppClassPath = "$PWD/*," + mrAppClassPath;

  conf.set(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH, mrAppClassPath);

  File newMapredConfFile = new File(tempDir, "mapred-site.xml");
  try (FileOutputStream os = new FileOutputStream(newMapredConfFile)) {
    conf.writeXml(os);
  } catch (IOException e) {
    LOG.error("Problem creating and writing to temporary mapred-site.xml conf file at {}", newMapredConfFile, e);
    throw Throwables.propagate(e);
  }

  return newMapredConfFile;
}
From source file:co.cask.cdap.internal.app.runtime.batch.dataset.output.MultipleOutputs.java
License:Apache License
/**
 * Adds a named output for the job.
 *
 * @param job job to add the named output
 * @param namedOutput named output name; it has to be a word, letters and numbers only (alphanumeric)
 * @param outputFormatClass name of the OutputFormat class
 * @param keyClass key class
 * @param valueClass value class
 * @param outputConfigs configurations for the output
 */
@SuppressWarnings("unchecked")
public static void addNamedOutput(Job job, String namedOutput, String outputFormatClass,
                                  Class<?> keyClass, Class<?> valueClass, Map<String, String> outputConfigs) {
  assertValidName(namedOutput);
  checkNamedOutputName(namedOutput, getNamedOutputsList(job), false);

  Configuration conf = job.getConfiguration();
  conf.set(MULTIPLE_OUTPUTS, conf.get(MULTIPLE_OUTPUTS, "") + " " + namedOutput);
  conf.set(MO_PREFIX + namedOutput + FORMAT, outputFormatClass);
  conf.setClass(MO_PREFIX + namedOutput + KEY, keyClass, Object.class);
  conf.setClass(MO_PREFIX + namedOutput + VALUE, valueClass, Object.class);

  ConfigurationUtil.setNamedConfigurations(conf, computePrefixName(namedOutput), outputConfigs);
}
From source file:co.cask.cdap.internal.app.runtime.batch.inmemory.LocalClientProtocolProvider.java
License:Apache License
@Override
public ClientProtocol create(Configuration conf) throws IOException {
  String framework = conf.get(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME);
  LOG.info("Using framework: " + framework);
  if (!MRConfig.LOCAL_FRAMEWORK_NAME.equals(framework)) {
    return null;
  }

  // We have to use something unique like "clocal" to make sure Hadoop's LocalClientProtocolProvider
  // will fail to provide the ClientProtocol
  String tracker = conf.get(JTConfig.JT_IPC_ADDRESS, "clocal");
  LOG.info("Using tracker: " + tracker);
  if ("clocal".equals(tracker)) {
    conf.setInt("mapreduce.job.maps", 1);
    return new LocalJobRunnerWithFix(conf);
  } else {
    throw new IOException("Invalid \"" + JTConfig.JT_IPC_ADDRESS
        + "\" configuration value for LocalJobRunner : \"" + tracker + "\"");
  }
}
From source file:co.cask.cdap.internal.app.runtime.batch.MapperWrapper.java
License:Apache License
/**
 * Wraps the mapper defined in the job with this {@link MapperWrapper} if it is defined.
 *
 * @param job The MapReduce job
 */
public static void wrap(Job job) {
  // NOTE: we don't use job.getMapperClass() as we don't need to load the user class here
  Configuration conf = job.getConfiguration();
  String mapClass = conf.get(MRJobConfig.MAP_CLASS_ATTR, Mapper.class.getName());
  conf.set(MapperWrapper.ATTR_MAPPER_CLASS, mapClass);
  job.setMapperClass(MapperWrapper.class);
}
From source file:co.cask.cdap.internal.app.runtime.batch.MapReduceContextProvider.java
License:Apache License
/**
 * Helper method to tell whether the MR job is running in local mode or not. This method doesn't really belong
 * to this class, but currently there is no better place for it.
 */
static boolean isLocal(Configuration hConf) {
  String mrFramework = hConf.get(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME);
  return MRConfig.LOCAL_FRAMEWORK_NAME.equals(mrFramework);
}
From source file:co.cask.cdap.template.etl.common.BatchFileFilter.java
License:Apache License
@Override
public void setConf(Configuration conf) {
  if (conf == null) {
    return;
  }
  pathName = conf.get(FileBatchSource.INPUT_NAME_CONFIG, "/");
  // path is a directory, so remove the trailing '/'
  if (pathName.endsWith("/")) {
    pathName = pathName.substring(0, pathName.length() - 1);
  }
  String input = conf.get(FileBatchSource.INPUT_REGEX_CONFIG, ".*");
  if (input.equals(FileBatchSource.USE_TIMEFILTER)) {
    useTimeFilter = true;
  } else {
    useTimeFilter = false;
    regex = Pattern.compile(input);
  }
  lastRead = conf.get(FileBatchSource.LAST_TIME_READ, "-1");
  if (!lastRead.equals("-1")) {
    dateRangesToRead = GSON.fromJson(lastRead, ARRAYLIST_DATE_TYPE);
  }
  try {
    prevHour = sdf.parse(conf.get(FileBatchSource.CUTOFF_READ_TIME));
  } catch (ParseException pe) {
    prevHour = new Date(System.currentTimeMillis());
  }
}
From source file:co.cask.hydrator.plugin.common.BatchFileFilter.java
License:Apache License
@Override
public void setConf(Configuration conf) {
  if (conf == null) {
    return;
  }
  pathName = conf.get(FileBatchSource.INPUT_NAME_CONFIG, "/");
  // path is a directory, so remove the trailing '/'
  if (pathName.endsWith("/")) {
    pathName = pathName.substring(0, pathName.length() - 1);
  }
  String input = conf.get(FileBatchSource.INPUT_REGEX_CONFIG);
  if (input.equals(FileBatchSource.USE_TIMEFILTER)) {
    useTimeFilter = true;
  } else {
    useTimeFilter = false;
    regex = Pattern.compile(input);
  }
  lastRead = conf.get(FileBatchSource.LAST_TIME_READ, "-1");
  if (!lastRead.equals("-1")) {
    dateRangesToRead = GSON.fromJson(lastRead, ARRAYLIST_DATE_TYPE);
  }
  try {
    prevHour = sdf.parse(conf.get(FileBatchSource.CUTOFF_READ_TIME));
  } catch (ParseException pe) {
    prevHour = new Date(System.currentTimeMillis());
  }
}
From source file:co.cask.hydrator.plugin.common.BatchXMLFileFilter.java
License:Apache License
@Override
public void setConf(@Nullable Configuration conf) {
  if (conf == null) {
    return;
  }
  pathName = conf.get(XMLInputFormat.XML_INPUTFORMAT_PATH_NAME, "/");
  // path is a directory, so remove the trailing '/'
  if (pathName.endsWith("/")) {
    pathName = pathName.substring(0, pathName.length() - 1);
  }
  String input = conf.get(XMLInputFormat.XML_INPUTFORMAT_PATTERN, ".*");
  regex = Pattern.compile(input);

  String processedFiles = conf.get(XMLInputFormat.XML_INPUTFORMAT_PROCESSED_FILES);
  if (!Strings.isNullOrEmpty(processedFiles)) {
    preProcessedFileList = GSON.fromJson(processedFiles, ARRAYLIST_PREPROCESSED_FILES);
  }
}