List of usage examples for org.apache.hadoop.conf Configuration getStrings
public String[] getStrings(String name)
Parameter: name - the property name.
Return: the comma-delimited values of the name property as an array of Strings, or null if no such property is specified.
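Before the project examples below, here is a minimal self-contained sketch of the setStrings/getStrings round trip. The property names demo.hosts and demo.missing are illustrative, not taken from the examples:

import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;

public class GetStringsDemo {
    public static void main(String[] args) {
        Configuration conf = new Configuration(false); // don't load default resources
        // setStrings stores the values as one comma-separated property value
        conf.setStrings("demo.hosts", "host1", "host2", "host3");
        String[] hosts = conf.getStrings("demo.hosts");
        System.out.println(Arrays.toString(hosts)); // prints [host1, host2, host3]
        // An unset property yields null...
        System.out.println(Arrays.toString(conf.getStrings("demo.missing"))); // prints null
        // ...unless the overload that takes default values is used
        System.out.println(Arrays.toString(conf.getStrings("demo.missing", "fallback"))); // prints [fallback]
    }
}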
From source file:org.apache.oozie.service.TestLiteWorkflowAppService.java
License:Apache License
public void checkSubworkflowLibHelper(String inherit, String inheritWF, int unique, String[] parentLibs,
        String[] childLibs, String[] expectedLibs) throws Exception {
    Services services = new Services();
    try {
        services.getConf().set("oozie.subworkflow.classpath.inheritance", inherit);
        services.init();
        Reader reader = IOUtils.getResourceAsReader("wf-schema-valid.xml", -1);
        String childWFDir = createTestCaseSubDir("child-wf-" + unique);
        File childWFFile = new File(childWFDir, "workflow.xml");
        Writer writer = new FileWriter(childWFFile);
        IOUtils.copyCharStream(reader, writer);
        WorkflowAppService wps = Services.get().get(WorkflowAppService.class);
        Configuration jobConf = new XConfiguration();
        jobConf.set(OozieClient.APP_PATH, childWFFile.toURI().toString());
        jobConf.set(OozieClient.USER_NAME, getTestUser());
        if (inheritWF != null) {
            jobConf.set("oozie.wf.subworkflow.classpath.inheritance", inheritWF);
        }
        String childLibDir = createTestCaseSubDir("child-wf-" + unique, "lib");
        for (String childLib : childLibs) {
            writer = new FileWriter(new File(childLibDir, childLib));
            writer.write("bla bla");
            writer.close();
        }
        String parentLibDir = createTestCaseSubDir("parent-wf-" + unique, "lib");
        String[] parentLibsFullPaths = new String[parentLibs.length];
        for (int i = 0; i < parentLibs.length; i++) {
            parentLibsFullPaths[i] = new File(parentLibDir, parentLibs[i]).toString();
            writer = new FileWriter(parentLibsFullPaths[i]);
            writer.write("bla bla");
            writer.close();
        }
        // Set the parent libs
        jobConf.setStrings(WorkflowAppService.APP_LIB_PATH_LIST, parentLibsFullPaths);
        Configuration protoConf = wps.createProtoActionConf(jobConf, true);
        assertEquals(getTestUser(), protoConf.get(OozieClient.USER_NAME));
        String[] foundLibs = protoConf.getStrings(WorkflowAppService.APP_LIB_PATH_LIST);
        if (expectedLibs.length > 0) {
            assertEquals(expectedLibs.length, foundLibs.length);
            for (int i = 0; i < foundLibs.length; i++) {
                Path p = new Path(foundLibs[i]);
                foundLibs[i] = p.getName();
            }
            Arrays.sort(expectedLibs);
            Arrays.sort(foundLibs);
            assertEquals(Arrays.toString(expectedLibs), Arrays.toString(foundLibs));
        } else {
            assertEquals(null, foundLibs);
        }
    } finally {
        services.destroy();
    }
}
From source file:org.apache.oozie.service.WorkflowAppService.java
License:Apache License
/**
 * Create proto configuration.
 * <p>
 * The proto configuration includes the user, group and the paths which need to be
 * added to the distributed cache. These paths include the .jar, .so and resource file paths.
 *
 * @param jobConf job configuration.
 * @param isWorkflowJob indicates if the job is a workflow job or not.
 * @return proto configuration.
 * @throws WorkflowException thrown if the proto action configuration could not be created.
 */
public XConfiguration createProtoActionConf(Configuration jobConf, boolean isWorkflowJob)
        throws WorkflowException {
    try {
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        URI uri = new URI(jobConf.get(OozieClient.APP_PATH));
        Configuration conf = has.createJobConf(uri.getAuthority());
        XConfiguration protoConf = new XConfiguration();
        String user = jobConf.get(OozieClient.USER_NAME);
        conf.set(OozieClient.USER_NAME, user);
        protoConf.set(OozieClient.USER_NAME, user);
        FileSystem fs = has.createFileSystem(user, uri, conf);
        Path appPath = new Path(uri);
        XLog.getLog(getClass()).debug("jobConf.libPath = " + jobConf.get(OozieClient.LIBPATH));
        XLog.getLog(getClass()).debug("jobConf.appPath = " + appPath);
        Collection<String> filePaths;
        if (isWorkflowJob) {
            // app path could be a directory
            Path path = new Path(uri.getPath());
            if (!fs.isFile(path)) {
                filePaths = getLibFiles(fs, new Path(appPath + "/lib"));
            } else {
                filePaths = getLibFiles(fs, new Path(appPath.getParent(), "lib"));
            }
        } else {
            filePaths = new LinkedHashSet<String>();
        }
        String[] libPaths = jobConf.getStrings(OozieClient.LIBPATH);
        if (libPaths != null && libPaths.length > 0) {
            for (int i = 0; i < libPaths.length; i++) {
                if (libPaths[i].trim().length() > 0) {
                    Path libPath = new Path(libPaths[i].trim());
                    Collection<String> libFilePaths = getLibFiles(fs, libPath);
                    filePaths.addAll(libFilePaths);
                }
            }
        }
        // Check if a subworkflow should inherit the libs from the parent WF.
        // OOZIE_WF_SUBWORKFLOW_CLASSPATH_INHERITANCE has priority over
        // OOZIE_SUBWORKFLOW_CLASSPATH_INHERITANCE from oozie-site; if the former
        // isn't specified, OOZIE_SUBWORKFLOW_CLASSPATH_INHERITANCE is used.
        if (jobConf.getBoolean(OOZIE_WF_SUBWORKFLOW_CLASSPATH_INHERITANCE, oozieSubWfCPInheritance)) {
            // Keep any libs from a parent workflow that might already be in
            // APP_LIB_PATH_LIST, and also remove duplicates.
            String[] parentFilePaths = jobConf.getStrings(APP_LIB_PATH_LIST);
            if (parentFilePaths != null && parentFilePaths.length > 0) {
                String[] filePathsNames = filePaths.toArray(new String[filePaths.size()]);
                for (int i = 0; i < filePathsNames.length; i++) {
                    Path p = new Path(filePathsNames[i]);
                    filePathsNames[i] = p.getName();
                }
                Arrays.sort(filePathsNames);
                List<String> nonDuplicateParentFilePaths = new ArrayList<String>();
                for (String parentFilePath : parentFilePaths) {
                    Path p = new Path(parentFilePath);
                    if (Arrays.binarySearch(filePathsNames, p.getName()) < 0) {
                        nonDuplicateParentFilePaths.add(parentFilePath);
                    }
                }
                filePaths.addAll(nonDuplicateParentFilePaths);
            }
        }
        protoConf.setStrings(APP_LIB_PATH_LIST, filePaths.toArray(new String[filePaths.size()]));
        // Add all properties that start with 'oozie.'
        for (Map.Entry<String, String> entry : jobConf) {
            if (entry.getKey().startsWith("oozie.")) {
                String name = entry.getKey();
                String value = entry.getValue();
                // if the property already exists, do not overwrite it
                if (protoConf.get(name) == null) {
                    protoConf.set(name, value);
                }
            }
        }
        return protoConf;
    } catch (IOException ex) {
        throw new WorkflowException(ErrorCode.E0712, jobConf.get(OozieClient.APP_PATH), ex.getMessage(), ex);
    } catch (URISyntaxException ex) {
        throw new WorkflowException(ErrorCode.E0711, jobConf.get(OozieClient.APP_PATH), ex.getMessage(), ex);
    } catch (HadoopAccessorException ex) {
        throw new WorkflowException(ex);
    } catch (Exception ex) {
        throw new WorkflowException(ErrorCode.E0712, jobConf.get(OozieClient.APP_PATH), ex.getMessage(), ex);
    }
}
From source file:org.apache.oozie.service.WorkflowSchemaService.java
License:Apache License
private Schema loadSchema(Configuration conf) throws SAXException, IOException {
    List<StreamSource> sources = new ArrayList<StreamSource>();
    sources.add(new StreamSource(IOUtils.getResourceAsStream(OOZIE_WORKFLOW_XSD, -1)));
    String[] schemas = conf.getStrings(CONF_EXT_SCHEMAS);
    if (schemas != null) {
        for (String schema : schemas) {
            sources.add(new StreamSource(IOUtils.getResourceAsStream(schema, -1)));
        }
    }
    SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
    return factory.newSchema(sources.toArray(new StreamSource[sources.size()]));
}
From source file:org.apache.oozie.servlet.BaseJobServlet.java
License:Apache License
/**
 * Validate the configuration user/group.
 * <p>
 *
 * @param conf configuration.
 * @throws XServletException thrown if the configuration does not have a property
 * {@link org.apache.oozie.client.OozieClient#USER_NAME}.
 */
static void checkAuthorizationForApp(Configuration conf) throws XServletException {
    String user = conf.get(OozieClient.USER_NAME);
    String acl = ConfigUtils.getWithDeprecatedCheck(conf, OozieClient.GROUP_NAME, OozieClient.JOB_ACL, null);
    try {
        if (user == null) {
            throw new XServletException(HttpServletResponse.SC_BAD_REQUEST, ErrorCode.E0401,
                    OozieClient.USER_NAME);
        }
        AuthorizationService auth = Services.get().get(AuthorizationService.class);
        if (acl != null) {
            conf.set(OozieClient.GROUP_NAME, acl);
        } else if (acl == null && auth.useDefaultGroupAsAcl()) {
            acl = auth.getDefaultGroup(user);
            conf.set(OozieClient.GROUP_NAME, acl);
        }
        XLog.Info.get().setParameter(XLogService.GROUP, acl);
        String wfPath = conf.get(OozieClient.APP_PATH);
        String coordPath = conf.get(OozieClient.COORDINATOR_APP_PATH);
        String bundlePath = conf.get(OozieClient.BUNDLE_APP_PATH);
        if (wfPath == null && coordPath == null && bundlePath == null) {
            String[] libPaths = conf.getStrings(XOozieClient.LIBPATH);
            if (libPaths != null && libPaths.length > 0 && libPaths[0].trim().length() > 0) {
                conf.set(OozieClient.APP_PATH, libPaths[0].trim());
                wfPath = libPaths[0].trim();
            } else {
                throw new XServletException(HttpServletResponse.SC_BAD_REQUEST, ErrorCode.E0405);
            }
        }
        ServletUtilities.ValidateAppPath(wfPath, coordPath, bundlePath);
        if (wfPath != null) {
            auth.authorizeForApp(user, acl, wfPath, "workflow.xml", conf);
        } else if (coordPath != null) {
            auth.authorizeForApp(user, acl, coordPath, "coordinator.xml", conf);
        } else if (bundlePath != null) {
            auth.authorizeForApp(user, acl, bundlePath, "bundle.xml", conf);
        }
    } catch (AuthorizationException ex) {
        XLog.getLog(BaseJobServlet.class).info("AuthorizationException ", ex);
        throw new XServletException(HttpServletResponse.SC_UNAUTHORIZED, ex);
    }
}
From source file:org.apache.parquet.schema.StatisticsMetaData.java
License:Apache License
public StatisticsMetaData(Configuration conf) {
    paths_stats = new HashMap<String, Boolean>();
    String[] paths = conf.getStrings("stats_paths");
    if (paths != null) {
        for (int ix = 0; ix < paths.length; ix++) {
            paths_stats.put(paths[ix], new Boolean(true));
        }
    }
}
From source file:org.apache.phoenix.queryserver.server.Main.java
License:Apache License
/**
 * Logs information about the currently running JVM process including
 * the environment variables. Logging of env vars can be disabled by
 * setting {@code "phoenix.envvars.logging.disabled"} to {@code "true"}.
 * <p>If enabled, you can also exclude environment variables containing
 * certain substrings by setting {@code "phoenix.envvars.logging.skipwords"}
 * to a comma-separated list of such substrings.
 */
public static void logProcessInfo(Configuration conf) {
    // log environment variables unless asked not to
    if (conf == null || !conf.getBoolean(QueryServices.QUERY_SERVER_ENV_LOGGING_ATTRIB, false)) {
        Set<String> skipWords = new HashSet<String>(QueryServicesOptions.DEFAULT_QUERY_SERVER_SKIP_WORDS);
        if (conf != null) {
            String[] confSkipWords = conf.getStrings(QueryServices.QUERY_SERVER_ENV_LOGGING_SKIPWORDS_ATTRIB);
            if (confSkipWords != null) {
                skipWords.addAll(Arrays.asList(confSkipWords));
            }
        }
        nextEnv: for (Map.Entry<String, String> entry : System.getenv().entrySet()) {
            String key = entry.getKey().toLowerCase();
            String value = entry.getValue().toLowerCase();
            // exclude variables which may contain skip words
            for (String skipWord : skipWords) {
                if (key.contains(skipWord) || value.contains(skipWord)) {
                    continue nextEnv;
                }
            }
            LOG.info("env:" + entry);
        }
    }
    // and JVM info
    logJVMInfo();
}
From source file:org.apache.pig.backend.hadoop.executionengine.tez.util.SecurityHelper.java
License:Apache License
public static void populateTokenCache(Configuration conf, Credentials credentials) throws IOException {
    readTokensFromFiles(conf, credentials);
    // add the delegation tokens from configuration
    String[] nameNodes = conf.getStrings(MRJobConfig.JOB_NAMENODES);
    LOG.debug("adding the following namenodes' delegation tokens:" + Arrays.toString(nameNodes));
    if (nameNodes != null) {
        Path[] ps = new Path[nameNodes.length];
        for (int i = 0; i < nameNodes.length; i++) {
            ps[i] = new Path(nameNodes[i]);
        }
        TokenCache.obtainTokensForNamenodes(credentials, ps, conf);
    }
}
From source file:org.apache.ranger.authorization.hbase.HbaseUserUtilsImpl.java
License:Apache License
public static void initiailize(Configuration conf) {
    if (_Initialized.get()) {
        LOG.warn("HbaseUserUtilsImpl.initialize: Unexpected: initialization called more than once!");
    } else {
        if (conf == null) {
            LOG.error("HbaseUserUtilsImpl.initialize: Internal error: called with null conf value!");
        } else {
            String[] users = conf.getStrings(SUPERUSER_CONFIG_PROP);
            if (users != null && users.length > 0) {
                Set<String> superUsers = new HashSet<String>(users.length);
                for (String user : users) {
                    user = user.trim();
                    LOG.info("HbaseUserUtilsImpl.initialize: Adding Super User(" + user + ")");
                    superUsers.add(user);
                }
                _SuperUsers.set(superUsers);
            }
        }
        _Initialized.set(true);
    }
}
From source file:org.apache.rya.accumulo.pig.IndexWritingTool.java
License:Apache License
@Override
public int run(final String[] args) throws Exception {
    Preconditions.checkArgument(args.length == 7, "java " + IndexWritingTool.class.getCanonicalName()
            + " hdfsSaveLocation sparqlFile cbinstance cbzk cbuser cbpassword rdfTablePrefix.");
    final String inputDir = args[0];
    final String sparqlFile = args[1];
    final String instStr = args[2];
    final String zooStr = args[3];
    final String userStr = args[4];
    final String passStr = args[5];
    final String tablePrefix = args[6];

    final String sparql = FileUtils.readFileToString(new File(sparqlFile));

    final Job job = new Job(getConf(), "Write HDFS Index to Accumulo");
    job.setJarByClass(this.getClass());

    final Configuration jobConf = job.getConfiguration();
    jobConf.setBoolean("mapred.map.tasks.speculative.execution", false);
    setVarOrders(sparql, jobConf);

    TextInputFormat.setInputPaths(job, inputDir);
    job.setInputFormatClass(TextInputFormat.class);
    job.setMapperClass(MyMapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Mutation.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Mutation.class);
    job.setNumReduceTasks(0);

    String tableName;
    if (zooStr.equals("mock")) {
        tableName = tablePrefix;
    } else {
        tableName = tablePrefix + "INDEX_" + UUID.randomUUID().toString().replace("-", "").toUpperCase();
    }
    setAccumuloOutput(instStr, zooStr, userStr, passStr, job, tableName);
    jobConf.set(sparql_key, sparql);

    final int complete = job.waitForCompletion(true) ? 0 : -1;
    if (complete == 0) {
        final String[] varOrders = jobConf.getStrings("varOrders");
        final String orders = Joiner.on("\u0000").join(varOrders);
        Instance inst;
        if (zooStr.equals("mock")) {
            inst = new MockInstance(instStr);
        } else {
            inst = new ZooKeeperInstance(instStr, zooStr);
        }
        final Connector conn = inst.getConnector(userStr, passStr.getBytes(StandardCharsets.UTF_8));
        final BatchWriter bw = conn.createBatchWriter(tableName, 10, 5000, 1);

        final Counters counters = job.getCounters();
        final Counter c1 = counters.findCounter(cardCounter, cardCounter);

        final Mutation m = new Mutation("~SPARQL");
        final Value v = new Value(sparql.getBytes(StandardCharsets.UTF_8));
        m.put(new Text("" + c1.getValue()), new Text(orders), v);
        bw.addMutation(m);
        bw.close();
        return complete;
    } else {
        return complete;
    }
}
From source file:org.apache.tez.common.TestTezUtils.java
License:Apache License
private void checkConf(Configuration conf) {
    Assert.assertEquals(conf.get("test1"), "value1");
    Assert.assertTrue(conf.getBoolean("test2", false));
    Assert.assertEquals(conf.getDouble("test3", 0), 1.2345, 1e-15);
    Assert.assertEquals(conf.getInt("test4", 0), 34567);
    Assert.assertEquals(conf.getLong("test5", 0), 1234567890L);
    String[] tmp = conf.getStrings("test6");
    Assert.assertEquals(tmp.length, 3);
    Assert.assertEquals(tmp[0], "S1");
    Assert.assertEquals(tmp[1], "S2");
    Assert.assertEquals(tmp[2], "S3");
}