Usage examples for org.apache.hadoop.conf.Configuration#getValByRegex
public Map<String, String> getValByRegex(String regex)
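Before the per-project examples, a minimal self-contained sketch of what the method does: it scans every entry in the Configuration and returns the key/value pairs whose keys match the regex. Note that the Hadoop implementation matches keys with Matcher.find(), so an unanchored pattern matches keys that merely contain it; anchor with ^ if you want prefix semantics. The demo.* property names below are invented for illustration.

import java.util.Map;
import org.apache.hadoop.conf.Configuration;

public class GetValByRegexDemo {
    public static void main(String[] args) {
        // false = do not load core-default.xml / core-site.xml
        Configuration conf = new Configuration(false);
        // Hypothetical properties, for illustration only
        conf.set("demo.cache.size", "128");
        conf.set("demo.cache.ttl", "600");
        conf.set("demo.io.buffer", "4096");

        // Keys are matched with find(), so anchor the pattern to get
        // true prefix semantics
        Map<String, String> cacheProps = conf.getValByRegex("^demo\\.cache\\..*");
        for (Map.Entry<String, String> e : cacheProps.entrySet()) {
            System.out.println(e.getKey() + " = " + e.getValue());
        }
        // Prints, in no particular order:
        //   demo.cache.size = 128
        //   demo.cache.ttl = 600
    }
}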
From source file: org.apache.oozie.action.hadoop.Hive2Main.java
License: Apache License
@Override
protected void run(String[] args) throws Exception {
    System.out.println();
    System.out.println("Oozie Hive 2 action configuration");
    System.out.println("=================================================================");
    System.out.println();

    Configuration actionConf = initActionConf();

    // Logfile to capture job IDs
    String hadoopJobId = System.getProperty("oozie.launcher.job.id");
    if (hadoopJobId == null) {
        throw new RuntimeException("Launcher Hadoop Job ID system property not set");
    }
    String logFile = new File("hive2-oozie-" + hadoopJobId + ".log").getAbsolutePath();

    List<String> arguments = new ArrayList<String>();
    String jdbcUrl = actionConf.get(Hive2ActionExecutor.HIVE2_JDBC_URL);
    if (jdbcUrl == null) {
        throw new RuntimeException(
                "Action Configuration does not have [" + Hive2ActionExecutor.HIVE2_JDBC_URL + "] property");
    }
    arguments.add("-u");
    arguments.add(jdbcUrl);

    // Use the user who is running the map task
    String username = actionConf.get("user.name");
    arguments.add("-n");
    arguments.add(username);

    String password = actionConf.get(Hive2ActionExecutor.HIVE2_PASSWORD);
    if (password == null) {
        // Have to pass something or Beeline might interactively prompt, which we don't want
        password = "DUMMY";
    }
    arguments.add("-p");
    arguments.add(password);

    // We always use the same driver
    arguments.add("-d");
    arguments.add("org.apache.hive.jdbc.HiveDriver");

    String scriptPath = actionConf.get(Hive2ActionExecutor.HIVE2_SCRIPT);
    String query = actionConf.get(Hive2ActionExecutor.HIVE2_QUERY);
    if (scriptPath != null) {
        if (!new File(scriptPath).exists()) {
            throw new RuntimeException("Hive 2 script file [" + scriptPath + "] does not exist");
        }
        // print out current directory & its contents
        File localDir = new File("dummy").getAbsoluteFile().getParentFile();
        System.out.println("Current (local) dir = " + localDir.getAbsolutePath());
        System.out.println("------------------------");
        for (String file : localDir.list()) {
            System.out.println(" " + file);
        }
        System.out.println("------------------------");
        System.out.println();
        // Prepare the Hive Script
        String script = readStringFromFile(scriptPath);
        System.out.println();
        System.out.println("Script [" + scriptPath + "] content: ");
        System.out.println("------------------------");
        System.out.println(script);
        System.out.println("------------------------");
        System.out.println();
        arguments.add("-f");
        arguments.add(scriptPath);
    } else if (query != null) {
        System.out.println("Query: ");
        System.out.println("------------------------");
        System.out.println(query);
        System.out.println("------------------------");
        System.out.println();
        String filename = createScriptFile(query);
        arguments.add("-f");
        arguments.add(filename);
    } else {
        throw new RuntimeException("Action Configuration does not have [" + Hive2ActionExecutor.HIVE2_SCRIPT
                + "], or [" + Hive2ActionExecutor.HIVE2_QUERY + "] property");
    }

    // Pass any parameters to Beeline via arguments
    String[] params = MapReduceMain.getStrings(actionConf, Hive2ActionExecutor.HIVE2_PARAMS);
    if (params.length > 0) {
        System.out.println("Parameters:");
        System.out.println("------------------------");
        for (String param : params) {
            System.out.println(" " + param);
            int idx = param.indexOf('=');
            if (idx == -1) {
                throw new RuntimeException("Parameter expression must contain an assignment: " + param);
            } else if (idx == 0) {
                throw new RuntimeException("Parameter value not specified: " + param);
            }
            arguments.add("--hivevar");
            arguments.add(param);
        }
        System.out.println("------------------------");
        System.out.println();
    }

    // This tells BeeLine to look for a delegation token; otherwise it won't and will fail in secure mode
    // because there are no Kerberos credentials. In non-secure mode, this argument is ignored so we can
    // simply always pass it.
    arguments.add("-a");
    arguments.add("delegationToken");

    String[] beelineArgs = MapReduceMain.getStrings(actionConf, Hive2ActionExecutor.HIVE2_ARGS);
    for (String beelineArg : beelineArgs) {
        if (DISALLOWED_BEELINE_OPTIONS.contains(beelineArg)) {
            throw new RuntimeException("Error: Beeline argument " + beelineArg + " is not supported");
        }
        arguments.add(beelineArg);
    }

    // Propagate MR job tag if defined
    if (actionConf.get(LauncherMain.MAPREDUCE_JOB_TAGS) != null) {
        arguments.add("--hiveconf");
        arguments.add("mapreduce.job.tags=" + actionConf.get(LauncherMain.MAPREDUCE_JOB_TAGS));
    }

    // Propagate "oozie.*" configs (but not "oozie.launcher.*" nor "oozie.hive2.*")
    for (Map.Entry<String, String> oozieConfig : actionConf
            .getValByRegex("^oozie\\.(?!launcher|hive2).+").entrySet()) {
        arguments.add("--hiveconf");
        arguments.add(oozieConfig.getKey() + "=" + oozieConfig.getValue());
    }

    System.out.println("Beeline command arguments :");
    for (String arg : arguments) {
        System.out.println(" " + arg);
    }
    System.out.println();

    LauncherMainHadoopUtils.killChildYarnJobs(actionConf);

    System.out.println("=================================================================");
    System.out.println();
    System.out.println(">>> Invoking Beeline command line now >>>");
    System.out.println();
    System.out.flush();

    try {
        runBeeline(arguments.toArray(new String[arguments.size()]), logFile);
    } catch (SecurityException ex) {
        if (LauncherSecurityManager.getExitInvoked()) {
            if (LauncherSecurityManager.getExitCode() != 0) {
                throw ex;
            }
        }
    } finally {
        System.out.println("\n<<< Invocation of Beeline command completed <<<\n");
        writeExternalChildIDs(logFile, HIVE2_JOB_IDS_PATTERNS, "Beeline");
    }
}
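The getValByRegex call worth noting here is the one with the negative lookahead: "^oozie\\.(?!launcher|hive2).+" keeps every oozie.* property except the oozie.launcher.* and oozie.hive2.* namespaces, which configure the launcher and the action itself rather than the job being launched. A stripped-down sketch of just that filtering step, with invented property values:

import java.util.Map;
import org.apache.hadoop.conf.Configuration;

public class OozieConfFilterSketch {
    public static void main(String[] args) {
        Configuration actionConf = new Configuration(false);
        // Hypothetical values, for illustration only
        actionConf.set("oozie.action.id", "0000001-wf@hive2");
        actionConf.set("oozie.launcher.queue", "launchers");            // filtered out
        actionConf.set("oozie.hive2.jdbc.url", "jdbc:hive2://h:10000"); // filtered out
        actionConf.set("mapreduce.job.name", "demo");                   // not oozie.*

        // "^" anchors at the key start; "(?!launcher|hive2)" is a negative
        // lookahead that rejects the two excluded namespaces
        for (Map.Entry<String, String> e :
                actionConf.getValByRegex("^oozie\\.(?!launcher|hive2).+").entrySet()) {
            System.out.println("--hiveconf " + e.getKey() + "=" + e.getValue());
        }
        // Prints only: --hiveconf oozie.action.id=0000001-wf@hive2
    }
}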
From source file: org.apache.phoenix.pherf.workload.QueryExecutor.java
License: Apache License
/**
 * Execute all scenarios
 *
 * @param dataModel
 * @throws Exception
 */
protected Runnable executeAllScenarios(final DataModel dataModel) throws Exception {
    return new Runnable() {
        @Override
        public void run() {
            List<DataModelResult> dataModelResults = new ArrayList<>();
            DataModelResult dataModelResult = new DataModelResult(dataModel, PhoenixUtil.getZookeeper());
            ResultManager resultManager = new ResultManager(dataModelResult.getName());
            dataModelResults.add(dataModelResult);
            List<Scenario> scenarios = dataModel.getScenarios();
            Configuration conf = HBaseConfiguration.create();
            Map<String, String> phoenixProperty = conf.getValByRegex("phoenix");
            try {
                for (Scenario scenario : scenarios) {
                    ScenarioResult scenarioResult = new ScenarioResult(scenario);
                    scenarioResult.setPhoenixProperties(phoenixProperty);
                    dataModelResult.getScenarioResult().add(scenarioResult);
                    WriteParams writeParams = scenario.getWriteParams();
                    if (writeParams != null) {
                        int writerThreadCount = writeParams.getWriterThreadCount();
                        for (int i = 0; i < writerThreadCount; i++) {
                            logger.debug("Inserting write workload ( " + i + " ) of ( " + writerThreadCount + " )");
                            Workload writes = new WriteWorkload(PhoenixUtil.create(), parser, GeneratePhoenixStats.NO);
                            workloadExecutor.add(writes);
                        }
                    }
                    for (QuerySet querySet : scenario.getQuerySet()) {
                        QuerySetResult querySetResult = new QuerySetResult(querySet);
                        scenarioResult.getQuerySetResult().add(querySetResult);
                        util.executeQuerySetDdls(querySet);
                        if (querySet.getExecutionType() == ExecutionType.SERIAL) {
                            executeQuerySetSerial(dataModelResult, querySet, querySetResult);
                        } else {
                            executeQuerySetParallel(dataModelResult, querySet, querySetResult);
                        }
                    }
                    resultManager.write(dataModelResult);
                }
                resultManager.write(dataModelResults);
                resultManager.flush();
            } catch (Exception e) {
                logger.warn("", e);
            }
        }
    };
}
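Unlike the Oozie example, the pattern here is just "phoenix" with no anchors; because getValByRegex matches with find(), it selects every key that contains the substring "phoenix" anywhere, not only keys that start with it. A sketch of the same lookup, with two phoenix.* overrides set purely for illustration:

import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class PhoenixPropsSketch {
    public static void main(String[] args) {
        // Starts from hbase-default.xml / hbase-site.xml, as in the example above
        Configuration conf = HBaseConfiguration.create();
        // Set two overrides so the lookup has something to find
        conf.set("phoenix.query.timeoutMs", "60000");
        conf.set("phoenix.mutate.batchSize", "1000");

        // Unanchored pattern: any key containing "phoenix" matches
        Map<String, String> phoenixProps = conf.getValByRegex("phoenix");
        phoenixProps.forEach((k, v) -> System.out.println(k + " = " + v));
    }
}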
From source file: org.apache.phoenix.queryserver.server.PhoenixMetaFactoryImpl.java
License: Apache License
@Override
public Meta create(List<String> args) {
    Configuration conf = Preconditions.checkNotNull(getConf(), "Configuration must not be null.");
    Properties info = new Properties();
    info.putAll(conf.getValByRegex("avatica.*"));
    try {
        final String url;
        if (args.size() == 0) {
            url = QueryUtil.getConnectionUrl(info, conf);
        } else if (args.size() == 1) {
            url = args.get(0);
        } else {
            throw new RuntimeException("0 or 1 argument expected. Received " + Arrays.toString(args.toArray()));
        }
        // TODO: what about -D configs passed in from cli? How do they get pushed down?
        return new JdbcMeta(url, info);
    } catch (SQLException | ClassNotFoundException e) {
        throw new RuntimeException(e);
    }
}
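Here the matched entries seed a java.util.Properties that is handed to Avatica's JdbcMeta; since getValByRegex returns a Map<String, String>, Properties.putAll() accepts it directly. Two things to note about "avatica.*": under find() semantics the trailing .* is redundant, and the pattern is unanchored, so a key containing "avatica" in the middle would match as well. A minimal sketch, with made-up property names:

import java.util.Properties;
import org.apache.hadoop.conf.Configuration;

public class AvaticaPropsSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        // Hypothetical Avatica settings, for illustration only
        conf.set("avatica.port", "8765");
        conf.set("avatica.serialization", "PROTOBUF");

        // Map<String, String> drops straight into Properties
        Properties info = new Properties();
        info.putAll(conf.getValByRegex("avatica.*"));
        System.out.println(info);
    }
}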
From source file: org.Microsoft.Telemetry.ServiceInformation.java
/**
 * This method initializes all objects of the Telemetry service.
 *
 * @throws YarnException
 * @throws IOException
 */
public void initialization(Configuration conf) throws YarnException, IOException {
    telemetryconfig = TelemetryConfiguration.getActive();
    client = YarnClient.createYarnClient();
    Ikey = conf.get(IKEY_NAME_PROPERTY_CONFIG);
    dimension_from_config = conf.getValByRegex(PREFIX_CUSTOM_DIMENSINS + "*");
    LOG.info(PATTERN_LOG_INFO + String.format("Updating %d dimensions from Configuration file ",
            dimension_from_config.size()));
    LOG2.info(PATTERN_LOG_INFO + String.format("Updating %d dimensions from Configuration file ",
            dimension_from_config.size()));
    if (!Ikey.equals("")) {
        telemetryconfig.setInstrumentationKey(Ikey);
        telemetry = new TelemetryClient(telemetryconfig);
        LOG.info(PATTERN_LOG_INFO + "Instrumentation Key initialized successfully....!");
        LOG2.info(PATTERN_LOG_INFO + "Instrumentation Key initialized successfully....!");
    } else {
        LOG.error(PATTERN_LOG_ERROR
                + "Instrumentation Key is not initialized because no Ikey was provided or it could not be read from the config file");
    }
}
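The regex built here, PREFIX_CUSTOM_DIMENSINS + "*", deserves a caution: appending a bare * to a prefix makes the prefix's last character optional and repeatable, it does not mean "the prefix followed by anything", and any dots in the prefix also match arbitrary characters. A more precise construction anchors and quotes the prefix, sketched below with a hypothetical stand-in value (the real PREFIX_CUSTOM_DIMENSINS constant is not shown in the source):

import java.util.Map;
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;

public class DimensionPrefixSketch {
    // Hypothetical stand-in for PREFIX_CUSTOM_DIMENSINS
    private static final String PREFIX = "telemetry.dimension.";

    public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        conf.set("telemetry.dimension.cluster", "prod-east");
        conf.set("telemetry.dimension.team", "data-platform");

        // Pattern.quote() makes the dots literal; "^" pins the prefix to the
        // start of the key (find() would otherwise match it anywhere)
        Map<String, String> dims = conf.getValByRegex("^" + Pattern.quote(PREFIX));
        dims.forEach((k, v) -> System.out.println(k + " -> " + v));
    }
}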
From source file: org.talend.components.hadoopcluster.runtime.configuration.HadoopCMClusterService.java
License: Open Source License
private Map<String, String> getConfiguration(String confName) {
    Configuration conf = confs.get(confName);
    return conf.getValByRegex(".*"); //$NON-NLS-1$
}
From source file: org.talend.repository.hadoopcluster.configurator.cloudera.HadoopCMClusterService.java
License: Open Source License
private Map<String, String> getConfiguration(String confName) {
    Configuration conf = confs.get(confName);
    return conf.getValByRegex(".*"); //$NON-NLS-1$
}
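Both Talend helpers above use the catch-all pattern ".*", which matches every key, so getValByRegex doubles as a way to snapshot an entire Configuration into a plain Map. A minimal sketch (note that, at least in the Hadoop sources I have checked, getValByRegex also applies variable substitution to the returned values, which plain iteration over the Configuration does not):

import java.util.Map;
import org.apache.hadoop.conf.Configuration;

public class DumpConfSketch {
    public static void main(String[] args) {
        // Loads core-default.xml / core-site.xml from the classpath, if present
        Configuration conf = new Configuration();

        // ".*" matches every key: a full snapshot of the Configuration
        Map<String, String> all = conf.getValByRegex(".*");
        all.forEach((k, v) -> System.out.println(k + "=" + v));
    }
}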