List of usage examples for java.util.Properties.putAll
@Override public synchronized void putAll(Map<?, ?> t)
From source file:gobblin.service.modules.core.GobblinServiceHATest.java
/**
 * Spins up a two-node Gobblin-as-a-Service HA test fixture: cleans previous test
 * directories, starts an in-process ZooKeeper server on a random port, creates the
 * Helix cluster, builds per-node service properties (shared core config plus
 * node-specific spec-store dirs), starts both GobblinServiceManager nodes, and
 * creates a Rest.li FlowConfigClient pointed at each node.
 *
 * @throws Exception if ZooKeeper, Helix, or either service node fails to start
 */
@BeforeClass
public void setup() throws Exception {
    // Clean up common Flow Spec Dir
    cleanUpDir(COMMON_SPEC_STORE_PARENT_DIR);
    // Clean up work dir for Node 1
    cleanUpDir(NODE_1_SERVICE_WORK_DIR);
    cleanUpDir(NODE_1_SPEC_STORE_PARENT_DIR);
    // Clean up work dir for Node 2
    cleanUpDir(NODE_2_SERVICE_WORK_DIR);
    cleanUpDir(NODE_2_SPEC_STORE_PARENT_DIR);
    // Use a random ZK port (-1 asks TestingServer to pick one)
    this.testingZKServer = new TestingServer(-1);
    logger.info("Testing ZK Server listening on: " + testingZKServer.getConnectString());
    HelixUtils.createGobblinHelixCluster(testingZKServer.getConnectString(), TEST_HELIX_CLUSTER_NAME);
    // Core properties shared by both service nodes.
    Properties commonServiceCoreProperties = new Properties();
    commonServiceCoreProperties.put(ServiceConfigKeys.ZK_CONNECTION_STRING_KEY, testingZKServer.getConnectString());
    commonServiceCoreProperties.put(ServiceConfigKeys.HELIX_CLUSTER_NAME_KEY, TEST_HELIX_CLUSTER_NAME);
    // Random instance name so repeated runs do not collide in Helix.
    commonServiceCoreProperties.put(ServiceConfigKeys.HELIX_INSTANCE_NAME_KEY, "GaaS_" + UUID.randomUUID().toString());
    commonServiceCoreProperties.put(ServiceConfigKeys.TOPOLOGY_FACTORY_TOPOLOGY_NAMES_KEY, TEST_GOBBLIN_EXECUTOR_NAME);
    commonServiceCoreProperties.put(ServiceConfigKeys.TOPOLOGY_FACTORY_PREFIX + TEST_GOBBLIN_EXECUTOR_NAME + ".description", "StandaloneTestExecutor");
    commonServiceCoreProperties.put(ServiceConfigKeys.TOPOLOGY_FACTORY_PREFIX + TEST_GOBBLIN_EXECUTOR_NAME + ".version", "1");
    commonServiceCoreProperties.put(ServiceConfigKeys.TOPOLOGY_FACTORY_PREFIX + TEST_GOBBLIN_EXECUTOR_NAME + ".uri", "gobblinExecutor");
    commonServiceCoreProperties.put(ServiceConfigKeys.TOPOLOGY_FACTORY_PREFIX + TEST_GOBBLIN_EXECUTOR_NAME + ".specExecutorInstanceProducer", "gobblin.service.InMemorySpecExecutorInstanceProducer");
    commonServiceCoreProperties.put(ServiceConfigKeys.TOPOLOGY_FACTORY_PREFIX + TEST_GOBBLIN_EXECUTOR_NAME + ".specExecInstance.capabilities", TEST_SOURCE_NAME + ":" + TEST_SINK_NAME);
    // Node 1 = common config + node-1 spec store locations.
    Properties node1ServiceCoreProperties = new Properties();
    node1ServiceCoreProperties.putAll(commonServiceCoreProperties);
    node1ServiceCoreProperties.put(ConfigurationKeys.TOPOLOGYSPEC_STORE_DIR_KEY, NODE_1_TOPOLOGY_SPEC_STORE_DIR);
    node1ServiceCoreProperties.put(ConfigurationKeys.FLOWSPEC_STORE_DIR_KEY, NODE_1_FLOW_SPEC_STORE_DIR);
    // Node 2 = common config + node-2 spec store locations.
    Properties node2ServiceCoreProperties = new Properties();
    node2ServiceCoreProperties.putAll(commonServiceCoreProperties);
    node2ServiceCoreProperties.put(ConfigurationKeys.TOPOLOGYSPEC_STORE_DIR_KEY, NODE_2_TOPOLOGY_SPEC_STORE_DIR);
    node2ServiceCoreProperties.put(ConfigurationKeys.FLOWSPEC_STORE_DIR_KEY, NODE_2_FLOW_SPEC_STORE_DIR);
    // Start Node 1
    this.node1GobblinServiceManager = new GobblinServiceManager("CoreService", "1",
            ConfigUtils.propertiesToConfig(node1ServiceCoreProperties), Optional.of(new Path(NODE_1_SERVICE_WORK_DIR)));
    this.node1GobblinServiceManager.start();
    // Start Node 2
    this.node2GobblinServiceManager = new GobblinServiceManager("CoreService", "1",
            ConfigUtils.propertiesToConfig(node2ServiceCoreProperties), Optional.of(new Path(NODE_2_SERVICE_WORK_DIR)));
    this.node2GobblinServiceManager.start();
    // Initialize Node 1 Client
    this.node1FlowConfigClient = new FlowConfigClient(
            String.format("http://localhost:%s/", this.node1GobblinServiceManager.restliServer.getPort()));
    // Initialize Node 2 Client
    this.node2FlowConfigClient = new FlowConfigClient(
            String.format("http://localhost:%s/", this.node2GobblinServiceManager.restliServer.getPort()));
}
From source file:com.opengamma.component.ComponentManager.java
/**
 * Intelligently sets the property to the merged set of properties.
 * <p>
 * The key "MANAGER.PROPERTIES" can be used in a properties file to refer to
 * the entire set of merged properties. This is normally what you want to pass
 * into other systems (such as Spring) that need a set of properties.
 *
 * @param bean the bean, not null
 * @param mp the property, not null
 * @throws Exception allowing throwing of a checked exception
 */
protected void setPropertyMergedProperties(Bean bean, MetaProperty<?> mp) throws Exception {
    // Snapshot the merged properties into an in-memory buffer in .properties format.
    final Properties merged = new Properties();
    merged.putAll(getProperties());
    final String description = MANAGER_PROPERTIES + " for " + mp;
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream(1024);
    merged.store(buffer, description);
    buffer.close();
    // Expose the snapshot as a read-only in-memory Resource; each caller of
    // getInputStream() receives a fresh stream over the same bytes.
    Resource snapshot = new AbstractResource() {
        @Override
        public String getDescription() {
            return MANAGER_PROPERTIES;
        }

        @Override
        public String getFilename() throws IllegalStateException {
            return MANAGER_PROPERTIES + ".properties";
        }

        @Override
        public InputStream getInputStream() throws IOException {
            return new ByteArrayInputStream(buffer.toByteArray());
        }

        @Override
        public String toString() {
            return description;
        }
    };
    mp.set(bean, snapshot);
}
From source file:io.druid.indexing.kafka.test.TestBroker.java
/**
 * Starts the embedded Kafka broker for tests: builds the broker config from
 * hard-coded test defaults, overlays user-supplied {@code brokerProps}, and
 * boots the {@code KafkaServer}. The listen port is random in [10000, 19998].
 */
public void start() {
    // Random test port; range is 10000 + [0, 9998].
    final int listenPort = new Random().nextInt(9999) + 10000;
    final Properties config = new Properties();
    config.setProperty("zookeeper.connect", zookeeperConnect);
    config.setProperty("zookeeper.session.timeout.ms", "30000");
    config.setProperty("zookeeper.connection.timeout.ms", "30000");
    config.setProperty("log.dirs", directory.toString());
    config.setProperty("broker.id", String.valueOf(id));
    config.setProperty("port", String.valueOf(listenPort));
    config.setProperty("advertised.host.name", "localhost");
    // Caller-provided properties are applied last, so they win over the defaults above.
    config.putAll(brokerProps);
    server = new KafkaServer(new KafkaConfig(config), SystemTime.SYSTEM,
            Some.apply(StringUtils.format("TestingBroker[%d]-", id)), List$.MODULE$.empty());
    server.startup();
}
From source file:org.artifactory.common.property.ArtifactorySystemProperties.java
private static void handleDeprecatedProps(Properties artProps) { //Test the deprecated props against the current props Properties autoReplaced = new Properties(); for (Object key : artProps.keySet()) { String prop = (String) key; if (DEPRECATED.containsKey(prop)) { PropertyMapper mapper = DEPRECATED.get(prop); String newProp = mapper.getNewPropertyName(); String suggestion = newProp == null ? "this property is no longer in use." : "please use: '" + newProp + "' instead."; BootstrapLogger.warn(/*from w ww. j a v a 2 s. c om*/ "Usage of deprecated artifactory system property detected: '" + prop + "' - " + suggestion); //Check if property can be automatically replaced String value = (String) artProps.get(prop); String newValue = mapper.map(value); if (newProp == null) { newProp = prop; } if (newValue != null) { autoReplaced.put(newProp, newValue); BootstrapLogger.warn("Deprecated artifactory system property '" + prop + "=" + value + "' auto-replaced with '" + newProp + "=" + newValue + "'."); } } } artProps.putAll(autoReplaced); }
From source file:org.apache.sqoop.connector.jdbc.GenericJdbcExecutor.java
public GenericJdbcExecutor(LinkConfiguration linkConfig) { // Persist link configuration for future use this.link = linkConfig; assert link != null; assert link.linkConfig != null; assert link.linkConfig.connectionString != null; // Load/register the JDBC driver to JVM Class driverClass = ClassUtils.loadClass(link.linkConfig.jdbcDriver); if (driverClass == null) { throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0000, link.linkConfig.jdbcDriver); }/*from w ww . j a va 2 s .c om*/ // Properties that we will use for the connection Properties properties = new Properties(); if (link.linkConfig.jdbcProperties != null) { properties.putAll(link.linkConfig.jdbcProperties); } // Propagate username and password to the properties // // DriverManager have two relevant API for us: // * getConnection(url, username, password) // * getConnection(url, properties) // As we have to use properties, we need to use the later // method and hence we have to persist the credentials there. if (link.linkConfig.username != null) { properties.put(JDBC_PROPERTY_USERNAME, link.linkConfig.username); } if (link.linkConfig.password != null) { properties.put(JDBC_PROPERTY_PASSWORD, link.linkConfig.password); } // Finally create the connection try { connection = DriverManager.getConnection(link.linkConfig.connectionString, properties); } catch (SQLException e) { logSQLException(e); throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0001, e); } // Fill in defaults if they were not pre-entered by user if (link.dialect.identifierEnclose == null) { link.dialect.identifierEnclose = "\""; } }
From source file:org.apache.pig.Main.java
/**
 * Main Pig driver: parses command-line options, builds the merged property set
 * (Pig defaults + Hadoop Configuration), creates the PigContext/ScriptState, and
 * dispatches to one of: script-file execution, inline-string execution, embedded
 * script execution, or interactive Grunt shell. Returns a ReturnCode-style int.
 *
 * @param args raw command-line arguments
 * @param listener optional progress listener; when null one is created from properties
 * @return process return code (ReturnCode constants)
 */
static int run(String args[], PigProgressNotificationListener listener) {
    DateTime startTime = new DateTime();
    int rc = 1;
    boolean verbose = false;
    boolean gruntCalled = false;
    boolean deleteTempFiles = true;
    String logFileName = null;
    boolean printScriptRunTime = true;
    PigContext pigContext = null;
    try {
        // Merge Hadoop generic options into Pig's default properties.
        Configuration conf = new Configuration(false);
        GenericOptionsParser parser = new GenericOptionsParser(conf, args);
        conf = parser.getConfiguration();
        Properties properties = new Properties();
        PropertiesUtil.loadDefaultProperties(properties);
        properties.putAll(ConfigurationUtil.toProperties(conf));
        if (listener == null) {
            listener = makeListener(properties);
        }
        String[] pigArgs = parser.getRemainingArgs();
        boolean userSpecifiedLog = false;
        boolean checkScriptOnly = false;
        BufferedReader pin = null;
        boolean debug = false;
        boolean dryrun = false;
        boolean embedded = false;
        List<String> params = new ArrayList<String>();
        List<String> paramFiles = new ArrayList<String>();
        HashSet<String> disabledOptimizerRules = new HashSet<String>();
        // Register every supported Pig command-line option.
        CmdLineParser opts = new CmdLineParser(pigArgs);
        opts.registerOpt('4', "log4jconf", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('b', "brief", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('c', "check", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('d', "debug", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('e', "execute", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('f', "file", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('g', "embedded", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('h', "help", CmdLineParser.ValueExpected.OPTIONAL);
        opts.registerOpt('i', "version", CmdLineParser.ValueExpected.OPTIONAL);
        opts.registerOpt('l', "logfile", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('m', "param_file", CmdLineParser.ValueExpected.OPTIONAL);
        opts.registerOpt('p', "param", CmdLineParser.ValueExpected.OPTIONAL);
        opts.registerOpt('r', "dryrun", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('t', "optimizer_off", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('v', "verbose", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('w', "warning", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('x', "exectype", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('F', "stop_on_failure", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('M', "no_multiquery", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('N', "no_fetch", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('P', "propertyFile", CmdLineParser.ValueExpected.REQUIRED);
        ExecMode mode = ExecMode.UNKNOWN;
        String file = null;
        String engine = null;
        // set up client side system properties in UDF context
        UDFContext.getUDFContext().setClientSystemProps(properties);
        char opt;
        while ((opt = opts.getNextOpt()) != CmdLineParser.EndOfOpts) {
            switch (opt) {
            case '4':
                String log4jconf = opts.getValStr();
                if (log4jconf != null) {
                    properties.setProperty(LOG4J_CONF, log4jconf);
                }
                break;
            case 'b':
                properties.setProperty(BRIEF, "true");
                break;
            case 'c':
                checkScriptOnly = true;
                break;
            case 'd':
                String logLevel = opts.getValStr();
                if (logLevel != null) {
                    properties.setProperty(DEBUG, logLevel);
                }
                debug = true;
                break;
            case 'e':
                mode = ExecMode.STRING;
                break;
            case 'f':
                mode = ExecMode.FILE;
                file = opts.getValStr();
                break;
            case 'g':
                embedded = true;
                engine = opts.getValStr();
                break;
            case 'F':
                properties.setProperty("stop.on.failure", "" + true);
                break;
            case 'h':
                String topic = opts.getValStr();
                if (topic != null)
                    if (topic.equalsIgnoreCase("properties"))
                        printProperties();
                    else {
                        System.out.println("Invalide help topic - " + topic);
                        usage();
                    }
                else
                    usage();
                return ReturnCode.SUCCESS;
            case 'i':
                printScriptRunTime = false;
                System.out.println(getVersionString());
                return ReturnCode.SUCCESS;
            case 'l':
                // call to method that validates the path to the log file
                // and sets up the file to store the client side log file
                String logFileParameter = opts.getValStr();
                if (logFileParameter != null && logFileParameter.length() > 0) {
                    logFileName = validateLogFile(logFileParameter, null);
                } else {
                    logFileName = validateLogFile(logFileName, null);
                }
                userSpecifiedLog = true;
                properties.setProperty("pig.logfile", (logFileName == null ? "" : logFileName));
                break;
            case 'm':
                paramFiles.add(opts.getValStr());
                break;
            case 'M':
                // turns off multiquery optimization
                properties.setProperty(PigConfiguration.PIG_OPT_MULTIQUERY, "" + false);
                break;
            case 'N':
                properties.setProperty(PigConfiguration.PIG_OPT_FETCH, "" + false);
                break;
            case 'p':
                params.add(opts.getValStr());
                break;
            case 'r':
                // currently only used for parameter substitution
                // will be extended in the future
                dryrun = true;
                break;
            case 't':
                disabledOptimizerRules.add(opts.getValStr());
                break;
            case 'v':
                properties.setProperty(VERBOSE, "" + true);
                verbose = true;
                break;
            case 'w':
                properties.setProperty("aggregate.warning", "" + false);
                break;
            case 'x':
                properties.setProperty("exectype", opts.getValStr());
                if (opts.getValStr().toLowerCase().contains("local")) {
                    UserGroupInformation.setConfiguration(new Configuration(false));
                }
                break;
            case 'P': {
                // Load an extra properties file on top of the current properties.
                InputStream inputStream = null;
                try {
                    FileLocalizer.FetchFileRet localFileRet = FileLocalizer.fetchFile(properties,
                            opts.getValStr());
                    inputStream = new BufferedInputStream(new FileInputStream(localFileRet.file));
                    properties.load(inputStream);
                } catch (IOException e) {
                    throw new RuntimeException("Unable to parse properties file '" + opts.getValStr() + "'");
                } finally {
                    if (inputStream != null) {
                        try {
                            inputStream.close();
                        } catch (IOException e) {
                        }
                    }
                }
            }
                break;
            default: {
                Character cc = Character.valueOf(opt);
                throw new AssertionError("Unhandled option " + cc.toString());
            }
            }
        }
        // create the context with the parameter
        pigContext = new PigContext(properties);
        // create the static script state object
        ScriptState scriptState = pigContext.getExecutionEngine().instantiateScriptState();
        String commandLine = LoadFunc.join((AbstractList<String>) Arrays.asList(args), " ");
        scriptState.setCommandLine(commandLine);
        if (listener != null) {
            scriptState.registerListener(listener);
        }
        ScriptState.start(scriptState);
        pigContext.getProperties().setProperty("pig.cmd.args", commandLine);
        if (logFileName == null && !userSpecifiedLog) {
            logFileName = validateLogFile(properties.getProperty("pig.logfile"), null);
        }
        pigContext.getProperties().setProperty("pig.logfile", (logFileName == null ? "" : logFileName));
        // configure logging
        configureLog4J(properties, pigContext);
        log.info(getVersionString().replace("\n", ""));
        if (logFileName != null) {
            log.info("Logging error messages to: " + logFileName);
        }
        deleteTempFiles = Boolean.valueOf(properties.getProperty(PigConfiguration.PIG_DELETE_TEMP_FILE, "true"));
        pigContext.getProperties().setProperty(PigImplConstants.PIG_OPTIMIZER_RULES_KEY,
                ObjectSerializer.serialize(disabledOptimizerRules));
        PigContext.setClassLoader(pigContext.createCl(null));
        // construct the parameter substitution preprocessor
        Grunt grunt = null;
        BufferedReader in;
        String substFile = null;
        paramFiles = fetchRemoteParamFiles(paramFiles, properties);
        pigContext.setParams(params);
        pigContext.setParamFiles(paramFiles);
        switch (mode) {
        case FILE: {
            // -f: execute the given script file.
            String remainders[] = opts.getRemainingArgs();
            if (remainders != null) {
                pigContext.getProperties().setProperty(PigContext.PIG_CMD_ARGS_REMAINDERS,
                        ObjectSerializer.serialize(remainders));
            }
            FileLocalizer.FetchFileRet localFileRet = FileLocalizer.fetchFile(properties, file);
            if (localFileRet.didFetch) {
                properties.setProperty("pig.jars.relative.to.dfs", "true");
            }
            scriptState.setFileName(file);
            if (embedded) {
                return runEmbeddedScript(pigContext, localFileRet.file.getPath(), engine);
            } else {
                SupportedScriptLang type = determineScriptType(localFileRet.file.getPath());
                if (type != null) {
                    return runEmbeddedScript(pigContext, localFileRet.file.getPath(), type.name().toLowerCase());
                }
            }
            // Reader is created by first loading "pig.load.default.statements" or .pigbootup file if available
            in = new BufferedReader(new InputStreamReader(
                    Utils.getCompositeStream(new FileInputStream(localFileRet.file), properties)));
            // run parameter substitution preprocessor first
            substFile = file + ".substituted";
            pin = runParamPreprocessor(pigContext, in, substFile, debug || dryrun || checkScriptOnly);
            if (dryrun) {
                if (dryrun(substFile, pigContext)) {
                    log.info("Dry run completed. Substituted pig script is at " + substFile
                            + ". Expanded pig script is at " + file + ".expanded");
                } else {
                    log.info("Dry run completed. Substituted pig script is at " + substFile);
                }
                return ReturnCode.SUCCESS;
            }
            logFileName = validateLogFile(logFileName, localFileRet.file);
            pigContext.getProperties().setProperty("pig.logfile", (logFileName == null ? "" : logFileName));
            // Set job name based on name of the script
            pigContext.getProperties().setProperty(PigContext.JOB_NAME, "PigLatin:" + new File(file).getName());
            if (!debug) {
                new File(substFile).deleteOnExit();
            }
            scriptState.setScript(localFileRet.file);
            grunt = new Grunt(pin, pigContext);
            gruntCalled = true;
            if (checkScriptOnly) {
                grunt.checkScript(substFile);
                System.err.println(file + " syntax OK");
                rc = ReturnCode.SUCCESS;
            } else {
                int results[] = grunt.exec();
                rc = getReturnCodeForStats(results);
            }
            return rc;
        }
        case STRING: {
            // -e: execute the remaining arguments as an inline Pig script.
            if (checkScriptOnly) {
                System.err.println("ERROR:" + "-c (-check) option is only valid "
                        + "when executing pig with a pig script file)");
                return ReturnCode.ILLEGAL_ARGS;
            }
            // Gather up all the remaining arguments into a string and pass them into
            // grunt.
            StringBuffer sb = new StringBuffer();
            String remainders[] = opts.getRemainingArgs();
            for (int i = 0; i < remainders.length; i++) {
                if (i != 0)
                    sb.append(' ');
                sb.append(remainders[i]);
            }
            sb.append('\n');
            scriptState.setScript(sb.toString());
            in = new BufferedReader(new StringReader(sb.toString()));
            grunt = new Grunt(in, pigContext);
            gruntCalled = true;
            int results[] = grunt.exec();
            return getReturnCodeForStats(results);
        }
        default:
            break;
        }
        // If we're here, we don't know yet what they want. They may have just
        // given us a jar to execute, they might have given us a pig script to
        // execute, or they might have given us a dash (or nothing) which means to
        // run grunt interactive.
        String remainders[] = opts.getRemainingArgs();
        if (remainders == null) {
            if (checkScriptOnly) {
                System.err.println("ERROR:" + "-c (-check) option is only valid "
                        + "when executing pig with a pig script file)");
                return ReturnCode.ILLEGAL_ARGS;
            }
            // Interactive
            mode = ExecMode.SHELL;
            // Reader is created by first loading "pig.load.default.statements" or .pigbootup file if available
            ConsoleReader reader = new ConsoleReader(Utils.getCompositeStream(System.in, properties),
                    new OutputStreamWriter(System.out));
            reader.setDefaultPrompt("grunt> ");
            final String HISTORYFILE = ".pig_history";
            String historyFile = System.getProperty("user.home") + File.separator + HISTORYFILE;
            reader.setHistory(new History(new File(historyFile)));
            ConsoleReaderInputStream inputStream = new ConsoleReaderInputStream(reader);
            grunt = new Grunt(new BufferedReader(new InputStreamReader(inputStream)), pigContext);
            grunt.setConsoleReader(reader);
            gruntCalled = true;
            grunt.run();
            return ReturnCode.SUCCESS;
        } else {
            pigContext.getProperties().setProperty(PigContext.PIG_CMD_ARGS_REMAINDERS,
                    ObjectSerializer.serialize(remainders));
            // They have a pig script they want us to run.
            mode = ExecMode.FILE;
            FileLocalizer.FetchFileRet localFileRet = FileLocalizer.fetchFile(properties, remainders[0]);
            if (localFileRet.didFetch) {
                properties.setProperty("pig.jars.relative.to.dfs", "true");
            }
            scriptState.setFileName(remainders[0]);
            if (embedded) {
                return runEmbeddedScript(pigContext, localFileRet.file.getPath(), engine);
            } else {
                SupportedScriptLang type = determineScriptType(localFileRet.file.getPath());
                if (type != null) {
                    return runEmbeddedScript(pigContext, localFileRet.file.getPath(), type.name().toLowerCase());
                }
            }
            // Reader is created by first loading "pig.load.default.statements" or .pigbootup file if available
            InputStream seqInputStream = Utils.getCompositeStream(new FileInputStream(localFileRet.file),
                    properties);
            in = new BufferedReader(new InputStreamReader(seqInputStream));
            // run parameter substitution preprocessor first
            substFile = remainders[0] + ".substituted";
            pin = runParamPreprocessor(pigContext, in, substFile, debug || dryrun || checkScriptOnly);
            if (dryrun) {
                if (dryrun(substFile, pigContext)) {
                    log.info("Dry run completed. Substituted pig script is at " + substFile
                            + ". Expanded pig script is at " + remainders[0] + ".expanded");
                } else {
                    log.info("Dry run completed. Substituted pig script is at " + substFile);
                }
                return ReturnCode.SUCCESS;
            }
            logFileName = validateLogFile(logFileName, localFileRet.file);
            pigContext.getProperties().setProperty("pig.logfile", (logFileName == null ? "" : logFileName));
            if (!debug) {
                new File(substFile).deleteOnExit();
            }
            // Set job name based on name of the script
            pigContext.getProperties().setProperty(PigContext.JOB_NAME,
                    "PigLatin:" + new File(remainders[0]).getName());
            scriptState.setScript(localFileRet.file);
            grunt = new Grunt(pin, pigContext);
            gruntCalled = true;
            if (checkScriptOnly) {
                grunt.checkScript(substFile);
                System.err.println(remainders[0] + " syntax OK");
                rc = ReturnCode.SUCCESS;
            } else {
                int results[] = grunt.exec();
                rc = getReturnCodeForStats(results);
            }
            return rc;
        }
        // Per Utkarsh and Chris invocation of jar file via pig depricated.
    } catch (ParseException e) {
        usage();
        rc = ReturnCode.PARSE_EXCEPTION;
        PigStatsUtil.setErrorMessage(e.getMessage());
        PigStatsUtil.setErrorThrowable(e);
    } catch (org.apache.pig.tools.parameters.ParseException e) {
        // usage();
        rc = ReturnCode.PARSE_EXCEPTION;
        PigStatsUtil.setErrorMessage(e.getMessage());
        PigStatsUtil.setErrorThrowable(e);
    } catch (IOException e) {
        if (e instanceof PigException) {
            PigException pe = (PigException) e;
            rc = (pe.retriable()) ? ReturnCode.RETRIABLE_EXCEPTION : ReturnCode.PIG_EXCEPTION;
            PigStatsUtil.setErrorMessage(pe.getMessage());
            PigStatsUtil.setErrorCode(pe.getErrorCode());
        } else {
            rc = ReturnCode.IO_EXCEPTION;
            PigStatsUtil.setErrorMessage(e.getMessage());
        }
        PigStatsUtil.setErrorThrowable(e);
        if (!gruntCalled) {
            LogUtils.writeLog(e, logFileName, log, verbose, "Error before Pig is launched");
        }
    } catch (Throwable e) {
        rc = ReturnCode.THROWABLE_EXCEPTION;
        PigStatsUtil.setErrorMessage(e.getMessage());
        PigStatsUtil.setErrorThrowable(e);
        if (!gruntCalled) {
            LogUtils.writeLog(e, logFileName, log, verbose, "Error before Pig is launched");
        }
    } finally {
        if (printScriptRunTime) {
            printScriptRunTime(startTime);
        }
        if (deleteTempFiles) {
            // clear temp files
            FileLocalizer.deleteTempFiles();
        }
        if (pigContext != null) {
            pigContext.getExecutionEngine().destroy();
        }
        PerformanceTimerFactory.getPerfTimerFactory().dumpTimers();
    }
    return rc;
}
From source file:alluxio.Configuration.java
/**
 * Constructor with a flag to indicate whether system properties should be included. When the flag
 * is set to false, it is used for {@link Configuration} test class.
 * <p>
 * Merge order (later wins): runtime defaults -> bundled default properties file ->
 * optional site properties file -> (optionally) JVM system properties.
 *
 * @param includeSystemProperties whether to include the system properties
 */
Configuration(boolean includeSystemProperties) {
    // Load default
    Properties defaultProps = new Properties();
    // Override runtime default
    defaultProps.setProperty(Constants.MASTER_HOSTNAME, NetworkAddressUtils.getLocalHostName(250));
    defaultProps.setProperty(Constants.WORKER_WORKER_BLOCK_THREADS_MIN,
            String.valueOf(Runtime.getRuntime().availableProcessors()));
    defaultProps.setProperty(Constants.MASTER_WORKER_THREADS_MIN,
            String.valueOf(Runtime.getRuntime().availableProcessors()));
    defaultProps.setProperty(Constants.WORKER_NETWORK_NETTY_CHANNEL, String.valueOf(ChannelType.defaultType()));
    defaultProps.setProperty(Constants.USER_NETWORK_NETTY_CHANNEL, String.valueOf(ChannelType.defaultType()));
    // The bundled defaults file is mandatory; failure to find or read it is fatal.
    InputStream defaultInputStream = Configuration.class.getClassLoader().getResourceAsStream(DEFAULT_PROPERTIES);
    if (defaultInputStream == null) {
        throw new RuntimeException(ExceptionMessage.DEFAULT_PROPERTIES_FILE_DOES_NOT_EXIST.getMessage());
    }
    try {
        defaultProps.load(defaultInputStream);
    } catch (IOException e) {
        throw new RuntimeException(ExceptionMessage.UNABLE_TO_LOAD_PROPERTIES_FILE.getMessage(), e);
    }
    // Load site specific properties file; the site file is optional and a read
    // failure is only logged, not fatal.
    Properties siteProps = new Properties();
    InputStream siteInputStream = Configuration.class.getClassLoader().getResourceAsStream(SITE_PROPERTIES);
    if (siteInputStream != null) {
        try {
            siteProps.load(siteInputStream);
        } catch (IOException e) {
            LOG.warn("Unable to load site Alluxio configuration file.", e);
        }
    }
    // Load system properties
    Properties systemProps = new Properties();
    if (includeSystemProperties) {
        systemProps.putAll(System.getProperties());
    }
    // Now lets combine
    mProperties.putAll(defaultProps);
    mProperties.putAll(siteProps);
    mProperties.putAll(systemProps);
    // Update alluxio.master_address based on if Zookeeper is used or not.
    String masterHostname = mProperties.getProperty(Constants.MASTER_HOSTNAME);
    String masterPort = mProperties.getProperty(Constants.MASTER_RPC_PORT);
    boolean useZk = Boolean.parseBoolean(mProperties.getProperty(Constants.ZOOKEEPER_ENABLED));
    String masterAddress = (useZk ? Constants.HEADER_FT : Constants.HEADER) + masterHostname + ":" + masterPort;
    mProperties.setProperty(Constants.MASTER_ADDRESS, masterAddress);
    checkUserFileBufferBytes();
    // Make sure the user hasn't set worker ports when there may be multiple workers per host
    int maxWorkersPerHost = getInt(Constants.INTEGRATION_YARN_WORKERS_PER_HOST_MAX);
    if (maxWorkersPerHost > 1) {
        String message = "%s cannot be specified when allowing multiple workers per host with "
                + Constants.INTEGRATION_YARN_WORKERS_PER_HOST_MAX + "=" + maxWorkersPerHost;
        Preconditions.checkState(System.getProperty(Constants.WORKER_DATA_PORT) == null,
                String.format(message, Constants.WORKER_DATA_PORT));
        Preconditions.checkState(System.getProperty(Constants.WORKER_RPC_PORT) == null,
                String.format(message, Constants.WORKER_RPC_PORT));
        Preconditions.checkState(System.getProperty(Constants.WORKER_WEB_PORT) == null,
                String.format(message, Constants.WORKER_WEB_PORT));
        // Port 0 lets the OS assign a free port for each worker.
        mProperties.setProperty(Constants.WORKER_DATA_PORT, "0");
        mProperties.setProperty(Constants.WORKER_RPC_PORT, "0");
        mProperties.setProperty(Constants.WORKER_WEB_PORT, "0");
    }
}
From source file:com.jkoolcloud.tnt4j.streams.inputs.KafkaStream.java
/**
 * Returns scope defined properties set.
 * <p>
 * The returned set is the scope-specific properties merged with the common-scope
 * properties (unless the requested scope IS the common scope).
 * NOTE(review): because the common-scope properties are applied last, a key
 * present in both scopes ends up with the common-scope value — confirm this
 * override direction is intended.
 *
 * @param scope properties scope key
 * @return scope defined properties
 */
protected Properties getScopeProps(String scope) {
    Properties merged = new Properties();
    Properties scoped = userKafkaProps.get(scope);
    if (scoped != null) {
        merged.putAll(scoped);
    }
    if (!PROP_SCOPE_COMMON.equals(scope)) {
        Properties common = userKafkaProps.get(PROP_SCOPE_COMMON);
        if (common != null) {
            merged.putAll(common);
        }
    }
    return merged;
}
From source file:org.apache.druid.indexing.kafka.test.TestBroker.java
/**
 * Boots the embedded test Kafka broker. Default broker settings are filled in
 * first, then {@code brokerProps} is overlaid so caller-supplied values take
 * precedence; the broker listens on a random port in [10000, 19998].
 */
public void start() {
    final Properties brokerConfig = new Properties();
    brokerConfig.setProperty("zookeeper.connect", zookeeperConnect);
    brokerConfig.setProperty("zookeeper.session.timeout.ms", "30000");
    brokerConfig.setProperty("zookeeper.connection.timeout.ms", "30000");
    brokerConfig.setProperty("log.dirs", directory.toString());
    brokerConfig.setProperty("broker.id", String.valueOf(id));
    // ThreadLocalRandom: 10000 + [0, 9998].
    brokerConfig.setProperty("port", String.valueOf(ThreadLocalRandom.current().nextInt(9999) + 10000));
    brokerConfig.setProperty("advertised.host.name", "localhost");
    // Overlay user-supplied settings last so they override the defaults.
    brokerConfig.putAll(brokerProps);
    final KafkaConfig kafkaConfig = new KafkaConfig(brokerConfig);
    server = new KafkaServer(kafkaConfig, SystemTime.SYSTEM,
            Some.apply(StringUtils.format("TestingBroker[%d]-", id)), List$.MODULE$.empty());
    server.startup();
}
From source file:gobblin.util.test.RetentionTestHelper.java
/**
 * Does gobblin retention for test data. {@link DatasetCleaner} which does retention in production can not be directly called as we need to resolve some
 * runtime properties like ${testNameTempPath}. This directory contains all the setup data created for a test by {@link RetentionTestDataGenerator#setup()}.
 * It is unique for each test.
 * The default {@link ConfigClient} used by {@link DatasetCleaner} connects to config store configs. We need to provide a
 * mock {@link ConfigClient} since the configs are in classpath and not on config store.
 *
 * @param retentionConfigClasspathResource this is the same jobProps/config files used while running a real retention job
 * @param testNameTempPath temp path for this test where test data is generated
 */
public static void clean(FileSystem fs, Path retentionConfigClasspathResource,
        Optional<Path> additionalJobPropsClasspathResource, Path testNameTempPath) throws Exception {
    // Optional extra job properties loaded from the classpath.
    Properties additionalJobProps = new Properties();
    if (additionalJobPropsClasspathResource.isPresent()) {
        try (final InputStream stream = RetentionTestHelper.class.getClassLoader()
                .getResourceAsStream(additionalJobPropsClasspathResource.get().toString())) {
            additionalJobProps.load(stream);
        }
    }
    if (retentionConfigClasspathResource.getName().endsWith(".job")) {
        // .job file path: load job properties and substitute the ${testNameTempPath}
        // placeholder in every value with this test's temp path.
        Properties jobProps = new Properties();
        try (final InputStream stream = RetentionTestHelper.class.getClassLoader()
                .getResourceAsStream(retentionConfigClasspathResource.toString())) {
            jobProps.load(stream);
            for (Entry<Object, Object> entry : jobProps.entrySet()) {
                jobProps.put(entry.getKey(), StringUtils.replace((String) entry.getValue(), "${testNameTempPath}",
                        testNameTempPath.toString()));
            }
        }
        MultiCleanableDatasetFinder finder = new MultiCleanableDatasetFinder(fs, jobProps);
        for (Dataset dataset : finder.findDatasets()) {
            ((CleanableDataset) dataset).clean();
        }
    } else {
        // Typesafe-config path: resolve testNameTempPath via a config fallback and
        // serve the config through a mocked ConfigClient (no config store available).
        Config testConfig = ConfigFactory.parseResources(retentionConfigClasspathResource.toString())
                .withFallback(ConfigFactory.parseMap(ImmutableMap.of("testNameTempPath",
                        PathUtils.getPathWithoutSchemeAndAuthority(testNameTempPath).toString())))
                .resolve();
        ConfigClient client = mock(ConfigClient.class);
        when(client.getConfig(any(String.class))).thenReturn(testConfig);
        Properties jobProps = new Properties();
        jobProps.setProperty(CleanableDatasetBase.SKIP_TRASH_KEY, Boolean.toString(true));
        jobProps.setProperty(ConfigurationKeys.CONFIG_MANAGEMENT_STORE_URI, "dummy");
        jobProps.putAll(additionalJobProps);
        // Instantiate whichever finder class the test config names, trying the
        // 4-arg constructor first and falling back to the 3-arg one.
        @SuppressWarnings("unchecked")
        DatasetsFinder<CleanableDataset> finder = (DatasetsFinder<CleanableDataset>) GobblinConstructorUtils
                .invokeFirstConstructor(
                        Class.forName(testConfig.getString(MultiCleanableDatasetFinder.DATASET_FINDER_CLASS_KEY)),
                        ImmutableList.of(fs, jobProps, testConfig, client), ImmutableList.of(fs, jobProps, client));
        for (CleanableDataset dataset : finder.findDatasets()) {
            dataset.clean();
        }
    }
}