List of usage examples for java.lang.System.getenv
public static String getenv(String name)
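Before the project examples below, a minimal self-contained sketch of the call: getenv returns the variable's value, or null when the variable is not set, so callers typically supply a fallback. The variable name and default path here are illustrative assumptions, not taken from any of the projects below.

public class GetenvDemo {
    public static void main(String[] args) {
        // System.getenv(name) returns the value of the named environment variable,
        // or null if the variable is not defined in this process's environment.
        String configDir = System.getenv("APP_CONFIG_DIR"); // "APP_CONFIG_DIR" is a hypothetical name
        if (configDir == null) {
            configDir = "/etc/myapp"; // hypothetical fallback default
        }
        System.out.println("Using config directory: " + configDir);
    }
}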
From source file:com.redhat.che.multitenant.toggle.CheServiceAccountTokenToggle.java
private String getHostname() {
    String hostname = System.getenv(HOSTNAME_ENV_VAR);
    LOG.info("HOSTNAME: {}", hostname);
    return StringUtils.isBlank(hostname) ? DEFAULT_HOSTNAME : hostname;
}
From source file:com.seleniumtests.ut.browserfactory.mobile.TestLocalAppiumLauncher.java
private void initValidAppiumInstallation() throws IOException {
    PowerMockito.mockStatic(FileUtils.class);
    PowerMockito.mockStatic(System.class);
    when(System.getenv("APPIUM_HOME")).thenReturn("/opt/appium/");
    when(FileUtils.readFileToString(new File("/opt/appium/node_modules/appium/package.json")))
            .thenReturn("{\"name\":\"appium\",\"version\":\"1.4.13\"}");
}
From source file:net.brtly.monkeyboard.Configuration.java
public File getPreferencesFile() {
    String rv;
    if (System.getProperty("act.config") != null) {
        rv = System.getProperty("act.config");
    } else {
        if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
            rv = System.getenv("APPDATA") + "\\AndroidCommandTool.ini";
        } else {
            rv = System.getenv("HOME") + "/.AndroidCommandTool.ini";
        }
    }
    return new File(rv);
}
From source file:lumbermill.aws.kcl.internal.KinesisConsumerBootstrap.java
public KinesisConsumerBootstrap(KinesisClientLibConfiguration kinesisCfg, UnitOfWorkListener unitOfWorkListener,
        ExceptionStrategy exceptionStrategy, Metrics metricsCallback, boolean dry) {
    this.kinesisCfg = kinesisCfg;
    this.unitOfWorkListener = unitOfWorkListener;
    this.exceptionStrategy = exceptionStrategy;
    this.metricsCallback = metricsCallback;
    this.dry = dry;
    String httpsProxy = System.getenv("https_proxy");
    if (StringUtils.isNotEmpty(httpsProxy)) {
        URI proxy = URI.create(httpsProxy);
        kinesisCfg.getKinesisClientConfiguration().setProxyHost(proxy.getHost());
        kinesisCfg.getKinesisClientConfiguration().setProxyPort(proxy.getPort());
        kinesisCfg.getDynamoDBClientConfiguration().setProxyHost(proxy.getHost());
        kinesisCfg.getDynamoDBClientConfiguration().setProxyPort(proxy.getPort());
        kinesisCfg.getCloudWatchClientConfiguration().setProxyHost(proxy.getHost());
        kinesisCfg.getCloudWatchClientConfiguration().setProxyPort(proxy.getPort());
    }
}
From source file:com.ibm.controllers.WatsonController.java
public WatsonController() {
    FACEAPIKEY = System.getenv("FACEAPIKEY");
    TEXTUSERNAME = System.getenv("TEXTUSERNAME");
    TEXTPASSWORD = System.getenv("TEXTPASSWORD");
}
From source file:idgs.client.ProtoSerdeTest.java
/**
 * 1. json => pb1, pb1 => json, json => pb2
 * 2. pb2 => binary, binary => pb3
 * 3. pb3 => text, text => pb4
 * assert pb1 == pb2 == pb3 == pb4
 */
public void testSerde() throws IOException {
    log.info("test json <=> pb, binary <=> pb, text <=> pb serialize/deserialize");
    // json serde
    // String jsonFile = getClass().getResource("/client.conf").getPath();
    String jsonFile = "conf/client.conf";
    String home = System.getenv("IDGS_HOME");
    if (home != null) {
        jsonFile = home + "/" + jsonFile;
    } else {
        jsonFile = "../../conf/client.conf";
    }
    final String expectString = JsonUtil.getJsonFromFile(jsonFile).toString();
    ProtoSerde serde = ProtoSerdeFactory.createSerde(1);
    ClientConfig.Builder builder = ClientConfig.newBuilder();
    serde.deserializeFromByteArray(builder, expectString.getBytes());
    ClientConfig cfg1 = builder.build();
    byte[] array = serde.serializeToByteArray(cfg1);
    final String actualString = new String(array);
    builder = ClientConfig.newBuilder();
    serde.deserializeFromByteArray(builder, actualString.getBytes());
    ClientConfig cfg2 = builder.build();
    // binary serde
    serde = ProtoSerdeFactory.createSerde();
    array = serde.serializeToByteArray(cfg1);
    builder = ClientConfig.newBuilder();
    serde.deserializeFromByteArray(builder, array);
    ClientConfig cfg3 = builder.build();
    // text serde
    serde = ProtoSerdeFactory.createSerde(2);
    array = serde.serializeToByteArray(cfg2);
    builder = ClientConfig.newBuilder();
    serde.deserializeFromByteArray(builder, array);
    ClientConfig cfg4 = builder.build();
    assertEquals(cfg1, cfg2);
    assertEquals(cfg2, cfg3);
    assertEquals(cfg3, cfg4);
    log.debug(cfg4.toString());
}
From source file:azkaban.jobtype.ReportalHiveRunner.java
@Override
protected void runReportal() throws Exception {
    System.out.println("Reportal Hive: Setting up Hive");
    HiveConf conf = new HiveConf(SessionState.class);
    if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
        conf.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
    }
    File tempTSVFile = new File("./temp.tsv");
    OutputStream tsvTempOutputStream = new BoundedOutputStream(
            new BufferedOutputStream(new FileOutputStream(tempTSVFile)), outputCapacity);
    PrintStream logOut = System.out;
    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    // criccomini@linkedin.com: I disabled this because it appears to swallow
    // all future logging (even outside of hive).
    // SessionState.initHiveLog4j();
    String orig = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
    CliSessionState sessionState = new CliSessionState(conf);
    sessionState.in = System.in;
    sessionState.out = new PrintStream(tsvTempOutputStream, true, "UTF-8");
    sessionState.err = new PrintStream(logOut, true, "UTF-8");
    OptionsProcessor oproc = new OptionsProcessor();
    // Feed in Hive Args
    String[] args = buildHiveArgs();
    if (!oproc.process_stage1(args)) {
        throw new Exception("unable to parse options stage 1");
    }
    if (!oproc.process_stage2(sessionState)) {
        throw new Exception("unable to parse options stage 2");
    }
    // Set all properties specified via command line
    for (Map.Entry<Object, Object> item : sessionState.cmdProperties.entrySet()) {
        conf.set((String) item.getKey(), (String) item.getValue());
    }
    SessionState.start(sessionState);
    String expanded = expandHiveAuxJarsPath(orig);
    if (orig == null || orig.equals(expanded)) {
        System.out.println("Hive aux jars variable not expanded");
    } else {
        System.out.println("Expanded aux jars variable from [" + orig + "] to [" + expanded + "]");
        HiveConf.setVar(conf, HiveConf.ConfVars.HIVEAUXJARS, expanded);
    }
    if (!ShimLoader.getHadoopShims().usesJobShell()) {
        // hadoop-20 and above - we need to augment classpath using hiveconf
        // components
        // see also: code in ExecDriver.java
        ClassLoader loader = conf.getClassLoader();
        String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
        System.out.println("Got auxJars = " + auxJars);
        if (StringUtils.isNotBlank(auxJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        }
        conf.setClassLoader(loader);
        Thread.currentThread().setContextClassLoader(loader);
    }
    CliDriver cli = new CliDriver();
    int returnValue = 0;
    String prefix = "";
    returnValue = cli.processLine("set hive.cli.print.header=true;");
    String[] queries = jobQuery.split("\n");
    for (String line : queries) {
        if (!prefix.isEmpty()) {
            prefix += '\n';
        }
        if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
            line = prefix + line;
            line = injectVariables(line);
            System.out.println("Reportal Hive: Running Hive Query: " + line);
            System.out.println("Reportal Hive: HiveConf HIVEAUXJARS: "
                    + HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS));
            returnValue = cli.processLine(line);
            prefix = "";
        } else {
            prefix = prefix + line;
            continue;
        }
    }
    tsvTempOutputStream.close();
    // convert tsv to csv and write it to disk
    System.out.println("Reportal Hive: Converting output");
    InputStream tsvTempInputStream = new BufferedInputStream(new FileInputStream(tempTSVFile));
    Scanner rowScanner = new Scanner(tsvTempInputStream);
    PrintStream csvOutputStream = new PrintStream(outputStream);
    while (rowScanner.hasNextLine()) {
        String tsvLine = rowScanner.nextLine();
        // strip all quotes, and then quote the columns
        csvOutputStream.println("\"" + tsvLine.replace("\"", "").replace("\t", "\",\"") + "\"");
    }
    rowScanner.close();
    csvOutputStream.close();
    // Flush the temp file out
    tempTSVFile.delete();
    if (returnValue != 0) {
        throw new Exception("Hive query finished with a non zero return code");
    }
    System.out.println("Reportal Hive: Ended successfully");
}
From source file:com.antsdb.saltedfish.sql.FishCommandLine.java
public File getHome() {
    String home = cmd.getOptionValue("home");
    if (home == null) {
        home = System.getenv("ANTSDB_HOME");
    }
    if (home == null) {
        println("error: home directory is not specified");
        return null;
    }
    File file = new File(home);
    if (!new File(file, "data/checkpoint.bin").exists()) {
        println("error: home directory '%s' is invalid", file.getAbsolutePath());
        return null;
    }
    return file;
}
From source file:net.sourceforge.dita4publishers.tools.common.MapBosProcessorBase.java
/**
 * @param bosOptions
 */
protected void setupCatalogs(BosConstructionOptions bosOptions) {
    String[] catalogs = new String[1];
    File catalogFile = null;
    if (commandLine.hasOption(CATALOG_OPTION_ONE_CHAR)) {
        String catalogPath = commandLine.getOptionValue(CATALOG_OPTION_ONE_CHAR);
        catalogFile = new File(catalogPath);
    } else {
        String ditaHome = System.getenv("DITA_HOME");
        if (ditaHome != null && !"".equals(ditaHome.trim())) {
            File ditaDir = new File(ditaHome);
            catalogFile = new File(ditaDir, "catalog-dita.xml");
        }
    }
    if (catalogFile != null) {
        try {
            checkExistsAndCanRead(catalogFile);
        } catch (Exception e) {
            System.err.println(
                    "Catalog file \"" + catalogFile.getAbsolutePath() + "\" does not exist or cannot be read.");
            System.exit(1);
        }
        catalogs[0] = catalogFile.getAbsolutePath();
    }
    bosOptions.setCatalogs(catalogs);
}
From source file:com.amazonaws.services.simpleworkflow.flow.examples.common.ConfigHelper.java
public static ConfigHelper createConfig() throws IOException, IllegalArgumentException {
    BasicConfigurator.configure();
    Logger.getRootLogger().setLevel(Level.INFO);
    // Logger.getLogger("org.apache.http").setLevel(Level.INFO);
    ConfigHelper configHelper = null;
    boolean envVariableExists = false;
    // first check the existence of the environment variable
    String sampleConfigPath = System.getenv(SampleConstants.ACCESS_PROPERTIES_ENVIRONMENT_VARIABLE);
    if (sampleConfigPath != null && sampleConfigPath.length() > 0) {
        envVariableExists = true;
    }
    File accessProperties = new File(System.getProperty(SampleConstants.HOME_DIRECTORY_PROPERTY),
            SampleConstants.HOME_DIRECTORY_FILENAME);
    if (accessProperties.exists()) {
        configHelper = new ConfigHelper(accessProperties);
    } else if (envVariableExists) {
        accessProperties = new File(sampleConfigPath, SampleConstants.ACCESS_PROPERTIES_FILENAME);
        configHelper = new ConfigHelper(accessProperties);
    } else {
        // try checking for the file on a relative path.
        try {
            accessProperties = new File(SampleConstants.ACCESS_PROPERTIES_RELATIVE_PATH,
                    SampleConstants.ACCESS_PROPERTIES_FILENAME);
            configHelper = new ConfigHelper(accessProperties);
        } catch (Exception e) {
            throw new FileNotFoundException(
                    "Cannot find AWS_SWF_SAMPLES_CONFIG environment variable, Exiting!!!");
        }
    }
    return configHelper;
}