List of usage examples for java.util.Properties.setProperty
public synchronized Object setProperty(String key, String value)
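Before the collected examples, a minimal self-contained sketch of the call itself: setProperty stores the pair via the underlying Hashtable and returns the previous value mapped to the key, or null if the key was absent. The file name and keys below are illustrative.

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Properties;

public class SetPropertyDemo {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();
        // First call: no previous mapping, so the return value is null
        Object previous = props.setProperty("app.mode", "production");
        System.out.println(previous); // null
        // Second call: returns the value that was just replaced
        previous = props.setProperty("app.mode", "debug");
        System.out.println(previous); // production
        // Persist the properties in the standard key=value text format
        try (OutputStream out = new FileOutputStream("app.properties")) {
            props.store(out, "demo settings");
        }
    }
}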
From source file:main.java.com.aosa.util.AOSAReadProperties.java
/**
 * Writes a key/value pair into a <code>Properties</code> file.<br>
 * By mutou at 2012-1-12 05:58:16<br>
 *
 * @param profileName the properties file name on the ClassPath
 * @param key the key to write
 * @param value the value to write
 * @return true if the value was written successfully
 */
public static boolean WriteValue(String profileName, String key, String value) {
    boolean isWrite = false;
    Properties properties = new Properties();
    InputStream input = AOSAReadProperties.class.getClassLoader().getResourceAsStream(profileName);
    try {
        properties.load(input);
        properties.setProperty(key, value);
        URL prourl = AOSAReadProperties.class.getClassLoader().getResource(profileName);
        String filepath = prourl.getFile();
        FileOutputStream fileOutputStream = new FileOutputStream(filepath);
        properties.store(fileOutputStream, "Custom shutDownTime config : conf.properties");
        fileOutputStream.flush();
        fileOutputStream.close();
        isWrite = true;
    } catch (FileNotFoundException e) {
        logger.debug("Properties file not found");
        throw new AOSARuntimeException("Properties file not found", e);
    } catch (IOException e) {
        logger.debug("Properties IO error");
        throw new AOSARuntimeException("Properties IO error", e);
    } finally {
        if (input != null) {
            try {
                input.close();
            } catch (IOException e) {
                logger.debug("IO close failed");
                throw new AOSARuntimeException("IO close failed", e);
            }
        }
    }
    return isWrite;
}
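A possible call site, assuming the AOSAReadProperties class above is available (file name, key, and value are illustrative):

// Rewrites conf.properties found on the classpath; returns true on success
boolean written = AOSAReadProperties.WriteValue("conf.properties", "shutDownTime", "23:30");

Note that writing back through getResource().getFile() only works while the resource lives on the file system; it fails for resources packaged inside a jar.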
From source file:com.github.tddts.jet.util.Util.java
/**
 * Saves the given property to the given property file.
 *
 * @param fileName property file path
 * @param key property key
 * @param value property value
 */
public static void saveProperty(String fileName, String key, String value) {
    try {
        File file = new File(fileName);
        if (!file.exists())
            file.createNewFile();
        Properties properties = new Properties();
        try (InputStream in = FileUtils.openInputStream(file)) {
            properties.load(in);
        }
        properties.setProperty(key, value);
        try (OutputStream out = FileUtils.openOutputStream(file)) {
            properties.store(out, "");
        }
    } catch (IOException e) {
        throw new ApplicationException(e);
    }
}
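A possible call, assuming the Util class above (path and key are illustrative). Existing entries in the file survive because the method loads the file before storing it back:

// Creates config/app.properties if missing, then upserts one entry
Util.saveProperty("config/app.properties", "window.width", "1280");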
From source file:org.apache.drill.jdbc.test.JdbcAssert.java
public static ModelAndSchema withFull(String schema) {
    final Properties info = new Properties();
    info.setProperty("schema", schema);
    return new ModelAndSchema(info, false);
}
From source file:com.googlecode.fascinator.storage.jclouds.BlobStoreClient.java
/**
 * Establish a connection to the BlobStore, then return the instantiated
 * BlobStore client used to connect.
 *
 * @return BlobStore: The client used to connect to the API
 * @throws StorageException if there was an error
 */
private static BlobStore blobStoreConnect() throws StorageException {
    if (blobStore != null && connectCount < 100) {
        return blobStore;
    }
    connectCount = 0;
    ContextBuilder contextBuilder = ContextBuilder.newBuilder(provider);
    // If we're using filesystem, set local directory to write objects to
    if ("filesystem".equals(provider)) {
        if (supportsUserMetadataSetting != null) {
            supportsUserMetadata = supportsUserMetadataSetting;
        } else {
            File storageDir = new File(fileSystemLocation);
            if (!storageDir.exists()) {
                try {
                    FileUtils.forceMkdir(storageDir);
                    // Java doesn't support extended attributes on some file
                    // systems such as FAT32 and HFS. As JClouds uses them to
                    // store user metadata, we'll need to store it differently
                    // on these file systems.
                    if (!Files.getFileStore(storageDir.toPath())
                            .supportsFileAttributeView(UserDefinedFileAttributeView.class)) {
                        supportsUserMetadata = false;
                    }
                } catch (IOException e) {
                    throw new StorageException("Failed to create storage directory", e);
                }
            }
        }
        Properties properties = new Properties();
        properties.setProperty(FilesystemConstants.PROPERTY_BASEDIR, fileSystemLocation);
        contextBuilder.overrides(properties);
    } else if ("gridfs".equals(provider)) {
        Properties properties = new Properties();
        properties.setProperty(Constants.PROPERTY_ENDPOINT, gridFsConnectionString);
        contextBuilder.overrides(properties);
    }
    context = contextBuilder.credentials(identity, credential)
            .endpoint("https://keystone.rc.nectar.org.au:5000/v2.0").buildView(BlobStoreContext.class);
    blobStore = context.getBlobStore();
    Location loc = null;
    if (StringUtils.isNotEmpty(location)) {
        for (Location assignableLoc : blobStore.listAssignableLocations()) {
            if (assignableLoc.getId().equalsIgnoreCase(location)) {
                loc = assignableLoc;
                break;
            }
        }
        if (loc == null) {
            throw new StorageException(location + " location not found in Blobstore");
        }
    }
    blobStore.createContainerInLocation(loc, containerName);
    return blobStore;
}
From source file:org.apache.drill.jdbc.test.JdbcAssert.java
public static ModelAndSchema withModel(String model, String schema) {
    final Properties info = new Properties();
    info.setProperty("schema", schema);
    info.setProperty("model", "inline:" + model);
    return new ModelAndSchema(info);
}
From source file:com.streamsets.datacollector.util.ClusterUtil.java
public static void setupCluster(String testName, String pipelineJson, YarnConfiguration yarnConfiguration)
        throws Exception {
    System.setProperty("sdc.testing-mode", "true");
    System.setProperty(MiniSDCTestingUtility.PRESERVE_TEST_DIR, "true");
    yarnConfiguration.set("yarn.nodemanager.delete.debug-delay-sec", "600");
    miniSDCTestingUtility = new MiniSDCTestingUtility();
    File dataTestDir = miniSDCTestingUtility.getDataTestDir();

    // copy spark files under the test data directory into a dir called "spark"
    File sparkHome = ClusterUtil.createSparkHome(dataTestDir);

    // start mini yarn cluster
    miniYarnCluster = miniSDCTestingUtility.startMiniYarnCluster(testName, 1, 1, 1, yarnConfiguration);
    Configuration config = miniYarnCluster.getConfig();

    long deadline = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(10);
    // use equals() here: comparing strings with == tests identity, not content
    while ("0".equals(config.get(YarnConfiguration.RM_ADDRESS).split(":")[1])) {
        if (System.currentTimeMillis() > deadline) {
            throw new IllegalStateException("Timed out waiting for RM to come up.");
        }
        LOG.debug("RM address still not set in configuration, waiting...");
        TimeUnit.MILLISECONDS.sleep(100);
    }
    LOG.debug("RM at " + config.get(YarnConfiguration.RM_ADDRESS));

    Properties sparkHadoopProps = new Properties();
    for (Map.Entry<String, String> entry : config) {
        sparkHadoopProps.setProperty("spark.hadoop." + entry.getKey(), entry.getValue());
    }
    LOG.debug("Creating spark properties file at " + dataTestDir);
    File propertiesFile = new File(dataTestDir, "spark.properties");
    propertiesFile.createNewFile();
    FileOutputStream sdcOutStream = new FileOutputStream(propertiesFile);
    sparkHadoopProps.store(sdcOutStream, null);
    sdcOutStream.flush();
    sdcOutStream.close();

    // Need to pass this property file to spark-submit for it to pick up yarn confs
    System.setProperty(SPARK_PROPERTY_FILE, propertiesFile.getAbsolutePath());
    File sparkBin = new File(sparkHome, "bin");
    for (File file : sparkBin.listFiles()) {
        MiniSDCTestingUtility.setExecutePermission(file.toPath());
    }

    miniSDC = miniSDCTestingUtility.createMiniSDC(MiniSDC.ExecutionMode.CLUSTER);
    miniSDC.startSDC();
    serverURI = miniSDC.getServerURI();
    miniSDC.createPipeline(pipelineJson);
    miniSDC.startPipeline();

    // Hard wait for up to 2 minutes (24 attempts x 5 seconds)
    int attempt = 0;
    while (miniSDC.getListOfSlaveSDCURI().size() == 0 && attempt < 24) {
        Thread.sleep(5000);
        attempt++;
        LOG.debug("Attempt no: " + attempt + " to retrieve list of slaves");
    }
    if (miniSDC.getListOfSlaveSDCURI().size() == 0) {
        throw new IllegalStateException("Timed out waiting for slaves to come up.");
    }
}
From source file:cl.niclabs.tscrypto.common.utils.Util.java
public static void trimProperties(Properties props) {
    for (String name : props.stringPropertyNames()) {
        props.setProperty(name, props.getProperty(name).trim());
    }
}
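A self-contained usage sketch of the helper above (keys and padding are illustrative); setProperty overwrites each entry in place with its trimmed value:

Properties props = new Properties();
props.setProperty("host", "  example.org  ");
props.setProperty("port", " 8080");
Util.trimProperties(props);
System.out.println("[" + props.getProperty("host") + "]"); // [example.org]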
From source file:com.izforge.izpack.installer.bootstrap.Installer.java
private static void initializeLogging(String logFileName) throws IOException {
    if (logFileName != null) {
        final Properties props = new Properties();
        final String cname = FileHandler.class.getName();
        props.setProperty("handlers", cname);
        props.setProperty(cname + ".pattern", FilenameUtils.normalize(logFileName));
        props.setProperty(cname + ".formatter", FileFormatter.class.getName());
        props.setProperty(ConsoleHandler.class.getName() + ".level", "OFF");
        props.setProperty(".level", "OFF");
        LogUtils.loadConfiguration(props);
    } else {
        LogUtils.loadConfiguration();
    }
    logger = Logger.getLogger(Installer.class.getName());
}
From source file:com.glaf.core.jdbc.DBConnectionFactory.java
public static Properties getDatabaseTypeMappings() {
    Properties databaseTypeMappings = new Properties();
    databaseTypeMappings.setProperty("H2", "h2");
    databaseTypeMappings.setProperty("MySQL", "mysql");
    databaseTypeMappings.setProperty("Oracle", "oracle");
    databaseTypeMappings.setProperty("PostgreSQL", "postgresql");
    databaseTypeMappings.setProperty("Microsoft SQL Server", "sqlserver");
    databaseTypeMappings.setProperty("SQLite", "sqlite");
    databaseTypeMappings.setProperty("DB2", "db2");
    databaseTypeMappings.setProperty("DB2/NT", "db2");
    databaseTypeMappings.setProperty("DB2/NT64", "db2");
    databaseTypeMappings.setProperty("DB2 UDP", "db2");
    databaseTypeMappings.setProperty("DB2/LINUX", "db2");
    databaseTypeMappings.setProperty("DB2/LINUX390", "db2");
    databaseTypeMappings.setProperty("DB2/LINUXZ64", "db2");
    databaseTypeMappings.setProperty("DB2/LINUXX8664", "db2");
    databaseTypeMappings.setProperty("DB2/400 SQL", "db2");
    databaseTypeMappings.setProperty("DB2/6000", "db2");
    databaseTypeMappings.setProperty("DB2 UDB iSeries", "db2");
    databaseTypeMappings.setProperty("DB2/AIX64", "db2");
    databaseTypeMappings.setProperty("DB2/HPUX", "db2");
    databaseTypeMappings.setProperty("DB2/HP64", "db2");
    databaseTypeMappings.setProperty("DB2/SUN", "db2");
    databaseTypeMappings.setProperty("DB2/SUN64", "db2");
    databaseTypeMappings.setProperty("DB2/PTX", "db2");
    databaseTypeMappings.setProperty("DB2/2", "db2");
    return databaseTypeMappings;
}
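The keys of this mapping are database product names as JDBC metadata reports them, so a plausible lookup (the surrounding DBConnectionFactory code is not shown here; the product name is illustrative) would be:

// In practice the name would come from
// connection.getMetaData().getDatabaseProductName()
String databaseProductName = "DB2/LINUXX8664";
String databaseType = getDatabaseTypeMappings().getProperty(databaseProductName);
System.out.println(databaseType); // db2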
From source file:edumsg.core.PostgresConnection.java
public static void initSource() {
    try {
        try {
            Class.forName("org.postgresql.Driver");
        } catch (ClassNotFoundException ex) {
            LOGGER.log(Level.SEVERE, "Error loading Postgres driver: " + ex.getMessage(), ex);
        }
        try {
            readConfFile();
        } catch (Exception e) {
            e.printStackTrace();
        }
        Properties props = new Properties();
        // System.out.println(DB_USERNAME);
        props.setProperty("user", DB_USERNAME);
        props.setProperty("password", DB_PASSWORD);
        props.setProperty("initialSize", DB_INIT_CONNECTIONS);
        props.setProperty("maxActive", DB_MAX_CONNECTIONS);
        ConnectionFactory connectionFactory = new DriverManagerConnectionFactory(DB_URL, props);
        PoolableConnectionFactory poolableConnectionFactory = new PoolableConnectionFactory(connectionFactory,
                null);
        poolableConnectionFactory.setPoolStatements(true);
        GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig();
        poolConfig.setMaxIdle(Integer.parseInt(DB_INIT_CONNECTIONS));
        poolConfig.setMaxTotal(Integer.parseInt(DB_MAX_CONNECTIONS));
        ObjectPool<PoolableConnection> connectionPool = new GenericObjectPool<>(poolableConnectionFactory,
                poolConfig);
        poolableConnectionFactory.setPool(connectionPool);
        Class.forName("org.apache.commons.dbcp2.PoolingDriver");
        dbDriver = (PoolingDriver) DriverManager.getDriver("jdbc:apache:commons:dbcp:");
        dbDriver.registerPool(DB_NAME, connectionPool);
        dataSource = new PoolingDataSource<>(connectionPool);
    } catch (Exception ex) {
        LOGGER.log(Level.SEVERE, "Got error initializing data source: " + ex.getMessage(), ex);
    }
}
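Once initSource has registered the pool under DB_NAME, callers can borrow connections through the DBCP2 PoolingDriver URL prefix. A plausible sketch (the query is illustrative):

try (Connection conn = DriverManager.getConnection("jdbc:apache:commons:dbcp:" + DB_NAME);
     Statement stmt = conn.createStatement();
     ResultSet rs = stmt.executeQuery("SELECT 1")) {
    while (rs.next()) {
        System.out.println(rs.getInt(1)); // 1
    }
}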