List of usage examples for java.util.Properties
public Properties()
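The no-argument constructor creates an empty property list with no default values. Before the project-specific examples below, here is a minimal sketch of the typical create/store/load round trip; the file name config.properties and the keys used are purely illustrative and do not come from the examples that follow.

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Properties;

public class PropertiesQuickStart {
    public static void main(String[] args) throws IOException {
        // Create an empty property table and populate it in code.
        Properties props = new Properties();
        props.setProperty("db.host", "localhost"); // illustrative key/value
        props.setProperty("db.port", "5432");

        // Persist the table to disk in the standard key=value format.
        try (FileOutputStream out = new FileOutputStream("config.properties")) {
            props.store(out, "example settings");
        }

        // Load it back; getProperty falls back to the supplied default when the key is absent.
        Properties loaded = new Properties();
        try (FileInputStream in = new FileInputStream("config.properties")) {
            loaded.load(in);
        }
        System.out.println(loaded.getProperty("db.host"));
        System.out.println(loaded.getProperty("db.timeout", "30")); // default is used here
    }
}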
From source file:com.retroduction.carma.application.Carma.java
/**
 * Command line test runner, reads configuration from mutationconfig.xml.
 *
 * @throws ParseException
 * @throws IOException
 */
public static void main(String[] args) throws ParseException {
    CommandLine line = new CLIValidator().readCLI(args);

    File customPropertiesFile;
    if (line.hasOption(CLIValidator.USER_CONFIG_OPTION_SHORT)) {
        customPropertiesFile = new File(line.getOptionValue(CLIValidator.USER_CONFIG_OPTION_SHORT));
    } else {
        customPropertiesFile = new File(DEFAULT_USER_CONFIG);
    }

    CarmaDriverSetup setup = new CarmaDriverSetup();

    Properties customProps = new Properties();
    try {
        customProps.load(new FileInputStream(customPropertiesFile));
    } catch (IOException e) {
        throw new CarmaException("Failed to load configuration", e);
    }
    setup.addCustomConfiguration(customProps);

    Core driver = setup.getDriver();
    driver.execute();
}
From source file:ImageStringToBlob.java
public static void main(String[] args) {
    Connection conn = null;

    if (args.length != 1) {
        System.out.println("Missing argument: full path to <oscar.properties>");
        return;
    }

    try {
        FileInputStream fin = new FileInputStream(args[0]);
        Properties prop = new Properties();
        prop.load(fin);

        String driver = prop.getProperty("db_driver");
        String uri = prop.getProperty("db_uri");
        String db = prop.getProperty("db_name");
        String username = prop.getProperty("db_username");
        String password = prop.getProperty("db_password");

        Class.forName(driver);
        conn = DriverManager.getConnection(uri + db, username, password);
        conn.setAutoCommit(true); // no transactions

        /*
         * select all record ids with image_data not null and contents is null
         * for each id fetch record
         * migrate data from image_data to contents
         */
        String sql = "select image_id from client_image where image_data is not null and contents is null";
        PreparedStatement pst = conn.prepareStatement(sql);
        ResultSet rs = pst.executeQuery();

        List<Long> ids = new ArrayList<Long>();
        while (rs.next()) {
            ids.add(rs.getLong("image_id"));
        }
        rs.close();

        sql = "select image_data from client_image where image_id = ?";
        pst = conn.prepareStatement(sql);

        System.out.println("Migrating image data for " + ids.size() + " images...");
        for (Long id : ids) {
            pst.setLong(1, id);
            ResultSet imagesRS = pst.executeQuery();
            while (imagesRS.next()) {
                String dataString = imagesRS.getString("image_data");
                Blob dataBlob = fromStringToBlob(dataString);
                if (writeBlobToDb(conn, id, dataBlob) == 1) {
                    System.out.println("Image data migrated for image_id: " + id);
                }
            }
            imagesRS.close();
        }
        System.out.println("Migration completed.");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }
}
From source file:com.xyz.reccommendation.driver.SKU2SKUCount.java
public static void main(String[] args) throws Exception {
    final Configuration conf = new Configuration();

    String envt = null;
    if (args.length > 0) {
        envt = args[0];
    } else {
        envt = "dev";
    }

    Properties prop = new Properties();
    try {
        // load a properties file from the classpath, inside a static method
        prop.load(SKU2SKUCount.class.getClassLoader()
                .getResourceAsStream("config-" + envt + ".properties"));
    } catch (IOException ex) {
        ex.printStackTrace();
        System.exit(1);
    }

    MongoConfigUtil.setOutputURI(conf, "mongodb://" + prop.getProperty("mongodb.ip") + "/"
            + prop.getProperty("mongodb.dbname") + ".out_stat_custom");
    log.debug("MongoDB URL : mongodb://" + prop.getProperty("mongodb.ip") + "/"
            + prop.getProperty("mongodb.dbname") + "." + ".out_stat_custom");
    log.debug("Conf: " + conf);

    MongoConfigUtil.setCreateInputSplits(conf, false);
    args = new GenericOptionsParser(conf, args).getRemainingArgs();

    final Job job = new Job(conf,
            "Count the sku to sku mapping from pview data on hdfs in \"inputPview\" path.");
    job.setJarByClass(SKU2SKUCount.class);
    job.setMapperClass(TokenizerMapper.class);
    // job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(BSONWritable.class);

    job.setInputFormatClass(KeyValueTextInputFormat.class);
    job.setOutputFormatClass(MongoOutputFormat.class);

    FileInputFormat.setInputPaths(job, new Path("inputPview"));

    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
From source file:kafka.examples.consumer.BasicConsumerExample.java
public static void main(String[] args) {
    ArgumentParser parser = argParser();

    try {
        Namespace res = parser.parseArgs(args); /* parse args */
        String brokerList = res.getString("bootstrap.servers");
        String topic = res.getString("topic");
        String serializer = res.getString("serializer");

        Properties consumerConfig = new Properties();
        consumerConfig.put("group.id", "my-group");
        consumerConfig.put("bootstrap.servers", brokerList);
        consumerConfig.put("auto.offset.reset", "earliest");
        consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.ByteArrayDeserializer");
        consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.ByteArrayDeserializer");

        KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(consumerConfig);
        consumer.subscribe(Collections.singletonList(topic));

        while (true) {
            ConsumerRecords<byte[], byte[]> records = consumer.poll(1000);
            for (ConsumerRecord<byte[], byte[]> record : records) {
                System.out.printf(
                        "Received Message topic =%s, partition =%s, offset = %d, key = %s, value = %s\n",
                        record.topic(), record.partition(), record.offset(), deserialize(record.key()),
                        deserialize(record.value()));
            }
            consumer.commitSync();
        }
    } catch (ArgumentParserException e) {
        if (args.length == 0) {
            parser.printHelp();
            System.exit(0);
        } else {
            parser.handleError(e);
            System.exit(1);
        }
    }
}
From source file:uk.ac.kcl.Main.java
public static void main(String[] args) {
    File folder = new File(args[0]);
    File[] listOfFiles = folder.listFiles();
    assert listOfFiles != null;

    for (File listOfFile : listOfFiles) {
        if (listOfFile.isFile()) {
            if (listOfFile.getName().endsWith(".properties")) {
                System.out.println("Properties file found: " + listOfFile.getName()
                        + ". Attempting to launch application context");
                Properties properties = new Properties();
                InputStream input;
                try {
                    input = new FileInputStream(listOfFile);
                    properties.load(input);
                    if (properties.getProperty("globalSocketTimeout") != null) {
                        TcpHelper.setSocketTimeout(
                                Integer.valueOf(properties.getProperty("globalSocketTimeout")));
                    }
                    Map<String, Object> map = new HashMap<>();
                    properties.forEach((k, v) -> {
                        map.put(k.toString(), v);
                    });

                    ConfigurableEnvironment environment = new StandardEnvironment();
                    MutablePropertySources propertySources = environment.getPropertySources();
                    propertySources.addFirst(new MapPropertySource(listOfFile.getName(), map));

                    @SuppressWarnings("resource")
                    AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
                    ctx.registerShutdownHook();
                    ctx.setEnvironment(environment);

                    String scheduling;
                    try {
                        scheduling = properties.getProperty("scheduler.useScheduling");
                        if (scheduling.equalsIgnoreCase("true")) {
                            ctx.register(ScheduledJobLauncher.class);
                            ctx.refresh();
                        } else if (scheduling.equalsIgnoreCase("false")) {
                            ctx.register(SingleJobLauncher.class);
                            ctx.refresh();
                            SingleJobLauncher launcher = ctx.getBean(SingleJobLauncher.class);
                            launcher.launchJob();
                        } else if (scheduling.equalsIgnoreCase("slave")) {
                            ctx.register(JobConfiguration.class);
                            ctx.refresh();
                        } else {
                            throw new RuntimeException(
                                    "useScheduling not configured. Must be true, false or slave");
                        }
                    } catch (NullPointerException ex) {
                        throw new RuntimeException(
                                "useScheduling not configured. Must be true, false or slave");
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
From source file:kafka.examples.producer.BasicProducerExample.java
public static void main(String[] args) {
    ArgumentParser parser = argParser();

    try {
        Namespace res = parser.parseArgs(args); /* parse args */
        String brokerList = res.getString("bootstrap.servers");
        String topic = res.getString("topic");
        Boolean syncSend = res.getBoolean("syncsend");
        long noOfMessages = res.getLong("messages");
        long delay = res.getLong("delay");
        String messageType = res.getString("messagetype");

        Properties producerConfig = new Properties();
        producerConfig.put("bootstrap.servers", brokerList);
        producerConfig.put("client.id", "basic-producer");
        producerConfig.put("acks", "all");
        producerConfig.put("retries", "3");
        producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.ByteArraySerializer");
        producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.ByteArraySerializer");

        SimpleProducer<byte[], byte[]> producer = new SimpleProducer<>(producerConfig, syncSend);
        for (int i = 0; i < noOfMessages; i++) {
            producer.send(topic, getKey(i), getEvent(messageType, i));
            try {
                Thread.sleep(delay);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        producer.close();
    } catch (ArgumentParserException e) {
        if (args.length == 0) {
            parser.printHelp();
            System.exit(0);
        } else {
            parser.handleError(e);
            System.exit(1);
        }
    }
}
From source file:kafka.examples.producer.CustomPartitionerExample.java
public static void main(String[] args) {
    ArgumentParser parser = argParser();

    try {
        Namespace res = parser.parseArgs(args); /* parse args */
        String brokerList = res.getString("bootstrap.servers");
        String topic = res.getString("topic");
        Boolean syncSend = res.getBoolean("syncsend");
        long noOfMessages = res.getLong("messages");
        long delay = res.getLong("delay");
        String messageType = res.getString("messagetype");

        Properties producerConfig = new Properties();
        producerConfig.put("bootstrap.servers", brokerList);
        producerConfig.put("client.id", "basic-producer");
        producerConfig.put("acks", "all");
        producerConfig.put("retries", "3");
        producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.ByteArraySerializer");
        producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.ByteArraySerializer");
        producerConfig.put(ProducerConfig.PARTITIONER_CLASS_CONFIG,
                "kafka.examples.producer.CustomPartitioner");

        SimpleProducer<byte[], byte[]> producer = new SimpleProducer<>(producerConfig, syncSend);
        for (int i = 0; i < noOfMessages; i++) {
            producer.send(topic, getKey(i), getEvent(messageType, i));
            try {
                Thread.sleep(delay);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        producer.close();
    } catch (ArgumentParserException e) {
        if (args.length == 0) {
            parser.printHelp();
            System.exit(0);
        } else {
            parser.handleError(e);
            System.exit(1);
        }
    }
}
From source file:kafka.examples.producer.BasicPartitionExample.java
public static void main(String[] args) {
    ArgumentParser parser = argParser();

    try {
        Namespace res = parser.parseArgs(args); /* parse args */
        String brokerList = res.getString("bootstrap.servers");
        String topic = res.getString("topic");
        Boolean syncSend = res.getBoolean("syncsend");
        long noOfMessages = res.getLong("messages");
        long delay = res.getLong("delay");
        String messageType = res.getString("messagetype");

        Properties producerConfig = new Properties();
        producerConfig.put("bootstrap.servers", brokerList);
        producerConfig.put("client.id", "basic-producer");
        producerConfig.put("acks", "all");
        producerConfig.put("retries", "3");
        producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.ByteArraySerializer");
        producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.ByteArraySerializer");

        SimpleProducer<byte[], byte[]> producer = new SimpleProducer<>(producerConfig, syncSend);
        for (int i = 0; i < noOfMessages; i++) {
            if (i % 2 == 0)
                producer.send(topic, 0, getKey(i), getEvent(messageType, i)); // send even numbered messages
            else
                producer.send(topic, 1, getKey(i), getEvent(messageType, i)); // send odd numbered messages
            try {
                Thread.sleep(delay);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        producer.close();
    } catch (ArgumentParserException e) {
        if (args.length == 0) {
            parser.printHelp();
            System.exit(0);
        } else {
            parser.handleError(e);
            System.exit(1);
        }
    }
}
From source file:eu.databata.engine.util.PropagatorRecreateUserTool.java
public static void main(String[] args) {
    if (args.length == 0) {
        printMessage();
        return;
    }

    String scriptName = getScriptName(args[0]);
    if (scriptName == null) {
        printMessage();
        return;
    }

    ClassLoader classLoader = PropagatorRecreateUserTool.class.getClassLoader();
    InputStream resourceAsStream = classLoader.getResourceAsStream("databata.properties");

    Properties propagatorProperties = new Properties();
    try {
        propagatorProperties.load(resourceAsStream);
    } catch (FileNotFoundException e) {
        LOG.error("Specified file 'databata.properties' not found");
    } catch (IOException e) {
        LOG.error("Specified file 'databata.properties' cannot be loaded");
    }

    SingleConnectionDataSource dataSource = new SingleConnectionDataSource();
    dataSource.setDriverClassName(propagatorProperties.getProperty("db.propagation.driver"));
    if ("-idb".equals(args[0])) {
        dataSource.setUrl(propagatorProperties.getProperty("db.propagation.dba.connection-url"));
        dataSource.setUsername(propagatorProperties.getProperty("db.propagation.dba.user"));
        dataSource.setPassword(propagatorProperties.getProperty("db.propagation.dba.password"));
    } else {
        dataSource.setUrl(propagatorProperties.getProperty("db.propagation.sa.connection-url"));
        dataSource.setUsername(propagatorProperties.getProperty("db.propagation.sa.user"));
        dataSource.setPassword(propagatorProperties.getProperty("db.propagation.sa.password"));
    }
    dataSource.setSuppressClose(true);

    String databaseName = "undefined";
    try {
        databaseName = dataSource.getConnection().getMetaData().getDatabaseProductName();
    } catch (SQLException e) {
        LOG.error("Cannot get connection by specified url", e);
        return;
    }

    String databaseCode = PropagationUtils.getDatabaseCode(databaseName);
    LOG.info("Database with code '" + databaseCode + "' is identified for database '" + databaseName + "'");

    String submitFileName = "META-INF/databata/" + databaseCode + "_" + scriptName + ".sql";
    String fileContent = "";
    try {
        fileContent = getFileContent(classLoader, submitFileName);
    } catch (IOException e) {
        LOG.info("File with name '" + submitFileName
                + "' cannot be read from classpath. Trying to load default submit file.");
    }

    if (fileContent == null || "".equals(fileContent)) {
        String defaultSubmitFileName = "META-INF/databata/" + databaseCode + "_" + scriptName + ".default.sql";
        try {
            fileContent = getFileContent(classLoader, defaultSubmitFileName);
        } catch (IOException e) {
            LOG.info("File with name '" + defaultSubmitFileName
                    + "' cannot be read from classpath. Trying to load default submit file.");
        }
    }

    if (fileContent == null) {
        LOG.info("File content is empty. Stopping process.");
        return;
    }

    fileContent = replacePlaceholders(fileContent, propagatorProperties);

    SqlFile sqlFile = null;
    try {
        sqlFile = new SqlFile(fileContent, null, submitFileName, new SqlExecutionCallback() {
            @Override
            public void handleExecuteSuccess(String sql, int arg1, double arg2) {
                LOG.info("Sql is successfully executed \n ======== \n" + sql + "\n ======== \n");
            }

            @Override
            public void handleException(SQLException arg0, String sql) throws SQLException {
                LOG.info("Sql returned error \n ======== \n" + sql + "\n ======== \n");
            }
        }, null);
    } catch (IOException e) {
        LOG.error("Error when initializing SqlTool", e);
    }

    try {
        sqlFile.setConnection(dataSource.getConnection());
    } catch (SQLException e) {
        LOG.error("Error occurred when setting connection", e);
    }

    try {
        sqlFile.execute();
    } catch (SqlToolError e) {
        LOG.error("Error when creating user", e);
    } catch (SQLException e) {
        LOG.error("Error when creating user", e);
    }
}
From source file:com.digitalgeneralists.assurance.Application.java
public static void main(String[] args) {
    Logger logger = Logger.getLogger(Application.class);
    logger.info("App is starting.");

    Properties applicationProperties = new Properties();
    String applicationInfoFileName = "/version.txt";
    InputStream inputStream = Application.class.getResourceAsStream(applicationInfoFileName);
    applicationInfoFileName = null;

    try {
        if (inputStream != null) {
            applicationProperties.load(inputStream);

            Application.applicationShortName = applicationProperties.getProperty("name");
            Application.applicationName = applicationProperties.getProperty("applicationName");
            Application.applicationVersion = applicationProperties.getProperty("version");
            Application.applicationBuildNumber = applicationProperties.getProperty("buildNumber");

            applicationProperties = null;
        }
    } catch (IOException e) {
        logger.warn("Could not load application version information.", e);
    } finally {
        try {
            // Guard against a missing version.txt resource, which would leave inputStream null.
            if (inputStream != null) {
                inputStream.close();
            }
        } catch (IOException e) {
            logger.error("Couldn't close the application version input stream.");
        }
        inputStream = null;
    }

    javax.swing.SwingUtilities.invokeLater(new Runnable() {
        private Logger logger = Logger.getLogger(Application.class);

        public void run() {
            logger.info("Starting the Swing run thread.");

            try {
                Application.installDb();
            } catch (IOException e) {
                logger.fatal("Unable to install the application database.", e);
                System.exit(1);
            } catch (SQLException e) {
                logger.fatal("Unable to install the application database.", e);
                System.exit(1);
            }

            IApplicationUI window = null;
            ClassPathXmlApplicationContext springContext = null;
            try {
                springContext = new ClassPathXmlApplicationContext("/META-INF/spring/app-context.xml");

                StringBuffer message = new StringBuffer(256);
                logger.info(message.append("Spring Context: ").append(springContext));
                message.setLength(0);

                window = (IApplicationUI) springContext.getBean("ApplicationUI");
            } finally {
                if (springContext != null) {
                    springContext.close();
                }
                springContext = null;
            }

            if (window != null) {
                logger.info("Launching the window.");
                window.display();
            } else {
                logger.fatal("The main application window object is null.");
            }

            logger = null;
        }
    });
}