Example usage for java.util Properties put

List of usage examples for java.util Properties put

Introduction

On this page you can find usage examples for java.util.Properties.put, drawn from various source files.

Prototype

@Override
public synchronized Object put(Object key, Object value)
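
Before the project examples below, here is a minimal sketch of the call in isolation (it assumes import java.util.Properties; the keys and values are hypothetical and chosen only to illustrate the behavior). Like Hashtable.put, it returns the value previously mapped to the key, or null if there was none:

public static void main(String[] args) {
    Properties props = new Properties();
    // put accepts arbitrary Object keys/values, but String pairs are recommended
    // so that getProperty() and store() work as expected.
    Object previous = props.put("db.user", "alice");  // returns null (no prior mapping)
    previous = props.put("db.user", "bob");           // returns "alice"
    System.out.println(props.getProperty("db.user")); // prints "bob"
}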

Usage

From source file:Main.java

public static void main(String[] args) throws Exception {
    Class.forName("com.mysql.jdbc.Driver");
    Connection conn = null;
    Properties info = new Properties();
    // info.put("proxy_type", "4"); // SSL Tunneling
    info.put("proxy_host", "[proxy host]");
    info.put("proxy_port", "[proxy port]");
    info.put("proxy_user", "[proxy user]");
    info.put("proxy_password", "[proxy password]");
    info.put("user", "[db user]");
    info.put("password", "[db pass word]");
    conn = DriverManager.getConnection("jdbc:mysql://[db host]/", info);

    Statement stmt = conn.createStatement();
    ResultSet rs = stmt.executeQuery("Select NOW()");
    rs.next();
    System.out.println("Data- " + rs.getString(1));
    rs.close();
    stmt.close();
    conn.close();
}

From source file:JAXPTransletOneTransformation.java

public static void main(String argv[]) throws TransformerException, TransformerConfigurationException,
        IOException, SAXException, ParserConfigurationException, FileNotFoundException {
    // Set the TransformerFactory system property to generate and use a translet.
    // Note: To make this sample more flexible, load properties from a properties file.    
    // The setting for the Xalan Transformer is "org.apache.xalan.processor.TransformerFactoryImpl"
    String key = "javax.xml.transform.TransformerFactory";
    String value = "org.apache.xalan.xsltc.trax.TransformerFactoryImpl";
    Properties props = System.getProperties();
    props.put(key, value);
    System.setProperties(props);

    String xslInURI = "todo.xsl";
    String xmlInURI = "todo.xml";
    String htmlOutURI = "todo.html";
    try {
        // Instantiate the TransformerFactory, and use it along with a StreamSource
        // XSL stylesheet to create a Transformer.
        TransformerFactory tFactory = TransformerFactory.newInstance();
        Transformer transformer = tFactory.newTransformer(new StreamSource(xslInURI));
        // Perform the transformation from a StreamSource to a StreamResult;
        transformer.transform(new StreamSource(xmlInURI), new StreamResult(new FileOutputStream(htmlOutURI)));
        System.out.println("Produced todo.html");
    } catch (Exception e) {
        System.out.println(e.toString());
        e.printStackTrace();
    }
}

From source file:JAXPTransletMultipleTransformations.java

public static void main(String argv[]) {
    // Set the TransformerFactory system property to generate and use translets.
    // Note: To make this sample more flexible, load properties from a properties file.
    // The setting for the Xalan Transformer is "org.apache.xalan.processor.TransformerFactoryImpl"
    String key = "javax.xml.transform.TransformerFactory";
    String value = "org.apache.xalan.xsltc.trax.TransformerFactoryImpl";
    Properties props = System.getProperties();
    props.put(key, value);

    System.setProperties(props);

    String xslInURI = "todo.xsl";

    try {
        // Instantiate the TransformerFactory, and use it along with a StreamSource
        // XSL stylesheet to create a translet as a Templates object.
        TransformerFactory tFactory = TransformerFactory.newInstance();
        Templates translet = tFactory.newTemplates(new StreamSource(xslInURI));

        // Perform each transformation
        doTransform(translet, "todo.xml", "todo.html");
        System.out.println("Produced todo.html");

        doTransform(translet, "todotoo.xml", "todotoo.html");
        System.out.println("Produced todotoo.html");
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:com.edmunds.etm.client.impl.Daemon.java

public static void main(String[] args) {

    if (args.length == 0) {
        System.out.println("Error provide the name of the service as the first argument");
        return;
    }
    final String serviceName = args[0];

    final Properties properties = new Properties();
    properties.put("serviceName", serviceName);

    final ApplicationContext appCtx = SpringContextLoader.loadClassPathSpringContext(DAEMON_CONTEXT_PATH,
            properties);
    final DaemonConfig daemonConfig = (DaemonConfig) appCtx.getBean("daemonConfig");
    daemonConfig.setServiceName(serviceName);
    final Daemon daemon = (Daemon) appCtx.getBean("daemon");

    if (daemon != null) {
        daemon.run();
    }
}

From source file:com.github.fhuss.kafka.streams.cep.demo.CEPStockKStreamsDemo.java

public static void main(String[] args) {

    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-cep");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "localhost:2181");
    props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, StockEventSerDe.class);
    props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, StockEventSerDe.class);

    // setting offset reset to earliest so that we can re-run the demo code with the same pre-loaded data
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    // build query
    final Pattern<Object, StockEvent> pattern = new QueryBuilder<Object, StockEvent>().select()
            .where((k, v, ts, store) -> v.volume > 1000).<Long>fold("avg", (k, v, curr) -> v.price).then()
            .select().zeroOrMore().skipTillNextMatch()
            .where((k, v, ts, state) -> v.price > (long) state.get("avg"))
            .<Long>fold("avg", (k, v, curr) -> (curr + v.price) / 2)
            .<Long>fold("volume", (k, v, curr) -> v.volume).then().select().skipTillNextMatch()
            .where((k, v, ts, state) -> v.volume < 0.8 * state.getOrElse("volume", 0L))
            .within(1, TimeUnit.HOURS).build();

    KStreamBuilder builder = new KStreamBuilder();

    CEPStream<Object, StockEvent> stream = new CEPStream<>(builder.stream("StockEvents"));

    KStream<Object, Sequence<Object, StockEvent>> stocks = stream.query("Stocks", pattern);

    stocks.mapValues(seq -> {
        JSONObject json = new JSONObject();
        seq.asMap().forEach((k, v) -> {
            JSONArray events = new JSONArray();
            json.put(k, events);
            List<String> collect = v.stream().map(e -> e.value.name).collect(Collectors.toList());
            Collections.reverse(collect);
            collect.forEach(e -> events.add(e));
        });
        return json.toJSONString();
    }).through(null, Serdes.String(), "Matches").print();

    //Use the topologyBuilder and streamingConfig to start the kafka streams process
    KafkaStreams streaming = new KafkaStreams(builder, props);
    //streaming.cleanUp();
    streaming.start();
}

From source file:eu.qualimaster.easy.extension.debug.DebugProfile.java

/**
 * Executes the test.
 * 
 * @param args the first argument shall be the model location
 * @throws ModelManagementException in case that obtaining the models fails
 * @throws IOException if file operations fail
 */
public static void main(String[] args) throws ModelManagementException, IOException {
    if (0 == args.length) {
        System.out.println("qualimaster.profile: <model location>");
        System.exit(0);
    } else {
        Properties prop = new Properties();
        prop.put(CoordinationConfiguration.PIPELINE_ELEMENTS_REPOSITORY,
                "https://projects.sse.uni-hildesheim.de/qm/maven/");
        CoordinationConfiguration.configure(prop, false);
        File tmp = new File(FileUtils.getTempDirectory(), "qmDebugProfile");
        FileUtils.deleteDirectory(tmp);
        tmp.mkdirs();

        File modelLocation = new File(args[0]);
        if (!modelLocation.exists()) {
            System.out.println("model location " + modelLocation + " does not exist");
            System.exit(0);
        }
        initialize();
        ModelInitializer.registerLoader(ProgressObserver.NO_OBSERVER);
        ModelInitializer.addLocation(modelLocation, ProgressObserver.NO_OBSERVER);
        Project project = RepositoryHelper.obtainModel(VarModel.INSTANCE, "QM", null);

        // create descriptor before clearing the location - in infrastructure pass vil directly/resolve VIL
        Configuration monConfig = RepositoryHelper.createConfiguration(project, "MONITORING");
        QmProjectDescriptor source = new QmProjectDescriptor(tmp);
        try {
            ProfileData data = AlgorithmProfileHelper.createProfilePipeline(monConfig, "ProfileTestPip",
                    "fCorrelationFinancial", "TopoSoftwareCorrelationFinancial", source);
            //                  "fPreprocessor", "Preprocessor", source);
            System.out.println("Creation successful. " + data.getPipeline());
        } catch (VilException e) {
            e.printStackTrace();
        }
        ModelInitializer.removeLocation(modelLocation, ProgressObserver.NO_OBSERVER);
    }
}

From source file:Main.java

public static void main(String[] args) throws Exception {
    Properties prop = new Properties();
    StringWriter sw = new StringWriter();

    prop.setProperty("Chapter Count", "200");
    prop.put("Tutorial Count", "1500");
    prop.put("tutorial", "java2s.com");

    // print the list
    System.out.println(prop);

    // store the properties list in an output writer
    prop.store(sw, "Main");

    System.out.println(sw.toString());

}

From source file:ReverseSelect.java

public static void main(String argv[]) {
    Connection con = null;

    try {
        String url = "jdbc:msql://carthage.imaginary.com/ora";
        String driver = "com.imaginary.sql.msql.MsqlDriver";
        Properties p = new Properties();
        Statement stmt;
        ResultSet rs;

        p.put("user", "borg");
        Class.forName(driver).newInstance();
        con = DriverManager.getConnection(url, "borg", "");
        stmt = con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery("SELECT * from test ORDER BY test_id");
        // as a new ResultSet, rs is currently positioned
        // before the first row
        System.out.println("Got results:");
        // position rs after the last row
        rs.afterLast();
        while (rs.previous()) {
            int a = rs.getInt("test_id");
            String str = rs.getString("test_val");

            System.out.print("\ttest_id= " + a);
            System.out.println("/str= '" + str + "'");
        }
        System.out.println("Done.");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (con != null) {
            try {
                con.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }
}

From source file:com.aerospike.client.rest.AerospikeRESTfulService.java

public static void main(String[] args) throws ParseException {

    Options options = new Options();
    options.addOption("h", "host", true, "Server hostname (default: localhost)");
    options.addOption("p", "port", true, "Server port (default: 3000)");

    // parse the command line args
    CommandLineParser parser = new PosixParser();
    CommandLine cl = parser.parse(options, args, false);

    // set properties
    Properties as = System.getProperties();
    String host = cl.getOptionValue("h", "localhost");
    as.put("seedHost", host);
    String portString = cl.getOptionValue("p", "3000");
    as.put("port", portString);

    // start app
    SpringApplication.run(AerospikeRESTfulService.class, args);

}

From source file:kafka.examples.consumer.BasicConsumerExample.java

public static void main(String[] args) {
    ArgumentParser parser = argParser();

    try {
        Namespace res = parser.parseArgs(args);

        /* parse args */
        String brokerList = res.getString("bootstrap.servers");
        String topic = res.getString("topic");
        String serializer = res.getString("serializer");

        Properties consumerConfig = new Properties();
        consumerConfig.put("group.id", "my-group");
        consumerConfig.put("bootstrap.servers", brokerList);
        consumerConfig.put("auto.offset.reset", "earliest");
        consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.ByteArrayDeserializer");
        consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.ByteArrayDeserializer");

        KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(consumerConfig);
        consumer.subscribe(Collections.singletonList(topic));

        while (true) {
            ConsumerRecords<byte[], byte[]> records = consumer.poll(1000);
            for (ConsumerRecord<byte[], byte[]> record : records) {
                System.out.printf(
                        "Received Message topic =%s, partition =%s, offset = %d, key = %s, value = %s\n",
                        record.topic(), record.partition(), record.offset(), deserialize(record.key()),
                        deserialize(record.value()));
            }

            consumer.commitSync();
        }

    } catch (ArgumentParserException e) {
        if (args.length == 0) {
            parser.printHelp();
            System.exit(0);
        } else {
            parser.handleError(e);
            System.exit(1);
        }
    }

}