Example usage for java.util Properties load

List of usage examples for java.util Properties load

Introduction

This page collects example usages of the java.util.Properties.load method.

Prototype

public synchronized void load(InputStream inStream) throws IOException
public synchronized void load(Reader reader) throws IOException

Document

Reads a property list (key and element pairs) from the input byte stream; the Reader overload reads from a character stream, letting the caller choose the character encoding.
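
Before the project listings, a minimal self-contained sketch of both overloads; the file name app.properties and the key some.key are illustrative, not taken from any project below:

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

public class PropertiesLoadDemo {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();

        // Byte-stream overload: the input is decoded as ISO 8859-1, so other
        // characters must be written as \uXXXX escapes in the file.
        try (InputStream in = new FileInputStream("app.properties")) {
            props.load(in);
        }

        // Character-stream overload: the caller chooses the encoding.
        try (Reader reader = new InputStreamReader(
                new FileInputStream("app.properties"), StandardCharsets.UTF_8)) {
            props.load(reader);
        }

        System.out.println(props.getProperty("some.key", "default"));
    }
}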

Usage

From source file:herddb.server.ServerMain.java

public static void main(String... args) {
    try {
        LOG.log(Level.INFO, "Starting HerdDB version {0}", herddb.utils.Version.getVERSION());
        Properties configuration = new Properties();

        boolean configFileFromParameter = false;
        for (int i = 0; i < args.length; i++) {
            String arg = args[i];
            if (!arg.startsWith("-")) {
                File configFile = new File(args[i]).getAbsoluteFile();
                LOG.log(Level.INFO, "Reading configuration from {0}", configFile);
                try (InputStreamReader reader = new InputStreamReader(new FileInputStream(configFile),
                        StandardCharsets.UTF_8)) {
                    configuration.load(reader);
                }
                configFileFromParameter = true;
            } else if (arg.equals("--use-env")) {
                System.getenv().forEach((key, value) -> {
                    System.out.println("Considering env as system property " + key + " -> " + value);
                    System.setProperty(key, value);
                });
            } else if (arg.startsWith("-D")) {
                int equals = arg.indexOf('=');
                if (equals > 0) {
                    String key = arg.substring(2, equals);
                    String value = arg.substring(equals + 1);
                    System.setProperty(key, value);
                }
            }
        }
        if (!configFileFromParameter) {
            File configFile = new File("conf/server.properties").getAbsoluteFile();
            LOG.log(Level.INFO, "Reading configuration from {0}", configFile);
            if (configFile.isFile()) {
                try (InputStreamReader reader = new InputStreamReader(new FileInputStream(configFile),
                        StandardCharsets.UTF_8)) {
                    configuration.load(reader);
                }
            }
        }

        System.getProperties().forEach((k, v) -> {
            String key = k + "";
            if (!key.startsWith("java") && !key.startsWith("user")) {
                configuration.put(k, v);
            }
        });

        LogManager.getLogManager().readConfiguration();

        Runtime.getRuntime().addShutdownHook(new Thread("ctrlc-hook") {

            @Override
            public void run() {
                System.out.println("Ctrl-C trapped. Shutting down");
                ServerMain _brokerMain = runningInstance;
                if (_brokerMain != null) {
                    _brokerMain.close();
                }
            }

        });
        runningInstance = new ServerMain(configuration);
        runningInstance.start();

        runningInstance.join();

    } catch (Throwable t) {
        t.printStackTrace();
        System.exit(1);
    }
}

From source file:de.tu_dortmund.ub.data.dswarm.TaskProcessingUnit.java

public static void main(final String[] args) throws Exception {

    // default config
    String configFile = DEFAULT_CONF_FOLDER_NAME + File.separatorChar + DEFAULT_CONFIG_PROPERTIES_FILE_NAME;

    // read program parameters
    if (args.length > 0) {

        for (final String arg : args) {

            LOG.info("arg = " + arg);

            if (arg.startsWith("-conf=")) {

                configFile = arg.split("=")[1];
            }
        }
    }

    final Properties config;

    // Init properties
    try {

        try (final InputStream inputStream = new FileInputStream(configFile)) {

            try (final BufferedReader reader = new BufferedReader(
                    new InputStreamReader(inputStream, TPUUtil.UTF_8))) {

                config = new Properties();
                config.load(reader);
            }
        }
    } catch (final IOException e) {

        LOG.error("something went wrong", e);
        LOG.error(String.format("FATAL ERROR: Could not read '%s'!", configFile));

        throw e;
    }

    startTPU(configFile, config);
}

From source file:com.msd.gin.halyard.tools.HalyardExport.java

/**
 * Main of the HalyardExport
 * @param args String command line arguments
 * @throws Exception in case of any problem
 */
public static void main(final String args[]) throws Exception {
    if (conf == null)
        conf = new Configuration();
    Options options = new Options();
    options.addOption(newOption("h", null, "Prints this help"));
    options.addOption(newOption("v", null, "Prints version"));
    options.addOption(newOption("s", "source_htable", "Source HBase table with Halyard RDF store"));
    options.addOption(
            newOption("q", "sparql_query", "SPARQL tuple or graph query executed to export the data"));
    options.addOption(newOption("t", "target_url",
            "file://<path>/<file_name>.<ext> or hdfs://<path>/<file_name>.<ext> or jdbc:<jdbc_connection>/<table_name>"));
    options.addOption(newOption("p", "property=value", "JDBC connection properties"));
    options.addOption(newOption("l", "driver_classpath", "JDBC driver classpath delimited by ':'"));
    options.addOption(newOption("c", "driver_class", "JDBC driver class name"));
    options.addOption(newOption("r", null, "Trim target table before export (apply for JDBC only)"));
    try {
        CommandLine cmd = new PosixParser().parse(options, args);
        if (args.length == 0 || cmd.hasOption('h')) {
            printHelp(options);
            return;
        }
        if (cmd.hasOption('v')) {
            Properties p = new Properties();
            try (InputStream in = HalyardExport.class
                    .getResourceAsStream("/META-INF/maven/com.msd.gin.halyard/hbasesail/pom.properties")) {
                if (in != null)
                    p.load(in);
            }
            System.out.println("Halyard Export version " + p.getProperty("version", "unknown"));
            return;
        }
        if (!cmd.getArgList().isEmpty())
            throw new ExportException("Unknown arguments: " + cmd.getArgList().toString());
        for (char c : "sqt".toCharArray()) {
            if (!cmd.hasOption(c))
                throw new ExportException("Missing mandatory option: " + c);
        }
        for (char c : "sqtlc".toCharArray()) {
            String s[] = cmd.getOptionValues(c);
            if (s != null && s.length > 1)
                throw new ExportException("Multiple values for option: " + c);
        }
        StatusLog log = new StatusLog() {
            private final Logger l = Logger.getLogger(HalyardExport.class.getName());

            @Override
            public void tick() {
            }

            @Override
            public void logStatus(String status) {
                l.info(status);
            }
        };
        String driverClasspath = cmd.getOptionValue('l');
        URL driverCP[] = null;
        if (driverClasspath != null) {
            String jars[] = driverClasspath.split(":");
            driverCP = new URL[jars.length];
            for (int j = 0; j < jars.length; j++) {
                File f = new File(jars[j]);
                if (!f.isFile())
                    throw new ExportException("Invalid JDBC driver classpath element: " + jars[j]);
                driverCP[j] = f.toURI().toURL();
            }
        }
        export(conf, log, cmd.getOptionValue('s'), cmd.getOptionValue('q'), cmd.getOptionValue('t'),
                cmd.getOptionValue('c'), driverCP, cmd.getOptionValues('p'), cmd.hasOption('r'));
    } catch (RuntimeException exp) {
        System.out.println(exp.getMessage());
        printHelp(options);
        throw exp;
    }
}

From source file:com.baidu.rigel.biplatform.ma.file.serv.FileServer.java

/**
 * Starts the file server.
 * 
 * @param args
 */
public static void main(String[] args) throws Exception {
    //        if (args.length != 1) {
    //            LOGGER.error("can not get enough parameters for starting file server");
    //            printUsage();
    //            System.exit(-1);
    //        }

    FileInputStream fis = null;
    String classLocation = FileServer.class.getProtectionDomain().getCodeSource().getLocation().toString();
    final File configFile = new File(classLocation + "/../conf/fileserver.conf");
    Properties properties = new Properties();
    try {
        if (configFile.exists()) {
            fis = new FileInputStream(configFile);
        } else if (args.length > 0 && StringUtils.isNotEmpty(args[0])) {
            fis = new FileInputStream(args[0]);
        } else {
            printUsage();
            throw new RuntimeException("can't find correct file server configuration file!");
        }
        properties.load(fis);
    } finally {
        if (fis != null) {
            fis.close();
        }
    }
    int port = -1;
    try {
        port = Integer.valueOf(properties.getProperty(PORT_NUM_KEY));
    } catch (NumberFormatException e) {
        LOGGER.error("parameter is not correct, [port = {}]", args[0]);
        System.exit(-1);
    }

    String location = properties.getProperty(ROOT_DIR_KEY);
    if (StringUtils.isEmpty(location)) {
        LOGGER.error("the location can not be empty");
        System.exit(-1);
    }

    File f = new File(location);
    if (!f.exists() && !f.mkdirs()) {
        LOGGER.error("invalidation location [{}] please verify the input", args[1]);
        System.exit(-1);
    }
    startServer(location, port);
}

From source file:com.msc.dao.facturierswing.Main.java

/**
 * @param args the command line arguments
 */
public static void main(String args[]) throws FileNotFoundException, IOException {
    Properties prop = new Properties();
    try (Reader r = new FileReader("./config.properties")) {
        prop.load(r);
    }
    WebService.prop = prop;
    WebService.setDebugMode(Boolean.parseBoolean(prop.getProperty("debugMode")));
    Request request = new Request("login", "login");
    try {
        Response s = request.sendRequest();
        s.consumeToken();
        request.setEndpoint("moi");
        request.setUrlSuite("existe");
        s = request.sendRequest();
        java.lang.reflect.Type fooType = new TypeToken<Helper<Boolean>>() {
        }.getType();
        Helper<Boolean> b = (Helper<Boolean>) s.getObject(null, fooType);
        firstTime = !b.getMyObject();
    } catch (MalformedURLException | IllegalArgumentException ex) {
        Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
    }

    /* Set the Nimbus look and feel */
    //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
    /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
     * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html 
     */
    try {
        for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
            if ("Nimbus".equals(info.getName())) {
                javax.swing.UIManager.setLookAndFeel(info.getClassName());
                break;
            }
        }
    } catch (ClassNotFoundException ex) {
        java.util.logging.Logger.getLogger(Main.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    } catch (InstantiationException ex) {
        java.util.logging.Logger.getLogger(Main.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    } catch (IllegalAccessException ex) {
        java.util.logging.Logger.getLogger(Main.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    } catch (javax.swing.UnsupportedLookAndFeelException ex) {
        java.util.logging.Logger.getLogger(Main.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    }
    //</editor-fold>

    /* Create and display the form */
    java.awt.EventQueue.invokeLater(new Runnable() {
        @Override
        public void run() {
            new Main().setVisible(true);
        }
    });
}

From source file:com.cloud.utils.crypt.EncryptionSecretKeyChanger.java

public static void main(String[] args) {
    List<String> argsList = Arrays.asList(args);
    Iterator<String> iter = argsList.iterator();
    String oldMSKey = null;
    String oldDBKey = null;
    String newMSKey = null;
    String newDBKey = null;

    //Parse command-line args
    while (iter.hasNext()) {
        String arg = iter.next();
        // Old MS Key
        if (arg.equals("-m")) {
            oldMSKey = iter.next();
        }
        // Old DB Key
        if (arg.equals("-d")) {
            oldDBKey = iter.next();
        }
        // New MS Key
        if (arg.equals("-n")) {
            newMSKey = iter.next();
        }
        // New DB Key
        if (arg.equals("-e")) {
            newDBKey = iter.next();
        }
    }

    if (oldMSKey == null || oldDBKey == null) {
        System.out.println("Existing MS secret key or DB secret key is not provided");
        usage();
        return;
    }

    if (newMSKey == null && newDBKey == null) {
        System.out.println("New MS secret key and DB secret are both not provided");
        usage();
        return;
    }

    final File dbPropsFile = PropertiesUtil.findConfigFile("db.properties");
    final Properties dbProps;
    EncryptionSecretKeyChanger keyChanger = new EncryptionSecretKeyChanger();
    StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
    keyChanger.initEncryptor(encryptor, oldMSKey);
    dbProps = new EncryptableProperties(encryptor);
    PropertiesConfiguration backupDBProps = null;

    System.out.println("Parsing db.properties file");
    try {
        try (FileInputStream fis = new FileInputStream(dbPropsFile)) {
            dbProps.load(fis);
        }
        backupDBProps = new PropertiesConfiguration(dbPropsFile);
    } catch (FileNotFoundException e) {
        System.out.println("db.properties file not found while reading DB secret key" + e.getMessage());
    } catch (IOException e) {
        System.out.println("Error while reading DB secret key from db.properties" + e.getMessage());
    } catch (ConfigurationException e) {
        e.printStackTrace();
    }

    String dbSecretKey = null;
    try {
        dbSecretKey = dbProps.getProperty("db.cloud.encrypt.secret");
    } catch (EncryptionOperationNotPossibleException e) {
        System.out.println("Failed to decrypt existing DB secret key from db.properties. " + e.getMessage());
        return;
    }

    if (!oldDBKey.equals(dbSecretKey)) {
        System.out.println("Incorrect MS Secret Key or DB Secret Key");
        return;
    }

    System.out.println("Secret key provided matched the key in db.properties");
    final String encryptionType = dbProps.getProperty("db.cloud.encryption.type");

    if (newMSKey == null) {
        System.out.println("No change in MS Key. Skipping migrating db.properties");
    } else {
        if (!keyChanger.migrateProperties(dbPropsFile, dbProps, newMSKey, newDBKey)) {
            System.out.println("Failed to update db.properties");
            return;
        } else {
            //db.properties updated successfully
            if ("file".equals(encryptionType)) {
                //update key file with new MS key
                try (BufferedWriter bwriter = new BufferedWriter(new FileWriter(keyFile))) {
                    bwriter.write(newMSKey);
                } catch (IOException e) {
                    System.out.println("Failed to write new secret to file. Please update the file manually");
                }
            }
        }
    }

    boolean success = false;
    if (newDBKey == null || newDBKey.equals(oldDBKey)) {
        System.out.println("No change in DB Secret Key. Skipping Data Migration");
    } else {
        EncryptionSecretKeyChecker.initEncryptorForMigration(oldMSKey);
        try {
            success = keyChanger.migrateData(oldDBKey, newDBKey);
        } catch (Exception e) {
            System.out.println("Error during data migration");
            e.printStackTrace();
            success = false;
        }
    }

    if (success) {
        System.out.println("Successfully updated secret key(s)");
    } else {
        System.out.println("Data Migration failed. Reverting db.properties");
        //revert db.properties
        try {
            backupDBProps.save();
        } catch (ConfigurationException e) {
            e.printStackTrace();
        }
        if ("file".equals(encryptionType)) {
            //revert secret key in file
            try (BufferedWriter bwriter = new BufferedWriter(new FileWriter(keyFile))) {
                bwriter.write(oldMSKey);
            } catch (IOException e) {
                System.out.println("Failed to revert to old secret to file. Please update the file manually");
            }
        }
    }
}

From source file:de.unileipzig.ub.indexer.App.java

public static void main(String[] args) throws IOException {

    // create Options object
    Options options = new Options();

    options.addOption("h", "help", false, "display this help");

    options.addOption("f", "filename", true, "name of the JSON file whose content should be indexed");
    options.addOption("i", "index", true, "the name of the target index");
    options.addOption("d", "doctype", true, "the name of the doctype (title, local, ...)");

    options.addOption("t", "host", true, "elasticsearch hostname (default: 0.0.0.0)");
    options.addOption("p", "port", true, "transport port (that's NOT the http port, default: 9300)");
    options.addOption("c", "cluster", true, "cluster name (default: elasticsearch_mdma)");

    options.addOption("b", "bulksize", true, "number of docs sent in one request (default: 3000)");
    options.addOption("v", "verbose", false, "show processing speed while indexing");
    options.addOption("s", "status", false, "only show status of index for file");

    options.addOption("r", "repair", false, "attempt to repair recoverable inconsistencies on the go");
    options.addOption("e", "debug", false, "set logging level to debug");
    options.addOption("l", "logfile", true, "logfile - in not specified only log to stdout");

    options.addOption("m", "memcached", true, "host and port of memcached (default: localhost:11211)");
    options.addOption("z", "latest-flag-on", true,
            "enable latest flag according to field (within content, e.g. 001)");
    options.addOption("a", "flat", false, "flat-mode: do not check for inconsistencies");

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException ex) {
        logger.error(ex);
        System.exit(1);
    }

    // setup logging
    Properties systemProperties = System.getProperties();
    systemProperties.put("net.spy.log.LoggerImpl", "net.spy.memcached.compat.log.Log4JLogger");
    System.setProperties(systemProperties);
    Logger.getLogger("net.spy.memcached").setLevel(Level.ERROR);

    Properties props = new Properties();
    props.load(App.class.getResourceAsStream("/log4j.properties"));

    if (cmd.hasOption("debug")) {
        props.setProperty("log4j.logger.de.unileipzig", "DEBUG");
    }

    if (cmd.hasOption("logfile")) {
        props.setProperty("log4j.rootLogger", "INFO, stdout, F");
        props.setProperty("log4j.appender.F", "org.apache.log4j.FileAppender");
        props.setProperty("log4j.appender.F.File", cmd.getOptionValue("logfile"));
        props.setProperty("log4j.appender.F.layout", "org.apache.log4j.PatternLayout");
        props.setProperty("log4j.appender.F.layout.ConversionPattern", "%5p | %d | %F | %L | %m%n");
    }

    PropertyConfigurator.configure(props);

    InetAddress addr = InetAddress.getLocalHost();
    String memcachedHostAndPort = addr.getHostAddress() + ":11211";
    if (cmd.hasOption("m")) {
        memcachedHostAndPort = cmd.getOptionValue("m");
    }

    // setup caching
    try {
        if (memcachedClient == null) {
            memcachedClient = new MemcachedClient(
                    new ConnectionFactoryBuilder().setFailureMode(FailureMode.Cancel).build(),
                    AddrUtil.getAddresses(memcachedHostAndPort));
            try {
                // give the client and server 300ms to connect
                Thread.sleep(300);
            } catch (InterruptedException ex) {
            }

            Collection availableServers = memcachedClient.getAvailableServers();
            logger.info(availableServers);
            if (availableServers.size() == 0) {
                logger.info("no memcached servers found");
                memcachedClient.shutdown();
                memcachedClient = null;
            } else {
                logger.info(availableServers.size() + " memcached server(s) detected, fine.");
            }
        }
    } catch (IOException ex) {
        logger.warn("couldn't create a connection, bailing out: " + ex.getMessage());
    }

    // process options

    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("indexer", options, true);
        quit(0);
    }

    boolean verbose = false;
    if (cmd.hasOption("verbose")) {
        verbose = true;
    }

    // ES options
    String[] hosts = new String[] { "0.0.0.0" };
    int port = 9300;
    String clusterName = "elasticsearch_mdma";
    int bulkSize = 3000;

    if (cmd.hasOption("host")) {
        hosts = cmd.getOptionValues("host");
    }
    if (cmd.hasOption("port")) {
        port = Integer.parseInt(cmd.getOptionValue("port"));
    }
    if (cmd.hasOption("cluster")) {
        clusterName = cmd.getOptionValue("cluster");
    }
    if (cmd.hasOption("bulksize")) {
        bulkSize = Integer.parseInt(cmd.getOptionValue("bulksize"));
        if (bulkSize < 1 || bulkSize > 100000) {
            logger.error("bulksize must be between 1 and 100,000");
            quit(1);
        }
    }

    // ES Client
    final Settings settings = ImmutableSettings.settingsBuilder().put("cluster.name", clusterName)
            .build();
    final TransportClient client = new TransportClient(settings);
    for (String host : hosts) {
        client.addTransportAddress(new InetSocketTransportAddress(host, port));
    }

    if (cmd.hasOption("filename") && cmd.hasOption("index") && cmd.hasOption("doctype")) {

        final String filename = cmd.getOptionValue("filename");

        final File _file = new File(filename);
        if (_file.length() == 0) {
            logger.info(_file.getAbsolutePath() + " is empty, skipping");
            quit(0); // file is empty
        }

        // for flat mode: leave a stampfile beside the json to 
        // indicate previous successful processing
        File directory = new File(filename).getParentFile();
        File stampfile = new File(directory, DigestUtils.shaHex(filename) + ".indexed");

        long start = System.currentTimeMillis();
        long lineCount = 0;

        final String indexName = cmd.getOptionValue("index");
        final String docType = cmd.getOptionValue("doctype");
        BulkRequestBuilder bulkRequest = client.prepareBulk();

        try {
            if (cmd.hasOption("flat")) {
                // flat mode
                // .........
                if (stampfile.exists()) {
                    logger.info("SKIPPING, since it seems this file has already " + "been imported (found: "
                            + stampfile.getAbsolutePath() + ")");
                    quit(0);
                }
            } else {

                final String srcSHA1 = extractSrcSHA1(filename);

                logger.debug(filename + " srcsha1: " + srcSHA1);

                long docsInIndex = getIndexedRecordCount(client, indexName, srcSHA1);
                logger.debug(filename + " indexed: " + docsInIndex);

                long docsInFile = getLineCount(filename);
                logger.debug(filename + " lines: " + docsInFile);

                // in non-flat-mode, indexing would take care
                // of inconsistencies
                if (docsInIndex == docsInFile) {
                    logger.info("UP-TO DATE: " + filename + " (" + docsInIndex + ", " + srcSHA1 + ")");
                    client.close();
                    quit(0);
                }

                if (docsInIndex > 0) {
                    logger.warn("INCONSISTENCY DETECTED: " + filename + ": indexed:" + docsInIndex + " lines:"
                            + docsInFile);

                    if (!cmd.hasOption("r")) {
                        logger.warn(
                                "Please re-run indexer with --repair flag or delete residues first with: $ curl -XDELETE "
                                        + hosts[0] + ":9200/" + indexName
                                        + "/_query -d ' {\"term\" : { \"meta.srcsha1\" : \"" + srcSHA1
                                        + "\" }}'");
                        client.close();
                        quit(1);
                    } else {
                        logger.info("Attempting to clear residues...");
                        // attempt to repair once
                        DeleteByQueryResponse dbqr = client.prepareDeleteByQuery(indexName)
                                .setQuery(termQuery("meta.srcsha1", srcSHA1)).execute().actionGet();

                        Iterator<IndexDeleteByQueryResponse> it = dbqr.iterator();
                        long deletions = 0;
                        while (it.hasNext()) {
                            IndexDeleteByQueryResponse response = it.next();
                            deletions += 1;
                        }
                        logger.info("Deleted residues of " + filename);
                        logger.info("Refreshing [" + indexName + "]");
                        RefreshResponse refreshResponse = client.admin().indices()
                                .refresh(new RefreshRequest(indexName)).actionGet();

                        long indexedAfterDelete = getIndexedRecordCount(client, indexName, srcSHA1);
                        logger.info(indexedAfterDelete + " docs remained");
                        if (indexedAfterDelete > 0) {
                            logger.warn("Not all residues cleaned. Try to fix this manually: $ curl -XDELETE "
                                    + hosts[0] + ":9200/" + indexName
                                    + "/_query -d ' {\"term\" : { \"meta.srcsha1\" : \"" + srcSHA1 + "\" }}'");
                            quit(1);
                        } else {
                            logger.info("Residues are gone. Now trying to reindex: " + filename);
                        }
                    }
                }
            }

            logger.info("INDEXING-REQUIRED: " + filename);
            if (cmd.hasOption("status")) {
                quit(0);
            }

            HashSet<String> idsInBatch = new HashSet<String>();

            String idField = null;
            if (cmd.hasOption("z")) {
                idField = cmd.getOptionValue("z");
            }

            final FileReader fr = new FileReader(filename);
            final BufferedReader br = new BufferedReader(fr);

            String line;
            // one line is one document
            while ((line = br.readLine()) != null) {

                // "Latest-Flag" machine
                // This gets obsolete with a "flat" index
                if (cmd.hasOption("z")) {
                    // flag that indicates, whether the document
                    // about to be indexed will be the latest
                    boolean willBeLatest = true;

                    // check if there is a previous (lower meta.timestamp) document with 
                    // the same identifier (whatever that may be - queried under "content")
                    final String contentIdentifier = getContentIdentifier(line, idField);
                    idsInBatch.add(contentIdentifier);

                    // assumed in meta.timestamp
                    final Long timestamp = Long.parseLong(getTimestamp(line));

                    logger.debug("Checking whether record is latest (line: " + lineCount + ")");
                    logger.debug(contentIdentifier + ", " + timestamp);

                    // get all docs, which match the contentIdentifier
                    // by filter, which doesn't score
                    final TermFilterBuilder idFilter = new TermFilterBuilder("content." + idField,
                            contentIdentifier);
                    final TermFilterBuilder kindFilter = new TermFilterBuilder("meta.kind", docType);
                    final AndFilterBuilder afb = new AndFilterBuilder();
                    afb.add(idFilter).add(kindFilter);
                    final FilteredQueryBuilder fb = filteredQuery(matchAllQuery(), afb);

                    final SearchResponse searchResponse = client.prepareSearch(indexName)
                            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(fb).setFrom(0)
                            .setSize(1200) // 3 years and 105 days assuming daily updates at the most
                            .setExplain(false).execute().actionGet();

                    final SearchHits searchHits = searchResponse.getHits();

                    logger.debug("docs with this id in the index: " + searchHits.getTotalHits());

                    for (final SearchHit hit : searchHits.getHits()) {
                        final String docId = hit.id();
                        final Map<String, Object> source = hit.sourceAsMap();
                        final Map meta = (Map) source.get("meta");
                        final Long docTimestamp = Long.parseLong(meta.get("timestamp").toString());
                        // if the indexed doc timestamp is lower than the current one,
                        // remove any latest flag
                        if (timestamp >= docTimestamp) {
                            source.remove("latest");
                            final ObjectMapper mapper = new ObjectMapper();
                            // put the updated doc back
                            // IndexResponse response = 
                            client.prepareIndex(indexName, docType).setCreate(false).setId(docId)
                                    .setSource(mapper.writeValueAsBytes(source))
                                    .execute(new ActionListener<IndexResponse>() {
                                        public void onResponse(IndexResponse rspns) {
                                            logger.debug("Removed latest flag from " + contentIdentifier + ", "
                                                    + docTimestamp + ", " + hit.id() + " since (" + timestamp
                                                    + " > " + docTimestamp + ")");
                                        }

                                        public void onFailure(Throwable thrwbl) {
                                            logger.error("Could not remove flag from " + hit.id() + ", "
                                                    + contentIdentifier);
                                        }
                                    });
                            // .execute()
                            //.actionGet();
                        } else {
                            logger.debug("Doc " + hit.id() + " is newer (" + docTimestamp + ")");
                            willBeLatest = false;
                        }
                    }

                    if (willBeLatest) {
                        line = setLatestFlag(line);
                        logger.info("Setting latest flag on " + contentIdentifier + ", " + timestamp);
                    }

                    // end of latest-flag machine
                    // beware - this will be correct as long as there
                    // are no dups within one bulk!
                }

                bulkRequest.add(client.prepareIndex(indexName, docType).setSource(line));
                lineCount++;
                logger.debug("Added line " + lineCount + " to BULK");
                logger.debug(line);

                if (lineCount % bulkSize == 0) {

                    if (idsInBatch.size() != bulkSize && cmd.hasOption("z")) {
                        logger.error(
                                "This batch has duplications in the ID. That's not bad for the index, just makes the latest flag fuzzy");
                        logger.error(
                                "Bulk size was: " + bulkSize + ", but " + idsInBatch.size() + " IDs (only)");
                    }
                    idsInBatch.clear();

                    logger.debug("Issuing BULK request");

                    final long actionCount = bulkRequest.numberOfActions();
                    final BulkResponse bulkResponse = bulkRequest.execute().actionGet();
                    final long tookInMillis = bulkResponse.getTookInMillis();

                    if (bulkResponse.hasFailures()) {
                        logger.fatal("FAILED, bulk not indexed. exiting now.");
                        Iterator<BulkItemResponse> it = bulkResponse.iterator();
                        while (it.hasNext()) {
                            BulkItemResponse bir = it.next();
                            if (bir.isFailed()) {
                                Failure failure = bir.getFailure();
                                logger.fatal("id: " + failure.getId() + ", message: " + failure.getMessage()
                                        + ", type: " + failure.getType() + ", index: " + failure.getIndex());
                            }
                        }
                        quit(1);
                    } else {
                        if (verbose) {
                            final double elapsed = System.currentTimeMillis() - start;
                            final double speed = (lineCount / elapsed * 1000);
                            logger.info("OK (" + filename + ") " + lineCount + " docs indexed (" + actionCount
                                    + "/" + tookInMillis + "ms" + "/" + String.format("%.2f", speed) + "r/s)");
                        }
                    }
                    bulkRequest = client.prepareBulk();
                }
            }

            // handle the remaining items
            final long actionCount = bulkRequest.numberOfActions();
            if (actionCount > 0) {
                final BulkResponse bulkResponse = bulkRequest.execute().actionGet();
                final long tookInMillis = bulkResponse.getTookInMillis();

                if (bulkResponse.hasFailures()) {
                    logger.fatal("FAILED, bulk not indexed. exiting now.");
                    Iterator<BulkItemResponse> it = bulkResponse.iterator();
                    while (it.hasNext()) {
                        BulkItemResponse bir = it.next();
                        if (bir.isFailed()) {
                            Failure failure = bir.getFailure();
                            logger.fatal("id: " + failure.getId() + ", message: " + failure.getMessage()
                                    + ", type: " + failure.getType() + ", index: " + failure.getIndex());
                        }
                    }
                    quit(1);
                } else {

                    // trigger update now
                    RefreshResponse refreshResponse = client.admin().indices()
                            .refresh(new RefreshRequest(indexName)).actionGet();

                    if (verbose) {
                        final double elapsed = System.currentTimeMillis() - start;
                        final double speed = (lineCount / elapsed * 1000);
                        logger.info("OK (" + filename + ") " + lineCount + " docs indexed (" + actionCount + "/"
                                + tookInMillis + "ms" + "/" + String.format("%.2f", speed) + "r/s)");
                    }

                }

            }

            br.close();
            client.close();
            final double elapsed = (System.currentTimeMillis() - start) / 1000.0;
            final double speed = (lineCount / elapsed);
            logger.info("indexing (" + filename + ") " + lineCount + " docs took " + elapsed + "s (speed: "
                    + String.format("%.2f", speed) + "r/s)");
            if (cmd.hasOption("flat")) {
                try {
                    FileUtils.touch(stampfile);
                } catch (IOException ioe) {
                    logger.warn(".indexed files not created. Will reindex everything everytime.");
                }
            }
        } catch (IOException e) {
            client.close();
            logger.error(e);
            quit(1);
        } finally {
            client.close();
        }
    }
    quit(0);
}

From source file:com.intuit.tank.tools.debugger.AgentDebuggerFrame.java

/**
 * @param args
 */
public static void main(String[] args) {
    try {
        java.security.Security.setProperty("networkaddress.cache.ttl", "0");
    } catch (Throwable e1) {
        LOG.warn(LogUtil.getLogMessage("Error setting dns timeout: " + e1.toString(), LogEventType.System));
    }
    try {
        System.setProperty("jsse.enableSNIExtension", "false");
    } catch (Throwable e1) {
        LOG.warn(LogUtil.getLogMessage("Error disabling SNI extension: " + e1.toString(), LogEventType.System));
    }
    try {
        System.setProperty("jdk.certpath.disabledAlgorithms", "");
    } catch (Throwable e1) {
        System.err.println("Error setting property jdk.certpath.disabledAlgorithms: " + e1.toString());
        e1.printStackTrace();
    }
    String url = "";
    if (args.length > 0) {
        url = args[0];
    }
    Properties props = new Properties();
    try (InputStream configStream = AgentDebuggerFrame.class.getResourceAsStream("/log4j.properties")) {
        props.load(configStream);
    } catch (IOException e) {
        System.out.println("Error: Cannot load configuration file");
    }
    props.setProperty("log4j.appender.agent.File", "debugger.log");
    LogManager.resetConfiguration();
    PropertyConfigurator.configure(props);

    new AgentDebuggerFrame(true, url).setVisible(true);
}

From source file:Evaluator.PerQueryRelDocs.java

public static void main(String[] args) {
    if (args.length < 1) {
        args = new String[1];
        args[0] = "/home/procheta/NetBeansProjects/InferrdAp/src/Evaluator/init.properties";
    }
    try {
        Properties prop = new Properties();
        try (FileReader propReader = new FileReader(args[0])) {
            prop.load(propReader);
        }
        EvaluateAll eval = new EvaluateAll(prop);
        eval.load();
        // eval.computeMeanAp();
        //  eval.storeRunMeanAp(prop.getProperty("storeMeanAp"));
        eval.storeRunQid(prop.getProperty("storeMeanAp"));
    } catch (Exception ex) {
        ex.printStackTrace();
    }

}

From source file:je3.rmi.MudClient.java

/**
 * This main() method is the standalone program that figures out what
 * database to connect to with what driver, connects to the database,
 * creates a PersistentBankServer object, and registers it with the registry,
 * making it available for client use.
 **/
public static void main(String[] args) {
    try {
        // Create a new Properties object.  Attempt to initialize it from
        // the BankDB.props file or the file optionally specified on the 
        // command line, ignoring errors.
        Properties p = new Properties();
        try { p.load(new FileInputStream(args[0])); }
        catch (Exception e) {
            try { p.load(new FileInputStream("BankDB.props")); }
            catch (Exception e2) {}
        }
       
        // The BankDB.props file (or file specified on the command line)
        // must contain properties "driver" and "database", and may
        // optionally contain properties "user" and "password".
        String driver = p.getProperty("driver");
        String database = p.getProperty("database");
        String user = p.getProperty("user", "");
        String password = p.getProperty("password", "");
       
        // Load the database driver class
        Class.forName(driver);
       
        // Connect to the database that stores our accounts
        Connection db = DriverManager.getConnection(database,
                 user, password);
       
        // Configure the database to allow multiple queries and updates
        // to be grouped into atomic transactions
        db.setAutoCommit(false);
        db.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
       
        // Create a server object that uses our database connection
        PersistentBankServer bank = new PersistentBankServer(db);
       
        // Read a system property to figure out how to name this server.
        // Use "SecondRemote" as the default.
        String name = System.getProperty("bankname", "SecondRemote");
       
        // Register the server with the name
        Naming.rebind(name, bank);
       
        // And tell everyone that we're up and running.
        System.out.println(name + " is open and ready for customers.");
    }
    catch (Exception e) {
        System.err.println(e);
        if (e instanceof SQLException)
            System.err.println("SQL State: " + ((SQLException) e).getSQLState());
        System.err.println("Usage: java [-Dbankname=<name>] " +
          "je3.rmi.PersistentBankServer " +
            "[<dbpropsfile>]");
        System.exit(1);
    }
}