Example usage for java.util Properties getClass

List of usage examples for java.util Properties getClass

Introduction

On this page you can find example usage for java.util Properties getClass.

Prototype

@HotSpotIntrinsicCandidate
public final native Class<?> getClass();

Document

Returns the runtime class of this Object.
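
Because getClass() is inherited from Object, calling it on a Properties instance yields the runtime Class (java.util.Properties), which the examples below mostly use to resolve classpath resources via getResourceAsStream. A minimal sketch of that idiom, not taken from any of the projects below (the resource name /app.properties is hypothetical):

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class GetClassSketch {
    public static Properties load() throws IOException {
        Properties props = new Properties();
        // getClass() returns the runtime class; a leading slash makes
        // getResourceAsStream resolve the name from the classpath root.
        try (InputStream in = props.getClass().getResourceAsStream("/app.properties")) {
            if (in == null) {
                throw new IOException("/app.properties not found on the classpath");
            }
            props.load(in);
        }
        return props;
    }
}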

Usage

From source file:de.unileipzig.ub.indexer.App.java

public static void main(String[] args) throws IOException {

    // create Options object
    Options options = new Options();

    options.addOption("h", "help", false, "display this help");

    options.addOption("f", "filename", true, "name of the JSON file whose content should be indexed");
    options.addOption("i", "index", true, "the name of the target index");
    options.addOption("d", "doctype", true, "the name of the doctype (title, local, ...)");

    options.addOption("t", "host", true, "elasticsearch hostname (default: 0.0.0.0)");
    options.addOption("p", "port", true, "transport port (that's NOT the http port, default: 9300)");
    options.addOption("c", "cluster", true, "cluster name (default: elasticsearch_mdma)");

    options.addOption("b", "bulksize", true, "number of docs sent in one request (default: 3000)");
    options.addOption("v", "verbose", false, "show processing speed while indexing");
    options.addOption("s", "status", false, "only show status of index for file");

    options.addOption("r", "repair", false, "attempt to repair recoverable inconsistencies on the go");
    options.addOption("e", "debug", false, "set logging level to debug");
    options.addOption("l", "logfile", true, "logfile - in not specified only log to stdout");

    options.addOption("m", "memcached", true, "host and port of memcached (default: localhost:11211)");
    options.addOption("z", "latest-flag-on", true,
            "enable latest flag according to field (within content, e.g. 001)");
    options.addOption("a", "flat", false, "flat-mode: do not check for inconsistencies");

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException ex) {
        logger.error(ex);
        System.exit(1);
    }

    // setup logging
    Properties systemProperties = System.getProperties();
    systemProperties.put("net.spy.log.LoggerImpl", "net.spy.memcached.compat.log.Log4JLogger");
    System.setProperties(systemProperties);
    Logger.getLogger("net.spy.memcached").setLevel(Level.ERROR);

    Properties props = new Properties();
    props.load(props.getClass().getResourceAsStream("/log4j.properties"));

    if (cmd.hasOption("debug")) {
        props.setProperty("log4j.logger.de.unileipzig", "DEBUG");
    }

    if (cmd.hasOption("logfile")) {
        props.setProperty("log4j.rootLogger", "INFO, stdout, F");
        props.setProperty("log4j.appender.F", "org.apache.log4j.FileAppender");
        props.setProperty("log4j.appender.F.File", cmd.getOptionValue("logfile"));
        props.setProperty("log4j.appender.F.layout", "org.apache.log4j.PatternLayout");
        props.setProperty("log4j.appender.F.layout.ConversionPattern", "%5p | %d | %F | %L | %m%n");
    }

    PropertyConfigurator.configure(props);

    InetAddress addr = InetAddress.getLocalHost();
    String memcachedHostAndPort = addr.getHostAddress() + ":11211";
    if (cmd.hasOption("m")) {
        memcachedHostAndPort = cmd.getOptionValue("m");
    }

    // setup caching
    try {
        if (memcachedClient == null) {
            memcachedClient = new MemcachedClient(
                    new ConnectionFactoryBuilder().setFailureMode(FailureMode.Cancel).build(),
                    AddrUtil.getAddresses(memcachedHostAndPort));
            try {
                // give client and server 300ms to connect
                Thread.sleep(300);
            } catch (InterruptedException ex) {
            }

            Collection availableServers = memcachedClient.getAvailableServers();
            logger.info(availableServers);
            if (availableServers.size() == 0) {
                logger.info("no memcached servers found");
                memcachedClient.shutdown();
                memcachedClient = null;
            } else {
                logger.info(availableServers.size() + " memcached server(s) detected, fine.");
            }
        }
    } catch (IOException ex) {
        logger.warn("couldn't create a connection, bailing out: " + ex.getMessage());
    }

    // process options

    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("indexer", options, true);
        quit(0);
    }

    boolean verbose = false;
    if (cmd.hasOption("verbose")) {
        verbose = true;
    }

    // ES options
    String[] hosts = new String[] { "0.0.0.0" };
    int port = 9300;
    String clusterName = "elasticsearch_mdma";
    int bulkSize = 3000;

    if (cmd.hasOption("host")) {
        hosts = cmd.getOptionValues("host");
    }
    if (cmd.hasOption("port")) {
        port = Integer.parseInt(cmd.getOptionValue("port"));
    }
    if (cmd.hasOption("cluster")) {
        clusterName = cmd.getOptionValue("cluster");
    }
    if (cmd.hasOption("bulksize")) {
        bulkSize = Integer.parseInt(cmd.getOptionValue("bulksize"));
        if (bulkSize < 1 || bulkSize > 100000) {
            logger.error("bulksize must be between 1 and 100,000");
            quit(1);
        }
    }

    // ES Client
    final Settings settings = ImmutableSettings.settingsBuilder().put("cluster.name", clusterName)
            .build();
    final TransportClient client = new TransportClient(settings);
    for (String host : hosts) {
        client.addTransportAddress(new InetSocketTransportAddress(host, port));
    }

    if (cmd.hasOption("filename") && cmd.hasOption("index") && cmd.hasOption("doctype")) {

        final String filename = cmd.getOptionValue("filename");

        final File _file = new File(filename);
        if (_file.length() == 0) {
            logger.info(_file.getAbsolutePath() + " is empty, skipping");
            quit(0); // file is empty
        }

        // for flat mode: leave a stampfile beside the json to 
        // indicate previous successful processing
        File directory = new File(filename).getParentFile();
        File stampfile = new File(directory, DigestUtils.shaHex(filename) + ".indexed");

        long start = System.currentTimeMillis();
        long lineCount = 0;

        final String indexName = cmd.getOptionValue("index");
        final String docType = cmd.getOptionValue("doctype");
        BulkRequestBuilder bulkRequest = client.prepareBulk();

        try {
            if (cmd.hasOption("flat")) {
                // flat mode
                // .........
                if (stampfile.exists()) {
                    logger.info("SKIPPING, since it seems this file has already " + "been imported (found: "
                            + stampfile.getAbsolutePath() + ")");
                    quit(0);
                }
            } else {

                final String srcSHA1 = extractSrcSHA1(filename);

                logger.debug(filename + " srcsha1: " + srcSHA1);

                long docsInIndex = getIndexedRecordCount(client, indexName, srcSHA1);
                logger.debug(filename + " indexed: " + docsInIndex);

                long docsInFile = getLineCount(filename);
                logger.debug(filename + " lines: " + docsInFile);

                // in non-flat-mode, indexing would take care
                // of inconsistencies
                if (docsInIndex == docsInFile) {
                    logger.info("UP-TO DATE: " + filename + " (" + docsInIndex + ", " + srcSHA1 + ")");
                    client.close();
                    quit(0);
                }

                if (docsInIndex > 0) {
                    logger.warn("INCONSISTENCY DETECTED: " + filename + ": indexed:" + docsInIndex + " lines:"
                            + docsInFile);

                    if (!cmd.hasOption("r")) {
                        logger.warn(
                                "Please re-run indexer with --repair flag or delete residues first with: $ curl -XDELETE "
                                        + hosts[0] + ":9200/" + indexName
                                        + "/_query -d ' {\"term\" : { \"meta.srcsha1\" : \"" + srcSHA1
                                        + "\" }}'");
                        client.close();
                        quit(1);
                    } else {
                        logger.info("Attempting to clear residues...");
                        // attempt to repair once
                        DeleteByQueryResponse dbqr = client.prepareDeleteByQuery(indexName)
                                .setQuery(termQuery("meta.srcsha1", srcSHA1)).execute().actionGet();

                        Iterator<IndexDeleteByQueryResponse> it = dbqr.iterator();
                        long deletions = 0;
                        while (it.hasNext()) {
                            IndexDeleteByQueryResponse response = it.next();
                            deletions += 1;
                        }
                        logger.info("Deleted residues of " + filename);
                        logger.info("Refreshing [" + indexName + "]");
                        RefreshResponse refreshResponse = client.admin().indices()
                                .refresh(new RefreshRequest(indexName)).actionGet();

                        long indexedAfterDelete = getIndexedRecordCount(client, indexName, srcSHA1);
                        logger.info(indexedAfterDelete + " docs remained");
                        if (indexedAfterDelete > 0) {
                            logger.warn("Not all residues cleaned. Try to fix this manually: $ curl -XDELETE "
                                    + hosts[0] + ":9200/" + indexName
                                    + "/_query -d ' {\"term\" : { \"meta.srcsha1\" : \"" + srcSHA1 + "\" }}'");
                            quit(1);
                        } else {
                            logger.info("Residues are gone. Now trying to reindex: " + filename);
                        }
                    }
                }
            }

            logger.info("INDEXING-REQUIRED: " + filename);
            if (cmd.hasOption("status")) {
                quit(0);
            }

            HashSet<String> idsInBatch = new HashSet<String>();

            String idField = null;
            if (cmd.hasOption("z")) {
                idField = cmd.getOptionValue("z");
            }

            final FileReader fr = new FileReader(filename);
            final BufferedReader br = new BufferedReader(fr);

            String line;
            // one line is one document
            while ((line = br.readLine()) != null) {

                // "Latest-Flag" machine
                // This gets obsolete with a "flat" index
                if (cmd.hasOption("z")) {
                    // flag that indicates, whether the document
                    // about to be indexed will be the latest
                    boolean willBeLatest = true;

                    // check if there is a previous (lower meta.timestamp) document with 
                    // the same identifier (whatever that may be - queried under "content")
                    final String contentIdentifier = getContentIdentifier(line, idField);
                    idsInBatch.add(contentIdentifier);

                    // assumed in meta.timestamp
                    final Long timestamp = Long.parseLong(getTimestamp(line));

                    logger.debug("Checking whether record is latest (line: " + lineCount + ")");
                    logger.debug(contentIdentifier + ", " + timestamp);

                    // get all docs, which match the contentIdentifier
                    // by filter, which doesn't score
                    final TermFilterBuilder idFilter = new TermFilterBuilder("content." + idField,
                            contentIdentifier);
                    final TermFilterBuilder kindFilter = new TermFilterBuilder("meta.kind", docType);
                    final AndFilterBuilder afb = new AndFilterBuilder();
                    afb.add(idFilter).add(kindFilter);
                    final FilteredQueryBuilder fb = filteredQuery(matchAllQuery(), afb);

                    final SearchResponse searchResponse = client.prepareSearch(indexName)
                            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(fb).setFrom(0)
                            .setSize(1200) // 3 years and 105 days assuming daily updates at the most
                            .setExplain(false).execute().actionGet();

                    final SearchHits searchHits = searchResponse.getHits();

                    logger.debug("docs with this id in the index: " + searchHits.getTotalHits());

                    for (final SearchHit hit : searchHits.getHits()) {
                        final String docId = hit.id();
                        final Map<String, Object> source = hit.sourceAsMap();
                        final Map meta = (Map) source.get("meta");
                        final Long docTimestamp = Long.parseLong(meta.get("timestamp").toString());
                        // if the indexed doc's timestamp is lower than (or equal to) the current one,
                        // remove any latest flag
                        if (timestamp >= docTimestamp) {
                            source.remove("latest");
                            final ObjectMapper mapper = new ObjectMapper();
                            // put the updated doc back
                            // IndexResponse response = 
                            client.prepareIndex(indexName, docType).setCreate(false).setId(docId)
                                    .setSource(mapper.writeValueAsBytes(source))
                                    .execute(new ActionListener<IndexResponse>() {
                                        public void onResponse(IndexResponse rspns) {
                                            logger.debug("Removed latest flag from " + contentIdentifier + ", "
                                                    + docTimestamp + ", " + hit.id() + " since (" + timestamp
                                                    + " > " + docTimestamp + ")");
                                        }

                                        public void onFailure(Throwable thrwbl) {
                                            logger.error("Could not remove flag from " + hit.id() + ", "
                                                    + contentIdentifier);
                                        }
                                    });
                            // .execute()
                            //.actionGet();
                        } else {
                            logger.debug("Doc " + hit.id() + " is newer (" + docTimestamp + ")");
                            willBeLatest = false;
                        }
                    }

                    if (willBeLatest) {
                        line = setLatestFlag(line);
                        logger.info("Setting latest flag on " + contentIdentifier + ", " + timestamp);
                    }

                    // end of latest-flag machine
                    // beware - this will be correct as long as there
                    // are no dups within one bulk!
                }

                bulkRequest.add(client.prepareIndex(indexName, docType).setSource(line));
                lineCount++;
                logger.debug("Added line " + lineCount + " to BULK");
                logger.debug(line);

                if (lineCount % bulkSize == 0) {

                    if (idsInBatch.size() != bulkSize && cmd.hasOption("z")) {
                        logger.error(
                                "This batch has duplications in the ID. That's not bad for the index, just makes the latest flag fuzzy");
                        logger.error(
                                "Bulk size was: " + bulkSize + ", but " + idsInBatch.size() + " IDs (only)");
                    }
                    idsInBatch.clear();

                    logger.debug("Issuing BULK request");

                    final long actionCount = bulkRequest.numberOfActions();
                    final BulkResponse bulkResponse = bulkRequest.execute().actionGet();
                    final long tookInMillis = bulkResponse.getTookInMillis();

                    if (bulkResponse.hasFailures()) {
                        logger.fatal("FAILED, bulk not indexed. exiting now.");
                        Iterator<BulkItemResponse> it = bulkResponse.iterator();
                        while (it.hasNext()) {
                            BulkItemResponse bir = it.next();
                            if (bir.isFailed()) {
                                Failure failure = bir.getFailure();
                                logger.fatal("id: " + failure.getId() + ", message: " + failure.getMessage()
                                        + ", type: " + failure.getType() + ", index: " + failure.getIndex());
                            }
                        }
                        quit(1);
                    } else {
                        if (verbose) {
                            final double elapsed = System.currentTimeMillis() - start;
                            final double speed = (lineCount / elapsed * 1000);
                            logger.info("OK (" + filename + ") " + lineCount + " docs indexed (" + actionCount
                                    + "/" + tookInMillis + "ms" + "/" + String.format("%.2f", speed) + "r/s)");
                        }
                    }
                    bulkRequest = client.prepareBulk();
                }
            }

            // handle the remaining items
            final long actionCount = bulkRequest.numberOfActions();
            if (actionCount > 0) {
                final BulkResponse bulkResponse = bulkRequest.execute().actionGet();
                final long tookInMillis = bulkResponse.getTookInMillis();

                if (bulkResponse.hasFailures()) {
                    logger.fatal("FAILED, bulk not indexed. exiting now.");
                    Iterator<BulkItemResponse> it = bulkResponse.iterator();
                    while (it.hasNext()) {
                        BulkItemResponse bir = it.next();
                        if (bir.isFailed()) {
                            Failure failure = bir.getFailure();
                            logger.fatal("id: " + failure.getId() + ", message: " + failure.getMessage()
                                    + ", type: " + failure.getType() + ", index: " + failure.getIndex());
                        }
                    }
                    quit(1);
                } else {

                    // trigger update now
                    RefreshResponse refreshResponse = client.admin().indices()
                            .refresh(new RefreshRequest(indexName)).actionGet();

                    if (verbose) {
                        final double elapsed = System.currentTimeMillis() - start;
                        final double speed = (lineCount / elapsed * 1000);
                        logger.info("OK (" + filename + ") " + lineCount + " docs indexed (" + actionCount + "/"
                                + tookInMillis + "ms" + "/" + String.format("%.2f", speed) + "r/s)");
                    }

                }

            }

            br.close();
            client.close();
            final double elapsed = (System.currentTimeMillis() - start) / 1000.0;
            final double speed = (lineCount / elapsed);
            logger.info("indexing (" + filename + ") " + lineCount + " docs took " + elapsed + "s (speed: "
                    + String.format("%.2f", speed) + "r/s)");
            if (cmd.hasOption("flat")) {
                try {
                    FileUtils.touch(stampfile);
                } catch (IOException ioe) {
                    logger.warn(".indexed files not created. Will reindex everything everytime.");
                }
            }
        } catch (IOException e) {
            client.close();
            logger.error(e);
            quit(1);
        } finally {
            client.close();
        }
    }
    quit(0);
}

From source file:com.marklogic.entityservices.tests.TestSetup.java

public synchronized static TestSetup getInstance() {
    Properties prop = new Properties();
    InputStream input = null;

    try {

        input = prop.getClass().getResourceAsStream("/gradle.properties");

        // load a properties file
        prop.load(input);

    } catch (IOException ex) {
        ex.printStackTrace();
        throw new RuntimeException(ex);
    }

    String host = prop.getProperty("mlHost");
    String username = prop.getProperty("mlUsername");
    String password = prop.getProperty("mlPassword");
    String port = prop.getProperty("mlRestPort");
    String modulesDatabase = prop.getProperty("mlModulesDatabaseName");
    String schemasDatabase = prop.getProperty("mlSchemasDatabaseName");

    if (instance == null) {
        instance = new TestSetup();
        if (instance._client == null) {
            instance._client = DatabaseClientFactory.newClient(host, Integer.parseInt(port), username, password,
                    Authentication.DIGEST);
        }
        if (instance._modulesClient == null) {
            instance._modulesClient = DatabaseClientFactory.newClient(host, Integer.parseInt(port),
                    modulesDatabase, username, password, Authentication.DIGEST);
        }
        if (instance._schemasClient == null) {
            instance._schemasClient = DatabaseClientFactory.newClient(host, Integer.parseInt(port),
                    schemasDatabase, username, password, Authentication.DIGEST);
        }
        instance.docMgr = instance._client.newJSONDocumentManager();
    }

    //instance.loadEntityTypes();
    //instance.loadExtraFiles();
    return instance;
}

From source file:com.marklogic.entityservices.TestSetup.java

public synchronized static TestSetup getInstance() {
    Properties prop = new Properties();
    InputStream input = null;

    try {

        input = prop.getClass().getResourceAsStream("/gradle.properties");

        // load a properties file
        prop.load(input);

    } catch (IOException ex) {
        ex.printStackTrace();
        throw new RuntimeException(ex);
    }

    String host = prop.getProperty("mlHost");
    String username = prop.getProperty("mlUsername");
    String password = prop.getProperty("mlPassword");
    String port = prop.getProperty("mlRestPort");
    String modulesDatabase = prop.getProperty("mlModulesDatabaseName");
    String schemasDatabase = prop.getProperty("mlSchemasDatabaseName");

    if (instance == null) {
        instance = new TestSetup();
        if (instance._client == null) {
            instance._client = DatabaseClientFactory.newClient(host, Integer.parseInt(port), username, password,
                    Authentication.DIGEST);
        }
        if (instance._modulesClient == null) {
            instance._modulesClient = DatabaseClientFactory.newClient(host, Integer.parseInt(port),
                    modulesDatabase, username, password, Authentication.DIGEST);
        }
        if (instance._schemasClient == null) {
            instance._schemasClient = DatabaseClientFactory.newClient(host, Integer.parseInt(port),
                    schemasDatabase, username, password, Authentication.DIGEST);
        }
    }

    instance.loadEntityTypes();
    instance.loadInvalidEntityTypes();
    instance.loadExtraFiles();
    return instance;
}

From source file:ru.retbansk.utils.UsefulMethods.java

/** Loads properties from the email.properties file.
 * <p> The program first tries to read an external properties file;
 * if none is found, it falls back to the internal one on the classpath.
 * <p> Implements a user exit via the "continue" property.
 * 
 * @return java.util.Properties
 * @throws Exception
 */
public static Properties loadProperties() throws Exception {
    Properties prop = new Properties();
    InputStream inputStream = null;
    // At first, program will try to read external properties
    try {
        inputStream = new FileInputStream("email.properties");
        prop.load(inputStream);
    } catch (FileNotFoundException e) {
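        // external file not found; fall back to the classpath resource below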
    }
    // If nothing there - internal
    if (inputStream == null)
        inputStream = prop.getClass().getResourceAsStream("/email.properties");

    prop.load(inputStream);
    if (inputStream != null)
        inputStream.close();
    // Implementing user exit
    try {
        if (prop.getProperty("continue").equals("no")) {
            logger.info("Program was stopped by the User");
            System.exit(0);
        }
    } catch (Exception e) {
        logger.info("Continue parameter is not defined");
    }
    return prop;
}
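
The external-then-classpath fallback described in the Javadoc above can also be written with try-with-resources, which avoids the second prop.load call and the unclosed-stream risk. A sketch under the same assumptions (an optional email.properties next to the jar, a bundled /email.properties on the classpath), not part of the original project:

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class FallbackLoadSketch {
    public static Properties loadEmailProperties() throws IOException {
        Properties prop = new Properties();
        try (InputStream in = new FileInputStream("email.properties")) {
            // an external file next to the jar wins
            prop.load(in);
        } catch (FileNotFoundException e) {
            // fall back to the bundled classpath resource
            try (InputStream in = prop.getClass().getResourceAsStream("/email.properties")) {
                if (in == null) {
                    throw new IOException("email.properties not found externally or on the classpath");
                }
                prop.load(in);
            }
        }
        return prop;
    }
}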

From source file:io.coala.json.DynaBean.java

/**
 * @param <T> the proxied type
 * @param type the proxied (wrapper) type
 * @return a JsonSerializer for proxies of the given type
 */
static final <T> JsonSerializer<T> createJsonSerializer(final Class<T> type) {
    return new JsonSerializer<T>() {
        @Override
        public void serialize(final T value, final JsonGenerator jgen, final SerializerProvider serializers)
                throws IOException, JsonProcessingException {
            // non-Proxy objects get default treatment
            if (!Proxy.isProxyClass(value.getClass())) {
                @SuppressWarnings("unchecked")
                final JsonSerializer<T> ser = (JsonSerializer<T>) serializers
                        .findValueSerializer(value.getClass());
                if (ser != this)
                    ser.serialize(value, jgen, serializers);
                else
                    LOG.warn("Problem serializing: {}", value);
                return;
            }

            // BeanWrapper gets special treatment
            if (DynaBeanInvocationHandler.class.isInstance(Proxy.getInvocationHandler(value))) {
                final DynaBeanInvocationHandler handler = (DynaBeanInvocationHandler) Proxy
                        .getInvocationHandler(value);

                // Wrapper extensions get special treatment
                if (Wrapper.class.isAssignableFrom(handler.type)) {
                    final Object wrap = handler.bean.get("wrap");
                    serializers.findValueSerializer(wrap.getClass(), null).serialize(wrap, jgen, serializers);
                    return;
                }
                // Config (Accessible) extensions get special treatment
                else if (Accessible.class.isAssignableFrom(handler.type)) {
                    final Map<String, Object> copy = new HashMap<>(handler.bean.any());
                    final Accessible config = (Accessible) handler.config;
                    for (String key : config.propertyNames())
                        copy.put(key, config.getProperty(key));
                    serializers.findValueSerializer(copy.getClass(), null).serialize(copy, jgen, serializers);
                    return;
                } else if (Config.class.isAssignableFrom(handler.type))
                    throw new JsonGenerationException("BeanWrapper should extend " + Accessible.class.getName()
                            + " required for serialization: " + Arrays.asList(handler.type.getInterfaces()),
                            jgen);

                // BeanWrappers that do not extend OWNER API's Config
                serializers.findValueSerializer(handler.bean.getClass(), null).serialize(handler.bean, jgen,
                        serializers);
                return;
            }

            // Config (Accessible) gets special treatment
            if (Accessible.class.isInstance(value)) {
                final Accessible config = (Accessible) value;
                final Properties entries = new Properties();
                for (String key : config.propertyNames())
                    entries.put(key, config.getProperty(key));
                serializers.findValueSerializer(entries.getClass(), null).serialize(entries, jgen, serializers);
                return;
            }

            if (Config.class.isInstance(value))
                throw new JsonGenerationException("Config should extend " + Accessible.class.getName()
                        + " required for serialization: " + Arrays.asList(value.getClass().getInterfaces()),
                        jgen);

            throw new JsonGenerationException(
                    "No serializer found for proxy of: " + Arrays.asList(value.getClass().getInterfaces()),
                    jgen);
        }
    };
}

From source file:net.sf.jasperreports.data.hibernate.HibernateDataAdapterService.java

@Override
public void contributeParameters(Map<String, Object> parameters) throws JRException {
    HibernateDataAdapter hbmDA = getHibernateDataAdapter();
    if (hbmDA != null) {
        ClassLoader oldThreadClassLoader = Thread.currentThread().getContextClassLoader();

        try {
            Thread.currentThread().setContextClassLoader(getClassLoader(oldThreadClassLoader));

            Class<?> clazz = null;
            if (!hbmDA.isUseAnnotation()) {
                clazz = JRClassLoader.loadClassForRealName("org.hibernate.cfg.Configuration");
            } else {
                clazz = JRClassLoader.loadClassForRealName("org.hibernate.cfg.AnnotationConfiguration");
            }
            if (clazz != null) {
                Object configure = clazz.getDeclaredConstructor().newInstance();
                if (configure != null) {
                    String xmlFileName = hbmDA.getXMLFileName();
                    if (xmlFileName != null && !xmlFileName.isEmpty()) {
                        File file = new File(xmlFileName);
                        clazz.getMethod("configure", file.getClass()).invoke(configure, file);
                    } else {
                        clazz.getMethod("configure", new Class[] {}).invoke(configure, new Object[] {});
                    }
                    String pFileName = hbmDA.getPropertiesFileName();
                    if (pFileName != null && !pFileName.isEmpty()) {
                        Properties propHibernate = new Properties();
                        propHibernate.load(new FileInputStream(pFileName));

                        clazz.getMethod("setProperties", propHibernate.getClass()).invoke(configure,
                                propHibernate);
                    }

                    Object bsf = clazz.getMethod("buildSessionFactory", new Class[] {}).invoke(configure,
                            new Object[] {});
                    session = bsf.getClass().getMethod("openSession", new Class[] {}).invoke(bsf,
                            new Object[] {});
                    session.getClass().getMethod("beginTransaction", new Class[] {}).invoke(session,
                            new Object[] {});
                    parameters.put(JRHibernateQueryExecuterFactory.PARAMETER_HIBERNATE_SESSION, session);
                }
            }
        } catch (IOException | ClassNotFoundException | InstantiationException | IllegalAccessException
                | IllegalArgumentException | SecurityException | InvocationTargetException
                | NoSuchMethodException e) {
            throw new JRException(e);
        } finally {
            Thread.currentThread().setContextClassLoader(oldThreadClassLoader);
        }
    }
}
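
The reflective calls above exist so the data adapter can run without a compile-time Hibernate dependency. For comparison, a minimal sketch of the same steps written against the Hibernate API directly (assuming Hibernate is on the compile classpath; not part of the JasperReports source):

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

public class HibernateSessionSketch {
    public static Session openSession(String propertiesFileName) throws IOException {
        Configuration configuration = new Configuration().configure();

        // setProperties takes the Properties instance itself; getClass() is only
        // needed in the reflective version to look up the method signature.
        Properties hibernateProps = new Properties();
        try (InputStream in = new FileInputStream(propertiesFileName)) {
            hibernateProps.load(in);
        }
        configuration.setProperties(hibernateProps);

        SessionFactory sessionFactory = configuration.buildSessionFactory();
        Session session = sessionFactory.openSession();
        session.beginTransaction();
        return session;
    }
}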

From source file:ru.retbansk.utils.scheduled.impl.ReadEmailAndConvertToXmlSpringImpl.java

/** Loads properties from the email.properties file.
 * <p> The program first tries to read an external properties file;
 * if none is found, it falls back to the internal one on the classpath.
 * <p> Implements a user exit via the "continue" property.
 * 
 * @return email.properties from the folder with the jar, or from the classpath if not found there
 * @throws Exception
 */
public Properties loadProperties() throws Exception {
    Properties prop = new Properties();
    InputStream inputStream = null;
    // At first, program will try to read external properties
    try {
        inputStream = new FileInputStream("email.properties");
        prop.load(inputStream);
    } catch (FileNotFoundException e) {
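        // external file not found; fall back to the classpath resource below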
    }
    // If nothing there - internal
    if (inputStream == null)
        inputStream = prop.getClass().getResourceAsStream("/email.properties");

    prop.load(inputStream);
    if (inputStream != null)
        inputStream.close();
    // Implementing user exit
    try {
        if (prop.getProperty("continue").equals("no")) {
            logger.info("Program was stopped by the User");
            System.exit(0);
        }
    } catch (Exception e) {
        logger.info("Continue parameter is not defined");
    }
    return prop;
}

From source file:codeswarm.code_swarm.java

/**
 * Initialisation
 */
public void setup() {
    width = cfg.getIntProperty(CodeSwarmConfig.WIDTH_KEY, 640);
    if (width <= 0) {
        width = 640;
    }

    height = cfg.getIntProperty(CodeSwarmConfig.HEIGHT_KEY, 480);
    if (height <= 0) {
        height = 480;
    }

    maxBackgroundThreads = cfg.getIntProperty(CodeSwarmConfig.MAX_THREADS_KEY, 4);
    if (maxBackgroundThreads <= 0) {
        maxBackgroundThreads = 4;
    }
    backgroundExecutor = new ThreadPoolExecutor(1, maxBackgroundThreads, Long.MAX_VALUE, TimeUnit.NANOSECONDS,
            new ArrayBlockingQueue<Runnable>(4 * maxBackgroundThreads),
            new ThreadPoolExecutor.CallerRunsPolicy());

    if (cfg.getBooleanProperty(CodeSwarmConfig.USE_OPEN_GL, false)) {
        size(width, height, OPENGL);
    } else {
        size(width, height);
    }

    showLegend = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_LEGEND, false);
    showHistogram = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_HISTORY, false);
    showDate = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_DATE, false);
    showEdges = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_EDGES, false);
    showDebug = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_DEBUG, false);
    takeSnapshots = cfg.getBooleanProperty(CodeSwarmConfig.TAKE_SNAPSHOTS_KEY, false);
    drawNamesSharp = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_NAMES_SHARP, true);
    drawNamesHalos = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_NAMES_HALOS, false);
    drawFilesSharp = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_FILES_SHARP, false);
    drawFilesFuzzy = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_FILES_FUZZY, true);
    drawFilesJelly = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_FILES_JELLY, false);
    background = cfg.getBackground().getRGB();

    UPDATE_DELTA = cfg.getIntProperty(CodeSwarmConfig.MSEC_PER_FRAME_KEY, -1);
    if (UPDATE_DELTA == -1) {
        int framesperday = cfg.getIntProperty(CodeSwarmConfig.FRAMES_PER_DAY_KEY, 4);
        if (framesperday > 0) {
            UPDATE_DELTA = (86400000 / framesperday);
        }
    }
    if (UPDATE_DELTA <= 0) {
        // Default to 4 frames per day.
        UPDATE_DELTA = 21600000;
    }

    isInputSorted = cfg.getBooleanProperty(CodeSwarmConfig.IS_INPUT_SORTED_KEY, false);

    /**
     * This section loads config files and calls the setup method for all physics engines.
     */

    physicsEngineConfigDir = cfg.getStringProperty(CodeSwarmConfig.PHYSICS_ENGINE_CONF_DIR, "physics_engine");
    File f = new File(physicsEngineConfigDir);
    String[] configFiles = null;
    if (f.exists() && f.isDirectory()) {
        configFiles = f.list();
    }
    for (int i = 0; configFiles != null && i < configFiles.length; i++) {
        if (configFiles[i].endsWith(".config")) {
            Properties p = new Properties();
            String ConfigPath = physicsEngineConfigDir + System.getProperty("file.separator") + configFiles[i];
            try {
                p.load(new FileInputStream(ConfigPath));
            } catch (FileNotFoundException e) {
                e.printStackTrace();
                System.exit(1);
            } catch (IOException e) {
                e.printStackTrace();
                System.exit(1);
            }
            String ClassName = p.getProperty("name", "__DEFAULT__");
            if (!ClassName.equals("__DEFAULT__")) {
                PhysicsEngine pe = getPhysicsEngine(ClassName);
                pe.setup(p);
                mPhysicsEngineChoices.add(pe);
            } else {
                logger.error("Skipping config file '" + ConfigPath
                        + "'.  Must specify class name via the 'name' parameter.");
                System.exit(1);
            }
        }
    }

    if (mPhysicsEngineChoices.size() == 0) {
        logger.error("No physics engine config files found in '" + physicsEngineConfigDir + "'.");
        System.exit(1);
    }

    // Physics engine configuration and instantiation
    physicsEngineSelection = cfg.getStringProperty(CodeSwarmConfig.PHYSICS_ENGINE_SELECTION,
            PHYSICS_ENGINE_LEGACY);

    for (PhysicsEngine p : mPhysicsEngineChoices) {
        if (physicsEngineSelection.equals(p.getClass().getName())) {
            mPhysicsEngine = p;
        }
    }

    if (mPhysicsEngine == null) {
        logger.error("No physics engine matches your choice of '" + physicsEngineSelection + "'. Check '"
                + physicsEngineConfigDir + "' for options.");
        System.exit(1);
    }

    smooth();
    frameRate(FRAME_RATE);

    // init data structures
    nodes = new CopyOnWriteArrayList<FileNode>();
    edges = new CopyOnWriteArrayList<Edge>();
    people = new CopyOnWriteArrayList<PersonNode>();
    history = new LinkedList<ColorBins>();

    if (isInputSorted) {
        //If the input is sorted, we only need to store the next few events
        eventsQueue = new ArrayBlockingQueue<FileEvent>(5000);
    } else {
        //Otherwise we need to store them all at once in a data structure that will sort them
        eventsQueue = new PriorityBlockingQueue<FileEvent>();
    }

    // Init color map
    initColors();

    loadRepEvents(cfg.getStringProperty(CodeSwarmConfig.INPUT_FILE_KEY)); // event formatted (this is the standard)
    synchronized (this) {
        while (!finishedLoading && eventsQueue.isEmpty()) {
            try {
                wait();
            } catch (InterruptedException e) {
                logger.error("The ready-check thread was interrupted", e);
            }
        }
    }
    prevDate = eventsQueue.peek().getDate();

    SCREENSHOT_FILE = cfg.getStringProperty(CodeSwarmConfig.SNAPSHOT_LOCATION_KEY);

    maxFramesSaved = (int) Math.pow(10, SCREENSHOT_FILE.replaceAll("[^#]", "").length());

    // Create fonts
    String fontName = cfg.getStringProperty(CodeSwarmConfig.FONT_KEY, "SansSerif");
    String fontNameBold = cfg.getStringProperty(CodeSwarmConfig.FONT_KEY_BOLD, "SansSerif");
    Integer fontSize = cfg.getIntProperty(CodeSwarmConfig.FONT_SIZE, 10);
    Integer fontSizeBold = cfg.getIntProperty(CodeSwarmConfig.FONT_SIZE_BOLD, 14);
    font = createFont(fontName, fontSize);
    boldFont = createFont(fontNameBold, fontSizeBold);

    textFont(font);

    // Create the file particle image
    sprite = loadImage(cfg.getStringProperty(CodeSwarmConfig.SPRITE_FILE_KEY, "particle.png"));
    // Add translucency (using itself in this case)
    sprite.mask(sprite);
}

From source file:org.apache.jmeter.util.BSFTestElement.java

protected void initManager(BSFManager mgr) throws BSFException {
    final String label = getName();
    final String fileName = getFilename();
    final String scriptParameters = getParameters();
    // Use actual class name for log
    final Logger logger = LoggingManager.getLoggerForShortName(getClass().getName());
    mgr.declareBean("log", logger, Logger.class); // $NON-NLS-1$
    mgr.declareBean("Label", label, String.class); // $NON-NLS-1$
    mgr.declareBean("FileName", fileName, String.class); // $NON-NLS-1$
    mgr.declareBean("Parameters", scriptParameters, String.class); // $NON-NLS-1$
    String[] args = JOrphanUtils.split(scriptParameters, " ");//$NON-NLS-1$
    mgr.declareBean("args", args, args.getClass());//$NON-NLS-1$
    // Add variables for access to context and variables
    JMeterContext jmctx = JMeterContextService.getContext();
    JMeterVariables vars = jmctx.getVariables();
    Properties props = JMeterUtils.getJMeterProperties();

    mgr.declareBean("ctx", jmctx, jmctx.getClass()); // $NON-NLS-1$
    mgr.declareBean("vars", vars, vars.getClass()); // $NON-NLS-1$
    mgr.declareBean("props", props, props.getClass()); // $NON-NLS-1$
    // For use in debugging:
    mgr.declareBean("OUT", System.out, PrintStream.class); // $NON-NLS-1$

    // Most subclasses will need these:
    Sampler sampler = jmctx.getCurrentSampler();
    mgr.declareBean("sampler", sampler, Sampler.class);
    SampleResult prev = jmctx.getPreviousResult();
    mgr.declareBean("prev", prev, SampleResult.class);
}
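
In this example getClass() is not used for resource lookup at all: props.getClass() supplies the runtime type token that BSFManager.declareBean needs. A minimal sketch of that second idiom (class and method names here are illustrative, not from JMeter):

import java.util.Properties;

import org.apache.bsf.BSFException;
import org.apache.bsf.BSFManager;

public class DeclareBeanSketch {
    public static void register(BSFManager mgr, Properties props) throws BSFException {
        // declareBean(name, bean, type) needs a Class token; props.getClass()
        // evaluates to java.util.Properties at runtime.
        mgr.declareBean("props", props, props.getClass());
    }
}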

From source file:org.apache.kylin.common.persistence.JDBCSqlQueryFormatProvider.java

public static JDBCSqlQueryFormat createJDBCSqlQueriesFormat(String dialect) {
    String key = String.format(Locale.ROOT, "/metadata-jdbc-%s.properties", dialect.toLowerCase(Locale.ROOT));
    if (cache.containsKey(key)) {
        return new JDBCSqlQueryFormat(cache.get(key));
    } else {
        Properties props = new Properties();
        InputStream input = null;
        try {
            input = props.getClass().getResourceAsStream(key);
            props.load(input);
            if (!props.isEmpty()) {
                cache.put(key, props);
            }
            return new JDBCSqlQueryFormat(props);
        } catch (Exception e) {
            throw new RuntimeException(
                    String.format(Locale.ROOT, "Can't find properties named %s for metastore", key), e);
        } finally {
            IOUtils.closeQuietly(input);
        }
    }

}