Example usage for java.util.Properties.putAll

Introduction

On this page you can find example usages of java.util.Properties.putAll, collected from open-source projects.

Prototype

@Override
public synchronized void putAll(Map<?, ?> t)
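
Before the project examples below, here is a minimal, self-contained sketch of the basic contract (all names are illustrative): putAll copies every entry from the given Map into the Properties object, overwriting entries whose keys already exist.

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class PutAllDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("host", "localhost");

        Map<String, String> overrides = new HashMap<>();
        overrides.put("host", "example.org"); // overwrites the existing entry
        overrides.put("port", "8080");        // adds a new entry

        props.putAll(overrides);

        System.out.println(props.getProperty("host")); // example.org
        System.out.println(props.getProperty("port")); // 8080
    }
}

Because putAll is inherited from Hashtable, it will also accept non-String keys and values; getProperty ignores such entries, so it is safest to copy only String-to-String maps.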

Usage

From source file:org.apache.geode.distributed.AbstractLauncher.java

/**
 * Creates a Properties object with configuration settings that the launcher has that should take
 * precedence over anything the user has defined in their gemfire properties file.
 *
 * @param defaults default GemFire Distributed System properties as configured in the Builder.
 * @return a Properties object with GemFire properties that the launcher has defined.
 * @see java.util.Properties
 */
protected Properties getDistributedSystemProperties(final Properties defaults) {
    final Properties distributedSystemProperties = new Properties();

    if (defaults != null) {
        distributedSystemProperties.putAll(defaults);
    }

    if (isNotBlank(getMemberName())) {
        distributedSystemProperties.setProperty(NAME, getMemberName());
    }

    return distributedSystemProperties;
}
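
The method above illustrates a common layering idiom: seed a fresh Properties with the defaults via putAll, then let specific settings win. A minimal sketch of that idiom, with hypothetical property names:

import java.util.Properties;

public class LayeredDefaultsDemo {
    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("name", "builder-default");
        defaults.setProperty("log-level", "config");

        Properties launcherProps = new Properties();
        launcherProps.putAll(defaults);                // start from the defaults
        launcherProps.setProperty("name", "server-1"); // launcher-defined value wins

        System.out.println(launcherProps.getProperty("name"));      // server-1
        System.out.println(launcherProps.getProperty("log-level")); // config
    }
}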

From source file:org.apache.samza.system.kafka.KafkaSystemAdmin.java

/**
 * Converts a StreamSpec into a KafkaStreamSpec. Special handling for coordinator and changelog stream.
 * @param spec a StreamSpec object
 * @return KafkaStreamSpec object
 */
public KafkaStreamSpec toKafkaSpec(StreamSpec spec) {
    KafkaStreamSpec kafkaSpec;
    if (spec.isChangeLogStream()) {
        String topicName = spec.getPhysicalName();
        ChangelogInfo topicMeta = changelogTopicMetaInformation.get(topicName);
        if (topicMeta == null) {
            throw new StreamValidationException("Unable to find topic information for topic " + topicName);
        }

        kafkaSpec = new KafkaStreamSpec(spec.getId(), topicName, systemName, spec.getPartitionCount(),
                topicMeta.getReplicationFactor(), topicMeta.getKafkaProperties());
    } else if (spec.isCoordinatorStream()) {
        kafkaSpec = new KafkaStreamSpec(spec.getId(), spec.getPhysicalName(), systemName, 1,
                coordinatorStreamReplicationFactor, coordinatorStreamProperties);
    } else if (intermediateStreamProperties.containsKey(spec.getId())) {
        kafkaSpec = KafkaStreamSpec.fromSpec(spec);
        Properties properties = kafkaSpec.getProperties();
        properties.putAll(intermediateStreamProperties.get(spec.getId()));
        kafkaSpec = kafkaSpec.copyWithProperties(properties);
    } else {
        kafkaSpec = KafkaStreamSpec.fromSpec(spec);
    }
    return kafkaSpec;
}
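
In the intermediate-stream branch, per-stream overrides are merged into the spec's base properties before the spec is copied. A generic sketch of that lookup-and-merge step, with hypothetical stream IDs and keys:

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class StreamOverrideDemo {
    public static void main(String[] args) {
        Map<String, Properties> overridesByStreamId = new HashMap<>();
        Properties streamOverrides = new Properties();
        streamOverrides.setProperty("retention.ms", "3600000");
        overridesByStreamId.put("stream-1", streamOverrides);

        Properties base = new Properties();
        base.setProperty("cleanup.policy", "delete");

        // Merge per-stream overrides over the base properties
        Properties merged = new Properties();
        merged.putAll(base);
        Properties overrides = overridesByStreamId.get("stream-1");
        if (overrides != null) {
            merged.putAll(overrides);
        }

        System.out.println(merged); // contains both retention.ms and cleanup.policy
    }
}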

From source file:fr.ens.biologie.genomique.eoulsan.it.IT.java

/**
 * Retrieves properties for the test, combining the test-specific configuration
 * with the global one.
 * @param globalsConf global configuration for tests
 * @return Properties content of configuration file
 * @throws IOException if an error occurs while reading the file.
 * @throws EoulsanException if an error occurs while evaluating value property
 */
private Properties loadConfigurationFile(final Properties globalsConf) throws IOException, EoulsanException {

    final File testConfFile = new File(this.testDataDirectory, ITFactory.TEST_CONFIGURATION_FILENAME);

    checkExistingFile(testConfFile, "test configuration file");

    // Add global configuration
    final Properties props = new Properties();
    props.putAll(globalsConf);

    final BufferedReader br = newReader(testConfFile, Charsets.toCharset(Globals.DEFAULT_FILE_ENCODING));

    String line = null;

    while ((line = br.readLine()) != null) {
        // Skip comment lines
        if (line.startsWith("#")) {
            continue;
        }

        final int pos = line.indexOf('=');
        if (pos == -1) {
            continue;
        }

        final String key = line.substring(0, pos).trim();

        // Evaluate value
        String value = evaluateExpressions(line.substring(pos + 1).trim(), true);

        // For keys matching the compile pattern, append the test value to the
        // value inherited from the global configuration

        if (isKeyInCompileProperties(key) && props.containsKey(key)) {
            // Concatenate values
            value = props.getProperty(key) + SEPARATOR + value;
        }

        // Save parameter with value
        props.put(key, value);
    }
    br.close();

    return props;

}
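
For selected keys this loader appends to the inherited global value instead of replacing it. A minimal sketch of that concatenate-on-collision pattern (the separator and key names are assumptions):

import java.util.Properties;

public class ConcatMergeDemo {
    private static final String SEPARATOR = " "; // assumed; the real separator is project-specific

    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("cmd.args", "--verbose"); // value inherited from the global configuration

        String key = "cmd.args";
        String value = "--force";

        // Concatenate with the existing value rather than overwriting it
        if (props.containsKey(key)) {
            value = props.getProperty(key) + SEPARATOR + value;
        }
        props.put(key, value);

        System.out.println(props.getProperty("cmd.args")); // --verbose --force
    }
}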

From source file:org.apache.falcon.oozie.process.ProcessExecutionCoordinatorBuilder.java

@Override
public List<Properties> buildCoords(Cluster cluster, Path buildPath) throws FalconException {
    String coordName = getEntityName();
    Path coordPath = getBuildPath(buildPath);
    copySharedLibs(cluster, new Path(coordPath, "lib"));

    COORDINATORAPP coord = new COORDINATORAPP();
    // coord attributes
    initializeCoordAttributes(cluster, coord, coordName);

    CONTROLS controls = initializeControls(); // controls
    coord.setControls(controls);

    // Configuration
    Properties props = createCoordDefaultConfiguration(coordName);

    initializeInputPaths(cluster, coord, props); // inputs
    initializeOutputPaths(cluster, coord, props); // outputs

    // create parent wf
    Properties wfProps = OozieOrchestrationWorkflowBuilder.get(entity, cluster, Tag.DEFAULT).build(cluster,
            coordPath);

    WORKFLOW wf = new WORKFLOW();
    wf.setAppPath(getStoragePath(wfProps.getProperty(OozieEntityBuilder.ENTITY_PATH)));
    // Add the custom properties set in feed. Else, dryrun won't catch any missing props.
    props.putAll(getEntityProperties(entity));
    wf.setConfiguration(getConfig(props));

    // set coord action to parent wf
    org.apache.falcon.oozie.coordinator.ACTION action = new org.apache.falcon.oozie.coordinator.ACTION();
    action.setWorkflow(wf);

    coord.setAction(action);

    Path marshalPath = marshal(cluster, coord, coordPath);
    return Arrays.asList(getProperties(marshalPath, coordName));
}

From source file:org.apache.samza.system.kafka.KafkaSystemAdmin.java

protected Properties createAdminClientProperties() {
    // populate brokerList from either consumer or producer configs
    Properties props = new Properties();
    // included SSL settings if needed

    props.putAll(config.subset(String.format("systems.%s.consumer.", systemName), true));

    //validate brokerList
    String brokerList = config.get(String.format(KafkaConfig.CONSUMER_CONFIGS_CONFIG_KEY(), systemName,
            ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG));
    if (brokerList == null) {
        brokerList = config.get(String.format(KafkaConfig.PRODUCER_CONFIGS_CONFIG_KEY(), systemName,
                ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG));
    }
    if (brokerList == null) {
        throw new SamzaException(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG
                + " is required for systemAdmin for system " + systemName);
    }
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);

    return props;
}
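
This method copies a prefixed subset of the system config into the admin client properties, then validates that the broker list is present, falling back from consumer to producer config. A simplified sketch of that fallback-and-validate step, with plain maps standing in for Samza's Config API:

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class BrokerListDemo {
    public static void main(String[] args) {
        // Hypothetical consumer and producer config subsets
        Map<String, String> consumerConfig = new HashMap<>();
        Map<String, String> producerConfig = new HashMap<>();
        producerConfig.put("bootstrap.servers", "broker1:9092");

        Properties props = new Properties();
        props.putAll(consumerConfig); // copy the consumer subset wholesale

        // Fall back from consumer to producer config, then fail fast
        String brokerList = consumerConfig.get("bootstrap.servers");
        if (brokerList == null) {
            brokerList = producerConfig.get("bootstrap.servers");
        }
        if (brokerList == null) {
            throw new IllegalStateException("bootstrap.servers is required");
        }
        props.put("bootstrap.servers", brokerList);

        System.out.println(props.getProperty("bootstrap.servers")); // broker1:9092
    }
}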

From source file:com.baidu.cc.spring.ConfigCenterPropertyPlaceholderConfigurer.java

/**
 * Load configuration properties from the configuration center server.
 *
 * @param props properties to merge.
 */
private void loadProptiesFromCC(Properties props) {

    Map<String, String> configItems = configLoader.getConfigItems();
    //do merge
    if (configItems != null) {
        //set tag value
        configLoader.setVersionTag(configItems);

        //set to cached config
        ccLoadedProps = new Properties();
        ccLoadedProps.putAll(configItems);

        props.putAll(configItems);

    }
}
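
Here putAll is used twice: once to snapshot the fetched items into a cache and once to merge them into the live properties. A stripped-down sketch of that cache-and-merge pattern, with hypothetical keys:

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class CacheAndMergeDemo {
    private static Properties cachedProps; // snapshot kept for later comparison

    public static void main(String[] args) {
        Map<String, String> configItems = new HashMap<>();
        configItems.put("db.url", "jdbc:mysql://localhost/demo");

        Properties liveProps = new Properties();

        cachedProps = new Properties();
        cachedProps.putAll(configItems); // cache a copy of what was loaded
        liveProps.putAll(configItems);   // merge the same items into the live properties

        System.out.println(liveProps.getProperty("db.url"));
    }
}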

From source file:gobblin.runtime.local.LocalJobManager.java

/**
 * Start the job configuration file monitor.
 *
 * <p>
 *     The job configuration file monitor currently only supports monitoring
 *     newly added job configuration files.
 * </p>
 */
private void startJobConfigFileMonitor() throws Exception {
    File jobConfigFileDir = new File(this.properties.getProperty(ConfigurationKeys.JOB_CONFIG_FILE_DIR_KEY));
    FileAlterationObserver observer = new FileAlterationObserver(jobConfigFileDir);
    FileAlterationListener listener = new FileAlterationListenerAdaptor() {
        /**
         * Called when a new job configuration file is dropped in.
         */
        @Override
        public void onFileCreate(File file) {
            int pos = file.getName().lastIndexOf(".");
            String fileExtension = pos >= 0 ? file.getName().substring(pos + 1) : "";
            if (!jobConfigFileExtensions.contains(fileExtension)) {
                // Not a job configuration file, ignore.
                return;
            }

            LOG.info("Detected new job configuration file " + file.getAbsolutePath());
            Properties jobProps = new Properties();
            // First add framework configuration properties
            jobProps.putAll(properties);
            // Then load job configuration properties from the new job configuration file
            loadJobConfig(jobProps, file);

            // Schedule the new job
            try {
                boolean runOnce = Boolean
                        .valueOf(jobProps.getProperty(ConfigurationKeys.JOB_RUN_ONCE_KEY, "false"));
                scheduleJob(jobProps, runOnce ? new RunOnceJobListener() : new EmailNotificationJobListener());
            } catch (Throwable t) {
                LOG.error("Failed to schedule new job loaded from job configuration file "
                        + file.getAbsolutePath(), t);
            }
        }

        /**
         * Called when a job configuration file is changed.
         */
        @Override
        public void onFileChange(File file) {
            int pos = file.getName().lastIndexOf(".");
            String fileExtension = pos >= 0 ? file.getName().substring(pos + 1) : "";
            if (!jobConfigFileExtensions.contains(fileExtension)) {
                // Not a job configuration file, ignore.
                return;
            }

            LOG.info("Detected change to job configuration file " + file.getAbsolutePath());
            Properties jobProps = new Properties();
            // First add framework configuration properties
            jobProps.putAll(properties);
            // Then load the updated job configuration properties
            loadJobConfig(jobProps, file);

            String jobName = jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY);
            try {
                // First unschedule and delete the old job
                unscheduleJob(jobName);
                boolean runOnce = Boolean
                        .valueOf(jobProps.getProperty(ConfigurationKeys.JOB_RUN_ONCE_KEY, "false"));
                // Reschedule the job with the new job configuration
                scheduleJob(jobProps, runOnce ? new RunOnceJobListener() : new EmailNotificationJobListener());
            } catch (Throwable t) {
                LOG.error("Failed to update existing job " + jobName, t);
            }
        }

        private void loadJobConfig(Properties jobProps, File file) {
            Closer closer = Closer.create();
            try {
                Reader propsReader = closer.register(new InputStreamReader(new FileInputStream(file),
                        ConfigurationKeys.DEFAULT_CHARSET_ENCODING));
                jobProps.load(propsReader);
                jobProps.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY, file.getAbsolutePath());
            } catch (Exception e) {
                LOG.error("Failed to load job configuration from file " + file.getAbsolutePath(), e);
            } finally {
                try {
                    closer.close();
                } catch (IOException e) {
                    LOG.error("unable to close properties file:" + e, e);
                }
            }
        }
    };

    observer.addListener(listener);
    this.fileAlterationMonitor.addObserver(observer);
    this.fileAlterationMonitor.start();
}
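
Both listener callbacks build job properties the same way: framework properties first via putAll, then the job configuration file loaded on top so its entries override. A minimal sketch of that layering, reading from an in-memory string instead of a file:

import java.io.Reader;
import java.io.StringReader;
import java.util.Properties;

public class JobConfigLayeringDemo {
    public static void main(String[] args) throws Exception {
        Properties frameworkProps = new Properties();
        frameworkProps.setProperty("writer.type", "hdfs");

        // Framework defaults first, then the job configuration on top
        Properties jobProps = new Properties();
        jobProps.putAll(frameworkProps);

        try (Reader jobFile = new StringReader("job.name=demo\nwriter.type=local\n")) {
            jobProps.load(jobFile);
        }

        System.out.println(jobProps.getProperty("writer.type")); // local (job file wins)
    }
}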

From source file:org.apache.falcon.workflow.engine.OozieDAGEngine.java

private void dryRunInternal(Properties properties, Path buildPath, Entity entity)
        throws OozieClientException, DAGEngineException {
    if (properties == null) {
        LOG.info("Entity {} is not scheduled on cluster {}", entity.getName(), cluster);
        throw new DAGEngineException("Properties for entity " + entity.getName() + " is empty");
    }

    switchUser();
    LOG.debug("Logged in user is " + CurrentUser.getUser());
    properties.setProperty(OozieClient.USER_NAME, CurrentUser.getUser());
    properties.setProperty(OozieClient.APP_PATH, buildPath.toString());
    properties.putAll(getDryRunProperties(entity));
    //Do dryrun before run as run is asynchronous
    LOG.info("Dry run with properties {}", properties);
    client.dryrun(properties);
}
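
Note the ordering: putAll runs after the explicit setProperty calls, so a colliding key in the dry-run properties would take precedence. A minimal illustration with a hypothetical key:

import java.util.Properties;

public class OverwriteOrderDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("user.name", "falcon"); // set explicitly first

        Properties extras = new Properties();
        extras.setProperty("user.name", "oozie"); // collides with the explicit value

        props.putAll(extras); // the later putAll wins for colliding keys

        System.out.println(props.getProperty("user.name")); // oozie
    }
}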

From source file:com.mirth.connect.plugins.datapruner.DataPrunerPanel.java

@Override
public void doRefresh() {
    if (PlatformUI.MIRTH_FRAME.alertRefresh()) {
        return;
    }

    final String workingId = getFrame().startWorking("Loading " + getTabName() + " properties...");

    final Properties serverProperties = new Properties();

    SwingWorker<Void, Void> worker = new SwingWorker<Void, Void>() {

        public Void doInBackground() {
            try {
                Properties propertiesFromServer = plugin.getPropertiesFromServer();

                if (propertiesFromServer != null) {
                    serverProperties.putAll(propertiesFromServer);
                }
            } catch (Exception e) {
                getFrame().alertThrowable(getFrame(), e);
            }
            return null;
        }

        @Override
        public void done() {
            setProperties(serverProperties);
            getFrame().stopWorking(workingId);
        }
    };

    worker.execute();
}
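
The final serverProperties instance is created before the worker and populated inside it, because anonymous classes can only capture (effectively) final locals; putAll mutates the captured container without reassigning the reference. A minimal sketch of that pattern without the Swing machinery:

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class CaptureDemo {
    public static void main(String[] args) throws InterruptedException {
        final Properties result = new Properties(); // final reference, mutable contents

        Thread worker = new Thread(() -> {
            Map<String, String> fetched = new HashMap<>(); // stand-in for a server call
            fetched.put("pruner.enabled", "true");
            result.putAll(fetched); // populate the captured container
        });

        worker.start();
        worker.join();
        System.out.println(result.getProperty("pruner.enabled")); // true
    }
}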