List of usage examples for java.util Properties putAll
@Override public synchronized void putAll(Map<?, ?> t)
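Properties.putAll copies every mapping from the given Map into the Properties table, overwriting values for keys that already exist. A minimal standalone sketch for quick reference (the property names and values below are made up for illustration):

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class PutAllDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("app.name", "demo");

        // putAll copies every mapping from the source map; existing keys are overwritten
        Map<String, String> overrides = new HashMap<>();
        overrides.put("app.name", "demo-override");
        overrides.put("app.port", "8080");
        props.putAll(overrides);

        System.out.println(props.getProperty("app.name")); // demo-override
        System.out.println(props.getProperty("app.port")); // 8080
    }
}

Unlike setProperty, putAll does not restrict keys and values to Strings, so mixing in non-String objects can break getProperty and store later on.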
From source file:org.apache.apex.malhar.kafka.KafkaConsumerWrapper.java
/**
 * This method is called in the activate method of the operator.
 */
public void start(boolean waitForReplay) {
    this.waitForReplay = waitForReplay;
    isAlive.set(true);

    // create thread pool for the consumer threads that read the kafka data
    kafkaConsumerExecutor = Executors
        .newCachedThreadPool(new ThreadFactoryBuilder().setNameFormat("kafka-consumer-%d").build());

    // group the list of PartitionMeta by cluster
    Map<String, List<TopicPartition>> consumerAssignment = new HashMap<>();
    Set<AbstractKafkaPartitioner.PartitionMeta> assignments = ownerOperator.assignment();
    for (AbstractKafkaPartitioner.PartitionMeta partitionMeta : assignments) {
        String cluster = partitionMeta.getCluster();
        List<TopicPartition> cAssignment = consumerAssignment.get(cluster);
        if (cAssignment == null) {
            cAssignment = new LinkedList<>();
            consumerAssignment.put(cluster, cAssignment);
        }
        cAssignment.add(new TopicPartition(partitionMeta.getTopic(), partitionMeta.getPartitionId()));
    }
    Map<AbstractKafkaPartitioner.PartitionMeta, Long> currentOffset = ownerOperator.getOffsetTrack();

    // create one thread per cluster; each thread uses one KafkaConsumer
    // to consume from 1+ partition(s) of 1+ topic(s)
    for (Map.Entry<String, List<TopicPartition>> e : consumerAssignment.entrySet()) {
        Properties prop = new Properties();
        if (ownerOperator.getConsumerProps() != null) {
            prop.putAll(ownerOperator.getConsumerProps());
        }
        prop.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, e.getKey());
        prop.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "none");
        // never auto commit the offsets
        prop.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        prop.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
        prop.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
        AbstractKafkaInputOperator.InitialOffset initialOffset = AbstractKafkaInputOperator.InitialOffset
            .valueOf(ownerOperator.getInitialOffset());
        if (initialOffset == AbstractKafkaInputOperator.InitialOffset.APPLICATION_OR_EARLIEST
            || initialOffset == AbstractKafkaInputOperator.InitialOffset.APPLICATION_OR_LATEST) {
            // commit the offset with the application name if initialOffset is set to application
            prop.put(ConsumerConfig.GROUP_ID_CONFIG, ownerOperator.getApplicationName() + "_Consumer");
        }
        KafkaConsumer<byte[], byte[]> kc = new KafkaConsumer<>(prop);
        kc.assign(e.getValue());
        if (logger.isInfoEnabled()) {
            logger.info("Create consumer with properties {} ",
                Joiner.on(";").withKeyValueSeparator("=").join(prop));
            logger.info("Assign consumer to {}", Joiner.on('#').join(e.getValue()));
        }
        if (currentOffset != null && !currentOffset.isEmpty()) {
            for (TopicPartition tp : e.getValue()) {
                AbstractKafkaPartitioner.PartitionMeta partitionKey = new AbstractKafkaPartitioner.PartitionMeta(
                    e.getKey(), tp.topic(), tp.partition());
                if (currentOffset.containsKey(partitionKey)) {
                    kc.seek(tp, currentOffset.get(partitionKey));
                }
            }
        }
        consumers.put(e.getKey(), kc);
        kafkaConsumerExecutor.submit(new ConsumerThread(e.getKey(), kc, this));
    }
}
From source file:org.paxml.bean.PropertiesTag.java
private void loadTextProperties(Properties props, String text) {
    Properties loaded = new Properties();
    try {
        PaxmlUtils.loadProperties(loaded, true, new ByteArrayInputStream(text.getBytes("UTF-8")));
    } catch (UnsupportedEncodingException e) {
        throw new PaxmlRuntimeException(e);
    }
    loaded = PaxmlUtils.trimProperties(loaded);
    props.putAll(loaded);
}
From source file:com.erudika.scoold.utils.LanguageUtils.java
private void writeLanguageToFile(String langCode, Map<String, String> lang) {
    if (lang != null && !lang.isEmpty() && langCode != null && langCode.length() == 2) {
        FileOutputStream fos = null;
        try {
            Properties langProps = new Properties();
            langProps.putAll(lang);
            File file = new File("lang_" + langCode + ".properties");
            fos = new FileOutputStream(file);
            langProps.store(fos, langCode);

            int progress = 0;
            for (Map.Entry<String, String> entry : lang.entrySet()) {
                if (!getDefaultLanguage().get(entry.getKey()).equals(entry.getValue())) {
                    progress++;
                }
            }
            if (progress > 0) {
                updateTranslationProgressMap(langCode, progress);
            }
        } catch (Exception ex) {
            logger.error("Could not write language to file: ", ex);
        } finally {
            try {
                if (fos != null) {
                    fos.close();
                }
            } catch (IOException ex) {
                logger.error(null, ex);
            }
        }
    }
}
From source file:gobblin.runtime.mapreduce.MRJobLauncherTest.java
public Properties loadJobProps() throws IOException {
    Properties jobProps = new Properties();
    try (InputStream propsReader = getClass().getClassLoader()
            .getResourceAsStream("mr-job-conf/GobblinMRTest.pull")) {
        jobProps.load(propsReader);
    }
    jobProps.putAll(this.launcherProps);
    jobProps.setProperty(JobLauncherTestHelper.SOURCE_FILE_LIST_KEY,
        "gobblin-test/resource/source/test.avro.0,"
            + "gobblin-test/resource/source/test.avro.1,"
            + "gobblin-test/resource/source/test.avro.2,"
            + "gobblin-test/resource/source/test.avro.3");
    return jobProps;
}
From source file:org.artifactory.common.property.ArtifactorySystemProperties.java
/**
 * Returns the propertiesCopy object.
 *
 * @return Properties - The copy of the artifactoryProperties object
 */
public Properties getPropertiesCopy() {
    Properties propertiesCopy = new Properties();
    for (Map.Entry<ConstantValues, String> entry : artifactoryProperties.entrySet()) {
        propertiesCopy.put(entry.getKey().getPropertyName(), entry.getValue());
    }
    propertiesCopy.putAll(nonEnumArtifactoryProperties);
    return propertiesCopy;
}
From source file:com.jaspersoft.jasperserver.api.engine.common.virtualdatasourcequery.teiid.impl.HiveTeiidConnectorImpl.java
private ModelMetaData getModel() {
    String subDataSourceID = getConnectorName();
    String modelName = dataSourceName;
    ModelMetaData model = new ModelMetaData();
    model.setModelType(Model.Type.PHYSICAL);
    model.setName(modelName);
    Properties importProperties = new Properties() {
        private static final long serialVersionUID = 1L;
        {
            setProperty("importer.trimColumnNames", Boolean.TRUE.toString());
        }
    };
    if (importPropertyMap != null) {
        importProperties.putAll(importPropertyMap);
    }
    model.setProperties(importProperties);
    model.addSourceMapping(subDataSourceID, translatorConfig.getTranslatorName(), subDataSourceID);
    return model;
}
From source file:com.ibm.jaggr.core.impl.options.OptionsImpl.java
/**
 * Returns the default options for the aggregator service.
 *
 * @return The default options.
 */
protected Properties initDefaultOptions() {
    Properties defaultValues = new Properties();
    defaultValues.putAll(defaults);

    // See if there's an aggregator.properties in the class loader's root
    ClassLoader cl = OptionsImpl.class.getClassLoader();
    URL url = cl.getResource(getPropsFilename());
    if (url != null) {
        loadFromUrl(defaultValues, url);
    }
    // If the bundle defines properties, then load those too
    if (aggregator != null) {
        url = aggregator.getPlatformServices().getResource(getPropsFilename());
        if (url != null) {
            loadFromUrl(defaultValues, url);
        }
    }
    return defaultValues;
}
From source file:com.jkoolcloud.tnt4j.streams.inputs.KafkaConsumerStream.java
/**
 * Returns scope defined properties set.
 *
 * @param scope
 *            properties scope key
 * @return scope defined properties
 */
protected Properties getScopeProps(String scope) {
    Properties allScopeProperties = new Properties();
    Properties sProperties = userKafkaProps.get(scope);
    if (sProperties != null) {
        allScopeProperties.putAll(sProperties);
    }
    if (!PROP_SCOPE_USER.equals(scope)) {
        sProperties = userKafkaProps.get(PROP_SCOPE_USER);
        if (sProperties != null) {
            allScopeProperties.putAll(sProperties);
        }
    }
    return allScopeProperties;
}
From source file:com.github.ffremont.microservices.springboot.node.tasks.SimpleTestConfiguration.java
@Bean
public PropertyPlaceholderConfigurer getProps() throws IOException {
    PropertyPlaceholderConfigurer configurer = new PropertyPlaceholderConfigurer();
    Properties props = new Properties();
    Map<String, String> myMap = new HashMap<>();
    myMap.put("app.base", Files.createTempDirectory("appBaseNode").toString());
    myMap.put("app.cluster", "myCuster");
    myMap.put("app.node", "myNode");
    myMap.put("app.master.host", "localhost");
    myMap.put("app.master.port", "9999");
    myMap.put("app.master.contextRoot", "/test");
    myMap.put("app.master.user", "");
    myMap.put("app.master.pwd", "");
    props.putAll(myMap);
    configurer.setProperties(props);
    return configurer;
}