List of usage examples for com.google.common.collect.Maps#fromProperties
@GwtIncompatible("java.util.Properties") public static ImmutableMap<String, String> fromProperties(Properties properties)
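Before the real-world examples below, here is a minimal, self-contained sketch of the call itself; the property keys and values are made up for illustration:

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import java.util.Properties;

public class FromPropertiesExample {
  public static void main(String[] args) {
    Properties props = new Properties();
    props.setProperty("db.host", "localhost"); // hypothetical keys/values
    props.setProperty("db.port", "5432");

    // Snapshots the string-valued entries into an ImmutableMap<String, String>.
    // Per the Guava docs, this throws NullPointerException if any key or value
    // is null, and ClassCastException if any key is not a String.
    ImmutableMap<String, String> map = Maps.fromProperties(props);
    System.out.println(map); // contains db.host=localhost and db.port=5432 (iteration order not guaranteed)
  }
}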
From source file:azkaban.flowtrigger.plugin.FlowTriggerDependencyPluginManager.java
private Map<String, String> readConfig(final File file) throws FlowTriggerDependencyPluginException {
  final Properties props = new Properties();
  InputStream input = null;
  try {
    input = new BufferedInputStream(new FileInputStream(file));
    props.load(input);
  } catch (final Exception e) {
    logger.debug("unable to read the file " + file, e);
    throw new FlowTriggerDependencyPluginException(e);
  } finally {
    try {
      if (input != null) {
        input.close();
      }
    } catch (final IOException e) {
      logger.error("unable to close input stream when reading config from file " + file.getAbsolutePath(), e);
    }
  }
  return Maps.fromProperties(props);
}
From source file:org.apache.gobblin.service.FlowConfigResourceLocalHandler.java
/**
 * Get flow config
 */
public FlowConfig getFlowConfig(FlowId flowId) throws FlowConfigLoggedException {
  log.info("[GAAS-REST] Get called with flowGroup {} flowName {}", flowId.getFlowGroup(), flowId.getFlowName());

  try {
    URI flowCatalogURI = new URI("gobblin-flow", null, "/", null, null);
    URI flowUri = new URI(flowCatalogURI.getScheme(), flowCatalogURI.getAuthority(),
        "/" + flowId.getFlowGroup() + "/" + flowId.getFlowName(), null, null);
    FlowSpec spec = (FlowSpec) flowCatalog.getSpec(flowUri);
    FlowConfig flowConfig = new FlowConfig();
    Properties flowProps = spec.getConfigAsProperties();
    Schedule schedule = null;

    if (flowProps.containsKey(ConfigurationKeys.JOB_SCHEDULE_KEY)) {
      schedule = new Schedule();
      schedule.setCronSchedule(flowProps.getProperty(ConfigurationKeys.JOB_SCHEDULE_KEY));
    }
    if (flowProps.containsKey(ConfigurationKeys.JOB_TEMPLATE_PATH)) {
      flowConfig.setTemplateUris(flowProps.getProperty(ConfigurationKeys.JOB_TEMPLATE_PATH));
    } else if (spec.getTemplateURIs().isPresent()) {
      flowConfig.setTemplateUris(StringUtils.join(spec.getTemplateURIs().get(), ","));
    } else {
      flowConfig.setTemplateUris("NA");
    }
    if (schedule != null) {
      if (flowProps.containsKey(ConfigurationKeys.FLOW_RUN_IMMEDIATELY)) {
        schedule.setRunImmediately(Boolean.valueOf(flowProps.getProperty(ConfigurationKeys.FLOW_RUN_IMMEDIATELY)));
      }
      flowConfig.setSchedule(schedule);
    }

    // remove keys that were injected as part of flowSpec creation
    flowProps.remove(ConfigurationKeys.JOB_SCHEDULE_KEY);
    flowProps.remove(ConfigurationKeys.JOB_TEMPLATE_PATH);

    StringMap flowPropsAsStringMap = new StringMap();
    flowPropsAsStringMap.putAll(Maps.fromProperties(flowProps));

    return flowConfig
        .setId(new FlowId().setFlowGroup(flowId.getFlowGroup()).setFlowName(flowId.getFlowName()))
        .setProperties(flowPropsAsStringMap);
  } catch (URISyntaxException e) {
    throw new FlowConfigLoggedException(HttpStatus.S_400_BAD_REQUEST, "bad URI " + flowId.getFlowName(), e);
  } catch (SpecNotFoundException e) {
    throw new FlowConfigLoggedException(HttpStatus.S_404_NOT_FOUND,
        "Flow requested does not exist: " + flowId.getFlowName(), null);
  }
}
From source file:org.apache.gobblin.service.SimpleKafkaSpecProducer.java
@Override
public Future<?> deleteSpec(URI deletedSpecURI, Properties headers) {
  AvroJobSpec avroJobSpec = AvroJobSpec.newBuilder().setUri(deletedSpecURI.toString())
      .setMetadata(ImmutableMap.of(VERB_KEY, SpecExecutor.Verb.DELETE.name()))
      .setProperties(Maps.fromProperties(headers)).build();

  log.info("Deleting Spec: " + deletedSpecURI + " using Kafka.");

  return getKafkaProducer().write(_serializer.serializeRecord(avroJobSpec), WriteCallback.EMPTY);
}
From source file:org.sakaiproject.nakamura.lite.ConfigurationImpl.java
@Activate
public void activate(Map<String, Object> properties) throws IOException {
  aclColumnFamily = StorageClientUtils.getSetting(properties.get(ACL_COLUMN_FAMILY), "ac");
  keySpace = StorageClientUtils.getSetting(properties.get(KEYSPACE), "n");
  authorizableColumnFamily = StorageClientUtils.getSetting(properties.get(AUTHORIZABLE_COLUMN_FAMILY), "au");
  contentColumnFamily = StorageClientUtils.getSetting(properties.get(CONTENT_COLUMN_FAMILY), "cn");

  // load defaults
  // check the classpath
  sharedProperties = Maps.newHashMap();
  InputStream in = this.getClass().getClassLoader().getResourceAsStream(SHAREDCONFIGPATH);
  if (in != null) {
    Properties p = new Properties();
    p.load(in);
    in.close();
    sharedProperties.putAll(Maps.fromProperties(p));
  }

  // Load from a properties file defined on the command line
  String osSharedConfigPath = System.getProperty(SHAREDCONFIGPROPERTY);
  if (osSharedConfigPath != null && StringUtils.isNotEmpty(osSharedConfigPath)) {
    File f = new File(osSharedConfigPath);
    if (f.exists() && f.canRead()) {
      FileReader fr = new FileReader(f);
      Properties p = new Properties();
      p.load(fr);
      fr.close();
      sharedProperties.putAll(Maps.fromProperties(p));
    } else {
      LOGGER.warn("Unable to read shared config file {} specified by the system property {} ",
          f.getAbsolutePath(), SHAREDCONFIGPROPERTY);
    }
  }
  // make the shared properties immutable.
  sharedProperties = ImmutableMap.copyOf(sharedProperties);

  indexColumnNames = DEFAULT_INDEX_COLUMN_NAMES;
  // if present in the shared properties, load the default from there.
  if (sharedProperties.containsKey(INDEX_COLUMN_NAMES)) {
    indexColumnNames = StringUtils.split(sharedProperties.get(INDEX_COLUMN_NAMES), ',');
    LOGGER.info("Index Column Names from shared properties is configured as {}",
        Arrays.toString(indexColumnNames));
  } else {
    LOGGER.warn("Using Default Index Columns from code base, not from shared properties, "
        + "OSGi Configuration may override this, if {} has been set in the "
        + "OSGi Configuration for this component ", INDEX_COLUMN_NAMES);
  }
  // apply any local OSGi customization
  indexColumnNames = StorageClientUtils.getSetting(properties.get(INDEX_COLUMN_NAMES), indexColumnNames);
  LOGGER.info("Using Configuration for Index Column Names as {}", Arrays.toString(indexColumnNames));

  String uuidFieldName = DEFAULT_UUID_FIELD;
  if (sharedProperties.containsKey(UUID_FIELD_NAME)) {
    uuidFieldName = sharedProperties.get(UUID_FIELD_NAME);
    LOGGER.info("UUID Field Name from shared properties is configured as {}", uuidFieldName);
  }
  InternalContent.setUuidField(StorageClientUtils.getSetting(properties.get(UUID_FIELD_NAME), uuidFieldName));
}
From source file:org.sonar.maven3.SonarMojo.java
private void configureLogging(LoggingConfiguration logging) {
  logging.setProperties(Maps.fromProperties(session.getSystemProperties()));
  logging.setFormat(LoggingConfiguration.FORMAT_MAVEN);
  if (getLog().isDebugEnabled()) {
    logging.setVerbose(true);
  }
}
From source file:org.sonar.maven.SonarMojo.java
private void configureLogging(LoggingConfiguration logging) {
  logging.setProperties(Maps.fromProperties(session.getExecutionProperties()));
  logging.setFormat(LoggingConfiguration.FORMAT_MAVEN);
  if (getLog().isDebugEnabled()) {
    logging.setVerbose(true);
  }
}
From source file:org.apache.jackrabbit.oak.run.Utils.java
@Nullable
public static GarbageCollectableBlobStore bootstrapDataStore(String[] args, Closer closer)
    throws IOException, RepositoryException {
  OptionParser parser = new OptionParser();
  parser.allowsUnrecognizedOptions();

  ArgumentAcceptingOptionSpec<String> s3dsConfig = parser.accepts("s3ds", "S3DataStore config")
      .withRequiredArg().ofType(String.class);
  ArgumentAcceptingOptionSpec<String> fdsConfig = parser.accepts("fds", "FileDataStore config")
      .withRequiredArg().ofType(String.class);

  OptionSet options = parser.parse(args);

  if (!options.has(s3dsConfig) && !options.has(fdsConfig)) {
    return null;
  }

  DataStore delegate;
  if (options.has(s3dsConfig)) {
    SharedS3DataStore s3ds = new SharedS3DataStore();
    String cfgPath = s3dsConfig.value(options);
    Properties props = loadAndTransformProps(cfgPath);
    s3ds.setProperties(props);
    s3ds.init(null);
    delegate = s3ds;
  } else {
    delegate = new OakFileDataStore();
    String cfgPath = fdsConfig.value(options);
    Properties props = loadAndTransformProps(cfgPath);
    populate(delegate, Maps.fromProperties(props), true);
    delegate.init(null);
  }

  DataStoreBlobStore blobStore = new DataStoreBlobStore(delegate);
  closer.register(Utils.asCloseable(blobStore));

  return blobStore;
}
From source file:com.feedzai.commons.sql.abstraction.engine.testconfig.DatabaseConfigurationUtil.java
/**
 * Loads the database configurations from the given source.
 *
 * @throws java.io.IOException
 */
private void loadDatabaseConfigurations(InputStream is) throws IOException {
  final Properties properties = new Properties();
  properties.load(is);

  final Map<String, String> config = Maps.fromProperties(properties);
  final Map<String, Collection<String>> propsByVendor = groupByVendor(config);

  this.configs = Maps.transformEntries(propsByVendor,
      new Maps.EntryTransformer<String, Collection<String>, DatabaseConfiguration>() {
        @Override
        public DatabaseConfiguration transformEntry(String vendor, Collection<String> properties) {
          return buildDatabaseConfiguration(vendor, properties, config);
        }
      });
}
From source file:gobblin.service.SimpleKafkaSpecExecutorInstanceProducer.java
private AvroJobSpec convertToAvroJobSpec(Spec spec, Verb verb) {
  if (spec instanceof JobSpec) {
    JobSpec jobSpec = (JobSpec) spec;
    AvroJobSpec.Builder avroJobSpecBuilder = AvroJobSpec.newBuilder();

    avroJobSpecBuilder.setUri(jobSpec.getUri().toString()).setVersion(jobSpec.getVersion())
        .setDescription(jobSpec.getDescription())
        .setProperties(Maps.fromProperties(jobSpec.getConfigAsProperties()))
        .setMetadata(ImmutableMap.of(VERB_KEY, verb.name()));

    if (jobSpec.getTemplateURI().isPresent()) {
      avroJobSpecBuilder.setTemplateUri(jobSpec.getTemplateURI().get().toString());
    }

    return avroJobSpecBuilder.build();
  } else {
    throw new RuntimeException("Unsupported spec type " + spec.getClass());
  }
}
From source file:org.apache.hive.ptest.conf.Configuration.java
public static Configuration fromInputStream(InputStream inputStream) throws IOException {
  Properties properties = new Properties();
  properties.load(inputStream);
  Context context = new Context(Maps.fromProperties(properties));
  return new Configuration(context);
}