List of usage examples for com.google.common.collect.Maps.fromProperties
@GwtIncompatible("java.util.Properties")
public static ImmutableMap<String, String> fromProperties(Properties properties)
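Maps.fromProperties copies a Properties instance into an ImmutableMap<String, String>; it throws a ClassCastException if any key is not a String, and a NullPointerException if any key or value is null. Before the real-world examples below, here is a minimal self-contained sketch of the call (the property names are illustrative, not from any of the source files):

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

import java.util.Properties;

public class FromPropertiesExample {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("host", "localhost");
        props.setProperty("port", "8080");

        // Snapshot the Properties into an immutable String-to-String map;
        // later changes to props are not reflected in the returned map.
        ImmutableMap<String, String> config = Maps.fromProperties(props);

        System.out.println(config.get("port")); // prints 8080
    }
}

The payoff over passing the raw Properties around is a properly typed, immutable Map<String, String> that is safe to share or transform — the pattern every example below relies on.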
From source file: com.ariht.maven.plugins.config.generator.ConfigGeneratorImpl.java

/**
 * Merge templates with filters to generate config, scripts and property files.
 */
private void processTemplatesAndGenerateConfig() throws Exception {
    final DirectoryReader directoryReader = new DirectoryReader(log);
    final List<FileInfo> filters = directoryReader.readFiles(configGeneratorParameters.getFiltersBasePath(),
            configGeneratorParameters.getFiltersToIgnore());
    for (FileInfo fileInfo : filters) {
        fileInfo.lookForExternalFiles(configGeneratorParameters.getExternalFilterBasePaths());
    }
    final List<FileInfo> templates = directoryReader.readFiles(configGeneratorParameters.getTemplatesBasePath(),
            configGeneratorParameters.getTemplatesToIgnore());
    logOutputPath();
    // Get list of all properties in all filter files.
    final Set<String> allProperties = getAllProperties(filters);
    // Collection stores missing properties by file so this can be logged once at the end.
    final Map<String, Set<String>> missingPropertiesByFilename = new LinkedHashMap<String, Set<String>>();
    for (final FileInfo filter : filters) {
        final Properties properties = readFilterIntoProperties(filter);
        final LinkedHashMap<String, String> valueMap = Maps.newLinkedHashMap(Maps.fromProperties(properties));
        // No point checking for missing properties if all were found in the filter file
        boolean missingPropertyFound = false;
        for (String missingProperty : Sets.difference(allProperties, valueMap.keySet()).immutableCopy()) {
            valueMap.put(missingProperty, MISSING_PROPERTY_PREFIX + missingProperty + MISSING_PROPERTY_SUFFIX);
            missingPropertyFound = true;
        }
        final StrSubstitutor strSubstitutor = new StrSubstitutor(valueMap,
                configGeneratorParameters.getPropertyPrefix(), configGeneratorParameters.getPropertySuffix());
        for (final FileInfo template : templates) {
            generateConfig(template, filter, configGeneratorParameters.getOutputBasePath(), strSubstitutor,
                    missingPropertiesByFilename, missingPropertyFound);
        }
    }
    if (!missingPropertiesByFilename.keySet().isEmpty()) {
        final StringBuilder sb = new StringBuilder("Missing properties identified:\n");
        for (String filename : missingPropertiesByFilename.keySet()) {
            sb.append(filename).append(": ");
            sb.append(StringUtils.join(missingPropertiesByFilename.get(filename), ", ")).append("\n");
        }
        log.warn(sb.toString());
        if (configGeneratorParameters.isFailOnMissingProperty()) {
            throw new MojoExecutionException(sb.toString());
        }
    }
}
From source file: org.opennms.netmgt.dao.support.FilesystemResourceStorageDao.java

@Override
public Map<String, String> getStringAttributes(ResourcePath path) {
    Properties props = RrdResourceAttributeUtils.getStringProperties(m_rrdDirectory, toRelativePath(path));
    return Maps.fromProperties(props);
}
From source file: gobblin.service.FlowConfigsResource.java

/**
 * Retrieve the flow configuration with the given key
 * @param key flow config id key containing group name and flow name
 * @return {@link FlowConfig} with flow configuration
 */
@Override
public FlowConfig get(ComplexResourceKey<FlowId, EmptyRecord> key) {
    String flowGroup = key.getKey().getFlowGroup();
    String flowName = key.getKey().getFlowName();

    LOG.info("Get called with flowGroup " + flowGroup + " flowName " + flowName);

    try {
        URI flowCatalogURI = new URI("gobblin-flow", null, "/", null, null);
        URI flowUri = new URI(flowCatalogURI.getScheme(), flowCatalogURI.getAuthority(),
                "/" + flowGroup + "/" + flowName, null, null);
        FlowSpec spec = (FlowSpec) getFlowCatalog().getSpec(flowUri);
        FlowConfig flowConfig = new FlowConfig();
        Properties flowProps = spec.getConfigAsProperties();
        Schedule schedule = null;

        if (flowProps.containsKey(ConfigurationKeys.JOB_SCHEDULE_KEY)) {
            schedule = new Schedule();
            schedule.setCronSchedule(flowProps.getProperty(ConfigurationKeys.JOB_SCHEDULE_KEY));
        }
        if (flowProps.containsKey(ConfigurationKeys.JOB_TEMPLATE_PATH)) {
            flowConfig.setTemplateUris(flowProps.getProperty(ConfigurationKeys.JOB_TEMPLATE_PATH));
        } else if (spec.getTemplateURIs().isPresent()) {
            flowConfig.setTemplateUris(StringUtils.join(spec.getTemplateURIs().get(), ","));
        } else {
            flowConfig.setTemplateUris("NA");
        }
        if (schedule != null) {
            if (flowProps.containsKey(ConfigurationKeys.FLOW_RUN_IMMEDIATELY)) {
                schedule.setRunImmediately(
                        Boolean.valueOf(flowProps.getProperty(ConfigurationKeys.FLOW_RUN_IMMEDIATELY)));
            }
            flowConfig.setSchedule(schedule);
        }

        // remove keys that were injected as part of flowSpec creation
        flowProps.remove(ConfigurationKeys.JOB_SCHEDULE_KEY);
        flowProps.remove(ConfigurationKeys.JOB_TEMPLATE_PATH);

        StringMap flowPropsAsStringMap = new StringMap();
        flowPropsAsStringMap.putAll(Maps.fromProperties(flowProps));

        return flowConfig.setId(new FlowId().setFlowGroup(flowGroup).setFlowName(flowName))
                .setProperties(flowPropsAsStringMap);
    } catch (URISyntaxException e) {
        logAndThrowRestLiServiceException(HttpStatus.S_400_BAD_REQUEST, "bad URI " + flowName, e);
    } catch (SpecNotFoundException e) {
        logAndThrowRestLiServiceException(HttpStatus.S_404_NOT_FOUND,
                "Flow requested does not exist: " + flowName, null);
    }

    return null;
}
From source file: org.openengsb.core.services.internal.deployer.connector.ConnectorFile.java

private ImmutableMap<String, String> readProperties(File file) {
    FileReader reader = null;
    try {
        reader = new FileReader(file);
        Properties props = new Properties();
        props.load(reader);
        Map<String, String> map = Maps.fromProperties(props);
        Map<String, String> transformedMap = Maps.transformValues(map, new TrimFunction<String, String>());
        return ImmutableMap.copyOf(transformedMap);
    } catch (FileNotFoundException e) {
        throw new IllegalStateException(e);
    } catch (IOException e) {
        throw new IllegalStateException(e);
    } finally {
        IOUtils.closeQuietly(reader);
    }
}
From source file: iterator.util.Config.java

public void load(Properties properties) {
    load(Maps.fromProperties(properties));
}
From source file: com.test.config.service.web.mb.PropertyGroupManagedBean.java

private void savePropertyGroup(String fileName, String group, InputStream inputstream) throws IOException {
    Reader reader = new InputStreamReader(inputstream, Charsets.UTF_8);
    Properties properties = new Properties();
    properties.load(reader);
    if (!properties.isEmpty()) {
        String groupPath = ZKPaths.makePath(nodeAuth.getAuthedNode(), group);
        boolean created = nodeService.createProperty(groupPath, null);
        if (created) {
            Map<String, String> map = Maps.fromProperties(properties);
            for (Entry<String, String> entry : map.entrySet()) {
                nodeService.createProperty(ZKPaths.makePath(groupPath, entry.getKey()), entry.getValue());
            }
            refreshGroups();
            FacesContext.getCurrentInstance().addMessage(null,
                    new FacesMessage("Successful", fileName + " is uploaded."));
        } else {
            FacesContext.getCurrentInstance().addMessage(null,
                    new FacesMessage("Failed", fileName + " create group " + group + " failed."));
        }
    } else {
        FacesContext.getCurrentInstance().addMessage(null, new FacesMessage("Failed", fileName + " is empty."));
    }
}
From source file: org.apache.gobblin.service.SimpleKafkaSpecProducer.java

private AvroJobSpec convertToAvroJobSpec(Spec spec, SpecExecutor.Verb verb) {
    if (spec instanceof JobSpec) {
        JobSpec jobSpec = (JobSpec) spec;
        AvroJobSpec.Builder avroJobSpecBuilder = AvroJobSpec.newBuilder();

        avroJobSpecBuilder.setUri(jobSpec.getUri().toString()).setVersion(jobSpec.getVersion())
                .setDescription(jobSpec.getDescription())
                .setProperties(Maps.fromProperties(jobSpec.getConfigAsProperties()))
                .setMetadata(ImmutableMap.of(VERB_KEY, verb.name()));

        if (jobSpec.getTemplateURI().isPresent()) {
            avroJobSpecBuilder.setTemplateUri(jobSpec.getTemplateURI().get().toString());
        }

        return avroJobSpecBuilder.build();
    } else {
        throw new RuntimeException("Unsupported spec type " + spec.getClass());
    }
}
From source file: org.apache.hive.hcatalog.mapreduce.SpecialCases.java

/**
 * Method to do any file-format specific special casing while
 * instantiating a storage handler to write. We set any parameters
 * we want to be visible to the job in jobProperties, and this will
 * be available to the job via jobconf at run time.
 *
 * This is mostly intended to be used by StorageHandlers that wrap
 * File-based OutputFormats such as FosterStorageHandler that wraps
 * RCFile, ORC, etc.
 *
 * @param jobProperties : map to write to
 * @param jobInfo : information about this output job to read from
 * @param ofclass : the output format in use
 */
public static void addSpecialCasesParametersToOutputJobProperties(Map<String, String> jobProperties,
        OutputJobInfo jobInfo, Class<? extends OutputFormat> ofclass) {
    if (ofclass == RCFileOutputFormat.class) {
        // RCFile specific parameter
        jobProperties.put(HiveConf.ConfVars.HIVE_RCFILE_COLUMN_NUMBER_CONF.varname,
                Integer.toOctalString(jobInfo.getOutputSchema().getFields().size()));
    } else if (ofclass == OrcOutputFormat.class) {
        // Special cases for ORC
        // We need to check table properties to see if a couple of parameters,
        // such as compression parameters are defined. If they are, then we copy
        // them to job properties, so that it will be available in jobconf at runtime
        // See HIVE-5504 for details
        Map<String, String> tableProps = jobInfo.getTableInfo().getTable().getParameters();
        for (OrcConf property : OrcConf.values()) {
            String propName = property.getAttribute();
            if (tableProps.containsKey(propName)) {
                jobProperties.put(propName, tableProps.get(propName));
            }
        }
    } else if (ofclass == AvroContainerOutputFormat.class) {
        // Special cases for Avro. As with ORC, we make table properties that
        // Avro is interested in available in jobconf at runtime
        Map<String, String> tableProps = jobInfo.getTableInfo().getTable().getParameters();
        for (AvroSerdeUtils.AvroTableProperties property : AvroSerdeUtils.AvroTableProperties.values()) {
            String propName = property.getPropName();
            if (tableProps.containsKey(propName)) {
                String propVal = tableProps.get(propName);
                jobProperties.put(propName, propVal);
            }
        }
        Properties properties = new Properties();
        properties.put("name", jobInfo.getTableName());
        List<String> colNames = jobInfo.getOutputSchema().getFieldNames();
        List<TypeInfo> colTypes = new ArrayList<TypeInfo>();
        for (HCatFieldSchema field : jobInfo.getOutputSchema().getFields()) {
            colTypes.add(TypeInfoUtils.getTypeInfoFromTypeString(field.getTypeString()));
        }
        if (jobProperties.get(AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName()) == null
                || jobProperties.get(AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName()).isEmpty()) {
            jobProperties.put(AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName(),
                    AvroSerDe.getSchemaFromCols(properties, colNames, colTypes, null).toString());
        }
    } else if (ofclass == MapredParquetOutputFormat.class) {
        // Handle table properties
        Properties tblProperties = new Properties();
        Map<String, String> tableProps = jobInfo.getTableInfo().getTable().getParameters();
        for (String key : tableProps.keySet()) {
            if (ParquetTableUtils.isParquetProperty(key)) {
                tblProperties.put(key, tableProps.get(key));
            }
        }
        // Handle table schema
        List<String> colNames = jobInfo.getOutputSchema().getFieldNames();
        List<TypeInfo> colTypes = new ArrayList<TypeInfo>();
        for (HCatFieldSchema field : jobInfo.getOutputSchema().getFields()) {
            colTypes.add(TypeInfoUtils.getTypeInfoFromTypeString(field.getTypeString()));
        }
        String parquetSchema = HiveSchemaConverter.convert(colNames, colTypes).toString();
        jobProperties.put(DataWritableWriteSupport.PARQUET_HIVE_SCHEMA, parquetSchema);

        jobProperties.putAll(Maps.fromProperties(tblProperties));
    }
}
From source file: com.eucalyptus.bootstrap.BillOfMaterials.java

@SuppressWarnings("unchecked")
static synchronized Map<String, String> loadProps() {
    if ((loadedProps == null) || loadedProps.isEmpty()) {
        loadedProps = Maps.newHashMap();
        try {
            final List<URL> propFiles = Collections.list(ClassLoader.getSystemResources("version.properties"));
            for (final URL u : propFiles) {
                final Properties temp = new Properties();
                final InputStream in = Resources.newInputStreamSupplier(u).getInput();
                try {
                    temp.load(in);
                } finally {
                    in.close();
                }
                loadedProps.putAll(Maps.fromProperties(temp));
            }
        } catch (final IOException ex) {
            LOG.error(ex, ex);
        }
        return loadedProps;
    } else {
        return loadedProps;
    }
}
From source file: org.apache.jackrabbit.oak.fixture.BlobStoreFixture.java

private static Map<String, ?> getConfig() {
    Map<String, Object> result = Maps.newHashMap();
    for (Map.Entry<String, ?> e : Maps.fromProperties(System.getProperties()).entrySet()) {
        String key = e.getKey();
        if (key.startsWith("ds.") || key.startsWith("bs.")) {
            key = key.substring(3); // length of the "ds."/"bs." prefix
            result.put(key, e.getValue());
        }
    }
    return result;
}