Usage examples for `com.google.common.collect.Maps#fromProperties`
@GwtIncompatible("java.util.Properties") public static ImmutableMap<String, String> fromProperties(Properties properties)
From source file:org.richfaces.util.PropertiesUtil.java
/**
 * Loads key/value pairs from every resource found at the given classpath
 * location and returns them as an immutable map.
 *
 * @param location classpath location to scan for property resources
 * @return map of all properties collected from the matching resources
 * @see #loadProperties(Properties, String)
 */
public static Map<String, String> loadProperties(String location) {
    final Properties collected = new Properties();
    loadProperties(collected, location);
    return Maps.fromProperties(collected);
}
From source file:org.apache.metron.profiler.spark.reader.TextEncodedTelemetryReader.java
@Override public Dataset<String> read(SparkSession spark, Properties profilerProps, Properties readerProps) { String inputPath = TELEMETRY_INPUT_PATH.get(profilerProps, String.class); if (inputFormat == null) { inputFormat = TELEMETRY_INPUT_FORMAT.get(profilerProps, String.class); }//from www.j a va 2 s. co m LOG.debug("Loading telemetry; inputPath={}, inputFormat={}", inputPath, inputFormat); return spark.read().options(Maps.fromProperties(readerProps)).format(inputFormat).load(inputPath) .as(Encoders.STRING()); }
From source file:org.jclouds.codec.ToProvider.java
/**
 * Flattens any number of {@link Properties} objects into a single map.
 * When the same key occurs in several inputs, the value from the later
 * {@link Properties} instance wins. Needed because the immutable map
 * builder cannot tolerate duplicate keys.
 *
 * @param properties the property sets to merge, in increasing precedence
 * @return a mutable map holding the merged key/value pairs
 */
private static Map<String, String> fromProperties(Properties... properties) {
    final Map<String, String> merged = Maps.newHashMap();
    for (Properties source : properties) {
        merged.putAll(Maps.fromProperties(source));
    }
    return merged;
}
From source file:org.apache.metron.profiler.spark.reader.ColumnEncodedTelemetryReader.java
@Override public Dataset<String> read(SparkSession spark, Properties profilerProps, Properties readerProps) { String inputPath = TELEMETRY_INPUT_PATH.get(profilerProps, String.class); if (inputFormat == null) { inputFormat = TELEMETRY_INPUT_FORMAT.get(profilerProps, String.class); }//from w ww . j av a 2s . co m LOG.debug("Loading telemetry; inputPath={}, inputFormat={}", inputPath, inputFormat); return spark.read().options(Maps.fromProperties(readerProps)).format(inputFormat).load(inputPath).toJSON(); }
From source file:alluxio.shell.command.MountCommand.java
/**
 * Mounts a UFS path at the given Alluxio path, applying any readonly,
 * shared, or property options supplied on the command line.
 *
 * @param cl parsed command line; args[0] is the Alluxio path,
 *        args[1] the UFS path
 * @return 0 on success
 * @throws AlluxioException if the mount operation fails
 * @throws IOException on a communication failure
 */
@Override
public int run(CommandLine cl) throws AlluxioException, IOException {
    String[] args = cl.getArgs();
    AlluxioURI alluxioPath = new AlluxioURI(args[0]);
    AlluxioURI ufsPath = new AlluxioURI(args[1]);
    MountOptions mountOptions = MountOptions.defaults();
    if (cl.hasOption(READONLY_OPTION.getLongOpt())) {
        mountOptions.setReadOnly(true);
    }
    if (cl.hasOption(SHARED_OPTION.getLongOpt())) {
        mountOptions.setShared(true);
    }
    if (cl.hasOption(OPTION_OPTION.getLongOpt())) {
        // key=value pairs from --option become mount-point properties
        Properties optionProps = cl.getOptionProperties(OPTION_OPTION.getLongOpt());
        mountOptions.setProperties(Maps.fromProperties(optionProps));
    }
    mFileSystem.mount(alluxioPath, ufsPath, mountOptions);
    System.out.println("Mounted " + ufsPath + " at " + alluxioPath);
    return 0;
}
From source file:org.sonar.core.persistence.AbstractDaoTestCase.java
/**
 * Starts the shared test database on first use and truncates it before every
 * test. Creation happens only once (guarded by the {@code database == null}
 * check); the truncation at the end runs for each test.
 *
 * @throws Exception if the database or MyBatis fails to start
 */
@Before
public void startDatabase() throws Exception {
    if (database == null) {
        // Seed settings from JVM system properties so -D flags can steer the run.
        Settings settings = new Settings().setProperties(Maps.fromProperties(System.getProperties()));
        if (settings.hasKey("orchestrator.configUrl")) {
            loadOrchestratorSettings(settings);
        }
        // Log the effective JDBC configuration for diagnosis.
        for (String key : settings.getKeysStartingWith("sonar.jdbc")) {
            LOG.info(key + ": " + settings.getString(key));
        }
        // An explicit dialect selects a real database; otherwise fall back to H2.
        boolean hasDialect = settings.hasKey("sonar.jdbc.dialect");
        if (hasDialect) {
            database = new DefaultDatabase(settings);
        } else {
            database = new H2Database("h2Tests");
        }
        database.start();
        LOG.info("Test Database: " + database);
        databaseCommands = DatabaseCommands.forDialect(database.getDialect());
        databaseTester = new DataSourceDatabaseTester(database.getDataSource());
        myBatis = new MyBatis(database, settings, new Logback());
        myBatis.start();
    }
    // Always start each test from empty tables.
    databaseCommands.truncateDatabase(database.getDataSource());
}
From source file:org.artifactory.common.ha.HaNodeProperties.java
/**
 * Exposes the node's configuration as an immutable string map.
 *
 * @return an immutable snapshot of all key/value pairs in {@code properties}
 */
public ImmutableMap<String, String> getPropertiesMap() {
    final ImmutableMap<String, String> snapshot = Maps.fromProperties(properties);
    return snapshot;
}
From source file:org.apache.tajo.client.v2.LegacyClientDelegate.java
/**
 * Creates a client delegate bound to a single, fixed master address.
 *
 * @param host master host name
 * @param port master port
 * @param clientParams client session parameters; may be {@code null}, in
 *        which case an empty parameter set is used
 */
public LegacyClientDelegate(String host, int port, Properties clientParams) {
    // Guard against a null clientParams so KeyValueSet never receives a null map.
    super(new DummyServiceTracker(NetUtils.createSocketAddr(host, port)), null, new KeyValueSet(
            clientParams == null ? new HashMap<String, String>() : Maps.fromProperties(clientParams)));
    queryClient = new QueryClientImpl(this);
}
From source file:alluxio.cli.fs.command.MountCommand.java
/**
 * Without arguments, prints the current mount table. With two arguments,
 * mounts the given UFS path at the given Alluxio path, applying any
 * readonly, shared, or property options from the command line.
 *
 * @param cl parsed command line
 * @return 0 on success
 * @throws AlluxioException if the mount operation fails
 * @throws IOException on a communication failure
 */
@Override
public int run(CommandLine cl) throws AlluxioException, IOException {
    String[] args = cl.getArgs();
    // no arguments: just list the existing mount points
    if (args.length == 0) {
        Map<String, MountPointInfo> mountTable = mFileSystem.getMountTable();
        UfsCommand.printMountInfo(mountTable);
        return 0;
    }
    AlluxioURI alluxioPath = new AlluxioURI(args[0]);
    AlluxioURI ufsPath = new AlluxioURI(args[1]);
    MountOptions mountOptions = MountOptions.defaults();
    if (cl.hasOption(READONLY_OPTION.getLongOpt())) {
        mountOptions.setReadOnly(true);
    }
    if (cl.hasOption(SHARED_OPTION.getLongOpt())) {
        mountOptions.setShared(true);
    }
    if (cl.hasOption(OPTION_OPTION.getLongOpt())) {
        // key=value pairs from --option become mount-point properties
        Properties optionProps = cl.getOptionProperties(OPTION_OPTION.getLongOpt());
        mountOptions.setProperties(Maps.fromProperties(optionProps));
    }
    mFileSystem.mount(alluxioPath, ufsPath, mountOptions);
    System.out.println("Mounted " + ufsPath + " at " + alluxioPath);
    return 0;
}
From source file:com.facebook.presto.kafka.util.EmbeddedKafka.java
/**
 * Starts an embedded Kafka broker for tests, wired to the given embedded
 * ZooKeeper instance. Fixed defaults are applied first; entries in
 * {@code overrideProperties} then replace or extend them.
 *
 * @param zookeeper the embedded ZooKeeper this broker connects to; not null
 * @param overrideProperties broker settings that take precedence over the
 *        built-in defaults; not null
 * @throws IOException if the broker port or data directory cannot be set up
 */
EmbeddedKafka(EmbeddedZookeeper zookeeper, Properties overrideProperties) throws IOException {
    this.zookeeper = requireNonNull(zookeeper, "zookeeper is null");
    requireNonNull(overrideProperties, "overrideProperties is null");

    this.port = findUnusedPort();
    // Broker logs live in a throwaway temp dir for the lifetime of the test.
    this.kafkaDataDir = Files.createTempDir();

    // Use a mutable map rather than ImmutableMap.builder(): the builder throws
    // IllegalArgumentException on duplicate keys, so an override that repeats a
    // default key (e.g. "port" or "log.dirs") would crash instead of overriding.
    Map<String, String> properties = Maps.newHashMap();
    properties.put("broker.id", "0");
    properties.put("host.name", "localhost");
    properties.put("num.partitions", "2");
    properties.put("log.flush.interval.messages", "10000");
    properties.put("log.flush.interval.ms", "1000");
    properties.put("log.retention.minutes", "60");
    properties.put("log.segment.bytes", "1048576");
    properties.put("auto.create.topics.enable", "false");
    properties.put("zookeeper.connection.timeout.ms", "1000000");
    properties.put("port", Integer.toString(port));
    properties.put("log.dirs", kafkaDataDir.getAbsolutePath());
    properties.put("zookeeper.connect", zookeeper.getConnectString());
    // Overrides win over defaults.
    properties.putAll(Maps.fromProperties(overrideProperties));

    KafkaConfig config = new KafkaConfig(toProperties(properties));
    this.kafka = new KafkaServerStartable(config);
}