Example usage for java.util Collections emptyMap

List of usage examples for java.util Collections emptyMap

Introduction

On this page you can find example usages of java.util.Collections.emptyMap.

Prototype

@SuppressWarnings("unchecked")
public static final <K, V> Map<K, V> emptyMap() 

Document

Returns an empty map (immutable).
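
A minimal sketch, using only the JDK, of the two properties that matter in practice: the type parameters are inferred from the target type, and any attempt to mutate the shared instance fails (variable names here are illustrative):

import java.util.Collections;
import java.util.Map;

public class EmptyMapDemo {
    public static void main(String[] args) {
        // Type parameters are inferred from the assignment target.
        Map<String, Integer> scores = Collections.emptyMap();
        System.out.println(scores.size()); // 0

        try {
            scores.put("alice", 1); // the shared empty map is immutable
        } catch (UnsupportedOperationException e) {
            System.out.println("emptyMap() rejects mutation");
        }
    }
}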

Usage

From source file: org.openlmis.fulfillment.security.CustomUserAuthenticationConverterTest.java

@Test
public void shouldReturnNullWhenMapDoesNotContainPrincipal() {
    Authentication authentication = userAuthenticationConverter.extractAuthentication(Collections.emptyMap());

    assertNull(authentication);
}

From source file: org.vas.test.rest.RestImpl.java

@Override
public <T> Response<T> post(String uri, Class<T> klass, Object... args) {
    return post(uri, Collections.emptyMap(), klass, args);
}
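
This snippet shows a common idiom: Java has no default parameter values, so a convenience overload passes Collections.emptyMap() where the caller omitted the map argument. A minimal self-contained sketch of the same pattern (the class and method names are hypothetical):

import java.util.Collections;
import java.util.Map;

public class HttpClientSketch {
    // Full form: the caller supplies headers explicitly.
    public String get(String uri, Map<String, String> headers) {
        return uri + " requested with " + headers.size() + " header(s)";
    }

    // Convenience overload: emptyMap() stands in for "no headers".
    public String get(String uri) {
        return get(uri, Collections.emptyMap());
    }

    public static void main(String[] args) {
        System.out.println(new HttpClientSketch().get("/ping"));
    }
}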

From source file: com.netflix.spinnaker.clouddriver.ecs.provider.agent.EcsClusterCachingAgent.java

@Override
protected Map<String, Collection<CacheData>> generateFreshData(Collection<String> clusterArns) {
    Collection<CacheData> dataPoints = new LinkedList<>();
    for (String clusterArn : clusterArns) {
        String clusterName = StringUtils.substringAfterLast(clusterArn, "/");

        Map<String, Object> attributes = convertClusterArnToAttributes(accountName, region, clusterArn);

        String key = Keys.getClusterKey(accountName, region, clusterName);
        dataPoints.add(new DefaultCacheData(key, attributes, Collections.emptyMap()));
    }

    log.info("Caching " + dataPoints.size() + " ECS clusters in " + getAgentType());
    Map<String, Collection<CacheData>> dataMap = new HashMap<>();
    dataMap.put(ECS_CLUSTERS.toString(), dataPoints);

    return dataMap;
}

From source file: ch.cyberduck.core.worker.TransferPromptFilterWorker.java

@Override
public Map<TransferItem, TransferStatus> initialize() {
    return Collections.emptyMap();
}

From source file: com.thoughtworks.go.api.ApiController.java

protected Map<String, Object> readRequestBodyAsJSON(Request req) {
    Map<String, Object> map = GsonTransformer.getInstance().fromJson(req.body(),
            new TypeToken<Map<String, Object>>() {
            }.getType());
    if (map == null) {
        return Collections.emptyMap();
    }
    return map;
}
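
Here emptyMap() acts as a null-safe default: the JSON parser returns null for a blank body, and returning the immutable empty map instead spares callers a null check. A minimal sketch of the same idiom, assuming plain Gson on the classpath (class and method names are hypothetical):

import java.util.Collections;
import java.util.Map;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

public class JsonBodySketch {
    // Returns an immutable empty map instead of null for blank input,
    // so callers can iterate or query without a null check.
    static Map<String, Object> parse(String body) {
        Map<String, Object> map = new Gson().fromJson(body,
                new TypeToken<Map<String, Object>>() {}.getType());
        return map == null ? Collections.emptyMap() : map;
    }

    public static void main(String[] args) {
        System.out.println(parse(""));          // {}
        System.out.println(parse("{\"a\":1}")); // {a=1.0}
    }
}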

From source file: com.ikanow.aleph2.core.shared.services.ReadOnlyMultiCrudService.java

/** Returns a multi-bucket CRUD wrapper.
 *  NOTE: doesn't currently support limits or sortBy properly.
 * @param buckets - a list of bucket paths
 * @param maybe_extra_query_builder - for each bucket, lets the user specify an additional query to be applied to all queries
 * @return an Optional wrapping the combined read-only service, or Optional.empty() if no underlying CRUD services could be resolved
 */
public static <O> Optional<ReadOnlyMultiCrudService<O>> from(final Class<O> clazz, final List<String> buckets,
        final Optional<String> owner_id, final IGenericDataService data_service,
        final IManagementCrudService<DataBucketBean> bucket_store, final IServiceContext service_context,
        final Optional<Function<DataBucketBean, Optional<QueryComponent<O>>>> maybe_extra_query_builder) {

    final DataBucketBean dummy_bucket = BeanTemplateUtils.build(DataBucketBean.class)
            .with(DataBucketBean::owner_id, owner_id.orElse(null))
            .with(DataBucketBean::multi_bucket_children, buckets).done().get();

    final List<ICrudService<O>> services = MultiBucketUtils
            .expandMultiBuckets(Arrays.asList(dummy_bucket), bucket_store, service_context).values().stream()
            .map(b -> Tuples._2T(b,
                    data_service.getReadableCrudService(clazz, Arrays.asList(b), Optional.empty())
                            .<ICrudService<O>>flatMap(ds -> ds.getCrudService())))
            .filter(bucket_crud -> bucket_crud._2().isPresent())
            .map(bucket_crud -> Tuples._2T(bucket_crud._1(), bucket_crud._2().get())) // safe: the filter above guarantees presence
            .map(bucket_crud -> maybe_extra_query_builder.flatMap(qb -> qb.apply(bucket_crud._1()))
                    .map(extra_query -> CrudServiceUtils.intercept(clazz, bucket_crud._2(),
                            Optional.of(extra_query), Optional.empty(), Collections.emptyMap(),
                            Optional.empty()))
                    .orElse(bucket_crud._2()))
            .collect(Collectors.toList());

    return services.isEmpty() ? Optional.empty() : Optional.of(new ReadOnlyMultiCrudService<O>(services));
}

From source file: org.cloudfoundry.tools.env.CloudEnvironment.java

@SuppressWarnings("unchecked")
private Map<String, Object> getValueAsMap(String name) {
    String value = getValue(name);
    if (StringUtils.hasLength(value)) {
        try {
            return this.objectMapper.readValue(value, Map.class);
        } catch (Exception e) {
            throw new IllegalStateException("Unable to read value '" + name + "' as a map", e);
        }
    }
    return Collections.emptyMap();
}

From source file: com.haulmont.yarg.console.ConsoleRunner.java

private static Map<String, Object> parseReportParams(CommandLine cmd, Report report) {
    if (cmd.hasOption(REPORT_PARAMETER)) {
        Map<String, Object> params = new HashMap<String, Object>();
        Properties optionProperties = cmd.getOptionProperties(REPORT_PARAMETER);
        for (ReportParameter reportParameter : report.getReportParameters()) {
            String paramValueStr = optionProperties.getProperty(reportParameter.getAlias());
            if (paramValueStr != null) {
                params.put(reportParameter.getAlias(),
                        converter.convertFromString(reportParameter.getParameterClass(), paramValueStr));
            }
        }

        return params;
    } else {
        return Collections.emptyMap();
    }
}

From source file: dataprocessing.elasticsearch.ElasticSearchClient.java

/** ***************************************************************
 * Indexes a line from a corpus.
 *
 * @param corpus corpus the line is from
 * @param file   file the line is from
 * @param line   line number in the corpus
 * @param text   line text
 *
 * Mapping for chatbot/dialog:
 PUT chatbot
 {
     "mappings": {
         "dialog": {
             "properties": {
                 "corpus": {
                     "type": "string",
                     "index": "not_analyzed"
                 },
                 "file": {
                     "type": "string",
                     "index": "not_analyzed"
                 },
                 "line": {
                     "type": "long",
                     "index": "not_analyzed"
                 },
                 "text": {
                     "type": "string",
                     "index": "analyzed",
                     "analyzer": "english"
                 }
             }
         }
     }
 }
 */
public void indexDocument(String corpus, String file, int line, String text) {

    try {
        JSONObject entity = new JSONObject();
        entity.put("corpus", corpus);
        entity.put("file", file);
        entity.put("line", line);
        entity.put("text", text);

        // Post JSON entity
        client.performRequest("POST", String.format("/%s/%s/%s", index, type, corpus + "_" + file + "_" + line),
                Collections.emptyMap(), new NStringEntity(entity.toString(), ContentType.APPLICATION_JSON),
                header);
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file: com.streamsets.pipeline.stage.destination.jobtype.avroparquet.LargeInputFileIT.java

@Test
public void testLargeFile() throws Exception {
    File inputFile = new File(getInputDir(), "input.avro");
    File outputFile = new File(getOutputDir(), "input.parquet");
    long recordCount = Long.valueOf(System.getProperty(TARGET_RECORD_COUNT, TARGET_RECORD_COUNT_DEFAULT));
    StopWatch stopWatch = new StopWatch();

    stopWatch.start();
    generateAvroFile(AVRO_SCHEMA, inputFile, recordCount);
    stopWatch.stop();

    LOG.info("Created input avro file in {}, contains {} records and have {}.", stopWatch.toString(),
            recordCount, humanReadableSize(inputFile.length()));

    AvroConversionCommonConfig commonConfig = new AvroConversionCommonConfig();
    AvroParquetConfig conf = new AvroParquetConfig();
    commonConfig.inputFile = inputFile.getAbsolutePath();
    commonConfig.outputDirectory = getOutputDir();

    MapReduceExecutor executor = generateExecutor(commonConfig, conf, Collections.emptyMap());

    ExecutorRunner runner = new ExecutorRunner.Builder(MapReduceDExecutor.class, executor)
            .setOnRecordError(OnRecordError.TO_ERROR).build();
    runner.runInit();

    Record record = RecordCreator.create();
    record.set(Field.create(Collections.<String, Field>emptyMap()));

    stopWatch.reset();
    stopWatch.start();
    runner.runWrite(ImmutableList.of(record));
    stopWatch.stop();
    LOG.info("Generated output parquet file in {} and have {}.", stopWatch.toString(),
            humanReadableSize(outputFile.length()));

    Assert.assertEquals(0, runner.getErrorRecords().size());
    runner.runDestroy();

    validateParquetFile(new Path(outputFile.getAbsolutePath()), recordCount);
}