Example usage for java.util Collections emptyMap

Introduction

This page collects example usages of java.util.Collections.emptyMap, drawn from open-source projects.

Prototype

@SuppressWarnings("unchecked")
public static final <K, V> Map<K, V> emptyMap() 
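
The @SuppressWarnings("unchecked") annotation mirrors the JDK implementation, which casts one shared, immutable EMPTY_MAP instance to Map<K, V>; the cast is safe because the returned map can never hold an entry.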

Document

Returns an empty map (immutable).
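
A minimal, self-contained sketch of that contract (the class name EmptyMapDemo is illustrative, not from any of the projects below): the type parameters are inferred from the assignment target, and any attempt to mutate the map fails.

import java.util.Collections;
import java.util.Map;

public class EmptyMapDemo {
    public static void main(String[] args) {
        // K and V are inferred as String and Integer from the target type.
        Map<String, Integer> scores = Collections.emptyMap();

        System.out.println(scores.isEmpty()); // true

        try {
            scores.put("alice", 1); // throws: the returned map is immutable
        } catch (UnsupportedOperationException e) {
            System.out.println("emptyMap() rejects mutation");
        }
    }
}

Because the instance is immutable and shared, returning Collections.emptyMap() instead of new HashMap<>() for the "no results" case avoids an allocation, which is the pattern most of the examples below follow.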

Usage

From source file:com.xylocore.cassandra.query.PagedQuery.java

/**
 * FILLIN
 * 
 * @param       aSession
 * @param       aExecutor
 * @param       aFirstQuery
 * @param       aNextQueries
 * @param       aKeyColumnNames
 */
PagedQuery(Session aSession, Executor aExecutor, PreparedStatement aFirstQuery,
        List<PreparedStatement> aNextQueries, Map<String, String> aKeyColumnNames) {
    Validate.notNull(aSession);
    Validate.notNull(aFirstQuery);
    Validate.notEmpty(aNextQueries);
    Validate.noNullElements(aNextQueries);
    Validate.notEmpty(aKeyColumnNames);

    if (aExecutor == null) {
        aExecutor = ForkJoinPool.commonPool();
    }

    session = aSession;
    executor = aExecutor;
    firstQuery = aFirstQuery;
    nextQueries = new ArrayList<>(aNextQueries);
    // Start from the shared immutable empty map, then replace it with a
    // defensive copy when key column names are supplied.
    keyColumnNames = Collections.emptyMap();

    if (aKeyColumnNames != null && !aKeyColumnNames.isEmpty()) {
        keyColumnNames = new HashMap<>(aKeyColumnNames);
    }
}

From source file:com.frank.search.solr.core.ResultHelper.java

static Map<com.frank.search.solr.core.query.PivotField, List<FacetPivotFieldEntry>> convertFacetQueryResponseToFacetPivotMap(
        FacetQuery query, QueryResponse response) {

    if (VersionUtil.isSolr3XAvailable()) {
        // pivot facets are a Solr 4+ feature, so there is nothing to convert
        return Collections.emptyMap();
    }

    Map<com.frank.search.solr.core.query.PivotField, List<FacetPivotFieldEntry>> facetResult = new HashMap<com.frank.search.solr.core.query.PivotField, List<FacetPivotFieldEntry>>();
    NamedList<List<PivotField>> facetPivot = response.getFacetPivot();
    if (facetPivot != null && facetPivot.size() > 0) {
        for (int i = 0; i < facetPivot.size(); i++) {
            String name = facetPivot.getName(i);
            List<PivotField> pivotResult = facetPivot.get(name);
            facetResult.put(new SimplePivotField(name), convertPivotResult(pivotResult));
        }
    }

    return facetResult;
}

From source file:com.erudika.para.persistence.IndexBasedDAO.java

@Override
@SuppressWarnings("unchecked")
public <P extends ParaObject> Map<String, P> readAll(String appid, List<String> keys, boolean getAllColumns) {
    if (keys == null || StringUtils.isBlank(appid)) {
        return Collections.emptyMap();
    }
    Map<String, P> results = new LinkedHashMap<String, P>(keys.size());
    List<P> list = search.findByIds(getAppidWithRouting(appid), keys);

    if (list.isEmpty()) {
        for (String key : keys) {
            if (getMap(appid).containsKey(key)) {
                results.put(key, (P) read(key));
            }
        }
    } else {
        for (P p : list) {
            if (p != null) {
                results.put(p.getId(), p);
            }
        }
    }

    logger.debug("DAO.readAll() {}", results.size());
    return results;
}

From source file:alfio.manager.FileUploadManager.java

private Map<String, String> getAttributes(UploadBase64FileModification file) {
    if (!StringUtils.startsWith(file.getType(), "image/")) {
        return Collections.emptyMap();
    }

    try {
        BufferedImage image = ImageIO.read(new ByteArrayInputStream(file.getFile()));
        Map<String, String> attributes = new HashMap<>();
        attributes.put(FileBlobMetadata.ATTR_IMG_WIDTH, String.valueOf(image.getWidth()));
        attributes.put(FileBlobMetadata.ATTR_IMG_HEIGHT, String.valueOf(image.getHeight()));
        return attributes;
    } catch (IOException e) {
        log.error("error while processing image: ", e);
        return Collections.emptyMap();
    }
}

From source file:org.apache.metron.elasticsearch.integration.ElasticsearchSearchIntegrationTest.java

protected static void loadTestData() throws ParseException, IOException {
    // add bro template
    JSONObject broTemplate = JSONUtils.INSTANCE.load(new File(broTemplatePath), JSONObject.class);
    addTestFieldMappings(broTemplate, "bro_doc");
    String broTemplateJson = JSONUtils.INSTANCE.toJSON(broTemplate, true);
    HttpEntity broEntity = new NStringEntity(broTemplateJson, ContentType.APPLICATION_JSON);
    Response response = lowLevelClient.performRequest("PUT", "/_template/bro_template", Collections.emptyMap(),
            broEntity);
    assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
    // add snort template
    JSONObject snortTemplate = JSONUtils.INSTANCE.load(new File(snortTemplatePath), JSONObject.class);
    addTestFieldMappings(snortTemplate, "snort_doc");
    String snortTemplateJson = JSONUtils.INSTANCE.toJSON(snortTemplate, true);
    HttpEntity snortEntity = new NStringEntity(snortTemplateJson, ContentType.APPLICATION_JSON);
    response = lowLevelClient.performRequest("PUT", "/_template/snort_template", Collections.emptyMap(),
            snortEntity);
    assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
    // create bro index
    response = lowLevelClient.performRequest("PUT", BRO_INDEX);
    assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
    // create snort index
    response = lowLevelClient.performRequest("PUT", SNORT_INDEX);
    assertThat(response.getStatusLine().getStatusCode(), equalTo(200));

    JSONArray broRecords = (JSONArray) new JSONParser().parse(broData);

    BulkRequest bulkRequest = new BulkRequest();
    for (Object o : broRecords) {
        JSONObject json = (JSONObject) o;
        IndexRequest indexRequest = new IndexRequest(BRO_INDEX, "bro_doc", (String) json.get("guid"));
        indexRequest.source(json);
        indexRequest.timestamp(json.get("timestamp").toString());
        bulkRequest.add(indexRequest);
    }
    bulkRequest.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL);
    BulkResponse bulkResponse = highLevelClient.bulk(bulkRequest);
    assertFalse(bulkResponse.hasFailures());
    assertThat(bulkResponse.status().getStatus(), equalTo(200));

    JSONArray snortRecords = (JSONArray) new JSONParser().parse(snortData);

    bulkRequest = new BulkRequest();
    for (Object o : snortRecords) {
        JSONObject json = (JSONObject) o;
        IndexRequest indexRequest = new IndexRequest(SNORT_INDEX, "snort_doc", (String) json.get("guid"));
        indexRequest.source(json);
        indexRequest.timestamp(json.get("timestamp").toString());
        bulkRequest.add(indexRequest);
    }
    bulkRequest.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL);
    bulkResponse = highLevelClient.bulk(bulkRequest);
    assertFalse(bulkResponse.hasFailures());
    assertThat(bulkResponse.status().getStatus(), equalTo(200));
}

From source file:be.hikage.springtemplate.ImportTemplateBeanDefinitionParser.java

private Map<String, String> prepareReplacement(Element element) {

    List<Element> replacementElements = DomUtils.getChildElementsByTagName(element, REPLACEMENT_TAG);
    if (replacementElements.isEmpty())
        return Collections.emptyMap();

    Map<String, String> replacementMap = new HashMap<>();
    for (Element replacement : replacementElements) {
        replacementMap.put(replacement.getAttribute(PATTERN_ATTRIBUTE),
                replacement.getAttribute(SUBSTITUTION_ATTRIBUTE));
    }
    return replacementMap;
}

From source file:com.marand.thinkmed.medications.connector.impl.rest.RestMedicationsConnector.java

@Override
public Map<String, PatientDisplayWithLocationDto> getPatientDisplayWithLocationMap(
        final Collection<String> careProviderIds, final Collection<String> patientIds) {
    final String patientsListJson;
    if (patientIds != null) {
        if (patientIds.isEmpty()) {
            return Collections.emptyMap();
        }
        final String patientIdsString = patientIds.stream().collect(Collectors.joining(","));
        patientsListJson = restClient.getPatientsSummariesList(patientIdsString);
    } else {
        Preconditions.checkArgument(careProviderIds != null, "Both patientIds and careProviderId are null");
        final String careProviderIdsString = careProviderIds.stream().collect(Collectors.joining(","));
        patientsListJson = restClient.getCareProvidersPatientsSummariesList(careProviderIdsString);
    }
    final List<PatientDisplayWithLocationDto> patientsList = Arrays
            .asList(JsonUtil.fromJson(patientsListJson, PatientDisplayWithLocationDto[].class));

    return patientsList.stream()
            .collect(Collectors.toMap(p -> p.getPatientDisplayDto().getId(), Function.identity()));
}

From source file:edu.indiana.d2i.sigiri.service.SigiriServiceSkeleton.java

private Map<String, String> getQOSParametersAsNameValuePairs(QOSParameter[] qosParameters) {
    if (qosParameters == null) {
        return Collections.emptyMap();
    }
    Map<String, String> qosParameterMap = new HashMap<String, String>();
    for (QOSParameter parameter : qosParameters) {
        qosParameterMap.put(parameter.getName(), parameter.getValue());
    }
    return qosParameterMap;
}

From source file:com.ebay.cloud.cms.sysmgmt.monitor.metrics.MongoMetric.java

private Map<String, Object> listDatabases(final MongoClient client) {
    try {
        Future<Map<String, Object>> future = executor.submit(new Callable<Map<String, Object>>() {
            @Override
            public Map<String, Object> call() {
                Map<String, Object> resultMap = new HashMap<String, Object>();
                List<String> databaseNames = client.getDatabaseNames();
                for (String databaseName : databaseNames) {
                    DB db = client.getDB(databaseName);
                    if (db != null) {
                        CommandResult cr = db.getStats();
                        if (cr != null) {
                            Object dataSize = cr.get("dataSize");
                            resultMap.put(databaseName, dataSize);
                        }
                    }
                }
                return resultMap;
            }
        });
        return future.get(listWaitPeroid, TimeUnit.MILLISECONDS);
    } catch (Exception e) {
        return Collections.emptyMap();
    }
}

From source file:com.ikanow.aleph2.analytics.storm.assets.TestPassthroughTopology.java

@Test
public void test_passthroughTopology() throws InterruptedException, ExecutionException {
    //////////////////////////////////////////////////////
    // PHASE 1: GET AN IN-TECHNOLOGY CONTEXT
    // Bucket
    final AnalyticThreadJobBean.AnalyticThreadJobInputBean analytic_input = BeanTemplateUtils
            .build(AnalyticThreadJobBean.AnalyticThreadJobInputBean.class)
            .with(AnalyticThreadJobBean.AnalyticThreadJobInputBean::data_service, "stream")
            .with(AnalyticThreadJobBean.AnalyticThreadJobInputBean::resource_name_or_id, "").done().get();

    final AnalyticThreadJobBean.AnalyticThreadJobOutputBean analytic_output = BeanTemplateUtils
            .build(AnalyticThreadJobBean.AnalyticThreadJobOutputBean.class)
            .with(AnalyticThreadJobBean.AnalyticThreadJobOutputBean::is_transient, false).done().get();

    final AnalyticThreadJobBean analytic_job1 = BeanTemplateUtils.build(AnalyticThreadJobBean.class)
            .with(AnalyticThreadJobBean::name, "analytic_job1")
            .with(AnalyticThreadJobBean::inputs, Arrays.asList(analytic_input))
            .with(AnalyticThreadJobBean::output, analytic_output).done().get();

    final AnalyticThreadBean analytic_thread = BeanTemplateUtils.build(AnalyticThreadBean.class)
            .with(AnalyticThreadBean::jobs, Arrays.asList(analytic_job1)).done().get();

    final DataBucketBean test_bucket = BeanTemplateUtils.build(DataBucketBean.class)
            .with(DataBucketBean::_id, "test_passthroughtopology").with(DataBucketBean::modified, new Date())
            .with(DataBucketBean::full_name, "/test/passthrough")
            .with(DataBucketBean::analytic_thread, analytic_thread)
            .with("data_schema", BeanTemplateUtils.build(DataSchemaBean.class)
                    .with("search_index_schema",
                            BeanTemplateUtils.build(DataSchemaBean.SearchIndexSchemaBean.class).done().get())
                    .done().get())
            .done().get();

    //////////////////////////////////////////////////////
    // PHASE 2: SPECIFICALLY FOR THIS TEST
    //(Also: register a listener on the output to generate a secondary queue)
    final ICoreDistributedServices cds = _service_context
            .getService(ICoreDistributedServices.class, Optional.empty()).get();
    final String end_queue_topic = cds.generateTopicName(test_bucket.full_name(),
            ICoreDistributedServices.QUEUE_END_NAME);
    cds.createTopic(end_queue_topic, Optional.of(Collections.emptyMap()));

    //////////////////////////////////////////////////////
    // PHASE 3: SUBMIT TO TESTING SERVICE
    final BasicMessageBean res = new MockStormTestingService(_service_context).testAnalyticModule(test_bucket)
            .get();
    assertTrue("Storm starts", res.success());

    _logger.info("******** Submitted storm cluster: " + res.message());
    Thread.sleep(5000L);

    //////////////////////////////////////////////////////
    //PHASE 4: PREPARE INPUT DATA

    // 4a: cleanse

    final ISearchIndexService index_service = _service_context
            .getService(ISearchIndexService.class, Optional.empty()).get();
    final ICrudService<JsonNode> crud_service = index_service.getDataService().flatMap(
            s -> s.getWritableDataService(JsonNode.class, test_bucket, Optional.empty(), Optional.empty()))
            .flatMap(IDataWriteService::getCrudService).get();
    crud_service.deleteDatastore().get();
    _logger.info("******** Cleansed existing datastore");
    Thread.sleep(2000L);
    assertEquals(0L, crud_service.countObjects().get().intValue());

    // 4b: write to kafka

    final String topic_name = cds.generateTopicName(test_bucket.full_name(), Optional.empty());
    Iterator<String> consumer = cds.consumeAs(end_queue_topic,
            Optional.of(BucketUtils.getUniqueSignature(test_bucket.full_name(), Optional.empty())),
            Optional.empty());
    cds.produce(topic_name, "{\"test\":\"test1\"}");
    _logger.info("******** Written to CDS: " + topic_name);

    //////////////////////////////////////////////////////
    //PHASE 5: CHECK OUTPUT DATA      

    // 5a: check ES index

    for (int i = 0; i < 60; ++i) {
        Thread.sleep(1000L);
        if (crud_service.countObjects().get() > 0) {
            _logger.info("******** Waited for ES object to populate: " + i);
            break;
        }
    }
    assertEquals("Should be 1 object in the repo", 1L, crud_service.countObjects().get().intValue());
    assertEquals("Object should be test:test1", 1L,
            crud_service.countObjectsBySpec(CrudUtils.allOf().when("test", "test1")).get().intValue());

    // 5b: check kafka queue
    Thread.sleep(5000); //wait for producers to dump batch
    int message_count = 0;
    //read the item off the queue
    while (consumer.hasNext()) {
        consumer.next();
        message_count++;
    }
    assertEquals(1, message_count);
}