Example usage for org.apache.commons.collections4 CollectionUtils isNotEmpty

Introduction

On this page you can find example usages of the org.apache.commons.collections4 CollectionUtils.isNotEmpty method.

Prototype

public static boolean isNotEmpty(final Collection<?> coll) 

Document

Null-safe check if the specified collection is not empty.
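
The check is equivalent to coll != null && !coll.isEmpty(). Below is a minimal, self-contained sketch of that behavior; the class name and variables are illustrative, not taken from the sources on this page.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.apache.commons.collections4.CollectionUtils;

public class IsNotEmptyExample {
    public static void main(String[] args) {
        List<String> nullList = null;
        List<String> emptyList = Collections.emptyList();
        List<String> populatedList = Arrays.asList("a", "b");

        // Null-safe: a null collection simply returns false, no NullPointerException.
        System.out.println(CollectionUtils.isNotEmpty(nullList));      // false
        System.out.println(CollectionUtils.isNotEmpty(emptyList));     // false
        System.out.println(CollectionUtils.isNotEmpty(populatedList)); // true

        // The manual check that isNotEmpty replaces:
        boolean manual = populatedList != null && !populatedList.isEmpty();
        System.out.println(manual); // true
    }
}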

Usage

From source file: org.finra.herd.dao.impl.EmrDaoImpl.java

/**
 * Creates the job flow instance configuration containing specification of the number and type of Amazon EC2 instances.
 *
 * @param emrClusterDefinition the EMR cluster definition that contains all the EMR parameters
 *
 * @return the job flow instance configuration
 */
private JobFlowInstancesConfig getJobFlowInstancesConfig(EmrClusterDefinition emrClusterDefinition) {
    // Create a new job flow instances configuration object.
    JobFlowInstancesConfig jobFlowInstancesConfig = new JobFlowInstancesConfig();
    // Set up master/slave security groups.
    jobFlowInstancesConfig.setEmrManagedMasterSecurityGroup(emrClusterDefinition.getMasterSecurityGroup());
    jobFlowInstancesConfig.setEmrManagedSlaveSecurityGroup(emrClusterDefinition.getSlaveSecurityGroup());

    // Add additional security groups to master nodes.
    jobFlowInstancesConfig
            .setAdditionalMasterSecurityGroups(emrClusterDefinition.getAdditionalMasterSecurityGroups());

    // Add additional security groups to slave nodes.
    jobFlowInstancesConfig
            .setAdditionalSlaveSecurityGroups(emrClusterDefinition.getAdditionalSlaveSecurityGroups());

    // Fill in the SSH key.
    if (StringUtils.isNotBlank(emrClusterDefinition.getSshKeyPairName())) {
        jobFlowInstancesConfig.setEc2KeyName(emrClusterDefinition.getSshKeyPairName());
    }

    // Fill in configuration for the instance groups in a cluster.
    jobFlowInstancesConfig
            .setInstanceGroups(getInstanceGroupConfigs(emrClusterDefinition.getInstanceDefinitions()));

    // Fill in instance fleet configuration.
    jobFlowInstancesConfig.setInstanceFleets(getInstanceFleets(emrClusterDefinition.getInstanceFleets()));

    // Fill in the subnet ID.
    if (StringUtils.isNotBlank(emrClusterDefinition.getSubnetId())) {
        // Use collection of subnet IDs when instance fleet configuration is specified. Otherwise, we expect a single EC2 subnet ID to be passed here.
        if (CollectionUtils.isNotEmpty(jobFlowInstancesConfig.getInstanceFleets())) {
            jobFlowInstancesConfig
                    .setEc2SubnetIds(herdStringHelper.splitAndTrim(emrClusterDefinition.getSubnetId(), ","));
        } else {
            jobFlowInstancesConfig.setEc2SubnetId(emrClusterDefinition.getSubnetId());
        }
    }

    // Fill in optional keep alive flag.
    if (emrClusterDefinition.isKeepAlive() != null) {
        jobFlowInstancesConfig.setKeepJobFlowAliveWhenNoSteps(emrClusterDefinition.isKeepAlive());
    }

    // Fill in optional termination protection flag.
    if (emrClusterDefinition.isTerminationProtection() != null) {
        jobFlowInstancesConfig.setTerminationProtected(emrClusterDefinition.isTerminationProtection());
    }

    // Fill in optional Hadoop version flag.
    if (StringUtils.isNotBlank(emrClusterDefinition.getHadoopVersion())) {
        jobFlowInstancesConfig.setHadoopVersion(emrClusterDefinition.getHadoopVersion());
    }

    // Return the object.
    return jobFlowInstancesConfig;
}

From source file: org.finra.herd.dao.impl.IndexSearchDaoImpl.java

@Override
public IndexSearchResponse indexSearch(final IndexSearchRequest indexSearchRequest, final Set<String> fields,
        final Set<String> match, final String bdefActiveIndex, final String tagActiveIndex) {
    // Build a basic Boolean query to which all the necessary clauses are added as needed
    BoolQueryBuilder indexSearchQueryBuilder = QueryBuilders.boolQuery();

    String searchPhrase = indexSearchRequest.getSearchTerm();

    // If there is a search phrase, then process it
    if (StringUtils.isNotEmpty(searchPhrase)) {
        // Determine if negation terms are present
        boolean negationTermsExist = herdSearchQueryHelper.determineNegationTermsPresent(indexSearchRequest);

        // Add the negation queries builder within a 'must-not' clause to the parent bool query if negation terms exist
        if (negationTermsExist) {
            // Build negation queries - each term is added to the query within a 'must-not' clause
            List<String> negationTerms = herdSearchQueryHelper.extractNegationTerms(indexSearchRequest);

            if (CollectionUtils.isNotEmpty(negationTerms)) {
                negationTerms.forEach(term -> {
                    indexSearchQueryBuilder
                            .mustNot(buildMultiMatchQuery(term, PHRASE, 100f, FIELD_TYPE_STEMMED, match));
                });
            }

            // Remove the negation terms from the search phrase
            searchPhrase = herdSearchQueryHelper.extractSearchPhrase(indexSearchRequest);
        }

        // Build a Dismax query from four multi-match queries with boost values; these values can be configured in the
        // DB, which provides a way to dynamically tune search behavior at runtime:
        //  1. Phrase prefix query on stemmed fields.
        //  2. Best fields query on ngrams fields.
        //  3. Phrase match query on shingles fields.
        //  4. Phrase match query on stemmed fields.
        final MultiMatchQueryBuilder phrasePrefixMultiMatchQueryBuilder = buildMultiMatchQuery(searchPhrase,
                PHRASE_PREFIX, configurationHelper
                        .getProperty(ConfigurationValue.ELASTICSEARCH_PHRASE_PREFIX_QUERY_BOOST, Float.class),
                FIELD_TYPE_STEMMED, match);

        final MultiMatchQueryBuilder bestFieldsMultiMatchQueryBuilder = buildMultiMatchQuery(searchPhrase,
                BEST_FIELDS, configurationHelper
                        .getProperty(ConfigurationValue.ELASTICSEARCH_BEST_FIELDS_QUERY_BOOST, Float.class),
                FIELD_TYPE_NGRAMS, match);

        final MultiMatchQueryBuilder phraseMultiMatchQueryBuilder = buildMultiMatchQuery(
                searchPhrase, PHRASE, configurationHelper
                        .getProperty(ConfigurationValue.ELASTICSEARCH_PHRASE_QUERY_BOOST, Float.class),
                FIELD_TYPE_SHINGLES, match);

        final MultiMatchQueryBuilder phraseStemmedMultiMatchQueryBuilder = buildMultiMatchQuery(
                searchPhrase, PHRASE, configurationHelper
                        .getProperty(ConfigurationValue.ELASTICSEARCH_PHRASE_QUERY_BOOST, Float.class),
                FIELD_TYPE_STEMMED, match);

        // Add the multi-match queries to a dis max query and add it to the parent bool query within a 'must' clause
        indexSearchQueryBuilder.must(
                disMaxQuery().add(phrasePrefixMultiMatchQueryBuilder).add(bestFieldsMultiMatchQueryBuilder)
                        .add(phraseMultiMatchQueryBuilder).add(phraseStemmedMultiMatchQueryBuilder));
    }

    // Add filter clauses if index search filters are specified in the request
    if (CollectionUtils.isNotEmpty(indexSearchRequest.getIndexSearchFilters())) {
        indexSearchQueryBuilder.filter(elasticsearchHelper.addIndexSearchFilterBooleanClause(
                indexSearchRequest.getIndexSearchFilters(), bdefActiveIndex, tagActiveIndex));
    }

    // Get function score query builder
    FunctionScoreQueryBuilder functionScoreQueryBuilder = getFunctionScoreQueryBuilder(indexSearchQueryBuilder,
            bdefActiveIndex);

    // The fields in the search indexes to return
    final String[] searchSources = { NAME_SOURCE, NAMESPACE_CODE_SOURCE, TAG_CODE_SOURCE, TAG_TYPE_CODE_SOURCE,
            DISPLAY_NAME_SOURCE, DESCRIPTION_SOURCE, BDEF_TAGS_SOURCE, BDEF_TAGS_SEARCH_SCORE_MULTIPLIER };

    // Create a new indexSearch source builder
    final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();

    // Fetch only the required fields
    searchSourceBuilder.fetchSource(searchSources, null);
    searchSourceBuilder.query(functionScoreQueryBuilder);

    // Create an indexSearch request builder
    SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(new ElasticsearchClientImpl(),
            SearchAction.INSTANCE);
    searchRequestBuilder.setIndices(bdefActiveIndex, tagActiveIndex);
    searchRequestBuilder.setSource(searchSourceBuilder).setSize(SEARCH_RESULT_SIZE)
            .addSort(SortBuilders.scoreSort());

    // Add highlighting if specified in the request
    if (BooleanUtils.isTrue(indexSearchRequest.isEnableHitHighlighting())) {
        // Fetch configured 'tag' values for highlighting
        String preTag = configurationHelper.getProperty(ConfigurationValue.ELASTICSEARCH_HIGHLIGHT_PRETAGS);
        String postTag = configurationHelper.getProperty(ConfigurationValue.ELASTICSEARCH_HIGHLIGHT_POSTTAGS);

        searchRequestBuilder.highlighter(buildHighlightQuery(preTag, postTag, match));
    }

    // Add facet aggregations if specified in the request
    if (CollectionUtils.isNotEmpty(indexSearchRequest.getFacetFields())) {
        searchRequestBuilder = elasticsearchHelper.addFacetFieldAggregations(
                new HashSet<>(indexSearchRequest.getFacetFields()), searchRequestBuilder);
    }

    // Log the actual elasticsearch query when debug is enabled
    LOGGER.debug("indexSearchRequest={}", searchRequestBuilder.toString());

    // Retrieve the indexSearch response
    final Search.Builder searchBuilder = new Search.Builder(searchRequestBuilder.toString())
            .addIndices(Arrays.asList(bdefActiveIndex, tagActiveIndex));
    final SearchResult searchResult = jestClientHelper.execute(searchBuilder.build());
    final List<IndexSearchResult> indexSearchResults = buildIndexSearchResults(fields, tagActiveIndex,
            bdefActiveIndex, searchResult, indexSearchRequest.isEnableHitHighlighting());

    List<Facet> facets = null;
    if (CollectionUtils.isNotEmpty(indexSearchRequest.getFacetFields())) {
        // Extract facets from the search response
        facets = new ArrayList<>(
                extractFacets(indexSearchRequest, searchResult, bdefActiveIndex, tagActiveIndex));
    }

    return new IndexSearchResponse(searchResult.getTotal(), indexSearchResults, facets);
}

From source file: org.finra.herd.dao.impl.IndexSearchDaoImpl.java

/**
 * Builds a {@link HighlightBuilder} based on pre/post tags and highlight fields fetched from the DB config; the result is added to the main
 * {@link SearchRequestBuilder}.
 *
 * @param preTag The specified pre-tag to be used for highlighting
 * @param postTag The specified post-tag to be used for highlighting
 * @param match the set of match fields that are to be searched upon in the index search
 *
 * @return A configured {@link HighlightBuilder} object
 */
private HighlightBuilder buildHighlightQuery(String preTag, String postTag, Set<String> match) {
    HighlightBuilder highlightBuilder = new HighlightBuilder();

    // Field matching is not needed since we are matching on multiple 'type' fields like stemmed and ngrams, and
    // enabling highlighting on all those fields would yield duplicates
    highlightBuilder.requireFieldMatch(false);

    // Set the configured value for pre-tags for highlighting
    highlightBuilder.preTags(preTag);

    // Set the configured value for post-tags for highlighting
    highlightBuilder.postTags(postTag);

    // Get highlight fields value from configuration
    String highlightFieldsValue;

    // If the match column is included
    if (match != null && match.contains(MATCH_COLUMN)) {
        highlightFieldsValue = configurationHelper
                .getProperty(ConfigurationValue.ELASTICSEARCH_COLUMN_MATCH_HIGHLIGHT_FIELDS);
    } else {
        highlightFieldsValue = configurationHelper
                .getProperty(ConfigurationValue.ELASTICSEARCH_HIGHLIGHT_FIELDS);
    }

    try {
        @SuppressWarnings("unchecked")
        IndexSearchHighlightFields highlightFieldsConfig = jsonHelper
                .unmarshallJsonToObject(IndexSearchHighlightFields.class, highlightFieldsValue);

        highlightFieldsConfig.getHighlightFields().forEach(highlightFieldConfig -> {

            // set the field name to the configured value
            HighlightBuilder.Field highlightField = new HighlightBuilder.Field(
                    highlightFieldConfig.getFieldName());

            // set matched_fields to the configured list of fields; this accounts for 'multifields' that analyze the same string in different ways
            if (CollectionUtils.isNotEmpty(highlightFieldConfig.getMatchedFields())) {
                highlightField.matchedFields(highlightFieldConfig.getMatchedFields().toArray(new String[0]));
            }

            // set fragment size to the configured value
            if (highlightFieldConfig.getFragmentSize() != null) {
                highlightField.fragmentSize(highlightFieldConfig.getFragmentSize());
            }

            // set the number of desired fragments to the configured value
            if (highlightFieldConfig.getNumOfFragments() != null) {
                highlightField.numOfFragments(highlightFieldConfig.getNumOfFragments());
            }

            highlightBuilder.field(highlightField);
        });

    } catch (IOException e) {
        LOGGER.warn("Could not parse the configured value for highlight fields: {}", highlightFieldsValue, e);
    }

    return highlightBuilder;
}

From source file: org.finra.herd.dao.impl.JobDefinitionDaoImpl.java

@Override
public List<JobDefinitionEntity> getJobDefinitionsByFilter(Collection<String> namespaces, String jobName) {
    // Create the criteria builder and the criteria.
    CriteriaBuilder builder = entityManager.getCriteriaBuilder();
    CriteriaQuery<JobDefinitionEntity> criteria = builder.createQuery(JobDefinitionEntity.class);

    // The criteria root is the job definition.
    Root<JobDefinitionEntity> jobDefinitionEntityRoot = criteria.from(JobDefinitionEntity.class);

    // Join to the other tables we can filter on.
    Join<JobDefinitionEntity, NamespaceEntity> namespaceEntityJoin = jobDefinitionEntityRoot
            .join(JobDefinitionEntity_.namespace);

    // Create the standard restrictions (i.e. the standard where clauses).
    List<Predicate> predicates = new ArrayList<>();
    if (CollectionUtils.isNotEmpty(namespaces)) {
        predicates.add(namespaceEntityJoin.get(NamespaceEntity_.code).in(namespaces));
    }
    if (StringUtils.isNotBlank(jobName)) {
        predicates.add(builder.equal(builder.upper(jobDefinitionEntityRoot.get(JobDefinitionEntity_.name)),
                jobName.toUpperCase()));
    }

    // Order the results by namespace and job name.
    List<Order> orderBy = new ArrayList<>();
    orderBy.add(builder.asc(namespaceEntityJoin.get(NamespaceEntity_.code)));
    orderBy.add(builder.asc(jobDefinitionEntityRoot.get(JobDefinitionEntity_.name)));

    // Add the clauses for the query.
    criteria.select(jobDefinitionEntityRoot)
            .where(builder.and(predicates.toArray(new Predicate[predicates.size()]))).orderBy(orderBy);

    // Execute the query and return the result list.
    return entityManager.createQuery(criteria).getResultList();
}

From source file: org.finra.herd.dao.impl.S3DaoImpl.java

@Override
public void deleteDirectory(final S3FileTransferRequestParamsDto params) {
    LOGGER.info("Deleting keys/key versions from S3... s3KeyPrefix=\"{}\" s3BucketName=\"{}\"",
            params.getS3KeyPrefix(), params.getS3BucketName());

    Assert.isTrue(!isRootKeyPrefix(params.getS3KeyPrefix()), "Deleting from root directory is not allowed.");

    try {
        // List S3 versions.
        List<S3VersionSummary> s3VersionSummaries = listVersions(params);
        LOGGER.info(
                "Found keys/key versions in S3 for deletion. s3KeyCount={} s3KeyPrefix=\"{}\" s3BucketName=\"{}\"",
                s3VersionSummaries.size(), params.getS3KeyPrefix(), params.getS3BucketName());

        // In order to avoid a MalformedXML AWS exception, we send the delete request only when there are key versions to delete.
        if (CollectionUtils.isNotEmpty(s3VersionSummaries)) {
            // Create an S3 client.
            AmazonS3Client s3Client = getAmazonS3(params);

            // Build a list of objects to be deleted.
            List<DeleteObjectsRequest.KeyVersion> keyVersions = new ArrayList<>();
            for (S3VersionSummary s3VersionSummary : s3VersionSummaries) {
                keyVersions.add(new DeleteObjectsRequest.KeyVersion(s3VersionSummary.getKey(),
                        s3VersionSummary.getVersionId()));
            }

            try {
                // Delete the key versions.
                deleteKeyVersions(s3Client, params.getS3BucketName(), keyVersions);
            } finally {
                s3Client.shutdown();
            }
        }
    } catch (AmazonClientException e) {
        throw new IllegalStateException(String.format(
                "Failed to delete keys/key versions with prefix \"%s\" from bucket \"%s\". Reason: %s",
                params.getS3KeyPrefix(), params.getS3BucketName(), e.getMessage()), e);
    }
}

From source file: org.finra.herd.dao.impl.S3DaoImpl.java

@Override
public void tagVersions(final S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto,
        final S3FileTransferRequestParamsDto s3ObjectTaggerParamsDto,
        final List<S3VersionSummary> s3VersionSummaries, final Tag tag) {
    // Eliminate delete markers from the list of version summaries to be tagged.
    List<S3VersionSummary> s3VersionSummariesWithoutDeleteMarkers = null;
    if (CollectionUtils.isNotEmpty(s3VersionSummaries)) {
        s3VersionSummariesWithoutDeleteMarkers = s3VersionSummaries.stream()
                .filter(s3VersionSummary -> !s3VersionSummary.isDeleteMarker()).collect(Collectors.toList());
    }

    LOGGER.info(
            "Tagging versions in S3... s3BucketName=\"{}\" s3KeyPrefix=\"{}\" s3VersionCount={} s3ObjectTagKey=\"{}\" s3ObjectTagValue=\"{}\" "
                    + "Excluding from tagging S3 delete markers... s3DeleteMarkerCount={}",
            s3FileTransferRequestParamsDto.getS3BucketName(), s3FileTransferRequestParamsDto.getS3KeyPrefix(),
            CollectionUtils.size(s3VersionSummariesWithoutDeleteMarkers), tag.getKey(), tag.getValue(),
            CollectionUtils.size(s3VersionSummaries)
                    - CollectionUtils.size(s3VersionSummariesWithoutDeleteMarkers));

    if (CollectionUtils.isNotEmpty(s3VersionSummariesWithoutDeleteMarkers)) {
        // Tag S3 versions.
        tagVersionsHelper(s3FileTransferRequestParamsDto, s3ObjectTaggerParamsDto,
                s3VersionSummariesWithoutDeleteMarkers, tag);

        // Log a list of S3 versions that got tagged.
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("Successfully tagged versions in S3 bucket. "
                    + "s3BucketName=\"{}\" s3KeyPrefix=\"{}\" s3VersionCount={} s3ObjectTagKey=\"{}\" s3ObjectTagValue=\"{}\"",
                    s3FileTransferRequestParamsDto.getS3BucketName(),
                    s3FileTransferRequestParamsDto.getS3KeyPrefix(),
                    s3VersionSummariesWithoutDeleteMarkers.size(), tag.getKey(), tag.getValue());

            for (S3VersionSummary s3VersionSummary : s3VersionSummariesWithoutDeleteMarkers) {
                LOGGER.info("s3Key=\"{}\" s3VersionId=\"{}\"", s3VersionSummary.getKey(),
                        s3VersionSummary.getVersionId());
            }
        }
    }
}

From source file: org.finra.herd.dao.impl.S3DaoImpl.java

private void tagVersionsHelper(final S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto,
        final S3FileTransferRequestParamsDto s3ObjectTaggerParamsDto,
        final List<S3VersionSummary> s3VersionSummaries, final Tag tag) {
    // Initialize an S3 version summary to be referenced in the error message in the catch block.
    S3VersionSummary currentS3VersionSummary = s3VersionSummaries.get(0);

    // Amazon S3 client to access S3 objects.
    AmazonS3Client s3Client = null;

    // Amazon S3 client for S3 object tagging.
    AmazonS3Client s3ObjectTaggerClient = null;

    try {
        // Create an S3 client to access S3 objects.
        s3Client = getAmazonS3(s3FileTransferRequestParamsDto);

        // Create an S3 client for S3 object tagging.
        s3ObjectTaggerClient = getAmazonS3(s3ObjectTaggerParamsDto);

        // Create a get object tagging request.
        GetObjectTaggingRequest getObjectTaggingRequest = new GetObjectTaggingRequest(
                s3FileTransferRequestParamsDto.getS3BucketName(), null, null);

        // Create a set object tagging request.
        SetObjectTaggingRequest setObjectTaggingRequest = new SetObjectTaggingRequest(
                s3FileTransferRequestParamsDto.getS3BucketName(), null, null, null);

        for (S3VersionSummary s3VersionSummary : s3VersionSummaries) {
            // Set the current S3 version summary.
            currentS3VersionSummary = s3VersionSummary;

            // Retrieve the current tagging information for the S3 version.
            getObjectTaggingRequest.setKey(s3VersionSummary.getKey());
            getObjectTaggingRequest.setVersionId(s3VersionSummary.getVersionId());
            GetObjectTaggingResult getObjectTaggingResult = s3Operations
                    .getObjectTagging(getObjectTaggingRequest, s3Client);

            // Update the list of tags to include the specified S3 object tag.
            List<Tag> updatedTags = new ArrayList<>();
            updatedTags.add(tag);
            if (CollectionUtils.isNotEmpty(getObjectTaggingResult.getTagSet())) {
                for (Tag currentTag : getObjectTaggingResult.getTagSet()) {
                    if (!StringUtils.equals(tag.getKey(), currentTag.getKey())) {
                        updatedTags.add(currentTag);
                    }
                }
            }

            // Update tagging information for the S3 version.
            setObjectTaggingRequest.setKey(s3VersionSummary.getKey());
            setObjectTaggingRequest.setVersionId(s3VersionSummary.getVersionId());
            setObjectTaggingRequest.setTagging(new ObjectTagging(updatedTags));
            s3Operations.setObjectTagging(setObjectTaggingRequest, s3ObjectTaggerClient);
        }
    } catch (Exception e) {
        throw new IllegalStateException(String.format(
                "Failed to tag S3 object with \"%s\" key and \"%s\" version id in \"%s\" bucket. Reason: %s",
                currentS3VersionSummary.getKey(), currentS3VersionSummary.getVersionId(),
                s3FileTransferRequestParamsDto.getS3BucketName(), e.getMessage()), e);
    } finally {
        if (s3Client != null) {
            s3Client.shutdown();
        }

        if (s3ObjectTaggerClient != null) {
            s3ObjectTaggerClient.shutdown();
        }
    }
}

From source file: org.finra.herd.dao.impl.SnsDaoImpl.java

@Override
public PublishResult publish(AwsParamsDto awsParamsDto, String topicArn, String messageText,
        List<MessageHeader> messageHeaders) {
    Map<String, MessageAttributeValue> messageAttributes = null;

    if (CollectionUtils.isNotEmpty(messageHeaders)) {
        messageAttributes = new HashMap<>();

        for (MessageHeader messageHeader : messageHeaders) {
            messageAttributes.put(messageHeader.getKey(), new MessageAttributeValue().withDataType("String")
                    .withStringValue(messageHeader.getValue()));
        }
    }

    return snsOperations.publish(topicArn, messageText, messageAttributes,
            awsClientFactory.getAmazonSNSClient(awsParamsDto));
}

From source file: org.finra.herd.dao.impl.SqsDaoImpl.java

@Override
public SendMessageResult sendMessage(AwsParamsDto awsParamsDto, String queueName, String messageText,
        List<MessageHeader> messageHeaders) {
    Map<String, MessageAttributeValue> messageAttributes = null;

    if (CollectionUtils.isNotEmpty(messageHeaders)) {
        messageAttributes = new HashMap<>();

        for (MessageHeader messageHeader : messageHeaders) {
            messageAttributes.put(messageHeader.getKey(), new MessageAttributeValue().withDataType("String")
                    .withStringValue(messageHeader.getValue()));
        }
    }

    return sqsOperations.sendMessage(queueName, messageText, messageAttributes,
            awsClientFactory.getAmazonSQSClient(awsParamsDto));
}

From source file: org.finra.herd.dao.impl.SubjectMatterExpertDaoImpl.java

@Override
public SubjectMatterExpertContactDetails getSubjectMatterExpertByKey(
        SubjectMatterExpertKey subjectMatterExpertKey) {
    // Get LDAP specific configuration settings.
    final String ldapUrl = configurationHelper.getProperty(ConfigurationValue.LDAP_URL);
    final String ldapBase = configurationHelper.getProperty(ConfigurationValue.LDAP_BASE);
    final String ldapUserDn = configurationHelper.getProperty(ConfigurationValue.LDAP_USER_DN);
    final String credStashEncryptionContext = configurationHelper
            .getProperty(ConfigurationValue.CREDSTASH_HERD_ENCRYPTION_CONTEXT);
    final String ldapUserCredentialName = configurationHelper
            .getProperty(ConfigurationValue.LDAP_USER_CREDENTIAL_NAME);

    // Log configuration values being used to create LDAP context source.
    LOGGER.info(
            "Creating LDAP context source using the following parameters: {}=\"{}\" {}=\"{}\" {}=\"{}\" {}=\"{}\" {}=\"{}\"...",
            ConfigurationValue.LDAP_URL.getKey(), ldapUrl, ConfigurationValue.LDAP_BASE.getKey(), ldapBase,
            ConfigurationValue.LDAP_USER_DN.getKey(), ldapUserDn,
            ConfigurationValue.CREDSTASH_HERD_ENCRYPTION_CONTEXT.getKey(), credStashEncryptionContext,
            ConfigurationValue.LDAP_USER_CREDENTIAL_NAME.getKey(), ldapUserCredentialName);

    // Retrieve the LDAP user password from CredStash.
    String ldapUserPassword;
    try {
        ldapUserPassword = credStashHelper.getCredentialFromCredStash(credStashEncryptionContext,
                ldapUserCredentialName);
    } catch (CredStashGetCredentialFailedException e) {
        throw new IllegalStateException(e);
    }

    // Create and initialize an LDAP context source.
    LdapContextSource contextSource = new LdapContextSource();
    contextSource.setUrl(ldapUrl);
    contextSource.setBase(ldapBase);
    contextSource.setUserDn(ldapUserDn);
    contextSource.setPassword(ldapUserPassword);
    contextSource.afterPropertiesSet();

    // Create an LDAP template.
    LdapTemplate ldapTemplate = new LdapTemplate(contextSource);

    // Create an LDAP query.
    LdapQuery ldapQuery = query()
            .where(configurationHelper.getProperty(ConfigurationValue.LDAP_ATTRIBUTE_USER_ID))
            .is(subjectMatterExpertKey.getUserId());

    // Create a subject matter expert contact details mapper.
    SubjectMatterExpertContactDetailsMapper subjectMatterExpertContactDetailsMapper = new SubjectMatterExpertContactDetailsMapper(
            configurationHelper.getProperty(ConfigurationValue.LDAP_ATTRIBUTE_USER_FULL_NAME),
            configurationHelper.getProperty(ConfigurationValue.LDAP_ATTRIBUTE_USER_JOB_TITLE),
            configurationHelper.getProperty(ConfigurationValue.LDAP_ATTRIBUTE_USER_EMAIL_ADDRESS),
            configurationHelper.getProperty(ConfigurationValue.LDAP_ATTRIBUTE_USER_TELEPHONE_NUMBER));

    // Get information for the specified subject matter expert.
    List<SubjectMatterExpertContactDetails> subjectMatterExpertContactDetailsList = ldapOperations
            .search(ldapTemplate, ldapQuery, subjectMatterExpertContactDetailsMapper);

    // Return the results.
    return CollectionUtils.isNotEmpty(subjectMatterExpertContactDetailsList)
            ? subjectMatterExpertContactDetailsList.get(0)
            : null;
}