Example usage for java.util Collections EMPTY_MAP

Introduction

On this page you can find usage examples for java.util Collections EMPTY_MAP, drawn from open-source projects.

Prototype

public static final Map EMPTY_MAP

Documentation

The empty map (immutable). This map is serializable.
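
Because EMPTY_MAP predates generics, it is declared as a raw Map, so assigning it to a parameterized type compiles only with an unchecked warning; the generic equivalent, Collections.emptyMap(), returns the same shared instance with its type parameters inferred. A minimal demonstration of both points (standard JDK behavior):

import java.util.Collections;
import java.util.Map;

public class EmptyMapDemo {
    public static void main(String[] args) {
        Map<String, String> raw = Collections.EMPTY_MAP;     // unchecked-conversion warning
        Map<String, String> typed = Collections.emptyMap();  // type parameters inferred, no warning

        System.out.println(raw == typed); // true: both refer to the same shared immutable instance

        try {
            typed.put("key", "value");    // the empty map rejects all mutation
        } catch (UnsupportedOperationException expected) {
            System.out.println("put rejected: the map is immutable");
        }
    }
}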

Usage

From source file: org.alfresco.rest.api.tests.TestCMIS.java

/**
 * MNT-12680
 * The creation date of a version should be the same as the creation date of the original node.
 * @throws Exception
 */
@Test
public void testCreationDate() throws Exception {
    // create a site
    final TestNetwork network1 = getTestFixture().getRandomNetwork();
    String username = "user" + System.currentTimeMillis();
    PersonInfo personInfo = new PersonInfo(username, username, username, "password", null, null, null, null,
            null, null, null);
    TestPerson person1 = network1.createUser(personInfo);
    String person1Id = person1.getId();

    final String siteName = "site" + System.currentTimeMillis();

    TenantUtil.runAsUserTenant(new TenantRunAsWork<NodeRef>() {

        @Override
        public NodeRef doWork() throws Exception {
            SiteInformation siteInfo = new SiteInformation(siteName, siteName, siteName, SiteVisibility.PUBLIC);
            final TestSite site = network1.createSite(siteInfo);
            final NodeRef resNode = repoService.createDocument(site.getContainerNodeRef("documentLibrary"),
                    "testdoc.txt", "Test Doc1 Title", "Test Doc1 Description", "Test Content");
            return resNode;
        }
    }, person1Id, network1.getId());

    // create a document
    publicApiClient.setRequestContext(new RequestContext(network1.getId(), person1Id));
    CmisSession cmisSession = publicApiClient.createPublicApiCMISSession(Binding.atom, CMIS_VERSION_10,
            AlfrescoObjectFactoryImpl.class.getName());
    AlfrescoFolder docLibrary = (AlfrescoFolder) cmisSession
            .getObjectByPath("/Sites/" + siteName + "/documentLibrary");
    Map<String, String> properties = new HashMap<String, String>();
    {
        properties.put(PropertyIds.OBJECT_TYPE_ID, TYPE_CMIS_DOCUMENT);
        properties.put(PropertyIds.NAME, "mydoc-" + GUID.generate() + ".txt");
    }
    ContentStreamImpl fileContent = new ContentStreamImpl();
    {
        ContentWriter writer = new FileContentWriter(TempFileProvider.createTempFile(GUID.generate(), ".txt"));
        writer.putContent("some content");
        ContentReader reader = writer.getReader();
        fileContent.setMimeType(MimetypeMap.MIMETYPE_TEXT_PLAIN);
        fileContent.setStream(reader.getContentInputStream());
    }

    Document autoVersionedDoc = docLibrary.createDocument(properties, fileContent, VersioningState.MAJOR);
    String objectId = autoVersionedDoc.getId();
    String bareObjectId = getBareObjectId(objectId);
    // create versions
    for (int i = 0; i < 3; i++) {
        Document doc1 = (Document) cmisSession.getObject(bareObjectId);

        ObjectId pwcId = doc1.checkOut();
        Document pwc = (Document) cmisSession.getObject(pwcId.getId());

        ContentStreamImpl contentStream = new ContentStreamImpl();
        {
            ContentWriter writer = new FileContentWriter(
                    TempFileProvider.createTempFile(GUID.generate(), ".txt"));
            writer.putContent(GUID.generate());
            ContentReader reader = writer.getReader();
            contentStream.setMimeType(MimetypeMap.MIMETYPE_TEXT_PLAIN);
            contentStream.setStream(reader.getContentInputStream());
        }
        pwc.checkIn(true, Collections.EMPTY_MAP, contentStream, "checkin " + i);
    }

    GregorianCalendar cDateFirst = cmisSession.getAllVersions(bareObjectId).get(0).getCreationDate();
    GregorianCalendar cDateSecond = cmisSession.getAllVersions(bareObjectId).get(2).getCreationDate();

    if (cDateFirst.before(cDateSecond) || cDateFirst.after(cDateSecond)) {
        fail("The creation date of version should be the same as creation date of the original node");
    }
}
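
The checkIn call above passes the raw Collections.EMPTY_MAP where OpenCMIS expects a typed properties map, which compiles only with an unchecked-conversion warning. A warning-free sketch of the same call, using Collections.emptyMap() with an explicit type witness (same variables as in the loop above):

    ObjectId checkedInId = pwc.checkIn(true, Collections.<String, Object>emptyMap(), contentStream, "checkin " + i);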

From source file: org.cloudfoundry.identity.uaa.oauth.UaaTokenServicesTests.java

private Jwt getIdToken(List<String> scopes) {
    AuthorizationRequest authorizationRequest = new AuthorizationRequest(CLIENT_ID, scopes);

    authorizationRequest.setResponseTypes(new HashSet<>(Arrays.asList(CompositeAccessToken.ID_TOKEN)));

    UaaPrincipal uaaPrincipal = new UaaPrincipal(defaultUser.getId(), defaultUser.getUsername(),
            defaultUser.getEmail(), defaultUser.getOrigin(), defaultUser.getExternalId(),
            defaultUser.getZoneId());
    UaaAuthentication userAuthentication = new UaaAuthentication(uaaPrincipal, null, defaultUserAuthorities,
            new HashSet<>(Arrays.asList("group1", "group2")), Collections.EMPTY_MAP, null, true,
            System.currentTimeMillis(), System.currentTimeMillis() + 1000l * 60l);
    Set<String> amr = new HashSet<>();
    amr.addAll(Arrays.asList("ext", "mfa", "rba"));
    userAuthentication.setAuthenticationMethods(amr);
    userAuthentication.setAuthContextClassRef(new HashSet<>(Arrays.asList(AuthnContext.PASSWORD_AUTHN_CTX)));
    OAuth2Authentication authentication = new OAuth2Authentication(authorizationRequest.createOAuth2Request(),
            userAuthentication);

    OAuth2AccessToken accessToken = tokenServices.createAccessToken(authentication);

    Jwt tokenJwt = JwtHelper.decode(accessToken.getValue());
    SignatureVerifier verifier = KeyInfo.getKey(tokenJwt.getHeader().getKid()).getVerifier();
    tokenJwt.verifySignature(verifier);
    assertNotNull(tokenJwt);

    Jwt idToken = JwtHelper.decode(((CompositeAccessToken) accessToken).getIdTokenValue());
    idToken.verifySignature(verifier);
    return idToken;
}

From source file: org.apache.openjpa.meta.FieldMetaData.java

/**
 * Copy state from the given field to this one. Do not copy mapping
 * information.
 */
public void copy(FieldMetaData field) {
    super.copy(field);

    _intermediate = field.usesIntermediate();
    _implData = field.usesImplData();

    // copy field-level info; use get methods to force resolution of
    // lazy data
    _proxyClass = field.getProxyType();
    _initializer = field.getInitializer();
    _transient = field.isTransient();
    _nullValue = field.getNullValue();
    _manage = field.getManagement();
    _explicit = field.isExplicit();
    _extName = field.getExternalizer();
    _extMethod = DEFAULT_METHOD;
    _factName = field.getFactory();
    _factMethod = DEFAULT_METHOD;
    _extString = field.getExternalValues();
    _extValues = Collections.EMPTY_MAP;
    _fieldValues = Collections.EMPTY_MAP;
    _primKey = field.isPrimaryKey();
    _backingMember = field._backingMember;
    _enumField = field._enumField;
    _lobField = field._lobField;
    _serializableField = field._serializableField;
    _generated = field._generated;
    _mappedByIdValue = field._mappedByIdValue;
    _isElementCollection = field._isElementCollection;
    _access = field._access;
    _orderDec = field._orderDec;

    // embedded fields can't be versions
    if (_owner.getEmbeddingMetaData() == null && _version == null)
        _version = (field.isVersion()) ? Boolean.TRUE : Boolean.FALSE;

    // only copy this data if not already set explicitly in this instance
    if (_dfg == 0) {
        _dfg = (field.isInDefaultFetchGroup()) ? DFG_TRUE : DFG_FALSE;
        if (field.isDefaultFetchGroupExplicit())
            _dfg |= DFG_EXPLICIT;
    }
    if (_fgSet == null && field._fgSet != null)
        _fgSet = new HashSet(field._fgSet);
    if (_lfg == null)
        _lfg = field.getLoadFetchGroup();
    if (_lrs == null)
        _lrs = (field.isLRS()) ? Boolean.TRUE : Boolean.FALSE;
    if (_valStrategy == -1)
        _valStrategy = field.getValueStrategy();
    if (_upStrategy == -1)
        _upStrategy = field.getUpdateStrategy();
    if (ClassMetaData.DEFAULT_STRING.equals(_seqName)) {
        _seqName = field.getValueSequenceName();
        _seqMeta = null;
    }
    if (ClassMetaData.DEFAULT_STRING.equals(_inverse))
        _inverse = field.getInverse();

    // copy value metadata
    _val.copy(field);
    _key.copy(field.getKey());
    _elem.copy(field.getElement());
}
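
In FieldMetaData.copy above, _extValues and _fieldValues are reset to Collections.EMPTY_MAP rather than to fresh HashMaps: the shared immutable instance costs no allocation and signals "no values yet". A minimal sketch of that default-plus-lazy-initialization idiom (the class and member names below are hypothetical, not from OpenJPA):

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

class ExternalValues {
    // Shared immutable default: nothing is allocated until a value is stored.
    private Map<String, String> values = Collections.emptyMap();

    void put(String key, String value) {
        if (values.isEmpty()) {
            values = new HashMap<>(); // swap out the shared sentinel on first write
        }
        values.put(key, value);
    }

    Map<String, String> values() {
        return values; // callers must treat the result as read-only
    }
}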

From source file: msi.gaml.operators.Graphs.java

@operator(value = "layout", content_type = ITypeProvider.CONTENT_TYPE_AT_INDEX
        + 1, index_type = ITypeProvider.KEY_TYPE_AT_INDEX
                + 1, category = { IOperatorCategory.GRAPH }, concept = { IConcept.GRAPH })
@doc(value = "layouts a GAMA graph.", masterDoc = true)
// TODO desc
public static IGraph layoutOneshot(final IScope scope, final GamaGraph graph, final String layoutEngine,
        final int timeout, final GamaMap<String, Object> options) {

    // translate Gama options to
    Map<String, Object> jOptions = null;
    if (options.isEmpty()) {
        jOptions = Collections.EMPTY_MAP;
    } else {
        jOptions = new HashMap<>(options.size());
        for (final String key : options.keySet()) {
            jOptions.put(key, options.get(scope, key));
        }
    }
    AvailableGraphLayouts
            // retrieve the layout that was selected by the user
            // (may raise an exception)
            .getStaticLayout(scope, layoutEngine.trim().toLowerCase())
            // apply this layout with the options
            .doLayoutOneShot(scope, graph, timeout, jOptions);

    return graph;
}

From source file: org.apache.hadoop.hbase.security.access.AccessController.java

private AuthResult hasSomeAccess(RegionCoprocessorEnvironment e, String method, Action action)
        throws IOException {
    User requestUser = getActiveUser();
    TableName tableName = e.getRegion().getTableDesc().getTableName();
    AuthResult authResult = permissionGranted(method, requestUser, action, e, Collections.EMPTY_MAP);
    if (!authResult.isAllowed()) {
        for (UserPermission userPerm : AccessControlLists.getUserTablePermissions(regionEnv.getConfiguration(),
                tableName)) {
            for (Action userAction : userPerm.getActions()) {
                if (userAction.equals(action)) {
                    return AuthResult.allow(method, "Access allowed", requestUser, action, tableName, null,
                            null);
                }
            }
        }
    }
    return authResult;
}

From source file: com.perpetumobile.bit.orm.cassandra.CliMain.java

/**
 * Builds a map of the options provided by create/update keyspace commands.
 * @param options - tree representing options
 * @return Map - strategy_options map
 */
private Map<String, String> getStrategyOptionsFromTree(Tree options) {
    //Check for old [{}] syntax
    if (options.getText().equalsIgnoreCase("ARRAY")) {
        System.err.println("WARNING: [{}] strategy_options syntax is deprecated, please use {}");

        if (options.getChildCount() == 0)
            return Collections.EMPTY_MAP;

        return getStrategyOptionsFromTree(options.getChild(0));
    }

    // this map will be returned
    Map<String, String> strategyOptions = new HashMap<String, String>();

    // each child node is ^(PAIR $key $value)
    for (int j = 0; j < options.getChildCount(); j++) {
        Tree optionPair = options.getChild(j);

        // current $key
        String key = CliUtils.unescapeSQLString(optionPair.getChild(0).getText());
        // current $value
        String val = CliUtils.unescapeSQLString(optionPair.getChild(1).getText());

        strategyOptions.put(key, val);
    }

    return strategyOptions;
}

From source file: au.org.theark.core.dao.StudyDao.java

/**
 * This will get all the pheno data for the given subjects FOR THIS ONE CustomFieldGroup aka questionnaire (aka data set).
 *
 * @param allTheData
 * @param search
 * @param idsToInclude
 * @return the updated list of uids that are still left after the filtering. 
 */
private List<Long> applyPhenoDataSetFilters(DataExtractionVO allTheData, Search search,
        List<Long> idsToInclude) {

    Set<QueryFilter> filters = search.getQueryFilters();

    Collection<PhenoDataSetGroup> pdsgWithFilters = getPhenoDataSetGroupsForPhenoFilters(search, filters);
    List<Long> phenoCollectionIdsSoFar = new ArrayList<Long>();

    for (PhenoDataSetGroup phenoGroup : pdsgWithFilters) {
        log.info("Pheno group: " + phenoGroup.getName());
        if (idsToInclude != null && !idsToInclude.isEmpty()) {
            String queryToGetPhenoIdsForGivenSearchAndCFGFilters = getQueryForPhenoIdsForSearchAndCFGFilters(
                    search, phenoGroup);

            if (!queryToGetPhenoIdsForGivenSearchAndCFGFilters.isEmpty()) {
                Query query = getSession().createQuery(queryToGetPhenoIdsForGivenSearchAndCFGFilters);
                query.setParameterList("idList", idsToInclude);//TODO ASAP...this should be pheno list and not subjuid list now

                QueryTranslatorFactory translatorFactory = new ASTQueryTranslatorFactory();
                SessionFactoryImplementor factory = (SessionFactoryImplementor) getSession()
                        .getSessionFactory();
                QueryTranslator translator = translatorFactory.createQueryTranslator(query.getQueryString(),
                        query.getQueryString(), Collections.EMPTY_MAP, factory);
                translator.compile(Collections.EMPTY_MAP, false);
                log.info(translator.getSQLString());

                List<Long> phenosForThisCFG = query.list();
                phenoCollectionIdsSoFar.addAll(phenosForThisCFG);
                log.info("rows returned = " + phenoCollectionIdsSoFar.size());
            } else {
                log.info("there were no pheno custom data filters, therefore don't run filter query");
            }
        } else {
            log.info("there are no id's to filter.  therefore won't run filtering query");
        }
    }
    //now that we have all the phenoCollection IDs...get the updated list of subjects
    if (phenoCollectionIdsSoFar.isEmpty()) {
        if (!pdsgWithFilters.isEmpty()) {
            //filters were applied but returned no phenocollection ids, so no subjects remain
            idsToInclude = new ArrayList<Long>();
        } else {
            //there were no filters so just leave the list of subjects as it was
        }
    } else {
        idsToInclude = getSubjectIdsForPhenoDataIds(phenoCollectionIdsSoFar);
    }

    //now that we have the pheno collection id, we just find the data for the selected customfields

    if (!idsToInclude.isEmpty()) {
        Collection<PhenoDataSetFieldDisplay> customFieldToGet = getSelectedPhenoDataSetFieldDisplaysForSearch(
                search);//getSelectedPhenoCustomFieldDisplaysForSearch(search);
        // We have the list of phenos, and therefore the list of pheno custom data - now bring back all the custom data rows IF they have any data they need 
        if ((!phenoCollectionIdsSoFar.isEmpty()
                || (phenoCollectionIdsSoFar.isEmpty() && pdsgWithFilters.isEmpty()))
                && !customFieldToGet.isEmpty()) {
            String queryString = "select data from PhenoDataSetData data  "
                    + " left join fetch data.phenoDataSetCollection phenoDataSetCollection"
                    + " left join fetch data.phenoDataSetFieldDisplay phenoDataSetFieldDisplay "
                    + " left join fetch phenoDataSetFieldDisplay.phenoDataSetField phenoField "
                    + (((phenoCollectionIdsSoFar.isEmpty() && pdsgWithFilters.isEmpty())
                            ? (" where data.phenoDataSetCollection.linkSubjectStudy.id in (:idsToInclude) ")
                            : (" where data.phenoDataSetCollection.id in (:phenoIdsToInclude)")))
                    + " and data.phenoDataSetFieldDisplay in (:customFieldsList)"
                    + " order by data.phenoDataSetCollection.id";
            Query query2 = getSession().createQuery(queryString);
            if (phenoCollectionIdsSoFar.isEmpty() && pdsgWithFilters.isEmpty()) {
                query2.setParameterList("idsToInclude", idsToInclude);
            } else {
                query2.setParameterList("phenoIdsToInclude", phenoCollectionIdsSoFar);
            }
            query2.setParameterList("customFieldsList", customFieldToGet);

            QueryTranslatorFactory translatorFactory = new ASTQueryTranslatorFactory();
            SessionFactoryImplementor factory = (SessionFactoryImplementor) getSession().getSessionFactory();
            QueryTranslator translator = translatorFactory.createQueryTranslator(query2.getQueryString(),
                    query2.getQueryString(), Collections.EMPTY_MAP, factory);
            translator.compile(Collections.EMPTY_MAP, false);
            log.info(translator.getSQLString());
            List<PhenoDataSetData> phenoData = query2.list();

            HashMap<String, ExtractionVO> hashOfPhenosWithTheirPhenoCustomData = allTheData
                    .getPhenoCustomData();

            ExtractionVO valuesForThisPheno = new ExtractionVO();
            HashMap<String, String> map = null;
            Long previousPhenoId = null;
            //results are ordered by collection id, so we can compare each row to the previous one and either add to the current ExtractionVO or create a new one
            for (PhenoDataSetData data : phenoData) {

                if (previousPhenoId == null) {
                    map = new HashMap<String, String>();
                    previousPhenoId = data.getPhenoDataSetCollection().getId();
                    valuesForThisPheno.setSubjectUid(
                            data.getPhenoDataSetCollection().getLinkSubjectStudy().getSubjectUID());
                    valuesForThisPheno.setRecordDate(data.getPhenoDataSetCollection().getRecordDate());
                    valuesForThisPheno
                            .setCollectionName(data.getPhenoDataSetCollection().getQuestionnaire().getName());
                } else if (data.getPhenoDataSetCollection().getId().equals(previousPhenoId)) {
                    //then just put the data in
                } else { //if its a new LSS finalize previous map, etc
                    valuesForThisPheno.setKeyValues(map);
                    hashOfPhenosWithTheirPhenoCustomData.put(("" + previousPhenoId), valuesForThisPheno);
                    previousPhenoId = data.getPhenoDataSetCollection().getId();
                    map = new HashMap<String, String>();//reset
                    valuesForThisPheno = new ExtractionVO();
                    valuesForThisPheno.setSubjectUid(
                            data.getPhenoDataSetCollection().getLinkSubjectStudy().getSubjectUID());
                    valuesForThisPheno.setRecordDate(data.getPhenoDataSetCollection().getRecordDate());
                    valuesForThisPheno
                            .setCollectionName(data.getPhenoDataSetCollection().getQuestionnaire().getName());
                }

                //if any error value, then just use that - though, yet again I really question the acceptance of error data
                if (data.getErrorDataValue() != null && !data.getErrorDataValue().isEmpty()) {
                    map.put(data.getPhenoDataSetFieldDisplay().getPhenoDataSetField().getName(),
                            data.getErrorDataValue());
                } else {
                    // Determine field type and assign key value accordingly
                    if (data.getPhenoDataSetFieldDisplay().getPhenoDataSetField().getFieldType().getName()
                            .equalsIgnoreCase(Constants.FIELD_TYPE_DATE)) {
                        map.put(data.getPhenoDataSetFieldDisplay().getPhenoDataSetField().getName(),
                                data.getDateDataValue().toString());
                    }
                    if (data.getPhenoDataSetFieldDisplay().getPhenoDataSetField().getFieldType().getName()
                            .equalsIgnoreCase(Constants.FIELD_TYPE_NUMBER)) {
                        map.put(data.getPhenoDataSetFieldDisplay().getPhenoDataSetField().getName(),
                                data.getNumberDataValue().toString());
                    }
                    if (data.getPhenoDataSetFieldDisplay().getPhenoDataSetField().getFieldType().getName()
                            .equalsIgnoreCase(Constants.FIELD_TYPE_CHARACTER)) {
                        map.put(data.getPhenoDataSetFieldDisplay().getPhenoDataSetField().getName(),
                                data.getTextDataValue());
                    }
                }
            }

            //finalize the last entered key value sets/extraction VOs
            if (map != null && previousPhenoId != null) {
                valuesForThisPheno.setKeyValues(map);
                hashOfPhenosWithTheirPhenoCustomData.put("" + previousPhenoId, valuesForThisPheno);
            }

            //can now add these to the dataVO, even though later filters may still trim this list or parts of it
            allTheData.setPhenoCustomData(hashOfPhenosWithTheirPhenoCustomData);
        }

    }
    return idsToInclude;

}

From source file: com.confighub.core.store.Store.java

public Map<String, SystemConfig> getSystemConfig(final SystemConfig.ConfigGroup group) throws ConfigException {
    try {
        List<SystemConfig> list = em.createNamedQuery("SysConfig.byGroup").setLockMode(LockModeType.NONE)
                .setParameter("groupName", group).getResultList();

        Map<String, SystemConfig> map = new HashMap<>();
        list.forEach(e -> map.put(e.getKey(), e));

        return map;
    } catch (NoResultException e) {
        return Collections.EMPTY_MAP;
    } catch (Exception e) {
        handleException(e);
        return Collections.EMPTY_MAP;
    }
}
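
Returning Collections.EMPTY_MAP from the no-result and error paths, as getSystemConfig does above, spares callers a null check, but the shared instance is immutable, so callers must copy it before mutating. A sketch of the calling side (store is a hypothetical Store instance; group is a SystemConfig.ConfigGroup as in the signature above):

    Map<String, SystemConfig> config = store.getSystemConfig(group);
    // Reads behave the same whether the result is the shared empty instance or a populated map.
    config.forEach((key, value) -> System.out.println(key));
    // config.put(...) would throw UnsupportedOperationException when the empty path was taken;
    // copy into a new map first if mutation is needed:
    Map<String, SystemConfig> mutable = new HashMap<>(config);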

From source file: au.org.theark.core.dao.StudyDao.java

/**
 * Runs one large demographic query for the given subjects and adds the resulting person, LSS,
 * address, phone, otherID, twin and subject custom-field data to allTheData.
 *
 * @param allTheData
 * @param personFields
 * @param lssFields
 * @param addressFields
 * @param phoneFields
 * @param otherIDFields
 * @param subjectCFDs
 * @param search
 * @param idsAfterFiltering
 */
private void addDataFromMegaDemographicQuery(DataExtractionVO allTheData,
        Collection<DemographicField> personFields, Collection<DemographicField> lssFields,
        Collection<DemographicField> addressFields, Collection<DemographicField> phoneFields,
        Collection<DemographicField> otherIDFields, Collection<DemographicField> linkSubjectTwinsFields,
        Collection<CustomFieldDisplay> subjectCFDs, Search search, List<Long> idsAfterFiltering) {
    log.info("in addDataFromMegaDemographicQuery"); //if no id's, no need to run this
    if ((!lssFields.isEmpty() || !personFields.isEmpty() || !addressFields.isEmpty() || !phoneFields.isEmpty()
            || !linkSubjectTwinsFields.isEmpty() || !subjectCFDs.isEmpty()) && !idsAfterFiltering.isEmpty()) { // hasEmailFields(dfs)
        //note: filtering happened earlier; we fetch only after narrowing the subject list, to save a lot of processing
        String queryString = "select distinct lss " // , address, lss, email " +
                + " from LinkSubjectStudy lss "
                + ((!personFields.isEmpty()) ? " left join fetch lss.person person " : "")
                + ((!addressFields.isEmpty()) ? " left join lss.person.addresses a " : "")
                + ((!phoneFields.isEmpty()) ? " left join lss.person.phones p " : "")
                + ((!linkSubjectTwinsFields.isEmpty())
                        ? " left join lss.linkSubjectTwinsAsFirstSubject lstAsFirst  "
                        : "")
                + ((!linkSubjectTwinsFields.isEmpty())
                        ? " left join lss.linkSubjectTwinsAsSecondSubject lstAsSecond  "
                        : "")
                + " where lss.study.id = " + search.getStudy().getId() + " and lss.id in (:idsToInclude) "
                + " order by lss.subjectUID";

        Query query = getSession().createQuery(queryString);
        query.setParameterList("idsToInclude", idsAfterFiltering);
        List<LinkSubjectStudy> subjects = query.list();

        QueryTranslatorFactory translatorFactory = new ASTQueryTranslatorFactory();
        SessionFactoryImplementor factory = (SessionFactoryImplementor) getSession().getSessionFactory();
        QueryTranslator translator = translatorFactory.createQueryTranslator(query.getQueryString(),
                query.getQueryString(), Collections.EMPTY_MAP, factory);
        translator.compile(Collections.EMPTY_MAP, false);
        log.info(translator.getSQLString());

        // DataExtractionVO devo; = new DataExtractionVO();
        HashMap<String, ExtractionVO> hashOfSubjectsWithTheirDemographicData = allTheData.getDemographicData();

        /* this is putting the data we extracted into a generic kind of VO that will be converted to an appropriate format later (such as csv/xls/pdf/xml/etc) */
        for (LinkSubjectStudy lss : subjects) {
            ExtractionVO sev = new ExtractionVO();
            sev.setKeyValues(constructKeyValueHashmap(lss, personFields, lssFields, addressFields, phoneFields,
                    otherIDFields, linkSubjectTwinsFields));
            hashOfSubjectsWithTheirDemographicData.put(lss.getSubjectUID(), sev);
        }

    }
}
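
The two StudyDao examples above use Collections.EMPTY_MAP twice in Hibernate's classic translator API: as the map of enabled filters passed to createQueryTranslator, and as the map of token replacements passed to compile. The idiom in isolation, for logging the SQL that Hibernate generates for an HQL string (a sketch against the same Hibernate 4.x-era API used above; session is assumed to be an open Hibernate Session):

    String hql = "select lss from LinkSubjectStudy lss where lss.id in (:idsToInclude)";
    QueryTranslatorFactory translatorFactory = new ASTQueryTranslatorFactory();
    SessionFactoryImplementor factory = (SessionFactoryImplementor) session.getSessionFactory();
    QueryTranslator translator = translatorFactory.createQueryTranslator(
            hql,                   // query identifier (any stable label)
            hql,                   // the HQL to translate
            Collections.EMPTY_MAP, // no enabled filters
            factory);
    translator.compile(Collections.EMPTY_MAP, false); // no token replacements, full (non-shallow) compile
    String sql = translator.getSQLString();           // the generated SQL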