Example usage for java.util HashSet toArray

List of usage examples for java.util HashSet toArray

Introduction

On this page you can find example usage of java.util.HashSet#toArray.

Prototype

<T> T[] toArray(T[] a);

Source Link

Document

Returns an array containing all of the elements in this set; the runtime type of the returned array is that of the specified array.

Usage

From source file:org.lexevs.dao.database.sqlimplementedmethods.SQLImplementedMethodsDao.java

/**
 * Builds the coded entry./*www.  j a  v a  2 s . co m*/
 * 
 * @param internalCodingSchemeName the internal coding scheme name
 * @param internalVersionString the internal version string
 * @param code the code
 * @param namespace the namespace
 * @param restrictToProperties the restrict to properties
 * @param restrictToPropertyTypes the restrict to property types
 * 
 * @return the entity
 * 
 * @throws UnexpectedInternalError the unexpected internal error
 * @throws MissingResourceException the missing resource exception
 */
public Entity buildCodedEntry(String internalCodingSchemeName, String internalVersionString, String code,
        String namespace, LocalNameList restrictToProperties, PropertyType[] restrictToPropertyTypes)
        throws UnexpectedInternalError, MissingResourceException {

    try {
        Entity concept = new Entity();
        concept.setEntityCode(code);

        SQLInterface si = resourceManager.getSQLInterface(internalCodingSchemeName, internalVersionString);

        //if the namespace is null (and its 2009 model), set it to the default (which is
        //equal to the codingSchemeName.
        //This shouldn't ever happen -- all classes that call this method should provide
        //a namespace.
        if (si.supports2009Model() && StringUtils.isBlank(namespace)) {
            namespace = internalCodingSchemeName;
        }

        ArrayList<Definition> definitions = new ArrayList<Definition>();
        ArrayList<Presentation> presentations = new ArrayList<Presentation>();
        ArrayList<Property> properties = new ArrayList<Property>();
        ArrayList<Comment> comments = new ArrayList<Comment>();

        ArrayList<PropertyLink> links = new ArrayList<PropertyLink>();

        PreparedStatement getEntityCode = null;
        PreparedStatement getEntityType = null;
        PreparedStatement getEntityProperties = null;
        PreparedStatement getPropertyLinks = null;

        try {
            StringBuffer buildEntity = new StringBuffer();

            buildEntity
                    .append("Select * " + " from " + si.getTableName(SQLTableConstants.ENTITY) + " {AS} t1 ");

            if (si.supports2009Model()) {
                buildEntity.append("left join " + si.getTableName(SQLTableConstants.ENTRY_STATE) + " {AS} t2 "
                        + "on t1." + SQLTableConstants.TBLCOL_ENTRYSTATEID + " = t2."
                        + SQLTableConstants.TBLCOL_ENTRYSTATEID);
            }

            buildEntity.append(" where " + si.getSQLTableConstants().codingSchemeNameOrId + " = ? AND "
                    + si.getSQLTableConstants().entityCodeOrId + " = ?");

            if (si.supports2009Model()) {
                buildEntity.append(" AND " + SQLTableConstants.TBLCOL_ENTITYCODENAMESPACE + " = ?");
            }

            getEntityCode = si.modifyAndCheckOutPreparedStatement(buildEntity.toString());

            getEntityCode.setString(1, internalCodingSchemeName);
            getEntityCode.setString(2, code);
            if (si.supports2009Model()) {
                getEntityCode.setString(3, namespace);
            }

            ResultSet results = getEntityCode.executeQuery();

            // one and only one result
            if (results.next()) {
                concept.setIsDefined(
                        DBUtility.getBooleanFromResultSet(results, SQLTableConstants.TBLCOL_ISDEFINED));
                concept.setIsAnonymous(
                        DBUtility.getBooleanFromResultSet(results, SQLTableConstants.TBLCOL_ISANONYMOUS));
                concept.setIsActive(
                        DBUtility.getBooleanFromResultSet(results, SQLTableConstants.TBLCOL_ISACTIVE));

                if (!si.supports2009Model()) {
                    concept.setStatus(results.getString(SQLTableConstants.TBLCOL_CONCEPTSTATUS));
                } else {
                    concept.setEntityCodeNamespace(namespace);
                }

                EntityDescription ed = new EntityDescription();
                ed.setContent(results.getString(SQLTableConstants.TBLCOL_ENTITYDESCRIPTION));
                concept.setEntityDescription(ed);

                if (si.supports2009Model()) {
                    String owner = results.getString(SQLTableConstants.TBLCOL_OWNER);
                    String status = results.getString(SQLTableConstants.TBLCOL_STATUS);
                    Timestamp effectiveDate = results.getTimestamp(SQLTableConstants.TBLCOL_EFFECTIVEDATE);
                    Timestamp expirationDate = results.getTimestamp(SQLTableConstants.TBLCOL_EXPIRATIONDATE);
                    String revisionId = results.getString(SQLTableConstants.TBLCOL_REVISIONID);
                    String prevRevisionId = results.getString(SQLTableConstants.TBLCOL_PREVREVISIONID);
                    String changeType = results.getString(SQLTableConstants.TBLCOL_CHANGETYPE);
                    String relativeOrder = results.getString(SQLTableConstants.TBLCOL_RELATIVEORDER);

                    EntryState es = new EntryState();

                    if (!StringUtils.isBlank(changeType)) {
                        es.setChangeType(org.LexGrid.versions.types.ChangeType.valueOf(changeType));
                    }
                    es.setContainingRevision(revisionId);
                    es.setPrevRevision(prevRevisionId);

                    es.setRelativeOrder(computeRelativeOrder(relativeOrder));

                    concept.setEntryState(es);

                    if (owner != null) {
                        concept.setOwner(owner);
                    }
                    concept.setStatus(status);
                    concept.setEffectiveDate(effectiveDate);
                    concept.setExpirationDate(expirationDate);
                }
            }

            results.close();
            si.checkInPreparedStatement(getEntityCode);

            if (si.supports2009Model()) {
                getEntityType = si.checkOutPreparedStatement(
                        "Select * " + " from " + si.getTableName(SQLTableConstants.ENTITY_TYPE) + " where "
                                + si.getSQLTableConstants().codingSchemeNameOrId + " = ? AND "
                                + si.getSQLTableConstants().entityCodeOrId + " = ? AND "
                                + SQLTableConstants.TBLCOL_ENTITYCODENAMESPACE + " = ?");

                getEntityType.setString(1, internalCodingSchemeName);
                getEntityType.setString(2, code);
                getEntityType.setString(3, namespace);

                results = getEntityType.executeQuery();
                while (results.next()) {
                    concept.addEntityType(results.getString(SQLTableConstants.TBLCOL_ENTITYTYPE));
                }

                results.close();
                si.checkInPreparedStatement(getEntityType);
            } else {
                concept.addEntityType(SQLTableConstants.ENTITYTYPE_CONCEPT);
            }

            // populate the property links
            String addWhereSegment = (!si.supports2009Model()
                    ? (si.getSQLTableConstants().entityType + " = '" + SQLTableConstants.ENTITYTYPE_CONCEPT
                            + "' and ")
                    : "");

            getPropertyLinks = si
                    .checkOutPreparedStatement("Select " + SQLTableConstants.TBLCOL_SOURCEPROPERTYID + ", "
                            + SQLTableConstants.TBLCOL_LINK + ", " + SQLTableConstants.TBLCOL_TARGETPROPERTYID
                            + " from " + si.getTableName(SQLTableConstants.ENTITY_PROPERTY_LINKS) + " where "
                            + addWhereSegment + si.getSQLTableConstants().entityCodeOrEntityId + " = ? and "
                            + si.getSQLTableConstants().codingSchemeNameOrId + " = ?");
            getPropertyLinks.setString(1, code);
            getPropertyLinks.setString(2, internalCodingSchemeName);

            results = getPropertyLinks.executeQuery();

            while (results.next()) {
                String sourcePropertyId = results.getString(SQLTableConstants.TBLCOL_SOURCEPROPERTYID);
                String link = results.getString(SQLTableConstants.TBLCOL_LINK);
                String targetPropertyId = results.getString(SQLTableConstants.TBLCOL_TARGETPROPERTYID);

                PropertyLink pl = new PropertyLink();
                pl.setPropertyLink(link);
                pl.setSourceProperty(sourcePropertyId);
                pl.setTargetProperty(targetPropertyId);
                links.add(pl);
            }
            results.close();
            si.checkInPreparedStatement(getPropertyLinks);

            // codedEntry.setModVersion(null);

            StringBuffer propertyQuery = new StringBuffer();

            // I'm constructing a left join query to get the property
            // results I need from 3 (or 2 in 1.5 table version) different
            // tables at once, rather than doing a query on each.

            propertyQuery.append("SELECT a." + SQLTableConstants.TBLCOL_PROPERTYID + ", a."
                    + SQLTableConstants.TBLCOL_PROPERTYNAME + ", a." + SQLTableConstants.TBLCOL_LANGUAGE
                    + ", a." + SQLTableConstants.TBLCOL_FORMAT + ", a." + SQLTableConstants.TBLCOL_ISPREFERRED
                    + ", a." + SQLTableConstants.TBLCOL_DEGREEOFFIDELITY + ", a."
                    + SQLTableConstants.TBLCOL_MATCHIFNOCONTEXT + ", a."
                    + SQLTableConstants.TBLCOL_REPRESENTATIONALFORM + ", a."
                    + SQLTableConstants.TBLCOL_PROPERTYVALUE + ", a." + SQLTableConstants.TBLCOL_PROPERTYTYPE
                    + (si.supports2009Model() ? (", a." + SQLTableConstants.TBLCOL_ENTRYSTATEID) : "")
                    + (si.supports2009Model() ? ", es.*" : "") + ", b." + SQLTableConstants.TBLCOL_TYPENAME
                    + ", b." + SQLTableConstants.TBLCOL_ATTRIBUTEVALUE + ", b." + SQLTableConstants.TBLCOL_VAL1
                    + ", b." + SQLTableConstants.TBLCOL_VAL2);

            propertyQuery.append(" FROM ");

            String codingSchemeName = si.getSQLTableConstants().codingSchemeNameOrId;
            String concptCode = si.getSQLTableConstants().entityCodeOrEntityId;

            propertyQuery.append(si.getTableName(SQLTableConstants.ENTITY_PROPERTY) + " {AS} a ");
            propertyQuery.append(
                    " left join " + si.getTableName(SQLTableConstants.ENTITY_PROPERTY_MULTI_ATTRIBUTES));
            propertyQuery.append(" {AS} b on a." + codingSchemeName + " = b." + codingSchemeName + " and a."
                    + concptCode + " = b." + concptCode + " and a." + SQLTableConstants.TBLCOL_PROPERTYID
                    + " = b." + SQLTableConstants.TBLCOL_PROPERTYID);

            if (si.supports2009Model()) {
                propertyQuery
                        .append(" left join " + si.getTableName(SQLTableConstants.ENTRY_STATE) + " {AS} es ");
                propertyQuery.append("on a." + SQLTableConstants.TBLCOL_ENTRYSTATEID);
                propertyQuery.append(" = es." + SQLTableConstants.TBLCOL_ENTRYSTATEID);
            }

            propertyQuery.append(" where a." + concptCode + " = ? " + "and a." + codingSchemeName + " = ?");
            if (si.supports2009Model()) {
                propertyQuery.append(" and a." + SQLTableConstants.TBLCOL_ENTITYCODENAMESPACE + " = ?");
            }

            if (restrictToProperties != null && restrictToProperties.getEntryCount() > 0) {
                propertyQuery.append(" AND (");
                for (int i = 0; i < restrictToProperties.getEntryCount(); i++) {
                    propertyQuery.append("  " + si.getSQLTableConstants().propertyOrPropertyName + " = ? ");
                    if (i + 1 < restrictToProperties.getEntryCount()) {
                        propertyQuery.append(" OR ");
                    }
                }
                propertyQuery.append(")");

            }

            if (restrictToPropertyTypes != null && restrictToPropertyTypes.length > 0) {
                propertyQuery.append(" AND (");

                for (int i = 0; i < restrictToPropertyTypes.length; i++) {
                    propertyQuery.append(" " + SQLTableConstants.TBLCOL_PROPERTYTYPE + " = ? ");
                    if (i + 1 < restrictToPropertyTypes.length) {
                        propertyQuery.append(" OR ");
                    }
                }
                propertyQuery.append(")");

            }

            getEntityProperties = si.modifyAndCheckOutPreparedStatement(propertyQuery.toString());

            int i = 1;
            getEntityProperties.setString(i++, code);
            getEntityProperties.setString(i++, internalCodingSchemeName);
            if (si.supports2009Model()) {
                getEntityProperties.setString(i++, namespace);
            }

            if (restrictToProperties != null && restrictToProperties.getEntryCount() > 0) {
                for (int j = 0; j < restrictToProperties.getEntryCount(); j++) {
                    getEntityProperties.setString(i++, restrictToProperties.getEntry(j));
                }
            }
            if (restrictToPropertyTypes != null && restrictToPropertyTypes.length > 0) {
                for (int j = 0; j < restrictToPropertyTypes.length; j++) {
                    String pts = DaoUtility.propertyTypeToStringMap.get(restrictToPropertyTypes[j]);
                    getEntityProperties.setString(i++, pts);
                }
            }

            results = getEntityProperties.executeQuery();

            // store the property from the last row
            org.LexGrid.commonTypes.Property newProperty = null;

            // all of the fields that come from the Property table
            String propertyType, property, propertyValue, language, presentationFormat, degreeOfFidelity,
                    propertyId, representationalForm;
            Boolean matchIfNoContext, isPreferred;

            // holders for attributes, qualifiers
            Hashtable<String, Source> sources = null;
            HashSet<String> usageContexts = null;
            Hashtable<String, PropertyQualifier> propertyQualifiers = null;

            // As I process the result rows, I will get back duplicates of
            // the property information
            // if the property has more than one qualifer and/or source ,
            // etc.

            while (results.next()) {
                propertyId = results.getString(SQLTableConstants.TBLCOL_PROPERTYID);

                if (newProperty == null || !propertyId.equals(newProperty.getPropertyId())) {
                    // not equal means we have started a new property
                    property = results.getString(si.getSQLTableConstants().propertyOrPropertyName);
                    propertyType = results.getString(SQLTableConstants.TBLCOL_PROPERTYTYPE);
                    propertyValue = results.getString(SQLTableConstants.TBLCOL_PROPERTYVALUE);
                    language = results.getString(SQLTableConstants.TBLCOL_LANGUAGE);
                    presentationFormat = results
                            .getString(si.getSQLTableConstants().formatOrPresentationFormat);
                    degreeOfFidelity = results.getString(SQLTableConstants.TBLCOL_DEGREEOFFIDELITY);
                    representationalForm = results.getString(SQLTableConstants.TBLCOL_REPRESENTATIONALFORM);
                    matchIfNoContext = DBUtility.getBooleanFromResultSet(results,
                            SQLTableConstants.TBLCOL_MATCHIFNOCONTEXT);
                    isPreferred = DBUtility.getBooleanFromResultSet(results,
                            SQLTableConstants.TBLCOL_ISPREFERRED);

                    // add all of the collected sources, usage contexts, and
                    // qualifiers to
                    // the previous property
                    if (newProperty != null) {
                        newProperty.setSource(sources.values().toArray(new Source[sources.size()]));
                        newProperty.setUsageContext(usageContexts.toArray(new String[usageContexts.size()]));
                        if (!propertyQualifiers.isEmpty())
                            newProperty.setPropertyQualifier(propertyQualifiers.values()
                                    .toArray(new PropertyQualifier[propertyQualifiers.size()]));
                    }

                    // we are starting a new property, so clear out the old
                    // holders.
                    sources = new Hashtable<String, Source>();
                    usageContexts = new HashSet<String>();
                    propertyQualifiers = new Hashtable<String, PropertyQualifier>();

                    // process the property portion of the result
                    if (propertyType.equals(SQLTableConstants.TBLCOLVAL_DEFINITION)) {
                        Definition def = new Definition();
                        def.setIsPreferred(isPreferred);
                        def.setLanguage(language);
                        def.setPropertyName(property);
                        def.setPropertyId(propertyId);
                        Text text = new Text();
                        text.setContent(propertyValue);
                        text.setDataType(presentationFormat);
                        def.setValue(text);
                        definitions.add(def);
                        newProperty = def;
                    } else if (propertyType.equals(SQLTableConstants.TBLCOLVAL_PRESENTATION)) {
                        Presentation presentation = new Presentation();
                        presentation.setIsPreferred(isPreferred);
                        presentation.setLanguage(language);
                        presentation.setPropertyName(property);
                        presentation.setPropertyId(propertyId);
                        Text text = new Text();
                        text.setContent(propertyValue);
                        text.setDataType(presentationFormat);
                        presentation.setValue(text);
                        presentation.setDegreeOfFidelity(degreeOfFidelity);
                        presentation.setMatchIfNoContext(matchIfNoContext);
                        presentation.setRepresentationalForm(representationalForm);

                        presentations.add(presentation);
                        newProperty = presentation;
                    } else if (propertyType.equals(SQLTableConstants.TBLCOLVAL_COMMENT)) {
                        Comment comment = new Comment();
                        comment.setLanguage(language);
                        comment.setPropertyName(property);
                        comment.setPropertyId(propertyId);
                        Text text = new Text();
                        text.setContent(propertyValue);
                        text.setDataType(presentationFormat);
                        comment.setValue(text);
                        comments.add(comment);
                        newProperty = comment;
                    } else {
                        Property theProperty = new Property();
                        theProperty.setLanguage(language);
                        theProperty.setPropertyName(property);
                        theProperty.setPropertyId(propertyId);
                        Text text = new Text();
                        text.setContent(propertyValue);
                        text.setDataType(presentationFormat);
                        theProperty.setValue(text);
                        properties.add(theProperty);
                        newProperty = theProperty;
                    }

                    newProperty.setPropertyType(propertyType);

                    if (si.supports2009Model()) {

                        String owner = results.getString(SQLTableConstants.TBLCOL_OWNER);
                        String status = results.getString(SQLTableConstants.TBLCOL_STATUS);
                        Timestamp effectiveDate = results.getTimestamp(SQLTableConstants.TBLCOL_EFFECTIVEDATE);
                        Timestamp expirationDate = results
                                .getTimestamp(SQLTableConstants.TBLCOL_EXPIRATIONDATE);
                        String revisionId = results.getString(SQLTableConstants.TBLCOL_REVISIONID);
                        String prevRevisionId = results.getString(SQLTableConstants.TBLCOL_PREVREVISIONID);
                        String changeType = results.getString(SQLTableConstants.TBLCOL_CHANGETYPE);
                        String relativeOrder = results.getString(SQLTableConstants.TBLCOL_RELATIVEORDER);

                        if (revisionId != null) {
                            EntryState es = new EntryState();
                            if (!StringUtils.isBlank(changeType)) {
                                es.setChangeType(org.LexGrid.versions.types.ChangeType.valueOf(changeType));
                            }
                            es.setContainingRevision(revisionId);
                            es.setPrevRevision(prevRevisionId);
                            es.setRelativeOrder(computeRelativeOrder(relativeOrder));

                            newProperty.setEntryState(es);
                        }

                        if (owner != null) {
                            newProperty.setOwner(owner);
                        }

                        if (status != null)
                            newProperty.setStatus(status);
                        if (effectiveDate != null)
                            newProperty.setEffectiveDate(effectiveDate);
                        if (expirationDate != null)
                            newProperty.setExpirationDate(expirationDate);
                    }
                }

                String type = null;
                String value = null;
                String val1 = null;
                String val2 = null;

                // collect values from the multiAttributes table
                type = results.getString(SQLTableConstants.TBLCOL_TYPENAME);
                value = results.getString(SQLTableConstants.TBLCOL_ATTRIBUTEVALUE);
                val1 = results.getString(SQLTableConstants.TBLCOL_VAL1);
                if (StringUtils.isBlank(val1))
                    val1 = null;
                val2 = results.getString(SQLTableConstants.TBLCOL_VAL2);
                if (StringUtils.isBlank(val2))
                    val2 = null;

                // hashsets to remove dupes (table doesn't allow dupes, but
                // left join will create some)
                if (type != null) {
                    if (type.equalsIgnoreCase(SQLTableConstants.TBLCOLVAL_SOURCE)) {
                        if (!sources.containsKey(createUniqueKeyForSource(value, val1))) {
                            Source s = new Source();
                            s.setContent(value);
                            s.setRole(val2);
                            s.setSubRef(val1);
                            sources.put(createUniqueKeyForSource(value, val1), s);
                        }
                    } else if (type.equalsIgnoreCase(SQLTableConstants.TBLCOLVAL_USAGECONTEXT)) {
                        usageContexts.add(value);
                    } else if (type.equalsIgnoreCase(SQLTableConstants.TBLCOLVAL_QUALIFIER)) {
                        // nulls are a side affect of left join
                        if (!propertyQualifiers.containsKey(val1 + ":" + value)) {
                            PropertyQualifier pq = new PropertyQualifier();
                            Text txt = new Text();
                            txt.setContent(val1);
                            pq.setValue(txt);
                            pq.setPropertyQualifierName(value);
                            propertyQualifiers.put(val1 + ":" + value, pq);
                        }
                    } else {
                        getLogger().warn("There is invalid data in the 'typeName' column in the table "
                                + si.getTableName(SQLTableConstants.ENTITY_PROPERTY_MULTI_ATTRIBUTES)
                                + " for the concept code: " + code + " propertyId: " + propertyId
                                + " codingSchemeName: " + internalCodingSchemeName);
                    }
                }
            }

            // add all of the collected sources, usage contexts, and
            // qualifiers to
            // the previous property before exiting ...
            if (newProperty != null) {
                newProperty.setSource(sources.values().toArray(new Source[sources.size()]));
                newProperty.setUsageContext(usageContexts.toArray(new String[usageContexts.size()]));
                if (!propertyQualifiers.isEmpty())
                    newProperty.setPropertyQualifier(propertyQualifiers.values()
                            .toArray(new PropertyQualifier[propertyQualifiers.size()]));
            }
            results.close();
        } finally {
            si.checkInPreparedStatement(getEntityCode);
            si.checkInPreparedStatement(getEntityProperties);
            si.checkInPreparedStatement(getPropertyLinks);
        }

        concept.setComment(comments.toArray(new Comment[comments.size()]));
        concept.setDefinition(definitions.toArray(new Definition[definitions.size()]));
        concept.setPropertyLink(links.toArray(new PropertyLink[links.size()]));
        concept.setPresentation(presentations.toArray(new Presentation[presentations.size()]));
        concept.setProperty(properties.toArray(new Property[properties.size()]));
        return concept;
    } catch (MissingResourceException e) {
        throw e;
    } catch (Exception e) {
        throw new UnexpectedInternalError("There was an unexpected internal error.", e);
    }
}

From source file:com.vmware.identity.idm.server.config.directory.DirectoryConfigStore.java

@Override
public void setAuthnTypes(String tenantName, boolean password, boolean windows, boolean certificate,
        boolean rsaSecureID) throws Exception {
    // Gather the flag constant for each enabled authentication type.
    HashSet<Integer> enabledFlags = new HashSet<Integer>();
    if (password) {
        enabledFlags.add(DirectoryConfigStore.FLAG_AUTHN_TYPE_ALLOW_PASSWORD);
    }
    if (windows) {
        enabledFlags.add(DirectoryConfigStore.FLAG_AUTHN_TYPE_ALLOW_WINDOWS);
    }
    if (certificate) {
        enabledFlags.add(DirectoryConfigStore.FLAG_AUTHN_TYPE_ALLOW_TLS_CERTIFICATE);
    }
    if (rsaSecureID) {
        enabledFlags.add(DirectoryConfigStore.FLAG_AUTHN_TYPE_ALLOW_RSA_SECUREID);
    }

    // Record an explicit NONE flag when nothing is enabled, so that "no authn
    // types selected" can be distinguished from the legacy schema where the
    // AuthnTypes attribute is absent altogether.
    if (enabledFlags.isEmpty()) {
        enabledFlags.add(DirectoryConfigStore.FLAG_AUTHN_TYPE_ALLOW_NONE);
    }

    Integer[] boxedFlags = enabledFlags.toArray(new Integer[enabledFlags.size()]);
    int[] flagValues = ArrayUtils.toPrimitive(boxedFlags);
    this.setTenantProperty(tenantName, TenantLdapObject.PROPERTY_AUTHN_TYPES,
            ServerUtils.getLdapValue(flagValues));
}

From source file:com.karura.framework.plugins.utils.ContactAccessorSdk5.java

/**
 * This method takes the fields required and search options in order to produce an array of contacts that matches
 * the criteria provided./*from  ww  w  .ja  v a 2 s  . c om*/
 * 
 * @param fields
 *            an array of items to be used as search criteria
 * @param options
 *            that can be applied to contact searching
 * @return an array of contacts
 */
@Override
public JSONArray search(JSONArray fields, String where, int startIndex, int limit) {
    // Get the find options
    String searchTerm = null;

    if (TextUtils.isEmpty(where)) {
        searchTerm = "%";
    } else {
        searchTerm = "%" + where + "%";
    }

    if (BuildConfig.DEBUG) {
        Log.d(LOG_TAG, "Search Term = " + searchTerm);
        Log.d(LOG_TAG, "Field Length = " + fields.length());
        Log.d(LOG_TAG, "Fields = " + fields.toString());
    }

    // Loop through the fields the user provided to see what data should be
    // returned.
    HashMap<String, Boolean> populate = buildPopulationSet(fields);

    // Build the ugly where clause and where arguments for one big query.
    WhereOptions whereOptions = buildWhereClause(fields, searchTerm, startIndex, limit);

    // Get all the id's where the search term matches the fields passed in.
    Cursor idCursor = getContext().getContentResolver().query(ContactsContract.Data.CONTENT_URI,
            new String[] { ContactsContract.Data.CONTACT_ID }, whereOptions.getWhere(),
            whereOptions.getWhereArgs(), ContactsContract.Data.CONTACT_ID + " ASC " + whereOptions.getLimit());

    // Create a set of unique ids
    Set<String> contactIds = new HashSet<String>();
    int idColumn = -1;
    while (idCursor.moveToNext()) {
        if (idColumn < 0) {
            idColumn = idCursor.getColumnIndex(ContactsContract.Data.CONTACT_ID);
        }
        contactIds.add(idCursor.getString(idColumn));
    }
    idCursor.close();

    // Build a query that only looks at ids
    WhereOptions idOptions = buildIdClause(contactIds, searchTerm);

    // Determine which columns we should be fetching.
    HashSet<String> columnsToFetch = new HashSet<String>();
    columnsToFetch.add(ContactsContract.Data.CONTACT_ID);
    columnsToFetch.add(ContactsContract.Data.RAW_CONTACT_ID);
    columnsToFetch.add(ContactsContract.Data.MIMETYPE);

    if (isRequired(DISPLAY_NAME, populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.DISPLAY_NAME);
    }
    if (isRequired(NAME, populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.FAMILY_NAME);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.GIVEN_NAME);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.MIDDLE_NAME);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.PREFIX);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.SUFFIX);
    }
    if (isRequired(PHONE_NUMBER, populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Phone._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Phone.NUMBER);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Phone.TYPE);
    }
    if (isRequired(EMAIL, populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Email._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Email.DATA);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Email.TYPE);
    }
    if (isRequired(FORMATTED_ADDRESS, populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization.TYPE);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.FORMATTED_ADDRESS);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.STREET);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.CITY);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.REGION);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.POSTCODE);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.COUNTRY);
    }
    if (isRequired(ORGANIZATION, populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization.TYPE);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization.DEPARTMENT);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization.COMPANY);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization.TITLE);
    }
    if (isRequired(IMS, populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Im._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Im.DATA);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Im.TYPE);
    }
    if (isRequired(NOTE, populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Note.NOTE);
    }
    if (isRequired(URLS, populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Website._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Website.URL);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Website.TYPE);
    }
    if (isRequired(BIRTHDAY, populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Event.START_DATE);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Event.TYPE);
    }
    if (isRequired(PHOTO, populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Photo._ID);
    }

    // Do the id query
    Cursor c = getContext().getContentResolver().query(ContactsContract.Data.CONTENT_URI,
            columnsToFetch.toArray(new String[] {}), idOptions.getWhere(), idOptions.getWhereArgs(),
            ContactsContract.Data.CONTACT_ID + " ASC");

    JSONArray contacts = populateContactArray(populate, c);
    return contacts;
}

From source file:org.apache.cordova.ContactAccessorSdk5.java

/**
 * This method takes the fields required and search options in order to produce an
 * array of contacts that matches the criteria provided.
 * @param fields an array of items to be used as search criteria
 * @param options that can be applied to contact searching
 * @return an array of contacts/*from   w  w w.  j a  v a  2s.c o  m*/
 */
@Override
public JSONArray search(JSONArray fields, JSONObject options) {
    // Extract the find options: the LIKE filter, whether multiple results
    // are wanted, and the corresponding result limit.
    String searchTerm = "";
    int limit = Integer.MAX_VALUE;
    boolean multiple = true;

    if (options != null) {
        searchTerm = options.optString("filter");
        if (searchTerm.length() == 0) {
            // No filter provided: match every contact.
            searchTerm = "%";
        } else {
            // Wrap the filter in SQL LIKE wildcards for substring matching.
            searchTerm = "%" + searchTerm + "%";
        }

        try {
            multiple = options.getBoolean("multiple");
            if (!multiple) {
                limit = 1;
            }
        } catch (JSONException e) {
            // "multiple" was not specified so we assume the default is true.
        }
    } else {
        searchTerm = "%";
    }

    // Loop through the fields the user provided to see what data should be returned.
    HashMap<String, Boolean> populate = buildPopulationSet(fields);

    // Build the where clause and where arguments for the initial id query.
    WhereOptions whereOptions = buildWhereClause(fields, searchTerm);

    // Get all the ids where the search term matches the fields passed in.
    Cursor idCursor = mApp.getActivity().getContentResolver().query(ContactsContract.Data.CONTENT_URI,
            new String[] { ContactsContract.Data.CONTACT_ID }, whereOptions.getWhere(),
            whereOptions.getWhereArgs(), ContactsContract.Data.CONTACT_ID + " ASC");

    // Collect the unique contact ids. ContentResolver.query may return null
    // (e.g. provider unavailable), in which case the id set stays empty.
    Set<String> contactIds = new HashSet<String>();
    if (idCursor != null) {
        try {
            int idColumn = -1;
            while (idCursor.moveToNext()) {
                if (idColumn < 0) {
                    idColumn = idCursor.getColumnIndex(ContactsContract.Data.CONTACT_ID);
                }
                contactIds.add(idCursor.getString(idColumn));
            }
        } finally {
            // Always release the cursor, even if reading it throws.
            idCursor.close();
        }
    }

    // Build a query that only looks at the matching ids.
    WhereOptions idOptions = buildIdClause(contactIds, searchTerm);

    // Determine which columns we should be fetching. A set avoids duplicates,
    // since several data kinds alias the same underlying data columns.
    HashSet<String> columnsToFetch = new HashSet<String>();
    columnsToFetch.add(ContactsContract.Data.CONTACT_ID);
    columnsToFetch.add(ContactsContract.Data.RAW_CONTACT_ID);
    columnsToFetch.add(ContactsContract.Data.MIMETYPE);

    if (isRequired("displayName", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.DISPLAY_NAME);
    }
    if (isRequired("name", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.FAMILY_NAME);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.GIVEN_NAME);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.MIDDLE_NAME);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.PREFIX);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.SUFFIX);
    }
    if (isRequired("phoneNumbers", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Phone._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Phone.NUMBER);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Phone.TYPE);
    }
    if (isRequired("emails", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Email._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Email.DATA);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Email.TYPE);
    }
    if (isRequired("addresses", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal._ID);
        // Use the StructuredPostal TYPE constant here. The original code used
        // Organization.TYPE, which aliases the same "data2" column but
        // misstated the intent.
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.TYPE);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.FORMATTED_ADDRESS);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.STREET);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.CITY);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.REGION);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.POSTCODE);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.COUNTRY);
    }
    if (isRequired("organizations", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization.TYPE);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization.DEPARTMENT);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization.COMPANY);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization.TITLE);
    }
    if (isRequired("ims", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Im._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Im.DATA);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Im.TYPE);
    }
    if (isRequired("note", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Note.NOTE);
    }
    if (isRequired("nickname", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Nickname.NAME);
    }
    if (isRequired("urls", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Website._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Website.URL);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Website.TYPE);
    }
    if (isRequired("birthday", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Event.START_DATE);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Event.TYPE);
    }
    if (isRequired("photos", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Photo._ID);
    }

    // Run the data query restricted to the matching ids.
    Cursor c = mApp.getActivity().getContentResolver().query(ContactsContract.Data.CONTENT_URI,
            columnsToFetch.toArray(new String[0]), idOptions.getWhere(), idOptions.getWhereArgs(),
            ContactsContract.Data.CONTACT_ID + " ASC");

    // Convert the raw cursor rows into the JSON contact array.
    JSONArray contacts = populateContactArray(limit, populate, c);
    return contacts;
}

From source file:com.remobile.contacts.ContactAccessorSdk5.java

/**
 * This method takes the fields required and search options in order to produce an
 * array of contacts that matches the criteria provided.
 * @param fields an array of items to be used as search criteria
 * @param options that can be applied to contact searching
 * @return an array of contacts matching the search criteria
 */
@Override
public JSONArray search(JSONArray fields, JSONObject options) {
    // Extract the find options: the LIKE filter, whether multiple results
    // are wanted, and the corresponding result limit.
    String searchTerm = "";
    int limit = Integer.MAX_VALUE;
    boolean multiple = true;

    if (options != null) {
        searchTerm = options.optString("filter");
        if (searchTerm.length() == 0) {
            // No filter provided: match every contact.
            searchTerm = "%";
        } else {
            // Wrap the filter in SQL LIKE wildcards for substring matching.
            searchTerm = "%" + searchTerm + "%";
        }

        try {
            multiple = options.getBoolean("multiple");
            if (!multiple) {
                limit = 1;
            }
        } catch (JSONException e) {
            // "multiple" was not specified so we assume the default is true.
        }
    } else {
        searchTerm = "%";
    }

    // Loop through the fields the user provided to see what data should be returned.
    HashMap<String, Boolean> populate = buildPopulationSet(options);

    // Build the where clause and where arguments for the initial id query.
    WhereOptions whereOptions = buildWhereClause(fields, searchTerm);

    // Get all the ids where the search term matches the fields passed in.
    Cursor idCursor = mApp.getActivity().getContentResolver().query(ContactsContract.Data.CONTENT_URI,
            new String[] { ContactsContract.Data.CONTACT_ID }, whereOptions.getWhere(),
            whereOptions.getWhereArgs(), ContactsContract.Data.CONTACT_ID + " ASC");

    // Collect the unique contact ids. ContentResolver.query may return null
    // (e.g. provider unavailable), in which case the id set stays empty.
    Set<String> contactIds = new HashSet<String>();
    if (idCursor != null) {
        try {
            int idColumn = -1;
            while (idCursor.moveToNext()) {
                if (idColumn < 0) {
                    idColumn = idCursor.getColumnIndex(ContactsContract.Data.CONTACT_ID);
                }
                contactIds.add(idCursor.getString(idColumn));
            }
        } finally {
            // Always release the cursor, even if reading it throws.
            idCursor.close();
        }
    }

    // Build a query that only looks at the matching ids.
    WhereOptions idOptions = buildIdClause(contactIds, searchTerm);

    // Determine which columns we should be fetching. A set avoids duplicates,
    // since several data kinds alias the same underlying data columns.
    HashSet<String> columnsToFetch = new HashSet<String>();
    columnsToFetch.add(ContactsContract.Data.CONTACT_ID);
    columnsToFetch.add(ContactsContract.Data.RAW_CONTACT_ID);
    columnsToFetch.add(ContactsContract.Data.MIMETYPE);

    if (isRequired("displayName", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.DISPLAY_NAME);
    }
    if (isRequired("name", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.FAMILY_NAME);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.GIVEN_NAME);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.MIDDLE_NAME);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.PREFIX);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredName.SUFFIX);
    }
    if (isRequired("phoneNumbers", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Phone._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Phone.NUMBER);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Phone.TYPE);
    }
    if (isRequired("emails", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Email._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Email.DATA);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Email.TYPE);
    }
    if (isRequired("addresses", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal._ID);
        // Use the StructuredPostal TYPE constant here. The original code used
        // Organization.TYPE, which aliases the same "data2" column but
        // misstated the intent.
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.TYPE);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.FORMATTED_ADDRESS);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.STREET);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.CITY);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.REGION);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.POSTCODE);
        columnsToFetch.add(ContactsContract.CommonDataKinds.StructuredPostal.COUNTRY);
    }
    if (isRequired("organizations", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization.TYPE);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization.DEPARTMENT);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization.COMPANY);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Organization.TITLE);
    }
    if (isRequired("ims", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Im._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Im.DATA);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Im.TYPE);
    }
    if (isRequired("note", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Note.NOTE);
    }
    if (isRequired("nickname", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Nickname.NAME);
    }
    if (isRequired("urls", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Website._ID);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Website.URL);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Website.TYPE);
    }
    if (isRequired("birthday", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Event.START_DATE);
        columnsToFetch.add(ContactsContract.CommonDataKinds.Event.TYPE);
    }
    if (isRequired("photos", populate)) {
        columnsToFetch.add(ContactsContract.CommonDataKinds.Photo._ID);
    }

    // Run the data query restricted to the matching ids.
    Cursor c = mApp.getActivity().getContentResolver().query(ContactsContract.Data.CONTENT_URI,
            columnsToFetch.toArray(new String[0]), idOptions.getWhere(), idOptions.getWhereArgs(),
            ContactsContract.Data.CONTACT_ID + " ASC");

    // Convert the raw cursor rows into the JSON contact array.
    JSONArray contacts = populateContactArray(limit, populate, c);
    return contacts;
}

From source file:skewtune.mapreduce.STJobTracker.java

/**
 * SkewTune heartbeat protocol.
 *
 * REQUEST (Heartbeat)
 *
 * HOST TaskAttemptID -- status report (initialization|mapoutput|completed)
 * progress [splitted] TaskAttemptID (initialization|mapoutput|completed)
 * progress [splitted] ...
 *
 * RESPONSE
 *
 * TaskAttemptID (keep going | new map output [] | cancel )
 *
 * .skewtune/m-0000?/part-m-XXXXX ...
 *
 * The protocol is soft-state. The jobtracker responds to each heartbeat with the
 * tasks to cancel and the list of jobs in the heartbeat message that are unknown
 * to it. The task tracker is expected to reclaim space occupied by unknown jobs.
 */

@Override
public synchronized HeartbeatResponse heartbeat(TaskTrackerStatus status, boolean justStarted,
        boolean justInited, short responseId) throws IOException, InterruptedException {
    if (LOG.isDebugEnabled() && dumpHeartbeat) {
        LOG.debug("Got heartbeat from: " + status.getTrackerName() + " with responseId: " + responseId);
    }

    String trackerName = status.getTrackerName();
    long now = System.currentTimeMillis();

    // The response id is a sequence number for the soft-state protocol;
    // acknowledge this heartbeat by echoing the next expected value.
    short newResponseId = (short) (responseId + 1);
    status.setLastSeen(now);

    // Record the latest status and HTTP port reported by this tracker.
    trackerToLastHeartbeat.put(trackerName, status);
    trackerToHttpPort.put(trackerName, status.getHttpPort());

    // Accumulators for the response: jobs unknown to this jobtracker (the
    // tracker should reclaim their space), newly available reactive map
    // output, per-task actions, and newly taken-over tasks.
    HashSet<JobID> unknownJobs = new HashSet<JobID>();
    ArrayList<ReactiveMapOutput> newMapOutput = new ArrayList<ReactiveMapOutput>();
    //        ArrayList<TaskAttemptID> cancelledTasks = new ArrayList<TaskAttemptID>();
    ArrayList<TaskAction> taskActions = new ArrayList<TaskAction>();
    ArrayList<TaskStatusEvent> newTakeOver = new ArrayList<TaskStatusEvent>();

    // per job -- processing

    // FIXME retrieve task tracker
    // FIXME for each job, update task status, build host-task map
    for (JobOnTaskTracker jobReport : status.getJobReports()) {
        JobID jobid = jobReport.getJobID();
        JobInProgress jip = null;
        boolean pendingReactive = false;
        // Look the job up under the jobs lock; hold it only for the lookup.
        synchronized (jobs) {
            jip = jobs.get(jobid);
        }

        // Not an active job -- it may be a reactive job whose completion is
        // still pending (waiting for its map output indexes).
        if (jip == null) {
            synchronized (pendingCompletedReactiveJob) {
                jip = pendingCompletedReactiveJob.get(jobid);
            }
            pendingReactive = jip != null;
        }

        if (jip == null) {
            // FIXME check the pending completion list
            unknownJobs.add(jobid); // this job must be cleared
        } else {
            // Indexes from which the tracker wants new map output / take-over
            // events; negative means the tracker is not interested.
            int from = jobReport.getFromIndex();
            int fromTakeOver = jobReport.getFromIndexOfTakeOver();
            final JobType jobType = jip.getJobType();
            BitSet completed = new BitSet(jip.getNumMapTasks());

            synchronized (jip) {
                // load job token into this node
                if (jobType == JobType.ORIGINAL || jobType == JobType.REDUCE_REACTIVE) {
                    scheduleJobTokenLoading(jip); // we only need to load it for original job
                    // FIXME we need to load it for other job if we support recursive split
                }

                // update statistics of this task
                for (STTaskStatus taskStatus : jobReport.getTaskReports()) {
                    int action = jip.handleTaskHeartbeat(taskStatus, status.getHostName(), completed);
                    if (action != 0) {
                        // Non-zero action code: instruct the tracker what to
                        // do with this task (e.g. cancel).
                        taskActions.add(new TaskAction(taskStatus.getTaskID(), action));
                    }
                    //                        if ( jip.handleTaskHeartbeat(taskStatus,status.getHostName(),completed) != 0) {
                    //                            cancelledTasks.add(taskStatus.getTaskID());
                    // FIXME create task action
                    //                        }
                }
                // fetch all available new map output from FROM
                if (from >= 0) {
                    jip.retrieveNewMapOutput(newMapOutput, from);
                }
                if (fromTakeOver >= 0) {
                    jip.retrieveNewTakeOver(newTakeOver, fromTakeOver);
                }

                // A pending reactive map job can be finalized once all of its
                // map output indexes have become available.
                if (jobType == JobType.MAP_REACTIVE && pendingReactive) {
                    if (jip.isAllMapOutputIndexAvailable()) {
                        synchronized (pendingCompletedReactiveJob) {
                            pendingCompletedReactiveJob.remove(jobid);
                        }
                        cleanupPendingReactiveMap(jip);
                    }
                }
            }

            //                if ( jobType == JobType.ORIGINAL ) {
            //                    jip.notifyMapCompletion(completed);
            //                }
        }
    }

    int nextInterval = getNextHeartbeatInterval();

    // Package everything gathered above into the heartbeat response.
    return new HeartbeatResponse(newResponseId, nextInterval,
            newMapOutput.toArray(new ReactiveMapOutput[newMapOutput.size()]),
            //                cancelledTasks.toArray(new TaskAttemptID[cancelledTasks.size()]),
            taskActions.toArray(new TaskAction[taskActions.size()]),
            unknownJobs.toArray(new JobID[unknownJobs.size()]),
            newTakeOver.toArray(new TaskStatusEvent[newTakeOver.size()]));
}

From source file:com.couchbase.lite.store.SQLiteViewStore.java

/**
 * Updates the indexes of one or more views in parallel.
 *
 * @param inputViews An array of ViewStore instances, always including the receiver.
 * @return Status OK if updated or NOT_MODIFIED if already up-to-date.
 * @throws CouchbaseLiteException if the index update fails.
 */
@Override
@InterfaceAudience.Private
public Status updateIndexes(List<ViewStore> inputViews) throws CouchbaseLiteException {
    Log.v(Log.TAG_VIEW, "Re-indexing view: %s", name);
    if (getViewID() <= 0) {
        String msg = "getViewID() < 0";
        throw new CouchbaseLiteException(msg, new Status(Status.NOT_FOUND));
    }

    // The entire re-index runs inside one transaction; it is committed only
    // if `success` is set to true before the finally block runs.
    store.beginTransaction();
    boolean success = false;
    Cursor cursor = null;
    try {
        // If the view the update is for doesn't need any update, don't do anything:
        final long dbMaxSequence = store.getLastSequence();
        final long forViewLastSequence = getLastSequenceIndexed();
        if (forViewLastSequence >= dbMaxSequence) {
            success = true;
            return new Status(Status.NOT_MODIFIED);
        }

        // Check whether we need to update at all,
        // and remove obsolete emitted results from the 'maps' table:
        long minLastSequence = dbMaxSequence;
        final long[] viewLastSequence = new long[inputViews.size()];
        int deletedCount = 0;
        int i = 0;
        // docTypes collects the documentType filters declared by the views;
        // allDocTypes becomes true if any view has no filter (so every doc
        // must be scanned).
        final HashSet<String> docTypes = new HashSet<String>();
        HashMap<String, String> viewDocTypes = null;
        boolean allDocTypes = false;
        final HashMap<Integer, Integer> viewTotalRows = new HashMap<Integer, Integer>();
        final ArrayList<SQLiteViewStore> views = new ArrayList<SQLiteViewStore>();
        final ArrayList<Mapper> mapBlocks = new ArrayList<Mapper>();

        // First pass over the views: validate them, record their last-indexed
        // sequences, and purge map rows made obsolete by newer revisions.
        for (ViewStore v : inputViews) {
            assert (v != null);
            SQLiteViewStore view = (SQLiteViewStore) v;
            ViewStoreDelegate delegate = view.getDelegate();
            Mapper map = delegate != null ? delegate.getMap() : null;
            if (map == null) {
                // A view without a map block can be skipped -- unless it is
                // the receiver itself, which is an error.
                if (view == this) {
                    String msg = String.format(Locale.ENGLISH,
                            "Cannot index view %s: " + "no map block registered", view.getName());
                    Log.e(Log.TAG_VIEW, msg);
                    throw new CouchbaseLiteException(msg, new Status(Status.BAD_REQUEST));
                }
                Log.v(Log.TAG_VIEW, "    %s has no map block; skipping it", view.getName());
                continue;
            }

            views.add(view);
            mapBlocks.add(map);

            int viewID = view.getViewID();
            if (viewID <= 0) {
                String message = String.format(Locale.ENGLISH, "View '%s' not found in database",
                        view.getName());
                Log.e(Log.TAG_VIEW, message);
                throw new CouchbaseLiteException(message, new Status(Status.NOT_FOUND));
            }

            int totalRows = view.getTotalRows();
            viewTotalRows.put(viewID, totalRows);

            long last = view == this ? forViewLastSequence : view.getLastSequenceIndexed();
            viewLastSequence[i++] = last;
            if (last < 0) {
                String msg = String.format(Locale.ENGLISH, "last < 0 (%d)", last);
                throw new CouchbaseLiteException(msg, new Status(Status.INTERNAL_SERVER_ERROR));
            } else if (last < dbMaxSequence) {
                if (last == 0)
                    view.createIndex();
                // Track the oldest last-indexed sequence across all views; the
                // revision scan below starts from there.
                minLastSequence = Math.min(minLastSequence, last);
                Log.v(Log.TAG_VIEW, "    %s last indexed at #%d", view.getName(), last);

                String docType = delegate.getDocumentType();
                if (docType != null) {
                    docTypes.add(docType);
                    if (viewDocTypes == null)
                        viewDocTypes = new HashMap<String, String>();
                    viewDocTypes.put(view.getName(), docType);
                } else {
                    allDocTypes = true;
                }

                int changes = 0;
                if (last == 0) {
                    // Fresh index: drop any stale map rows wholesale.
                    changes = store.getStorageEngine().delete(view.queryString("maps_#"), null, null);
                } else {
                    store.optimizeSQLIndexes();
                    // Delete map rows whose source revision has been
                    // superseded since the last indexing.
                    String[] args = { Long.toString(last), Long.toString(last) };
                    changes = store.getStorageEngine().delete(view.queryString("maps_#"),
                            "sequence IN (SELECT parent FROM revs "
                                    + "WHERE sequence>? AND +parent>0 AND +parent<=?)",
                            args);
                }

                // Update #deleted rows:
                deletedCount += changes;

                // Only count these deletes as changes if this isn't a view reset to 0
                if (last != 0) {
                    int newTotalRows = viewTotalRows.get(viewID) - changes;
                    viewTotalRows.put(viewID, newTotalRows);
                }
            }
        }

        if (minLastSequence == dbMaxSequence) {
            Log.v(Log.TAG_VIEW, "minLastSequence (%d) == dbMaxSequence (%d), nothing to do", minLastSequence,
                    dbMaxSequence);
            success = true;
            return new Status(Status.NOT_MODIFIED);
        }

        Log.v(Log.TAG_VIEW, "Updating indexes of (%s) from #%d to #%d ...", viewNames(views), minLastSequence,
                dbMaxSequence);

        // This is the emit() block, which gets called from within the user-defined map() block
        // that's called down below.
        final AtomicInteger insertedCount = new AtomicInteger(0);
        AbstractMapEmitBlock emitBlock = new AbstractMapEmitBlock() {
            @Override
            public void emit(Object key, Object value) {
                if (key == null) {
                    Log.w(Log.TAG_VIEW, "emit() called with nil key; ignoring");
                    return;
                }
                try {
                    curView.emit(key, value, this.sequence); // emit block's sequence
                    int curViewID = curView.getViewID();
                    viewTotalRows.put(curViewID, viewTotalRows.get(curViewID) + 1);
                } catch (Exception e) {
                    Log.e(Log.TAG_VIEW, "Error emitting", e);
                    throw new RuntimeException(e);
                }
            }
        };

        // Now scan every revision added since the last time the view was indexed:

        // NOTE: Below is original Query. In case query result uses a lot of memory,
        //       Android SQLiteDatabase causes null value column. Then it causes the missing
        //       _index data because following logic skip result if column is null.
        //       To avoid the issue, retrieving json field is isolated from original query.
        //       Because json field could be large, maximum size is 2MB.
        // StringBuffer sql = new StringBuffer( "SELECT revs.doc_id, sequence, docid, revid,
        // json, no_attachments, deleted FROM revs, docs WHERE sequence>? AND current!=0 ");

        // Only fetch doc_type when more than one type filter is involved (or a
        // mix of filtered and unfiltered views).
        boolean checkDocTypes = docTypes.size() > 1 || (allDocTypes && docTypes.size() > 0);
        StringBuilder sql = new StringBuilder(
                "SELECT revs.doc_id, sequence, docid, revid, no_attachments, deleted ");
        if (checkDocTypes)
            sql.append(", doc_type ");
        sql.append("FROM revs, docs WHERE sequence>? AND current!=0 ");
        if (minLastSequence == 0) {
            sql.append("AND deleted=0 ");
        }
        if (!allDocTypes && docTypes.size() > 0) {
            String docTypesString = getJoinedSQLQuotedStrings(docTypes.toArray(new String[docTypes.size()]));
            sql.append("AND doc_type IN (").append(docTypesString).append(") ");
        }
        // order result by deleted ASC so if multiple revs returned the non deleted are the first
        // NOTE: Views broken with concurrent update and delete
        // https://github.com/couchbase/couchbase-lite-java-core/issues/952
        sql.append("AND revs.doc_id = docs.doc_id ORDER BY revs.doc_id, deleted ASC, revid DESC");
        String[] selectArgs = { Long.toString(minLastSequence) };
        cursor = store.getStorageEngine().rawQuery(sql.toString(), selectArgs);

        boolean keepGoing = cursor.moveToNext(); // Go to first result row
        while (keepGoing) {
            // NOTE: skip row if 1st column is null
            // https://github.com/couchbase/couchbase-lite-java-core/issues/497
            if (cursor.isNull(0)) {
                keepGoing = cursor.moveToNext();
                continue;
            }

            long docID = cursor.getLong(0);

            // Reconstitute the document as a dictionary:
            long sequence = cursor.getLong(1);
            String docId = cursor.getString(2);
            if (docId.startsWith("_design/")) { // design docs don't get indexed!
                keepGoing = cursor.moveToNext();
                continue;
            }
            String revID = cursor.getString(3);
            boolean deleted = cursor.getInt(5) > 0;
            String docType = checkDocTypes ? cursor.getString(6) : null;

            // Skip rows with the same doc_id -- these are losing conflicts.
            // NOTE: Or Skip rows if 1st column is null
            // https://github.com/couchbase/couchbase-lite-java-core/issues/497
            ArrayList<String> conflicts = null;
            boolean isNull;
            while ((keepGoing = cursor.moveToNext())
                    && ((isNull = cursor.isNull(0)) || cursor.getLong(0) == docID)) {
                if (isNull)
                    continue;
                if (!deleted) {
                    if (conflicts == null)
                        conflicts = new ArrayList<String>();
                    conflicts.add(cursor.getString(3));
                }
            }

            long realSequence = sequence; // because sequence may be changed, below
            if (minLastSequence > 0) {
                // Find conflicts with documents from previous indexings.
                Cursor cursor2 = null;
                try {
                    String[] selectArgs2 = { Long.toString(docID), Long.toString(minLastSequence) };
                    cursor2 = store.getStorageEngine()
                            .rawQuery("SELECT revid, sequence FROM revs "
                                    + "WHERE doc_id=? AND sequence<=? AND current!=0 AND deleted=0 "
                                    + "ORDER BY revID DESC ", selectArgs2);

                    if (cursor2.moveToNext()) {
                        String oldRevID = cursor2.getString(0);
                        // This is the revision that used to be the 'winner'.
                        // Remove its emitted rows:
                        long oldSequence = cursor2.getLong(1);
                        String[] args = { Long.toString(oldSequence) };
                        for (SQLiteViewStore view : views) {
                            int changes = view.store.getStorageEngine().delete(view.queryString("maps_#"),
                                    "sequence=?", args);
                            deletedCount += changes;
                            int thisViewID = view.getViewID();
                            int newTotalRows = viewTotalRows.get(thisViewID) - changes;
                            viewTotalRows.put(thisViewID, newTotalRows);
                        }

                        String conflictRevID = oldRevID;
                        if (deleted || RevisionInternal.CBLCompareRevIDs(oldRevID, revID) > 0) {
                            // It still 'wins' the conflict, so it's the one that
                            // should be mapped [again], not the current revision!
                            conflictRevID = revID;
                            revID = oldRevID;
                            deleted = false;
                            sequence = oldSequence;
                        }

                        if (!deleted) {
                            // Conflict revisions:
                            if (conflicts == null)
                                conflicts = new ArrayList<String>();
                            conflicts.add(conflictRevID);
                            while (cursor2.moveToNext()) {
                                conflicts.add(cursor2.getString(0));
                            }
                        }
                    }
                } finally {
                    if (cursor2 != null) {
                        cursor2.close();
                    }
                }
            }

            if (deleted)
                continue;

            // Get json blob:
            String[] selectArgs3 = { Long.toString(sequence) };
            byte[] json = SQLiteUtils.byteArrayResultForQuery(store.getStorageEngine(),
                    "SELECT json FROM revs WHERE sequence=?", selectArgs3);

            // Get the document properties, to pass to the map function:
            Map<String, Object> curDoc = store.documentPropertiesFromJSON(json, docId, revID, false, sequence);

            if (curDoc == null) {
                Log.w(Log.TAG_VIEW, "Failed to parse JSON of doc %s rev %s", docID, revID);
                continue;
            }
            curDoc.put("_local_seq", sequence);

            if (conflicts != null)
                curDoc.put("_conflicts", conflicts);

            // Call the user-defined map() to emit new key/value pairs from this revision:
            i = -1;
            for (SQLiteViewStore view : views) {
                curView = view;
                ++i;
                // Only re-map for views that have not already indexed this
                // revision's sequence.
                if (viewLastSequence[i] < realSequence) {
                    if (checkDocTypes) {
                        String viewDocType = viewDocTypes.get(view.getName());
                        if (viewDocType != null && !viewDocType.equals(docType))
                            continue; // skip; view's documentType doesn't match this doc
                    }
                    Log.v(Log.TAG_VIEW, "#%d: map '%s' for view %s...", sequence, docID, view.getName());
                    try {
                        emitBlock.setSequence(sequence);
                        mapBlocks.get(i).map(curDoc, emitBlock);
                    } catch (Throwable e) {
                        String msg = String.format(Locale.ENGLISH, "Error when calling map block of view '%s'",
                                view.getName());
                        Log.e(Log.TAG_VIEW, msg, e);
                        throw new CouchbaseLiteException(msg, e, new Status(Status.CALLBACK_ERROR));
                    }
                }
            }
        }

        // Finally, record the last revision sequence number that was indexed and update #rows:
        for (SQLiteViewStore view : views) {
            view.finishCreatingIndex();
            int newTotalRows = viewTotalRows.get(view.getViewID());
            ContentValues updateValues = new ContentValues();
            updateValues.put("lastSequence", dbMaxSequence);
            updateValues.put("total_docs", newTotalRows);
            String[] whereArgs = { Integer.toString(view.getViewID()) };
            store.getStorageEngine().update("views", updateValues, "view_id=?", whereArgs);
        }
        Log.v(Log.TAG_VIEW, "...Finished re-indexing (%s) to #%d (deleted %d, added %d)", viewNames(views),
                dbMaxSequence, deletedCount, insertedCount.intValue());

        success = true;
        return new Status(Status.OK);
    } catch (SQLException ex) {
        throw new CouchbaseLiteException(ex, new Status(Status.DB_ERROR));
    } finally {
        // Reset shared state and close resources; the transaction commits
        // only if success was set above.
        curView = null;
        if (cursor != null)
            cursor.close();
        if (store != null)
            store.endTransaction(success);
    }
}

From source file:gov.noaa.pfel.coastwatch.Projects.java

/**
 * Given a whole or partial datasets.xml file, this extracts all of the unique sourceUrls,
 * then tallies the total number of datasets and the number of unaggregated datasets
 * (i.e., ferret-aggregated) per domain.
 * The ferret-aggregated dataset sourceUrls are converted to their presumed original URL,
 * for easier comparison.
 * This only finds sourceUrl if it is on one line by itself.
 *
 * @param datasetsXmlFileName
 */
/**
 * Given a whole or partial datasets.xml file, extracts all of the unique sourceUrls
 * (only when a sourceUrl element is on one line by itself), then tallies the total
 * number of datasets and the number of unaggregated (ferret-aggregated) datasets
 * per domain. Ferret-aggregated sourceUrls are converted to their presumed original
 * URL for easier comparison. Results are written via String2.log.
 *
 * @param datasetsXmlFileName the name of the datasets.xml file to scan
 * @throws Exception if the file can't be opened or read
 */
public static void tallyUafAggregations(String datasetsXmlFileName) throws Exception {

    // pairs: [ferret aggregation URL prefix, presumed original URL prefix]
    String[] fromTo = {
            //alternate ferret url and sourceUrl
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/20thC_ReanV2/",
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/20thC_ReanV2/",
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/NARR", //several variants 
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/NARR",
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/cpc_us_hour_precip/",
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/cpc_us_hour_precip/",
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/cpc_us_precip/",
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/cpc_us_precip/",
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/cru/",
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/cru/",
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/godas/",
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/godas/",
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/gpcc/",
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/gpcc/",
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/interp_OLR/",
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/interp_OLR/",
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/msu/",
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/msu/",
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/ncep.reanalysis.derived/",
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/ncep.reanalysis.derived/",
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/ncep.reanalysis2.dailyavgs/",
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/ncep.reanalysis2.dailyavgs/",
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/ncep.reanalysis2/",
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/ncep.reanalysis2/",
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/noaa.ersst/",
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/noaa.ersst/",
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/snowcover/",
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/snowcover/",
            "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/udel.airt.precip/",
            "https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/udel.airt.precip/" };

    final String startTag = "<sourceUrl>";
    final String endTag = "</sourceUrl>";
    final String aggSuffix = "_aggregation";

    //extract unique sourceUrls (HashSets automatically avoid duplicates)
    HashSet<String> ferretAggregations = new HashSet<>();
    HashSet<String> ferretAggregationSources = new HashSet<>();
    HashSet<String> others = new HashSet<>();
    // try-with-resources guarantees the stream is closed even if a read throws
    // (the original closed it only on the happy path); datasets.xml is XML, so
    // read it as UTF-8 rather than the platform default charset
    try (BufferedReader in = new BufferedReader(new InputStreamReader(
            new FileInputStream(datasetsXmlFileName), java.nio.charset.StandardCharsets.UTF_8))) {
        String source;
        while ((source = in.readLine()) != null) {
            source = source.trim();
            //only a <sourceUrl>...</sourceUrl> element alone on one line is recognized
            if (source.startsWith(startTag) && source.endsWith(endTag)) {
                source = source.substring(startTag.length(), source.length() - endTag.length());
                String unferret = null;
                if (source.startsWith("http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/")) {
                    //convert the ferret URL prefix to the presumed original URL prefix
                    for (int ft = 0; ft < fromTo.length; ft += 2) {
                        if (source.startsWith(fromTo[ft])) {
                            unferret = fromTo[ft + 1] + source.substring(fromTo[ft].length());
                            break;
                        }
                    }
                }
                if (unferret == null) {
                    others.add(source);
                } else {
                    if (unferret.endsWith(aggSuffix)) {
                        ferretAggregations.add(source);
                        //strip the trailing "_aggregation" to recover the presumed source URL
                        ferretAggregationSources
                                .add(unferret.substring(0, unferret.length() - aggSuffix.length()));
                    } else {
                        others.add(unferret);
                    }
                }
            }
        }
    }

    //log each group sorted, and tally the domains
    String2.log("***** ferretAggregations");
    String sar[] = ferretAggregations.toArray(new String[0]);
    Arrays.sort(sar);
    String2.log(String2.toNewlineString(sar));

    String2.log("***** ferretAggregationSources");
    sar = ferretAggregationSources.toArray(new String[0]);
    Arrays.sort(sar);
    String2.log(String2.toNewlineString(sar));
    Tally tally = new Tally();
    for (int i = 0; i < sar.length; i++)
        tally.add("ferretAggregationSources", File2.getProtocolDomain(sar[i]));

    String2.log("\n***** others");
    sar = others.toArray(new String[0]);
    Arrays.sort(sar);
    String2.log(String2.toNewlineString(sar));
    for (int i = 0; i < sar.length; i++)
        tally.add("otherDatasets", File2.getProtocolDomain(sar[i]));

    String2.log("\n***** Tally Info");
    String2.log(tally.toString());

}

From source file:org.sakaiproject.evaluation.logic.EvalEvaluationSetupServiceImpl.java

/**
 * Special method which skips the evaluation state checks,
 * this is mostly needed to allow us to force an update at any time <br/>
 * Synchronizes all the user assignments with the assigned groups for this evaluation
 * <br/> Always run as an admin for permissions handling
 *
 * @param evaluation the evaluation to do assignment updates for
 * @param evalGroupId (OPTIONAL) the internal group id of an eval group,
 * this will cause the synchronize to only affect the assignments related to this group
 * @param removeAllowed if true then will remove assignments as well, otherwise only adds
 * @return the list of {@link EvalAssignUser} ids changed during the synchronization (created, updated, deleted),
 * NOTE: deleted {@link EvalAssignUser} will not be able to be retrieved
 */
public List<Long> synchronizeUserAssignmentsForced(EvalEvaluation evaluation, String evalGroupId,
        boolean removeAllowed) {
    Long evaluationId = evaluation.getId();
    // Determine who to attribute the assignment changes to: anonymous/invalid/unknown
    // users are replaced with the admin user so permissions handling always succeeds.
    String currentUserId = commonLogic.getCurrentUserId();
    if (currentUserId == null) {
        currentUserId = commonLogic.getAdminUserId();
    } else {
        // check anon and use admin instead
        EvalUser user = commonLogic.getEvalUserById(currentUserId);
        if (EvalUser.USER_TYPE_ANONYMOUS.equals(user.type) || EvalUser.USER_TYPE_INVALID.equals(user.type)
                || EvalUser.USER_TYPE_UNKNOWN.equals(user.type)) {
            currentUserId = commonLogic.getAdminUserId();
        }
    }
    // ids of all assignments created/updated/deleted by this sync (the return value)
    ArrayList<Long> changedUserAssignments = new ArrayList<>();
    // now the syncing logic
    // accumulators: assignment ids to delete, and new assignments to persist
    HashSet<Long> assignUserToRemove = new HashSet<>();
    HashSet<EvalAssignUser> assignUserToSave = new HashSet<>();
    // get all user assignments for this evaluation (and possibly limit by group)
    String[] limitGroupIds = null;
    if (evalGroupId != null) {
        limitGroupIds = new String[] { evalGroupId };
    }
    // all users assigned to this eval (and group if specified)
    List<EvalAssignUser> assignedUsers = evaluationService.getParticipantsForEval(evaluationId, null,
            limitGroupIds, null, EvalEvaluationService.STATUS_ANY, null, null);
    // Partition the existing assignments:
    // keys of all assignments which are unlinked or removed
    HashSet<String> assignUserUnlinkedRemovedKeys = new HashSet<>();
    // all assignments which are linked (groupId => assignments)
    HashMap<String, List<EvalAssignUser>> groupIdLinkedAssignedUsersMap = new HashMap<>();
    for (EvalAssignUser evalAssignUser : assignedUsers) {
        if (EvalAssignUser.STATUS_UNLINKED.equals(evalAssignUser.getStatus())
                || EvalAssignUser.STATUS_REMOVED.equals(evalAssignUser.getStatus())) {
            // remember these keys so the sync never overwrites a manual unlink/removal
            String key = makeEvalAssignUserKey(evalAssignUser, false, false);
            assignUserUnlinkedRemovedKeys.add(key);
        }
        String egid = evalAssignUser.getEvalGroupId();
        if (egid != null) {
            if (EvalAssignUser.STATUS_LINKED.equals(evalAssignUser.getStatus())) {
                List<EvalAssignUser> l = groupIdLinkedAssignedUsersMap.get(egid);
                if (l == null) {
                    l = new ArrayList<>();
                    groupIdLinkedAssignedUsersMap.put(egid, l);
                }
                l.add(evalAssignUser);
            }
        }
    }
    // Resolve which assign groups to process (all for the eval, or just the one given)
    List<EvalAssignGroup> assignedGroups;
    if (evalGroupId == null) {
        // get all the assigned groups for this evaluation
        Map<Long, List<EvalAssignGroup>> m = evaluationService
                .getAssignGroupsForEvals(new Long[] { evaluationId }, true, null);
        assignedGroups = m.get(evaluationId);
    } else {
        // only dealing with a single assign group (or possibly none if invalid)
        assignedGroups = new ArrayList<>();
        EvalAssignGroup assignGroup = evaluationService.getAssignGroupByEvalAndGroupId(evaluationId,
                evalGroupId);
        if (assignGroup != null) {
            assignedGroups.add(assignGroup);
        }
    }
    // iterate through all assigned groups (may have been limited to one only)
    Set<String> evalGroupIdsFromEvals = new HashSet<>(assignedGroups.size());
    for (EvalAssignGroup evalAssignGroup : assignedGroups) {
        Long assignGroupId = evalAssignGroup.getId();
        String egid = evalAssignGroup.getEvalGroupId();
        evalGroupIdsFromEvals.add(egid);
        // get all the users who currently have permission for this group
        Set<String> currentEvaluated = commonLogic.getUserIdsForEvalGroup(egid, EvalConstants.PERM_BE_EVALUATED,
                evaluation.getSectionAwareness());
        Set<String> currentAssistants = commonLogic.getUserIdsForEvalGroup(egid,
                EvalConstants.PERM_ASSISTANT_ROLE, evaluation.getSectionAwareness());
        Set<String> currentTakers = commonLogic.getUserIdsForEvalGroup(egid, EvalConstants.PERM_TAKE_EVALUATION,
                evaluation.getSectionAwareness());

        // when all roles participate, evaluated users and assistants also become takers
        if (evaluation.getAllRolesParticipate()) {
            currentTakers.addAll(currentAssistants);
            currentTakers.addAll(currentEvaluated);
        }

        HashSet<String> currentAll = new HashSet<>();
        currentAll.addAll(currentEvaluated);
        currentAll.addAll(currentAssistants);
        currentAll.addAll(currentTakers);
        /* Resolve the current permissions against the existing assignments,
         * this should only change linked records but should respect unlinked and removed records by not
         * adding a record where one already exists for the given user/group combo,
         * any linked records which do not exist anymore should be trashed if the status is right,
         * any missing records should be added if the evaluation is still active or better
         */
        List<EvalAssignUser> linkedUserAssignsInThisGroup = groupIdLinkedAssignedUsersMap.get(egid);
        if (linkedUserAssignsInThisGroup == null) {
            // this group has not been assigned yet
            linkedUserAssignsInThisGroup = new ArrayList<>();
        }
        // filter out all linked user assignments which match exactly with the existing ones
        // (each match removes the user from the "current" set AND the linked list, leaving
        // only the additions in the sets and only the deletions in the list)
        for (Iterator<EvalAssignUser> iterator = linkedUserAssignsInThisGroup.iterator(); iterator.hasNext();) {
            EvalAssignUser evalAssignUser = iterator.next();
            String type = evalAssignUser.getType();
            String userId = evalAssignUser.getUserId();
            if (EvalAssignUser.TYPE_EVALUATEE.equals(type)) {
                if (currentEvaluated.contains(userId)) {
                    currentEvaluated.remove(userId);
                    iterator.remove();
                }
            } else if (EvalAssignUser.TYPE_ASSISTANT.equals(type)) {
                if (currentAssistants.contains(userId)) {
                    currentAssistants.remove(userId);
                    iterator.remove();
                }
            } else if (EvalAssignUser.TYPE_EVALUATOR.equals(type)) {
                if (currentTakers.contains(userId)) {
                    currentTakers.remove(userId);
                    iterator.remove();
                }
            } else {
                throw new IllegalStateException("Do not recognize this user assignment type: " + type);
            }
        }
        // any remaining linked user assignments should be removed if not unlinked
        for (EvalAssignUser evalAssignUser : linkedUserAssignsInThisGroup) {
            if (evalAssignUser.getId() != null) {
                String key = makeEvalAssignUserKey(evalAssignUser, false, false);
                if (!assignUserUnlinkedRemovedKeys.contains(key)) {
                    assignUserToRemove.add(evalAssignUser.getId());
                }
            }
        }
        // any remaining current set items need to be added if they are not unlinked/removed
        assignUserToSave.addAll(makeUserAssignmentsFromUserIdSet(currentEvaluated, egid,
                EvalAssignUser.TYPE_EVALUATEE, assignGroupId, assignUserUnlinkedRemovedKeys));
        assignUserToSave.addAll(makeUserAssignmentsFromUserIdSet(currentAssistants, egid,
                EvalAssignUser.TYPE_ASSISTANT, assignGroupId, assignUserUnlinkedRemovedKeys));
        assignUserToSave.addAll(makeUserAssignmentsFromUserIdSet(currentTakers, egid,
                EvalAssignUser.TYPE_EVALUATOR, assignGroupId, assignUserUnlinkedRemovedKeys));
    }

    // now handle the actual persistent updates and log them
    String message = "Synchronized user assignments for eval (" + evaluationId + ") with "
            + assignedGroups.size() + " assigned groups";
    if (assignUserToRemove.isEmpty() && assignUserToSave.isEmpty()) {
        message += ": no changes to the user assignments (" + assignedUsers.size() + ")";
    } else {
        // deletions are only applied when the caller explicitly allows removal
        if (removeAllowed && !assignUserToRemove.isEmpty()) {
            Long[] assignUserToRemoveArray = assignUserToRemove.toArray(new Long[assignUserToRemove.size()]);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Deleting user eval assignment Ids: " + assignUserToRemove);
            }
            dao.deleteSet(EvalAssignUser.class, assignUserToRemoveArray);
            message += ": removed the following assignments: " + assignUserToRemove;
            changedUserAssignments.addAll(assignUserToRemove);
        }
        if (!assignUserToSave.isEmpty()) {
            for (EvalAssignUser evalAssignUser : assignUserToSave) {
                setAssignUserDefaults(evalAssignUser, evaluation, currentUserId);
            }
            // this is meant to force the assigned users set to be re-calculated
            // (rebuilding re-hashes entries after setAssignUserDefaults mutated them)
            assignUserToSave = new HashSet<>(assignUserToSave);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Saving user eval assignments: " + assignUserToSave);
            }
            dao.saveSet(assignUserToSave);
            message += ": created the following assignments: " + assignUserToSave;
            for (EvalAssignUser evalAssignUser : assignUserToSave) {
                changedUserAssignments.add(evalAssignUser.getId());
            }
        }
    }

    // one more specialty check to cleanup orphaned user assignments - EVALSYS-703
    // NOTE: keySet() is a live view, so removeAll also mutates
    // groupIdLinkedAssignedUsersMap (which is not used again after this point)
    Set<String> evalGroupIdsFromUsers = groupIdLinkedAssignedUsersMap.keySet();
    evalGroupIdsFromUsers.removeAll(evalGroupIdsFromEvals);
    if (!evalGroupIdsFromUsers.isEmpty()) {
        // there are users assigned to group ids in this eval which are not part of the assigned groups
        HashSet<Long> orphanedUserAssignments = new HashSet<>();
        for (EvalAssignUser evalAssignUser : assignedUsers) {
            String egid = evalAssignUser.getEvalGroupId();
            if (egid != null && evalGroupIdsFromUsers.contains(egid)) {
                if (EvalAssignUser.STATUS_LINKED.equals(evalAssignUser.getStatus())) {
                    orphanedUserAssignments.add(evalAssignUser.getId());
                }
            }
        }
        if (!orphanedUserAssignments.isEmpty()) {
            Long[] orphanedUserAssignmentsArray = orphanedUserAssignments
                    .toArray(new Long[orphanedUserAssignments.size()]);
            dao.deleteSet(EvalAssignUser.class, orphanedUserAssignmentsArray);
            message += ": removed the following orphaned user assignments: " + orphanedUserAssignments;
            changedUserAssignments.addAll(orphanedUserAssignments);
        }
    }

    LOG.info(message);
    return changedUserAssignments;
}