Example usage for java.util TreeSet contains

List of usage examples for java.util TreeSet contains

Introduction

On this page you can find example usages of java.util TreeSet contains.

Prototype

public boolean contains(Object o) 

Document

Returns true if this set contains the specified element.
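
Before the project excerpts below, here is a minimal, self-contained sketch of the method in isolation (the class name and element values are illustrative, not taken from any of the projects):

import java.util.TreeSet;

public class TreeSetContainsExample {
    public static void main(String[] args) {
        TreeSet<String> nodeIds = new TreeSet<String>();
        nodeIds.add("node_1");
        nodeIds.add("node_2");

        // contains() does an ordered-tree lookup, O(log n) per call
        System.out.println(nodeIds.contains("node_1")); // true
        System.out.println(nodeIds.contains("node_3")); // false
    }
}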

Usage

From source file:org.wise.vle.utils.FileManager.java

/**
 * Compare the parent and child projects to find differences.
 * These differences include whether a node was added, deleted,
 * moved, or not moved and whether the content for the node
 * was modified.
 * @param curriculumBaseDir the curriculum directory
 * @param parentProjectUrl the parent project url e.g. 236/wise4.project.json
 * @param projectUrl the child project url e.g. 235/wise4.project.json
 * @return the results of the analysis of the difference between the
 * parent and child project
 * @throws IOException
 */
public static String reviewUpdateProject(String curriculumBaseDir, String parentProjectUrl, String projectUrl)
        throws IOException {
    //stores node id to the node or sequence JSONObject for child project nodes
    HashMap<String, JSONObject> childNodeIdToNodeOrSequence = new HashMap<String, JSONObject>();

    //stores the filename to the node id for child project nodes
    HashMap<String, String> childFileNameToId = new HashMap<String, String>();

    //stores the node id to the node step number for child project nodes
    HashMap<String, String> childNodeIdToStepNumber = new HashMap<String, String>();

    //stores node id to the node or sequence JSONObject for parent project nodes
    HashMap<String, JSONObject> parentNodeIdToNodeOrSequence = new HashMap<String, JSONObject>();

    //stores the filename to the node id for parent project nodes
    HashMap<String, String> parentFileNameToNodeId = new HashMap<String, String>();

    //stores the node id to the node step number for parent project nodes
    HashMap<String, String> parentNodeIdToStepNumber = new HashMap<String, String>();

    //stores the .html file name to the .ht node id
    HashMap<String, String> htmlToHt = new HashMap<String, String>();

    /*
     * stores the node id to status of the node. status can be
     * "added"
     * "deleted"
     * "moved"
     * "not moved"
     */
    HashMap<String, String> nodeIdToStatus = new HashMap<String, String>();

    /*
     * stores the node id to whether that node was modified or not. modified can be
     * "true"
     * "false"
     * (note these are String values)
     */
    HashMap<String, String> nodeIdToModified = new HashMap<String, String>();

    String fileSeparator = System.getProperty("file.separator");

    //get the child project folder e.g. /Users/geoffreykwan/dev/apache-tomcat-5.5.27/webapps/curriculum/236
    String fullProjectFolderUrl = curriculumBaseDir
            + projectUrl.substring(0, projectUrl.lastIndexOf(fileSeparator));

    //get the child project file e.g. /Users/geoffreykwan/dev/apache-tomcat-5.5.27/webapps/curriculum/236/wise4.project.json
    String fullProjectFileUrl = curriculumBaseDir + projectUrl;

    //get the parent project folder e.g. /Users/geoffreykwan/dev/apache-tomcat-5.5.27/webapps/curriculum/235
    String fullParentProjectFolderUrl = curriculumBaseDir
            + parentProjectUrl.substring(0, parentProjectUrl.lastIndexOf(fileSeparator));

    //get the parent project file e.g. /Users/geoffreykwan/dev/apache-tomcat-5.5.27/webapps/curriculum/235/wise4.project.json
    String fullParentProjectFileUrl = curriculumBaseDir + parentProjectUrl;

    //get the project JSONObject for parent and child projects
    JSONObject childProject = getProjectJSONObject(fullProjectFileUrl);
    JSONObject parentProject = getProjectJSONObject(fullParentProjectFileUrl);

    //parse the parent and child projects to obtain mappings that we will use later
    parseProjectJSONObject(childProject, childNodeIdToNodeOrSequence, childFileNameToId,
            childNodeIdToStepNumber);
    parseProjectJSONObject(parentProject, parentNodeIdToNodeOrSequence, parentFileNameToNodeId,
            parentNodeIdToStepNumber);

    /*
     * compare the parent and child folders to determine if node
     * content files have been modified
     */
    compareFolder(new File(fullParentProjectFolderUrl), new File(fullProjectFolderUrl), parentFileNameToNodeId,
            htmlToHt, nodeIdToModified);

    /*
     * compare the sequences in the parent and child projects
     * to determine if sequences have been added, deleted, moved,
     * or modified and if nodes have been added, deleted, or
     * moved (node modification detection is handled in the
     * compareFolder() call above)
     */
    compareSequences(parentProject, childProject, parentNodeIdToNodeOrSequence, childNodeIdToNodeOrSequence,
            parentNodeIdToStepNumber, childNodeIdToStepNumber, nodeIdToStatus, nodeIdToModified);

    /*
     * a collection of NodeInfo objects for nodes in the child and
     * parent project
     */
    TreeSet<NodeInfo> childAndParentNodes = new TreeSet<NodeInfo>(new NodeInfoComparator());

    /*
     * a collection to keep track of all the node ids we have
     * added to the childAndParentNodes collection for quicker
     * lookup
     */
    TreeSet<String> nodeIdsAdded = new TreeSet<String>();

    /*
     * we must add nodes from the parent project first because we want
     * to show the author the structure of the parent project and
     * then add any additional nodes from the child project. we will
     * show them the parent project structure and how the nodes in the
     * parent project are different from the child project. any nodes
     * that are in the child project and not the parent project will
     * be also added to show that those nodes will be deleted.
     */

    //get all the node ids from the parent project
    Set<String> parentKeySet = parentNodeIdToStepNumber.keySet();

    //loop through all the sequences and nodes in the parent project
    Iterator<String> parentIdIterator = parentKeySet.iterator();
    while (parentIdIterator.hasNext()) {
        //get a node id
        String parentId = parentIdIterator.next();

        //get the step number for this node id
        String stepNumber = parentNodeIdToStepNumber.get(parentId);

        String title = "";
        String nodeType = "";

        try {
            //get the JSONObject for the node
            JSONObject parentNode = parentNodeIdToNodeOrSequence.get(parentId);

            //get the title and node type
            title = parentNode.getString("title");
            nodeType = parentNode.getString("type");
        } catch (JSONException e) {
            e.printStackTrace();
        }

        //create a NodeInfo object with the info from the node
        NodeInfo parentNodeInfo = new NodeInfo(stepNumber, parentId, title, nodeType, "parent");

        //add the NodeInfo to the collection
        childAndParentNodes.add(parentNodeInfo);

        //add the node id to the collection
        nodeIdsAdded.add(parentId);
    }

    //get all the node ids from the child project
    Set<String> childKeySet = childNodeIdToStepNumber.keySet();

    //loop through all the sequences and nodes in the child project
    Iterator<String> childIdIterator = childKeySet.iterator();

    while (childIdIterator.hasNext()) {
        //get a node id
        String childId = childIdIterator.next();

        //get the step number for this node id
        String stepNumber = childNodeIdToStepNumber.get(childId);

        String title = "";
        String nodeType = "";

        try {
            //get the JSONObject for the node
            JSONObject childNode = childNodeIdToNodeOrSequence.get(childId);

            //get the title and node type
            title = childNode.getString("title");
            nodeType = childNode.getString("type");
        } catch (JSONException e) {
            e.printStackTrace();
        }

        //check if we have already added a node with this node id
        if (!nodeIdsAdded.contains(childId)) {
            //we have not added it before

            //create a NodeInfo object with the info from the node
            NodeInfo childNodeInfo = new NodeInfo(stepNumber, childId, title, nodeType, "child");

            //add the NodeInfo to the collection
            childAndParentNodes.add(childNodeInfo);

            //add the node id to the collection
            nodeIdsAdded.add(childId);
        }
    }

    /*
     * the JSONArray that will contain the status info for all the nodes
     * such as whether a node was added, deleted, moved, or modified
     */
    JSONArray nodeStatuses = new JSONArray();

    //loop through all the NodeInfo objects
    Iterator<NodeInfo> childAndParentNodesIterator = childAndParentNodes.iterator();
    while (childAndParentNodesIterator.hasNext()) {
        //get a node
        NodeInfo node = childAndParentNodesIterator.next();

        //get the info from the node
        String nodeId = node.getNodeId();
        String stepNumber = node.getStepNumber();
        String title = node.getTitle();
        String nodeType = node.getNodeType();

        //get the status of the node ("added", "deleted", "moved", "not moved")
        String status = nodeIdToStatus.get(nodeId);

        //get whether the node was modified ("true" or "false")
        String modified = nodeIdToModified.get(nodeId);

        if (status == null) {
            //if there is no status value it means it was not moved
            status = "not moved";
        }

        if (modified == null) {
            //if there was no modified value it means it was not modified
            modified = "false";
        }

        try {
            //put all the values for this node into a JSONObject
            JSONObject nodeStatus = new JSONObject();
            nodeStatus.put("stepNumber", stepNumber);
            nodeStatus.put("title", title);
            nodeStatus.put("nodeId", nodeId);
            nodeStatus.put("status", status);
            nodeStatus.put("modified", modified);
            nodeStatus.put("nodeType", nodeType);

            //add the node to the array
            nodeStatuses.put(nodeStatus);
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }

    //return the status array to the client
    return nodeStatuses.toString();
}
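
The method above leans on TreeSet.contains in the second loop: nodeIdsAdded records every node id already placed in childAndParentNodes, so nodes from the child project are only added when the parent project did not already contribute them. A stripped-down sketch of that dedup idiom, assuming hypothetical parent and child id lists:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.TreeSet;

public class MergeParentThenChild {
    public static void main(String[] args) {
        List<String> parentIds = Arrays.asList("node_1", "node_2");
        List<String> childIds = Arrays.asList("node_2", "node_3");

        List<String> merged = new ArrayList<String>();
        TreeSet<String> idsAdded = new TreeSet<String>();

        // parent entries go in first and take precedence
        for (String id : parentIds) {
            merged.add(id);
            idsAdded.add(id);
        }

        // child entries are only added if not already present
        for (String id : childIds) {
            if (!idsAdded.contains(id)) {
                merged.add(id);
                idsAdded.add(id);
            }
        }

        System.out.println(merged); // [node_1, node_2, node_3]
    }
}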

From source file:com.eucalyptus.objectstorage.WalrusManager.java

public ListBucketResponseType listBucket(ListBucketType request) throws EucalyptusCloudException {
    ListBucketResponseType reply = (ListBucketResponseType) request.getReply();

    EntityWrapper<BucketInfo> db = EntityWrapper.get(BucketInfo.class);

    try {
        String bucketName = request.getBucket();
        BucketInfo bucketInfo = new BucketInfo(bucketName);
        bucketInfo.setHidden(false);
        List<BucketInfo> bucketList = db.queryEscape(bucketInfo);

        Context ctx = Contexts.lookup();
        Account account = ctx.getAccount();

        int maxKeys = -1;
        String maxKeysString = request.getMaxKeys();
        if (maxKeysString != null) {
            maxKeys = Integer.parseInt(maxKeysString);
            if (maxKeys < 0) {
                throw new InvalidArgumentException("max-keys",
                        "Argument max-keys must be an integer between 0 and " + Integer.MAX_VALUE);
            }
        } else {
            maxKeys = WalrusProperties.MAX_KEYS;
        }

        if (bucketList.size() > 0) {
            BucketInfo bucket = bucketList.get(0);
            BucketLogData logData = bucket.getLoggingEnabled() ? request.getLogData() : null;
            if (ctx.hasAdministrativePrivileges() || (bucket.canRead(account.getAccountNumber())
                    && (bucket.isGlobalRead() || Lookups.checkPrivilege(PolicySpec.S3_LISTBUCKET,
                            PolicySpec.VENDOR_S3, PolicySpec.S3_RESOURCE_BUCKET, bucketName, null)))) {
                if (logData != null) {
                    updateLogData(bucket, logData);
                    reply.setLogData(logData);
                }

                if (Contexts.lookup().hasAdministrativePrivileges()) {
                    try {
                        if (bucketHasSnapshots(bucketName)) {
                            db.rollback();
                            throw new NoSuchBucketException(bucketName);
                        }
                    } catch (Exception e) {
                        db.rollback();
                        throw new EucalyptusCloudException(e);
                    }
                }

                String prefix = request.getPrefix();
                String delimiter = request.getDelimiter();
                String marker = request.getMarker();

                reply.setName(bucketName);
                reply.setIsTruncated(false);
                reply.setPrefix(prefix);
                reply.setMarker(marker);
                reply.setDelimiter(delimiter);
                reply.setMaxKeys(maxKeys);

                if (maxKeys == 0) {
                    // No keys requested, so just return
                    reply.setContents(new ArrayList<ListEntry>());
                    reply.setCommonPrefixesList(new ArrayList<CommonPrefixesEntry>());
                    db.commit();
                    return reply;
                }

                final int queryStrideSize = maxKeys + 1;
                EntityWrapper<ObjectInfo> dbObject = db.recast(ObjectInfo.class);

                ObjectInfo searchObj = new ObjectInfo();
                searchObj.setBucketName(bucketName);
                searchObj.setLast(true);
                searchObj.setDeleted(false);

                Criteria objCriteria = dbObject.createCriteria(ObjectInfo.class);
                objCriteria.add(Example.create(searchObj));
                objCriteria.addOrder(Order.asc("objectKey"));
                objCriteria.setMaxResults(queryStrideSize); // add one to, hopefully, indicate truncation in one call

                if (!Strings.isNullOrEmpty(marker)) {
                    // The result set should be exclusive of the marker. marker could be a common prefix from a previous response. Look for keys that
                    // lexicographically follow the marker and don't contain the marker as the prefix.
                    objCriteria.add(Restrictions.gt("objectKey", marker));
                } else {
                    marker = "";
                }

                if (!Strings.isNullOrEmpty(prefix)) {
                    objCriteria.add(Restrictions.like("objectKey", prefix, MatchMode.START));
                } else {
                    prefix = "";
                }

                // Ensure not null.
                if (Strings.isNullOrEmpty(delimiter)) {
                    delimiter = "";
                }

                List<ObjectInfo> objectInfos = null;
                int resultKeyCount = 0;
                ArrayList<ListEntry> contents = new ArrayList<ListEntry>(); // contents for reply
                String nextMarker = null;
                TreeSet<String> commonPrefixes = new TreeSet<String>();
                int firstResult = -1;

                // Iterate over result sets of size maxkeys + 1
                do {
                    // Start listing from the 0th element and increment the first element to be listed by the query size
                    objCriteria.setFirstResult(queryStrideSize * (++firstResult));
                    objectInfos = (List<ObjectInfo>) objCriteria.list();

                    if (objectInfos.size() > 0) {
                        for (ObjectInfo objectInfo : objectInfos) {
                            String objectKey = objectInfo.getObjectKey();

                            // Check if it will get aggregated as a commonprefix
                            if (!Strings.isNullOrEmpty(delimiter)) {
                                String[] parts = objectKey.substring(prefix.length()).split(delimiter);
                                if (parts.length > 1) {
                                    String prefixString = prefix + parts[0] + delimiter;
                                    if (!StringUtils.equals(prefixString, marker)
                                            && !commonPrefixes.contains(prefixString)) {
                                        if (resultKeyCount == maxKeys) {
                                            // This is a new record, so we know we're truncating if this is true
                                            reply.setNextMarker(nextMarker);
                                            reply.setIsTruncated(true);
                                            resultKeyCount++;
                                            break;
                                        }

                                        commonPrefixes.add(prefixString);
                                        resultKeyCount++; // count the unique commonprefix as a single return entry

                                        // If max keys have been collected, set the next-marker. It might be needed for the response if the list is
                                        // truncated
                                        // If the common prefixes hit the limit set by max-keys, next-marker is the last common prefix
                                        if (resultKeyCount == maxKeys) {
                                            nextMarker = prefixString;
                                        }
                                    }
                                    continue;
                                }
                            }

                            if (resultKeyCount == maxKeys) {
                                // This is a new (non-commonprefix) record, so we know we're truncating
                                reply.setNextMarker(nextMarker);
                                reply.setIsTruncated(true);
                                resultKeyCount++;
                                break;
                            }

                            // Process the entry as a full key listing
                            ListEntry listEntry = new ListEntry();
                            listEntry.setKey(objectKey);
                            listEntry.setEtag(objectInfo.getEtag());
                            listEntry.setLastModified(DateUtils.format(objectInfo.getLastModified().getTime(),
                                    DateUtils.ALT_ISO8601_DATE_PATTERN));
                            listEntry.setStorageClass(objectInfo.getStorageClass());
                            listEntry.setSize(objectInfo.getSize());
                            listEntry.setStorageClass(objectInfo.getStorageClass());
                            try {
                                Account ownerAccount = Accounts.lookupAccountById(objectInfo.getOwnerId());
                                listEntry.setOwner(new CanonicalUserType(ownerAccount.getCanonicalId(),
                                        ownerAccount.getName()));
                            } catch (AuthException e) {
                                db.rollback();
                                throw new AccessDeniedException("Bucket", bucketName, logData);
                            }
                            contents.add(listEntry);

                            resultKeyCount++;

                            // If max keys have been collected, set the next-marker. It might be needed for the response if the list is truncated
                            if (resultKeyCount == maxKeys) {
                                nextMarker = objectKey;
                            }
                        }
                    }

                    if (resultKeyCount <= maxKeys && objectInfos.size() <= maxKeys) {
                        break;
                    }
                } while (resultKeyCount <= maxKeys);

                reply.setContents(contents);

                // Prefixes are already sorted, add them to the proper data structures and populate the reply
                if (!commonPrefixes.isEmpty()) {
                    ArrayList<CommonPrefixesEntry> commonPrefixesList = new ArrayList<CommonPrefixesEntry>();
                    for (String prefixEntry : commonPrefixes) {
                        commonPrefixesList.add(new CommonPrefixesEntry().add(new PrefixEntry(prefixEntry)));
                    }
                    reply.setCommonPrefixesList(commonPrefixesList);
                }
            } else {
                db.rollback();
                throw new AccessDeniedException("Bucket", bucketName, logData);
            }
        } else {
            db.rollback();
            throw new NoSuchBucketException(bucketName);
        }
        db.commit();
        return reply;
    } finally {
        if (db.isActive()) {
            db.rollback();
        }
    }
}
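
Here TreeSet.contains guards the commonPrefixes set: a key that rolls up under the delimiter is counted only once toward max-keys, and the TreeSet keeps the prefixes sorted for the reply. A simplified, standalone sketch of that aggregation step with made-up object keys; it mirrors the delimiter logic above but skips the paging and marker handling:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.TreeSet;

public class CommonPrefixAggregation {
    public static void main(String[] args) {
        List<String> objectKeys = Arrays.asList(
                "photos/2021/a.jpg", "photos/2021/b.jpg", "photos/2022/c.jpg", "readme.txt");
        String prefix = "";
        String delimiter = "/";

        TreeSet<String> commonPrefixes = new TreeSet<String>();
        List<String> contents = new ArrayList<String>();

        for (String objectKey : objectKeys) {
            String[] parts = objectKey.substring(prefix.length()).split(delimiter);
            if (parts.length > 1) {
                String prefixString = prefix + parts[0] + delimiter;
                // contains() keeps each rolled-up prefix from being counted twice
                if (!commonPrefixes.contains(prefixString)) {
                    commonPrefixes.add(prefixString);
                }
                continue;
            }
            contents.add(objectKey); // listed as a full key entry
        }

        System.out.println(commonPrefixes); // [photos/]
        System.out.println(contents);       // [readme.txt]
    }
}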

From source file:com.eucalyptus.objectstorage.WalrusManager.java

public ListVersionsResponseType listVersions(ListVersionsType request) throws EucalyptusCloudException {
    ListVersionsResponseType reply = (ListVersionsResponseType) request.getReply();

    EntityWrapper<BucketInfo> db = EntityWrapper.get(BucketInfo.class);

    try {
        String bucketName = request.getBucket();
        BucketInfo bucketInfo = new BucketInfo(bucketName);
        bucketInfo.setHidden(false);
        List<BucketInfo> bucketList = db.queryEscape(bucketInfo);

        Context ctx = Contexts.lookup();
        Account account = ctx.getAccount();

        int maxKeys = -1;
        String maxKeysString = request.getMaxKeys();
        if (maxKeysString != null) {
            maxKeys = Integer.parseInt(maxKeysString);
            if (maxKeys < 0) {
                throw new InvalidArgumentException("max-keys",
                        "Argument max-keys must be an integer between 0 and " + Integer.MAX_VALUE);
            }
        } else {
            maxKeys = WalrusProperties.MAX_KEYS;
        }

        if (bucketList.size() > 0) {
            BucketInfo bucket = bucketList.get(0);
            BucketLogData logData = bucket.getLoggingEnabled() ? request.getLogData() : null;

            if (ctx.hasAdministrativePrivileges() || (bucket.canRead(account.getAccountNumber())
                    && (bucket.isGlobalRead() || Lookups.checkPrivilege(PolicySpec.S3_LISTBUCKETVERSIONS,
                            PolicySpec.VENDOR_S3, PolicySpec.S3_RESOURCE_BUCKET, bucketName, null)))) {

                if (logData != null) {
                    updateLogData(bucket, logData);
                    reply.setLogData(logData);
                }

                if (Contexts.lookup().hasAdministrativePrivileges()) {
                    try {
                        if (bucketHasSnapshots(bucketName)) {
                            db.rollback();
                            throw new NoSuchBucketException(bucketName);
                        }
                    } catch (Exception e) {
                        db.rollback();
                        throw new EucalyptusCloudException(e);
                    }
                }

                String prefix = request.getPrefix();
                String keyMarker = request.getKeyMarker();
                String versionMarker = request.getVersionIdMarker();
                String delimiter = request.getDelimiter();

                reply.setName(bucketName);
                reply.setIsTruncated(false);
                reply.setPrefix(prefix);
                reply.setMaxKeys(maxKeys);
                reply.setDelimiter(delimiter);
                reply.setKeyMarker(keyMarker);
                reply.setVersionIdMarker(versionMarker);

                if (bucket.isVersioningDisabled()) {
                    db.commit();
                    return reply;
                }

                if (maxKeys == 0) {
                    // No keys requested, so just return
                    reply.setKeyEntries(new ArrayList<KeyEntry>());
                    reply.setCommonPrefixesList(new ArrayList<CommonPrefixesEntry>());
                    db.commit();
                    return reply;
                }

                final int queryStrideSize = maxKeys + 1;
                EntityWrapper<ObjectInfo> dbObject = db.recast(ObjectInfo.class);

                ObjectInfo searchObj = new ObjectInfo();
                searchObj.setBucketName(bucketName);

                Criteria objCriteria = dbObject.createCriteria(ObjectInfo.class);
                objCriteria.add(Example.create(searchObj));
                objCriteria.addOrder(Order.asc("objectKey"));
                objCriteria.addOrder(Order.desc("lastModified"));
                objCriteria.setMaxResults(queryStrideSize); // add one to, hopefully, indicate truncation in one call

                // Ensure these aren't null
                keyMarker = (Strings.isNullOrEmpty(keyMarker) ? "" : keyMarker);
                prefix = (Strings.isNullOrEmpty(prefix) ? "" : prefix);
                versionMarker = (Strings.isNullOrEmpty(versionMarker) ? "" : versionMarker);

                if (!Strings.isNullOrEmpty(keyMarker)) {
                    if (!Strings.isNullOrEmpty(versionMarker)) {
                        Date resumeDate = null;
                        try {
                            ObjectInfo markerObj = new ObjectInfo();
                            markerObj.setBucketName(bucketName);
                            markerObj.setVersionId(versionMarker);
                            markerObj.setObjectKey(keyMarker);
                            ObjectInfo lastFromPrevObj = dbObject.uniqueResultEscape(markerObj);
                            if (lastFromPrevObj != null && lastFromPrevObj.getLastModified() != null) {
                                resumeDate = lastFromPrevObj.getLastModified();
                            } else {
                                dbObject.rollback();
                                throw new NoSuchEntityException("VersionIDMarker " + versionMarker
                                        + " does not match an existing object version");
                            }
                        } catch (TransactionException e) {
                            LOG.error(e);
                            dbObject.rollback();
                            throw new EucalyptusCloudException(
                                    "Next-Key-Marker or Next-Version-Id marker invalid");
                        }
                        // The result set should be exclusive of the key with the key-marker version-id-marker pair. Look for keys that lexicographically
                        // follow the version-id-marker for a given key-marker and also the keys that follow the key-marker.
                        objCriteria.add(Restrictions.or(
                                Restrictions.and(Restrictions.eq("objectKey", keyMarker),
                                        Restrictions.lt("lastModified", resumeDate)),
                                Restrictions.gt("objectKey", keyMarker)));
                    } else {
                        // The result set should be exclusive of the key-marker. key-marker could be a common prefix from a previous response. Look for keys
                        // that lexicographically follow the key-marker and don't contain the key-marker as the prefix.
                        objCriteria.add(Restrictions.gt("objectKey", keyMarker));
                    }
                }

                if (!Strings.isNullOrEmpty(prefix)) {
                    objCriteria.add(Restrictions.like("objectKey", prefix, MatchMode.START));
                } else {
                    prefix = ""; // ensure not null has already been set in the reply, so this is safe
                }

                List<ObjectInfo> objectInfos = null;
                int resultKeyCount = 0;
                ArrayList<KeyEntry> keyEntries = new ArrayList<KeyEntry>();
                String nextKeyMarker = null;
                String nextVersionIdMarker = null;
                TreeSet<String> commonPrefixes = new TreeSet<String>();
                int firstResult = -1;

                // Iterate over result sets of size maxkeys + 1
                do {
                    // Start listing from the 0th element and increment the first element to be listed by the query size
                    objCriteria.setFirstResult(queryStrideSize * (++firstResult));
                    objectInfos = (List<ObjectInfo>) objCriteria.list();

                    if (objectInfos.size() > 0) {

                        for (ObjectInfo objectInfo : objectInfos) {
                            String objectKey = objectInfo.getObjectKey();

                            // Check if it will get aggregated as a commonprefix
                            if (!Strings.isNullOrEmpty(delimiter)) {
                                String[] parts = objectKey.substring(prefix.length()).split(delimiter);
                                if (parts.length > 1) {
                                    String prefixString = prefix + parts[0] + delimiter;
                                    if (!StringUtils.equals(prefixString, keyMarker)
                                            && !commonPrefixes.contains(prefixString)) {
                                        if (resultKeyCount == maxKeys) {
                                            // This is a new record, so we know we're truncating if this is true
                                            reply.setNextKeyMarker(nextKeyMarker);
                                            reply.setNextVersionIdMarker(nextVersionIdMarker);
                                            reply.setIsTruncated(true);
                                            resultKeyCount++;
                                            break;
                                        }

                                        commonPrefixes.add(prefixString);
                                        resultKeyCount++; // count the unique commonprefix as a single return entry

                                        // If max keys have been collected, set the next-key-marker. It might be needed for the response if the list is
                                        // truncated
                                        // If the common prefixes hit the limit set by max-keys, next-key-marker is the last common prefix and there is no
                                        // version-id-marker
                                        if (resultKeyCount == maxKeys) {
                                            nextKeyMarker = prefixString;
                                            nextVersionIdMarker = null;
                                        }
                                    }
                                    continue;
                                }
                            }

                            if (resultKeyCount == maxKeys) {
                                // This is a new (non-commonprefix) record, so we know we're truncating
                                reply.setNextKeyMarker(nextKeyMarker);
                                reply.setNextVersionIdMarker(nextVersionIdMarker);
                                reply.setIsTruncated(true);
                                resultKeyCount++;
                                break;
                            }

                            // This is either a version entry or a delete marker
                            KeyEntry keyEntry = null;
                            if (!objectInfo.getDeleted()) {
                                keyEntry = new VersionEntry();
                                ((VersionEntry) keyEntry).setEtag(objectInfo.getEtag());
                                ((VersionEntry) keyEntry).setSize(objectInfo.getSize());
                                ((VersionEntry) keyEntry).setStorageClass(objectInfo.getStorageClass());
                            } else {
                                keyEntry = new DeleteMarkerEntry();
                            }
                            keyEntry.setKey(objectKey);
                            keyEntry.setVersionId(objectInfo.getVersionId());
                            keyEntry.setIsLatest(objectInfo.getLast());
                            keyEntry.setLastModified(DateUtils.format(objectInfo.getLastModified().getTime(),
                                    DateUtils.ALT_ISO8601_DATE_PATTERN));
                            try {
                                Account ownerAccount = Accounts.lookupAccountById(objectInfo.getOwnerId());
                                keyEntry.setOwner(new CanonicalUserType(ownerAccount.getCanonicalId(),
                                        ownerAccount.getName()));
                            } catch (AuthException e) {
                                db.rollback();
                                throw new AccessDeniedException("Bucket", bucketName, logData);
                            }
                            keyEntries.add(keyEntry);

                            resultKeyCount++;

                            // If max keys have been collected, set the next- markers. They might be needed for the response if the list is truncated
                            if (resultKeyCount == maxKeys) {
                                nextKeyMarker = objectKey;
                                nextVersionIdMarker = objectInfo.getVersionId();
                            }
                        }
                    }
                    if (resultKeyCount <= maxKeys && objectInfos.size() <= maxKeys) {
                        break;
                    }
                } while (resultKeyCount <= maxKeys);

                reply.setKeyEntries(keyEntries);

                // Prefixes are already sorted, add them to the proper data structures and populate the reply
                if (!commonPrefixes.isEmpty()) {
                    ArrayList<CommonPrefixesEntry> commonPrefixesList = new ArrayList<CommonPrefixesEntry>();
                    for (String prefixEntry : commonPrefixes) {
                        commonPrefixesList.add(new CommonPrefixesEntry().add(new PrefixEntry(prefixEntry)));
                    }
                    reply.setCommonPrefixesList(commonPrefixesList);
                }
            } else {
                db.rollback();
                throw new AccessDeniedException("Bucket", bucketName, logData);
            }
        } else {
            db.rollback();
            throw new NoSuchBucketException(bucketName);
        }
        db.commit();
        return reply;
    } finally {
        if (db.isActive()) {
            db.rollback();
        }
    }
}

From source file:com.eucalyptus.walrus.WalrusFSManager.java

@Override
public ListBucketResponseType listBucket(ListBucketType request) throws WalrusException {
    ListBucketResponseType reply = (ListBucketResponseType) request.getReply();

    EntityWrapper<BucketInfo> db = EntityWrapper.get(BucketInfo.class);

    try {
        String bucketName = request.getBucket();
        BucketInfo bucketInfo = new BucketInfo(bucketName);
        bucketInfo.setHidden(false);
        List<BucketInfo> bucketList = db.queryEscape(bucketInfo);

        Context ctx = Contexts.lookup();
        Account account = ctx.getAccount();

        int maxKeys = -1;
        String maxKeysString = request.getMaxKeys();
        if (maxKeysString != null) {
            maxKeys = Integer.parseInt(maxKeysString);
            if (maxKeys < 0) {
                throw new InvalidArgumentException("max-keys",
                        "Argument max-keys must be an integer between 0 and " + Integer.MAX_VALUE);
            }
        } else {
            maxKeys = WalrusProperties.MAX_KEYS;
        }

        if (bucketList.size() > 0) {
            BucketInfo bucket = bucketList.get(0);
            BucketLogData logData = bucket.getLoggingEnabled() ? request.getLogData() : null;
            if (logData != null) {
                updateLogData(bucket, logData);
                reply.setLogData(logData);
            }

            if (Contexts.lookup().hasAdministrativePrivileges()) {
                try {
                    if (bucketHasSnapshots(bucketName)) {
                        db.rollback();
                        throw new NoSuchBucketException(bucketName);
                    }
                } catch (Exception e) {
                    db.rollback();
                    throw new InternalErrorException(e);
                }
            }

            String prefix = request.getPrefix();
            String delimiter = request.getDelimiter();
            String marker = request.getMarker();

            reply.setName(bucketName);
            reply.setIsTruncated(false);
            reply.setPrefix(prefix);
            reply.setMarker(marker);
            reply.setDelimiter(delimiter);
            reply.setMaxKeys(maxKeys);

            if (maxKeys == 0) {
                // No keys requested, so just return
                reply.setContents(new ArrayList<ListEntry>());
                reply.setCommonPrefixesList(new ArrayList<CommonPrefixesEntry>());
                db.commit();
                return reply;
            }

            final int queryStrideSize = maxKeys + 1;
            EntityWrapper<ObjectInfo> dbObject = db.recast(ObjectInfo.class);

            ObjectInfo searchObj = new ObjectInfo();
            searchObj.setBucketName(bucketName);
            searchObj.setLast(true);
            searchObj.setDeleted(false);

            Criteria objCriteria = dbObject.createCriteria(ObjectInfo.class);
            objCriteria.add(Example.create(searchObj));
            objCriteria.addOrder(Order.asc("objectKey"));
            objCriteria.setMaxResults(queryStrideSize); // add one to, hopefully, indicate truncation in one call

            if (!Strings.isNullOrEmpty(marker)) {
                // The result set should be exclusive of the marker. marker could be a common prefix from a previous response. Look for keys that
                // lexicographically follow the marker and don't contain the marker as the prefix.
                objCriteria.add(Restrictions.gt("objectKey", marker));
            } else {
                marker = "";
            }

            if (!Strings.isNullOrEmpty(prefix)) {
                objCriteria.add(Restrictions.like("objectKey", prefix, MatchMode.START));
            } else {
                prefix = "";
            }

            // Ensure not null.
            if (Strings.isNullOrEmpty(delimiter)) {
                delimiter = "";
            }

            List<ObjectInfo> objectInfos = null;
            int resultKeyCount = 0;
            ArrayList<ListEntry> contents = new ArrayList<ListEntry>(); // contents for reply
            String nextMarker = null;
            TreeSet<String> commonPrefixes = new TreeSet<String>();
            int firstResult = -1;

            // Iterate over result sets of size maxkeys + 1
            do {
                // Start listing from the 0th element and increment the first element to be listed by the query size
                objCriteria.setFirstResult(queryStrideSize * (++firstResult));
                objectInfos = (List<ObjectInfo>) objCriteria.list();

                if (objectInfos.size() > 0) {
                    for (ObjectInfo objectInfo : objectInfos) {
                        String objectKey = objectInfo.getObjectKey();

                        // Check if it will get aggregated as a commonprefix
                        if (!Strings.isNullOrEmpty(delimiter)) {
                            String[] parts = objectKey.substring(prefix.length()).split(delimiter);
                            if (parts.length > 1) {
                                String prefixString = prefix + parts[0] + delimiter;
                                if (!StringUtils.equals(prefixString, marker)
                                        && !commonPrefixes.contains(prefixString)) {
                                    if (resultKeyCount == maxKeys) {
                                        // This is a new record, so we know we're truncating if this is true
                                        reply.setNextMarker(nextMarker);
                                        reply.setIsTruncated(true);
                                        resultKeyCount++;
                                        break;
                                    }

                                    commonPrefixes.add(prefixString);
                                    resultKeyCount++; // count the unique commonprefix as a single return entry

                                    // If max keys have been collected, set the next-marker. It might be needed for the response if the list is
                                    // truncated
                                    // If the common prefixes hit the limit set by max-keys, next-marker is the last common prefix
                                    if (resultKeyCount == maxKeys) {
                                        nextMarker = prefixString;
                                    }
                                }
                                continue;
                            }
                        }

                        if (resultKeyCount == maxKeys) {
                            // This is a new (non-commonprefix) record, so we know we're truncating
                            reply.setNextMarker(nextMarker);
                            reply.setIsTruncated(true);
                            resultKeyCount++;
                            break;
                        }

                        // Process the entry as a full key listing
                        ListEntry listEntry = new ListEntry();
                        listEntry.setKey(objectKey);
                        listEntry.setEtag(objectInfo.getEtag());
                        listEntry.setLastModified(
                                DateFormatter.dateToListingFormattedString(objectInfo.getLastModified()));
                        listEntry.setStorageClass(objectInfo.getStorageClass());
                        listEntry.setSize(objectInfo.getSize());
                        listEntry.setStorageClass(objectInfo.getStorageClass());
                        try {
                            Account ownerAccount = Accounts.lookupAccountById(objectInfo.getOwnerId());
                            listEntry.setOwner(
                                    new CanonicalUser(ownerAccount.getCanonicalId(), ownerAccount.getName()));
                        } catch (AuthException e) {
                            db.rollback();
                            throw new AccessDeniedException("Bucket", bucketName, logData);
                        }
                        contents.add(listEntry);

                        resultKeyCount++;

                        // If max keys have been collected, set the next-marker. It might be needed for the response if the list is truncated
                        if (resultKeyCount == maxKeys) {
                            nextMarker = objectKey;
                        }
                    }
                }

                if (resultKeyCount <= maxKeys && objectInfos.size() <= maxKeys) {
                    break;
                }
            } while (resultKeyCount <= maxKeys);

            reply.setContents(contents);

            // Prefixes are already sorted, add them to the proper data structures and populate the reply
            if (!commonPrefixes.isEmpty()) {
                ArrayList<CommonPrefixesEntry> commonPrefixesList = new ArrayList<CommonPrefixesEntry>();
                for (String prefixEntry : commonPrefixes) {
                    commonPrefixesList.add(new CommonPrefixesEntry(prefixEntry));
                }
                reply.setCommonPrefixesList(commonPrefixesList);
            }
        } else {
            db.rollback();
            throw new NoSuchBucketException(bucketName);
        }
        db.commit();
        return reply;
    } finally {
        if (db.isActive()) {
            db.rollback();
        }
    }
}

From source file:com.eucalyptus.walrus.WalrusFSManager.java

@Override
public ListVersionsResponseType listVersions(ListVersionsType request) throws WalrusException {
    ListVersionsResponseType reply = (ListVersionsResponseType) request.getReply();
    EntityWrapper<BucketInfo> db = EntityWrapper.get(BucketInfo.class);

    try {
        String bucketName = request.getBucket();
        BucketInfo bucketInfo = new BucketInfo(bucketName);
        bucketInfo.setHidden(false);
        List<BucketInfo> bucketList = db.queryEscape(bucketInfo);

        Context ctx = Contexts.lookup();
        Account account = ctx.getAccount();

        int maxKeys = -1;
        String maxKeysString = request.getMaxKeys();
        if (maxKeysString != null) {
            maxKeys = Integer.parseInt(maxKeysString);
            if (maxKeys < 0) {
                throw new InvalidArgumentException("max-keys",
                        "Argument max-keys must be an integer between 0 and " + Integer.MAX_VALUE);
            }
        } else {
            maxKeys = WalrusProperties.MAX_KEYS;
        }

        if (bucketList.size() > 0) {
            BucketInfo bucket = bucketList.get(0);
            BucketLogData logData = bucket.getLoggingEnabled() ? request.getLogData() : null;

            if (logData != null) {
                updateLogData(bucket, logData);
                reply.setLogData(logData);
            }

            String prefix = request.getPrefix();
            String keyMarker = request.getKeyMarker();
            String versionMarker = request.getVersionIdMarker();
            String delimiter = request.getDelimiter();

            reply.setName(bucketName);
            reply.setIsTruncated(false);
            reply.setPrefix(prefix);
            reply.setMaxKeys(maxKeys);
            reply.setDelimiter(delimiter);
            reply.setKeyMarker(keyMarker);
            reply.setVersionIdMarker(versionMarker);

            if (bucket.isVersioningDisabled()) {
                db.commit();
                return reply;
            }

            if (maxKeys == 0) {
                // No keys requested, so just return
                reply.setKeyEntries(new ArrayList<KeyEntry>());
                reply.setCommonPrefixesList(new ArrayList<CommonPrefixesEntry>());
                db.commit();
                return reply;
            }

            final int queryStrideSize = maxKeys + 1;
            EntityWrapper<ObjectInfo> dbObject = db.recast(ObjectInfo.class);

            ObjectInfo searchObj = new ObjectInfo();
            searchObj.setBucketName(bucketName);

            Criteria objCriteria = dbObject.createCriteria(ObjectInfo.class);
            objCriteria.add(Example.create(searchObj));
            objCriteria.addOrder(Order.asc("objectKey"));
            objCriteria.addOrder(Order.desc("lastModified"));
            objCriteria.setMaxResults(queryStrideSize); // add one to, hopefully, indicate truncation in one call

            // Ensure these aren't null
            keyMarker = (Strings.isNullOrEmpty(keyMarker) ? "" : keyMarker);
            prefix = (Strings.isNullOrEmpty(prefix) ? "" : prefix);
            versionMarker = (Strings.isNullOrEmpty(versionMarker) ? "" : versionMarker);

            if (!Strings.isNullOrEmpty(keyMarker)) {
                if (!Strings.isNullOrEmpty(versionMarker)) {
                    Date resumeDate = null;
                    try {
                        ObjectInfo markerObj = new ObjectInfo();
                        markerObj.setBucketName(bucketName);
                        markerObj.setVersionId(versionMarker);
                        markerObj.setObjectKey(keyMarker);
                        ObjectInfo lastFromPrevObj = dbObject.uniqueResultEscape(markerObj);
                        if (lastFromPrevObj != null && lastFromPrevObj.getLastModified() != null) {
                            resumeDate = lastFromPrevObj.getLastModified();
                        } else {
                            dbObject.rollback();
                            throw new NoSuchEntityException("VersionIDMarker " + versionMarker
                                    + " does not match an existing object version");
                        }
                    } catch (TransactionException e) {
                        LOG.error(e);
                        dbObject.rollback();
                        throw new InternalErrorException("Next-Key-Marker or Next-Version-Id marker invalid");
                    }
                    // The result set should be exclusive of the key with the key-marker version-id-marker pair. Look for keys that lexicographically
                    // follow the version-id-marker for a given key-marker and also the keys that follow the key-marker.
                    objCriteria.add(Restrictions.or(
                            Restrictions.and(Restrictions.eq("objectKey", keyMarker),
                                    Restrictions.lt("lastModified", resumeDate)),
                            Restrictions.gt("objectKey", keyMarker)));
                } else {
                    // The result set should be exclusive of the key-marker. key-marker could be a common prefix from a previous response. Look for keys
                    // that lexicographically follow the key-marker and don't contain the key-marker as the prefix.
                    objCriteria.add(Restrictions.gt("objectKey", keyMarker));
                }
            }

            if (!Strings.isNullOrEmpty(prefix)) {
                objCriteria.add(Restrictions.like("objectKey", prefix, MatchMode.START));
            } else {
                prefix = ""; // ensure not null has already been set in the reply, so this is safe
            }

            List<ObjectInfo> objectInfos = null;
            int resultKeyCount = 0;
            ArrayList<KeyEntry> keyEntries = new ArrayList<KeyEntry>();
            String nextKeyMarker = null;
            String nextVersionIdMarker = null;
            TreeSet<String> commonPrefixes = new TreeSet<String>();
            int firstResult = -1;

            // Iterate over result sets of size maxkeys + 1
            do {
                // Start listing from the 0th element and increment the first element to be listed by the query size
                objCriteria.setFirstResult(queryStrideSize * (++firstResult));
                objectInfos = (List<ObjectInfo>) objCriteria.list();

                if (objectInfos.size() > 0) {

                    for (ObjectInfo objectInfo : objectInfos) {
                        String objectKey = objectInfo.getObjectKey();

                        // Check if it will get aggregated as a commonprefix
                        if (!Strings.isNullOrEmpty(delimiter)) {
                            String[] parts = objectKey.substring(prefix.length()).split(delimiter);
                            if (parts.length > 1) {
                                String prefixString = prefix + parts[0] + delimiter;
                                if (!StringUtils.equals(prefixString, keyMarker)
                                        && !commonPrefixes.contains(prefixString)) {
                                    if (resultKeyCount == maxKeys) {
                                        // This is a new record, so we know we're truncating if this is true
                                        reply.setNextKeyMarker(nextKeyMarker);
                                        reply.setNextVersionIdMarker(nextVersionIdMarker);
                                        reply.setIsTruncated(true);
                                        resultKeyCount++;
                                        break;
                                    }

                                    commonPrefixes.add(prefixString);
                                    resultKeyCount++; // count the unique commonprefix as a single return entry

                                    // If max keys have been collected, set the next-key-marker. It might be needed for the response if the list is
                                    // truncated
                                    // If the common prefixes hit the limit set by max-keys, next-key-marker is the last common prefix and there is no
                                    // version-id-marker
                                    if (resultKeyCount == maxKeys) {
                                        nextKeyMarker = prefixString;
                                        nextVersionIdMarker = null;
                                    }
                                }
                                continue;
                            }
                        }

                        if (resultKeyCount == maxKeys) {
                            // This is a new (non-commonprefix) record, so we know we're truncating
                            reply.setNextKeyMarker(nextKeyMarker);
                            reply.setNextVersionIdMarker(nextVersionIdMarker);
                            reply.setIsTruncated(true);
                            resultKeyCount++;
                            break;
                        }

                        // This is either a version entry or a delete marker
                        KeyEntry keyEntry = null;
                        if (!objectInfo.getDeleted()) {
                            keyEntry = new VersionEntry();
                            ((VersionEntry) keyEntry).setEtag(objectInfo.getEtag());
                            ((VersionEntry) keyEntry).setSize(objectInfo.getSize());
                            ((VersionEntry) keyEntry).setStorageClass(objectInfo.getStorageClass());
                        } else {
                            keyEntry = new DeleteMarkerEntry();
                        }
                        keyEntry.setKey(objectKey);
                        keyEntry.setVersionId(objectInfo.getVersionId());
                        keyEntry.setIsLatest(objectInfo.getLast());
                        keyEntry.setLastModified(
                                DateFormatter.dateToListingFormattedString(objectInfo.getLastModified()));
                        try {
                            Account ownerAccount = Accounts.lookupAccountById(objectInfo.getOwnerId());
                            keyEntry.setOwner(
                                    new CanonicalUser(ownerAccount.getCanonicalId(), ownerAccount.getName()));
                        } catch (AuthException e) {
                            db.rollback();
                            throw new AccessDeniedException("Bucket", bucketName, logData);
                        }
                        keyEntries.add(keyEntry);

                        resultKeyCount++;

                        // If max keys have been collected, set the next- markers. They might be needed for the response if the list is truncated
                        if (resultKeyCount == maxKeys) {
                            nextKeyMarker = objectKey;
                            nextVersionIdMarker = objectInfo.getVersionId();
                        }
                    }
                }
                if (resultKeyCount <= maxKeys && objectInfos.size() <= maxKeys) {
                    break;
                }
            } while (resultKeyCount <= maxKeys);

            reply.setKeyEntries(keyEntries);

            // Prefixes are already sorted, add them to the proper data structures and populate the reply
            if (!commonPrefixes.isEmpty()) {
                ArrayList<CommonPrefixesEntry> commonPrefixesList = new ArrayList<CommonPrefixesEntry>();
                for (String prefixEntry : commonPrefixes) {
                    commonPrefixesList.add(new CommonPrefixesEntry(prefixEntry));
                }
                reply.setCommonPrefixesList(commonPrefixesList);
            }
        } else {
            db.rollback();
            throw new NoSuchBucketException(bucketName);
        }
        db.commit();
        return reply;
    } finally {
        if (db.isActive()) {
            db.rollback();
        }
    }
}

From source file:org.chiba.tools.schemabuilder.AbstractSchemaFormBuilder.java

/**
 * Build the type tree
 */
/*private void buildTypeTree(XSTypeDefinition type, TreeSet descendents) {
if (type != null) {
        
    if (descendents.size() > 0) {
        TreeSet compatibleTypes = (TreeSet) typeTree.get(type.getName());
        
        if (compatibleTypes == null) {
            compatibleTypes = new TreeSet(descendents);
            typeTree.put(type.getName(), compatibleTypes);
        } else {
            compatibleTypes.addAll(descendents);
        }
    }
        
    XSTypeDefinition parentType = type.getBaseType();
        
    if (parentType != null
        && type.getTypeCategory() == parentType.getTypeCategory()) {
        String typeName = type.getName();
        String parentTypeName = parentType.getName();
        if ((typeName == null && parentTypeName != null)
            || (typeName != null && parentTypeName == null)
            || (typeName != null
                && parentTypeName != null
                && !type.getName().equals(parentType.getName())
                && !parentType.getName().equals("anyType"))) {
        
            TreeSet newDescendents = new TreeSet(descendents);
            //extension (we only add it to "newDescendants" because we don't want
            //to have a type descendant to itself, but to consider it for the parent
            if (type.getTypeCategory() == XSTypeDefinition.COMPLEX_TYPE) {
                XSComplexTypeDefinition complexType = (XSComplexTypeDefinition) type;
                if (complexType.getDerivationMethod() == XSConstants.DERIVATION_EXTENSION
                        && !complexType.getAbstract()
                        && !descendents.contains(type.getName()) //to be tested
                        ) {
                    newDescendents.add(type.getName());
                }
            }
//note: extensions are impossible on simpleTypes !
        
        buildTypeTree(parentType, newDescendents);
        }
    }
}
}*/
private void buildTypeTree(XSTypeDefinition type, TreeSet descendents) {
    if (type != null) {

        if (descendents.size() > 0) {
            //TreeSet compatibleTypes = (TreeSet) typeTree.get(type.getName());
            TreeSet compatibleTypes = (TreeSet) typeTree.get(type.getName());

            if (compatibleTypes == null) {
                //compatibleTypes = new TreeSet(descendents);
                compatibleTypes = new TreeSet(TypeExtensionSorter.getInstance());
                compatibleTypes.addAll(descendents);
                //typeTree.put(type.getName(), compatibleTypes);
                typeTree.put(type.getName(), compatibleTypes);
            } else {
                compatibleTypes.addAll(descendents);
            }
        }

        XSTypeDefinition parentType = type.getBaseType();

        if (parentType != null && type.getTypeCategory() == parentType.getTypeCategory()) {
            /*String typeName = type.getName();
            String parentTypeName = parentType.getName();
            if ((typeName == null && parentTypeName != null)
            || (typeName != null && parentTypeName == null)
            || (typeName != null
                && parentTypeName != null
                && !type.getName().equals(parentType.getName())
                && !parentType.getName().equals("anyType"))) {*/
            if (type != parentType
                    && (parentType.getName() == null || !parentType.getName().equals("anyType"))) {

                //TreeSet newDescendents=new TreeSet(descendents);
                TreeSet newDescendents = new TreeSet(TypeExtensionSorter.getInstance());
                newDescendents.addAll(descendents);

                //extension (we only add it to "newDescendants" because we don't want
                //to have a type descendant to itself, but to consider it for the parent
                if (type.getTypeCategory() == XSTypeDefinition.COMPLEX_TYPE) {
                    XSComplexTypeDefinition complexType = (XSComplexTypeDefinition) type;
                    if (complexType.getDerivationMethod() == XSConstants.DERIVATION_EXTENSION
                            && !complexType.getAbstract() && !descendents.contains(type) //to be tested
                    //&& !descendents.contains(type.getName()) //to be tested
                    ) {
                        //newDescendents.add(type.getName());
                        newDescendents.add(type);
                    }
                }
                //note: extensions are impossible on simpleTypes !

                buildTypeTree(parentType, newDescendents);
            }
        }
    }
}
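
In the rewritten buildTypeTree above, descendents and newDescendents are TreeSets ordered by TypeExtensionSorter, so descendents.contains(type) is answered by that comparator rather than by equals. A minimal standalone sketch of that behaviour, using a standard comparator in place of TypeExtensionSorter (which is specific to the Chiba code base):

import java.util.TreeSet;

// contains() on a TreeSet consults the set's comparator, so an element that
// compares equal to an existing one is treated as already present.
public class ContainsWithComparator {
    public static void main(String[] args) {
        TreeSet<String> descendents = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
        descendents.add("AddressType");

        // case-insensitive lookup: "addresstype" is considered already contained
        if (!descendents.contains("addresstype")) {
            descendents.add("addresstype"); // never reached
        }
        System.out.println(descendents); // prints [AddressType]
    }
}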

From source file:com.qpark.maven.plugin.securityconfig.SecurityConfigMojo.java

private String getSecurityAuthorisationSpringConfig(final XsdsUtil xsds) {
    List<String> annonyoums = getSplitted(this.channelPatternsAnnonymousAuthorisation);
    List<String> admins = getSplitted(this.channelPatternsAdminAuthorisation);
    List<String> create = getSplitted(this.channelPatternsCreate);
    List<String> read = getSplitted(this.channelPatternsRead);
    List<String> update = getSplitted(this.channelPatternsUpdate);
    List<String> delete = getSplitted(this.channelPatternsDelete);

    this.roleList.add("ROLE_ANONYMOUS");
    this.roleList.add("ROLE_ADMIN");
    this.roleList.add("ROLE_ALL_OPERATIONS");

    StringBuffer sb = new StringBuffer(1024);
    TreeSet<String> serviceIds = new TreeSet<String>();
    StringBuffer operationPolicies = new StringBuffer(1024);
    sb.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
    sb.append("<beans xmlns=\"http://www.springframework.org/schema/beans\"\n");
    sb.append("\txmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" \n");
    sb.append("\txmlns:int-security=\"http://www.springframework.org/schema/integration/security\"\n");
    sb.append("\txsi:schemaLocation=\"\n");
    String springVersion = this.project.getProperties().getProperty("org.springframework.version.xsd.version");
    sb.append(
            "\t\thttp://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans");
    if (springVersion != null) {
        sb.append("-").append(springVersion);
    }
    sb.append(".xsd\n");
    sb.append(
            "\t\thttp://www.springframework.org/schema/integration/security http://www.springframework.org/schema/integration/security/spring-integration-security.xsd\n");
    sb.append("\">\n");
    sb.append(Util.getGeneratedAtXmlComment(this.getClass(), this.eipVersion));
    sb.append("\n");
    sb.append("\t<!-- Authorization -->\n");
    sb.append("\t<!-- Role voter. -->\n");

    sb.append("\t<bean id=\"eipRoleVoter\" class=\"com.qpark.eip.core.spring.security.EipRoleVoter\"");
    if (this.limitedAccessDataProviderBeanName != null
            && this.limitedAccessDataProviderBeanName.trim().length() > 0) {
        sb.append(">\n");
        sb.append("\t\t<property name=\"eipLimitedAccessDataProvider\" ref=\"");
        sb.append(this.limitedAccessDataProviderBeanName);
        sb.append("\"/>\n");
        sb.append("\t</bean>\n");
    } else {
        sb.append("/>\n");
    }

    sb.append(
            "\t<bean id=\"eipAccessDecisionManager\" class=\"com.qpark.eip.core.spring.security.EipAffirmativeBased\">\n");
    sb.append("\t\t<constructor-arg>\n");
    sb.append("\t\t\t<list>\n");
    sb.append("\t\t\t\t<ref bean=\"eipRoleVoter\"/>\n");
    sb.append("\t\t\t</list>\n");
    sb.append("\t\t</constructor-arg>\n");
    sb.append("\t</bean>\n");
    sb.append("\n");
    sb.append("\t<!-- \n");
    sb.append("\tThe pattern (java.util.regexp.Pattern) of the access policies regarding \n");
    sb.append("\tthe channel names used in the spring integration configurations.\n");
    sb.append("\n");
    sb.append("\tEach user gets the role ROLE_ANONYMOUS. If the user has an other role \n");
    sb.append("\tthen ROLE_ANONYMOUS the role ROLE_COMMON need to be added too.\n");
    sb.append("\n");
    sb.append("\tThe user needs only one of the listed roles. All access-policy\n");
    sb.append("\twill be checked until the user has a sufficient role or is\n");
    sb.append("\tnot authorisized to do the operation.");
    sb.append("\t-->\n");
    sb.append("\t<int-security:secured-channels \n");
    sb.append("\t\taccess-decision-manager=\"eipAccessDecisionManager\"\n");
    sb.append("\t\tauthentication-manager=\"eipAuthenticationManager\">\n");
    sb.append(this.getAccessPolicyAdminAnonymous(annonyoums, "ROLE_ANONYMOUS"));
    sb.append(this.getAccessPolicyAdminAnonymous(admins, "ROLE_ADMIN"));
    sb.append(this.getAccessPolicyReadCreateUpdateDelete(read, "ROLE_READ"));
    sb.append(this.getAccessPolicyReadCreateUpdateDelete(create, "ROLE_CREATE"));
    sb.append(this.getAccessPolicyReadCreateUpdateDelete(update, "ROLE_UPDATE"));
    sb.append(this.getAccessPolicyReadCreateUpdateDelete(delete, "ROLE_DELETE"));

    sb.append("\t\t<!-- Service wide role securement -->\n");

    operationPolicies.append("\t\t<!-- Operation web service operation channels -->\n");
    for (ElementType element : xsds.getElementTypes()) {
        if (element.isRequest()) {
            ElementType elementResponse = XsdsUtil.findResponse(element, xsds.getElementTypes(), xsds);
            if (elementResponse != null) {
                ComplexType ctResponse = new ComplexType(elementResponse.getElement().getType(), xsds);
                if (ctResponse != null && !ctResponse.isSimpleType() && !ctResponse.isPrimitiveType()) {
                    String serviceRole = new StringBuffer(32).append("ROLE_")
                            .append(element.getServiceId().toUpperCase()).toString();
                    String serviceVersionLessRole = serviceRole;
                    if (serviceRole.indexOf(".V") > 0) {
                        serviceVersionLessRole = serviceRole.substring(0, serviceRole.indexOf(".V"));
                    }
                    String operationRole = new StringBuffer(64).append(serviceRole).append("_")
                            .append(element.getOperationName().toUpperCase()).toString();
                    if (!serviceIds.contains(element.getServiceId())) {
                        serviceIds.add(element.getServiceId());
                        this.roleList.add(serviceRole);
                        sb.append("\t\t<int-security:access-policy pattern=\"");
                        sb.append(element.getChannelSecurityPatternService());
                        sb.append("\" send-access=\"");
                        sb.append(serviceRole);
                        if (!serviceRole.equals(serviceVersionLessRole)) {
                            sb.append(", ");
                            sb.append(serviceVersionLessRole);
                            this.roleList.add(serviceVersionLessRole);
                        }
                        sb.append(", ROLE_ALL_OPERATIONS");
                        sb.append("\" receive-access=\"");
                        sb.append(serviceRole);
                        if (!serviceRole.equals(serviceVersionLessRole)) {
                            sb.append(", ");
                            sb.append(serviceVersionLessRole);
                            this.roleList.add(serviceVersionLessRole);
                        }
                        sb.append(", ROLE_ALL_OPERATIONS");
                        sb.append("\" />\n");
                    }

                    operationPolicies.append("\t\t<int-security:access-policy pattern=\"");
                    operationPolicies.append(element.getChannelSecurityPatternOperation());
                    operationPolicies.append("\" send-access=\"");
                    operationPolicies.append(operationRole);
                    operationPolicies.append(", ");
                    operationPolicies.append(serviceRole);
                    operationPolicies.append(", ROLE_ALL_OPERATIONS");
                    operationPolicies.append("\" receive-access=\"");
                    operationPolicies.append(operationRole);
                    operationPolicies.append(", ");
                    operationPolicies.append(serviceRole);
                    operationPolicies.append(", ROLE_ALL_OPERATIONS");
                    operationPolicies.append("\" />\n");
                    this.roleList.add(operationRole);
                }
            }
        }
    }

    sb.append(operationPolicies);

    sb.append("\t</int-security:secured-channels>\n");
    sb.append("</beans>\n");
    return sb.toString();
}
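
The serviceIds TreeSet above ensures the service-wide access-policy element is written only once per service: contains is checked before the block is emitted, and the id is added immediately afterwards. A minimal standalone sketch of that check-then-add pattern with made-up service ids (TreeSet.add returning false would express the same test in a single call):

import java.util.TreeSet;

// Hypothetical service ids; the contains() guard keeps the per-service output
// from being emitted more than once, while the per-operation output always runs.
public class ServiceIdDedup {
    public static void main(String[] args) {
        String[] operations = { "library.v1", "library.v1", "billing.v2" };
        TreeSet<String> serviceIds = new TreeSet<String>();
        for (String serviceId : operations) {
            if (!serviceIds.contains(serviceId)) {
                serviceIds.add(serviceId);
                System.out.println("emit service-wide policy for " + serviceId);
            }
            System.out.println("emit operation policy for " + serviceId);
        }
    }
}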

From source file:net.spfbl.http.ServerHTTP.java

private static void writeBlockFormHTML(Locale locale, StringBuilder builder, TreeSet<String> tokenSet,
        TreeSet<String> selectionSet) throws ProcessException {
    if (!tokenSet.isEmpty()) {
        if (locale.getLanguage().toLowerCase().equals("pt")) {
            buildText(builder,
                    "Se você deseja não receber mais mensagens desta origem no futuro, selecione os identificadores que devem ser bloqueados definitivamente:");
        } else {
            buildText(builder,
                    "If you want to stop receiving messages from the source in the future, select identifiers that should definitely be blocked:");
        }
        builder.append("    <form method=\"POST\">\n");
        for (String identifier : tokenSet) {
            builder.append("        <input type=\"checkbox\" name=\"identifier\" value=\"");
            builder.append(identifier);
            if (selectionSet.contains(identifier)) {
                builder.append("\" checked>");
            } else {
                builder.append("\">");
            }
            builder.append(identifier);
            builder.append("<br>\n");
        }
        if (Core.hasRecaptchaKeys()) {
            if (locale.getLanguage().toLowerCase().equals("pt")) {
                buildText(builder,
                        "Para que sua solicitao seja aceita, resolva o desafio reCAPTCHA abaixo.");
            } else {
                buildText(builder, "For your request to be accepted, please solve the reCAPTCHA below.");
            }
        }
        builder.append("      <div id=\"divcaptcha\">\n");
        if (Core.hasRecaptchaKeys()) {
            String recaptchaKeySite = Core.getRecaptchaKeySite();
            String recaptchaKeySecret = Core.getRecaptchaKeySecret();
            ReCaptcha captcha = ReCaptchaFactory.newReCaptcha(recaptchaKeySite, recaptchaKeySecret, false);
            builder.append("      ");
            builder.append(captcha.createRecaptchaHtml(null, null).replace("\r", ""));
            // new reCAPTCHA
            //            builder.append("      <div class=\"g-recaptcha\" data-sitekey=\"");
            //            builder.append(recaptchaKeySite);
            //            builder.append("\"></div>\n");
        }
        if (locale.getLanguage().toLowerCase().equals("pt")) {
            builder.append("        <input id=\"btngo\" type=\"submit\" value=\"Bloquear\">\n");
        } else {
            builder.append("        <input id=\"btngo\" type=\"submit\" value=\"Block\">\n");
        }
        builder.append("      </div>\n");
        builder.append("    </form>\n");
    }
}
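
In writeBlockFormHTML, selectionSet.contains(identifier) decides whether a checkbox is rendered pre-checked, while iterating the TreeSet keeps the identifiers in sorted order. A minimal standalone sketch of that rendering loop with made-up identifiers:

import java.util.TreeSet;

// Made-up identifiers; mirrors the checkbox loop above: every token gets a
// checkbox, and tokens present in the selection set are marked as checked.
public class CheckboxForm {
    public static void main(String[] args) {
        TreeSet<String> tokenSet = new TreeSet<String>();
        tokenSet.add("example.com");
        tokenSet.add("198.51.100.1");

        TreeSet<String> selectionSet = new TreeSet<String>();
        selectionSet.add("example.com");

        StringBuilder builder = new StringBuilder();
        for (String identifier : tokenSet) {
            builder.append("<input type=\"checkbox\" value=\"").append(identifier);
            builder.append(selectionSet.contains(identifier) ? "\" checked>" : "\">");
            builder.append(identifier).append("<br>\n");
        }
        System.out.print(builder);
    }
}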

From source file:net.spfbl.spf.SPF.java

/**
 * Returns the qualifier for an SPF query.
 *
 * @param ip the IP address to be checked.
 * @param sender the envelope sender address.
 * @param helo the HELO hostname presented by the client.
 * @param deep the traversal depth within the SPF tree.
 * @param hostVisitedSet the set of hosts already visited.
 * @param logList the list collecting log entries for the evaluation.
 * @return the qualifier of the SPF query.
 * @throws ProcessException if processing fails.
 */
private Qualifier getQualifier(String ip, String sender, String helo, int deep, TreeSet<String> hostVisitedSet,
        LinkedList<String> logList) throws ProcessException {
    if (deep > 10) {
        return null; // Avoids an excessive number of lookups.
    } else if (hostVisitedSet.contains(getHostname())) {
        return null; // Avoids an infinite loop.
    } else if (mechanismList == null) {
        throw new ProcessException("ERROR: HOST NOT FOUND");
    } else {
        boolean hostNotFound = false;
        hostVisitedSet.add(getHostname());
        for (Mechanism mechanism : mechanismList) {
            if (mechanism instanceof MechanismInclude) {
                try {
                    MechanismInclude include = (MechanismInclude) mechanism;
                    Qualifier qualifier = include.getQualifierSPF(ip, sender, helo, deep + 1, hostVisitedSet,
                            logList);
                    if (qualifier == null) {
                        // No qualifier was defined,
                        // so continue the search.
                    } else {
                        return qualifier;
                    }
                } catch (ProcessException ex) {
                    if (ex.getMessage().equals("ERROR: HOST NOT FOUND")) {
                        // The include could not be resolved.
                        // The referenced hostname does not exist.
                        // Keep checking the remaining
                        // mechanisms before committing to the error.
                        hostNotFound = true;
                    } else {
                        throw ex;
                    }
                }
            } else if (mechanism instanceof MechanismPTR) {
                if (mechanism.match(ip, sender, helo)) {
                    // The PTR mechanism is only processed
                    // at the first level of the tree.
                    Qualifier qualifier = mechanism.getQualifier();
                    logMechanism(mechanism, qualifier, logList);
                    return qualifier;
                } else {
                    logMechanism(mechanism, null, logList);
                }
            } else if (mechanism.match(ip, sender, helo)) {
                Qualifier qualifier = mechanism.getQualifier();
                logMechanism(mechanism, qualifier, logList);
                return qualifier;
            } else {
                logMechanism(mechanism, null, logList);
            }
        }
        if (redirect != null) {
            SPF spf = CacheSPF.get(redirect);
            if (spf == null) {
                logRedirect(redirect, "NOT FOUND", logList);
                return null;
            } else {
                Qualifier qualifier = spf.getQualifier(ip, sender, helo, 0, hostVisitedSet, logList);
                logRedirect(redirect, qualifier, logList);
                return qualifier;
            }
        } else if (error || hostNotFound) {
            // New SPF interpretation for syntax errors.
            // On error, return SOFTFAIL.
            logError(Qualifier.SOFTFAIL, logList);
            return Qualifier.SOFTFAIL;
        } else if (deep > 0) {
            // The all mechanism should only be
            // processed at the first level of the tree.
            return null;
        } else {
            // Return the qualifier of the all mechanism.
            // It may be null if the record has no all mechanism.
            logAll(all, logList);
            return all;
        }
    }
}
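
The hostVisitedSet check at the top of getQualifier is what keeps a circular chain of SPF include or redirect records from recursing forever: a host is only expanded if contains returns false, and it is added to the set before its mechanisms are evaluated. A minimal standalone sketch of that visited-set guard, with an illustrative hostname and depth limit:

import java.util.TreeSet;

// Illustrative only: a record that (directly or indirectly) includes itself
// stops at the contains() check instead of looping until the depth limit.
public class VisitedHostGuard {
    static void resolve(String host, TreeSet<String> visited, int depth) {
        if (depth > 10 || visited.contains(host)) {
            return; // depth limit reached or host already visited
        }
        visited.add(host);
        System.out.println("evaluating " + host);
        resolve(host, visited, depth + 1); // simulate an include of the same host
    }

    public static void main(String[] args) {
        resolve("example.org", new TreeSet<String>(), 0);
    }
}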

From source file:org.commoncrawl.service.listcrawler.CrawlList.java

/**
 * Initialize a new CrawlList object from a given source file of urls
 *
 * @param manager - reference to the crawl history log manager
 * @param listId - the id to assign to the new list
 * @param sourceURLFile - the file containing the list of urls that we should add to this list ...
 * @param refreshInterval - the refresh interval to record in the list metadata
 * @throws IOException
 */
public CrawlList(CrawlHistoryStorage manager, long listId, File sourceURLFile, int refreshInterval)
        throws IOException {

    _manager = manager;

    _listState = LoadState.REALLY_LOADING;

    // initialize a new list id 
    _listId = listId;

    LOG.info("*** LIST:" + getListId() + " LOADING FROM SOURCE FILE:" + sourceURLFile.getAbsolutePath());

    //establish file names 
    initializeListFileNames();

    sourceURLFile.renameTo(_listURLDataFile);

    FileInputStream urlInputStream = new FileInputStream(_listURLDataFile);

    try {

        // set we will use to hold all fingerprints generated 
        TreeSet<URLFP> urlSet = new TreeSet<URLFP>();

        // create temp files ...
        File spillOutputFile = File.createTempFile("spillOut", Long.toString(_listId));

        // create mergesortspillwriter 
        SequenceFileSpillWriter<URLFP, ProxyCrawlHistoryItem> spillwriter = new SequenceFileSpillWriter<URLFP, ProxyCrawlHistoryItem>(
                FileSystem.getLocal(CrawlEnvironment.getHadoopConfig()), CrawlEnvironment.getHadoopConfig(),
                new Path(spillOutputFile.getAbsolutePath()), URLFP.class, ProxyCrawlHistoryItem.class, null,
                false);

        try {

            MergeSortSpillWriter<URLFP, ProxyCrawlHistoryItem> merger = new MergeSortSpillWriter<URLFP, ProxyCrawlHistoryItem>(
                    CrawlEnvironment.getHadoopConfig(), spillwriter,
                    FileSystem.getLocal(CrawlEnvironment.getHadoopConfig()),
                    new Path(manager.getLocalDataDir().getAbsolutePath()), null,
                    new RawKeyValueComparator<URLFP, ProxyCrawlHistoryItem>() {

                        DataInputBuffer _key1Buffer = new DataInputBuffer();
                        DataInputBuffer _key2Buffer = new DataInputBuffer();

                        @Override
                        public int compareRaw(byte[] key1Data, int key1Offset, int key1Length, byte[] key2Data,
                                int key2Offset, int key2Length, byte[] value1Data, int value1Offset,
                                int value1Length, byte[] value2Data, int value2Offset, int value2Length)
                                throws IOException {

                            _key1Buffer.reset(key1Data, key1Offset, key1Length);
                            _key2Buffer.reset(key2Data, key2Offset, key2Length);

                            _key1Buffer.skip(2); // skip version and 1 byte id
                            _key2Buffer.skip(2); // skip version and 1 byte id

                            int domainHash1 = WritableUtils.readVInt(_key1Buffer);
                            int domainHash2 = WritableUtils.readVInt(_key2Buffer);

                            _key1Buffer.skip(1); // skip 1 byte id 
                            _key2Buffer.skip(1); // skip 1 byte id 

                            long fingerprint1 = WritableUtils.readVLong(_key1Buffer);
                            long fingerprint2 = WritableUtils.readVLong(_key2Buffer);

                            int result = ((Integer) domainHash1).compareTo(domainHash2);

                            if (result == 0) {
                                result = ((Long) fingerprint1).compareTo(fingerprint2);
                            }

                            return result;
                        }

                        @Override
                        public int compare(URLFP key1, ProxyCrawlHistoryItem value1, URLFP key2,
                                ProxyCrawlHistoryItem value2) {
                            return key1.compareTo(key2);
                        }
                    }, URLFP.class, ProxyCrawlHistoryItem.class, false, null);

            try {

                LOG.info("*** LIST:" + getListId() + " Starting Scan of URLS In List");
                BufferedReader reader = new BufferedReader(
                        new InputStreamReader(urlInputStream, Charset.forName("UTF-8")));

                String line = null;
                int lineNumber = 0;
                ProxyCrawlHistoryItem item = new ProxyCrawlHistoryItem();
                while ((line = reader.readLine()) != null) {
                    ++lineNumber;
                    if (line.length() != 0 && !line.startsWith("#")) {
                        URLFP fingerprint = URLUtils.getURLFPFromURL(line, true);

                        if (fingerprint != null) {

                            if (!urlSet.contains(fingerprint)) {
                                // and add fingerprint to set 
                                urlSet.add(fingerprint);
                                // initialize item 
                                item.clear();
                                item.setOriginalURL(line);
                                // and spill to merger / sorter .. 
                                merger.spillRecord(fingerprint, item);
                            }
                        } else {
                            LOG.error("*** LIST:" + getListId() + " Invalid URL Encounered at Line:"
                                    + lineNumber + " URL" + line);
                        }
                    }
                }
                LOG.info("*** LIST:" + getListId() + " Completed Scan of:" + urlSet.size() + " URLS");
            } finally {
                merger.close();
            }
        } finally {
            if (spillwriter != null)
                spillwriter.close();
        }
        LOG.info("*** LIST:" + getListId() + " Generating BloomFilter for:" + urlSet.size() + " keys");
        // generate bloom filter ...  
        _bloomFilter = new URLFPBloomFilter(urlSet.size(), 7, 10);

        for (URLFP fingerprint : urlSet) {
            _bloomFilter.add(fingerprint);
        }
        LOG.info("*** LIST:" + getListId() + " Serializing BloomFilter");
        // serialize it
        FileOutputStream bloomFilterStream = new FileOutputStream(_bloomFilterData);
        try {
            _bloomFilter.serialize(bloomFilterStream);
        } finally {
            bloomFilterStream.flush();
            bloomFilterStream.close();
        }

        LOG.info("*** LIST:" + getListId() + " Starting Read of Merged Sequence File:" + spillOutputFile);
        // now initialize value map and string maps based on output sequence file ... 
        SequenceFile.Reader reader = new SequenceFile.Reader(
                FileSystem.getLocal(CrawlEnvironment.getHadoopConfig()),
                new Path(spillOutputFile.getAbsolutePath()), CrawlEnvironment.getHadoopConfig());

        LOG.info("*** LIST:" + getListId() + " PRE-ALLOCATING FIXED DATA BUFFER OF SIZE:"
                + (urlSet.size() * OnDiskCrawlHistoryItem.ON_DISK_SIZE));
        // OK, Allocate room for fixed data file upfront 
        DataOutputBuffer valueStream = new DataOutputBuffer(
                urlSet.size() * OnDiskCrawlHistoryItem.ON_DISK_SIZE);
        LOG.info("*** LIST:" + getListId() + " ALLOCATION SUCCEEDED");

        try {

            //DataOutputStream valueStream = new DataOutputStream(new FileOutputStream(_fixedDataFile));
            RandomAccessFile stringsStream = new RandomAccessFile(_variableDataFile, "rw");

            try {
                URLFP urlFP = new URLFP();
                ProxyCrawlHistoryItem item = new ProxyCrawlHistoryItem();

                // read fingerprints ... 
                while (reader.next(urlFP, item)) {
                    // write out fixed data structure and strings 
                    writeInitialOnDiskItem(urlFP, item, valueStream, stringsStream);
                }
            } finally {
                //valueStream.flush();
                //valueStream.close();
                stringsStream.close();
            }
        } finally {
            reader.close();
        }
        LOG.info("*** LIST:" + getListId() + " Finished Writing Initial Values to Disk");

        LOG.info("*** LIST:" + getListId() + " FIXED DATA BUFFER OF SIZE:" + valueStream.getLength()
                + " EXCEPECTED SIZE:" + (urlSet.size() * OnDiskCrawlHistoryItem.ON_DISK_SIZE));
        if (valueStream.getLength() != (urlSet.size() * OnDiskCrawlHistoryItem.ON_DISK_SIZE)) {
            throw new IOException("Final FixedItemData Buffer Size:" + valueStream.getLength()
                    + " != URLSetSize:" + (urlSet.size() * OnDiskCrawlHistoryItem.ON_DISK_SIZE));
        }
        // initialize temp data buffer variables 
        _tempFixedDataBuffer = valueStream.getData();
        _tempFixedDataBufferSize = valueStream.getLength();

        // update metadata 
        _metadata.setRefreshInterval(refreshInterval);
        _metadata.setUrlCount(urlSet.size());

        // setup version 
        _metadata.setVersion(1);

        // and write to disk 
        writeMetadataToDisk();

        // mark state as loaded ... 
        _listState = LoadState.LOADED;

        LOG.info("*** LIST:" + getListId() + " SYNCING");
        // reconcile with history log
        _manager.syncList(this.getListId(), urlSet, this);
        LOG.info("*** LIST:" + getListId() + " SYNC COMPLETE");

        // write metadata to disk again
        writeMetadataToDisk();

        LOG.info("*** LIST:" + getListId() + " FLUSHING FIXED DATA");

        // and finally flush fixed data to disk 
        FileOutputStream finalDataStream = new FileOutputStream(_fixedDataFile);

        try {
            synchronized (this) {
                int blockSize = 1 << 20;
                long bytesCopied = 0;
                for (int offset = 0; offset < _tempFixedDataBufferSize; offset += blockSize) {
                    int bytesToCopy = Math.min(blockSize, _tempFixedDataBufferSize - offset);
                    finalDataStream.write(_tempFixedDataBuffer, offset, bytesToCopy);
                    bytesCopied += bytesToCopy;
                }
                // validate bytes copied 
                if (bytesCopied != _tempFixedDataBufferSize) {
                    throw new IOException("Buffer Size:" + _tempFixedDataBufferSize
                            + " Does not Match BytesCopied:" + bytesCopied);
                }

                // ok release the buffer 
                _tempFixedDataBuffer = null;
                _tempFixedDataBufferSize = 0;

                LOG.info("*** LIST:" + getListId() + " FIXED DATA FLUSH COMPLETE");
            }

        } finally {
            finalDataStream.flush();
            finalDataStream.close();
        }

        // load sub domain metadata from disk ... 
        loadSubDomainMetadataFromDisk();

    } catch (IOException e) {
        LOG.error("*** LIST:" + getListId() + " Crawl List Initialization Failed With Exception:"
                + CCStringUtils.stringifyException(e));

        _fixedDataFile.delete();
        _variableDataFile.delete();
        _bloomFilterData.delete();

        _listState = LoadState.ERROR;

        throw e;
    } finally {
        urlInputStream.close();
    }

}
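
The urlSet TreeSet in the constructor above serves as the de-duplication step: a fingerprint is spilled to the sorter only if contains reports it has not been seen, and it is added to the set in the same branch. A minimal standalone sketch of that pattern, with plain strings standing in for URLFP fingerprints:

import java.util.TreeSet;

// Plain strings instead of URLFP fingerprints; duplicate input lines produce a
// single entry because contains() is consulted before each add.
public class UrlDedup {
    public static void main(String[] args) {
        String[] lines = { "http://a.example/", "http://b.example/", "http://a.example/" };
        TreeSet<String> urlSet = new TreeSet<String>();
        for (String line : lines) {
            if (!urlSet.contains(line)) {
                urlSet.add(line); // first sighting: record and process it
                System.out.println("spill " + line);
            }
        }
        System.out.println(urlSet.size() + " unique urls");
    }
}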