Example usage for java.util Set containsAll

Introduction

On this page you can find example usage for java.util Set containsAll, collected from open-source projects.

Prototype

boolean containsAll(Collection<?> c);

Document

Returns true if this set contains all of the elements of the specified collection.
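
As a quick orientation before the project examples, here is a minimal, self-contained sketch (the element values are made up for illustration): containsAll returns true exactly when every element of the argument collection is present in the set.

import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class ContainsAllDemo {
    public static void main(String[] args) {
        Set<String> set = new HashSet<>(Arrays.asList("a", "b", "c"));

        // true: "a" and "c" are both present
        System.out.println(set.containsAll(Arrays.asList("a", "c")));

        // false: "d" is not in the set
        System.out.println(set.containsAll(Arrays.asList("a", "d")));

        // true: every set trivially contains all elements of an empty collection
        System.out.println(set.containsAll(Collections.emptyList()));

        // Checking containsAll in both directions is a common idiom for an
        // order-insensitive "same elements" comparison; several of the
        // examples below use it this way.
        List<String> other = Arrays.asList("c", "b", "a");
        System.out.println(set.containsAll(other) && new HashSet<>(other).containsAll(set));
    }
}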

Usage

From source file:org.alfresco.repo.node.NodeServiceTest.java

/**
 * Ensure that nodes cannot be linked to deleted nodes.
 * <p/>
 * Conditions that <i>might</i> cause this are:<br/>
 * <ul>
 *   <li>Node created within a parent node that is being deleted</li>
 *   <li>The node cache is temporarily incorrect when the association is made</li>
 * </ul>
 * <p/>
 * <a href="https://issues.alfresco.com/jira/browse/ALF-12358">Concurrency: Possible to create association references to deleted nodes</a>
 */
@Test
public void testConcurrentLinkToDeletedNode() throws Throwable {
    // First find any broken links to start with
    final NodeEntity params = new NodeEntity();
    params.setId(0L);
    params.setTypeQNameId(deletedTypeQNameId);

    // Find all 'at risk' nodes before the test
    final List<Long> attachedToDeletedIdsBefore = getChildNodesWithDeletedParentNode(params, 0);
    logger.debug("Found child nodes with deleted parent node (before): " + attachedToDeletedIdsBefore);
    final List<Long> orphanedNodeIdsBefore = getChildNodesWithNoParentNode(params, 0);
    logger.debug("Found child nodes without parent (before): " + orphanedNodeIdsBefore);

    final NodeRef[] nodeRefs = new NodeRef[10];
    final NodeRef workspaceRootNodeRef = nodeService.getRootNode(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
    buildNodeHierarchy(workspaceRootNodeRef, nodeRefs);

    // Fire off a bunch of threads that create random nodes within the hierarchy created above
    final RetryingTransactionCallback<NodeRef> createChildCallback = new RetryingTransactionCallback<NodeRef>() {
        @Override
        public NodeRef execute() throws Throwable {
            String randomName = this.getClass().getName() + "-" + GUID.generate();
            QName randomQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, randomName);
            Map<QName, Serializable> props = new HashMap<QName, Serializable>();
            props.put(ContentModel.PROP_NAME, randomName);
            // Choose a random parent node from the hierarchy
            int random = new Random().nextInt(10);
            return nodeService.createNode(nodeRefs[random], ContentModel.ASSOC_CONTAINS, randomQName,
                    ContentModel.TYPE_CONTAINER, props).getChildRef();
        }
    };
    final Runnable[] runnables = new Runnable[20];
    final List<NodeRef> nodesAtRisk = Collections.synchronizedList(new ArrayList<NodeRef>(100));

    final List<Thread> threads = new ArrayList<Thread>();
    for (int i = 0; i < runnables.length; i++) {
        runnables[i] = new Runnable() {
            @Override
            public synchronized void run() {
                AuthenticationUtil.setRunAsUserSystem();
                try {
                    wait(1000L); // A short wait before we kick off (should be notified)
                    for (int i = 0; i < 200; i++) {
                        NodeRef nodeRef = txnService.getRetryingTransactionHelper()
                                .doInTransaction(createChildCallback);
                        // Store the node for later checks
                        nodesAtRisk.add(nodeRef);
                        // Wait to give other threads a chance
                        wait(1L);
                    }
                } catch (Throwable e) {
                    // This is expected i.e. we'll just keep doing it until failure
                    logger.debug("Got exception adding child node: ", e);
                }
            }
        };
        Thread thread = new Thread(runnables[i]);
        threads.add(thread);
        thread.start();
    }

    final RetryingTransactionCallback<NodeRef> deleteWithNestedCallback = new RetryingTransactionCallback<NodeRef>() {
        @Override
        public NodeRef execute() throws Throwable {
            // Notify the threads to kick off
            for (int i = 0; i < runnables.length; i++) {
                // Notify the threads to stop waiting
                synchronized (runnables[i]) {
                    runnables[i].notify();
                }
                // Short wait to give thread a chance to run
                synchronized (this) {
                    try {
                        wait(10L);
                    } catch (Throwable e) {
                    }
                }
            }
            // Delete the parent node
            nodeService.deleteNode(nodeRefs[0]);
            return null;
        }
    };
    txnService.getRetryingTransactionHelper().doInTransaction(deleteWithNestedCallback);

    // Wait for the threads to finish
    for (Thread t : threads) {
        t.join();
    }

    logger.info("All threads should have finished");

    // Find all 'at risk' nodes after the test
    final List<Long> attachedToDeletedIdsAfter = getChildNodesWithDeletedParentNode(params,
            attachedToDeletedIdsBefore.size());
    logger.debug("Found child nodes with deleted parent node (after): " + attachedToDeletedIdsAfter);
    final List<Long> orphanedNodeIdsAfter = getChildNodesWithNoParentNode(params, orphanedNodeIdsBefore.size());
    logger.debug("Found child nodes without parent (after): " + attachedToDeletedIdsAfter);
    // Now need to identify the problem nodes

    if (attachedToDeletedIdsAfter.isEmpty() && orphanedNodeIdsAfter.isEmpty()) {
        // nothing more to test
        return;
    }

    // We are already in a failed state, but check if the orphan cleanup works

    // workaround recovery: force collection of any orphan nodes (ALF-12358 + ALF-13066)
    for (final NodeRef nodeRef : nodesAtRisk) {
        txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
            @Override
            public Void execute() throws Throwable {
                if (nodeService.exists(nodeRef)) {
                    nodeService.getPath(nodeRef); // ignore return
                }
                return null;
            }
        });
    }

    // Find all 'at risk' nodes after the test
    final List<Long> attachedToDeletedIdsCleaned = getChildNodesWithDeletedParentNode(params,
            attachedToDeletedIdsBefore.size());
    logger.debug("Found child nodes with deleted parent node (cleaned): " + attachedToDeletedIdsAfter);
    final List<Long> orphanedNodeIdsCleaned = getChildNodesWithNoParentNode(params,
            orphanedNodeIdsBefore.size());
    logger.debug("Found child nodes without parent (cleaned): " + attachedToDeletedIdsAfter);

    // Check
    assertTrue("Expected full cleanup of nodes referencing deleted nodes: " + attachedToDeletedIdsCleaned,
            attachedToDeletedIdsCleaned.isEmpty());
    assertTrue("Expected full cleanup of nodes referencing without parents: " + orphanedNodeIdsCleaned,
            orphanedNodeIdsCleaned.isEmpty());

    // check lost_found ...
    List<NodeRef> lostAndFoundNodeRefs = getLostAndFoundNodes();
    assertFalse(lostAndFoundNodeRefs.isEmpty());

    Set<Long> lostAndFoundNodeIds = new HashSet<Long>(lostAndFoundNodeRefs.size());
    for (NodeRef nodeRef : lostAndFoundNodeRefs) {
        lostAndFoundNodeIds.add((Long) nodeService.getProperty(nodeRef, ContentModel.PROP_NODE_DBID));
    }

    assertTrue("Nodes linked to deleted parent nodes not handled.",
            lostAndFoundNodeIds.containsAll(attachedToDeletedIdsAfter));
    assertTrue("Orphaned nodes not all handled.", lostAndFoundNodeIds.containsAll(orphanedNodeIdsAfter));

    // Now fail because we allowed the situation in the first place
    fail("We allowed orphaned nodes or nodes with deleted parents.");
}
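
Note how the final assertions use containsAll: the set of node IDs recovered into lost_found must cover both the nodes still attached to deleted parents and the fully orphaned nodes found after the concurrent run.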

From source file:org.apache.kylin.cube.model.CubeDesc.java

public void validateAggregationGroups() {
    int index = 0;

    for (AggregationGroup agg : getAggregationGroups()) {
        if (agg.getIncludes() == null) {
            logger.error("Aggregation group " + index + " 'includes' field not set");
            throw new IllegalStateException("Aggregation group " + index + " includes field not set");
        }

        if (agg.getSelectRule() == null) {
            logger.error("Aggregation group " + index + " 'select_rule' field not set");
            throw new IllegalStateException("Aggregation group " + index + " select rule field not set");
        }

        Set<String> includeDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        getDims(includeDims, agg.getIncludes());

        Set<String> mandatoryDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        getDims(mandatoryDims, agg.getSelectRule().mandatoryDims);

        ArrayList<Set<String>> hierarchyDimsList = Lists.newArrayList();
        Set<String> hierarchyDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        getDims(hierarchyDimsList, hierarchyDims, agg.getSelectRule().hierarchyDims);

        ArrayList<Set<String>> jointDimsList = Lists.newArrayList();
        Set<String> jointDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        getDims(jointDimsList, jointDims, agg.getSelectRule().jointDims);

        if (!includeDims.containsAll(mandatoryDims) || !includeDims.containsAll(hierarchyDims)
                || !includeDims.containsAll(jointDims)) {
            List<String> notIncluded = Lists.newArrayList();
            final Iterable<String> all = Iterables
                    .unmodifiableIterable(Iterables.concat(mandatoryDims, hierarchyDims, jointDims));
            for (String dim : all) {
                if (!includeDims.contains(dim)) {
                    notIncluded.add(dim);
                }
            }
            Collections.sort(notIncluded);
            logger.error("Aggregation group " + index
                    + " Include dimensions not containing all the used dimensions");
            throw new IllegalStateException("Aggregation group " + index
                    + " 'includes' dimensions not include all the dimensions:" + notIncluded.toString());
        }

        if (CollectionUtils.containsAny(mandatoryDims, hierarchyDims)) {
            logger.warn(
                    "Aggregation group " + index + " mandatory dimensions overlap with hierarchy dimensions: "
                            + ensureOrder(CollectionUtils.intersection(mandatoryDims, hierarchyDims)));
        }
        if (CollectionUtils.containsAny(mandatoryDims, jointDims)) {
            logger.warn("Aggregation group " + index + " mandatory dimensions overlap with joint dimensions: "
                    + ensureOrder(CollectionUtils.intersection(mandatoryDims, jointDims)));
        }

        if (CollectionUtils.containsAny(hierarchyDims, jointDims)) {
            logger.error("Aggregation group " + index + " hierarchy dimensions overlap with joint dimensions");
            throw new IllegalStateException(
                    "Aggregation group " + index + " hierarchy dimensions overlap with joint dimensions: "
                            + ensureOrder(CollectionUtils.intersection(hierarchyDims, jointDims)));
        }

        if (hasSingleOrNone(hierarchyDimsList)) {
            logger.error("Aggregation group " + index + " requires at least 2 dimensions in a hierarchy");
            throw new IllegalStateException(
                    "Aggregation group " + index + " requires at least 2 dimensions in a hierarchy.");
        }
        if (hasSingleOrNone(jointDimsList)) {
            logger.error("Aggregation group " + index + " requires at least 2 dimensions in a joint");
            throw new IllegalStateException(
                    "Aggregation group " + index + " requires at least 2 dimensions in a joint");
        }

        Pair<Boolean, Set<String>> overlap = hasOverlap(hierarchyDimsList, hierarchyDims);
        if (overlap.getFirst()) {
            logger.error("Aggregation group " + index + " a dimension exists in more than one hierarchy: "
                    + ensureOrder(overlap.getSecond()));
            throw new IllegalStateException("Aggregation group " + index
                    + " a dimension exists in more than one hierarchy: " + ensureOrder(overlap.getSecond()));
        }

        overlap = hasOverlap(jointDimsList, jointDims);
        if (overlap.getFirst()) {
            logger.error("Aggregation group " + index + " a dimension exists in more than one joint: "
                    + ensureOrder(overlap.getSecond()));
            throw new IllegalStateException("Aggregation group " + index
                    + " a dimension exists in more than one joint: " + ensureOrder(overlap.getSecond()));
        }

        index++;
    }
}

From source file:org.dataconservancy.packaging.tool.impl.generator.OrePackageModelBuilderTest.java

@Test
public void propertyExtractionTest() throws Exception {
    OrePackageModelBuilder builder = new OrePackageModelBuilder();
    File baseDir = tmpfolder.newFolder("destiny");
    PackageAssembler assembler = new FunctionalAssemblerMock(baseDir);

    PackageGenerationParameters params = new PackageGenerationParameters();
    params.addParam(GeneralParameterNames.CONTENT_ROOT_LOCATION, baseDir.getPath());
    builder.init(params);

    PackageArtifact project = newArtifact(ArtifactType.Project);
    PackageArtifact collection = newArtifact(ArtifactType.Collection);
    PackageArtifact dataItem = newArtifact(ArtifactType.DataItem);
    PackageArtifact dataFile = newArtifact(ArtifactType.DataFile);
    PackageArtifact metadataFile = newArtifact(ArtifactType.MetadataFile);

    addRandomPropertiesTo(project);

    addRel(DcsBoPackageOntology.IS_MEMBER_OF, project, collection);
    addRandomPropertiesTo(collection);

    addRel(DcsBoPackageOntology.IS_MEMBER_OF, collection, dataItem);
    addRandomPropertiesTo(dataItem);

    addRel(DcsBoPackageOntology.IS_METADATA_FOR, collection, metadataFile);
    addRandomPropertiesTo(metadataFile);

    Path rootPath = Paths.get(baseDir.getPath());

    File metaContent = new File(baseDir, "dataFileTest12.tst");
    Path metaContentPath = Paths.get(metaContent.getPath());

    IOUtils.write("test", new FileOutputStream(metaContent));
    metadataFile.setArtifactRef(rootPath.relativize(metaContentPath).toString());

    addRel(DcsBoPackageOntology.IS_MEMBER_OF, dataItem, dataFile);
    addRandomPropertiesTo(dataFile);
    File content = new File(baseDir, "cow");

    IOUtils.write("test", new FileOutputStream(content));
    Path contentPath = Paths.get(content.getPath());
    dataFile.setArtifactRef(rootPath.relativize(contentPath).toString());

    PackageDescription desc = new PackageDescription();
    desc.setPackageArtifacts(asSet(project, collection, dataItem, dataFile, metadataFile));
    desc.setRootArtifactRef(project.getArtifactRef());
    builder.buildModel(desc, assembler);

    ResourceMapExtractor extractor = new ResourceMapExtractor();

    Map<String, AttributeSet> attrs = extractor.execute(baseDir, builder.getPackageRemURI());

    Set<String> extractedValues = new HashSet<>();

    for (AttributeSet attSet : attrs.values()) {
        for (Attribute att : attSet.getAttributes()) {
            extractedValues.add(att.getValue());
        }
    }

    for (PackageArtifact artifact : asSet(project, collection, dataItem, dataFile, metadataFile)) {

        for (String key : artifact.getPropertyNames()) {
            if (artifact.hasSimpleProperty(key)) {
                for (String value : artifact.getSimplePropertyValues(key)) {
                    assertTrue("Missing value for property " + key, extractedValues.contains(value));
                }
            } else if (artifact.hasPropertyValueGroup(key)) {
                for (PropertyValueGroup group : artifact.getPropertyValueGroups(key)) {
                    for (String subKey : group.getSubPropertyNames()) {
                        for (String value : group.getSubPropertyValues(subKey)) {
                            assertTrue(String.format("Missing value for property %s->%s", key, subKey),
                                    extractedValues.contains(value));
                        }
                        assertTrue(extractedValues.containsAll(group.getSubPropertyValues(subKey)));
                    }
                }
            } else {
                Assert.fail("No value for property " + key);
            }
        }
    }
}

From source file:org.cloudfoundry.identity.uaa.oauth.UaaTokenServices.java

private void checkForApproval(String userid, String clientId, Collection<String> requestedScopes,
        Collection<String> autoApprovedScopes) {
    if (autoApprovedScopes.containsAll(requestedScopes)) {
        return;
    }
    Set<String> approvedScopes = new HashSet<>(autoApprovedScopes);

    // Search through the user's approvals for scopes that are requested,
    // not auto-approved, not expired, not DENIED, and not approved more
    // recently than when this access token was issued.
    List<Approval> approvals = approvalStore.getApprovals(userid, clientId);
    for (Approval approval : approvals) {
        if (requestedScopes.contains(approval.getScope()) && approval.getStatus() == ApprovalStatus.APPROVED) {
            if (!approval.isCurrentlyActive()) {
                logger.debug("Approval " + approval + " has expired. Need to re-approve.");
                throw new InvalidTokenException("Invalid token (approvals expired)");
            }
            approvedScopes.add(approval.getScope());
        }
    }

    // Only issue the token if all the requested scopes have unexpired
    // approvals made before the refresh token was issued, OR if those
    // scopes are auto-approved.
    if (!approvedScopes.containsAll(requestedScopes)) {
        logger.debug("All requested scopes " + requestedScopes + " were not approved " + approvedScopes);
        Set<String> unapprovedScopes = new HashSet<String>(requestedScopes);
        unapprovedScopes.removeAll(approvedScopes);
        throw new InvalidTokenException(
                "Invalid token (some requested scopes are not approved): " + unapprovedScopes);
    }
}
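
In this example containsAll appears twice: first as a fast path (if the auto-approved scopes already cover everything requested, no approval lookup is needed), and again after merging the user's stored approvals to decide whether every requested scope is covered before issuing the token.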

From source file:com.aurel.track.accessControl.AccessBeans.java

/**
 * Returns the fields restrictions for a person for a projects and issue
 * types By convention a field will be returned only if it is restricted
 * (either no read (and consequently no modify) or no modify right)
 *
 * @param personID
 * @param projectToIssueTypesMap
 * @param fieldIDs
 * @param edit
 *            whether we are in an editing (edit/create issue) mode or only
 *            read only mode (print issue, email sending)
 */
public static Map<Integer, Map<Integer, Map<Integer, Integer>>> getFieldRestrictions(Integer personID,
        Map<Integer, Set<Integer>> projectToIssueTypesMap, List<Integer> fieldIDs, boolean edit) {
    Map<Integer, Map<Integer, Map<Integer, Integer>>> resultMap = new HashMap<Integer, Map<Integer, Map<Integer, Integer>>>();
    if (personID == null) {
        // person can be null for example by POP3 email submission
        // the submitter gets an email by creating of the issue,
        // but then no restrictions are needed
        return resultMap;
    }
    Map<Integer, Set<Integer>> hiddenFields = new HashMap<Integer, Set<Integer>>();
    Map<Integer, Set<Integer>> readOnlyFields = new HashMap<Integer, Set<Integer>>();
    FieldsRestrictionsToRoleBL.getRestrictedFieldsToRoles(fieldIDs, hiddenFields, readOnlyFields);
    if (!hasFieldRestrictions(hiddenFields, readOnlyFields, fieldIDs, edit)) {
        return resultMap;
    }
    /**
     * Get the role for any project/issueType combinations
     */
    Map<Integer, Map<Integer, Set<Integer>>> rolesForProjectIssueType = getRolesForPersonInProjectsForIssueTypes(
            personID, projectToIssueTypesMap);
    for (Integer projectID : projectToIssueTypesMap.keySet()) {
        Map<Integer, Map<Integer, Integer>> projectRestrictions = new HashMap<Integer, Map<Integer, Integer>>();
        resultMap.put(projectID, projectRestrictions);
        Set<Integer> issueTypesSet = projectToIssueTypesMap.get(projectID);
        Map<Integer, Set<Integer>> projectRoles = rolesForProjectIssueType.get(projectID);
        if (projectRoles != null) {
            for (Integer issueTypeID : issueTypesSet) {
                Map<Integer, Integer> projectIssueTypeRestrictions = new HashMap<Integer, Integer>();
                projectRestrictions.put(issueTypeID, projectIssueTypeRestrictions);
                Set<Integer> rolesWithIssueType = projectRoles.get(issueTypeID);
                Set<Integer> rolesWithoutIssueType = projectRoles.get(NO_ISSUETYPE_RESTRICTION);
                Set<Integer> roles = new HashSet<Integer>();
                if (rolesWithIssueType != null && !rolesWithIssueType.isEmpty()) {
                    roles.addAll(rolesWithIssueType);
                }
                if (rolesWithoutIssueType != null && !rolesWithoutIssueType.isEmpty()) {
                    roles.addAll(rolesWithoutIssueType);
                }
                if (!roles.isEmpty()) {
                    if (edit) {
                        Set<Integer> allRolesWithHiddenOrReadOnlyFields = new HashSet<Integer>();
                        for (Set<Integer> rolesWithHiddenFields : hiddenFields.values()) {
                            allRolesWithHiddenOrReadOnlyFields.addAll(rolesWithHiddenFields);
                        }
                        for (Set<Integer> rolesWithReadOnlyFields : readOnlyFields.values()) {
                            allRolesWithHiddenOrReadOnlyFields.addAll(rolesWithReadOnlyFields);
                        }
                        if (allRolesWithHiddenOrReadOnlyFields.containsAll(roles)) {
                            addRestrictions(projectIssueTypeRestrictions, roles, hiddenFields,
                                    TRoleFieldBean.ACCESSFLAG.NOACCESS);
                            addRestrictions(projectIssueTypeRestrictions, roles, readOnlyFields,
                                    TRoleFieldBean.ACCESSFLAG.READ_ONLY);
                        } else {
                            LOGGER.debug("Role without restriction found for person " + personID + " project "
                                    + projectID + " and issueType " + issueTypeID);
                            return resultMap;
                        }
                    } else {
                        Set<Integer> allRolesWithHiddenFields = new HashSet<Integer>();
                        for (Set<Integer> rolesWithHiddenFields : hiddenFields.values()) {
                            allRolesWithHiddenFields.addAll(rolesWithHiddenFields);
                        }
                        if (allRolesWithHiddenFields.containsAll(roles)) {
                            addRestrictions(projectIssueTypeRestrictions, roles, hiddenFields,
                                    TRoleFieldBean.ACCESSFLAG.NOACCESS);
                        } else {
                            LOGGER.debug("Role without restriction found for person " + personID + " project "
                                    + projectID + " and issueType " + issueTypeID);
                            return resultMap;
                        }
                    }
                } else {
                    LOGGER.debug(
                            noRole + personID + " in project " + projectID + " and issueType " + issueTypeID);
                }
            }
        } else {
            LOGGER.debug(noRole + personID + " in project " + projectID);
        }
    }
    return resultMap;
}

From source file:jenkins.branch.MultiBranchProject.java

/**
 * Offers direct access to set the configurable list of branch sources <strong>while</strong> preserving
 * branch source id associations for sources that are otherwise unmodified
 *
 * @param sources the new sources.
 * @throws IOException if the sources could not be persisted to disk.
 */
public void setSourcesList(List<BranchSource> sources) throws IOException {
    if (this.sources.isEmpty() || sources.isEmpty()) {
        // easy
        this.sources.replaceBy(sources);
        return;
    }
    Set<String> oldIds = sourceIds(this.sources);
    Set<String> newIds = sourceIds(sources);
    if (oldIds.containsAll(newIds) || newIds.containsAll(oldIds)) {
        // either adding, removing, or updating without an id change
        this.sources.replaceBy(sources);
        return;
    }
    // Now we need to check if any of the new entries are effectively the same as an old entry that is being removed
    // we will store the ID changes in a map and process all the affected branches to update their sourceIds
    Map<String, String> changedIds = new HashMap<>();
    Set<String> additions = new HashSet<>(newIds);
    additions.removeAll(oldIds);
    Set<String> removals = new HashSet<>(oldIds);
    removals.removeAll(newIds);

    for (BranchSource addition : sources) {
        String additionId = addition.getSource().getId();
        if (!additions.contains(additionId)) {
            continue;
        }
        for (BranchSource removal : this.sources) {
            String removalId = removal.getSource().getId();
            if (!removals.contains(removalId)) {
                continue;
            }
            if (!equalButForId(removal.getSource(), addition.getSource())) {
                continue;
            }
            changedIds.put(removalId, additionId);
            // now take these two out of consideration
            removals.remove(removalId);
            additions.remove(additionId);
            break;
        }
    }
    this.sources.replaceBy(sources);
    BranchProjectFactory<P, R> factory = getProjectFactory();
    for (P item : getItems()) {
        if (!factory.isProject(item)) {
            continue;
        }
        Branch oldBranch = factory.getBranch(item);
        if (changedIds.containsKey(oldBranch.getSourceId())) {
            Branch newBranch = new Branch(changedIds.get(oldBranch.getSourceId()), oldBranch.getHead(),
                    oldBranch.getScm(), oldBranch.getProperties());
            newBranch.setActions(oldBranch.getActions());
            factory.setBranch(item, newBranch);
        }
    }
}
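
Here containsAll is checked in both directions with an OR: if either id set contains the other, the change is a pure addition, removal, or in-place update, so no existing branch needs its source id remapped and the method can take the early return.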

From source file:org.auraframework.impl.root.component.BaseComponentDefTest.java

/**
 * Test method for {@link BaseComponentDef#getModelDefDescriptors()}.
 */
@Test
public void testGetModelDefDescriptors() throws QuickFixException {
    DefDescriptor<T> grandParentDesc = addSourceAutoCleanup(getDefClass(),
            String.format(baseTag, "extensible='true'", ""));
    DefDescriptor<ModelDef> grandParentModelDesc = DefDescriptorImpl.getAssociateDescriptor(grandParentDesc,
            ModelDef.class, DefDescriptor.JAVASCRIPT_PREFIX);
    addSourceAutoCleanup(grandParentModelDesc, "{obj:{}}");

    DefDescriptor<T> parentDesc = addSourceAutoCleanup(getDefClass(),
            String.format(baseTag, String.format("extends='%s' extensible='true' model='js://test.jsModel'",
                    grandParentDesc.getDescriptorName()), ""));

    DefDescriptor<T> compDesc = addSourceAutoCleanup(getDefClass(),
            String.format(baseTag, String.format(
                    "extends='%s' model='java://org.auraframework.components.test.java.model.TestModel'",
                    parentDesc.getDescriptorName()), ""));

    List<DefDescriptor<ModelDef>> dds = definitionService.getDefinition(compDesc).getModelDefDescriptors();
    assertNotNull(dds);

    assertEquals(3, dds.size());
    List<String> names = Lists.transform(dds, new Function<DefDescriptor<?>, String>() {
        @Override
        public String apply(DefDescriptor<?> input) {
            return input.getQualifiedName();
        }
    });
    Set<String> expected = ImmutableSet.of("java://org.auraframework.components.test.java.model.TestModel",
            "js://test.jsModel", grandParentModelDesc.getQualifiedName());
    if (!names.containsAll(expected)) {
        fail("Missing expected models. Expected: " + expected + ", Actual: " + names);
    }
    if (!expected.containsAll(names)) {
        fail("Unexpected models. Expected: " + expected + ", Actual: " + names);
    }
}
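
Asserting containsAll in both directions is an order-insensitive way to check that the actual and expected collections hold exactly the same elements, while still producing a distinct failure message for missing versus unexpected models.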

From source file:com.streamsets.pipeline.stage.processor.fieldhasher.TestFieldHasherProcessor.java

private void checkFieldIssueContinue(Record record, HasherConfig hasherConfig, Set<String> expectedValidFields,
        Map<String, Field> expectedVal) throws StageException {
    StageRunner.Output output;
    FieldHasherProcessor processor;
    ProcessorRunner runner;

    final Set<String> validFieldsFromTheProcessor = registerCallbackForValidFields();

    processor = PowerMockito.spy(new FieldHasherProcessor(hasherConfig, OnStagePreConditionFailure.CONTINUE));

    runner = new ProcessorRunner.Builder(FieldHasherDProcessor.class, processor).addOutputLane("a").build();
    runner.runInit();

    try {
        output = runner.runProcess(Arrays.asList(record));
        Assert.assertEquals("Valid Fields Size mismatch", expectedValidFields.size(),
                validFieldsFromTheProcessor.size());
        Assert.assertTrue("Expected Valid Fields Not Present",
                validFieldsFromTheProcessor.containsAll(expectedValidFields));
        Assert.assertEquals(1, output.getRecords().get("a").size());
        Record outputRecord = output.getRecords().get("a").get(0);

        Field field = outputRecord.get();
        Assert.assertTrue(field.getValue() instanceof Map);

        Map<String, Field> result = field.getValueAsMap();
        Assert.assertEquals("Expected fields does not match: ", expectedVal.size(), result.size());

        Set<String> resultFieldPaths = new HashSet<String>();
        for (Map.Entry<String, Field> entry : result.entrySet()) {
            String fieldKey = entry.getKey();
            Field outputField = entry.getValue();
            Field expectedField = expectedVal.get(fieldKey);
            Assert.assertEquals("Expected Type not present for field:" + fieldKey, expectedField.getType(),
                    outputField.getType());
            Assert.assertEquals("Expected Value not present for field: " + fieldKey, expectedField.getValue(),
                    outputField.getValue());
            resultFieldPaths.add("/" + fieldKey);
        }
    } catch (StageException e) {
        Assert.fail("Should not throw an exception when On Stage Precondition Continue");
    } finally {
        runner.runDestroy();
    }
}

From source file:org.auraframework.impl.root.component.BaseComponentDefTest.java

@Test
public void testAppendDependenciesWithAllReferences() throws QuickFixException {
    DefDescriptor<T> parentDesc = addSourceAutoCleanup(getDefClass(),
            String.format(baseTag, "extensible='true'", ""));
    DefDescriptor<ComponentDef> childDesc = addSourceAutoCleanup(ComponentDef.class, "<aura:component/>");
    DefDescriptor<InterfaceDef> intfDesc = addSourceAutoCleanup(InterfaceDef.class, "<aura:interface/>");
    DefDescriptor<EventDef> eventDesc = addSourceAutoCleanup(EventDef.class,
            "<aura:event type='component' support='GA'/>");
    DefDescriptor<ProviderDef> providerDesc = definitionService.getDefDescriptor(
            "java://org.auraframework.impl.java.provider.ConcreteProvider", ProviderDef.class);

    DefDescriptor<T> cmpDesc = addSourceAutoCleanup(getDefClass(),
            String.format(baseTag,
                    String.format("extends='%s' implements='%s' provider='%s'", parentDesc.getDescriptorName(),
                            intfDesc.getDescriptorName(), providerDesc),
                    String.format("<%s/><aura:registerevent name='evt' type='%s'/>",
                            childDesc.getDescriptorName(), eventDesc.getDescriptorName())));

    DefDescriptor<ModelDef> modelDesc = DefDescriptorImpl.getAssociateDescriptor(cmpDesc, ModelDef.class,
            DefDescriptor.JAVASCRIPT_PREFIX);
    addSourceAutoCleanup(modelDesc, "{obj:{}}");
    DefDescriptor<ControllerDef> controllerDesc = DefDescriptorImpl.getAssociateDescriptor(cmpDesc,
            ControllerDef.class, DefDescriptor.JAVASCRIPT_PREFIX);
    addSourceAutoCleanup(controllerDesc, "{hi:function(){}}");

    DefDescriptor<RendererDef> renderDesc = DefDescriptorImpl.getAssociateDescriptor(cmpDesc, RendererDef.class,
            DefDescriptor.JAVASCRIPT_PREFIX);
    addSourceAutoCleanup(renderDesc, "({render:function(c){return this.superRender();}})");

    DefDescriptor<HelperDef> helperDesc = DefDescriptorImpl.getAssociateDescriptor(cmpDesc, HelperDef.class,
            DefDescriptor.JAVASCRIPT_PREFIX);
    addSourceAutoCleanup(helperDesc, "({help:function(){}})");

    DefDescriptor<StyleDef> styleDesc = definitionService.getDefDescriptor(cmpDesc, DefDescriptor.CSS_PREFIX,
            StyleDef.class);
    String className = cmpDesc.getNamespace() + StringUtils.capitalize(cmpDesc.getName());
    addSourceAutoCleanup(styleDesc, String.format(".%s {font-style:italic;}", className));

    Set<DefDescriptor<?>> dependencies = new HashSet<>();
    definitionService.getDefinition(cmpDesc).appendDependencies(dependencies);

    Set<DefDescriptor<?>> expected = Sets.newHashSet(parentDesc, childDesc, intfDesc, providerDesc, modelDesc,
            controllerDesc, eventDesc, styleDesc, renderDesc, helperDesc);
    if (!dependencies.containsAll(expected)) {
        fail(String.format("missing dependencies - EXPECTED: %s, ACTUAL: %s", expected, dependencies));
    }
    if (!expected.containsAll(dependencies)) {
        fail(String.format("extra dependencies - EXPECTED: %s, ACTUAL: %s", expected, dependencies));
    }
}

From source file:com.thinkbiganalytics.feedmgr.service.feed.DefaultFeedManagerFeedService.java

/**
 * Assign the feed sources/destinations
 *
 * @param feed       the feed rest model
 * @param domainFeed the domain feed
 */
private void assignFeedDatasources(FeedMetadata feed, Feed domainFeed) {
    final Feed.ID domainFeedId = domainFeed.getId();
    Set<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> sources = new HashSet<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID>();
    Set<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> destinations = new HashSet<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID>();

    String uniqueName = FeedNameUtil.fullName(feed.getCategory().getSystemName(), feed.getSystemFeedName());

    RegisteredTemplate template = feed.getRegisteredTemplate();
    if (template == null) {
        //fetch it for checks
        template = templateRestProvider.getRegisteredTemplate(feed.getTemplateId());

    }
    //find Definition registration

    derivedDatasourceFactory.populateDatasources(feed, template, sources, destinations);
    //remove the older sources only if they have changed

    if (domainFeed.getSources() != null) {
        Set<Datasource.ID> existingSourceIds = ((List<FeedSource>) domainFeed.getSources()).stream()
                .filter(source -> source.getDatasource() != null)
                .map(source1 -> source1.getDatasource().getId()).collect(Collectors.toSet());
        if (!sources.containsAll(existingSourceIds) || (sources.size() != existingSourceIds.size())) {
            //remove older sources
            //can't do it here for some reason; need to do it in a separate transaction
            feedProvider.removeFeedSources(domainFeedId);
        }
    }
    sources.stream().forEach(sourceId -> feedProvider.ensureFeedSource(domainFeedId, sourceId));
    destinations.stream().forEach(sourceId -> feedProvider.ensureFeedDestination(domainFeedId, sourceId));

}