Example usage for java.util.Set.retainAll

A list of usage examples for java.util.Set.retainAll

Introduction

On this page you can find example usages for java.util.Set.retainAll.

Prototype

boolean retainAll(Collection<?> c);

Document

Retains only the elements in this set that are contained in the specified collection (optional operation). In other words, removes from this set all of its elements that are not contained in the specified collection; if the specified collection is also a set, this operation effectively modifies this set so that its value is the intersection of the two sets.
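
As a minimal, self-contained illustration of this contract (not taken from any of the projects below), retainAll mutates the receiver in place and returns true only if the set actually changed:

import java.util.HashSet;
import java.util.Set;

public class RetainAllDemo {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<>(Set.of("red", "green", "blue"));
        Set<String> warm = Set.of("red", "orange", "yellow");

        boolean changed = colors.retainAll(warm); // keep only elements also present in warm
        System.out.println(changed); // true: "green" and "blue" were removed
        System.out.println(colors);  // [red]
    }
}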

Usage

From source file:org.apache.falcon.resource.AbstractEntityManager.java

public APIResult update(HttpServletRequest request, String type, String entityName, String colo,
        Boolean skipDryRun) {
    checkColo(colo);
    List<Entity> tokenList = null;
    try {
        EntityType entityType = EntityType.getEnum(type);
        Entity oldEntity = EntityUtil.getEntity(type, entityName);
        Entity newEntity = deserializeEntity(request, entityType);
        // KLUDGE - Until ACL is mandated, the entity passed in should be decorated for the equals check to pass
        decorateEntityWithACL(newEntity);
        validate(newEntity);

        validateUpdate(oldEntity, newEntity);
        configStore.initiateUpdate(newEntity);

        tokenList = obtainUpdateEntityLocks(oldEntity);

        StringBuilder result = new StringBuilder("Updated successfully");
        //Update in workflow engine
        if (!DeploymentUtil.isPrism()) {
            Set<String> oldClusters = EntityUtil.getClustersDefinedInColos(oldEntity);
            Set<String> newClusters = EntityUtil.getClustersDefinedInColos(newEntity);
            newClusters.retainAll(oldClusters); //common clusters for update
            oldClusters.removeAll(newClusters); //deleted clusters

            for (String cluster : newClusters) {
                result.append(getWorkflowEngine().update(oldEntity, newEntity, cluster, skipDryRun));
            }
            for (String cluster : oldClusters) {
                getWorkflowEngine().delete(oldEntity, cluster);
            }
        }

        configStore.update(entityType, newEntity);

        return new APIResult(APIResult.Status.SUCCEEDED, result.toString());
    } catch (Throwable e) {
        LOG.error("Update failed", e);
        throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
    } finally {
        ConfigurationStore.get().cleanupUpdateInit();
        releaseUpdateEntityLocks(entityName, tokenList);
    }
}
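
The retainAll/removeAll pair above is a common way to partition an old and a new set into "common" and "deleted" elements. A minimal sketch of just that partitioning, with illustrative cluster names rather than anything from the Falcon API:

import java.util.HashSet;
import java.util.Set;

public class ClusterDiff {
    public static void main(String[] args) {
        Set<String> oldClusters = new HashSet<>(Set.of("c1", "c2", "c3"));
        Set<String> newClusters = new HashSet<>(Set.of("c2", "c3", "c4"));

        newClusters.retainAll(oldClusters); // common clusters: update these
        oldClusters.removeAll(newClusters); // clusters only in the old entity: delete these

        System.out.println("update: " + newClusters); // c2 and c3
        System.out.println("delete: " + oldClusters); // c1
    }
}

Note that the order matters: retainAll must run first, while newClusters still holds everything, so that removeAll subtracts the true intersection.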

From source file:org.commonjava.cartographer.graph.MultiGraphCalculator.java

public GraphCalculation calculateFromGraphMap(final GraphComposition composition,
        final Map<GraphDescription, RelationshipGraph> graphMap) throws CartoDataException {
    Set<ProjectRelationship<?, ?>> result = null;
    Set<ProjectVersionRef> roots = null;

    out: for (final GraphDescription desc : composition.getGraphs()) {
        final RelationshipGraph graph = graphMap.get(desc);

        if (graph == null) {
            throw new CartoDataException("Cannot retrieve web for: {}.", desc);
        }

        if (result == null) {
            result = new HashSet<>(graph.getAllRelationships());
            roots = new HashSet<>(graph.getRoots());
        } else {
            switch (composition.getCalculation()) {
            case SUBTRACT: {
                result.removeAll(graph.getAllRelationships());

                if (result.isEmpty()) {
                    break out;
                }

                break;
            }
            case ADD: {
                result.addAll(graph.getAllRelationships());
                roots.addAll(graph.getRoots());
                break;
            }
            case INTERSECT: {
                result.retainAll(graph.getAllRelationships());
                roots.addAll(graph.getRoots());

                if (result.isEmpty()) {
                    break out;
                }

                break;
            }
            }
        }
    }

    return new GraphCalculation(composition.getCalculation(), composition.getGraphs(), roots, result);
}
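
A minimal standalone version of the same fold, assuming a simple enum in place of the Cartographer types; the first set seeds the accumulator, and the loop exits early once SUBTRACT or INTERSECT empties it:

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SetFold {
    enum Op { ADD, SUBTRACT, INTERSECT }

    static Set<Integer> fold(Op op, List<Set<Integer>> sets) {
        Set<Integer> acc = null;
        for (Set<Integer> s : sets) {
            if (acc == null) {
                acc = new HashSet<>(s); // first set seeds the accumulator
                continue;
            }
            switch (op) {
            case ADD:       acc.addAll(s);    break;
            case SUBTRACT:  acc.removeAll(s); break;
            case INTERSECT: acc.retainAll(s); break;
            }
            if (acc.isEmpty()) {
                break; // nothing left to subtract or intersect away
            }
        }
        return acc;
    }

    public static void main(String[] args) {
        System.out.println(fold(Op.INTERSECT,
                List.of(Set.of(1, 2, 3), Set.of(2, 3, 4), Set.of(3, 5)))); // [3]
    }
}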

From source file:ubic.BAMSandAllen.MatrixPairs.MatrixPair.java

public final void slimMatricesOnce() {
    log.info("Before");
    printDimensions();
    // collect the mapped names, then retain only those present in the current matrix
    // aName -> bName
    Set<String> bMapped = new HashSet<String>();
    for (String aName : matrixA.getColNames()) {
        Set<String> bNames = convertANametoB(aName);
        if (bNames != null) {
            bMapped.addAll(convertANametoB(aName));
        }
    }

    // bName -> aName
    Set<String> aMapped = new HashSet<String>();
    for (String bName : matrixB.getColNames()) {
        Set<String> aNames = convertBNametoA(bName);
        if (aNames != null) {
            aMapped.addAll(aNames);
        }
    }

    // we may have been mapped to a name that has no data
    bMapped.retainAll(matrixB.getColNames());
    aMapped.retainAll(matrixA.getColNames());

    // we only really need to slim A
    matrixA = matrixA.retainColumns(aMapped);
    matrixB = matrixB.retainColumns(bMapped);

    log.info("After");

    log.info("Removing all zero rows");

    // if this is done, it will increase the weight of up-propagated connections
    // matrixA = matrixA.removeRows( Util.findZeroRows( matrixA ) );

    matrixB = matrixB.removeRows(Util.findZeroRows(matrixB));

    printDimensions();
}
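
The two retainAll calls above guard against mappings that point at names with no backing column. A hedged sketch of that guard on its own, with hypothetical region names rather than the BAMSandAllen API:

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class MappedNameGuard {
    public static void main(String[] args) {
        // names produced by some A -> B mapping
        Set<String> bMapped = new HashSet<>(Set.of("cortex", "thalamus", "unknownRegion"));
        // columns that actually exist in matrix B
        List<String> bColNames = List.of("cortex", "thalamus", "cerebellum");

        bMapped.retainAll(bColNames); // drop mapped names that have no data column
        System.out.println(bMapped);  // cortex and thalamus; unknownRegion is gone
    }
}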

From source file:org.apache.ambari.server.controller.AmbariCustomCommandExecutionHelper.java

/**
 * Processes decommission command. Modifies the host components as needed and then
 * calls into the implementation of a custom command
 */
private void addDecommissionAction(final ActionExecutionContext actionExecutionContext,
        final RequestResourceFilter resourceFilter, Stage stage, boolean retryAllowed) throws AmbariException {

    String clusterName = actionExecutionContext.getClusterName();
    final Cluster cluster = clusters.getCluster(clusterName);
    final String serviceName = resourceFilter.getServiceName();
    String masterCompType = resourceFilter.getComponentName();
    List<String> hosts = resourceFilter.getHostNames();

    if (hosts != null && !hosts.isEmpty()) {
        throw new AmbariException("Decommission command cannot be issued with " + "target host(s) specified.");
    }

    //Get all hosts to be added and removed
    Set<String> excludedHosts = getHostList(actionExecutionContext.getParameters(), DECOM_EXCLUDED_HOSTS);
    Set<String> includedHosts = getHostList(actionExecutionContext.getParameters(), DECOM_INCLUDED_HOSTS);

    Set<String> cloneSet = new HashSet<String>(excludedHosts);
    cloneSet.retainAll(includedHosts);
    if (cloneSet.size() > 0) {
        throw new AmbariException("Same host cannot be specified for inclusion "
                + "as well as exclusion. Hosts: " + cloneSet.toString());
    }

    Service service = cluster.getService(serviceName);
    if (service == null) {
        throw new AmbariException("Specified service " + serviceName + " is not a valid/deployed service.");
    }

    Map<String, ServiceComponent> svcComponents = service.getServiceComponents();
    if (!svcComponents.containsKey(masterCompType)) {
        throw new AmbariException(
                "Specified component " + masterCompType + " does not belong to service " + serviceName + ".");
    }

    ServiceComponent masterComponent = svcComponents.get(masterCompType);
    if (!masterComponent.isMasterComponent()) {
        throw new AmbariException(
                "Specified component " + masterCompType + " is not a MASTER for service " + serviceName + ".");
    }

    if (!masterToSlaveMappingForDecom.containsKey(masterCompType)) {
        throw new AmbariException("Decommissioning is not supported for " + masterCompType);
    }

    // Find the slave component
    String slaveCompStr = actionExecutionContext.getParameters().get(DECOM_SLAVE_COMPONENT);
    final String slaveCompType;
    if (slaveCompStr == null || slaveCompStr.equals("")) {
        slaveCompType = masterToSlaveMappingForDecom.get(masterCompType);
    } else {
        slaveCompType = slaveCompStr;
        if (!masterToSlaveMappingForDecom.get(masterCompType).equals(slaveCompType)) {
            throw new AmbariException("Component " + slaveCompType + " is not supported for decommissioning.");
        }
    }

    String isDrainOnlyRequest = actionExecutionContext.getParameters().get(HBASE_MARK_DRAINING_ONLY);
    if (isDrainOnlyRequest != null && !slaveCompType.equals(Role.HBASE_REGIONSERVER.name())) {
        throw new AmbariException(HBASE_MARK_DRAINING_ONLY + " is not a valid parameter for " + masterCompType);
    }

    // Filtering hosts based on Maintenance State
    MaintenanceStateHelper.HostPredicate hostPredicate = new MaintenanceStateHelper.HostPredicate() {
        @Override
        public boolean shouldHostBeRemoved(final String hostname) throws AmbariException {
            //Get UPDATE_EXCLUDE_FILE_ONLY parameter as string
            String upd_excl_file_only_str = actionExecutionContext.getParameters()
                    .get(UPDATE_EXCLUDE_FILE_ONLY);

            String decom_incl_hosts_str = actionExecutionContext.getParameters().get(DECOM_INCLUDED_HOSTS);
            if ((upd_excl_file_only_str != null && !upd_excl_file_only_str.trim().equals(""))) {
                upd_excl_file_only_str = upd_excl_file_only_str.trim();
            }

            boolean upd_excl_file_only = false;
            // Parse the possible forms of the value
            if (upd_excl_file_only_str != null && !upd_excl_file_only_str.equals("")
                    && (upd_excl_file_only_str.equals("\"true\"") || upd_excl_file_only_str.equals("'true'")
                            || upd_excl_file_only_str.equals("true"))) {
                upd_excl_file_only = true;
            }

            // If we just clear *.exclude and the component has already been removed, we skip the check
            if (upd_excl_file_only && decom_incl_hosts_str != null && !decom_incl_hosts_str.trim().equals("")) {
                return upd_excl_file_only;
            } else {
                return !maintenanceStateHelper.isOperationAllowed(cluster,
                        actionExecutionContext.getOperationLevel(), resourceFilter, serviceName, slaveCompType,
                        hostname);
            }
        }
    };
    // Filter excluded hosts
    Set<String> filteredExcludedHosts = new HashSet<String>(excludedHosts);
    Set<String> ignoredHosts = maintenanceStateHelper.filterHostsInMaintenanceState(filteredExcludedHosts,
            hostPredicate);
    if (!ignoredHosts.isEmpty()) {
        String message = String.format("Some hosts (%s) from host exclude list " + "have been ignored "
                + "because components on them are in Maintenance state.", ignoredHosts);
        LOG.debug(message);
    }

    // Filter included hosts
    Set<String> filteredIncludedHosts = new HashSet<String>(includedHosts);
    ignoredHosts = maintenanceStateHelper.filterHostsInMaintenanceState(filteredIncludedHosts, hostPredicate);
    if (!ignoredHosts.isEmpty()) {
        String message = String.format("Some hosts (%s) from host include list " + "have been ignored "
                + "because components on them are in Maintenance state.", ignoredHosts);
        LOG.debug(message);
    }

    // Decommission only if the sch is in state STARTED or INSTALLED
    for (ServiceComponentHost sch : svcComponents.get(slaveCompType).getServiceComponentHosts().values()) {
        if (filteredExcludedHosts.contains(sch.getHostName()) && !"true".equals(isDrainOnlyRequest)
                && sch.getState() != State.STARTED) {
            throw new AmbariException(
                    "Component " + slaveCompType + " on host " + sch.getHostName() + " cannot be "
                            + "decommissioned as its not in STARTED state. Aborting the whole request.");
        }
    }

    String alignMtnStateStr = actionExecutionContext.getParameters().get(ALIGN_MAINTENANCE_STATE);
    boolean alignMtnState = "true".equals(alignMtnStateStr);
    // Set/reset decommissioned flag on all components
    List<String> listOfExcludedHosts = new ArrayList<String>();
    for (ServiceComponentHost sch : svcComponents.get(slaveCompType).getServiceComponentHosts().values()) {
        if (filteredExcludedHosts.contains(sch.getHostName())) {
            sch.setComponentAdminState(HostComponentAdminState.DECOMMISSIONED);
            listOfExcludedHosts.add(sch.getHostName());
            if (alignMtnState) {
                sch.setMaintenanceState(MaintenanceState.ON);
            }
            LOG.info(
                    "Decommissioning " + slaveCompType + " and marking Maintenance=ON on " + sch.getHostName());
        }
        if (filteredIncludedHosts.contains(sch.getHostName())) {
            sch.setComponentAdminState(HostComponentAdminState.INSERVICE);
            if (alignMtnState) {
                sch.setMaintenanceState(MaintenanceState.OFF);
            }
            LOG.info("Recommissioning " + slaveCompType + " and marking Maintenance=OFF on "
                    + sch.getHostName());
        }
    }

    // In the event there is more than one master host, the following logic is applied
    // -- HDFS/DN, MR1/TT, YARN/NM call refresh node on both
    // -- HBASE/RS call only on one host

    // Ensure host is active
    Map<String, ServiceComponentHost> masterSchs = masterComponent.getServiceComponentHosts();
    String primaryCandidate = null;
    for (String hostName : masterSchs.keySet()) {
        if (primaryCandidate == null) {
            primaryCandidate = hostName;
        } else {
            ServiceComponentHost sch = masterSchs.get(hostName);
            if (sch.getState() == State.STARTED) {
                primaryCandidate = hostName;
            }
        }
    }

    StringBuilder commandDetail = getReadableDecommissionCommandDetail(actionExecutionContext,
            filteredIncludedHosts, listOfExcludedHosts);

    for (String hostName : masterSchs.keySet()) {
        RequestResourceFilter commandFilter = new RequestResourceFilter(serviceName, masterComponent.getName(),
                Collections.singletonList(hostName));
        List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
        resourceFilters.add(commandFilter);

        ActionExecutionContext commandContext = new ActionExecutionContext(clusterName,
                actionExecutionContext.getActionName(), resourceFilters);

        String clusterHostInfoJson = StageUtils.getGson().toJson(
                StageUtils.getClusterHostInfo(clusters.getHostsForCluster(cluster.getClusterName()), cluster));

        // Reset cluster host info as it has changed
        stage.setClusterHostInfo(clusterHostInfoJson);

        Map<String, String> commandParams = new HashMap<String, String>();
        if (serviceName.equals(Service.Type.HBASE.name())) {
            commandParams.put(DECOM_EXCLUDED_HOSTS, StringUtils.join(listOfExcludedHosts, ','));
            if ((isDrainOnlyRequest != null) && isDrainOnlyRequest.equals("true")) {
                commandParams.put(HBASE_MARK_DRAINING_ONLY, isDrainOnlyRequest);
            } else {
                commandParams.put(HBASE_MARK_DRAINING_ONLY, "false");
            }
        }

        if (!serviceName.equals(Service.Type.HBASE.name()) || hostName.equals(primaryCandidate)) {
            commandParams.put(UPDATE_EXCLUDE_FILE_ONLY, "false");
            addCustomCommandAction(commandContext, commandFilter, stage, commandParams,
                    commandDetail.toString(), retryAllowed);
        }
    }
}
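
The include/exclude conflict check near the top of this method copies one set into cloneSet before calling retainAll, so neither input is mutated. A minimal standalone version of that non-destructive overlap test (hypothetical host names):

import java.util.HashSet;
import java.util.Set;

public class OverlapCheck {
    static void requireDisjoint(Set<String> excluded, Set<String> included) {
        Set<String> overlap = new HashSet<>(excluded); // copy so the inputs stay intact
        overlap.retainAll(included);
        if (!overlap.isEmpty()) {
            throw new IllegalArgumentException(
                    "Same host cannot be both included and excluded: " + overlap);
        }
    }

    public static void main(String[] args) {
        requireDisjoint(Set.of("h1", "h2"), Set.of("h3")); // ok, disjoint
        try {
            requireDisjoint(Set.of("h1", "h2"), Set.of("h2", "h4"));
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // reports the overlap: h2
        }
    }
}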

From source file:org.apache.hyracks.algebricks.rewriter.rules.subplan.IntroduceGroupByForSubplanRule.java

@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
        throws AlgebricksException {
    AbstractLogicalOperator op0 = (AbstractLogicalOperator) opRef.getValue();
    if (op0.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
        return false;
    }
    SubplanOperator subplan = (SubplanOperator) op0;

    Iterator<ILogicalPlan> plansIter = subplan.getNestedPlans().iterator();
    ILogicalPlan p = null;
    while (plansIter.hasNext()) {
        p = plansIter.next();
    }
    if (p == null) {
        return false;
    }
    if (p.getRoots().size() != 1) {
        return false;
    }
    Mutable<ILogicalOperator> subplanRoot = p.getRoots().get(0);
    AbstractLogicalOperator op1 = (AbstractLogicalOperator) subplanRoot.getValue();

    Mutable<ILogicalOperator> botRef = subplanRoot;
    AbstractLogicalOperator op2;
    // Project is optional
    if (op1.getOperatorTag() != LogicalOperatorTag.PROJECT) {
        op2 = op1;
    } else {
        ProjectOperator project = (ProjectOperator) op1;
        botRef = project.getInputs().get(0);
        op2 = (AbstractLogicalOperator) botRef.getValue();
    }
    if (op2.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
        return false;
    }
    AggregateOperator aggregate = (AggregateOperator) op2;

    Set<LogicalVariable> free = new HashSet<LogicalVariable>();
    VariableUtilities.getUsedVariables(aggregate, free);

    Mutable<ILogicalOperator> op3Ref = aggregate.getInputs().get(0);
    AbstractLogicalOperator op3 = (AbstractLogicalOperator) op3Ref.getValue();

    while (op3.getInputs().size() == 1) {
        Set<LogicalVariable> prod = new HashSet<LogicalVariable>();
        VariableUtilities.getProducedVariables(op3, prod);
        free.removeAll(prod);
        VariableUtilities.getUsedVariables(op3, free);
        botRef = op3Ref;
        op3Ref = op3.getInputs().get(0);
        op3 = (AbstractLogicalOperator) op3Ref.getValue();
    }

    if (op3.getOperatorTag() != LogicalOperatorTag.INNERJOIN
            && op3.getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
        return false;
    }
    AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op3;
    if (join.getCondition().getValue() == ConstantExpression.TRUE) {
        return false;
    }
    VariableUtilities.getUsedVariables(join, free);

    AbstractLogicalOperator b0 = (AbstractLogicalOperator) join.getInputs().get(0).getValue();
    // see if there's an NTS at the end of the pipeline
    NestedTupleSourceOperator outerNts = getNts(b0);
    if (outerNts == null) {
        AbstractLogicalOperator b1 = (AbstractLogicalOperator) join.getInputs().get(1).getValue();
        outerNts = getNts(b1);
        if (outerNts == null) {
            return false;
        }
    }

    Set<LogicalVariable> pkVars = computeGbyVars(outerNts, free, context);
    if (pkVars == null || pkVars.size() < 1) {
        // there is no non-trivial primary key; the group-by keys are all live variables
        // that were produced by a descendant or self
        ILogicalOperator subplanInput = subplan.getInputs().get(0).getValue();
        pkVars = new HashSet<LogicalVariable>();
        //get live variables
        VariableUtilities.getLiveVariables(subplanInput, pkVars);

        //get produced variables
        Set<LogicalVariable> producedVars = new HashSet<LogicalVariable>();
        VariableUtilities.getProducedVariablesInDescendantsAndSelf(subplanInput, producedVars);

        //retain the intersection
        pkVars.retainAll(producedVars);
    }
    AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Found FD for introducing group-by: " + pkVars);

    Mutable<ILogicalOperator> rightRef = join.getInputs().get(1);
    LogicalVariable testForNull = null;
    AbstractLogicalOperator right = (AbstractLogicalOperator) rightRef.getValue();
    switch (right.getOperatorTag()) {
    case UNNEST: {
        UnnestOperator innerUnnest = (UnnestOperator) right;
        // Select [ $y != null ]
        testForNull = innerUnnest.getVariable();
        break;
    }
    case RUNNINGAGGREGATE: {
        ILogicalOperator inputToRunningAggregate = right.getInputs().get(0).getValue();
        Set<LogicalVariable> producedVars = new ListSet<LogicalVariable>();
        VariableUtilities.getProducedVariables(inputToRunningAggregate, producedVars);
        if (!producedVars.isEmpty()) {
            // Select [ $y != null ]
            testForNull = producedVars.iterator().next();
        }
        break;
    }
    case DATASOURCESCAN: {
        DataSourceScanOperator innerScan = (DataSourceScanOperator) right;
        // Select [ $y != null ]
        if (innerScan.getVariables().size() == 1) {
            testForNull = innerScan.getVariables().get(0);
        }
        break;
    }
    default:
        break;
    }
    if (testForNull == null) {
        testForNull = context.newVar();
        AssignOperator tmpAsgn = new AssignOperator(testForNull,
                new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
        tmpAsgn.getInputs().add(new MutableObject<ILogicalOperator>(rightRef.getValue()));
        rightRef.setValue(tmpAsgn);
        context.computeAndSetTypeEnvironmentForOperator(tmpAsgn);
    }

    IFunctionInfo finfoEq = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.IS_MISSING);
    ILogicalExpression isNullTest = new ScalarFunctionCallExpression(finfoEq,
            new MutableObject<ILogicalExpression>(new VariableReferenceExpression(testForNull)));
    IFunctionInfo finfoNot = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
    ScalarFunctionCallExpression nonNullTest = new ScalarFunctionCallExpression(finfoNot,
            new MutableObject<ILogicalExpression>(isNullTest));
    SelectOperator selectNonNull = new SelectOperator(new MutableObject<ILogicalExpression>(nonNullTest), false,
            null);
    GroupByOperator g = new GroupByOperator();
    Mutable<ILogicalOperator> newSubplanRef = new MutableObject<ILogicalOperator>(subplan);
    NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(g));
    opRef.setValue(g);
    selectNonNull.getInputs().add(new MutableObject<ILogicalOperator>(nts));

    List<Mutable<ILogicalOperator>> prodInpList = botRef.getValue().getInputs();
    prodInpList.clear();
    prodInpList.add(new MutableObject<ILogicalOperator>(selectNonNull));

    ILogicalPlan gPlan = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(subplanRoot.getValue()));
    g.getNestedPlans().add(gPlan);
    subplanRoot.setValue(op3Ref.getValue());
    g.getInputs().add(newSubplanRef);

    HashSet<LogicalVariable> underVars = new HashSet<LogicalVariable>();
    VariableUtilities.getLiveVariables(subplan.getInputs().get(0).getValue(), underVars);
    underVars.removeAll(pkVars);
    Map<LogicalVariable, LogicalVariable> mappedVars = buildVarExprList(pkVars, context, g, g.getGroupByList());
    context.updatePrimaryKeys(mappedVars);
    for (LogicalVariable uv : underVars) {
        g.getDecorList().add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(null,
                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(uv))));
    }
    OperatorPropertiesUtil.typeOpRec(subplanRoot, context);
    OperatorPropertiesUtil.typeOpRec(gPlan.getRoots().get(0), context);
    context.computeAndSetTypeEnvironmentForOperator(g);
    return true;
}

From source file:ubic.pubmedgate.resolve.ResolutionRDFModel.java

public Set<Resource> resolveToTerms(String mention, RDFResolver resolver, Set<Resource> allTerms) {
    Resource r = makeMentionNode(mention);

    // r is now linked to the terms
    Model resolvedStatements = resolver.resolve(r);

    model = model.add(resolvedStatements);

    Set<Resource> terms = getLinkedResources(r);

    terms.retainAll(allTerms);
    return terms;
}
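
Here retainAll narrows the resolver's output to a known vocabulary. The same whitelist filter in isolation, using plain strings instead of Jena Resources (hypothetical term names):

import java.util.HashSet;
import java.util.Set;

public class WhitelistFilter {
    public static void main(String[] args) {
        Set<String> resolved = new HashSet<>(Set.of("hippocampus", "typo-term", "amygdala"));
        Set<String> allTerms = Set.of("hippocampus", "amygdala", "cortex");

        resolved.retainAll(allTerms); // keep only terms from the controlled vocabulary
        System.out.println(resolved); // hippocampus and amygdala survive
    }
}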

From source file:opennlp.tools.fca.BasicLevelMetrics.java

public void predictability() {

    if (attributesExtent == null)
        this.setUp();
    ArrayList<Integer> attributes = new ArrayList<Integer>();
    ArrayList<Integer> outOfIntent = new ArrayList<Integer>();
    Set<Integer> intersection;
    ArrayList<Integer> attrExtent;
    double sum, term;

    for (int i = 0; i < cl.attributeCount; i++) {
        attributes.add(i);
    }
    for (FormalConcept c : cl.conceptList) {
        sum = 0;
        outOfIntent = new ArrayList<Integer>();
        outOfIntent.addAll(attributes);
        outOfIntent.removeAll(c.intent);
        for (Integer y : outOfIntent) {
            intersection = new HashSet<Integer>();
            intersection.addAll(c.extent);
            attrExtent = attributesExtent.get(y);
            intersection.retainAll(attrExtent);
            term = 1. * intersection.size() / c.extent.size();

            if (term > 0) {
                sum -= term * Math.log(term);
            }
        }
        c.blP = Double.isNaN(1 - sum / outOfIntent.size()) ? 0 : 1 - sum / outOfIntent.size();
    }

}
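
The inner loop uses retainAll to compute |extent ∩ attributeExtent| / |extent|, the conditional-probability term inside the entropy sum. The same measurement on its own, copying first because retainAll would otherwise destroy the extent:

import java.util.HashSet;
import java.util.Set;

public class OverlapRatio {
    static double overlapRatio(Set<Integer> extent, Set<Integer> attrExtent) {
        Set<Integer> intersection = new HashSet<>(extent); // copy; retainAll mutates
        intersection.retainAll(attrExtent);
        return (double) intersection.size() / extent.size();
    }

    public static void main(String[] args) {
        System.out.println(overlapRatio(Set.of(1, 2, 3, 4), Set.of(2, 4, 6))); // 0.5
    }
}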

From source file:org.opennms.ng.dao.support.PropertiesGraphDao.java

/** {@inheritDoc} */
@Override
public PrefabGraph[] getPrefabGraphsForResource(OnmsResource resource) {
    if (resource == null) {
        LOG.warn("returning empty graph list for resource because it is null");
        return new PrefabGraph[0];
    }
    Set<OnmsAttribute> attributes = resource.getAttributes();
    // Check if there are no attributes
    if (attributes.size() == 0) {
        LOG.debug("returning empty graph list for resource {} because its attribute list is empty", resource);
        return new PrefabGraph[0];
    }

    Set<String> availableRrdAttributes = resource.getRrdGraphAttributes().keySet();
    Set<String> availableStringAttributes = resource.getStringPropertyAttributes().keySet();
    Set<String> availableExternalAttributes = resource.getExternalValueAttributes().keySet();

    // Check if there are no RRD attributes
    if (availableRrdAttributes.size() == 0) {
        LOG.debug("returning empty graph list for resource {} because it has no RRD attributes", resource);
        return new PrefabGraph[0];
    }

    String resourceType = resource.getResourceType().getName();

    Map<String, PrefabGraph> returnList = new LinkedHashMap<String, PrefabGraph>();
    for (PrefabGraph query : getAllPrefabGraphs()) {
        if (resourceType != null && !query.hasMatchingType(resourceType)) {
            LOG.debug("skipping {} because its types \"{}\" does not match resourceType \"{}\"",
                    query.getName(), StringUtils.arrayToDelimitedString(query.getTypes(), ", "), resourceType);
            continue;
        }

        if (!verifyAttributesExist(query, "RRD", Arrays.asList(query.getColumns()), availableRrdAttributes)) {
            continue;
        }
        if (!verifyAttributesExist(query, "string property", Arrays.asList(query.getPropertiesValues()),
                availableStringAttributes)) {
            continue;
        }
        if (!verifyAttributesExist(query, "external value", Arrays.asList(query.getExternalValues()),
                availableExternalAttributes)) {
            continue;
        }

        LOG.debug("adding {} to query list", query.getName());

        returnList.put(query.getName(), query);
    }

    if (LOG.isDebugEnabled()) {
        ArrayList<String> nameList = new ArrayList<String>(returnList.size());
        for (PrefabGraph graph : returnList.values()) {
            nameList.add(graph.getName());
        }
        LOG.debug("found {} prefabricated graphs for resource {}: {}", nameList.size(), resource,
                StringUtils.collectionToDelimitedString(nameList, ", "));
    }

    Set<String> suppressReports = new HashSet<String>();
    for (Entry<String, PrefabGraph> entry : returnList.entrySet()) {
        suppressReports.addAll(Arrays.asList(entry.getValue().getSuppress()));
    }

    suppressReports.retainAll(returnList.keySet());
    if (suppressReports.size() > 0) {
        LOG.debug("suppressing {} prefabricated graphs for resource {}: {}", suppressReports.size(), resource,
                StringUtils.collectionToDelimitedString(suppressReports, ", "));
    }

    for (String suppressReport : suppressReports) {
        returnList.remove(suppressReport);
    }

    return returnList.values().toArray(new PrefabGraph[returnList.size()]);
}
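
Calling retainAll against the map's key set keeps only suppression targets that are actually present, so the removal loop never asks the map for a missing key. A compact sketch of that pruning, simplified to remove through the live keySet() view (illustrative names, not the OpenNMS API):

import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

public class SuppressGraphs {
    public static void main(String[] args) {
        Map<String, String> graphs = new LinkedHashMap<>();
        graphs.put("cpu", "...");
        graphs.put("cpu.detailed", "...");

        Set<String> suppress = new HashSet<>(Set.of("cpu", "memory")); // "memory" is absent
        suppress.retainAll(graphs.keySet()); // only suppress graphs we actually have

        graphs.keySet().removeAll(suppress); // keySet() is a live view backed by the map
        System.out.println(graphs.keySet()); // [cpu.detailed]
    }
}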

From source file:org.syncope.core.rest.controller.ConfigurationController.java

@PreAuthorize("hasRole('CONFIGURATION_LIST')")
@RequestMapping(method = RequestMethod.GET, value = "/mailTemplates")
public ModelAndView getMailTemplates() {
    CachingMetadataReaderFactory cachingMetadataReaderFactory = new CachingMetadataReaderFactory();

    Set<String> htmlTemplates = new HashSet<String>();
    Set<String> textTemplates = new HashSet<String>();

    try {
        for (Resource resource : resResolver.getResources("classpath:/mailTemplates/*.vm")) {

            String template = resource.getURL().toExternalForm();
            if (template.endsWith(".html.vm")) {
                htmlTemplates.add(template.substring(template.indexOf("mailTemplates/") + 14,
                        template.indexOf(".html.vm")));
            } else if (template.endsWith(".txt.vm")) {
                textTemplates.add(template.substring(template.indexOf("mailTemplates/") + 14,
                        template.indexOf(".txt.vm")));
            } else {
                LOG.warn("Unexpected template found: {}, ignoring...", template);
            }
        }
    } catch (IOException e) {
        LOG.error("While searching for class implementing {}", Validator.class.getName(), e);
    }

    // Only templates available both as HTML and TEXT are considered
    htmlTemplates.retainAll(textTemplates);

    return new ModelAndView().addObject(htmlTemplates);
}

From source file:org.opennms.netmgt.dao.support.PropertiesGraphDao.java

/** {@inheritDoc} */
@Override
public PrefabGraph[] getPrefabGraphsForResource(final OnmsResource resource) {
    if (resource == null) {
        LOG.warn("returning empty graph list for resource because it is null");
        return new PrefabGraph[0];
    }
    Set<OnmsAttribute> attributes = new LinkedHashSet<>(resource.getAttributes());
    // Check if there are no attributes
    if (attributes.size() == 0) {
        LOG.debug("returning empty graph list for resource {} because its attribute list is empty", resource);
        return new PrefabGraph[0];
    }

    Set<String> availableRrdAttributes = new LinkedHashSet<>(resource.getRrdGraphAttributes().keySet());
    Set<String> availableStringAttributes = new LinkedHashSet<>(
            resource.getStringPropertyAttributes().keySet());
    Set<String> availableExternalAttributes = new LinkedHashSet<>(
            resource.getExternalValueAttributes().keySet());

    // Check if there are no RRD attributes
    if (availableRrdAttributes.size() == 0) {
        LOG.debug("returning empty graph list for resource {} because it has no RRD attributes", resource);
        return new PrefabGraph[0];
    }

    String resourceType = resource.getResourceType().getName();

    Map<String, PrefabGraph> returnList = new LinkedHashMap<String, PrefabGraph>();
    for (PrefabGraph query : getAllPrefabGraphs()) {
        if (resourceType != null && !query.hasMatchingType(resourceType)) {
            LOG.debug("skipping {} because its types \"{}\" does not match resourceType \"{}\"",
                    query.getName(), StringUtils.arrayToDelimitedString(query.getTypes(), ", "), resourceType);
            continue;
        }

        if (!verifyAttributesExist(query, "RRD", Arrays.asList(query.getColumns()), availableRrdAttributes)) {
            continue;
        }
        if (!verifyAttributesExist(query, "string property", Arrays.asList(query.getPropertiesValues()),
                availableStringAttributes)) {
            continue;
        }
        if (!verifyAttributesExist(query, "external value", Arrays.asList(query.getExternalValues()),
                availableExternalAttributes)) {
            continue;
        }

        LOG.debug("adding {} to query list", query.getName());

        returnList.put(query.getName(), query);
    }

    if (LOG.isDebugEnabled()) {
        ArrayList<String> nameList = new ArrayList<String>(returnList.size());
        for (PrefabGraph graph : returnList.values()) {
            nameList.add(graph.getName());
        }
        LOG.debug("found {} prefabricated graphs for resource {}: {}", nameList.size(), resource,
                StringUtils.collectionToDelimitedString(nameList, ", "));
    }

    final Set<String> suppressReports = new HashSet<String>();
    for (final Entry<String, PrefabGraph> entry : returnList.entrySet()) {
        suppressReports.addAll(Arrays.asList(entry.getValue().getSuppress()));
    }

    suppressReports.retainAll(returnList.keySet());
    if (suppressReports.size() > 0) {
        LOG.debug("suppressing {} prefabricated graphs for resource {}: {}", suppressReports.size(), resource,
                StringUtils.collectionToDelimitedString(suppressReports, ", "));
    }

    for (final String suppressReport : suppressReports) {
        returnList.remove(suppressReport);
    }

    return returnList.values().toArray(new PrefabGraph[returnList.size()]);
}