Example usage for java.util.Set.toString()

Introduction

On this page you can find example usages of java.util.Set.toString(), collected from open-source projects.

Prototype

public String toString() 

Document

Returns a string representation of the object. For a Set, the implementation inherited from AbstractCollection renders the elements in iteration order, enclosed in square brackets and separated by a comma and a space (for example, [a, b, c]).
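
As a quick illustration before the real-world usages below, here is a minimal, self-contained sketch of what Set.toString() produces. Element order depends on the Set implementation: LinkedHashSet and TreeSet iterate deterministically, while HashSet makes no ordering guarantee.

import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.TreeSet;

public class SetToStringDemo {
    public static void main(String[] args) {
        // LinkedHashSet preserves insertion order, so the output is predictable.
        Set<String> insertion = new LinkedHashSet<>();
        insertion.add("b");
        insertion.add("a");
        insertion.add("c");
        System.out.println(insertion.toString()); // [b, a, c]

        // TreeSet iterates in sorted (natural) order.
        Set<String> sorted = new TreeSet<>(insertion);
        System.out.println(sorted.toString()); // [a, b, c]

        // HashSet makes no ordering guarantee: the same elements appear,
        // but their order may vary between runs or JVM versions.
        Set<String> unordered = new HashSet<>(insertion);
        System.out.println(unordered.toString()); // e.g. [c, a, b]
    }
}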

Usage

From source file:io.fabric8.apiman.rest.Kubernetes2ApimanFilter.java

/**
 * Synchronizes the OpenShift projects visible to the given token with Apiman
 * organizations and APIs.
 *
 * @param authToken the OAuth token of the current user
 * @return an ApimanInfo summary of the organizations and APIs that were synchronized
 */
public ApimanInfo syncKubernetesToApiman(final String authToken) {
    log.info("KubernetesToApiman");
    SudoSecurityContext sudoSecurityContext = new SudoSecurityContext();
    ApimanInfo apimanInfo = new ApimanInfo();

    OpenShiftClient osClient = null;
    try {
        Config config = new ConfigBuilder().withOauthToken(authToken).build();
        if (kubernetesMasterUrl != null)
            config.setMasterUrl(kubernetesMasterUrl);
        osClient = new DefaultOpenShiftClient(config);
        String username = osClient.users().withName("~").get().getMetadata().getName();
        apimanInfo.token = authToken;

        //get k8s projects owned by user
        Set<String> namespaceIds = new HashSet<String>();
        ProjectList projectList = osClient.projects().list();
        for (Project project : projectList.getItems()) {
            String orgId = BeanUtils.idFromName(project.getMetadata().getName());
            namespaceIds.add(orgId);
        }

        List<OrganizationSummaryBean> orgBeans = userResource.getOrganizations(username);
        Set<String> apimanOrganizationIdsForUser = new HashSet<String>();

        //if apiman holds a namespace that was deleted in openshift then delete it
        for (OrganizationSummaryBean org : orgBeans) {
            if (!namespaceIds.contains(org.getId())) {
                //delete the organization in apiman
                deleteOrganization(org.getId(), sudoSecurityContext, username);
            } else {
                apimanOrganizationIdsForUser.add(org.getId());
            }
        }
        log.info(apimanOrganizationIdsForUser.toString());

        //add namespaces to apiman if not there already
        for (Project project : projectList.getItems()) {
            String orgId = BeanUtils.idFromName(project.getMetadata().getName());
            log.info("Namespace: " + orgId);
            if (!apimanOrganizationIdsForUser.contains(orgId)) {
                log.info("User " + username + " is not a member of organizationId '" + orgId + "'");
                try {
                    organizationResource.get(orgId);
                    log.info("Adding user '" + username + "' as member to organizationId '" + orgId + "'");
                    GrantRolesBean bean = new GrantRolesBean();
                    bean.setUserId(username);
                    Set<String> roleIds = new HashSet<String>();
                    roleIds.add("Organization Owner");
                    bean.setRoleIds(roleIds);
                    sudoSecurityContext.sudo(organizationResource, "Kubernetes2Apiman", true);
                    organizationResource.grant(orgId, bean);
                    sudoSecurityContext.exit();
                } catch (OrganizationNotFoundException e) {
                    log.info("Creating organizationId '" + orgId + "' as it does not yet exist in Apiman");
                    NewOrganizationBean orgBean = new NewOrganizationBean();
                    orgBean.setName(project.getMetadata().getName());
                    orgBean.setDescription("Namespace '" + orgId + "' created by Kubernetes2Apiman");
                    sudoSecurityContext.sudo(organizationResource, username, false);
                    organizationResource.create(orgBean);
                    sudoSecurityContext.exit();
                }
                apimanInfo.organizations.add(orgId);
            }
            //Get services in the Kubernetes namespace
            Set<String> serviceIds = new HashSet<String>();
            ServiceList serviceList = osClient.services().inNamespace(orgId).list();
            for (Service service : serviceList.getItems()) {
                String serviceId = BeanUtils.idFromName(service.getMetadata().getName());
                serviceIds.add(serviceId);
            }
            //APIs in organization
            List<ApiSummaryBean> apiSummaryBeans = organizationResource.listApi(orgId);
            Set<String> apimanApiIds = new HashSet<String>();
            for (ApiSummaryBean bean : apiSummaryBeans) {
                //retire and delete from apiman if no longer in openshift
                if (!serviceIds.contains(bean.getId())) {
                    sudoSecurityContext.sudo(actionResource, username, true);
                    retireApi(orgId, bean.getId());
                    sudoSecurityContext.exit();
                    sudoSecurityContext.sudo(organizationResource, username, true);
                    deleteApi(orgId, bean.getId(), sudoSecurityContext);
                    sudoSecurityContext.exit();
                } else {
                    apimanApiIds.add(bean.getId());
                }
            }

            sudoSecurityContext.sudo(organizationResource, username, false);
            sudoSecurityContext.sudo(actionResource, username, false);
            Kubernetes2ApimanMapper mapper = new Kubernetes2ApimanMapper(osClient);
            for (Service service : serviceList.getItems()) {
                if (!apimanApiIds.contains(BeanUtils.idFromName(service.getMetadata().getName()))) {
                    String action = getApimanPublishAnnotation(service);
                    if (action != null) {
                        log.info("Creating API '" + service.getMetadata().getName() + "' in apiman");
                        //map service to bean
                        AvailableApiBean bean = mapper.createAvailableApiBean(service, null);
                        if (bean != null) {
                            if (!isServiceReady(bean)) {
                                apimanInfo.isReady = false;
                                break;
                            }
                            NewApiBean newApiBean = new NewApiBean();
                            newApiBean.setDefinitionType(bean.getDefinitionType());
                            newApiBean.setDefinitionUrl(bean.getDefinitionUrl());
                            newApiBean.setDescription(bean.getDescription());
                            newApiBean.setEndpoint(bean.getEndpoint());
                            newApiBean.setEndpointType(bean.getEndpointType());
                            newApiBean.setInitialVersion("1.0");
                            newApiBean.setName(bean.getName());

                            Set<ApiPlanBean> apiPlanBeans = getPlansForApiman(service);
                            if (apiPlanBeans == null) {
                                newApiBean.setPublicAPI(true);
                            } else {
                                newApiBean.setPlans(apiPlanBeans);
                            }
                            log.info("New API: " + newApiBean);
                            organizationResource.createApi(orgId, newApiBean);
                            String apiId = BeanUtils.idFromName(service.getMetadata().getName());
                            apimanInfo.apis.add(apiId);

                            if (action.equalsIgnoreCase("publish")) {
                                ActionBean publishApiAction = new ActionBean();
                                publishApiAction.setOrganizationId(orgId);
                                publishApiAction.setEntityId(apiId);
                                publishApiAction.setEntityVersion("1.0");
                                publishApiAction.setType(ActionType.publishAPI);
                                log.info("Publish API: " + publishApiAction);
                                actionResource.performAction(publishApiAction);
                            }
                        }
                    } else {
                        log.debug("Apiman import not requested for this service");
                    }
                }
            }
            sudoSecurityContext.exit();
        }
    } catch (Exception e) {
        log.error("Kubernetes2Apiman mapping Exception. ", e);
    } finally {
        sudoSecurityContext.exit();
        if (osClient != null)
            osClient.close();

    }
    return apimanInfo;
}

From source file:ubic.pubmedgate.interactions.NormalizePairs.java

/**
 * Given a list of pairs and an XML reader to resolve them, create a connection matrix.
 *
 * @param reader the XML reader used to resolve the pairs to region names
 * @param pairs the pairs to normalize
 */
public NormalizeResult normalizePairsToMatrix(AirolaXMLReader reader, List<String> pairs,
        SLOutputReader SLReader, boolean writeOut, String name) throws Exception {
    // dump out pairs to HTML
    ShowSLErrors.writeExamples(reader, SLReader, pairs,
            Config.config.getString("whitetext.iteractions.results.folder") + name + ".html");

    // sort pairs by score (for AUC)
    pairs = SLReader.sortPairUsingScore(new LinkedList<String>(pairs));
    Collections.reverse(pairs);

    Map<String, String> results = new HashMap<String, String>();
    results.put("Input pair list size", pairs.size() + "");
    results.put("annotationSet", reader.getAnnotatorNameSet());

    Direction direction = Direction.ANYDIRECTION;
    boolean propigated = true;
    DoubleMatrix<String, String> dataMatrix = getConnectionMatrix(propigated, direction);

    BAMSDataLoader bamsLoader = new BAMSDataLoader();

    List<String> connectionRegionNames = dataMatrix.getRowNames();
    DoubleMatrix<String, String> predictedMatrix = new DenseDoubleMatrix<String, String>(
            connectionRegionNames.size(), connectionRegionNames.size());
    predictedMatrix.setRowNames(connectionRegionNames);
    predictedMatrix.setColumnNames(connectionRegionNames);

    Set<Resource> allTerms = resolveModel.getTerms(); // for speed
    Set<Resource> allConcepts = resolveModel.getConcepts(); // for speed

    int totalResolves = 0;
    int inMatrix = 0;
    int oneToManyMappings = 0;
    Set<String> uniqueConnections = new HashSet<String>();
    Set<String> uniqueRegions = new HashSet<String>();
    List<String> resovledPairs = new LinkedList<String>();

    Set<NormalizedConnection> normalizedPairs = new HashSet<NormalizedConnection>();
    int selfConnections = 0;
    int notInRowNames = 0;
    int pairsNotResolved = 0;
    int bothMatched = 0;
    int count = 0;
    List<Double> positiveRanks = new LinkedList<Double>();
    int totalDepth = 0;
    int pairsWithOneMatch = 0;

    for (String pair : pairs) {
        count++;
        boolean atLeast1Match = false;
        boolean atLeast1Normalization = false;

        // for testing
        // if ( count > 300 ) {
        // break;
        // }

        StopWatch s = new StopWatch();
        s.start();
        String regionA = reader.getPartnerAText(pair);
        String regionB = reader.getPartnerBText(pair);
        Set<String> regionAresolves = resolveModel.resolve(regionA, resolver, allTerms, allConcepts);

        log.info("Testing:" + regionA + " -> " + regionB + " " + count + " of " + pairs.size());

        // for speed only go forward if A resolved
        if (!regionAresolves.isEmpty()) {
            Set<String> regionBresolves = resolveModel.resolve(regionB, resolver, allTerms, allConcepts);

            if (!regionBresolves.isEmpty()) {
                log.info("Resolved:");
                log.info("   " + regionA + " -> " + regionAresolves.toString());
                log.info("   " + regionB + " -> " + regionBresolves.toString());
                totalResolves++;
                atLeast1Normalization = true;
                if (regionAresolves.size() > 1)
                    oneToManyMappings++;
                if (regionBresolves.size() > 1)
                    oneToManyMappings++;

                if (regionAresolves.equals(regionBresolves)) {
                    selfConnections++;
                    uniqueRegions.addAll(regionAresolves);
                    resovledPairs.add(pair);
                } else {
                    for (String resolvedA : regionAresolves) {
                        for (String resolvedB : regionBresolves) {
                            resovledPairs.add(pair);
                            uniqueRegions.add(resolvedA);
                            uniqueRegions.add(resolvedB);

                            totalDepth += bamsLoader.getParents(resolvedA).size();
                            totalDepth += bamsLoader.getParents(resolvedB).size();

                            if (dataMatrix.getRowNames().contains(resolvedA)
                                    && dataMatrix.getRowNames().contains(resolvedB)) {
                                if (resolvedA.equals(resolvedB)) { // also done at the set level
                                    selfConnections++;
                                } else {
                                    uniqueConnections.add(resolvedA + resolvedB);
                                    uniqueConnections.add(resolvedB + resolvedA);

                                    // a pair can match to more than one connection!! FIX - list
                                    NormalizedConnection c = new NormalizedConnection();
                                    c.regionA = resolvedA;
                                    c.regionB = resolvedB;
                                    c.pairID = pair;
                                    normalizedPairs.add(c);

                                    // put in connection matrix
                                    double currentValue = predictedMatrix.getByKeys(resolvedA, resolvedB);
                                    currentValue += 1;
                                    predictedMatrix.setByKeys(resolvedA, resolvedB, currentValue);
                                    predictedMatrix.setByKeys(resolvedB, resolvedA, currentValue);
                                    if (dataMatrix.getByKeys(resolvedA, resolvedB) == 1d) {
                                        atLeast1Match = true;
                                        positiveRanks.add((double) (resovledPairs.size()));
                                        inMatrix++;
                                    }
                                }
                            } else {
                                notInRowNames++;
                                log.info("Not in matrix but resolved");
                            }
                        }
                    }
                }
            }
        } // end if on region A resolve
        if (atLeast1Normalization) {
            bothMatched++;
        } else {
            pairsNotResolved++;
        }
        if (atLeast1Match) {
            pairsWithOneMatch++;
        }
    }

    results.put("PairsWithOneNormalize", bothMatched + "");
    results.put("PairsWithOneNormalize2", totalResolves + "");
    results.put("PairsWithOneMatch", pairsWithOneMatch + "");
    results.put("Unresolved pairs", pairsNotResolved + "");
    results.put("RP connected", "" + inMatrix);
    results.put("RP Self connections", "" + selfConnections);
    results.put("Not in BAMS", "" + notInRowNames);
    results.put("RP Unique normalized pairs (not counting self connects)", "" + (uniqueConnections.size() / 2));
    results.put("RP AUC", ROC.aroc(resovledPairs.size(), positiveRanks) + "");
    results.put("RP Resolved pairings", resovledPairs.size() + "");
    results.put("RP Average pair depth", (totalDepth / (double) resovledPairs.size()) + "");
    results.put("Unique regions", uniqueRegions.size() + "");
    results.put("One to many mapping rate", "" + ((double) oneToManyMappings / (2 * totalResolves)));
    results.put("Name", name);

    log.info("Pairs:" + pairs.size());
    log.info("Total resolves:" + totalResolves + " of " + pairs.size());
    log.info("connected in BAMS Matrix:" + inMatrix);
    log.info("Self connections:" + selfConnections);
    log.info("Not in BAMS ROWS:" + notInRowNames);
    log.info("Unresolved pairs:" + pairsNotResolved);

    log.info("Unique normalized pairs (not counting self connects):" + (uniqueConnections.size() / 2));

    if (writeOut)
        FileTools.stringsToFile(resovledPairs, reader.getNormalizedPairsFilename());

    // write out matrix, where??
    String matrixFileName = (Config.config.getString("whitetext.iteractions.results.folder") + name
            + ".matrix");
    Util.writeRTable(matrixFileName + ".txt", predictedMatrix);
    Util.writeImage(matrixFileName + ".png", predictedMatrix);

    NormalizeResult normResult = new NormalizeResult();
    normResult.normalizedPairs = normalizedPairs;
    normResult.statisticMap = results;
    normResult.name = name;

    return normResult;
}

From source file:org.squashtest.tm.service.internal.requirement.VerifiedRequirementsManagerServiceImpl.java

@Override
@Transactional(readOnly = true)
public PagedCollectionHolder<List<VerifiedRequirement>> findAllVerifiedRequirementsByTestCaseId(long testCaseId,
        PagingAndSorting pas) {

    LOGGER.debug("Looking for verified requirements of TestCase[id:{}]", testCaseId);

    Set<Long> calleesIds = callTreeFinder.getTestCaseCallTree(testCaseId);

    calleesIds.add(testCaseId);

    LOGGER.debug("Fetching Requirements verified by TestCases {}", calleesIds.toString());

    List<RequirementVersion> pagedVersionVerifiedByCalles = requirementVersionCoverageDao
            .findDistinctRequirementVersionsByTestCases(calleesIds, pas);

    TestCase mainTestCase = testCaseDao.findById(testCaseId);

    List<VerifiedRequirement> pagedVerifiedReqs = buildVerifiedRequirementList(mainTestCase,
            pagedVersionVerifiedByCalles);

    long totalVerified = requirementVersionCoverageDao.numberDistinctVerifiedByTestCases(calleesIds);

    LOGGER.debug("Total count of verified requirements : {}", totalVerified);

    return new PagingBackedPagedCollectionHolder<>(pas, totalVerified, pagedVerifiedReqs);
}
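
A side note on the logging above: SLF4J's parameterized logging formats its arguments via toString() only when the statement is actually emitted, so the explicit calleesIds.toString() could equally pass the Set itself and defer that work. A minimal sketch, reusing the LOGGER from this example:

// Equivalent output, but toString() runs only if DEBUG is enabled:
LOGGER.debug("Fetching Requirements verified by TestCases {}", calleesIds);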

From source file:io.hops.hopsworks.api.zeppelin.rest.NotebookRestApi.java

private String ownerPermissionError(Set<String> current, Set<String> allowed) throws IOException {
    LOG.info("Cannot change permissions. Connection owners {}. Allowed owners {}", current.toString(),
            allowed.toString());
    return "Insufficient privileges to change permissions.\n\n" + "Allowed owners: " + allowed.toString()
            + "\n\n" + "User belongs to: " + current.toString();
}

From source file:org.orbeon.oxf.processor.pipeline.choose.AbstractChooseProcessor.java

public Processor createInstance() {

    // We store here the "refs with no id" and "ids with no ref" for each branch.
    // Those are lists of collections (one collection per branch).
    final List refsWithNoId = new ArrayList();
    final List idsWithNoRef = new ArrayList();
    final List paramRefs = new ArrayList();

    for (Iterator astIterator = chooseAST.getWhen().iterator(); astIterator.hasNext();) {

        // Get info about id used in this branch
        final ASTWhen when = (ASTWhen) astIterator.next();
        IdInfo idInfo = when.getIdInfo();
        paramRefs.add(idInfo.getOutputRefs());

        // Determine all <p:input ref="..."> with no <p:output id="...">.
        // Those are the inputs of this processor.
        final Set branchRefsWithNoId = new HashSet(idInfo.getInputRefs());
        branchRefsWithNoId.removeAll(idInfo.getOutputIds());
        refsWithNoId.add(branchRefsWithNoId);

        // Determine all <p:output id="..."> with no <p:input ref="...">.
        // Those are the outputs of this processor.
        final Set branchIdsWithNoRef = new HashSet(idInfo.getOutputIds());
        branchIdsWithNoRef.removeAll(idInfo.getInputRefs());
        idsWithNoRef.add(branchIdsWithNoRef);
    }

    // Make sure that the "ids with no ref" are the same for each branch
    if (idsWithNoRef.size() > 1) {
        final Collection firstBranchIdsWithNoRef = (Collection) idsWithNoRef.get(0);
        int branchId = 0;
        for (Iterator i = idsWithNoRef.iterator(); i.hasNext();) {
            branchId++;
            final Collection branchIdsWithNoRef = (Collection) i.next();
            if (branchIdsWithNoRef != firstBranchIdsWithNoRef
                    && !CollectionUtils.isEqualCollection(branchIdsWithNoRef, firstBranchIdsWithNoRef))
                throw new ValidationException("ASTChoose branch number " + branchId
                        + " does not declare the same ids " + branchIdsWithNoRef.toString()
                        + " as the previous branches " + firstBranchIdsWithNoRef.toString(), getLocationData());
        }
    }

    // Make sure that the "param ref" are the same for each branch
    if (paramRefs.size() > 1) {
        final Collection firstBranchParamRefs = (Collection) paramRefs.get(0);
        int branchId = 0;
        for (Iterator i = paramRefs.iterator(); i.hasNext();) {
            branchId++;
            final Collection branchParamRefs = (Collection) i.next();
            if (branchParamRefs != firstBranchParamRefs
                    && !CollectionUtils.isEqualCollection(branchParamRefs, firstBranchParamRefs))
                throw new ValidationException("ASTChoose branch number " + branchId
                        + " does not declare the same refs " + branchParamRefs.toString()
                        + " as the previous branches " + firstBranchParamRefs.toString(), getLocationData());
        }
    }

    // Compute the union of "refs with no id" for all the branches
    final Set allRefsWithNoId = new HashSet();
    for (Iterator i = refsWithNoId.iterator(); i.hasNext();)
        allRefsWithNoId.addAll((Set) i.next());

    // Create the list of inputs based on allRefsWithNoId
    final List astParams = new ArrayList();
    for (int i = 0; i < 2; i++) {
        final Set parameters;
        if (i == 0) {
            parameters = allRefsWithNoId;
        } else {
            parameters = new HashSet();
            parameters.addAll((Set) idsWithNoRef.get(0));
            parameters.addAll((Set) paramRefs.get(0));
        }

        for (Iterator j = parameters.iterator(); j.hasNext();) {
            final String paramName = (String) j.next();
            ASTParam astParam = new ASTParam();
            astParam.setType(i == 0 ? ASTParam.INPUT : ASTParam.OUTPUT);
            astParam.setName(paramName);
            astParams.add(astParam);
        }
    }

    // For each branch, create a new pipeline processor
    final List<Processor> branchProcessors = new ArrayList();
    final List branchConditions = new ArrayList();
    final List<NamespaceMapping> branchNamespaces = new ArrayList<NamespaceMapping>();

    for (Iterator astIterator = chooseAST.getWhen().iterator(); astIterator.hasNext();) {
        final ASTWhen astWhen = (ASTWhen) astIterator.next();

        // Save condition
        branchConditions.add(astWhen.getTest());
        // Get namespaces declared at this point in the pipeline
        if (astWhen.getNode() != null && astWhen.getNamespaces().mapping.size() != 0) {
            throw new ValidationException("ASTWhen cannot have both a node and namespaces defined",
                    astWhen.getLocationData());
        }
        branchNamespaces.add(astWhen.getNode() != null
                ? new NamespaceMapping(Dom4jUtils.getNamespaceContextNoDefault((Element) astWhen.getNode()))
                : astWhen.getNamespaces());

        // Add an identity processor to connect the output of the branch to
        // the <param type="output"> of the pipeline
        final Set idsToConvert = (Set) idsWithNoRef.get(0);
        for (Iterator i = idsToConvert.iterator(); i.hasNext();) {
            final String id = (String) i.next();
            final ASTProcessorCall identityConnector = new ASTProcessorCall(
                    XMLConstants.IDENTITY_PROCESSOR_QNAME);
            {
                identityConnector.addInput(new ASTInput("data", new ASTHrefId(new ASTOutput(null, id))));
                final ASTParam outParam = new ASTParam(ASTParam.OUTPUT, id);
                final LocationData locDat = Dom4jUtils.getLocationData();
                final ASTOutput astOut = new ASTOutput("data", outParam);
                astOut.setLocationData(locDat);
                identityConnector.addOutput(astOut);
            }
            astWhen.addStatement(identityConnector);
        }

        final ASTPipeline astPipeline = new ASTPipeline();
        astPipeline.setValidity(validity);
        astPipeline.getParams().addAll(astParams);
        astPipeline.getStatements().addAll(astWhen.getStatements());
        astPipeline.setNode(astWhen.getNode());
        final Processor pipelineProcessor = new PipelineProcessor(astPipeline);
        if (getId() != null)
            pipelineProcessor.setId(getId() + "-branch" + branchProcessors.size());
        branchProcessors.add(pipelineProcessor);
    }

    return new ConcreteChooseProcessor(getId(), getLocationData(), branchConditions, branchNamespaces,
            branchProcessors, allRefsWithNoId, (Set) idsWithNoRef.get(0), (Set) paramRefs.get(0));
}

From source file:org.b3log.rhythm.service.ArticleService.java

/**
 * Gets the ids of articles matching the given greater/less operation and the specified accessibility check count.
 *
 * @param greaterOrLess the given greater/less operation, '>' or '<'
 * @param checkCnt the specified accessibility check count
 * @return a set of article ids, or an empty set if none match the condition
 */
public Set<String> getArticleIdsByAccessibilityCheckCnt(final char greaterOrLess, final int checkCnt) {
    final Set<String> ret = new HashSet<String>();

    final FilterOperator operator = ('>' == greaterOrLess) ? FilterOperator.GREATER_THAN
            : FilterOperator.LESS_THAN;

    final Query query = new Query()
            .setFilter(new PropertyFilter(Article.ARTICLE_ACCESSIBILITY_NOT_200_CNT, operator, checkCnt))
            .setPageSize(BATCH_SIZE).setPageCount(1).addProjection(Keys.OBJECT_ID, String.class);

    try {
        final JSONObject result = articleRepository.get(query);
        final JSONArray articles = result.getJSONArray(Keys.RESULTS);

        for (int i = 0; i < articles.length(); i++) {
            ret.add(articles.getJSONObject(i).getString(Keys.OBJECT_ID));
        }

        LOGGER.log(Level.DEBUG, "Article Ids[{0}]", ret.toString());
    } catch (final Exception e) {
        LOGGER.log(Level.ERROR, "Gets article ids by accessibility check count[greaterOrLess=" + greaterOrLess
                + ", checkCnt=" + checkCnt + "] failed", e);
    }

    return ret;
}

From source file:it.polimi.tower4clouds.manager.MonitoringManager.java

private void installRule(MonitoringRule rule) throws RuleInstallationException {
    logger.info("Installing rule {}", rule.getId());
    try {
        Query query = queryFactory.prepareQuery(rule);

        queryIdByRuleId.put(rule.getId(), query.getId());

        Set<String> inputMetrics = query.getRequiredMetrics();
        logger.debug("Input metrics: {}", inputMetrics.toString());
        for (String inMetric : inputMetrics) {
            DCConfiguration newDCConfig = prepareDCConfig(rule, inMetric);
            if (!streamsByMetric.containsKey(inMetric)) {
                String newStream = prepareStreamURI(inMetric);
                logger.debug("Registering stream {}", inMetric, newStream, newDCConfig);
                dataAnalyzer.registerStream(newStream);
                streamsByMetric.put(inMetric, newStream);
            }
            addInputMetric(inMetric, rule.getId());
            synchronized (dcAndResourcesLock) {
                dCConfigByRuleId.put(rule.getId(), newDCConfig);
                logger.debug("Saving DC configuration: {}", newDCConfig);
            }
            String inputStream = streamsByMetric.get(inMetric);
            logger.debug("Adding input stream {} to query", inputStream);
            query.addInputStreamURI(inputStream);
        }
        String csparqlQuery = query.build();
        logger.debug("Installing query:\n{}", csparqlQuery);
        dataAnalyzer.registerQuery(query.getId(), csparqlQuery);
        queryByQueryId.put(query.getId(), query);
        if (query.hasOutputMetric()) {
            String outputMetric = query.getOutputMetric();
            logger.debug("Output metric: {}", outputMetric);
            if (streamsByMetric.containsKey(outputMetric)) {
                String message = "The metric " + outputMetric + " is alread produced by ";
                if (ruleIdByObservableMetric.containsKey(outputMetric)) {
                    message += "rule " + ruleIdByObservableMetric.get(outputMetric);
                } else {
                    message += "some data collector according to rules "
                            + getRulesIdsFromInputMetric(outputMetric).toString();
                }
                throw new RuleInstallationException(message);
            }
            String outputStreamURI = prepareStreamURI(query.getId());
            streamsByMetric.put(outputMetric, outputStreamURI);
            ruleIdByObservableMetric.put(outputMetric, rule.getId());
        }
        if (query.hasActions()) {
            dataAnalyzer.addHttpObserver(prepareQueryURI(query.getId()), "http://" + config.getMmIP() + ":"
                    + config.getMmPort() + "/v1/monitoring-rules/" + rule.getId() + "/actions", "TOWER/JSON");
        }
        rulesByRuleId.put(rule.getId(), rule);
    } catch (Exception e) {
        logger.error("Error while installing rule {}, rolling back", rule.getId(), e);
        try {
            cleanUpRule(rule.getId());
        } catch (IOException e1) {
            throw new RuleInstallationException(
                    "Something went wrong while rolling back the installation of rule " + rule.getId()
                            + ", try uninstalling and reinstalling the rule",
                    e);
        }
        throw new RuleInstallationException(e);
    }
}

From source file:com.ibm.jaggr.core.impl.modulebuilder.javascript.RequireExpansionCompilerPassTest.java

@Test
public void testHasPluginResolution() throws Exception {
    Features features = new Features();
    Set<String> dependentFeatures = new TreeSet<String>();
    features.put("feature1", true);
    features.put("feature2", true);
    List<ModuleDeps> expanded = new ArrayList<ModuleDeps>();
    RequireExpansionCompilerPass pass = new RequireExpansionCompilerPass(mockAggregator, features,
            dependentFeatures, expanded, new MutableBoolean(false), true, null, false, null);

    String code, output;
    code = "require([\"has!feature1?has1\",\"has!feature2?has2\"]);";
    output = runPass(pass, code);
    System.out.println(output);
    Assert.assertEquals("[feature1, feature2]", dependentFeatures.toString());
    Assert.assertEquals("require([\"has!feature1?has1\",\"has!feature2?has2\",\"" + placeHolder0 + "\"]);",
            output);
    Assert.assertEquals(new LinkedHashSet<String>(Arrays.asList(new String[] { "dep1", "dep2" })),
            expanded.get(0).getModuleIds());

    features.put("feature2", false);
    dependentFeatures.clear();
    output = runPass(pass, code);
    Assert.assertEquals("[feature1, feature2]", dependentFeatures.toString());
    Assert.assertEquals("require([\"has!feature1?has1\",\"has!feature2?has2\",\"" + placeHolder0 + "\"]);",
            output);
    Assert.assertEquals(new LinkedHashSet<String>(Arrays.asList(new String[] { "dep1" })),
            expanded.get(0).getModuleIds());

    features.put("feature1", false);
    dependentFeatures.clear();
    output = runPass(pass, code);
    Assert.assertEquals("[feature1, feature2]", dependentFeatures.toString());
    Assert.assertEquals("require([\"has!feature1?has1\",\"has!feature2?has2\"]);", output);
    Assert.assertEquals(0, expanded.get(0).getModuleIds().size());

    features.remove("feature2");
    dependentFeatures.clear();
    output = runPass(pass, code);
    Assert.assertEquals("[feature1, feature2]", dependentFeatures.toString());
    Assert.assertEquals("require([\"has!feature1?has1\",\"has!feature2?has2\",\"" + placeHolder0 + "\"]);",
            output);
    Assert.assertEquals(new LinkedHashSet<String>(Arrays.asList(new String[] { "has!feature2?dep2" })),
            expanded.get(0).getModuleIds());

    mockAggregator.getOptions().setOption(IOptions.DISABLE_HASPLUGINBRANCHING, true);
    output = runPass(pass, code);
    Assert.assertEquals("[feature1, feature2]", dependentFeatures.toString());
    Assert.assertEquals("require([\"has!feature1?has1\",\"has!feature2?has2\"]);", output);
    Assert.assertEquals(0, expanded.get(0).getModuleIds().size());

    mockAggregator.getOptions().setOption(IOptions.DISABLE_HASPLUGINBRANCHING, false);
    features.put("feature1", true);
    dependentFeatures.clear();
    output = runPass(pass, code);
    Assert.assertEquals("[feature1, feature2]", dependentFeatures.toString());
    Assert.assertEquals("require([\"has!feature1?has1\",\"has!feature2?has2\",\"" + placeHolder0 + "\"]);",
            output);
    Assert.assertEquals(new LinkedHashSet<String>(Arrays.asList(new String[] { "dep1", "has!feature2?dep2" })),
            expanded.get(0).getModuleIds());

    features.remove("feature1");
    dependentFeatures.clear();
    output = runPass(pass, code);
    Assert.assertEquals("[feature1, feature2]", dependentFeatures.toString());
    Assert.assertEquals("require([\"has!feature1?has1\",\"has!feature2?has2\",\"" + placeHolder0 + "\"]);",
            output);
    Assert.assertEquals(
            new LinkedHashSet<String>(Arrays.asList(new String[] { "has!feature1?dep1", "has!feature2?dep2" })),
            expanded.get(0).getModuleIds());

    mockAggregator.getOptions().setOption(IOptions.DISABLE_HASPLUGINBRANCHING, true);
    output = runPass(pass, code);
    Assert.assertEquals("[feature1, feature2]", dependentFeatures.toString());
    Assert.assertEquals("require([\"has!feature1?has1\",\"has!feature2?has2\"]);", output);
    Assert.assertEquals(0, expanded.get(0).getModuleIds().size());
}
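
Worth noting in the test above: the assertions compare dependentFeatures.toString() against the literal "[feature1, feature2]". That is safe only because dependentFeatures is a TreeSet, whose iteration (and therefore toString()) order is sorted; with a HashSet the same assertion could fail intermittently. A minimal sketch of the difference:

import java.util.Set;
import java.util.TreeSet;

public class DeterministicToStringDemo {
    public static void main(String[] args) {
        // TreeSet sorts its elements, so toString() is deterministic
        // and safe to assert against in tests.
        Set<String> features = new TreeSet<>();
        features.add("feature2");
        features.add("feature1");
        System.out.println(features.toString()); // always [feature1, feature2]
    }
}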

From source file:io.bifroest.commons.boot.BootLoaderNG.java

/**
 * Calculates the systems' boot order. This is an iterative process: in each
 * pass, all systems whose dependencies are already satisfied are removed from
 * the list of remaining systems. This is repeated until the list is empty. If
 * a pass removes no system while some remain, there is a dependency
 * misconfiguration and a CircularDependencyException is raised.
 *
 * @return A list with systems ordered by boot priority. The first element
 * needs to start first, the second after it, and so on.
 * @throws CircularDependencyException If two or more systems are
 * misconfigured, a circular dependency can occur. This happens e.g. if
 * system A depends on system B and system B also requires system A. This
 * cannot be resolved and an exception is thrown.
 */
private List<Subsystem<E>> getBootOrder() throws CircularDependencyException {
    HashMap<String, Subsystem<E>> bootSystems = new HashMap<>();
    HashMap<String, List<String>> systemDependencies = new HashMap<>();
    List<Subsystem<E>> result = new ArrayList<>();

    // shuffle systems to boot, so no one can forget system dependencies
    Collections.shuffle(this.systemsToBoot);

    this.systemsToBoot.stream().forEach((system) -> {
        bootSystems.put(system.getSystemIdentifier(), system);
        systemDependencies.put(system.getSystemIdentifier(), system.getRequiredSystems().stream()
                .filter(dep -> !dep.equals(system.getSystemIdentifier())).collect(Collectors.toList()));
    });
    // while there are dependencies to solve
    while (!systemDependencies.isEmpty()) {
        // Get all nodes without any dependency            
        Set<String> keys = systemDependencies.keySet();
        List<String> resolved = new ArrayList<>();
        keys.stream().forEach((key) -> {
            log.trace("Trying to resolve {}", key);
            Collection<String> dependencies = systemDependencies.get(key);
            log.trace("Found dependencies: {}", dependencies);
            if (dependencies == null || dependencies.isEmpty()) {
                log.trace("Marking {} as resolved", key);
                resolved.add(key);
            }
        });
        // if resolved is empty, we have a loop in the graph            
        if (resolved.isEmpty()) {
            String msg = "Loop in graph! This should not happen. Check your dependencies! Remaining systems: "
                    + keys.toString();
            throw new CircularDependencyException(msg, systemDependencies);
        }

        // remove systemsToBoot found from dependency graph
        resolved.stream().forEach((systemIdentifier) -> {
            systemDependencies.remove(systemIdentifier);
            result.add(bootSystems.get(systemIdentifier));
        });

        // remove dependencies
        Set<String> systemDependenciesKeys = systemDependencies.keySet();
        systemDependenciesKeys.stream().map((key) -> systemDependencies.get(key)).forEach((values) -> {
            resolved.stream().forEach((resolvedValue) -> {
                values.removeIf(v -> v.equals(resolvedValue));
            });
        });
    }
    return result;
}

From source file:org.marketcetera.marketdata.csv.BasicCSVFeedEventTranslator.java

/**
 * Confirms that the given fields are included in the given line.
 *
 * @param inData a <code>CSVQuantum</code> value
 * @param inRequiredFields a <code>Set&lt;Integer&gt;</code> value
 * @throws CoreException if the required fields are not present
 */
protected void validateRequiredFields(CSVQuantum inData, Set<Integer> inRequiredFields) throws CoreException {
    for (int field : inRequiredFields) {
        if (field > inData.getLine().length - 1) {
            throw new CoreException(new I18NBoundMessage2P(LINE_MISSING_REQUIRED_FIELDS, inData.toString(),
                    inRequiredFields.toString()));
        }
    }
}