Example usage for java.util Set containsAll

List of usage examples for java.util Set containsAll

Introduction

On this page you can find example usage for java.util Set containsAll.

Prototype

boolean containsAll(Collection<?> c);

Source Link

Document

Returns true if this set contains all of the elements of the specified collection.

Usage

From source file:org.cloudfoundry.identity.uaa.oauth.token.UaaTokenServices.java

/**
 * Ensures every requested scope is covered either by client auto-approval or by a
 * stored user approval that is APPROVED, still active, and not updated after the
 * given cut-off instant; otherwise the token is rejected.
 *
 * @param userid             id of the user whose approvals are consulted
 * @param clientId           client the token was issued to
 * @param requestedScopes    scopes requested on the token
 * @param autoApprovedScopes scopes the client may grant without explicit user approval
 * @param updateCutOff       approvals updated after this instant invalidate the token
 * @throws InvalidTokenException if an approval expired, was updated after the
 *         cut-off, or any requested scope remains unapproved
 */
private void checkForApproval(String userid, String clientId, Collection<String> requestedScopes,
        Collection<String> autoApprovedScopes, Date updateCutOff) {
    // Auto-approved scopes never need an explicit user approval.
    Set<String> approvedScopes = new HashSet<String>(autoApprovedScopes);

    // Walk the user's stored approvals: collect requested APPROVED scopes,
    // rejecting the token outright when an approval has expired or was
    // updated more recently than the cut-off.
    for (Approval approval : approvalStore.getApprovals(userid, clientId)) {
        boolean relevant = requestedScopes.contains(approval.getScope())
                && approval.getStatus() == ApprovalStatus.APPROVED;
        if (!relevant) {
            continue;
        }
        if (!approval.isCurrentlyActive()) {
            logger.debug("Approval " + approval + " has expired. Need to re-approve.");
            throw new InvalidTokenException("Invalid token (approvals expired)");
        }
        if (updateCutOff.before(approval.getLastUpdatedAt())) {
            logger.debug("At least one approval " + approval + " was updated more recently at "
                    + approval.getLastUpdatedAt() + " access token was issued at " + updateCutOff);
            throw new InvalidTokenException(
                    "Invalid token (approvals updated): " + approval.getLastUpdatedAt());
        }
        approvedScopes.add(approval.getScope());
    }

    // Only issue the token if every requested scope ended up approved
    // (either auto-approved or via an unexpired, pre-cut-off user approval).
    if (!approvedScopes.containsAll(requestedScopes)) {
        logger.debug("All requested scopes " + requestedScopes + " were not approved " + approvedScopes);
        Set<String> unapprovedScopes = new HashSet<String>(requestedScopes);
        unapprovedScopes.removeAll(approvedScopes);
        throw new InvalidTokenException(
                "Invalid token (some requested scopes are not approved): " + unapprovedScopes);
    }
}

From source file:com.redhat.rhn.frontend.xmlrpc.kickstart.profile.ProfileHandler.java

/**
 * Set advanced options in a kickstart profile
 * @param loggedInUser The current user
 * @param ksLabel the kickstart label
 * @param options the advanced options to set
 * @return 1 if success, exception otherwise
 * @throws FaultException A FaultException is thrown if
 *         the profile associated with ksLabel cannot be found
 *         or invalid advanced option is provided
 *
 * @xmlrpc.doc Set advanced options for a kickstart profile.
 * If 'md5_crypt_rootpw' is set to 'True', 'root_pw' is taken as plaintext and
 * will be md5 encrypted on server side, otherwise a hash encoded password
 * (according to the auth option) is expected
 * @xmlrpc.param #session_key()
 * @xmlrpc.param #param("string","ksLabel")
 * @xmlrpc.param
 *   #array()
 *      #struct("advanced options")
 *          #prop_desc("string", "name", "Name of the advanced option.
 *              Valid Option names: autostep, interactive, install, upgrade, text,
 *              network, cdrom, harddrive, nfs, url, lang, langsupport, keyboard,
 *              mouse, device, deviceprobe, zerombr, clearpart, bootloader,
 *              timezone, auth, rootpw, selinux, reboot, firewall, xconfig, skipx,
 *              key, ignoredisk, autopart, cmdline, firstboot, graphical, iscsi,
 *              iscsiname, logging, monitor, multipath, poweroff, halt, services,
 *              shutdown, user, vnc, zfcp, driverdisk, md5_crypt_rootpw")
 *          #prop_desc("string", "arguments", "Arguments of the option")
 *      #struct_end()
 *   #array_end()
 * @xmlrpc.returntype #return_int_success()
 */
public int setAdvancedOptions(User loggedInUser, String ksLabel, List<Map> options) throws FaultException {
    KickstartData ksdata = KickstartFactory.lookupKickstartDataByLabelAndOrgId(ksLabel,
            loggedInUser.getOrg().getId());
    if (ksdata == null) {
        throw new FaultException(-3, "kickstartProfileNotFound",
                "No Kickstart Profile found with label: " + ksLabel);
    }

    List<String> validOptions = Arrays.asList(VALIDOPTIONNAMES);

    // Collect the option names supplied by the caller.
    Set<String> givenOptions = new HashSet<String>();
    for (Map option : options) {
        givenOptions.add((String) option.get("name"));
    }

    // Reject any option name not in the whitelist.
    if (!validOptions.containsAll(givenOptions)) {
        throw new FaultException(-5, "invalidKickstartCommandName",
                "Invalid kickstart option present. List of valid options is: " + validOptions);
    }

    Long ksid = ksdata.getId();
    KickstartOptionsCommand cmd = new KickstartOptionsCommand(ksid, loggedInUser);

    // check if all the required options are present
    List<KickstartCommandName> requiredOptions = KickstartFactory.lookupKickstartRequiredOptions();

    List<String> requiredOptionNames = new ArrayList<String>();
    for (KickstartCommandName kcn : requiredOptions) {
        requiredOptionNames.add(kcn.getName());
    }

    if (!givenOptions.containsAll(requiredOptionNames)) {
        throw new FaultException(-6, "requiredOptionMissing",
                "Required option missing. List of required options: " + requiredOptionNames);
    }

    Set<KickstartCommand> customSet = new HashSet<KickstartCommand>();

    // Build a KickstartCommand for every available option the caller supplied.
    for (Object available : cmd.getAvailableOptions()) {
        KickstartCommandName cn = (KickstartCommandName) available;
        if (!givenOptions.contains(cn.getName())) {
            continue;
        }
        // Find the caller-supplied entry matching this command name; the
        // givenOptions check above guarantees a match exists.
        Map option = null;
        for (Map o : options) {
            if (cn.getName().equals(o.get("name"))) {
                option = o;
                break;
            }
        }

        KickstartCommand kc = new KickstartCommand();
        kc.setCommandName(cn);
        kc.setKickstartData(cmd.getKickstartData());
        kc.setCreated(new Date());
        kc.setModified(new Date());
        if (cn.getArgs().booleanValue()) {
            // handle password encryption
            if (cn.getName().equals("rootpw")) {
                String pwarg = (String) option.get("arguments");
                if (!md5cryptRootPw(options)) {
                    // password already encrypted
                    kc.setArguments(pwarg);
                }
                else {
                    // password changed, encrypt it
                    kc.setArguments(MD5Crypt.crypt(pwarg));
                }
            } else {
                kc.setArguments((String) option.get("arguments"));
            }
        }
        customSet.add(kc);
    }
    cmd.getKickstartData().setOptions(customSet);
    KickstartFactory.saveKickstartData(ksdata);

    return 1;
}

From source file:org.tolven.config.model.CredentialManager.java

/**
 * Synchronizes the truststore file described by {@code trustStoreDetail} with the
 * certificates of its configured credential groups. The file is rewritten only when
 * the set of new certificates is not already fully contained in the existing store.
 * Supports PEM, JKS and PKCS12 formats.
 *
 * @param trustStoreDetail configuration describing the truststore location, format
 *        and certificate groups
 * @throws RuntimeException if a referenced credential group does not exist, the
 *         format is unrecognized, or any I/O/keystore operation fails
 */
public void processTrustStore(TrustStoreDetail trustStoreDetail) {
    try {
        Set<X509Certificate> newTrustStoreCerts = new HashSet<X509Certificate>();
        Set<X509Certificate> previousTrustStoreCerts = new HashSet<X509Certificate>();
        Set<X509Certificate> resultingTrustStoreCerts = new HashSet<X509Certificate>();
        // Resolve each configured credential group to its X509 certificate.
        for (TrustStoreCertificateDetail trustStoreCertificateDetail : trustStoreDetail.getCertificate()) {
            CertificateGroupDetail certGroup = getTolvenConfigWrapper()
                    .getCredentialGroup(trustStoreCertificateDetail.getRefId());
            if (certGroup == null) {
                throw new RuntimeException("The trusted group " + trustStoreCertificateDetail.getRefId()
                        + " in truststore " + trustStoreDetail.getId() + " does not exist");
            }
            X509Certificate trustStoreX509Certificate = getTolvenConfigWrapper().getX509Certificate(certGroup);
            newTrustStoreCerts.add(trustStoreX509Certificate);
        }
        File trustStoreFile = new File(trustStoreDetail.getSource());
        if (TolvenConfigWrapper.TOLVEN_CREDENTIAL_FORMAT_PEM.equals(trustStoreDetail.getFormat())) {
            if (trustStoreFile.exists()) {
                previousTrustStoreCerts = getTolvenConfigWrapper().getX509Certificates(trustStoreFile);
                resultingTrustStoreCerts.addAll(previousTrustStoreCerts);
            }
            // Set intersection of previous and new certificates.
            resultingTrustStoreCerts.retainAll(newTrustStoreCerts);
            // Rewrite only when the new certs are not all present already.
            if (resultingTrustStoreCerts.size() != newTrustStoreCerts.size()
                    || !resultingTrustStoreCerts.containsAll(newTrustStoreCerts)) {
                // try-with-resources guarantees the stream is closed even on write failure.
                try (FileOutputStream out = new FileOutputStream(trustStoreFile)) {
                    for (X509Certificate x509Certificate : newTrustStoreCerts) {
                        out.write(convertToPEMBytes(x509Certificate));
                    }
                }
                logger.info("Created truststore: " + trustStoreDetail.getId());
            }
        } else if (TolvenConfigWrapper.TOLVEN_CREDENTIAL_FORMAT_JKS.equals(trustStoreDetail.getFormat())
                || TolvenConfigWrapper.TOLVEN_CREDENTIAL_FORMAT_PKCS12.equals(trustStoreDetail.getFormat())) {
            char[] truststorepass = getPasswordHolder().getPassword(trustStoreDetail.getId());
            if (trustStoreFile.exists()) {
                KeyStore trustStore = getTolvenConfigWrapper().getKeyStore(truststorepass, trustStoreFile,
                        trustStoreDetail.getFormat());
                Enumeration<String> enumeration = trustStore.aliases();
                while (enumeration.hasMoreElements()) {
                    String alias = enumeration.nextElement();
                    X509Certificate cert = (X509Certificate) trustStore.getCertificate(alias);
                    previousTrustStoreCerts.add(cert);
                    resultingTrustStoreCerts.add(cert);
                }
            }
            // Set intersection of previous and new certificates.
            resultingTrustStoreCerts.retainAll(newTrustStoreCerts);
            if (resultingTrustStoreCerts.size() != newTrustStoreCerts.size()
                    || !resultingTrustStoreCerts.containsAll(newTrustStoreCerts)) {
                KeyStore trustStore = KeyStore.getInstance(trustStoreDetail.getFormat());
                trustStore.load(null, truststorepass);
                for (X509Certificate newCert : newTrustStoreCerts) {
                    String alias = newCert.getSubjectDN().getName();
                    trustStore.setCertificateEntry(alias, newCert);
                }
                trustStoreFile.getParentFile().mkdirs();
                write(trustStore, trustStoreFile, truststorepass);
                logger.info("Created truststore: " + trustStoreDetail.getId());
            }
        } else {
            throw new RuntimeException("Unrecognized keystore format: " + trustStoreDetail.getFormat());
        }
    } catch (Exception ex) {
        throw new RuntimeException("Failed to process truststore: " + trustStoreDetail.getId(), ex);
    }
}

From source file:org.apache.fineract.portfolio.loanaccount.service.LoanApplicationWritePlatformServiceJpaRepositoryImpl.java

/**
 * Modifies a loan application that is still in "submitted and pending approval"
 * state: validates the incoming JSON, applies product/client/group/charge/collateral
 * changes, keeps calendar and linked-savings associations in sync, and persists the
 * updated loan.
 *
 * <p>Fix: the calendar-instance comparison now uses {@code Long.equals} instead of
 * {@code !=} — comparing boxed Longs with {@code !=} compares references and could
 * spuriously report a different calendar, causing unnecessary updates.
 *
 * @param loanId  id of the loan application to modify
 * @param command parsed API command carrying the requested changes
 * @return the processing result describing what changed
 */
@Transactional
@Override
public CommandProcessingResult modifyApplication(final Long loanId, final JsonCommand command) {

    try {
        AppUser currentUser = getAppUserIfPresent();
        final Loan existingLoanApplication = retrieveLoanBy(loanId);
        if (!existingLoanApplication.isSubmittedAndPendingApproval()) {
            throw new LoanApplicationNotInSubmittedAndPendingApprovalStateCannotBeModified(loanId);
        }

        final String productIdParamName = "productId";
        LoanProduct newLoanProduct = null;
        if (command.isChangeInLongParameterNamed(productIdParamName,
                existingLoanApplication.loanProduct().getId())) {
            final Long productId = command.longValueOfParameterNamed(productIdParamName);
            newLoanProduct = this.loanProductRepository.findOne(productId);
            if (newLoanProduct == null) {
                throw new LoanProductNotFoundException(productId);
            }
        }

        LoanProduct loanProductForValidations = newLoanProduct == null ? existingLoanApplication.loanProduct()
                : newLoanProduct;

        this.fromApiJsonDeserializer.validateForModify(command.json(), loanProductForValidations,
                existingLoanApplication);

        checkClientOrGroupActive(existingLoanApplication);

        // Snapshot existing charges so later comparisons can detect modifications.
        final Set<LoanCharge> existingCharges = existingLoanApplication.charges();
        Map<Long, LoanChargeData> chargesMap = new HashMap<>();
        for (LoanCharge charge : existingCharges) {
            LoanChargeData chargeData = new LoanChargeData(charge.getId(), charge.getDueLocalDate(),
                    charge.amountOrPercentage());
            chargesMap.put(charge.getId(), chargeData);
        }
        Set<LoanDisbursementDetails> disbursementDetails = this.loanAssembler
                .fetchDisbursementData(command.parsedJson().getAsJsonObject());

        /**
         * Stores all charges which are passed in during modify loan
         * application
         **/
        final Set<LoanCharge> possiblyModifedLoanCharges = this.loanChargeAssembler
                .fromParsedJson(command.parsedJson(), disbursementDetails);
        /** Boolean determines if any charge has been modified **/
        boolean isChargeModified = false;

        Set<Charge> newTrancheChages = this.loanChargeAssembler.getNewLoanTrancheCharges(command.parsedJson());
        for (Charge charge : newTrancheChages) {
            existingLoanApplication.addTrancheLoanCharge(charge);
        }

        /**
         * If there are any charges already present, which are now not
         * passed in as a part of the request, deem the charges as modified
         **/
        if (!possiblyModifedLoanCharges.isEmpty()) {
            if (!possiblyModifedLoanCharges.containsAll(existingCharges)) {
                isChargeModified = true;
            }
        }

        /**
         * If any new charges are added or values of existing charges are
         * modified
         **/
        for (LoanCharge loanCharge : possiblyModifedLoanCharges) {
            if (loanCharge.getId() == null) {
                isChargeModified = true;
            } else {
                LoanChargeData chargeData = chargesMap.get(loanCharge.getId());
                if (loanCharge.amountOrPercentage().compareTo(chargeData.amountOrPercentage()) != 0
                        || (loanCharge.isSpecifiedDueDate()
                                && !loanCharge.getDueLocalDate().equals(chargeData.getDueDate()))) {
                    isChargeModified = true;
                }
            }
        }

        final Set<LoanCollateral> possiblyModifedLoanCollateralItems = this.loanCollateralAssembler
                .fromParsedJson(command.parsedJson());

        final Map<String, Object> changes = existingLoanApplication.loanApplicationModification(command,
                possiblyModifedLoanCharges, possiblyModifedLoanCollateralItems, this.aprCalculator,
                isChargeModified);

        if (changes.containsKey("expectedDisbursementDate")) {
            this.loanAssembler.validateExpectedDisbursementForHolidayAndNonWorkingDay(existingLoanApplication);
        }

        final String clientIdParamName = "clientId";
        if (changes.containsKey(clientIdParamName)) {
            final Long clientId = command.longValueOfParameterNamed(clientIdParamName);
            final Client client = this.clientRepository.findOneWithNotFoundDetection(clientId);
            if (client.isNotActive()) {
                throw new ClientNotActiveException(clientId);
            }

            existingLoanApplication.updateClient(client);
        }

        final String groupIdParamName = "groupId";
        if (changes.containsKey(groupIdParamName)) {
            final Long groupId = command.longValueOfParameterNamed(groupIdParamName);
            final Group group = this.groupRepository.findOneWithNotFoundDetection(groupId);
            if (group.isNotActive()) {
                throw new GroupNotActiveException(groupId);
            }

            existingLoanApplication.updateGroup(group);
        }

        if (newLoanProduct != null) {
            existingLoanApplication.updateLoanProduct(newLoanProduct);
            if (!changes.containsKey("interestRateFrequencyType")) {
                existingLoanApplication.updateInterestRateFrequencyType();
            }
            final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
            final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors)
                    .resource("loan");
            if (newLoanProduct.useBorrowerCycle()) {
                final Long clientId = this.fromJsonHelper.extractLongNamed("clientId", command.parsedJson());
                final Long groupId = this.fromJsonHelper.extractLongNamed("groupId", command.parsedJson());
                Integer cycleNumber = 0;
                if (clientId != null) {
                    cycleNumber = this.loanReadPlatformService.retriveLoanCounter(clientId,
                            newLoanProduct.getId());
                } else if (groupId != null) {
                    cycleNumber = this.loanReadPlatformService.retriveLoanCounter(groupId,
                            AccountType.GROUP.getValue(), newLoanProduct.getId());
                }
                this.loanProductCommandFromApiJsonDeserializer.validateMinMaxConstraints(command.parsedJson(),
                        baseDataValidator, newLoanProduct, cycleNumber);
            } else {
                this.loanProductCommandFromApiJsonDeserializer.validateMinMaxConstraints(command.parsedJson(),
                        baseDataValidator, newLoanProduct);
            }
            if (newLoanProduct.isLinkedToFloatingInterestRate()) {
                existingLoanApplication.getLoanProductRelatedDetail().updateForFloatingInterestRates();
            } else {
                existingLoanApplication.setInterestRateDifferential(null);
                existingLoanApplication.setIsFloatingInterestRate(null);
            }
            if (!dataValidationErrors.isEmpty()) {
                throw new PlatformApiDataValidationException(dataValidationErrors);
            }
        }

        existingLoanApplication.updateIsInterestRecalculationEnabled();
        validateSubmittedOnDate(existingLoanApplication);

        final LoanProductRelatedDetail productRelatedDetail = existingLoanApplication.repaymentScheduleDetail();
        if (existingLoanApplication.loanProduct().getLoanProductConfigurableAttributes() != null) {
            updateProductRelatedDetails(productRelatedDetail, existingLoanApplication);
        }

        final String fundIdParamName = "fundId";
        if (changes.containsKey(fundIdParamName)) {
            final Long fundId = command.longValueOfParameterNamed(fundIdParamName);
            final Fund fund = this.loanAssembler.findFundByIdIfProvided(fundId);

            existingLoanApplication.updateFund(fund);
        }

        final String loanPurposeIdParamName = "loanPurposeId";
        if (changes.containsKey(loanPurposeIdParamName)) {
            final Long loanPurposeId = command.longValueOfParameterNamed(loanPurposeIdParamName);
            final CodeValue loanPurpose = this.loanAssembler.findCodeValueByIdIfProvided(loanPurposeId);
            existingLoanApplication.updateLoanPurpose(loanPurpose);
        }

        final String loanOfficerIdParamName = "loanOfficerId";
        if (changes.containsKey(loanOfficerIdParamName)) {
            final Long loanOfficerId = command.longValueOfParameterNamed(loanOfficerIdParamName);
            final Staff newValue = this.loanAssembler.findLoanOfficerByIdIfProvided(loanOfficerId);
            existingLoanApplication.updateLoanOfficerOnLoanApplication(newValue);
        }

        final String strategyIdParamName = "transactionProcessingStrategyId";
        if (changes.containsKey(strategyIdParamName)) {
            final Long strategyId = command.longValueOfParameterNamed(strategyIdParamName);
            final LoanTransactionProcessingStrategy strategy = this.loanAssembler
                    .findStrategyByIdIfProvided(strategyId);

            existingLoanApplication.updateTransactionProcessingStrategy(strategy);
        }

        final String collateralParamName = "collateral";
        if (changes.containsKey(collateralParamName)) {
            final Set<LoanCollateral> loanCollateral = this.loanCollateralAssembler
                    .fromParsedJson(command.parsedJson());
            existingLoanApplication.updateLoanCollateral(loanCollateral);
        }

        final String chargesParamName = "charges";
        if (changes.containsKey(chargesParamName)) {
            existingLoanApplication.updateLoanCharges(possiblyModifedLoanCharges);
        }

        if (changes.containsKey("recalculateLoanSchedule")) {
            changes.remove("recalculateLoanSchedule");

            final JsonElement parsedQuery = this.fromJsonHelper.parse(command.json());
            final JsonQuery query = JsonQuery.from(command.json(), parsedQuery, this.fromJsonHelper);

            final LoanScheduleModel loanSchedule = this.calculationPlatformService.calculateLoanSchedule(query,
                    false);
            existingLoanApplication.updateLoanSchedule(loanSchedule, currentUser);
            existingLoanApplication.recalculateAllCharges();
        }

        this.fromApiJsonDeserializer.validateLoanTermAndRepaidEveryValues(
                existingLoanApplication.getTermFrequency(),
                existingLoanApplication.getTermPeriodFrequencyType(),
                productRelatedDetail.getNumberOfRepayments(), productRelatedDetail.getRepayEvery(),
                productRelatedDetail.getRepaymentPeriodFrequencyType().getValue(), existingLoanApplication);

        saveAndFlushLoanWithDataIntegrityViolationChecks(existingLoanApplication);

        final String submittedOnNote = command.stringValueOfParameterNamed("submittedOnNote");
        if (StringUtils.isNotBlank(submittedOnNote)) {
            final Note note = Note.loanNote(existingLoanApplication, submittedOnNote);
            this.noteRepository.save(note);
        }

        final Long calendarId = command.longValueOfParameterNamed("calendarId");
        Calendar calendar = null;
        if (calendarId != null && calendarId != 0) {
            calendar = this.calendarRepository.findOne(calendarId);
            if (calendar == null) {
                throw new CalendarNotFoundException(calendarId);
            }
        }

        final List<CalendarInstance> ciList = (List<CalendarInstance>) this.calendarInstanceRepository
                .findByEntityIdAndEntityTypeId(loanId, CalendarEntityType.LOANS.getValue());
        if (calendar != null) {

            // For loans, allow to attach only one calendar instance per
            // loan
            if (ciList != null && !ciList.isEmpty()) {
                final CalendarInstance calendarInstance = ciList.get(0);
                // Compare the boxed Long ids by value: '!=' on Long compares
                // references and can spuriously differ for equal ids.
                if (!calendarInstance.getCalendar().getId().equals(calendar.getId())) {
                    calendarInstance.updateCalendar(calendar);
                    this.calendarInstanceRepository.saveAndFlush(calendarInstance);
                }
            } else {
                // attaching new calendar
                final CalendarInstance calendarInstance = new CalendarInstance(calendar,
                        existingLoanApplication.getId(), CalendarEntityType.LOANS.getValue());
                this.calendarInstanceRepository.save(calendarInstance);
            }

        } else if (ciList != null && !ciList.isEmpty()) {
            final CalendarInstance calendarInstance = ciList.get(0);
            this.calendarInstanceRepository.delete(calendarInstance);
        }

        // Save linked account information
        final String linkAccountIdParamName = "linkAccountId";
        final Long savingsAccountId = command.longValueOfParameterNamed(linkAccountIdParamName);
        AccountAssociations accountAssociations = this.accountAssociationsRepository.findByLoanIdAndType(loanId,
                AccountAssociationType.LINKED_ACCOUNT_ASSOCIATION.getValue());
        boolean isLinkedAccPresent = false;
        if (savingsAccountId == null) {
            if (accountAssociations != null) {
                if (this.fromJsonHelper.parameterExists(linkAccountIdParamName, command.parsedJson())) {
                    this.accountAssociationsRepository.delete(accountAssociations);
                    changes.put(linkAccountIdParamName, null);
                } else {
                    isLinkedAccPresent = true;
                }
            }
        } else {
            isLinkedAccPresent = true;
            boolean isModified = false;
            if (accountAssociations == null) {
                isModified = true;
            } else {
                final SavingsAccount savingsAccount = accountAssociations.linkedSavingsAccount();
                if (savingsAccount == null || !savingsAccount.getId().equals(savingsAccountId)) {
                    isModified = true;
                }
            }
            if (isModified) {
                final SavingsAccount savingsAccount = this.savingsAccountAssembler
                        .assembleFrom(savingsAccountId);
                this.fromApiJsonDeserializer.validatelinkedSavingsAccount(savingsAccount,
                        existingLoanApplication);
                if (accountAssociations == null) {
                    boolean isActive = true;
                    accountAssociations = AccountAssociations.associateSavingsAccount(existingLoanApplication,
                            savingsAccount, AccountAssociationType.LINKED_ACCOUNT_ASSOCIATION.getValue(),
                            isActive);
                } else {
                    accountAssociations.updateLinkedSavingsAccount(savingsAccount);
                }
                changes.put(linkAccountIdParamName, savingsAccountId);
                this.accountAssociationsRepository.save(accountAssociations);
            }
        }

        if (!isLinkedAccPresent) {
            final Set<LoanCharge> charges = existingLoanApplication.charges();
            for (final LoanCharge loanCharge : charges) {
                if (loanCharge.getChargePaymentMode().isPaymentModeAccountTransfer()) {
                    final String errorMessage = "one of the charges requires linked savings account for payment";
                    throw new LinkedAccountRequiredException("loanCharge", errorMessage);
                }
            }
        }

        // updating loan interest recalculation details throwing null
        // pointer exception after saveAndFlush
        // http://stackoverflow.com/questions/17151757/hibernate-cascade-update-gives-null-pointer/17334374#17334374
        this.loanRepository.save(existingLoanApplication);

        if (productRelatedDetail.isInterestRecalculationEnabled()) {
            this.fromApiJsonDeserializer.validateLoanForInterestRecalculation(existingLoanApplication);
            if (changes.containsKey(LoanProductConstants.isInterestRecalculationEnabledParameterName)) {
                createAndPersistCalendarInstanceForInterestRecalculation(existingLoanApplication);
            } else {
                if (changes.containsKey(LoanProductConstants.recalculationRestFrequencyDateParamName)) {

                    CalendarInstance calendarInstance = this.calendarInstanceRepository
                            .findByEntityIdAndEntityTypeIdAndCalendarTypeId(
                                    existingLoanApplication.loanInterestRecalculationDetailId(),
                                    CalendarEntityType.LOAN_RECALCULATION_REST_DETAIL.getValue(),
                                    CalendarType.COLLECTION.getValue());
                    updateRestCalendarDetailsForInterestRecalculation(calendarInstance,
                            existingLoanApplication);
                }
                if (changes.containsKey(LoanProductConstants.recalculationCompoundingFrequencyDateParamName)) {
                    CalendarInstance calendarInstance = this.calendarInstanceRepository
                            .findByEntityIdAndEntityTypeIdAndCalendarTypeId(
                                    existingLoanApplication.loanInterestRecalculationDetailId(),
                                    CalendarEntityType.LOAN_RECALCULATION_COMPOUNDING_DETAIL.getValue(),
                                    CalendarType.COLLECTION.getValue());
                    updateCompoundingCalendarDetailsForInterestRecalculation(calendarInstance,
                            existingLoanApplication);
                }
            }

        }

        return new CommandProcessingResultBuilder() //
                .withEntityId(loanId) //
                .withOfficeId(existingLoanApplication.getOfficeId()) //
                .withClientId(existingLoanApplication.getClientId()) //
                .withGroupId(existingLoanApplication.getGroupId()) //
                .withLoanId(existingLoanApplication.getId()) //
                .with(changes).build();
    } catch (final DataIntegrityViolationException dve) {
        handleDataIntegrityIssues(command, dve);
        return CommandProcessingResult.empty();
    }
}

From source file:com.streamsets.pipeline.stage.processor.fieldhasher.TestFieldHasherProcessor.java

/**
 * Runs the given record through a FieldHasherProcessor configured with
 * OnStagePreConditionFailure.TO_ERROR and verifies that the record lands in the
 * error lane (nothing in the output lane), the error code is HASH_01, and the
 * processor reported exactly the expected set of valid fields.
 */
private void checkFieldIssueToError(Record record, HasherConfig hasherConfig, Set<String> expectedValidFields)
        throws StageException {

    final Set<String> observedValidFields = registerCallbackForValidFields();

    FieldHasherProcessor spiedProcessor = PowerMockito
            .spy(new FieldHasherProcessor(hasherConfig, OnStagePreConditionFailure.TO_ERROR));

    ProcessorRunner processorRunner = new ProcessorRunner.Builder(FieldHasherDProcessor.class, spiedProcessor)
            .addOutputLane("a").setOnRecordError(OnRecordError.TO_ERROR).build();
    processorRunner.runInit();
    try {
        StageRunner.Output output = processorRunner.runProcess(Arrays.asList(record));
        // The record must not reach the output lane; it goes to error instead.
        Assert.assertEquals(0, output.getRecords().get("a").size());
        Assert.assertEquals(1, processorRunner.getErrorRecords().size());
        Assert.assertEquals("Valid Fields Size mismatch", expectedValidFields.size(),
                observedValidFields.size());
        Assert.assertTrue("Expected Valid Fields Not Present",
                observedValidFields.containsAll(expectedValidFields));
        Assert.assertEquals(Errors.HASH_01.toString(),
                processorRunner.getErrorRecords().get(0).getHeader().getErrorCode());
    } finally {
        processorRunner.runDestroy();
    }
}

From source file:org.apache.calcite.rel.rules.AbstractMaterializedViewRule.java

/**
 * Rewriting logic is based on "Optimizing Queries Using Materialized Views:
 * A Practical, Scalable Solution" by Goldstein and Larson.
 *
 * <p>On the query side, rules matches a Project-node chain or node, where node
 * is either an Aggregate or a Join. Subplan rooted at the node operator must
 * be composed of one or more of the following operators: TableScan, Project,
 * Filter, and Join.
 *
 * <p>For each join MV, we need to check the following:
 * <ol>
 * <li> The plan rooted at the Join operator in the view produces all rows
 * needed by the plan rooted at the Join operator in the query.</li>
 * <li> All columns required by compensating predicates, i.e., predicates that
 * need to be enforced over the view, are available at the view output.</li>
 * <li> All output expressions can be computed from the output of the view.</li>
 * <li> All output rows occur with the correct duplication factor. We might
 * rely on existing Unique-Key - Foreign-Key relationships to extract that
 * information.</li>
 * </ol>
 *
 * <p>In turn, for each aggregate MV, we need to check the following:
 * <ol>
 * <li> The plan rooted at the Aggregate operator in the view produces all rows
 * needed by the plan rooted at the Aggregate operator in the query.</li>
 * <li> All columns required by compensating predicates, i.e., predicates that
 * need to be enforced over the view, are available at the view output.</li>
 * <li> The grouping columns in the query are a subset of the grouping columns
 * in the view.</li>
 * <li> All columns required to perform further grouping are available in the
 * view output.</li>
 * <li> All columns required to compute output expressions are available in the
 * view output.</li>
 * </ol>
 */
protected void perform(RelOptRuleCall call, Project topProject, RelNode node) {
    final RexBuilder rexBuilder = node.getCluster().getRexBuilder();
    final RelMetadataQuery mq = RelMetadataQuery.instance();
    final RelOptPlanner planner = call.getPlanner();
    // Simplifier used for predicate normalization throughout; falls back to the
    // default executor when the planner does not provide one.
    final RexSimplify simplify = new RexSimplify(rexBuilder, true,
            planner.getExecutor() != null ? planner.getExecutor() : RexUtil.EXECUTOR);

    // Materializations are only tracked by the Volcano planner; otherwise there is
    // nothing to rewrite against.
    final List<RelOptMaterialization> materializations = (planner instanceof VolcanoPlanner)
            ? ((VolcanoPlanner) planner).getMaterializations()
            : ImmutableList.<RelOptMaterialization>of();

    if (!materializations.isEmpty()) {
        // 1. Explore query plan to recognize whether preconditions to
        // try to generate a rewriting are met
        if (!isValidPlan(topProject, node, mq)) {
            return;
        }

        // Obtain applicable (filtered) materializations
        // TODO: Filtering of relevant materializations needs to be
        // improved so we gather only materializations that might
        // actually generate a valid rewriting.
        final List<RelOptMaterialization> applicableMaterializations = RelOptMaterializations
                .getApplicableMaterializations(node, materializations);

        if (!applicableMaterializations.isEmpty()) {
            // 2. Initialize all query related auxiliary data structures
            // that will be used throughout query rewriting process
            // Generate query table references
            final Set<RelTableRef> queryTableRefs = mq.getTableReferences(node);
            if (queryTableRefs == null) {
                // Bail out: metadata could not determine the referenced tables
                return;
            }

            // Extract query predicates
            final RelOptPredicateList queryPredicateList = mq.getAllPredicates(node);
            if (queryPredicateList == null) {
                // Bail out: metadata could not pull up the query predicates
                return;
            }
            // Split the simplified query predicate into a triple of
            // (column-equality, range, residual) predicates.
            final RexNode pred = simplify.simplify(
                    RexUtil.composeConjunction(rexBuilder, queryPredicateList.pulledUpPredicates, false));
            final Triple<RexNode, RexNode, RexNode> queryPreds = splitPredicates(rexBuilder, pred);

            // Extract query equivalence classes. An equivalence class is a set
            // of columns in the query output that are known to be equal.
            final EquivalenceClasses qEC = new EquivalenceClasses();
            for (RexNode conj : RelOptUtil.conjunctions(queryPreds.getLeft())) {
                // splitPredicates guarantees the left component holds only equality
                // conjuncts over table input refs.
                assert conj.isA(SqlKind.EQUALS);
                RexCall equiCond = (RexCall) conj;
                qEC.addEquivalenceClass((RexTableInputRef) equiCond.getOperands().get(0),
                        (RexTableInputRef) equiCond.getOperands().get(1));
            }

            // 3. We iterate through all applicable materializations trying to
            // rewrite the given query
            for (RelOptMaterialization materialization : applicableMaterializations) {
                final Project topViewProject;
                final RelNode viewNode;
                // Peel off a top Project on the view, if present, mirroring the
                // Project-node chain matched on the query side.
                if (materialization.queryRel instanceof Project) {
                    topViewProject = (Project) materialization.queryRel;
                    viewNode = topViewProject.getInput();
                } else {
                    topViewProject = null;
                    viewNode = materialization.queryRel;
                }

                // 3.1. View checks before proceeding
                if (!isValidPlan(topViewProject, viewNode, mq)) {
                    // Skip it
                    continue;
                }

                // 3.2. Initialize all query related auxiliary data structures
                // that will be used throughout query rewriting process
                // Extract view predicates
                final RelOptPredicateList viewPredicateList = mq.getAllPredicates(viewNode);
                if (viewPredicateList == null) {
                    // Skip it
                    continue;
                }
                final RexNode viewPred = simplify.simplify(
                        RexUtil.composeConjunction(rexBuilder, viewPredicateList.pulledUpPredicates, false));
                final Triple<RexNode, RexNode, RexNode> viewPreds = splitPredicates(rexBuilder, viewPred);

                // Extract view table references
                final Set<RelTableRef> viewTableRefs = mq.getTableReferences(viewNode);
                if (viewTableRefs == null) {
                    // Bail out: without view table references no materialization
                    // can be matched reliably
                    return;
                }

                // Extract view tables
                MatchModality matchModality;
                Multimap<RexTableInputRef, RexTableInputRef> compensationEquiColumns = ArrayListMultimap
                        .create();
                if (!queryTableRefs.equals(viewTableRefs)) {
                    // We try to compensate, e.g., for join queries it might be
                    // possible to join missing tables with view to compute result.
                    // Two supported cases: query tables are subset of view tables (we need to
                    // check whether they are cardinality-preserving joins), or view tables are
                    // subset of query tables (add additional tables through joins if possible)
                    if (viewTableRefs.containsAll(queryTableRefs)) {
                        matchModality = MatchModality.QUERY_PARTIAL;
                        // Build the view's own equivalence classes; needed to prove
                        // the extra view joins are cardinality-preserving.
                        final EquivalenceClasses vEC = new EquivalenceClasses();
                        for (RexNode conj : RelOptUtil.conjunctions(viewPreds.getLeft())) {
                            assert conj.isA(SqlKind.EQUALS);
                            RexCall equiCond = (RexCall) conj;
                            vEC.addEquivalenceClass((RexTableInputRef) equiCond.getOperands().get(0),
                                    (RexTableInputRef) equiCond.getOperands().get(1));
                        }
                        if (!compensateQueryPartial(compensationEquiColumns, viewTableRefs, vEC,
                                queryTableRefs)) {
                            // Cannot rewrite, skip it
                            continue;
                        }
                    } else if (queryTableRefs.containsAll(viewTableRefs)) {
                        // TODO: implement latest case
                        matchModality = MatchModality.VIEW_PARTIAL;
                        continue;
                    } else {
                        // Skip it
                        continue;
                    }
                } else {
                    matchModality = MatchModality.COMPLETE;
                }

                // 4. We map every table in the query to a view table with the same qualified
                // name.
                final Multimap<RelTableRef, RelTableRef> multiMapTables = ArrayListMultimap.create();
                for (RelTableRef queryTableRef : queryTableRefs) {
                    for (RelTableRef viewTableRef : viewTableRefs) {
                        if (queryTableRef.getQualifiedName().equals(viewTableRef.getQualifiedName())) {
                            multiMapTables.put(queryTableRef, viewTableRef);
                        }
                    }
                }

                // If a table is used multiple times, we will create multiple mappings,
                // and we will try to rewrite the query using each of the mappings.
                // Then, we will try to map every source table (query) to a target
                // table (view), and if we are successful, we will try to create
                // compensation predicates to filter the view results further
                // (if needed).
                final List<BiMap<RelTableRef, RelTableRef>> flatListMappings = generateTableMappings(
                        multiMapTables);
                for (BiMap<RelTableRef, RelTableRef> tableMapping : flatListMappings) {
                    // 4.0. If compensation equivalence classes exist, we need to add
                    // the mapping to the query mapping
                    final EquivalenceClasses currQEC = EquivalenceClasses.copy(qEC);
                    if (matchModality == MatchModality.QUERY_PARTIAL) {
                        for (Entry<RexTableInputRef, RexTableInputRef> e : compensationEquiColumns.entries()) {
                            // Copy origin
                            RelTableRef queryTableRef = tableMapping.inverse().get(e.getKey().getTableRef());
                            RexTableInputRef queryColumnRef = RexTableInputRef.of(queryTableRef,
                                    e.getKey().getIndex(), e.getKey().getType());
                            // Add to query equivalence classes and table mapping
                            currQEC.addEquivalenceClass(queryColumnRef, e.getValue());
                            // Identity mapping: the compensating view-only table maps to itself
                            tableMapping.put(e.getValue().getTableRef(), e.getValue().getTableRef()); //identity
                        }
                    }

                    final RexNode compensationColumnsEquiPred;
                    final RexNode compensationRangePred;
                    final RexNode compensationResidualPred;

                    // 4.1. Establish relationship between view and query equivalence classes.
                    // If every view equivalence class is not a subset of a query
                    // equivalence class, we bail out.
                    // To establish relationship, we swap column references of the view predicates
                    // to point to query tables. Then, we create the equivalence classes for the
                    // view predicates and check that every view equivalence class is a subset of a
                    // query equivalence class: if it is not, we bail out.
                    final RexNode viewColumnsEquiPred = RexUtil.swapTableReferences(rexBuilder,
                            viewPreds.getLeft(), tableMapping.inverse());
                    final EquivalenceClasses queryBasedVEC = new EquivalenceClasses();
                    for (RexNode conj : RelOptUtil.conjunctions(viewColumnsEquiPred)) {
                        assert conj.isA(SqlKind.EQUALS);
                        RexCall equiCond = (RexCall) conj;
                        queryBasedVEC.addEquivalenceClass((RexTableInputRef) equiCond.getOperands().get(0),
                                (RexTableInputRef) equiCond.getOperands().get(1));
                    }
                    compensationColumnsEquiPred = generateEquivalenceClasses(rexBuilder, currQEC,
                            queryBasedVEC);
                    if (compensationColumnsEquiPred == null) {
                        // Skip it
                        continue;
                    }

                    // 4.2. We check that range intervals for the query are contained in the view.
                    // Compute compensating predicates.
                    final RexNode queryRangePred = RexUtil.swapColumnReferences(rexBuilder,
                            queryPreds.getMiddle(), currQEC.getEquivalenceClassesMap());
                    final RexNode viewRangePred = RexUtil.swapTableColumnReferences(rexBuilder,
                            viewPreds.getMiddle(), tableMapping.inverse(), currQEC.getEquivalenceClassesMap());
                    compensationRangePred = SubstitutionVisitor.splitFilter(simplify, queryRangePred,
                            viewRangePred);
                    if (compensationRangePred == null) {
                        // Skip it: query ranges are not contained in the view ranges
                        continue;
                    }

                    // 4.3. Finally, we check that residual predicates of the query are satisfied
                    // within the view.
                    // Compute compensating predicates.
                    final RexNode queryResidualPred = RexUtil.swapColumnReferences(rexBuilder,
                            queryPreds.getRight(), currQEC.getEquivalenceClassesMap());
                    final RexNode viewResidualPred = RexUtil.swapTableColumnReferences(rexBuilder,
                            viewPreds.getRight(), tableMapping.inverse(), currQEC.getEquivalenceClassesMap());
                    compensationResidualPred = SubstitutionVisitor.splitFilter(simplify, queryResidualPred,
                            viewResidualPred);
                    if (compensationResidualPred == null) {
                        // Skip it
                        continue;
                    }

                    // 4.4. Final compensation predicate.
                    RexNode compensationPred = RexUtil.composeConjunction(rexBuilder, ImmutableList
                            .of(compensationColumnsEquiPred, compensationRangePred, compensationResidualPred),
                            false);
                    if (!compensationPred.isAlwaysTrue()) {
                        // All columns required by compensating predicates must be contained
                        // in the view output (condition 2).
                        List<RexNode> viewExprs = extractExpressions(topViewProject, viewNode, rexBuilder);
                        compensationPred = rewriteExpression(rexBuilder, viewNode, viewExprs, compensationPred,
                                tableMapping, currQEC.getEquivalenceClassesMap(), mq);
                        if (compensationPred == null) {
                            // Skip it: compensation predicate cannot be expressed
                            // over the view output
                            continue;
                        }
                    }

                    // 4.5. Generate final rewriting if possible.
                    // First, we add the compensation predicate (if any) on top of the view.
                    // Then, we trigger the Aggregate unifying method. This method will either create
                    // a Project or an Aggregate operator on top of the view. It will also compute the
                    // output expressions for the query.
                    RelBuilder builder = call.builder();
                    builder.push(materialization.tableRel);
                    if (!compensationPred.isAlwaysTrue()) {
                        builder.filter(simplify.simplify(compensationPred));
                    }
                    RelNode result = unify(rexBuilder, builder, builder.build(), topProject, node,
                            topViewProject, viewNode, tableMapping, currQEC.getEquivalenceClassesMap(), mq);
                    if (result == null) {
                        // Skip it
                        continue;
                    }
                    call.transformTo(result);
                }
            }
        }
    }
}

From source file:org.apache.hadoop.hbase.master.TestRegionPlacement.java

@Test
public void testFavoredNodesPresentForRoundRobinAssignment() throws HBaseIOException {
    // Verifies that favored-node assignments are stable across repeated
    // roundRobinAssignment calls, and that fallback assignees come from the
    // favored-node list when the primary is unavailable.
    LoadBalancer balancer = LoadBalancerFactory.getLoadBalancer(TEST_UTIL.getConfiguration());
    balancer.setMasterServices(TEST_UTIL.getMiniHBaseCluster().getMaster());
    List<ServerName> servers = new ArrayList<ServerName>();
    for (int i = 0; i < SLAVES; i++) {
        ServerName server = TEST_UTIL.getMiniHBaseCluster().getRegionServer(i).getServerName();
        servers.add(server);
    }
    // Single region so the assignment map has exactly one assignee.
    List<HRegionInfo> regions = new ArrayList<HRegionInfo>(1);
    HRegionInfo region = new HRegionInfo(TableName.valueOf("foobar"));
    regions.add(region);
    Map<ServerName, List<HRegionInfo>> assignmentMap = balancer.roundRobinAssignment(regions, servers);
    Set<ServerName> serverBefore = assignmentMap.keySet();
    List<ServerName> favoredNodesBefore = ((FavoredNodeLoadBalancer) balancer).getFavoredNodes(region);
    assert favoredNodesBefore.size() == 3;
    // the primary RS should be the one that the balancer's assignment returns
    assertTrue(
            ServerName.isSameHostnameAndPort(serverBefore.iterator().next(), favoredNodesBefore.get(PRIMARY)));
    // now remove the primary from the list of available servers
    // NOTE(review): removeMatchingServers mutates `servers` in place and returns
    // the removed entries — they are re-added further below.
    List<ServerName> removedServers = removeMatchingServers(serverBefore, servers);
    // call roundRobinAssignment with the modified servers list
    assignmentMap = balancer.roundRobinAssignment(regions, servers);
    List<ServerName> favoredNodesAfter = ((FavoredNodeLoadBalancer) balancer).getFavoredNodes(region);
    assertTrue(favoredNodesAfter.size() == 3);
    // We don't expect the favored nodes assignments to change in multiple calls
    // to the roundRobinAssignment method in the balancer (relevant for AssignmentManager.assign
    // failures)
    assertTrue(favoredNodesAfter.containsAll(favoredNodesBefore));
    Set<ServerName> serverAfter = assignmentMap.keySet();
    // We expect the new RegionServer assignee to be one of the favored nodes
    // chosen earlier.
    assertTrue(
            ServerName.isSameHostnameAndPort(serverAfter.iterator().next(), favoredNodesBefore.get(SECONDARY))
                    || ServerName.isSameHostnameAndPort(serverAfter.iterator().next(),
                            favoredNodesBefore.get(TERTIARY)));

    // put back the primary in the list of available servers
    servers.addAll(removedServers);
    // now roundRobinAssignment with the modified servers list should return the primary
    // as the regionserver assignee
    assignmentMap = balancer.roundRobinAssignment(regions, servers);
    Set<ServerName> serverWithPrimary = assignmentMap.keySet();
    assertTrue(serverBefore.containsAll(serverWithPrimary));

    // Make all the favored nodes unavailable for assignment
    removeMatchingServers(favoredNodesAfter, servers);
    // call roundRobinAssignment with the modified servers list
    assignmentMap = balancer.roundRobinAssignment(regions, servers);
    // With every favored node gone, the balancer must pick an entirely fresh
    // set of three favored nodes.
    List<ServerName> favoredNodesNow = ((FavoredNodeLoadBalancer) balancer).getFavoredNodes(region);
    assertTrue(favoredNodesNow.size() == 3);
    assertTrue(!favoredNodesNow.contains(favoredNodesAfter.get(PRIMARY))
            && !favoredNodesNow.contains(favoredNodesAfter.get(SECONDARY))
            && !favoredNodesNow.contains(favoredNodesAfter.get(TERTIARY)));
}

From source file:com.davidsoergel.trees.AbstractRootedPhylogeny.java

/**
 * {@inheritDoc}/*from  w w  w  . ja  v a  2 s .c  o  m*/
 */
@NotNull
public BasicRootedPhylogeny<T> extractTreeWithLeafIDs(Set<T> ids, boolean ignoreAbsentNodes,
        boolean includeInternalBranches, MutualExclusionResolutionMode mode) throws NoSuchNodeException //, NodeNamer<T> namer

{
    /*try
       {
       if (getLeafValues().equals(ids) && includeInternalBranches)
    {
    return this;
    }
       }
    catch (TreeRuntimeException e)
       {
       // the actual tree is expensive to load (e.g. NcbiTaxonomyService) so getLeafValues is a bad idea
       // OK, just do the explicit extraction anyway then
       }
    */
    /*
    List<PhylogenyNode<T>> theLeaves = idsToLeaves(ids, ignoreAbsentNodes);
            
            
    if (theLeaves.isEmpty())
       {
       throw new NoSuchNodeException("No leaves found for ids: " + ids);
       }
            
    RootedPhylogeny<T> result = extractTreeWithLeaves(theLeaves, includeInternalBranches, mode); */
    Set<List<? extends PhylogenyNode<T>>> theDisposableLeafPaths = idsToDisposableBasicLeafPaths(ids,
            ignoreAbsentNodes);

    if (theDisposableLeafPaths.isEmpty()) {
        throw new NoSuchNodeException("No leaves found for ids: " + ids);
    }

    BasicRootedPhylogeny<T> result = extractTreeWithLeafPaths(theDisposableLeafPaths, includeInternalBranches,
            mode);

    Collection<T> gotLeaves = result.getLeafValues();

    Collection<T> gotNodes = result.getNodeValues();

    // all the leaves that were found were leaves that were requested
    assert ids.containsAll(gotLeaves);

    // BAD confusing interaction between all three parameters
    //if (includeInternalBranches && !ignoreAbsentNodes) //(mode == MutualExclusionResolutionMode.LEAF || mode == MutualExclusionResolutionMode.BOTH))
    if (!ignoreAbsentNodes) {
        // some requested leaves may turn out to be internal nodes, but at least they should all be accounted for
        assert gotNodes.containsAll(ids);
    }

    /*   if (!ignoreAbsentNodes)
       {
       // any requested leaves that turned out to be internal nodes should have had a phantom leaf added
       assert gotLeaves.containsAll(ids);
       }
    */
    return result;
}

From source file:org.mitre.openid.connect.service.impl.TestMITREidDataService_1_3.java

@Test
public void testExportBlacklistedSites() throws IOException {
    // Fixture: three blacklisted sites that the export must serialize.
    BlacklistedSite site1 = new BlacklistedSite();
    site1.setId(1L);
    site1.setUri("http://foo.com");

    BlacklistedSite site2 = new BlacklistedSite();
    site2.setId(2L);
    site2.setUri("http://bar.com");

    BlacklistedSite site3 = new BlacklistedSite();
    site3.setId(3L);
    site3.setUri("http://baz.com");

    Set<BlacklistedSite> allBlacklistedSites = ImmutableSet.of(site1, site2, site3);

    // Stub every repository: only the blacklisted-site repository returns data.
    Mockito.when(clientRepository.getAllClients()).thenReturn(new HashSet<ClientDetailsEntity>());
    Mockito.when(approvedSiteRepository.getAll()).thenReturn(new HashSet<ApprovedSite>());
    Mockito.when(wlSiteRepository.getAll()).thenReturn(new HashSet<WhitelistedSite>());
    Mockito.when(blSiteRepository.getAll()).thenReturn(allBlacklistedSites);
    Mockito.when(authHolderRepository.getAll()).thenReturn(new ArrayList<AuthenticationHolderEntity>());
    Mockito.when(tokenRepository.getAllAccessTokens()).thenReturn(new HashSet<OAuth2AccessTokenEntity>());
    Mockito.when(tokenRepository.getAllRefreshTokens()).thenReturn(new HashSet<OAuth2RefreshTokenEntity>());
    Mockito.when(sysScopeRepository.getAll()).thenReturn(new HashSet<SystemScope>());

    // Run the export into an in-memory JSON document.
    StringWriter serialized = new StringWriter();
    JsonWriter jsonWriter = new JsonWriter(serialized);
    jsonWriter.beginObject();
    dataService.exportData(jsonWriter);
    jsonWriter.endObject();
    jsonWriter.close();

    // Re-parse the produced JSON so we can assert on its structure.
    JsonObject root = new JsonParser().parse(serialized.toString()).getAsJsonObject();

    // The versioned wrapper object must be present.
    assertThat(root.has(MITREidDataService.MITREID_CONNECT_1_3), is(true));

    JsonObject config = root.get(MITREidDataService.MITREID_CONNECT_1_3).getAsJsonObject();

    // Every top-level collection must be present...
    String[] collectionFields = { MITREidDataService.CLIENTS, MITREidDataService.GRANTS,
            MITREidDataService.WHITELISTEDSITES, MITREidDataService.BLACKLISTEDSITES,
            MITREidDataService.REFRESHTOKENS, MITREidDataService.ACCESSTOKENS,
            MITREidDataService.SYSTEMSCOPES, MITREidDataService.AUTHENTICATIONHOLDERS };
    for (String field : collectionFields) {
        assertThat(config.has(field), is(true));
    }

    // ...and each must be serialized as a JSON array.
    for (String field : collectionFields) {
        assertThat(config.get(field).isJsonArray(), is(true));
    }

    // Focus of this test: the blacklisted-sites array.
    JsonArray sites = config.get(MITREidDataService.BLACKLISTEDSITES).getAsJsonArray();

    assertThat(sites.size(), is(3));

    // Match each serialized entry back to its fixture by id and verify its URI.
    Set<BlacklistedSite> checked = new HashSet<>();
    for (JsonElement e : sites) {
        assertThat(e.isJsonObject(), is(true));
        JsonObject site = e.getAsJsonObject();

        BlacklistedSite compare = null;
        for (BlacklistedSite candidate : allBlacklistedSites) {
            if (site.get("id").getAsLong() == candidate.getId().longValue()) {
                compare = candidate;
                break;
            }
        }

        if (compare == null) {
            fail("Could not find matching blacklisted site id: " + site.get("id").getAsString());
        } else {
            assertThat(site.get("uri").getAsString(), equalTo(compare.getUri()));
            checked.add(compare);
        }
    }
    // make sure all of our fixture sites were found
    assertThat(checked.containsAll(allBlacklistedSites), is(true));

}

From source file:org.mitre.openid.connect.service.impl.TestMITREidDataService_1_3.java

@Test
public void testExportWhitelistedSites() throws IOException {
    // Fixture: three whitelisted sites that the export must serialize.
    WhitelistedSite site1 = new WhitelistedSite();
    site1.setId(1L);
    site1.setClientId("foo");

    WhitelistedSite site2 = new WhitelistedSite();
    site2.setId(2L);
    site2.setClientId("bar");

    WhitelistedSite site3 = new WhitelistedSite();
    site3.setId(3L);
    site3.setClientId("baz");

    Set<WhitelistedSite> allWhitelistedSites = ImmutableSet.of(site1, site2, site3);

    // Stub every repository: only the whitelisted-site repository returns data.
    Mockito.when(clientRepository.getAllClients()).thenReturn(new HashSet<ClientDetailsEntity>());
    Mockito.when(approvedSiteRepository.getAll()).thenReturn(new HashSet<ApprovedSite>());
    Mockito.when(blSiteRepository.getAll()).thenReturn(new HashSet<BlacklistedSite>());
    Mockito.when(wlSiteRepository.getAll()).thenReturn(allWhitelistedSites);
    Mockito.when(authHolderRepository.getAll()).thenReturn(new ArrayList<AuthenticationHolderEntity>());
    Mockito.when(tokenRepository.getAllAccessTokens()).thenReturn(new HashSet<OAuth2AccessTokenEntity>());
    Mockito.when(tokenRepository.getAllRefreshTokens()).thenReturn(new HashSet<OAuth2RefreshTokenEntity>());
    Mockito.when(sysScopeRepository.getAll()).thenReturn(new HashSet<SystemScope>());

    // Run the export into an in-memory JSON document.
    StringWriter serialized = new StringWriter();
    JsonWriter jsonWriter = new JsonWriter(serialized);
    jsonWriter.beginObject();
    dataService.exportData(jsonWriter);
    jsonWriter.endObject();
    jsonWriter.close();

    // Re-parse the produced JSON so we can assert on its structure.
    JsonObject root = new JsonParser().parse(serialized.toString()).getAsJsonObject();

    // The versioned wrapper object must be present.
    assertThat(root.has(MITREidDataService.MITREID_CONNECT_1_3), is(true));

    JsonObject config = root.get(MITREidDataService.MITREID_CONNECT_1_3).getAsJsonObject();

    // Every top-level collection must be present...
    String[] collectionFields = { MITREidDataService.CLIENTS, MITREidDataService.GRANTS,
            MITREidDataService.WHITELISTEDSITES, MITREidDataService.BLACKLISTEDSITES,
            MITREidDataService.REFRESHTOKENS, MITREidDataService.ACCESSTOKENS,
            MITREidDataService.SYSTEMSCOPES, MITREidDataService.AUTHENTICATIONHOLDERS };
    for (String field : collectionFields) {
        assertThat(config.has(field), is(true));
    }

    // ...and each must be serialized as a JSON array.
    for (String field : collectionFields) {
        assertThat(config.get(field).isJsonArray(), is(true));
    }

    // Focus of this test: the whitelisted-sites array.
    JsonArray sites = config.get(MITREidDataService.WHITELISTEDSITES).getAsJsonArray();

    assertThat(sites.size(), is(3));

    // Match each serialized entry back to its fixture by id and verify its clientId.
    Set<WhitelistedSite> checked = new HashSet<>();
    for (JsonElement e : sites) {
        assertThat(e.isJsonObject(), is(true));
        JsonObject site = e.getAsJsonObject();

        WhitelistedSite compare = null;
        for (WhitelistedSite candidate : allWhitelistedSites) {
            if (site.get("id").getAsLong() == candidate.getId().longValue()) {
                compare = candidate;
                break;
            }
        }

        if (compare == null) {
            fail("Could not find matching whitelisted site id: " + site.get("id").getAsString());
        } else {
            assertThat(site.get("clientId").getAsString(), equalTo(compare.getClientId()));
            checked.add(compare);
        }
    }
    // make sure all of our fixture sites were found
    assertThat(checked.containsAll(allWhitelistedSites), is(true));

}