Example usage for com.google.common.collect Sets newHashSetWithExpectedSize

Introduction

On this page you can find example usages of com.google.common.collect.Sets.newHashSetWithExpectedSize.

Prototype

public static <E> HashSet<E> newHashSetWithExpectedSize(int expectedSize) 

Document

Creates a HashSet instance, with a high enough initial table size that it should hold expectedSize elements without resizing.
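
Before the project examples, here is a minimal, self-contained sketch of a typical call. The class name, element type, and seed list are illustrative only and do not come from the projects below.

import com.google.common.collect.Sets;

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class NewHashSetWithExpectedSizeDemo {
    public static void main(String[] args) {
        List<String> source = Arrays.asList("alpha", "beta", "gamma");

        // Size the set for the number of elements about to be added, so the
        // backing table should not need to grow while copying the source.
        HashSet<String> copy = Sets.newHashSetWithExpectedSize(source.size());
        copy.addAll(source);

        System.out.println(copy.size()); // 3
    }
}

Note that new HashSet<>(n) treats n as a raw initial table capacity, so the set may still resize once the load factor is exceeded, whereas newHashSetWithExpectedSize(n) pads the capacity so that n elements fit without rehashing. This is why the examples below size the set from the known size of the source collection.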

Usage

From source file:co.cask.tephra.TransactionManager.java

public boolean canCommit(Transaction tx, Collection<byte[]> changeIds)
        throws TransactionNotInProgressException {
    txMetricsCollector.rate("canCommit");
    Stopwatch timer = new Stopwatch().start();
    if (inProgress.get(tx.getTransactionId()) == null) {
        // invalid transaction, either this has timed out and moved to invalid, or something else is wrong.
        if (invalid.contains(tx.getTransactionId())) {
            throw new TransactionNotInProgressException(String.format(
                    "canCommit() is called for transaction %d that is not in progress (it is known to be invalid)",
                    tx.getTransactionId()));
        } else {
            throw new TransactionNotInProgressException(String.format(
                    "canCommit() is called for transaction %d that is not in progress", tx.getTransactionId()));
        }
    }

    Set<ChangeId> set = Sets.newHashSetWithExpectedSize(changeIds.size());
    for (byte[] change : changeIds) {
        set.add(new ChangeId(change));
    }

    if (hasConflicts(tx, set)) {
        return false;
    }
    // guard against changes to the transaction log while processing
    this.logReadLock.lock();
    try {
        synchronized (this) {
            ensureAvailable();
            addCommittingChangeSet(tx.getTransactionId(), set);
        }
        appendToLog(TransactionEdit.createCommitting(tx.getTransactionId(), set));
    } finally {
        this.logReadLock.unlock();
    }
    txMetricsCollector.histogram("canCommit.latency", (int) timer.elapsedMillis());
    return true;
}

From source file:org.summer.dsl.xbase.typesystem.internal.ResolvedTypes.java

public void acceptHint(Object handle, LightweightBoundTypeArgument boundTypeArgument) {
    if (boundTypeArgument.getSource() == BoundTypeArgumentSource.RESOLVED) {
        if (resolvedTypeParameters == null) {
            resolvedTypeParameters = Sets.newHashSetWithExpectedSize(3);
        }
        if (resolvedTypeParameters.add(handle)) {
            if (boundTypeArgument.getDeclaredVariance()
                    .mergeDeclaredWithActual(boundTypeArgument.getActualVariance()) == VarianceInfo.INVARIANT) {
                resolveDependentTypeArguments(handle, boundTypeArgument);
            }
            LightweightBoundTypeArgument boundWithoutRecursion = removeRecursiveTypeArguments(handle,
                    boundTypeArgument);
            ensureTypeParameterHintsMapExists().put(handle, Collections.singletonList(boundWithoutRecursion));
        }
    } else {
        if (!isResolved(handle)) {
            if (boundTypeArgument.getTypeReference() instanceof UnboundTypeReference
                    && boundTypeArgument.getSource() != BoundTypeArgumentSource.CONSTRAINT) {
                UnboundTypeReference other = (UnboundTypeReference) boundTypeArgument.getTypeReference();
                Object otherHandle = other.getHandle();
                if (ensureTypeParameterHintsMapExists().containsKey(handle)) {
                    // don't add fully redundant hints
                    List<LightweightBoundTypeArgument> existingValues = ensureTypeParameterHintsMapExists()
                            .get(handle);
                    for (LightweightBoundTypeArgument existingValue : existingValues) {
                        if (existingValue.getTypeReference() instanceof UnboundTypeReference) {
                            if (((UnboundTypeReference) existingValue.getTypeReference())
                                    .getHandle() == otherHandle) {
                                if (existingValue.getActualVariance() == boundTypeArgument.getActualVariance()
                                        && existingValue.getDeclaredVariance() == boundTypeArgument
                                                .getDeclaredVariance()
                                        && existingValue.getSource() == boundTypeArgument.getSource()) {
                                    return;
                                }
                            }
                        }
                    }
                }
                UnboundTypeReference currentUnbound = getUnboundTypeReference(handle);
                Maps2.putIntoListMap(otherHandle, copyBoundTypeArgument(currentUnbound, boundTypeArgument),
                        ensureTypeParameterHintsMapExists());
            }
            Maps2.putIntoListMap(handle, boundTypeArgument, ensureTypeParameterHintsMapExists());
        } else {
            throw new IllegalStateException("Cannot add hints if the reference was already resolved");
        }
    }
}

From source file:com.google.devtools.build.android.ResourceShrinker.java

private void referencedString(@NonNull String string) {
    // See if the string is at all eligible; ignore strings that aren't identifier-like
    // (anything other than Java identifier chars and .:/), or that are empty or too long.
    if (string.isEmpty() || string.length() > 80) {
        return;
    }
    boolean haveIdentifierChar = false;
    for (int i = 0, n = string.length(); i < n; i++) {
        char c = string.charAt(i);
        boolean identifierChar = Character.isJavaIdentifierPart(c);
        if (!identifierChar && c != '.' && c != ':' && c != '/') {
            // .:/ are for the fully qualified resource names
            return;
        } else if (identifierChar) {
            haveIdentifierChar = true;
        }
    }
    if (!haveIdentifierChar) {
        return;
    }
    if (mStrings == null) {
        mStrings = Sets.newHashSetWithExpectedSize(300);
    }
    mStrings.add(string);
}

From source file:com.palantir.atlasdb.keyvalue.jdbc.JdbcKeyValueService.java

private Set<String> getAllTableNames(DSLContext ctx) {
    Result<? extends Record> records = ctx.select(TABLE_NAME).from(METADATA_TABLE).fetch();
    Set<String> tableNames = Sets.newHashSetWithExpectedSize(records.size());
    for (Record record : records) {
        tableNames.add(record.getValue(TABLE_NAME));
    }
    return tableNames;
}

From source file:com.google.gerrit.server.git.MergeOp.java

private PatchSetApproval saveApprovals(Change c, PatchSet.Id merged) throws OrmException {
    // Flatten out existing approvals for this patch set based upon the current
    // permissions. Once the change is closed the approvals are not updated at
    // presentation view time, except for zero votes used to indicate a reviewer
    // was added. So we need to make sure votes are accurate now. This way if
    // permissions get modified in the future, historical records stay accurate.
    PatchSetApproval submitter = null;
    try {
        c.setStatus(Change.Status.MERGED);

        List<PatchSetApproval> approvals = db.patchSetApprovals().byPatchSet(merged).toList();
        Set<PatchSetApproval.Key> toDelete = Sets.newHashSetWithExpectedSize(approvals.size());
        for (PatchSetApproval a : approvals) {
            if (a.getValue() != 0) {
                toDelete.add(a.getKey());
            }
        }

        approvals = labelNormalizer.normalize(c, approvals);
        for (PatchSetApproval a : approvals) {
            toDelete.remove(a.getKey());
            if (a.getValue() > 0 && a.isSubmit()) {
                if (submitter == null || a.getGranted().compareTo(submitter.getGranted()) > 0) {
                    submitter = a;
                }
            }
            a.cache(c);
        }
        db.patchSetApprovals().update(approvals);
        db.patchSetApprovals().deleteKeys(toDelete);
    } catch (NoSuchChangeException err) {
        throw new OrmException(err);
    }
    return submitter;
}

From source file:edu.buaa.satla.analysis.util.VariableClassification.java

/** Evaluates an expression and adds the variables it contains to the sets.
 * The id is the position of the expression in the edge;
 * it is 0 for all edges except a FunctionCallEdge. */
private void handleExpression(CFAEdge edge, CExpression exp, String varName, int id,
        final VariableOrField lhs) {
    CFANode pre = edge.getPredecessor();

    VariablesCollectingVisitor dcv = new VariablesCollectingVisitor(pre);
    Set<String> vars = exp.accept(dcv);
    if (vars == null) {
        vars = Sets.newHashSetWithExpectedSize(1);
    }

    vars.add(varName);
    dependencies.addAll(vars, dcv.getValues(), edge, id);

    BoolCollectingVisitor bcv = new BoolCollectingVisitor(pre);
    Set<String> possibleBoolean = exp.accept(bcv);
    handleResult(varName, possibleBoolean, nonIntBoolVars);

    IntEqualCollectingVisitor ncv = new IntEqualCollectingVisitor(pre);
    Set<String> possibleIntEqualVars = exp.accept(ncv);
    handleResult(varName, possibleIntEqualVars, nonIntEqVars);

    IntAddCollectingVisitor icv = new IntAddCollectingVisitor(pre);
    Set<String> possibleIntAddVars = exp.accept(icv);
    handleResult(varName, possibleIntAddVars, nonIntAddVars);

    exp.accept(new CollectingRHSVisitor(lhs));
}

From source file:com.opengamma.financial.analytics.model.curve.interestrate.MarketInstrumentImpliedYieldCurveFunction.java

private Set<ComputedValue> execute(final FunctionExecutionContext executionContext,
        final ComputationTargetSpecification targetSpec, final String forwardCurveName,
        final InterpolatedYieldCurveSpecificationWithSecurities forwardCurveSpecificationWithSecurities,
        final SnapshotDataBundle forwardMarketData, final HistoricalTimeSeriesBundle forwardTimeSeries,
        final String fundingCurveName,
        final InterpolatedYieldCurveSpecificationWithSecurities fundingCurveSpecificationWithSecurities,
        final SnapshotDataBundle fundingMarketData, final HistoricalTimeSeriesBundle fundingTimeSeries,
        final boolean createForwardYieldCurve, final boolean createFundingYieldCurve,
        final boolean createJacobian, final boolean createSensitivities) {
    final Clock snapshotClock = executionContext.getValuationClock();
    final ZonedDateTime now = ZonedDateTime.now(snapshotClock);
    final List<InstrumentDerivative> derivatives = new ArrayList<>();
    final int nFunding = fundingCurveSpecificationWithSecurities.getStrips().size();
    final int nForward = forwardCurveSpecificationWithSecurities.getStrips().size();
    final double[] initialRatesGuess = new double[nFunding + nForward];
    final double[] fundingNodeTimes = new double[nFunding];
    final double[] forwardNodeTimes = new double[nForward];
    final double[] marketValues = new double[nFunding + nForward];
    int i = 0, fundingIndex = 0, forwardIndex = 0;
    for (final FixedIncomeStripWithSecurity strip : fundingCurveSpecificationWithSecurities.getStrips()) {
        final Double fundingMarketValue = fundingMarketData.getDataPoint(strip.getSecurityIdentifier());
        if (fundingMarketValue == null) {
            throw new OpenGammaRuntimeException("Could not get funding market data for " + strip);
        }
        final double marketValue = fundingMarketValue;
        final FinancialSecurity financialSecurity = (FinancialSecurity) strip.getSecurity();
        InstrumentDerivative derivative;
        final String[] curveNames = FixedIncomeInstrumentCurveExposureHelper
                .getCurveNamesForFundingCurveInstrument(strip.getInstrumentType(), fundingCurveName,
                        forwardCurveName);
        final InstrumentDefinition<?> definition = getSecurityConverter().visit(financialSecurity);
        if (strip.getSecurity().getSecurityType().equals("FUTURE")) {
            throw new OpenGammaRuntimeException(
                    "We do not currently support FundingCurves containing FUTURES. Contact QR if you desire this.");
        }
        derivative = getDefinitionConverter().convert(financialSecurity, definition, now, curveNames,
                fundingTimeSeries);
        if (derivative == null) {
            throw new OpenGammaRuntimeException("Had a null InterestRateDefinition for " + strip);
        }
        if (_calcTypeParRate) { // set market value to the rate
            marketValues[i] = marketValue;
        } // else PV, leave at 0

        derivatives.add(derivative);
        initialRatesGuess[i] = marketValue;
        i++;
        fundingNodeTimes[fundingIndex] = derivative.accept(LAST_DATE_CALCULATOR);
        fundingIndex++;
    }
    for (final FixedIncomeStripWithSecurity strip : forwardCurveSpecificationWithSecurities.getStrips()) {
        final Double forwardMarketValue = forwardMarketData.getDataPoint(strip.getSecurityIdentifier());
        if (forwardMarketValue == null) {
            throw new OpenGammaRuntimeException("Could not get forward market data for " + strip);
        }
        double marketValue = forwardMarketValue;
        final FinancialSecurity financialSecurity = (FinancialSecurity) strip.getSecurity();
        InstrumentDerivative derivative = null;
        final String[] curveNames = FixedIncomeInstrumentCurveExposureHelper
                .getCurveNamesForForwardCurveInstrument(strip.getInstrumentType(), fundingCurveName,
                        forwardCurveName);
        try {
            InstrumentDefinition<?> definition = getSecurityConverter().visit(financialSecurity);
            if (strip.getSecurity().getSecurityType().equals("FUTURE")) {
                if (!_calcTypeParRate) {
                    // Scale notional to 1 - this is to better condition the jacobian matrix
                    // Set trade price to current market value - so the present value will be zero once fit
                    definition = ((InterestRateFutureTransactionDefinition) definition)
                            .withNewNotionalAndTransactionPrice(1.0, marketValue);
                }
                marketValue = 1 - marketValue; // transform to rate for initial rates guess
            }
            derivative = getDefinitionConverter().convert(financialSecurity, definition, now, curveNames,
                    forwardTimeSeries);
        } catch (final Exception e) {
            s_logger.error("Caught exception for " + financialSecurity, e);
        }
        if (derivative == null) {
            throw new OpenGammaRuntimeException("Had a null InterestRateDefinition for " + strip);
        }
        if (_calcTypeParRate) { // set market value to the rate, else leave at 0
            marketValues[i] = marketValue;
        }
        derivatives.add(derivative);
        initialRatesGuess[i] = marketValue;
        i++;
        forwardNodeTimes[forwardIndex] = derivative.accept(LAST_DATE_CALCULATOR);
        forwardIndex++;
    }
    final LinkedHashMap<String, double[]> curveNodes = new LinkedHashMap<>();
    final LinkedHashMap<String, Interpolator1D> interpolators = new LinkedHashMap<>();
    curveNodes.put(fundingCurveName, fundingNodeTimes);
    interpolators.put(fundingCurveName, getInterpolator(fundingCurveSpecificationWithSecurities));
    curveNodes.put(forwardCurveName, forwardNodeTimes);
    interpolators.put(forwardCurveName, getInterpolator(forwardCurveSpecificationWithSecurities));
    // TODO have whether to use finite difference or not as an input [FIN-147]
    final Currency currency = Currency.of(targetSpec.getUniqueId().getValue());
    final MultipleYieldCurveFinderDataBundle data = new MultipleYieldCurveFinderDataBundle(derivatives,
            marketValues, null, curveNodes, interpolators, false, new FXMatrix(currency));
    // TODO have the calculator and sensitivity calculators as an input [FIN-144], [FIN-145]
    final Function1D<DoubleMatrix1D, DoubleMatrix1D> curveCalculator = new MultipleYieldCurveFinderFunction(
            data, getCalculator());
    final Function1D<DoubleMatrix1D, DoubleMatrix2D> jacobianCalculator = new MultipleYieldCurveFinderJacobian(
            data, getSensitivityCalculator());
    NewtonVectorRootFinder rootFinder;
    double[] yields = null;
    // TODO have the decomposition as an optional input [FIN-146]
    try {
        rootFinder = new BroydenVectorRootFinder(1e-4, 1e-4, 10000,
                DecompositionFactory.getDecomposition(DecompositionFactory.SV_COLT_NAME));
        yields = rootFinder.getRoot(curveCalculator, jacobianCalculator, new DoubleMatrix1D(initialRatesGuess))
                .getData();
    } catch (final Exception eSV) {
        s_logger.warn(
                "Could not find root using SV decomposition and " + _calculationType + " method for curves "
                        + fundingCurveName + " and " + forwardCurveName + ". Error was: " + eSV.getMessage());
        throw new OpenGammaRuntimeException(
                "Could not find curves " + fundingCurveName + " (" + targetSpec.getUniqueId().getValue() + "), "
                        + forwardCurveName + " (" + targetSpec.getUniqueId().getValue()
                        + ") using SV decomposition and calculation method " + _calculationType);
    }
    final YieldAndDiscountCurve fundingCurve;
    if (createSensitivities || createFundingYieldCurve) {
        final double[] fundingYields = Arrays.copyOfRange(yields, 0, fundingNodeTimes.length);
        fundingCurve = YieldCurve.from(InterpolatedDoublesCurve.from(fundingNodeTimes, fundingYields,
                getInterpolator(fundingCurveSpecificationWithSecurities)));
    } else {
        fundingCurve = null;
    }
    final YieldAndDiscountCurve forwardCurve;
    if (createSensitivities || createForwardYieldCurve) {
        final double[] forwardYields = Arrays.copyOfRange(yields, fundingNodeTimes.length, yields.length);
        forwardCurve = YieldCurve.from(InterpolatedDoublesCurve.from(forwardNodeTimes, forwardYields,
                getInterpolator(forwardCurveSpecificationWithSecurities)));
    } else {
        forwardCurve = null;
    }
    final Set<ComputedValue> result = Sets.newHashSetWithExpectedSize(4);
    final ValueProperties.Builder properties = createValueProperties()
            .with(ValuePropertyNames.CURVE_CALCULATION_METHOD, getCalculationType())
            .with(YieldCurveFunction.PROPERTY_FORWARD_CURVE, forwardCurveName)
            .with(YieldCurveFunction.PROPERTY_FUNDING_CURVE, fundingCurveName);
    if (createJacobian) {
        final DoubleMatrix2D jacobian = jacobianCalculator.evaluate(new DoubleMatrix1D(yields));
        result.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.YIELD_CURVE_JACOBIAN,
                targetSpec, properties.get()), jacobian.getData()));
    }
    if (createSensitivities) { // calcType is PresentValue. Compute CouponSens ( dPrice / dParRate ) used in conjunction with Jacobian to get Yield Curve Node (Par Rate) Sensitivities
        final double[] couponSensitivities = new double[derivatives.size()];
        int ii = 0;
        final String[] curveNames = new String[] { forwardCurveName, fundingCurveName };
        final YieldAndDiscountCurve[] curves = new YieldAndDiscountCurve[] { forwardCurve, fundingCurve };
        final YieldCurveBundle curveBundle = new YieldCurveBundle(curveNames, curves);
        for (final InstrumentDerivative derivative : derivatives) {
            couponSensitivities[ii++] = derivative.accept(getCouponSensitivityCalculator(), curveBundle);
        }
        final ValueProperties couponProperties = createValueProperties()
                .with(YieldCurveFunction.PROPERTY_FORWARD_CURVE, forwardCurveName)
                .with(YieldCurveFunction.PROPERTY_FUNDING_CURVE, fundingCurveName).get();
        result.add(
                new ComputedValue(new ValueSpecification(ValueRequirementNames.PRESENT_VALUE_COUPON_SENSITIVITY,
                        targetSpec, couponProperties), new DoubleMatrix1D(couponSensitivities)));
    }
    if (createForwardYieldCurve) {
        result.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.YIELD_CURVE, targetSpec,
                properties.with(ValuePropertyNames.CURVE, forwardCurveName).get()), forwardCurve));
    }
    if (createFundingYieldCurve) {
        result.add(
                new ComputedValue(
                        new ValueSpecification(ValueRequirementNames.YIELD_CURVE, targetSpec,
                                properties.withoutAny(ValuePropertyNames.CURVE)
                                        .with(ValuePropertyNames.CURVE, fundingCurveName).get()),
                        fundingCurve));
    }
    return result;
}

From source file:com.android.builder.VariantConfiguration.java

/**
 * Returns the list of packaged jars for this config. If the config tests a library, this
 * will include the jars of the tested config.
 *
 * @return a non-null, but possibly empty list.
 */
@NonNull
public List<File> getPackagedJars() {
    Set<File> jars = Sets.newHashSetWithExpectedSize(mJars.size() + mFlatLibraries.size());

    for (JarDependency jar : mJars) {
        File jarFile = jar.getJarFile();
        if (jar.isPackaged() && jarFile.exists()) {
            jars.add(jarFile);
        }
    }

    for (LibraryDependency libraryDependency : mFlatLibraries) {
        File libJar = libraryDependency.getJarFile();
        if (libJar.exists()) {
            jars.add(libJar);
        }
        for (File jarFile : libraryDependency.getLocalJars()) {
            if (jarFile.isFile()) {
                jars.add(jarFile);
            }
        }
    }

    return Lists.newArrayList(jars);
}

From source file:com.android.build.gradle.tasks.ResourceUsageAnalyzer.java

private void referencedString(@NonNull String string) {
    // See if the string is at all eligible; ignore strings that aren't identifier-like
    // (anything other than Java identifier chars and .:/), or that are empty or too long.
    // We also allow "%", used for formatting strings.
    if (string.isEmpty() || string.length() > 80) {
        return;
    }
    boolean haveIdentifierChar = false;
    for (int i = 0, n = string.length(); i < n; i++) {
        char c = string.charAt(i);
        boolean identifierChar = Character.isJavaIdentifierPart(c);
        if (!identifierChar && c != '.' && c != ':' && c != '/' && c != '%') {
            // .:/ are for the fully qualified resource names, or for resource URLs or
            // relative file names
            return;
        } else if (identifierChar) {
            haveIdentifierChar = true;
        }
    }
    if (!haveIdentifierChar) {
        return;
    }
    if (strings == null) {
        strings = Sets.newHashSetWithExpectedSize(300);
    }
    strings.add(string);

    if (!foundWebContent && string.contains(ANDROID_RES)) {
        foundWebContent = true;
    }
}

From source file:com.zimbra.cs.db.DbTag.java

private static void verifyTaggedItem(DbConnection conn, Mailbox mbox, Map<Integer, UnderlyingData> tdata)
        throws ServiceException {
    int flagMask = 0;
    for (int flagId : Mailbox.REIFIED_FLAGS) {
        flagMask |= 1 << (-flagId - 1);
    }

    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        stmt = conn.prepareStatement(
                "SELECT id, flags, tag_names, unread FROM " + DbMailItem.getMailItemTableName(mbox) + " WHERE "
                        + DbMailItem.IN_THIS_MAILBOX_AND + "type NOT IN " + DbMailItem.NON_SEARCHABLE_TYPES);
        DbMailItem.setMailboxId(stmt, mbox, 1);

        rs = stmt.executeQuery();
        while (rs.next()) {
            int id = rs.getInt(1);
            int flags = (rs.getInt(2) & flagMask) | (rs.getBoolean(4) ? Flag.BITMASK_UNREAD : 0);
            String[] tagset = deserializeTags(rs.getString(3));
            Set<String> tags = tagset == null ? Collections.<String>emptySet() : Sets.newHashSet(tagset);

            PreparedStatement stmtcheck = null;
            ResultSet rscheck = null;
            try {
                // make sure the item counts match the tag totals
                String mailboxesMatchAnd = DebugConfig.disableMailboxGroups ? ""
                        : "tag.mailbox_id = ti.mailbox_id AND ";
                stmtcheck = conn.prepareStatement("SELECT id, name FROM " + getTagTableName(mbox, "tag")
                        + " INNER JOIN " + getTaggedItemTableName(mbox, "ti") + " ON " + mailboxesMatchAnd
                        + "tag.id = ti.tag_id" + " WHERE " + inThisMailboxAnd("ti") + "ti.item_id = ?");
                int pos = 1;
                pos = DbMailItem.setMailboxId(stmtcheck, mbox, pos);
                stmtcheck.setInt(pos++, id);

                rscheck = stmtcheck.executeQuery();
                int flagcheck = 0;
                Set<String> tagcheck = Sets.newHashSetWithExpectedSize(tags.size());
                while (rscheck.next()) {
                    int idcheck = rscheck.getInt(1);
                    String namecheck = rscheck.getString(2);
                    if (idcheck < 0) {
                        flagcheck |= 1 << (-idcheck - 1);
                    } else {
                        tagcheck.add(namecheck);
                    }
                }
                Assert.assertEquals("flags for item " + id, flags, flagcheck);
                Assert.assertEquals("tags for item " + id, tags, tagcheck);
            } catch (SQLException e) {
                throw ServiceException.FAILURE("consistency checking TAGGED_ITEM vs. MAIL_ITEM", e);
            } finally {
                DbPool.closeResults(rscheck);
                DbPool.closeStatement(stmtcheck);
            }
        }
    } catch (SQLException e) {
        throw ServiceException.FAILURE("consistency checking TAGGED_ITEM vs. MAIL_ITEM", e);
    } finally {
        DbPool.closeResults(rs);
        DbPool.closeStatement(stmt);
    }
}