Example usage for java.util Collection toString

List of usage examples for java.util Collection toString

Introduction

On this page you can find example usages of java.util.Collection.toString().

Prototype

public String toString() 

Document

Returns a string representation of the object.
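
The Collection interface itself does not redeclare toString(), so this is the contract inherited from Object; in practice the JDK's concrete collections extend AbstractCollection, whose toString() lists the elements in square brackets, separated by ", ". Below is a minimal sketch of that typical behaviour (the class and variable names are illustrative only, not taken from the examples that follow):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;

public class CollectionToStringDemo {
    public static void main(String[] args) {
        Collection<String> names = new ArrayList<String>(Arrays.asList("alpha", "beta", "gamma"));
        // AbstractCollection.toString() joins the elements with ", " inside square brackets.
        System.out.println(names.toString()); // [alpha, beta, gamma]

        Collection<String> empty = new ArrayList<String>();
        System.out.println(empty.toString()); // []
    }
}

Note that this element-listing format comes from AbstractCollection rather than from the Collection interface itself, which is worth keeping in mind for the logging and assertion examples below.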

Usage

From source file:org.apache.lens.cube.parse.CandidateTableResolver.java
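
In this resolver, Collection.toString() formats the set of join columns for the LensException thrown once every storage candidate has been pruned; the same collection is also handed to the SLF4J logger through a {} placeholder.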

private void resolveCandidateFactTablesForJoins(CubeQueryContext cubeql) throws LensException {
    if (cubeql.getAutoJoinCtx() == null) {
        return;
    }
    Collection<String> colSet = null;
    if (cubeql.getCube() != null && !cubeql.getCandidates().isEmpty()) {
        for (Iterator<StorageCandidate> i = CandidateUtil.getStorageCandidates(cubeql.getCandidates())
                .iterator(); i.hasNext();) {
            StorageCandidate sc = i.next();
            // for each join path check for columns involved in path
            for (Map.Entry<Aliased<Dimension>, Map<AbstractCubeTable, List<String>>> joincolumnsEntry : cubeql
                    .getAutoJoinCtx().getJoinPathFromColumns().entrySet()) {
                Aliased<Dimension> reachableDim = joincolumnsEntry.getKey();
                OptionalDimCtx optdim = cubeql.getOptionalDimensionMap().get(reachableDim);
                colSet = joincolumnsEntry.getValue().get(cubeql.getCube());

                if (!checkForFactColumnExistsAndValidForRange(sc, colSet, cubeql)) {
                    if (optdim == null || optdim.isRequiredInJoinChain
                            || optdim.requiredForCandidates.contains(sc)) {
                        i.remove();
                        log.info(
                                "Not considering storage candidate :{} as it does not have columns in any of the join paths."
                                        + " Join columns:{}",
                                sc, colSet);
                        cubeql.addStoragePruningMsg(sc,
                                CandidateTablePruneCause.noColumnPartOfAJoinPath(colSet));
                        break;
                    }
                }
            }
        }
        if (cubeql.getCandidates().size() == 0) {
            throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getLensErrorInfo(),
                    colSet == null ? "NULL" : colSet.toString());
        }
    }
}

From source file:org.intermine.dwr.AjaxServices.java
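
Here a HashSet of JSONObject entries is turned into the response body with a single toString() call; because each JSONObject renders itself as JSON, the bracketed collection output doubles as a JSON array.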

/**
 * This method gets the latest bags from the session (SessionMethods) and returns them in JSON
 * @return JSON serialized to a String
 * @throws JSONException json exception
 */
@SuppressWarnings("unchecked")
public String getSavedBagStatus() throws JSONException {
    HttpSession session = WebContextFactory.get().getSession();
    @SuppressWarnings("unchecked")
    Map<String, InterMineBag> savedBags = SessionMethods.getProfile(session).getSavedBags();
    // this is where my lists go
    Collection<JSONObject> lists = new HashSet<JSONObject>();
    try {
        for (Map.Entry<String, InterMineBag> entry : savedBags.entrySet()) {
            InterMineBag bag = entry.getValue();
            // save to the resulting JSON object only if these are 'actionable' lists
            if (bag.isCurrent() || bag.isToUpgrade()) {
                JSONObject list = new JSONObject();
                list.put("name", entry.getKey());
                list.put("status", bag.getState());
                if (bag.isCurrent()) {
                    try {
                        list.put("size", bag.getSize());
                    } catch (ObjectStoreException os) {
                        LOG.error("Problems retrieving size of bag " + bag.getName(), os);
                    }
                } else {
                    list.put("size", 0);
                }
                lists.add(list);
            }
        }
    } catch (JSONException jse) {
        LOG.error("Errors generating json objects", jse);
    }

    return lists.toString();
}

From source file:org.cloudifysource.shell.installer.LocalhostGridAgentBootstrapper.java
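
applicationsList.toString() is passed as a message argument to the CLIStatusException raised when deployed applications would block the local-cloud teardown.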

private void uninstallApplications(final long timeout, final TimeUnit timeunit)
        throws InterruptedException, TimeoutException, CLIException {

    Collection<String> applicationsList = null;
    boolean applicationsExist = false;
    try {
        if (!adminFacade.isConnected()) {
            throw new CLIException(
                    "Failed to fetch applications list. " + "Client is not connected to the rest server.");
        }

        applicationsList = adminFacade.getApplicationNamesList();
        // True when applications other than the management application are deployed.
        applicationsExist = applicationsList.size() > 1;
    } catch (final CLIException e) {
        if (!force) {
            throw new CLIStatusException(e, "failed_to_access_rest_before_teardown");
        }
        final String errorMessage = "Failed to fetch the currently deployed applications list."
                + " Continuing teardown-localcloud.";
        if (verbose) {
            logger.log(Level.FINE, errorMessage, e);
            publishEvent(errorMessage + System.getProperty("line.separator") + e.toString());
        } else {
            logger.log(Level.FINE, errorMessage);
            publishEvent(errorMessage);
        }
        // Suppress exception. continue with teardown.
        return;
    }

    if (applicationsExist && !force) {
        throw new CLIStatusException("apps_deployed_before_teardown_localcloud", applicationsList.toString());
    }
    final String uninstallMessage = ShellUtils.getMessageBundle()
            .getString("uninstalling_applications_before_teardown");
    publishEvent(uninstallMessage);

    if (NewRestClientUtils.isNewRestClientEnabled()) {
        uninstallNewRestClient(applicationsList, timeout, timeunit, applicationsExist);
    } else {
        uninstall(applicationsList, timeout, timeunit, applicationsExist);
    }

}

From source file:org.apache.hadoop.hbase.regionserver.HStore.java
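
compactedFiles.toString() is concatenated into the error log entry when removing the compacted store files fails.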

@VisibleForTesting
protected void completeCompaction(final Collection<StoreFile> compactedFiles) throws IOException {
    try {
        // Do not delete old store files until we have sent out notification of
        // change in case old files are still being accessed by outstanding scanners.
        // Don't do this under writeLock; see HBASE-4485 for a possible deadlock
        // scenario that could have happened if we continued to hold the lock.
        notifyChangedReadersObservers();
        // At this point the store will use new files for all scanners.

        // let the archive util decide if we should archive or delete the files
        LOG.debug("Removing store files after compaction...");
        for (StoreFile compactedFile : compactedFiles) {
            compactedFile.closeReader(true);
        }
        this.fs.removeStoreFiles(this.getColumnFamilyName(), compactedFiles);
    } catch (IOException e) {
        e = RemoteExceptionHandler.checkIOException(e);
        LOG.error("Failed removing compacted files in " + this + ". Files we were trying to remove are "
                + compactedFiles.toString() + "; some of them may have been already removed", e);
    }

    // 4. Compute new store size
    this.storeSize = 0L;
    this.totalUncompressedBytes = 0L;
    for (StoreFile hsf : this.storeEngine.getStoreFileManager().getStorefiles()) {
        StoreFile.Reader r = hsf.getReader();
        if (r == null) {
            LOG.warn("StoreFile " + hsf + " has a null Reader");
            continue;
        }
        this.storeSize += r.length();
        this.totalUncompressedBytes += r.getTotalUncompressedBytes();
    }
}

From source file:ubic.pubmedgate.resolve.EvaluationRDFModel.java
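
properties.toString() labels the evaluated property set both in the log output and in the returned result map.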

public Map<String, String> evaluateConceptMapping(Collection<Property> properties, Set<Resource> mentions) {
    Map<String, String> result = new HashMap<String, String>();
    int accept = 0;
    int reject = 0;
    int rejectFreq = 0;
    int rejectAbs = 0;
    int specToGen = 0;
    int noEvaluation = 0;
    int totalMentionToConceptLinks = 0;
    int mentionsWithOneAccept = 0;

    log.info("Evaluating concept mappings for " + mentions.size() + " mentions");
    // get mentions
    int count = 0;
    for (Resource mention : mentions) {
        if (count++ % 100 == 0)
            log.info("Count: " + count + " mentions");
        Set<Resource> allNeuroTerms = getLinkedNeuroTerms(mention);

        Set<Resource> neuroTerms = new HashSet<Resource>();
        for (Resource neuroTerm : allNeuroTerms) {
            // select mention to concept links for these properties
            // get terms via the properties
            for (Property p : properties) {
                boolean isLinked = model.contains(new StatementImpl(mention, p, neuroTerm));
                if (isLinked) {
                    neuroTerms.add(neuroTerm);
                }
            }
        }

        boolean hasOneAccept = false;
        // get concepts for the terms
        Set<Resource> neuroConcepts = getConceptsFromTerms(neuroTerms);
        totalMentionToConceptLinks += neuroConcepts.size();
        for (Resource neuroConcept : neuroConcepts) {
            boolean isAccept = model
                    .contains(new StatementImpl(mention, Vocabulary.evaluation_accept, neuroConcept));
            boolean isReject = model
                    .contains(new StatementImpl(mention, Vocabulary.evaluation_reject, neuroConcept));
            boolean hasResult = model
                    .contains(new StatementImpl(mention, Vocabulary.evaluation_result, neuroConcept));
            boolean isSpecToGen = model.contains(
                    new StatementImpl(mention, Vocabulary.evaluation_specific_to_general, neuroConcept));
            if (isAccept) {
                accept++;
                hasOneAccept = true;
            }
            if (isReject) {
                log.info("Rejected mention:" + JenaUtil.getLabel(mention) + "->"
                        + JenaUtil.getLabel(neuroConcept));
                reject++;
                int freq = mention.getProperty(Vocabulary.number_of_occurances).getInt();
                rejectFreq += freq;
                int abs = mention.getProperty(Vocabulary.number_of_abstracts).getInt();
                rejectAbs += abs;
            }
            if (isSpecToGen)
                specToGen++;
            if (!hasResult) {
                noEvaluation++;
                log.info("No evaluation result: " + JenaUtil.getLabel(mention) + " -> "
                        + JenaUtil.getLabel(neuroConcept) + " URI: " + mention.toString() + " -> "
                        + neuroConcept.toString());

            }
        }
        if (hasOneAccept)
            mentionsWithOneAccept++;
    }
    log.info("Properties:" + properties.toString());
    result.put("properties", properties.toString());
    log.info("accept:" + accept);
    result.put("accept", "" + accept);
    log.info("reject:" + reject);
    result.put("reject", "" + reject);
    result.put("rejectAbs", rejectAbs + "");
    result.put("rejectFreq", rejectFreq + "");
    log.info("specToGen:" + specToGen);
    result.put("specToGen", "" + specToGen);
    log.info("noEvaluation:" + noEvaluation);
    result.put("noEvaluation", "" + noEvaluation);
    log.info("totalMentionToConceptLinks:" + totalMentionToConceptLinks);
    result.put("totalMentionToConceptLinks", "" + totalMentionToConceptLinks);
    result.put("mentionsWithOneAccept", "" + mentionsWithOneAccept);
    return result;
}

From source file:com.stratelia.webactiv.kmelia.control.KmeliaSessionController.java
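
The retrieved Collection of PublicationDetail objects is dumped into the SilverTrace log via result.toString().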

public Collection<PublicationDetail> getAllPublicationsByTopic(PublicationPK pubPK, List<String> fatherIds)
        throws RemoteException {
    Collection<PublicationDetail> result = getPublicationBm().getDetailsByFatherIdsAndStatus(
            (ArrayList<String>) fatherIds, pubPK, "P.pubUpdateDate desc, P.pubId desc",
            PublicationDetail.VALID);
    SilverTrace.info("kmelia", "KmeliaSessionController.getAllPublicationsByTopic()", "root.MSG_PARAM_VALUE",
            "publis=" + result.toString());
    return result;
}

From source file:org.marketcetera.strategy.LanguageTestBase.java
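
The expected option roots are converted with toString() and compared in assertEquals against the value the strategy stored as a property.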

/**
 * Executes one iteration of the <code>getOptionRoots</code> test. 
 *
 * @param inUnderlyingSymbol a <code>String</code> value
 * @param inExpectedOptionRoots a <code>Collection&lt;String&gt;</code> value
 * @throws Exception if an unexpected error occurs
 */
private void doOptionRootsTest(String inUnderlyingSymbol, Collection<String> inExpectedOptionRoots)
        throws Exception {
    StrategyCoordinates strategy = getPositionsStrategy();
    setPropertiesToNull();
    AbstractRunningStrategy.setProperty("optionRootsDuringStop", "not-empty");
    if (inUnderlyingSymbol != null) {
        AbstractRunningStrategy.setProperty("underlyingSymbol", inUnderlyingSymbol);
    }
    verifyStrategyStartsAndStops(strategy.getName(), getLanguage(), strategy.getFile(), null, null, null);
    // verify expected results
    assertEquals((inExpectedOptionRoots == null ? null : inExpectedOptionRoots.toString()),
            AbstractRunningStrategy.getProperty("optionRoots"));
    assertNull(AbstractRunningStrategy.getProperty("optionRootsDuringStop"));
}

From source file:org.signserver.module.xades.signer.XAdESSignerUnitTest.java
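
expectedCommitmentTypeUris.toString() supplies the assertion message when checking that every expected commitment type URI was found among the signed properties.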

/**
 * Run a signing test with default form and varying commitment types.
 *
 * @param keyType Token key type to use
 * @param signatureAlgorithm Signature algorithm property value to test, if null use default
 * @param expectedSignatureAlgorithmUri Expected XML signature algorithm URI
 * @param commitmentTypesProperty COMMITMENT_TYPES property to test with
 *                                if null, doesn't set the property
 * @param expectedCommitmentTypeUris List of expected commitment type URIs
 * @param claimedRoleProperty Claimed role property to test; if null, the property is not set
 * @param claimedRoleFromUsername If set to true, include the CLAIMED_ROLE_FROM_USERNAME property
 * @param useCertCredential Pass in a (faked dummy) certificate credential in the request context to simulate the situation when the
 *                          client credential is not a user name
 * @param username Username to pass in via the request context, if null no username is passed in
 * @param expectedClaimedRole Expected claimed role in signed document, if null check that no claimed role is included
 * @throws Exception
 */
private void testProcessData_basicSigningInternal(final KeyType keyType, final String signatureAlgorithm,
        final String expectedSignatureAlgorithmUri, final String commitmentTypesProperty,
        final Collection<String> expectedCommitmentTypeUris, final String claimedRoleProperty,
        final boolean claimedRoleFromUsername, final boolean useCertCredential, final String username,
        final String expectedClaimedRole) throws Exception {
    LOG.info("processData");

    final MockedCryptoToken token;

    switch (keyType) {
    case RSA:
        token = tokenRSA;
        break;
    case DSA:
        token = tokenDSA;
        break;
    case ECDSA:
        token = tokenECDSA;
        break;
    default:
        throw new NoSuchAlgorithmException("Unknown key algorithm");
    }

    WorkerConfig config = new WorkerConfig();

    if (commitmentTypesProperty != null) {
        config.setProperty("COMMITMENT_TYPES", commitmentTypesProperty);
    }

    if (signatureAlgorithm != null) {
        config.setProperty("SIGNATUREALGORITHM", signatureAlgorithm);
    }

    if (claimedRoleProperty != null) {
        config.setProperty("CLAIMED_ROLE", claimedRoleProperty);
    }

    if (claimedRoleFromUsername) {
        config.setProperty("CLAIMED_ROLE_FROM_USERNAME", "true");
    }

    final XAdESVerificationResult r = getVerificationResult(token, config, "<testroot/>", useCertCredential,
            username);

    assertEquals("BES", r.getSignatureForm().name());
    assertEquals("Unexpected signature algorithm in signature", expectedSignatureAlgorithmUri,
            r.getSignatureAlgorithmUri());

    final QualifyingProperties qp = r.getQualifyingProperties();

    final Set<String> foundUris = new HashSet<String>();

    final SignedProperties sp = qp.getSignedProperties();

    // check for ClaimedRole
    boolean foundExpectedRole = false;
    for (final SignedSignatureProperty sigProp : sp.getSigProps()) {
        LOG.debug("signed signature property: " + sigProp.getClass().getName() + ": " + sigProp.toString());

        if (sigProp instanceof SignerRoleProperty) {
            final SignerRoleProperty role = (SignerRoleProperty) sigProp;

            for (final String claimedRole : role.getClaimedRoles()) {
                if (expectedClaimedRole == null) {
                    fail("Should not contain a claimed role");
                } else if (expectedClaimedRole.equals(claimedRole)) {
                    foundExpectedRole = true;
                } else {
                    fail("Unexpected claimed role: " + claimedRole);
                }
            }
        }
    }

    if (expectedClaimedRole != null) {
        assertTrue("Expected to find claimed role: " + claimedRoleProperty, foundExpectedRole);
    }

    for (final SignedDataObjectProperty signedObjProp : sp.getDataObjProps()) {
        LOG.debug("object property: " + signedObjProp.getClass().getName() + ": " + signedObjProp.toString());

        if (signedObjProp instanceof AllDataObjsCommitmentTypeProperty) {
            final AllDataObjsCommitmentTypeProperty commitmentType = (AllDataObjsCommitmentTypeProperty) signedObjProp;

            final String uri = commitmentType.getUri();
            LOG.debug("Found commitment type: " + uri);
            if (expectedCommitmentTypeUris.contains(uri)) {
                foundUris.add(uri);
            } else {
                fail("Unexpected commitment type: " + uri);
            }
        }
    }

    assertTrue("Should contain expected commitment types: " + expectedCommitmentTypeUris.toString(),
            foundUris.size() == expectedCommitmentTypeUris.size());
}

From source file:com.stratelia.webactiv.kmelia.control.ejb.KmeliaBmEJB.java
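
Both the requested combination and each node path are traced with Collection.toString() while the coordinate points are assembled.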

@Override
public void addPublicationToCombination(String pubId, List<String> combination, String componentId) {
    SilverTrace.info("kmax", "KmeliaBmEJB.addPublicationToCombination()", "root.MSG_GEN_PARAM_VALUE",
            "combination =" + combination.toString());
    PublicationPK pubPK = new PublicationPK(pubId, componentId);
    CoordinatePK coordinatePK = new CoordinatePK("unknown", pubPK);
    try {

        Collection<Coordinate> coordinates = getPublicationCoordinates(pubId, componentId);

        if (!checkCombination(coordinates, combination)) {
            return;
        }

        NodeDetail nodeDetail;
        // enrich the combination with the ancestors of each node
        Iterator<String> it = combination.iterator();
        List<CoordinatePoint> allnodes = new ArrayList<CoordinatePoint>();
        int i = 1;
        while (it.hasNext()) {
            String nodeId = it.next();
            NodePK nodePK = new NodePK(nodeId, componentId);
            SilverTrace.info("kmax", "KmeliaBmEjb.addPublicationToCombination()", "root.MSG_GEN_PARAM_VALUE",
                    "avant nodeBm.getPath() ! i = " + i);
            Collection<NodeDetail> path = nodeBm.getPath(nodePK);
            SilverTrace.info("kmax", "KmeliaBmEjb.addPublicationToCombination()", "root.MSG_GEN_PARAM_VALUE",
                    "path for nodeId " + nodeId + " = " + path.toString());
            for (NodeDetail aPath : path) {
                nodeDetail = aPath;
                String anscestorId = nodeDetail.getNodePK().getId();
                int nodeLevel = nodeDetail.getLevel();
                if (!nodeDetail.getNodePK().isRoot()) {
                    CoordinatePoint point;
                    if (anscestorId.equals(nodeId)) {
                        point = new CoordinatePoint(-1, Integer.parseInt(anscestorId), true, nodeLevel, i);
                    } else {
                        point = new CoordinatePoint(-1, Integer.parseInt(anscestorId), false, nodeLevel, i);
                    }
                    allnodes.add(point);
                }
            }
            i++;
        }
        int coordinateId = coordinatesBm.addCoordinate(coordinatePK, allnodes);
        publicationBm.addFather(pubPK, new NodePK(String.valueOf(coordinateId), pubPK));
    } catch (Exception e) {
        throw new KmaxRuntimeException("KmeliaBmEjb.addPublicationToCombination()", ERROR,
                "kmax.EX_IMPOSSIBLE_DAJOUTER_LA_PUBLICATION_A_CETTE_COMBINAISON", e);
    }
}

From source file:org.apache.hadoop.hbase.regionserver.Store.java
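
As in the HStore example above, compactedFiles.toString() is included in the error log if replacing the compacted files fails.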

StoreFile completeCompaction(final Collection<StoreFile> compactedFiles, final StoreFile.Writer compactedFile)
        throws IOException {
    // 1. Moving the new files into place -- if there is a new file (may not
    // be if all cells were expired or deleted).
    StoreFile result = null;
    if (compactedFile != null) {
        validateStoreFile(compactedFile.getPath());
        // Move the file into the right spot
        Path origPath = compactedFile.getPath();
        Path destPath = new Path(homedir, origPath.getName());
        LOG.info("Renaming compacted file at " + origPath + " to " + destPath);
        if (!HBaseFileSystem.renameDirForFileSystem(fs, origPath, destPath)) {
            LOG.error("Failed move of compacted file " + origPath + " to " + destPath);
            throw new IOException("Failed move of compacted file " + origPath + " to " + destPath);
        }
        result = new StoreFile(this.fs, destPath, this.conf, this.cacheConf, this.family.getBloomFilterType(),
                this.dataBlockEncoder, isAssistant());
        passSchemaMetricsTo(result);
        result.createReader();
    }
    try {
        this.lock.writeLock().lock();
        try {
            // Change this.storefiles so it reflects new state but do not
            // delete old store files until we have sent out notification of
            // change in case old files are still being accessed by outstanding
            // scanners.
            ArrayList<StoreFile> newStoreFiles = Lists.newArrayList(storefiles);
            newStoreFiles.removeAll(compactedFiles);
            filesCompacting.removeAll(compactedFiles); // safe bc: lock.writeLock()

            // If a StoreFile result, move it into place.  May be null.
            if (result != null) {
                newStoreFiles.add(result);
            }

            this.storefiles = sortAndClone(newStoreFiles);
        } finally {
            // We need the lock, as long as we are updating the storefiles
            // or changing the memstore. Let us release it before calling
            // notifyChangeReadersObservers. See HBASE-4485 for a possible
            // deadlock scenario that could have happened if we continued to hold
            // the lock.
            this.lock.writeLock().unlock();
        }

        // Tell observers that list of StoreFiles has changed.
        notifyChangedReadersObservers();

        // let the archive util decide if we should archive or delete the files
        LOG.debug("Removing store files after compaction...");
        HFileArchiver.archiveStoreFiles(this.conf, this.fs, this.region, this.family.getName(), compactedFiles);

    } catch (IOException e) {
        e = RemoteExceptionHandler.checkIOException(e);
        LOG.error("Failed replacing compacted files in " + this + ". Compacted file is "
                + (result == null ? "none" : result.toString()) + ".  Files replaced "
                + compactedFiles.toString() + " some of which may have been already removed", e);
    }

    // 4. Compute new store size
    this.storeSize = 0L;
    this.totalUncompressedBytes = 0L;
    for (StoreFile hsf : this.storefiles) {
        StoreFile.Reader r = hsf.getReader();
        if (r == null) {
            LOG.warn("StoreFile " + hsf + " has a null Reader");
            continue;
        }
        this.storeSize += r.length();
        this.totalUncompressedBytes += r.getTotalUncompressedBytes();
    }
    return result;
}