Example usage for java.util Set toString

Introduction

This page collects usage examples for java.util.Set.toString().

Prototype

public String toString() 

Document

Returns a string representation of the object.
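
For the common implementations (HashSet, LinkedHashSet, TreeSet) this is the behavior inherited from java.util.AbstractCollection: the elements in iteration order, enclosed in square brackets and separated by ", ". A minimal sketch (class name and values are hypothetical):

import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;

public class SetToStringDemo {
    public static void main(String[] args) {
        Set<String> hosts = new HashSet<>();
        hosts.add("i-001");
        hosts.add("i-002");

        // println calls String.valueOf, which calls toString() on the set
        System.out.println(hosts); // e.g. [i-001, i-002]; HashSet order is unspecified

        // a TreeSet iterates in sorted order, so its toString() is deterministic
        Set<String> sorted = new TreeSet<>(hosts);
        System.out.println("hosts: " + sorted); // hosts: [i-001, i-002]
    }
}

Note that string concatenation and String.format("%s", ...) invoke toString() implicitly, so the explicit .toString() calls in several of the examples below are optional but harmless.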

Usage

From source file:com.pinterest.teletraan.worker.ClusterReplacer.java

/**
 * Step 2. The LAUNCHING state should make sure all hosts are in the RUNNING state and serving builds.
 * If some hosts were terminated or their deploys failed, go back to the INIT state to relaunch them.
 */
private void processLaunchingState(ClusterUpgradeEventBean eventBean) throws Exception {
    String clusterName = eventBean.getCluster_name();
    Collection<String> hostIds = Arrays.asList(eventBean.getHost_ids().split(","));

    // 1. make sure every host is running
    Set<String> failedIds = hostInfoDAO.getTerminatedHosts(new HashSet<>(hostIds));
    List<String> runningIds = hostInfoDAO.getRunningInstances(new ArrayList<>(hostIds));

    // 2. make sure hosts are serving builds
    boolean succeeded = true;
    for (String hostId : runningIds) {
        List<AgentBean> agents = agentDAO.getByHostId(hostId);
        if (agents.isEmpty()) {
            LOG.info(String.format("Host %s has not ping server yet", hostId));
            succeeded = false;
            continue;
        }

        // Make sure every env on the host is serving builds
        for (AgentBean agent : agents) {
            if (agent.getDeploy_stage() != DeployStage.SERVING_BUILD) {
                succeeded = false;
                if (agent.getStatus() != AgentStatus.SUCCEEDED && agent.getStatus() != AgentStatus.UNKNOWN
                        && agent.getStatus() != AgentStatus.SCRIPT_FAILED) {
                    LOG.info(String.format("Deploy failed on host %s", hostId));
                    failedIds.add(hostId);
                }
            }
        }
    }

    // 3. if found failed hosts, terminate them and go back to INIT state to relaunch hosts
    if (!failedIds.isEmpty()) {
        // Arrays.asList returns a fixed-size list, so copy it before mutating
        List<String> updateHostIds = new ArrayList<>(Arrays.asList(eventBean.getHost_ids().split(",")));
        updateHostIds.removeAll(failedIds);
        clusterManager.terminateHosts(clusterName, failedIds, true);

        LOG.info(String.format("Successfully terminate failed hosts %s, go back to INIT state",
                failedIds.toString()));
        ClusterUpgradeEventBean updateBean = new ClusterUpgradeEventBean();
        updateBean.setHost_ids(Joiner.on(",").join(updateHostIds));
        updateBean.setState(ClusterUpgradeEventState.INIT);
        updateBean.setStatus(ClusterUpgradeEventStatus.SUCCEEDED);
        transitionState(eventBean.getId(), updateBean);
        return;
    }

    if (succeeded) {
        LOG.info("Successfully completed LAUNCHING state, move to REPLACING state");
        ClusterUpgradeEventBean updateBean = new ClusterUpgradeEventBean();
        updateBean.setState(ClusterUpgradeEventState.REPLACING);
        updateBean.setStatus(ClusterUpgradeEventStatus.SUCCEEDED);
        transitionState(eventBean.getId(), updateBean);
    }
}

From source file:org.apache.openaz.xacml.std.pap.StdEngine.java

private Set<StdPDPGroup> readProperties(Path repository, Properties properties) throws PAPException {
    Set<StdPDPGroup> groups = new HashSet<StdPDPGroup>();
    //
    // See if there is a groups property
    //
    String groupList = properties.getProperty(PROP_PAP_GROUPS, "");
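    // getProperty was given a default of "", so this null check is purely defensive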
    if (groupList == null) {
        logger.warn("null group list " + PROP_PAP_GROUPS);
        groupList = "";
    }
    if (logger.isDebugEnabled()) {
        logger.debug("group list: " + groupList);
    }
    //
    // Iterate over the groups; converting to a set ensures we have unique groups.
    //
    for (String id : Splitter.on(',').trimResults().omitEmptyStrings().split(groupList)) {
        //
        // Add our Group Object
        //
        StdPDPGroup g = new StdPDPGroup(id.trim(),
                id.equals(properties.getProperty(PROP_PAP_GROUPS_DEFAULT, PROP_PAP_GROUPS_DEFAULT_NAME)),
                properties, Paths.get(repository.toString(), id));

        //
        // Add it in
        //
        groups.add(g);
    }
    //
    // Dump what we got
    //
    if (logger.isDebugEnabled()) {
        logger.debug("PDP Group List: " + groups.toString());
    }
    return groups;
}
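
The group list parsed above is a plain comma-separated property value. Below is a minimal sketch of the same Guava Splitter pipeline, with a hypothetical class name and property value, showing how the Set deduplicates the ids:

import java.util.HashSet;
import java.util.Set;

import com.google.common.base.Splitter;

public class GroupListDemo {
    public static void main(String[] args) {
        // hypothetical value of a groups property such as PROP_PAP_GROUPS
        String groupList = " default, groupA ,, groupA ";
        Set<String> ids = new HashSet<>();
        for (String id : Splitter.on(',').trimResults().omitEmptyStrings().split(groupList)) {
            ids.add(id); // the Set silently drops the duplicate "groupA"
        }
        System.out.println("group ids: " + ids.toString()); // e.g. group ids: [groupA, default]
    }
}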

From source file:org.aksw.gerbil.database.ExperimentDAOImplJUnitTest.java

@Test
public void testExperimentCreationAndSelection() throws InterruptedException {
    final String EXPERIMENT_ID = "id-999";
    Set<ExperimentTaskResult> results = new HashSet<ExperimentTaskResult>();
    Random random = new Random();
    for (int i = 0; i < 10; ++i) {
        if (i < 8) {
            results.add(new ExperimentTaskResult("annotator1", "dataset" + i, ExperimentType.D2W,
                    Matching.STRONG_ANNOTATION_MATCH,
                    new double[] { random.nextFloat(), random.nextFloat(), random.nextFloat(),
                            random.nextFloat(), random.nextFloat(), random.nextFloat() },
                    ExperimentDAO.TASK_FINISHED, random.nextInt()));
        } else {
            results.add(new ExperimentTaskResult("annotator1", "dataset" + i, ExperimentType.D2W,
                    Matching.STRONG_ANNOTATION_MATCH, new double[6],
                    i == 8 ? ExperimentDAO.TASK_STARTED_BUT_NOT_FINISHED_YET
                            : ErrorTypes.UNEXPECTED_EXCEPTION.getErrorCode(),
                    0));
        }
    }

    int taskId;
    for (ExperimentTaskResult result : results) {
        taskId = this.dao.createTask(result.getAnnotator(), result.getDataset(), result.getType().name(),
                result.getMatching().name(), EXPERIMENT_ID);
        if (result.state == ExperimentDAO.TASK_FINISHED) {
            this.dao.setExperimentTaskResult(taskId, result);
        } else {
            this.dao.setExperimentState(taskId, result.state);
        }
    }

    List<ExperimentTaskResult> retrievedResults = dao.getResultsOfExperiment(EXPERIMENT_ID);
    ExperimentTaskResult originalResult;
    for (ExperimentTaskResult retrievedResult : retrievedResults) {
        if (retrievedResult.state == ExperimentDAO.TASK_FINISHED) {
            Assert.assertTrue("Couldn't find " + retrievedResult.toString() + " inside of the expected results "
                    + results.toString(), results.remove(retrievedResult));
        } else {
            // We have to search them manually since the time stamps are
            // different
            originalResult = null;
            for (ExperimentTaskResult result : results) {
                if ((result.state == retrievedResult.state)
                        && (result.annotator.equals(retrievedResult.annotator))
                        && (result.dataset.equals(retrievedResult.dataset))
                        && (result.errorCount == retrievedResult.errorCount)
                        && (result.matching == retrievedResult.matching)
                        && (result.type == retrievedResult.type)) {
                    originalResult = result;
                    break;
                }
            }
            Assert.assertNotNull("Couldn't find " + retrievedResult.toString()
                    + " inside of the expected results " + results.toString(), originalResult);
            results.remove(originalResult);
        }
    }
    Assert.assertEquals("Not all expected results have been retrieved. Missing results " + results, 0,
            results.size());
}

From source file:playground.acmarmol.matsim2030.microcensus2010.MZPopulationUtils.java

public static void changeToMatsimModes(Population population) {
    Set<String> unknownModes = new HashSet<String>();

    for (Person person : population.getPersons().values()) {

        Plan plan = person.getSelectedPlan();

        if (plan != null) {
            for (PlanElement pe : plan.getPlanElements()) {

                if (pe instanceof Leg) {
                    Leg leg = (Leg) pe;

                    String mode = leg.getMode();
                    if (mode.equals(MZConstants.PLANE) || mode.equals(MZConstants.TRAIN)
                            || mode.equals(MZConstants.SHIP) || mode.equals(MZConstants.TAXI)
                            || mode.equals(MZConstants.TRAM) || mode.equals(MZConstants.BUS)
                            || mode.equals(MZConstants.SONSTINGER_OEV) || mode.equals(MZConstants.POSTAUTO)
                            || mode.equals(MZConstants.REISECAR)) {
                        leg.setMode(TransportMode.pt); //PUBLIC TRANSPORT
                    } else if (mode.equals(MZConstants.WALK)) {
                        leg.setMode(TransportMode.walk); //WALK

                    } else if (mode.equals(MZConstants.BICYCLE) || mode.equals(MZConstants.SKATEBOARD)
                            || mode.equals(MZConstants.MOFA)) {
                        leg.setMode(TransportMode.bike); //BICYCLE

                    } else if (mode.equals(MZConstants.CAR) || mode.equals(MZConstants.MOTORCYCLE)
                            || mode.equals(MZConstants.TRUCK)) {
                        leg.setMode(TransportMode.car); //CAR

                    } else {
                        unknownModes.add(mode);
                    }
                }
            }
        }
    }

    if (!unknownModes.isEmpty()) {
        log.warn("Unhandled modes: " + unknownModes.toString());
    }
}

From source file:org.opencb.opencga.analysis.storage.variant.VariantStorage.java

/**
 * Checks whether a set of given cohorts is available to calculate statistics.
 *
 * @param studyId       Study id
 * @param cohortIds     Set of cohorts
 * @param updateStats   Update already existing stats
 * @param sessionId     User's sessionId
 * @return Map from cohortId to Cohort
 * @throws CatalogException if an error occurs in Catalog
 */
protected Map<Long, Cohort> checkCanCalculateCohorts(long studyId, List<Long> cohortIds, boolean updateStats,
        String sessionId) throws CatalogException {
    Set<Long> studyIdSet = new HashSet<>();
    Map<Long, Cohort> cohortMap = new HashMap<>(cohortIds.size());
    for (Long cohortId : cohortIds) {
        Cohort cohort = catalogManager.getCohort(cohortId, null, sessionId).first();
        long studyIdByCohortId = catalogManager.getStudyIdByCohortId(cohortId);
        studyIdSet.add(studyIdByCohortId);
        switch (cohort.getStatus().getName()) {
        case Cohort.CohortStatus.NONE:
        case Cohort.CohortStatus.INVALID:
            break;
        case Cohort.CohortStatus.READY:
            if (updateStats) {
                catalogManager.modifyCohort(cohortId, new ObjectMap("status.name", Cohort.CohortStatus.INVALID),
                        new QueryOptions(), sessionId);
                break;
            }
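            // intentional fall-through: a READY cohort without updateStats is rejected like CALCULATING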
        case Cohort.CohortStatus.CALCULATING:
            throw new CatalogException("Unable to calculate stats for cohort " + "{ id: " + cohort.getId()
                    + " name: \"" + cohort.getName() + "\" }" + " with status \"" + cohort.getStatus().getName()
                    + "\"");
        }
        cohortMap.put(cohortId, cohort);
        //            QueryResult<Sample> sampleQueryResult = catalogManager.getAllSamples(studyIdByCohortId, new Query("id", cohort.getSamples()), new QueryOptions(), sessionId);
    }

    // Check that all cohorts are from the same study
    if (studyIdSet.size() != 1) {
        throw new CatalogException("Error: CohortIds are from multiple studies: " + studyIdSet.toString());
    }
    if (!new ArrayList<>(studyIdSet).get(0).equals(studyId)) {
        throw new CatalogException(
                "Error: CohortIds are from a different study than provided: " + studyIdSet.toString());
    }

    return cohortMap;
}

From source file:com.github.helenusdriver.driver.impl.PersistedMap.java

/**
 * {@inheritDoc}
 *
 * @author paouelle
 *
 * @see java.util.Map#entrySet()
 */
@Override
public Set<Map.Entry<K, T>> entrySet() {
    if (eset == null) {
        final Set<Map.Entry<K, PersistedValue<T, PT>>> eset = map.entrySet();

        this.eset = new AbstractSet<Map.Entry<K, T>>() {
            @Override
            public int size() {
                return eset.size();
            }

            @Override
            public boolean isEmpty() {
                return eset.isEmpty();
            }

            @Override
            public Iterator<Map.Entry<K, T>> iterator() {
                return new TransformIterator<Map.Entry<K, PersistedValue<T, PT>>, Map.Entry<K, T>>(
                        eset.iterator()) {
                    @Override
                    protected Map.Entry<K, T> transform(Map.Entry<K, PersistedValue<T, PT>> me) {
                        return new Entry(me);
                    }
                };
            }

            @Override
            public Stream<Map.Entry<K, T>> stream() {
                return eset.stream().map(me -> new Entry(me));
            }

            @Override
            public Stream<Map.Entry<K, T>> parallelStream() {
                return eset.parallelStream().map(me -> new Entry(me));
            }

            @Override
            public boolean remove(Object o) {
                if (!(o instanceof Map.Entry)) {
                    return false;
                }
                @SuppressWarnings("unchecked")
                final Map.Entry<K, T> me = (Map.Entry<K, T>) o;

                return (map.remove(me.getKey()) != null);
            }

            @Override
            public void clear() {
                eset.clear();
            }

            @Override
            public String toString() {
                return eset.toString();
            }
        };
    }
    return eset;
}

From source file:org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter.java

/**
 * This method provides a <i>best guess</i> of what model properties should be embedded
 * in content.  The list of properties mapped by default need <b>not</b>
 * include all properties to be embedded in the document; just the obvious set of mappings
 * need be supplied.
 * Implementations must either provide the default mapping properties in the expected
 * location or override the method to provide the default mapping.
 * <p>
 * The default implementation looks for the default mapping file in the location
 * given by the class name and <i>.embed.properties</i>.  If the extracter's class is
 * <b>x.y.z.MyExtracter</b> then the default properties will be picked up at
 * <b>classpath:/x/y/z/MyExtracter.embed.properties</b>.
 * Inner classes are supported, but the '$' in the class name is replaced with '-', so
 * default properties for <b>x.y.z.MyStuff$MyExtracter</b> will be located using
 * <b>x.y.z.MyStuff-MyExtracter.embed.properties</b>.
 * <p>
 * The default mapping implementation should include thorough Javadocs so that the
 * system administrators can accurately determine how to best enhance or override the
 * default mapping.
 * <p>
 * If the default mapping is declared in a properties file other than the one named after
 * the class, then the {@link #readEmbedMappingProperties(String)} method can be used to quickly
 * generate the return value:
 * <pre><code>
 *      protected Map&lt;QName, Set&lt;String&gt;&gt; getDefaultEmbedMapping()
 *      {
 *          return readEmbedMappingProperties(DEFAULT_MAPPING);
 *      }
 * </code></pre>
 * The map can also be created in code either statically or during the call.
 * <p>
 * If no embed mapping properties file is found, the reverse of the extract
 * mapping in {@link #getDefaultMapping()} will be assumed, with the first QName in each
 * value used as the key for this mapping and a last-win approach for duplicates.
 *
 * @return              Returns the default, static embed mapping.  It may not be null.
 *
 * @see #setInheritDefaultMapping(boolean inherit)
 */
protected Map<QName, Set<String>> getDefaultEmbedMapping() {
    Map<QName, Set<String>> embedMapping = null;
    String metadataPropertiesUrl = null;
    try {
        // Can't use getSimpleName here because we lose inner class $ processing
        String className = this.getClass().getName();
        String shortClassName = className.split("\\.")[className.split("\\.").length - 1];
        // Replace $
        shortClassName = shortClassName.replace('$', '-');
        // Append .properties
        metadataPropertiesUrl = "alfresco/metadata/" + shortClassName + ".embed.properties";
        // Attempt to load the properties
        embedMapping = readEmbedMappingProperties(metadataPropertiesUrl);
    } catch (AlfrescoRuntimeException e) {
        // No embed mapping found at default location
    }
    // Try package location
    try {
        String canonicalClassName = this.getClass().getName();
        // Replace $
        canonicalClassName = canonicalClassName.replace('$', '-');
        // Replace .
        canonicalClassName = canonicalClassName.replace('.', '/');
        // Append .properties
        String packagePropertiesUrl = canonicalClassName + ".embed.properties";
        // Attempt to load the properties
        embedMapping = readEmbedMappingProperties(packagePropertiesUrl);
    } catch (AlfrescoRuntimeException e) {
        // No embed mapping found at legacy location
    }
    if (embedMapping == null) {
        if (logger.isDebugEnabled()) {
            logger.debug("No explicit embed mapping properties found at: " + metadataPropertiesUrl
                    + ", assuming reverse of extract mapping");
        }
        Map<String, Set<QName>> extractMapping = this.mapping;
        if (extractMapping == null || extractMapping.size() == 0) {
            extractMapping = getDefaultMapping();
        }
        embedMapping = new HashMap<QName, Set<String>>(extractMapping.size());
        for (String metadataKey : extractMapping.keySet()) {
            if (extractMapping.get(metadataKey) != null && extractMapping.get(metadataKey).size() > 0) {
                QName modelProperty = extractMapping.get(metadataKey).iterator().next();
                Set<String> metadataKeys = embedMapping.get(modelProperty);
                if (metadataKeys == null) {
                    metadataKeys = new HashSet<String>(1);
                    embedMapping.put(modelProperty, metadataKeys);
                }
                metadataKeys.add(metadataKey);
                if (logger.isTraceEnabled()) {
                    logger.trace("Added mapping from " + modelProperty + " to " + metadataKeys.toString());
                }
            }
        }
    }
    return embedMapping;
}
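
As a sketch of the naming convention described in the Javadoc above, an extracter x.y.z.MyExtracter would load its default embed mapping from classpath:/x/y/z/MyExtracter.embed.properties. The entries below are hypothetical and only illustrate the shape the method builds (a model property QName mapped to one or more metadata keys):

# hypothetical embed mapping file: MyExtracter.embed.properties
namespace.prefix.cm=http://www.alfresco.org/model/content/1.0
cm\:author=author
cm\:title=title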

From source file:org.opencb.opencga.storage.core.manager.variant.operations.VariantStatsStorageOperation.java

/**
 * Check if a set of given cohorts are available to calculate statistics.
 *
 * @param studyId       Study id
 * @param cohortIds     Set of cohorts
 * @param updateStats   Update already existing stats
 * @param resume        Resume statistics calculation
 * @param sessionId     User's sessionId
 * @return Map from cohortId to Cohort
 * @throws CatalogException if an error occurs in Catalog
 */
protected Map<Long, Cohort> checkCanCalculateCohorts(long studyId, List<Long> cohortIds, boolean updateStats,
        boolean resume, String sessionId) throws CatalogException {
    Set<Long> studyIdSet = new HashSet<>();
    Map<Long, Cohort> cohortMap = new HashMap<>(cohortIds.size());
    for (Long cohortId : cohortIds) {
        Cohort cohort = catalogManager.getCohort(cohortId, null, sessionId).first();
        long studyIdByCohortId = catalogManager.getStudyIdByCohortId(cohortId);
        studyIdSet.add(studyIdByCohortId);
        switch (cohort.getStatus().getName()) {
        case Cohort.CohortStatus.NONE:
        case Cohort.CohortStatus.INVALID:
            break;
        case Cohort.CohortStatus.READY:
            if (updateStats) {
                catalogManager.getCohortManager().setStatus(cohortId.toString(), Cohort.CohortStatus.INVALID,
                        "", sessionId);
                break;
            } else {
                // If neither updating stats nor resuming, statistics cannot be calculated for a READY cohort
                if (!resume) {
                    throw unableToCalculateCohortReady(cohort);
                }
            }
            break;
        case Cohort.CohortStatus.CALCULATING:
            if (!resume) {
                throw unableToCalculateCohortCalculating(cohort);
            }
            break;
        default:
            throw new IllegalStateException("Unknown status " + cohort.getStatus().getName());
        }
        cohortMap.put(cohortId, cohort);
        //            QueryResult<Sample> sampleQueryResult = catalogManager.getAllSamples(studyIdByCohortId, new Query("id", cohort.getSamples()),
        //                      new QueryOptions(), sessionId);
    }

    // Check that all cohorts are from the same study
    if (studyIdSet.size() != 1) {
        throw new CatalogException("Error: CohortIds are from multiple studies: " + studyIdSet.toString());
    }
    if (!new ArrayList<>(studyIdSet).get(0).equals(studyId)) {
        throw new CatalogException(
                "Error: CohortIds are from a different study than provided: " + studyIdSet.toString());
    }

    return cohortMap;
}

From source file:fr.landel.utils.assertor.AssertorIterableTest.java

/**
 * Test method for {@link AssertorIterable#contains}.
 *
 * @throws IOException
 *             if the iterable does not contain the expected values
 */
@Test
public void testContainsIterable() throws IOException {
    final String el1 = "element1";
    final String el2 = "element2";

    final Set<String> set1 = new HashSet<>();
    final Set<String> set2 = new HashSet<>();
    final Set<String> set3 = new HashSet<>();
    set1.add(el1);
    set2.add(el1);
    set3.add(el2);

    Assertor.that(set1).containsAll(set2).orElseThrow("iterable doesn't contain the list %s*");
    assertFalse(Assertor.that(set1).containsAll(set3).isOK());
    Assertor.that(set1).containsAny(set2).orElseThrow("iterable doesn't contain the list %s*");

    Assertor.that(set1, EnumAnalysisMode.STREAM).containsAll(set2)
            .orElseThrow("iterable doesn't contain the list %s*");
    assertFalse(Assertor.that(set1, EnumAnalysisMode.STREAM).containsAll(set3).isOK());
    Assertor.that(set1, EnumAnalysisMode.STREAM).containsAny(set2)
            .orElseThrow("iterable doesn't contain the list %s*");

    Assertor.that(set1, EnumAnalysisMode.PARALLEL).containsAll(set2)
            .orElseThrow("iterable doesn't contain the list %s*");
    assertFalse(Assertor.that(set1, EnumAnalysisMode.PARALLEL).containsAll(set3).isOK());
    Assertor.that(set1, EnumAnalysisMode.PARALLEL).containsAny(set2)
            .orElseThrow("iterable doesn't contain the list %s*");

    set2.add(el2);
    Assertor.that(set1).containsAny(set2).orElseThrow("iterable doesn't contain the list %s*");

    assertException(() -> {
        Assertor.that(set1).containsAll(set2).orElseThrow("iterable doesn't contain the list %2$s*");
        fail(ERROR);
    }, IllegalArgumentException.class, "iterable doesn't contain the list " + set2.toString());

    assertException(() -> {
        Assertor.that(set1).containsAll(set2).orElseThrow(new IOException(), true);
        fail(ERROR);
    }, IOException.class);

    assertException(() -> {
        Assertor.that(set1).containsAll((Iterable<String>) null).orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class, "neither iterables can be null or empty");

    assertException(() -> {
        Assertor.that(set1).containsAny((Iterable<String>) null).orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class, "neither iterables can be null or empty");

    set1.clear();

    assertException(() -> {
        Assertor.that(set1).containsAll(set2).orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class);

    assertException(() -> {
        Assertor.that(set1).containsAll(set2).orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class, "neither iterables can be null or empty");

    assertException(() -> {
        Assertor.that((Iterable<String>) null).contains(el1).orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class, "the iterable cannot be null or empty");

    assertException(() -> {
        Assertor.that((Iterable<String>) null).containsAny(set2).orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class, "neither iterables can be null or empty");

    assertException(() -> {
        Assertor.that(set1).containsAll((Iterable<String>) null).orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class, "neither iterables can be null or empty");

    set1.add(null);
    Assertor.that(set1).contains(null).orElseThrow();
}

From source file:org.apache.ojb.broker.QueryTest.java

/**
 * prefetch ProductGroups for Articles
 */
public void testPrefetchedReferencesSingleKey() {
    ClassDescriptor cldProductGroup = broker.getClassDescriptor(ProductGroup.class);
    ClassDescriptor cldArticle = broker.getClassDescriptor(Article.class);
    Class productGroupProxy = cldProductGroup.getProxyClass();
    Class articleProxy = cldArticle.getProxyClass();

    //
    // use ProductGroup and Articles with disabled Proxy
    //
    broker.clearCache();
    cldProductGroup.setProxyClass(null);
    cldProductGroup.setProxyClassName(null);
    cldArticle.setProxyClass(null);
    cldArticle.setProxyClassName(null);
    broker.getDescriptorRepository().setClassDescriptor(cldProductGroup);
    broker.getDescriptorRepository().setClassDescriptor(cldArticle);

    Criteria crit = new Criteria();
    crit.addNotNull("productGroupId");
    crit.addLessOrEqualThan("productGroupId", new Integer(5));
    QueryByCriteria q = QueryFactory.newQuery(Article.class, crit);
    q.addOrderByDescending("productGroupId");
    q.addPrefetchedRelationship("productGroup");

    Collection results = broker.getCollectionByQuery(q);
    Set pgs = new HashSet();
    Iterator iter = results.iterator();
    while (iter.hasNext()) {
        InterfaceArticle a = (InterfaceArticle) iter.next();
        pgs.add(a.getProductGroup().getName());
    }

    assertTrue(pgs.size() > 0);
    String pgsString = pgs.toString();

    //
    // use ProductGroup and Articles with original Proxy settings
    //
    broker.clearCache();
    cldProductGroup.setProxyClass(productGroupProxy);
    cldProductGroup.setProxyClassName(productGroupProxy.getName());
    cldArticle.setProxyClass(articleProxy);
    cldArticle.setProxyClassName(articleProxy.getName());
    broker.getDescriptorRepository().setClassDescriptor(cldProductGroup);
    broker.getDescriptorRepository().setClassDescriptor(cldArticle);

    crit = new Criteria();
    crit.addNotNull("productGroupId");
    crit.addLessOrEqualThan("productGroupId", new Integer(5));
    q = QueryFactory.newQuery(Article.class, crit);
    q.addOrderByDescending("productGroupId");

    results = broker.getCollectionByQuery(q);
    Set pgs2 = new HashSet();
    iter = results.iterator();
    while (iter.hasNext()) {
        InterfaceArticle a = (InterfaceArticle) iter.next();
        pgs2.add(a.getProductGroup().getName());
    }

    assertTrue(pgs2.size() > 0);
    String pgsString2 = pgs2.toString();

    //
    // compare prefetched and 'normal' data
    //
    assertEquals("Check size", pgs.size(), pgs2.size());
    assertEquals("Check content", pgsString, pgsString2);

}
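
A caveat on the final assertion: java.util.HashSet guarantees no iteration order, so comparing the toString() output of two independently populated sets only works when both happen to iterate identically. Comparing the sets directly (assertEquals(pgs, pgs2)) avoids the ordering dependence, since Set.equals ignores order.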