Usage examples for the java.util.Collection method removeAll:
boolean removeAll(Collection<?> c);
From source file:edu.uci.ics.jung.graph.SparseMultigraph.java
/**
 * Returns the edges of this graph that are of the requested {@code EdgeType}.
 *
 * @param edgeType DIRECTED, UNDIRECTED, or anything else (yields an empty set)
 * @return an unmodifiable view of the directed edges; a fresh mutable set of the
 *         undirected edges (all edges minus the directed ones); or an immutable
 *         empty set for any other edge type
 */
public Collection<E> getEdges(EdgeType edgeType) {
    if (edgeType == EdgeType.DIRECTED) {
        return Collections.unmodifiableSet(this.directedEdges);
    } else if (edgeType == EdgeType.UNDIRECTED) {
        // undirected = all edges minus the directed ones; copy first so the
        // removeAll cannot mutate the graph's internal edge set
        Collection<E> edges = new HashSet<E>(getEdges());
        edges.removeAll(directedEdges);
        return edges;
    } else {
        // Collections.emptySet() is typed, unlike the raw Collections.EMPTY_SET the
        // original returned, so the method-level @SuppressWarnings("unchecked") is
        // no longer needed and has been dropped.
        return Collections.emptySet();
    }
}
From source file:org.openconcerto.utils.CollectionMap.java
public void removeAllFromCollections(CollectionMap<? extends K, ? extends V> mm) { for (final Map.Entry<? extends K, ?> e : mm.entrySet()) { final Collection<V> coll = this.getNull(e.getKey()); if (coll != null) { coll.removeAll((Collection<?>) e.getValue()); // see #remove(Object, Object) if (coll.isEmpty()) this.remove(e.getKey()); }//from w w w . j a v a 2 s.co m } }
From source file:org.eclipse.swordfish.internal.resolver.policy.helpers.WsPolicyBuilder.java
/**
 * Registers the given assertion QNames with the policy builder's assertion-builder
 * registry and unregisters any previously known QNames that are absent from the new
 * set. The shared XMLPrimitiveAssertionBuilder is created lazily on first call.
 *
 * @param assertions the assertion element names that should be known from now on
 */
public synchronized void registerPolicyAssertions(Collection<QName> assertions) {
    Collection<QName> toRegister = new LinkedList<QName>();
    Collection<QName> toUnregister = new LinkedList<QName>();
    if (assertionBuilder != null) {
        // previously known elements that do not appear in the new set get dropped
        toUnregister.addAll(assertionBuilder.getKnownElements());
        toUnregister.removeAll(assertions);
    } else {
        assertionBuilder = new XMLPrimitiveAssertionBuilder();
    }
    toRegister.addAll(assertions);
    assertionBuilder.setKnownElements(toRegister);
    AssertionBuilderRegistry registry = policyBuilder.getAssertionBuilderRegistry();
    registry.setIgnoreUnknownAssertions(false);
    for (QName name : toUnregister) {
        registry.unregister(name);
    }
    for (QName name : toRegister) {
        registry.register(name, assertionBuilder);
    }
}
From source file:xmlconverter.controller.logic.SiteToDir.java
/**
 * Builds the full set of directory paths to create for the given site. Each crafted
 * path is stored in the {@code pathName} set field, which avoids duplicates. Paths are
 * assembled manually from the root path ({@code home}) plus String segments.
 *
 * After the paths are collected, {@code createDir()} is called.
 *
 * Flag semantics (from user input): 0 - Default, 1 - Override (wipe and recreate),
 * 2 - Update (delete stale directories, keep the rest).
 *
 * NOTE(review): this method mutates the instance fields {@code detection} and
 * {@code build} as it iterates; the "Abschnitt"/"Automatische"/"Manuelle" branches
 * depend on a "Detektions-Objekt" entry having been seen first for the same station.
 *
 * @param site the site whose panels and detection entries drive the directory layout
 * @throws java.io.IOException if deleting stale directories fails
 */
public void writeDir(Site site) throws IOException {
    ArrayList<String> detectList = site.getSiteAsArrayList();
    ArrayList<File> tempArray = new ArrayList<>();
    ArrayList<String> allowOverrideList = new ArrayList<>();
    int flag = bean.getUserInputBean().getFlag();
    String siteDesc = bean.getEnvBean().getSiteDir().getName();
    allowOverrideList.add(siteDesc);
    for (Panel s : site.getPanelList()) {
        String panel = s.getDescription();
        tempArray.add(new File(format(home.getAbsolutePath() + slash + siteDesc.trim() + slash + panel)));
        // station id of the panel is the digit just BEFORE the '_' in its description
        int indexHardware = panel.indexOf("_");
        int stationIdHardware = Character.getNumericValue(panel.charAt(indexHardware - 1));
        for (String detectionList : detectList) {
            // station id of the entry is the digit just AFTER the '_'
            int index = detectionList.indexOf("_");
            int stationId = Character.getNumericValue(detectionList.charAt(index + 1));
            if (detectionList.contains("Detektions-Objekt") && stationId == stationIdHardware) {
                detection = siteDesc.trim() + slash + panel + slash + detectionList.trim();
                pathName.add(new File(home, format(detection).replace("!", "")));
            } else if (detectionList.contains("Abschnitt") && stationId == stationIdHardware) {
                build = detection.trim() + slash + detectionList.trim();
                pathName.add(new File(home, format(build.trim()).replace("!", "")));
            } else if (detectionList.contains("Automatische") && stationId == stationIdHardware) {
                pathName.add(
                        new File(home, format(build.trim() + slash + detectionList.trim()).replace("!", "")));
            } else if (detectionList.contains("Manuelle") && stationId == stationIdHardware) {
                pathName.add(new File(home, format(build.trim() + slash + detectionList).replace("!", "")));
            } else if (detectionList.contains("Stations-Objekt") && stationId == stationIdHardware) {
                pathName.add(new File(home,
                        format(siteDesc.trim() + slash + panel.trim() + slash + detectionList.trim())
                                .replace("!", "")));
            }
        }
    }
    File[] sortedFiles = pathName.toArray(new File[pathName.size()]);
    Arrays.sort(sortedFiles);
    // Override: deletes everything and afterwards rewrites it as it is supposed to be.
    if (flag == 1) {
        File check = new File(home.getAbsolutePath() + slash + allowOverrideList.get(0));
        if (check.exists() && check.isDirectory()) {
            FileUtils.deleteDirectory(check);
        }
        /*
         Update: finds directories that already exist and subtracts them from the
         directories that are wanted; whatever remains on disk but is no longer
         wanted gets deleted. All already existing files are added to tempArray.
         */
    } else if (flag == 2) {
        File beginnDir = new File(home.getAbsolutePath() + slash + allowOverrideList.get(0));
        for (File f : sortedFiles) {
            tempArray.add(f);
        }
        tempArray.add(beginnDir);
        // list only directories under the site root, then remove the wanted ones
        Collection<File> collection = FileUtils.listFilesAndDirs(beginnDir,
                new NotFileFilter(TrueFileFilter.INSTANCE), DirectoryFileFilter.DIRECTORY);
        collection.removeAll(tempArray);
        for (File f : collection) {
            FileUtils.deleteDirectory(f);
        }
    }
    createDir(sortedFiles);
}
From source file:com.nearinfinity.mele.zookeeper.ZookeeperIndexDeletionPolicy.java
@Override public void onCommit(List<? extends IndexCommit> commits) throws IOException { List<String> filesCurrentlyBeingReferenced = getListOfReferencedFiles(zk, indexRefPath); int size = commits.size(); Collection<String> previouslyReferencedFiles = new TreeSet<String>(); OUTER: for (int i = size - 2; i >= 0; i--) { IndexCommit indexCommit = commits.get(i); LOG.info("Processing index commit generation " + indexCommit.getGeneration()); Collection<String> fileNames = new TreeSet<String>(indexCommit.getFileNames()); // remove all filenames that were references in newer index commits, // this way older index commits can be released without the fear of // broken references. fileNames.removeAll(previouslyReferencedFiles); for (String fileName : fileNames) { if (filesCurrentlyBeingReferenced.contains(fileName)) { previouslyReferencedFiles.addAll(fileNames); continue OUTER; }/*from w w w . jav a2s . c o m*/ } LOG.info("Index Commit " + indexCommit.getGeneration() + " no longer needed, releasing " + fileNames); indexCommit.delete(); } }
From source file:edu.stanford.mobisocial.dungbeetle.ui.fragments.FeedMembersFragment.java
/**
 * Fire-and-forget refresh of a feed's group membership: waits briefly, forces a group
 * update, then resends the local profile to any contacts that appeared in the meantime.
 * NOTE(review): the existing TODO below is real — forceUpdate appears to run in the
 * background, so the before/after contact diff may not observe the refresh at all.
 */
private void groupUpdateHack(final Uri feedUri) {
    final Context context = getActivity();
    new Thread() {
        public void run() {
            String feedName = feedUri.getLastPathSegment();
            final IdentityProvider ident = new DBIdentityProvider(mHelper);
            Maybe<Group> mg = mHelper.groupByFeedName(feedName);
            try {
                // crude settle delay before reading the group state
                Thread.sleep(500);
            } catch (InterruptedException e) {
            }
            try {
                // group exists already, load view
                final Group g = mg.get();
                Collection<Contact> existingContacts = g.contactCollection(mHelper);
                //TODO: XXXXX these two won't do a thing because g.forceUpdate happens
                //in the background.....
                g.forceUpdate(context);
                Collection<Contact> newContacts = g.contactCollection(mHelper);
                // keep only the contacts that were not present before the update
                newContacts.removeAll(existingContacts);
                Helpers.resendProfile(context, newContacts, true);
            } catch (Maybe.NoValError e) {
                // no group for this feed name; nothing to refresh
            }
            ident.close();
        };
    }.start();
}
From source file:com.ppp.prm.portal.server.service.gwt.HibernateDetachUtility.java
/**
 * Recursively walks {@code value} and scrubs it so it can be serialized safely outside a
 * Hibernate session: proxies are replaced via {@code replaceObject}, and fields are
 * nulled/processed per the serialization type.
 *
 * @param value the object needing to be detached/scrubbed.
 * @param checkedObjectMap This maps identityHashCodes to Objects we've already detached. In that way we can
 * quickly determine if we've already done the work for the incoming value and avoid traversing it again. This
 * works well almost all of the time, but it is possible that two different objects can have the same identity hash
 * (conflicts are always possible with a hash). In that case we utilize the checkedObjectCollisionMap (see below).
 * @param checkedObjectCollisionMap checkedObjectMap maps the identityhash to the *first* object with that hash. In
 * most cases there will only be mapping for one hash, but it is possible to encounter the same hash for multiple
 * objects, especially on 32bit or IBM JVMs. It is important to know if an object has already been detached
 * because if it is somehow self-referencing, we have to stop the recursion. This map holds the 2nd..Nth mapping
 * for a single hash and is used to ensure we never try to detach an object already processed.
 * @param depth used to stop infinite recursion, defaults to a depth we don't expect to see, but it is configurable.
 * @param serializationType JAXB or plain Java serialization; selects the field-nulling strategy at the end
 * @throws Exception if a problem occurs
 * @throws IllegalStateException if the recursion depth limit is reached
 */
private static void nullOutUninitializedFields(Object value, Map<Integer, Object> checkedObjectMap,
        Map<Integer, List<Object>> checkedObjectCollisionMap, int depth, SerializationType serializationType)
        throws Exception {
    if (depth > depthAllowed) {
        // Depth limit hit: warn (and optionally throw) instead of recursing forever on a cyclic graph.
        String warningMessage = "Recursed too deep [" + depth + " > " + depthAllowed
                + "], will not attempt to detach object of type ["
                + ((value != null) ? value.getClass().getName() : "N/A")
                + "]. This may cause serialization errors later. "
                + "You can try to work around this by setting the system property [" + DEPTH_ALLOWED_SYSPROP
                + "] to a value higher than [" + depth + "] or you can set the system property ["
                + THROW_EXCEPTION_ON_DEPTH_LIMIT_SYSPROP + "] to 'false'";
        LOG.warn(warningMessage);
        if (throwExceptionOnDepthLimit) {
            throw new IllegalStateException(warningMessage);
        }
        return;
    }

    if (null == value) {
        return;
    }

    // System.identityHashCode is a hash code, and therefore not guaranteed to be unique. And we've seen this
    // be the case. So, we use it to try and avoid duplicating work, but handle the case when two objects may
    // have an identity crisis.
    Integer valueIdentity = hashCodeGenerator.getHashCode(value);
    Object checkedObject = checkedObjectMap.get(valueIdentity);

    if (null == checkedObject) {
        // if we have not yet encountered an object with this hash, store it in our map and start scrubbing
        checkedObjectMap.put(valueIdentity, value);

    } else if (value == checkedObject) {
        // if we have scrubbed this already, no more work to be done
        return;

    } else {
        // we have a situation where multiple objects have the same identity hashcode, work with our
        // collision map to decide whether it needs to be scrubbed and add if necessary.
        // Note that this code block is infrequently hit, it is by design that we've pushed the extra
        // work, map, etc, involved for this infrequent case into its own block. The standard cases must
        // be as fast and lean as possible.
        boolean alreadyDetached = false;
        List<Object> collisionObjects = checkedObjectCollisionMap.get(valueIdentity);

        if (null == collisionObjects) {
            // if this is the 2nd occurrence for this hash, create a new map entry
            collisionObjects = new ArrayList<Object>(1);
            checkedObjectCollisionMap.put(valueIdentity, collisionObjects);

        } else {
            // if we have scrubbed this already (identity equality), no more work to be done
            for (Object collisionObject : collisionObjects) {
                if (value == collisionObject) {
                    alreadyDetached = true;
                    break;
                }
            }
        }

        if (LOG.isDebugEnabled()) {
            StringBuilder message = new StringBuilder("\n\tIDENTITY HASHCODE COLLISION [hash=");
            message.append(valueIdentity);
            message.append(", alreadyDetached=");
            message.append(alreadyDetached);
            message.append("]");
            message.append("\n\tCurrent : ");
            message.append(value.getClass().getName());
            message.append("\n\t ");
            message.append(value);
            message.append("\n\tPrevious : ");
            message.append(checkedObject.getClass().getName());
            message.append("\n\t ");
            message.append(checkedObject);
            for (Object collisionObject : collisionObjects) {
                message.append("\n\tPrevious : ");
                message.append(collisionObject.getClass().getName());
                message.append("\n\t ");
                message.append(collisionObject);
            }
            LOG.debug(message);
        }

        // now that we've done our logging, if already detached we're done. Otherwise add to the
        // list of collision objects for this hash, and start scrubbing
        if (alreadyDetached) {
            return;
        }
        collisionObjects.add(value);
    }

    // Perform the detaching
    if (value instanceof Object[]) {
        Object[] objArray = (Object[]) value;
        for (int i = 0; i < objArray.length; i++) {
            Object listEntry = objArray[i];
            Object replaceEntry = replaceObject(listEntry);
            if (replaceEntry != null) {
                objArray[i] = replaceEntry;
            }
            nullOutUninitializedFields(objArray[i], checkedObjectMap, checkedObjectCollisionMap, depth + 1,
                    serializationType);
        }

    } else if (value instanceof List) {
        // Null out any entries in initialized collections
        ListIterator i = ((List) value).listIterator();
        while (i.hasNext()) {
            Object val = i.next();
            Object replace = replaceObject(val);
            if (replace != null) {
                val = replace;
                i.set(replace);
            }
            nullOutUninitializedFields(val, checkedObjectMap, checkedObjectCollisionMap, depth + 1,
                    serializationType);
        }

    } else if (value instanceof Collection) {
        // non-List collections can't be mutated in place while iterating, so batch the swaps
        Collection collection = (Collection) value;
        Collection itemsToBeReplaced = new ArrayList();
        Collection replacementItems = new ArrayList();
        for (Object item : collection) {
            Object replacementItem = replaceObject(item);
            if (replacementItem != null) {
                itemsToBeReplaced.add(item);
                replacementItems.add(replacementItem);
                item = replacementItem;
            }
            nullOutUninitializedFields(item, checkedObjectMap, checkedObjectCollisionMap, depth + 1,
                    serializationType);
        }
        collection.removeAll(itemsToBeReplaced);
        // watch out! if this collection is a Set, HashMap$MapSet doesn't support addAll. See BZ 688000
        collection.addAll(replacementItems);

    } else if (value instanceof Map) {
        Map originalMap = (Map) value;
        HashMap<Object, Object> replaceMap = new HashMap<Object, Object>();
        for (Iterator i = originalMap.keySet().iterator(); i.hasNext();) {
            // get original key and value - these might be hibernate proxies
            Object originalKey = i.next();
            Object originalKeyValue = originalMap.get(originalKey);

            // replace with non-hibernate classes, if appropriate (will be null otherwise)
            Object replaceKey = replaceObject(originalKey);
            Object replaceValue = replaceObject(originalKeyValue);

            // if either original key or original value was a hibernate proxy object, we have to
            // remove it from the original map, and remember the replacement objects for later
            if (replaceKey != null || replaceValue != null) {
                Object newKey = (replaceKey != null) ? replaceKey : originalKey;
                Object newValue = (replaceValue != null) ? replaceValue : originalKeyValue;
                replaceMap.put(newKey, newValue);
                i.remove();
            }
        }

        // all hibernate proxies have been removed, we need to replace them with their
        // non-proxy object representations that we got from replaceObject() calls
        originalMap.putAll(replaceMap);

        // now go through each item in the map and null out their internal fields
        for (Object key : originalMap.keySet()) {
            nullOutUninitializedFields(originalMap.get(key), checkedObjectMap, checkedObjectCollisionMap,
                    depth + 1, serializationType);
            nullOutUninitializedFields(key, checkedObjectMap, checkedObjectCollisionMap, depth + 1,
                    serializationType);
        }

    } else if (value instanceof Enum) {
        // don't need to detach enums, treat them as special objects
        return;
    }

    if (serializationType == SerializationType.JAXB) {
        XmlAccessorType at = value.getClass().getAnnotation(XmlAccessorType.class);
        if (at != null && at.value() == XmlAccessType.FIELD) {
            nullOutFieldsByFieldAccess(value, checkedObjectMap, checkedObjectCollisionMap, depth,
                    serializationType);
        } else {
            nullOutFieldsByAccessors(value, checkedObjectMap, checkedObjectCollisionMap, depth,
                    serializationType);
        }
    } else if (serializationType == SerializationType.SERIALIZATION) {
        nullOutFieldsByFieldAccess(value, checkedObjectMap, checkedObjectCollisionMap, depth, serializationType);
    }
}
From source file:ch.flashcard.HibernateDetachUtility.java
/**
 * Recursively walks {@code value} and scrubs it so it can be serialized safely outside a
 * Hibernate session: proxies are replaced via {@code replaceObject}, and fields are
 * nulled/processed per the serialization type.
 *
 * @param value the object needing to be detached/scrubbed.
 * @param checkedObjectMap This maps identityHashCodes to Objects we've already detached. In that way we can
 * quickly determine if we've already done the work for the incoming value and avoid traversing it again. This
 * works well almost all of the time, but it is possible that two different objects can have the same identity hash
 * (conflicts are always possible with a hash). In that case we utilize the checkedObjectCollisionMap (see below).
 * @param checkedObjectCollisionMap checkedObjectMap maps the identityhash to the *first* object with that hash. In
 * most cases there will only be mapping for one hash, but it is possible to encounter the same hash for multiple
 * objects, especially on 32bit or IBM JVMs. It is important to know if an object has already been detached
 * because if it is somehow self-referencing, we have to stop the recursion. This map holds the 2nd..Nth mapping
 * for a single hash and is used to ensure we never try to detach an object already processed.
 * @param depth used to stop infinite recursion, defaults to a depth we don't expect to see, but it is configurable.
 * @param serializationType JAXB or plain Java serialization; selects the field-nulling strategy at the end
 * @throws Exception if a problem occurs
 * @throws IllegalStateException if the recursion depth limit is reached
 */
private static void nullOutUninitializedFields(Object value, Map<Integer, Object> checkedObjectMap,
        Map<Integer, List<Object>> checkedObjectCollisionMap, int depth, SerializationType serializationType)
        throws Exception {
    if (depth > depthAllowed) {
        // Depth limit hit: warn (and optionally throw) instead of recursing forever on a cyclic graph.
        String warningMessage = "Recursed too deep [" + depth + " > " + depthAllowed
                + "], will not attempt to detach object of type ["
                + ((value != null) ? value.getClass().getName() : "N/A")
                + "]. This may cause serialization errors later. "
                + "You can try to work around this by setting the system property [" + DEPTH_ALLOWED_SYSPROP
                + "] to a value higher than [" + depth + "] or you can set the system property ["
                + THROW_EXCEPTION_ON_DEPTH_LIMIT_SYSPROP + "] to 'false'";
        LOG.warn(warningMessage);
        if (throwExceptionOnDepthLimit) {
            throw new IllegalStateException(warningMessage);
        }
        return;
    }

    if (null == value) {
        return;
    }

    // System.identityHashCode is a hash code, and therefore not guaranteed to be unique. And we've seen this
    // be the case. So, we use it to try and avoid duplicating work, but handle the case when two objects may
    // have an identity crisis.
    Integer valueIdentity = hashCodeGenerator.getHashCode(value);
    Object checkedObject = checkedObjectMap.get(valueIdentity);

    if (null == checkedObject) {
        // if we have not yet encountered an object with this hash, store it in our map and start scrubbing
        checkedObjectMap.put(valueIdentity, value);

    } else if (value == checkedObject) {
        // if we have scrubbed this already, no more work to be done
        return;

    } else {
        // we have a situation where multiple objects have the same identity hashcode, work with our
        // collision map to decide whether it needs to be scrubbed and add if necessary.
        // Note that this code block is infrequently hit, it is by design that we've pushed the extra
        // work, map, etc, involved for this infrequent case into its own block. The standard cases must
        // be as fast and lean as possible.
        boolean alreadyDetached = false;
        List<Object> collisionObjects = checkedObjectCollisionMap.get(valueIdentity);

        if (null == collisionObjects) {
            // if this is the 2nd occurrence for this hash, create a new map entry
            collisionObjects = new ArrayList<Object>(1);
            checkedObjectCollisionMap.put(valueIdentity, collisionObjects);

        } else {
            // if we have scrubbed this already (identity equality), no more work to be done
            for (Object collisionObject : collisionObjects) {
                if (value == collisionObject) {
                    alreadyDetached = true;
                    break;
                }
            }
        }

        if (LOG.isDebugEnabled()) {
            StringBuilder message = new StringBuilder("\n\tIDENTITY HASHCODE COLLISION [hash=");
            message.append(valueIdentity);
            message.append(", alreadyDetached=");
            message.append(alreadyDetached);
            message.append("]");
            message.append("\n\tCurrent : ");
            message.append(value.getClass().getName());
            message.append("\n\t ");
            message.append(value);
            message.append("\n\tPrevious : ");
            message.append(checkedObject.getClass().getName());
            message.append("\n\t ");
            message.append(checkedObject);
            for (Object collisionObject : collisionObjects) {
                message.append("\n\tPrevious : ");
                message.append(collisionObject.getClass().getName());
                message.append("\n\t ");
                message.append(collisionObject);
            }
            LOG.debug(message);
        }

        // now that we've done our logging, if already detached we're done. Otherwise add to the
        // list of collision objects for this hash, and start scrubbing
        if (alreadyDetached) {
            return;
        }
        collisionObjects.add(value);
    }

    // Perform the detaching
    if (value instanceof Object[]) {
        Object[] objArray = (Object[]) value;
        for (int i = 0; i < objArray.length; i++) {
            Object listEntry = objArray[i];
            Object replaceEntry = replaceObject(listEntry);
            if (replaceEntry != null) {
                objArray[i] = replaceEntry;
            }
            nullOutUninitializedFields(objArray[i], checkedObjectMap, checkedObjectCollisionMap, depth + 1,
                    serializationType);
        }

    } else if (value instanceof List) {
        // Null out any entries in initialized collections
        ListIterator i = ((List) value).listIterator();
        while (i.hasNext()) {
            Object val = i.next();
            Object replace = replaceObject(val);
            if (replace != null) {
                val = replace;
                i.set(replace);
            }
            nullOutUninitializedFields(val, checkedObjectMap, checkedObjectCollisionMap, depth + 1,
                    serializationType);
        }

    } else if (value instanceof Collection) {
        // non-List collections can't be mutated in place while iterating, so batch the swaps
        Collection collection = (Collection) value;
        Collection itemsToBeReplaced = new ArrayList();
        Collection replacementItems = new ArrayList();
        for (Object item : collection) {
            Object replacementItem = replaceObject(item);
            if (replacementItem != null) {
                itemsToBeReplaced.add(item);
                replacementItems.add(replacementItem);
                item = replacementItem;
            }
            nullOutUninitializedFields(item, checkedObjectMap, checkedObjectCollisionMap, depth + 1,
                    serializationType);
        }
        collection.removeAll(itemsToBeReplaced);
        // watch out! if this collection is a Set, HashMap$MapSet doesn't support addAll. See BZ 688000
        collection.addAll(replacementItems);

    } else if (value instanceof Map) {
        Map originalMap = (Map) value;
        HashMap<Object, Object> replaceMap = new HashMap<Object, Object>();
        for (Iterator i = originalMap.keySet().iterator(); i.hasNext();) {
            // get original key and value - these might be hibernate proxies
            Object originalKey = i.next();
            Object originalKeyValue = originalMap.get(originalKey);

            // replace with non-hibernate classes, if appropriate (will be null otherwise)
            Object replaceKey = replaceObject(originalKey);
            Object replaceValue = replaceObject(originalKeyValue);

            // if either original key or original value was a hibernate proxy object, we have to
            // remove it from the original map, and remember the replacement objects for later
            if (replaceKey != null || replaceValue != null) {
                Object newKey = (replaceKey != null) ? replaceKey : originalKey;
                Object newValue = (replaceValue != null) ? replaceValue : originalKeyValue;
                replaceMap.put(newKey, newValue);
                i.remove();
            }
        }

        // all hibernate proxies have been removed, we need to replace them with their
        // non-proxy object representations that we got from replaceObject() calls
        originalMap.putAll(replaceMap);

        // now go through each item in the map and null out their internal fields
        for (Object key : originalMap.keySet()) {
            nullOutUninitializedFields(originalMap.get(key), checkedObjectMap, checkedObjectCollisionMap,
                    depth + 1, serializationType);
            nullOutUninitializedFields(key, checkedObjectMap, checkedObjectCollisionMap, depth + 1,
                    serializationType);
        }

    } else if (value instanceof Enum) {
        // don't need to detach enums, treat them as special objects
        return;
    }

    if (serializationType == SerializationType.JAXB) {
        XmlAccessorType at = (XmlAccessorType) value.getClass().getAnnotation(XmlAccessorType.class);
        if (at != null && at.value() == XmlAccessType.FIELD) {
            nullOutFieldsByFieldAccess(value, checkedObjectMap, checkedObjectCollisionMap, depth,
                    serializationType);
        } else {
            nullOutFieldsByAccessors(value, checkedObjectMap, checkedObjectCollisionMap, depth,
                    serializationType);
        }
    } else if (serializationType == SerializationType.SERIALIZATION) {
        nullOutFieldsByFieldAccess(value, checkedObjectMap, checkedObjectCollisionMap, depth, serializationType);
    }
}
From source file:org.codehaus.mojo.license.api.DefaultThirdPartyHelper.java
/** * {@inheritDoc}//from w ww . j a v a 2s. com */ @SuppressWarnings("unchecked") // project.getArtifacts() public SortedProperties createUnsafeMapping(LicenseMap licenseMap, File missingFile, boolean useRepositoryMissingFiles, SortedSet<MavenProject> unsafeDependencies, SortedMap<String, MavenProject> projectDependencies) throws ProjectBuildingException, IOException, ThirdPartyToolException { SortedProperties unsafeMappings = loadUnsafeMapping(licenseMap, missingFile, projectDependencies); if (CollectionUtils.isNotEmpty(unsafeDependencies)) { // there is some unresolved license if (useRepositoryMissingFiles) { // try to load missing third party files from dependencies Collection<MavenProject> projects = new ArrayList<MavenProject>(projectDependencies.values()); projects.remove(project); projects.removeAll(unsafeDependencies); SortedProperties resolvedUnsafeMapping = loadThirdPartyDescriptorForUnsafeMapping( project.getArtifacts(), unsafeDependencies, projects, licenseMap); // push back resolved unsafe mappings unsafeMappings.putAll(resolvedUnsafeMapping); } } return unsafeMappings; }
From source file:org.apache.james.mailrepository.file.FileMailRepository.java
@Override @PostConstruct/*from w w w. j a v a 2 s. co m*/ public void init() throws Exception { try { DefaultConfigurationBuilder reposConfiguration = new DefaultConfigurationBuilder(); reposConfiguration.addProperty("[@destinationURL]", destination); objectRepository = new FilePersistentObjectRepository(); objectRepository.setLog(getLogger()); objectRepository.setFileSystem(fileSystem); objectRepository.configure(reposConfiguration); objectRepository.init(); streamRepository = new FilePersistentStreamRepository(); streamRepository.setLog(getLogger()); streamRepository.setFileSystem(fileSystem); streamRepository.configure(reposConfiguration); streamRepository.init(); if (cacheKeys) keys = Collections.synchronizedSet(new HashSet<String>()); // Finds non-matching pairs and deletes the extra files HashSet<String> streamKeys = new HashSet<String>(); for (Iterator<String> i = streamRepository.list(); i.hasNext();) { streamKeys.add(i.next()); } HashSet<String> objectKeys = new HashSet<String>(); for (Iterator<String> i = objectRepository.list(); i.hasNext();) { objectKeys.add(i.next()); } @SuppressWarnings("unchecked") Collection<String> strandedStreams = (Collection<String>) streamKeys.clone(); strandedStreams.removeAll(objectKeys); for (Object strandedStream : strandedStreams) { String key = (String) strandedStream; remove(key); } @SuppressWarnings("unchecked") Collection<String> strandedObjects = (Collection<String>) objectKeys.clone(); strandedObjects.removeAll(streamKeys); for (Object strandedObject : strandedObjects) { String key = (String) strandedObject; remove(key); } if (keys != null) { // Next get a list from the object repository // and use that for the list of keys keys.clear(); for (Iterator<String> i = objectRepository.list(); i.hasNext();) { keys.add(i.next()); } } if (getLogger().isDebugEnabled()) { String logBuffer = getClass().getName() + " created in " + destination; getLogger().debug(logBuffer); } } catch (Exception e) { final String message = 
"Failed to retrieve Store component:" + e.getMessage(); getLogger().error(message, e); throw e; } }