List of usage examples for java.util.Collection.removeAll
boolean removeAll(Collection<?> c);
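Before the project snippets, a minimal self-contained sketch of the method's contract (the class name RemoveAllDemo is purely illustrative and not taken from any of the source files below): every element that is also present in the argument collection is removed, including duplicates, and the return value reports whether the receiving collection was modified.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;

public class RemoveAllDemo {
    public static void main(String[] args) {
        Collection<String> letters = new ArrayList<>(Arrays.asList("a", "b", "a", "c"));
        Collection<String> toRemove = Arrays.asList("a", "x");

        // removeAll removes every occurrence of each element that is also in toRemove
        // and reports whether the receiving collection changed as a result
        boolean changed = letters.removeAll(toRemove);

        System.out.println(changed); // true: the collection was modified
        System.out.println(letters); // [b, c]: both "a" entries are gone; "x" was simply not present
    }
}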
From source file:com.google.gwt.emultest.java.util.ListTestBase.java
public void testRemoveAllDuplicates() {
    Collection c = makeCollection();
    c.add("a");
    c.add("a");
    Collection d = makeCollection();
    d.add("a");
    assertTrue(c.removeAll(d));
    assertEquals(0, c.size());
}
From source file:com.haulmont.cuba.core.jmx.FileStorage.java
@Override
public String findOrphanFiles() {
    FileStorageAPI fileStorageAPI = AppBeans.get(FileStorageAPI.class);
    if (!(fileStorageAPI instanceof com.haulmont.cuba.core.app.filestorage.FileStorage)) {
        return "<not supported>";
    }

    File[] roots = getStorageRoots();
    if (roots.length == 0)
        return "No storage directories defined";

    StringBuilder sb = new StringBuilder();
    File storageFolder = roots[0];
    if (!storageFolder.exists())
        return ExceptionUtils.getStackTrace(new FileStorageException(
                FileStorageException.Type.FILE_NOT_FOUND, storageFolder.getAbsolutePath()));

    @SuppressWarnings("unchecked")
    Collection<File> systemFiles = FileUtils.listFiles(storageFolder, null, true);
    @SuppressWarnings("unchecked")
    Collection<File> filesInRootFolder = FileUtils.listFiles(storageFolder, null, false);

    // remove files of root storage folder (e.g. storage.log) from files collection
    systemFiles.removeAll(filesInRootFolder);

    List<FileDescriptor> fileDescriptors;
    Transaction tx = persistence.createTransaction();
    try {
        EntityManager em = persistence.getEntityManager();
        TypedQuery<FileDescriptor> query =
                em.createQuery("select fd from sys$FileDescriptor fd", FileDescriptor.class);
        fileDescriptors = query.getResultList();
        tx.commit();
    } catch (Exception e) {
        return ExceptionUtils.getStackTrace(e);
    } finally {
        tx.end();
    }

    Set<String> descriptorsFileNames = new HashSet<>();
    for (FileDescriptor fileDescriptor : fileDescriptors) {
        descriptorsFileNames
                .add(com.haulmont.cuba.core.app.filestorage.FileStorage.getFileName(fileDescriptor));
    }

    for (File file : systemFiles) {
        if (!descriptorsFileNames.contains(file.getName()))
            // Encode file path if it contains non-ASCII characters
            if (!file.getPath().matches("\\p{ASCII}+")) {
                String encodedFilePath = URLEncodeUtils.encodeUtf8(file.getPath());
                sb.append(encodedFilePath).append("\n");
            } else {
                sb.append(file.getPath()).append("\n");
            }
    }

    return sb.toString();
}
From source file:org.etudes.component.app.melete.SpecialAccessServiceImpl.java
public void insertSpecialAccess(List saList, SpecialAccessObjService sa, ModuleObjService mod) throws Exception {
    if (moduleDatesDiffer(sa, mod) == true) {
        // If the module has no other accesses, no checking is needed
        if ((saList == null) || saList.size() == 0) {
            setSpecialAccess(sa);
        } else {
            // Iterate through each access
            for (ListIterator i = saList.listIterator(); i.hasNext();) {
                SpecialAccess saObj = (SpecialAccess) i.next();
                // Perform checks on accesses that aren't the current one
                // sa is the current object
                if (saObj != sa) {
                    String[] userIds = SqlHelper.decodeStringArray(saObj.getUsers());
                    if (userIds.length > 0) {
                        String[] targetUserIds = SqlHelper.decodeStringArray(sa.getUsers());
                        if (targetUserIds.length > 0) {
                            Collection userIdsColl = new ArrayList(Arrays.asList(userIds));
                            Collection targetUserIdsColl = new ArrayList(Arrays.asList(targetUserIds));
                            // Remove current (target) users from this special access's user list
                            userIdsColl.removeAll(targetUserIdsColl);
                            if (userIdsColl != null) {
                                userIds = (String[]) userIdsColl.toArray(new String[userIdsColl.size()]);
                            }
                            // If there are still userids remaining, update the special access
                            // Otherwise, delete the special access
                            if (userIds.length > 0) {
                                saObj.setUsers(SqlHelper.encodeStringArray(userIds));
                                setSpecialAccess(saObj);
                            } else {
                                // delete access
                                List delList = new ArrayList();
                                delList.add(saObj.getAccessId());
                                deleteSpecialAccess(delList);
                            }
                        }
                    }
                }
            }
            // Finally, insert or update the current special access
            setSpecialAccess(sa);
        }
    }
    return;
}
From source file:de.rrze.idmone.utils.jidgen.cli.IdGenOptions.java
/**
 * Fill the internal variable data by parsing a given
 * array of command line options.
 *
 * @param args
 *            the String array containing all command line options
 * @return the data collection
 * @throws ParseException
 */
public HashMap<String, String> parse(String[] args) throws ParseException {
    // get a list of all stored option objects to be processed
    // excluding all dummy options
    Collection<IdGenOption> options = new HashSet<IdGenOption>();
    options.addAll(this.getOptions());
    options.removeAll(this.dummyOptions.values());
    Iterator<IdGenOption> iter = options.iterator();

    // init the parser
    BasicParser parser = new BasicParser();
    CommandLine commandLine = parser.parse(this, args);

    // iterate over all possible options
    while (iter.hasNext()) {
        IdGenOption currentOption = iter.next();

        //logger.trace("Processing option \"" + currentOption.getShortOpt() + "\"");

        if (commandLine.hasOption(currentOption.getShortOpt())) {
            // option was specified
            String value = commandLine.getOptionValue(currentOption.getShortOpt());
            if (value != null) {
                // option has a specified value
                this.data.put(currentOption.getShortOpt(), value);
                logger.info(currentOption.getShortOpt() + " = " + value);
            } else if (currentOption.hasArg()) {
                // option does NOT have a specified value
                logger.error(currentOption.getShortOpt() + " "
                        + Messages.getString("IdGenOptions.MISSING_ARGUMENT"));
                System.out.println(this.getHelp(false));
                System.exit(170);
            } else {
                // at least put an entry with an empty string in the data array
                // to mark that the option was specified
                this.data.put(currentOption.getShortOpt(), "");
            }
        } else {
            // option was NOT specified, so use default if available
            if (currentOption.hasDefaultValue()) {
                // has default
                logger.info(currentOption.getShortOpt() + " "
                        + Messages.getString("IdGenOptions.USING_DEFAULT") + " "
                        + currentOption.getDefaultValue());
                this.data.put(currentOption.getShortOpt(), currentOption.getDefaultValue());
            }
        }
    }

    return this.data;
}
From source file:org.sipfoundry.sipxconfig.phone.PhoneContextImpl.java
private Collection<PhonebookEntry> filterPhonebookEntries(Collection<PhonebookEntry> entries) {
    Collection entriesToRemove = select(entries, new InvalidGoogleEntrySearchPredicate());
    entries.removeAll(entriesToRemove);
    return entries;
}
From source file:dmh.kuebiko.view.ImageManagerTest.java
/**
 * Retrieve a list of all known image ID strings of a particular size.
 * @param size The size of the requested images.
 * @return A list of valid image IDs.
 */
private Collection<String> getImageIds(ImageSize size) throws URISyntaxException {
    String path = String.format("images/%s/", size.toString().toLowerCase());
    File imageDir = new File(getClass().getResource(path).toURI());
    Collection<String> imageIds = Lists.newArrayList(
            Collections2.transform(Arrays.asList(imageDir.list()), new Function<String, String>() {
                @Override
                public String apply(String input) {
                    return "png".equals(FilenameUtils.getExtension(input))
                            ? FilenameUtils.removeExtension(input)
                            : "";
                }
            }));
    imageIds.removeAll(Collections.singleton(""));
    return imageIds;
}
From source file:org.springframework.integration.x.kafka.KafkaPartitionAllocator.java
@Override
public synchronized Partition[] getObject() throws Exception {
    if (log.isDebugEnabled()) {
        log.debug("Module name is " + moduleName);
        log.debug("Stream name is " + streamName);
        log.debug("Cardinality is " + count);
        log.debug("Sequence is " + sequence);
        log.debug("Client is " + client);
    }
    if (partitions == null) {
        if (STARTED.equals(client.getState())) {
            try {
                partitionDataMutex.acquire();
                byte[] partitionData = client.getData().forPath(partitionDataPath);
                if (partitionData == null || partitionData.length == 0) {
                    Collection<Partition> existingPartitions = connectionFactory.getPartitions(topic);
                    Collection<Partition> listenedPartitions = !StringUtils.hasText(partitionList)
                            ? existingPartitions
                            : Arrays.asList(toPartitions(parseNumberList(partitionList)));
                    if (existingPartitions != listenedPartitions
                            && !existingPartitions.containsAll(listenedPartitions)) {
                        Collection<Partition> partitionsNotFound = new ArrayList<Partition>(listenedPartitions);
                        partitionsNotFound.removeAll(existingPartitions);
                        throw new BeanInitializationException(
                                "Configuration contains partitions that do not exist on the topic"
                                        + " or have unavailable leaders: "
                                        + StringUtils.collectionToCommaDelimitedString(partitionsNotFound));
                    }
                    final Map<Partition, BrokerAddress> leaders = connectionFactory.getLeaders(listenedPartitions);
                    ArrayList<Partition> sortedPartitions = new ArrayList<Partition>(listenedPartitions);
                    Collections.sort(sortedPartitions, new Comparator<Partition>() {
                        @Override
                        public int compare(Partition o1, Partition o2) {
                            int i = leaders.get(o1).toString().compareTo(leaders.get(o2).toString());
                            if (i != 0) {
                                return i;
                            }
                            else
                                return o1.getId() - o2.getId();
                        }
                    });
                    if (log.isDebugEnabled()) {
                        log.debug("Partitions: " + StringUtils.collectionToCommaDelimitedString(sortedPartitions));
                    }
                    // calculate the minimum size of a partition group.
                    int minimumSize = sortedPartitions.size() / count;
                    int remainder = sortedPartitions.size() % count;
                    List<List<Integer>> partitionGroups = new ArrayList<List<Integer>>();
                    int cursor = 0;
                    for (int i = 0; i < count; i++) {
                        // first partitions will get an extra element
                        int partitionGroupSize = i < remainder ? minimumSize + 1 : minimumSize;
                        ArrayList<Integer> partitionGroup = new ArrayList<Integer>();
                        for (int j = 0; j < partitionGroupSize; j++) {
                            partitionGroup.add(sortedPartitions.get(cursor++).getId());
                        }
                        if (log.isDebugEnabled()) {
                            log.debug("Partitions for " + (i + 1) + " : "
                                    + StringUtils.collectionToCommaDelimitedString(partitionGroup));
                        }
                        partitionGroups.add(partitionGroup);
                    }
                    byte[] dataAsBytes = objectMapper.writer().writeValueAsBytes(partitionGroups);
                    if (log.isDebugEnabled()) {
                        log.debug(new String(dataAsBytes));
                    }
                    client.setData().forPath(partitionDataPath, dataAsBytes);
                    // the partition mapping is stored 0-based but sequence/count are 1-based
                    if (log.isDebugEnabled()) {
                        log.debug("Listening to: " + StringUtils
                                .collectionToCommaDelimitedString(partitionGroups.get(sequence - 1)));
                    }
                    partitions = toPartitions(partitionGroups.get(sequence - 1));
                }
                else {
                    if (log.isDebugEnabled()) {
                        log.debug(new String(partitionData));
                    }
                    @SuppressWarnings("unchecked")
                    List<List<Integer>> partitionGroups = objectMapper.reader(List.class).readValue(partitionData);
                    // the partition mapping is stored 0-based but sequence/count are 1-based
                    if (log.isDebugEnabled()) {
                        log.debug("Listening to: " + StringUtils
                                .collectionToCommaDelimitedString(partitionGroups.get(sequence - 1)));
                    }
                    partitions = toPartitions(partitionGroups.get(sequence - 1));
                }
                return partitions;
            }
            finally {
                if (partitionDataMutex.isAcquiredInThisProcess()) {
                    partitionDataMutex.release();
                }
            }
        }
        else {
            throw new BeanInitializationException(
                    "Cannot connect to ZooKeeper, client state is " + client.getState());
        }
    }
    else {
        return partitions;
    }
}
From source file:org.phenotips.data.permissions.internal.DefaultPatientAccessHelper.java
@Override
public AccessLevel getAccessLevel(Patient patient, EntityReference user) {
    AccessLevel result = this.manager.resolveAccessLevel("none");
    if (patient == null || user == null) {
        return result;
    }
    try {
        EntityReference owner = getOwner(patient).getUser();
        Collection<Collaborator> collaborators = getCollaborators(patient);
        Set<DocumentReference> processedEntities = new HashSet<DocumentReference>();
        Queue<DocumentReference> entitiesToCheck = new LinkedList<DocumentReference>();
        entitiesToCheck.add((DocumentReference) user);
        AccessLevel currentItemAccess = null;
        DocumentReference currentItem;
        XWikiContext context = getXWikiContext();
        XWikiGroupService groupService = context.getWiki().getGroupService(context);
        while (!entitiesToCheck.isEmpty()) {
            currentItem = entitiesToCheck.poll();
            currentItemAccess = getAccessLevel(currentItem, owner, collaborators);
            if (currentItemAccess.compareTo(result) > 0) {
                result = currentItemAccess;
            }
            processedEntities.add(currentItem);
            Collection<DocumentReference> groups =
                groupService.getAllGroupsReferencesForMember(currentItem, 0, 0, context);
            groups.removeAll(processedEntities);
            entitiesToCheck.addAll(groups);
        }
    } catch (XWikiException ex) {
        this.logger.warn("Failed to compute access level for [{}] on [{}]: {}", user, patient.getId(),
            ex.getMessage());
    }
    return result;
}
From source file:de.huberlin.wbi.hiway.scheduler.Scheduler.java
public void updateRuntimeEstimates(String runId) {
    System.out.println("Updating Runtime Estimates.");

    System.out.println("HiwayDB: Querying Host Names from database.");
    Collection<String> newHostIds = dbInterface.getHostNames();
    System.out.println("HiwayDB: Retrieved Host Names " + newHostIds.toString() + " from database.");
    newHostIds.removeAll(getNodeIds());
    for (String newHostId : newHostIds) {
        newHost(newHostId);
    }

    System.out.println("HiwayDB: Querying Task Ids for workflow " + workflowName + " from database.");
    Collection<Long> newTaskIds = dbInterface.getTaskIdsForWorkflow(workflowName);
    System.out.println("HiwayDB: Retrieved Task Ids " + newTaskIds.toString() + " from database.");
    newTaskIds.removeAll(getTaskIds());
    for (long newTaskId : newTaskIds) {
        newTask(newTaskId);
    }

    for (String hostName : getNodeIds()) {
        long oldMaxTimestamp = maxTimestampPerHost.get(hostName);
        long newMaxTimestamp = oldMaxTimestamp;
        for (long taskId : getTaskIds()) {
            System.out.println("HiwayDB: Querying InvocStats for task id " + taskId + " on host " + hostName
                    + " since timestamp " + oldMaxTimestamp + " from database.");
            Collection<InvocStat> invocStats = dbInterface.getLogEntriesForTaskOnHostSince(taskId, hostName,
                    oldMaxTimestamp);
            System.out.println("HiwayDB: Retrieved InvocStats " + invocStats.toString() + " from database.");
            for (InvocStat stat : invocStats) {
                newMaxTimestamp = Math.max(newMaxTimestamp, stat.getTimestamp());
                updateRuntimeEstimate(stat);
                if (!runId.equals(stat.getRunId())) {
                    numberOfPreviousRunTasks++;
                    numberOfFinishedTasks++;
                }
            }
        }
        maxTimestampPerHost.put(hostName, newMaxTimestamp);
    }
}
From source file:org.apache.cayenne.access.DbLoader.java
/**
 * Flattens many-to-many relationships in the generated model.
 */
public static void flattenManyToManyRelationships(DataMap map, Collection<ObjEntity> loadedObjEntities,
        ObjectNameGenerator objectNameGenerator) {
    if (loadedObjEntities.isEmpty()) {
        return;
    }
    Collection<ObjEntity> entitiesForDelete = new LinkedList<ObjEntity>();

    for (ObjEntity curEntity : loadedObjEntities) {
        ManyToManyCandidateEntity entity = ManyToManyCandidateEntity.build(curEntity);
        if (entity != null) {
            entity.optimizeRelationships(objectNameGenerator);
            entitiesForDelete.add(curEntity);
        }
    }

    // remove needed entities
    for (ObjEntity curDeleteEntity : entitiesForDelete) {
        map.removeObjEntity(curDeleteEntity.getName(), true);
    }
    loadedObjEntities.removeAll(entitiesForDelete);
}