List of usage examples for java.util.Collection.remove(Object)
boolean remove(Object o);
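Before the real-world examples below, a minimal illustration of the contract: remove(Object) deletes a single occurrence of an element equal (by equals()) to the argument and returns true only if the collection changed. The class and element values here are just placeholders.

import java.util.ArrayList;
import java.util.Collection;

public class CollectionRemoveBasics {
    public static void main(String[] args) {
        Collection<String> names = new ArrayList<>();
        names.add("alice");
        names.add("bob");
        names.add("bob");

        // Removes a single occurrence matched via equals(), returning true.
        System.out.println(names.remove("bob"));   // true
        // The second occurrence is still present.
        System.out.println(names);                 // [alice, bob]
        // Removing an absent element leaves the collection unchanged.
        System.out.println(names.remove("carol")); // false
    }
}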
From source file: com.nextep.designer.synch.services.impl.ReverseSynchronizationService.java

/**
 * Performs the import of the external element as a checkout of the corresponding view element.
 * The element is passed as a {@link IComparisonItem} instance.
 *
 * @param item comparison between the external and the view element
 * @param context synchronization context used for checking out the view element
 */
private void performUpdate(final IComparisonItem item, IReverseSynchronizationContext context) {
    final IVersionable<?> reposObject = (IVersionable<?>) item.getTarget();
    IVersionable<?> dbObject = (IVersionable<?>) item.getSource();
    // Merging
    // Following condition is true if we should merge source and target
    // according to user selection
    if (!MergeUtils.isSelected(item, ComparedElement.SOURCE)) {
        IMerger m = MergerFactory.getMerger(dbObject.getType(), ComparisonScope.DB_TO_REPOSITORY);
        dbObject = (IVersionable<?>) m.buildMergedObject(item, getVersioningService().getCurrentActivity());
        final Collection<IVersionable<?>> toImport = context.getVersionablesToImport();
        toImport.remove(item.getSource());
        toImport.add(dbObject);
    }
    // Final dependency replacement pass
    SynchronizationHelper.replaceDependency(dbObject, context.getSourceReferenceMapping());
    // Checking out object
    reposObject.setVersionPolicy(new CheckOutInExistingObjectVersionPolicy(dbObject));
    getVersioningService().checkOut(new NullProgressMonitor(), reposObject);
    reposObject.setVersionPolicy(DefaultVersionPolicy.getInstance());
    // Specific data processing
    final ISynchronizationResult result = context.getSynchronizationResult();
    if (result != null && result.isDataSynchronization()) {
        final IDataSet dbSet = (IDataSet) dbObject.getVersionnedObject().getModel();
        final IDataSet repoSet = (IDataSet) reposObject.getVersionnedObject().getModel();
        // We need to align current row id from repository to ensure new lines will be properly
        // appended with a brand new rowid
        dbSet.setCurrentRowId(repoSet.getCurrentRowId());
        // We tag the version
        final IVersionInfo version = dbObject.getVersion();
        version.setVersionTag(VersionHelper.computeVersion(version));
        if (item instanceof DataSetComparisonItem) {
            IDataDelta delta = ((DataSetComparisonItem) item).getDataDelta();
            dataService.saveDataDeltaToRepository(dbSet, delta, new NullProgressMonitor());
            // Since we import the dataset in the workspace, we need to make it a regular
            // repository dataset, we do this by emptying the handle which will force neXtep
            // to refetch it from the repository
            dbSet.setStorageHandle(null);
        }
    }
}
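The remove() call here is part of a swap: toImport.remove(item.getSource()) drops the un-merged source so toImport.add(dbObject) can put the merged copy in its place. A minimal, self-contained sketch of that swap pattern, using placeholder element values:

import java.util.ArrayList;
import java.util.Collection;

public class ReplaceInCollection {
    public static void main(String[] args) {
        Collection<String> toImport = new ArrayList<>();
        toImport.add("raw-element");

        // Swap the original element for its processed replacement.
        // remove(Object) matches by equals(), not by identity.
        String merged = "merged-element";
        boolean removed = toImport.remove("raw-element");
        if (removed) {
            toImport.add(merged);
        }
        System.out.println(toImport); // [merged-element]
    }
}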
From source file: de.exxeta.vortraege.mvc.controller.HomeController.java

/**
 * Simply selects the home view to render by returning its name.
 */
@RequestMapping(value = "/")
public String home(Model model, @RequestParam(required = false) String startTwitter,
        @RequestParam(required = false) String stopTwitter, @RequestParam(required = false) String retweet,
        @RequestParam(required = false) String tweetId) {
    if (startTwitter != null) {
        twitterService.startTwitterAdapter();
        return "redirect:/";
    }
    if (stopTwitter != null) {
        twitterService.stopTwitterAdapter();
        return "redirect:/";
    }
    Collection<TwitterMessage> twitterMessages = twitterService.getTwitterMessages();
    LOG.info("Retrieved {} Twitter messages.", twitterMessages.size());
    model.addAttribute("twitterMessages", twitterMessages);
    if (retweet != null && tweetId != null) {
        TwitterMessage message = getTwitterMessage(twitterMessages, Long.valueOf(tweetId));
        if (null != message) {
            // Start process
            LOG.info("Start twitter process for tweet id {}.", tweetId);
            processService.start(message);
            // Remove message from collection
            twitterMessages.remove(message);
            model.addAttribute("twitterMessages", twitterMessages);
        } else {
            LOG.error("No tweet found for id {}.", tweetId);
        }
        return "redirect:/";
    }
    return "home";
}
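Here the element is first located by id, then removed from the live collection only if the lookup succeeded. A small sketch of that lookup-then-remove step; the Message type and findById helper are stand-ins, not part of the original controller:

import java.util.ArrayList;
import java.util.Collection;

public class RemoveByLookup {
    record Message(long id, String text) {}

    static Message findById(Collection<Message> messages, long id) {
        return messages.stream().filter(m -> m.id() == id).findFirst().orElse(null);
    }

    public static void main(String[] args) {
        Collection<Message> messages = new ArrayList<>();
        messages.add(new Message(1L, "hello"));
        messages.add(new Message(2L, "world"));

        Message match = findById(messages, 2L);
        if (match != null) {
            // Only remove an element we actually found; remove(null) would
            // simply return false, but the null check keeps intent explicit.
            messages.remove(match);
        }
        System.out.println(messages.size()); // 1
    }
}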
From source file: com.nextep.datadesigner.sqlgen.impl.SQLGenerator.java

public static void processPreconditions(IGenerationResult result, List<IGenerationResult> resolvedResults,
        Map<DatabaseReference, IGenerationResult> refMap, Collection<IGenerationResult> stack) {
    if (result.getPreconditions().isEmpty()) {
        resolvedResults.add(result);
    } else {
        // We have a deadloop here, so we return
        if (stack.contains(result)) {
            LOGGER.warn("Circular dependencies found, generation order may not be accurate.");
            return;
        }
        stack.add(result);
        for (DatabaseReference ref : result.getPreconditions()) {
            IGenerationResult precondResult = refMap.get(ref);
            if (!resolvedResults.contains(precondResult) && precondResult != null && precondResult != result) {
                try {
                    processPreconditions(precondResult, resolvedResults, refMap, stack);
                } catch (StackOverflowError e) {
                    LOGGER.info(result.getName());
                    throw e;
                }
            }
        }
        stack.remove(result);
        resolvedResults.add(result);
    }
}
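The stack collection is used for cycle detection during the recursive resolution: a node is added on entry and removed once its preconditions are resolved. A standalone sketch of that add-on-entry / remove-on-exit idiom, with hypothetical node and dependency types:

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

public class DependencyOrder {
    // Resolve a node's dependencies first; inProgress tracks the current
    // recursion path so a revisit signals a circular dependency.
    static void resolve(String node, Map<String, List<String>> deps,
            List<String> resolved, Collection<String> inProgress) {
        if (resolved.contains(node)) {
            return;
        }
        if (inProgress.contains(node)) {
            System.out.println("Cycle detected at " + node);
            return;
        }
        inProgress.add(node);
        for (String dep : deps.getOrDefault(node, List.of())) {
            resolve(dep, deps, resolved, inProgress);
        }
        inProgress.remove(node); // done with this node's subtree
        resolved.add(node);
    }

    public static void main(String[] args) {
        Map<String, List<String>> deps = Map.of(
                "table", List.of("schema"),
                "index", List.of("table"));
        List<String> resolved = new ArrayList<>();
        resolve("index", deps, resolved, new HashSet<>());
        System.out.println(resolved); // [schema, table, index]
    }
}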
From source file: org.apache.usergrid.tools.DupOrgRepair.java

@Override
public void runTool(CommandLine line) throws Exception {
    String outputDir = line.getOptionValue("output");
    createDir(outputDir);
    startSpring();
    logger.info("Starting crawl of all admins");
    EntityManager em = emf.getEntityManager(CassandraService.MANAGEMENT_APPLICATION_ID);
    Application app = em.getApplication();
    // search for all orgs
    Query query = new Query();
    query.setLimit(PAGE_SIZE);
    Results r = null;
    Multimap<String, UUID> orgs = HashMultimap.create();
    do {
        r = em.searchCollection(app, "groups", query);
        for (Entity entity : r.getEntities()) {
            String name = entity.getProperty("path").toString().toLowerCase();
            orgs.put(name, entity.getUuid());
        }
        query.setCursor(r.getCursor());
        logger.info("Searching next page");
    } while (r != null && r.size() == PAGE_SIZE);
    // now go through and print out duplicate emails
    for (String name : orgs.keySet()) {
        Collection<UUID> ids = orgs.get(name);
        if (ids.size() > 1) {
            logger.warn("Found multiple orgs with the name {}", name);
            // look this up the same way the REST tier does. This way we will always
            // map the same way and the user will not notice a background merge
            OrganizationInfo orgInfo = managementService.getOrganizationByName(name);
            UUID targetOrgId = orgInfo.getUuid();
            ids.remove(targetOrgId);
            for (UUID sourceId : ids) {
                mergeOrganizations(outputDir, sourceId, targetOrgId);
            }
        }
    }
    logger.info("Merge complete");
}
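The pattern here is "remove the canonical element from the duplicate group, then process whatever remains": ids.remove(targetOrgId) ensures the merge loop only ever sees the non-canonical duplicates. A minimal sketch with made-up ids:

import java.util.Collection;
import java.util.HashSet;
import java.util.UUID;

public class MergeDuplicates {
    public static void main(String[] args) {
        // A group of ids that all map to the same name; one of them is the
        // canonical target and the others should be merged into it.
        Collection<UUID> ids = new HashSet<>();
        UUID target = UUID.randomUUID();
        ids.add(target);
        ids.add(UUID.randomUUID());
        ids.add(UUID.randomUUID());

        // Drop the canonical id so the loop below only sees the duplicates.
        ids.remove(target);
        for (UUID source : ids) {
            System.out.println("merge " + source + " into " + target);
        }
    }
}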
From source file: ubic.gemma.core.datastructure.matrix.ExpressionDataMatrixColumnSort.java

/**
 * @return list of factors, sorted from simplest (fewest number of values from the biomaterials passed in) to
 *         least simple. Continuous factors will always be first, and batch factors last.
 */
private static List<ExperimentalFactor> orderFactorsByExperimentalDesign(List<BioMaterial> start,
        Collection<ExperimentalFactor> factors) {
    if (factors == null || factors.isEmpty()) {
        ExpressionDataMatrixColumnSort.log.warn("No factors supplied for sorting");
        return new LinkedList<>();
    }
    LinkedList<ExperimentalFactor> sortedFactors = new LinkedList<>();
    Collection<ExperimentalFactor> factorsToTake = new HashSet<>(factors);
    while (!factorsToTake.isEmpty()) {
        ExperimentalFactor simplest = ExpressionDataMatrixColumnSort.chooseSimplestFactor(start, factorsToTake);
        if (simplest == null) {
            // none of the factors have more than one factor value. One-sided t-tests ...
            /*
             * This assertion isn't right -- we now allow this, though we can only have ONE such constant factor.
             * See bug 2390. Unless we are dealing with a subset, in which case there can be any number of
             * constant factors within the subset.
             */
            // assert factors.size() == 1 :
            // "It's possible to have just one factor value, but only if there is only one factor.";
            sortedFactors.addAll(factors);
            return sortedFactors;
        }
        sortedFactors.addLast(simplest);
        factorsToTake.remove(simplest);
    }
    return sortedFactors;
}
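The loop is a selection-style ordering: repeatedly pick the "simplest" element from a working copy, append it to the result, and remove it from the pool so the loop terminates. A small sketch of that idiom, substituting shortest-string for whatever chooseSimplestFactor actually measures:

import java.util.Collection;
import java.util.Comparator;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;

public class SelectAndRemove {
    public static void main(String[] args) {
        // Repeatedly pick the "simplest" remaining element (here: shortest
        // string), append it to the result, and remove it from the pool.
        Collection<String> pool = new HashSet<>(List.of("batch", "dose", "sex", "treatment"));
        LinkedList<String> sorted = new LinkedList<>();
        while (!pool.isEmpty()) {
            String simplest = pool.stream().min(Comparator.comparingInt(String::length)).orElseThrow();
            sorted.addLast(simplest);
            pool.remove(simplest); // shrink the pool so the loop terminates
        }
        System.out.println(sorted); // [sex, dose, batch, treatment]
    }
}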
From source file: org.squashtest.tm.service.internal.repository.hibernate.TestCaseDaoImpl.java

@Override
public List<NamedReferencePair> findTestCaseCallsDownstream(final Collection<Long> testCaseIds) {
    // get the node pairs when a caller/called pair was found.
    List<NamedReferencePair> result = findTestCaseCallsDetails(testCaseIds,
            "testCase.findTestCasesHavingCallStepsDetails");
    // now we must also add dummy Object[] for the test case ids that hadn't any caller
    Collection<Long> remainingIds = new HashSet<>(testCaseIds);
    for (NamedReferencePair pair : result) {
        remainingIds.remove(pair.getCaller().getId());
    }
    List<NamedReference> noncalledReferences = findTestCaseDetails(remainingIds);
    for (NamedReference ref : noncalledReferences) {
        result.add(new NamedReferencePair(ref.getId(), ref.getName(), null, null));
    }
    return result;
}
From source file: org.squashtest.tm.service.internal.repository.hibernate.TestCaseDaoImpl.java

@Override
/*
 * implementation note : the following query could not use a right outer join. So we'll do the job manually.
 * Hence the weird things done below.
 */
public List<NamedReferencePair> findTestCaseCallsUpstream(final Collection<Long> testCaseIds) {
    // get the node pairs when a caller/called pair was found.
    List<NamedReferencePair> result = findTestCaseCallsDetails(testCaseIds,
            "testCase.findTestCasesHavingCallerDetails");
    // now we must also add dummy Object[] for the test case ids that hadn't any caller
    Collection<Long> remainingIds = new HashSet<>(testCaseIds);
    for (NamedReferencePair pair : result) {
        remainingIds.remove(pair.getCalled().getId());
    }
    List<NamedReference> noncalledReferences = findTestCaseDetails(remainingIds);
    for (NamedReference ref : noncalledReferences) {
        result.add(new NamedReferencePair(null, null, ref.getId(), ref.getName()));
    }
    return result;
}
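Both of these DAO methods compute "ids that had no match" the same way: copy the input into a HashSet, then remove every id that appeared in a result pair, leaving the remainder. A standalone sketch of that set-difference-by-remove idiom, with made-up id values:

import java.util.Collection;
import java.util.HashSet;
import java.util.List;

public class RemainingIds {
    public static void main(String[] args) {
        Collection<Long> requestedIds = List.of(1L, 2L, 3L, 4L);
        List<Long> matchedIds = List.of(2L, 4L);

        // Copy first: the incoming collection may be immutable, and the
        // caller's data must not be mutated in any case.
        Collection<Long> remainingIds = new HashSet<>(requestedIds);
        for (Long matched : matchedIds) {
            remainingIds.remove(matched);
        }
        System.out.println(remainingIds); // e.g. [1, 3]
    }
}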
From source file: com.github.tteofili.looseen.yay.SGM.java

static double evaluate(SGM network) throws Exception {
    double cc = 0;
    double wc = 0;
    int window = network.configuration.window;
    List<String> vocabulary = network.getVocabulary();
    Collection<Integer> exps = new LinkedList<>();
    Collection<Integer> acts = new LinkedList<>();
    for (Sample sample : network.samples) {
        double[] inputs = sample.getInputs();
        int j = 0;
        for (int i = 0; i < window - 1; i++) {
            int le = inputs.length;
            int actualMax = getMaxIndex(network.predictOutput(inputs), j, j + le - 1);
            int expectedMax = getMaxIndex(sample.getOutputs(), j, j + le - 1);
            exps.add(expectedMax % le);
            acts.add(actualMax % le);
            j += le;
        }
        boolean c = true;
        for (Integer e : exps) {
            c &= acts.remove(e);
        }
        if (c) {
            cc++;
            String x = vocabulary.get(getMaxIndex(inputs, 0, inputs.length));
            StringBuilder y = new StringBuilder();
            for (int e : exps) {
                if (y.length() > 0) {
                    y.append(" ");
                }
                y.append(vocabulary.get(e));
            }
            System.out.println("matched : " + x + " -> " + y);
        } else {
            wc++;
        }
        acts.clear();
        exps.clear();
        if (cc + wc > 2000) {
            break;
        }
    }
    return (cc / (wc + cc));
}
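This is the one example in the list that uses the boolean return value of remove(Object): c &= acts.remove(e) checks that every expected value can be matched against, and consumed from, the list of actual values, which amounts to a multiset-containment test. A small sketch of that idiom in isolation:

import java.util.Collection;
import java.util.LinkedList;
import java.util.List;

public class MultisetContainment {
    // True if every element of 'expected' can be matched one-for-one against
    // 'actual'; remove(Object) returns false once a value runs out, and it
    // consumes the matched occurrence so duplicates are counted correctly.
    static boolean containsAll(Collection<Integer> actual, Collection<Integer> expected) {
        Collection<Integer> working = new LinkedList<>(actual);
        boolean ok = true;
        for (Integer e : expected) {
            ok &= working.remove(e);
        }
        return ok;
    }

    public static void main(String[] args) {
        System.out.println(containsAll(List.of(1, 2, 2, 3), List.of(2, 2))); // true
        System.out.println(containsAll(List.of(1, 2, 3), List.of(2, 2)));    // false
    }
}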
From source file: nl.strohalm.cyclos.services.access.AccessServiceSecurity.java

@Override
public List<Session> searchSessions(final SessionQuery query) {
    if (LoggedUser.isAdministrator()) {
        Collection<Nature> natures = query.getNatures();
        if (CollectionUtils.isEmpty(natures)) {
            // As usual, empty means all. We want to ensure one-by-one, so we add them here
            natures = EnumSet.allOf(Nature.class);
        }
        if (!permissionService.hasPermission(AdminSystemPermission.STATUS_VIEW_CONNECTED_ADMINS)) {
            natures.remove(Nature.ADMIN);
        }
        if (!permissionService.hasPermission(AdminSystemPermission.STATUS_VIEW_CONNECTED_MEMBERS)) {
            natures.remove(Nature.MEMBER);
        }
        if (!permissionService.hasPermission(AdminSystemPermission.STATUS_VIEW_CONNECTED_BROKERS)) {
            natures.remove(Nature.BROKER);
        }
        if (!permissionService.hasPermission(AdminSystemPermission.STATUS_VIEW_CONNECTED_OPERATORS)) {
            natures.remove(Nature.OPERATOR);
        }
        if (natures.isEmpty()) {
            // Nothing left to see
            throw new PermissionDeniedException();
        }
        // Apply the allowed groups
        Collection<Group> allowedGroups = new HashSet<Group>();
        allowedGroups.addAll(permissionService.getVisibleMemberGroups());
        if (natures.contains(Nature.ADMIN)) {
            // Add all admin groups, as they are not present on the permissionService.getVisibleMemberGroups()
            GroupQuery admins = new GroupQuery();
            admins.setNatures(Group.Nature.ADMIN);
            allowedGroups.addAll(groupService.search(admins));
        }
        if (natures.contains(Nature.OPERATOR)) {
            // Add all operator groups, as they are not present on the permissionService.getVisibleMemberGroups()
            GroupQuery operators = new GroupQuery();
            operators.setIgnoreManagedBy(true);
            operators.setNatures(Group.Nature.OPERATOR);
            allowedGroups.addAll(groupService.search(operators));
        }
        query.setGroups(PermissionHelper.checkSelection(allowedGroups, query.getGroups()));
    } else {
        // Members can only view connected operators
        permissionService.permission(query.getMember()).member(MemberPermission.OPERATORS_MANAGE).check();
    }
    return accessService.searchSessions(query);
}
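Here remove() narrows an EnumSet of requested natures by stripping out every value the current user lacks permission to see, with an empty result treated as "access denied". A minimal sketch of that permission-filtering pattern; the Nature enum and granted set are placeholders:

import java.util.Collection;
import java.util.EnumSet;
import java.util.Set;

public class PermissionFilter {
    enum Nature { ADMIN, MEMBER, BROKER, OPERATOR }

    public static void main(String[] args) {
        // Start from "all natures" and strip out the ones the current user
        // is not permitted to see; an empty result means access is denied.
        Collection<Nature> natures = EnumSet.allOf(Nature.class);
        Set<Nature> granted = EnumSet.of(Nature.MEMBER, Nature.OPERATOR);

        for (Nature nature : Nature.values()) {
            if (!granted.contains(nature)) {
                natures.remove(nature);
            }
        }
        if (natures.isEmpty()) {
            throw new IllegalStateException("nothing left to see");
        }
        System.out.println(natures); // [MEMBER, OPERATOR]
    }
}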