List of usage examples for the java.util.Set method remove(Object)
boolean remove(Object o);
From source file:de.tudarmstadt.ukp.dkpro.spelling.experiments.hoo2012.hoo2011.FixedCandidateTrigramProbabilityDetector.java
@Override public void process(JCas jcas) throws AnalysisEngineProcessException { this.jcas = jcas; countCache = new HashMap<String, Long>(); // sanity check if (JCasUtil.select(jcas, RWSECandidate.class).size() == 0) { getContext().getLogger().log(Level.WARNING, "No RWSECandidate annotations present. Probably the pipeline is not properly configured."); getContext().getLogger().log(Level.WARNING, jcas.getDocumentText()); return;//from w w w . j ava 2s .co m } for (Sentence s : JCasUtil.select(jcas, Sentence.class)) { List<RWSECandidate> candidates = JCasUtil.selectCovered(jcas, RWSECandidate.class, s); // nothing to do, if there are no candidates in the sentence, if (candidates.size() == 0) { continue; } List<Token> tokens = JCasUtil.selectCovered(jcas, Token.class, s); List<String> words = JCasUtil.toText(tokens); double targetSentenceProb = getSentenceProbability(words) * alpha; // System.out.println(words); // System.out.println(targetSentenceProb); double maxSentenceProb = targetSentenceProb; SpellingAnomaly anomaly = null; double oneMinusAlpha = 1 - alpha; for (RWSECandidate candidate : candidates) { int candidatePosition = getCandidatePosition(candidate, tokens); if (candidatePosition == -1) { throw new AnalysisEngineProcessException( new Throwable("Could not find matching token for candidate: " + candidate)); } // do not consider candidates shorter than minLength if ((candidate.getEnd() - candidate.getBegin()) < minLength) { continue; } Set<String> spellingVariations = new HashSet<String>(candidateSet); spellingVariations.remove(candidate.getCoveredText()); int nrOfSpellingVariations = spellingVariations.size(); for (String variation : spellingVariations) { List<String> changedWords = getChangedWords(variation, words, candidatePosition); double changedSentenceProb = getSentenceProbability(changedWords) * (oneMinusAlpha / nrOfSpellingVariations); // System.out.println(changedWords.get(candidatePosition)); // System.out.println(changedSentenceProb); if 
(changedSentenceProb > maxSentenceProb) { maxSentenceProb = changedSentenceProb; anomaly = getAnomaly(tokens.get(candidatePosition), changedWords.get(candidatePosition)); } } } // we found a sentence that has a higher probability if (maxSentenceProb > targetSentenceProb) { // add spelling anomaly anomaly.addToIndexes(); System.out.println(s.getCoveredText()); System.out.println(anomaly); System.out.println(anomaly.getSuggestions(0)); } // TODO if we aggregate all sentences with probability higher than we can use the same "permitting multiple corrections" variant from WOH_H_B } }
From source file:org.os890.ds.addon.spring.impl.SpringBridgeExtension.java
private ConfigurableApplicationContext resolveSpringContext(AfterBeanDiscovery abd, BeanManager beanManager) { List<SpringContainerManager> scmList = ServiceUtils .loadServiceImplementations(SpringContainerManager.class); //TODO cleanup Map<String, Bean<?>> cdiBeansForSpringMap = new HashMap<String, Bean<?>>(); for (Bean<?> bean : this.cdiBeansForSpring) { if (bean.getName() != null) { cdiBeansForSpringMap.put(bean.getName(), bean); }//from www. j a v a 2s . c om Set<Type> beanTypes = new HashSet<Type>(bean.getTypes()); beanTypes.remove(Object.class); beanTypes.remove(Serializable.class); Type beanType = beanTypes.size() == 1 ? beanTypes.iterator().next() : null; if (beanType instanceof Class) { //to support producers cdiBeansForSpringMap.put(((Class) beanType).getName(), bean); } else { //fallback since spring doesn't support multiple types cdiBeansForSpringMap.put(bean.getBeanClass().getName(), bean); } } BeanFactoryPostProcessor beanFactoryPostProcessor = new CdiAwareBeanFactoryPostProcessor(beanManager, cdiBeansForSpringMap); try { currentBeanFactoryPostProcessor.set(beanFactoryPostProcessor); if (scmList.isEmpty()) { return null; } if (scmList.size() == 1) { return scmList.iterator().next().bootContainer(beanFactoryPostProcessor); } if (scmList.size() > 2) { abd.addDefinitionError( new IllegalStateException(scmList.size() + " spring-context-resolvers found")); } else //2 are found -> use the custom one { for (SpringContainerManager containerManager : scmList) { if (containerManager instanceof ManualSpringContainerManager) { continue; } if (containerManager.isContainerStarted()) { return containerManager.getStartedContainer(); } return containerManager.bootContainer(beanFactoryPostProcessor); } } return null; } finally { currentBeanFactoryPostProcessor.set(null); currentBeanFactoryPostProcessor.remove(); } }
From source file:com.act.biointerpretation.analytics.ReactionDeletion.java
public static void searchForDroppedReactions(NoSQLAPI srcApi, NoSQLAPI sinkApi, File outputFile) throws IOException { Set<Long> srcIds = new HashSet<>(); DBIterator iterator = srcApi.getReadDB().getIteratorOverReactions( new BasicDBObject("$query", new BasicDBObject()).append("$orderby", new BasicDBObject("_id", 1)), new BasicDBObject("_id", true)); while (iterator.hasNext()) { DBObject obj = iterator.next();/*from w w w . j a va 2s. c o m*/ Object id = obj.get("_id"); if (id instanceof Integer) { Integer idi = (Integer) id; srcIds.add(idi.longValue()); } else { String msg = String.format("Found unexpected %s value for _id in src DB: %s", id.getClass().getName(), id); LOGGER.error(msg); throw new RuntimeException(msg); } } iterator.close(); Iterator<Reaction> sinkRxns = sinkApi.readRxnsFromInKnowledgeGraph(); while (sinkRxns.hasNext()) { Reaction rxn = sinkRxns.next(); for (JSONObject protein : rxn.getProteinData()) { if (protein.has("source_reaction_id")) { Long srcId = protein.getLong("source_reaction_id"); srcIds.remove(srcId); } else LOGGER.error("Found protein without source id for reaction %d", rxn.getUUID()); } } if (srcIds.size() == 0) { LOGGER.info( "No source read DB ids were unaccounted for in the write DB. 
Exiting without writing output."); return; } List<Long> sortedSrcIds = new ArrayList<>(srcIds); Collections.sort(sortedSrcIds); try (TSVWriter<String, String> writer = new TSVWriter<>(OUTPUT_HEADER)) { writer.open(outputFile); int noProteinReactions = 0; for (Long id : sortedSrcIds) { Reaction rxn = srcApi.readReactionFromInKnowledgeGraph(id); if (rxn == null) { LOGGER.error("Could not read reaction %d from source DB", id); continue; } if (rxn.getProteinData().size() == 0) { LOGGER.debug("Reaction %d has no proteins, and so cannot participate in the provenance chain", rxn.getUUID()); noProteinReactions++; continue; } Map<String, String> row = new HashMap<String, String>(OUTPUT_HEADER.size()) { { put("id", Long.valueOf(rxn.getUUID()).toString()); put("substrates", "{" + StringUtils.join(rxn.getSubstrates(), ",") + "}"); put("products", "{" + StringUtils.join(rxn.getProducts(), ",") + "}"); put("ecnum", rxn.getECNum()); put("easy_desc", rxn.getReactionName()); } }; writer.append(row); writer.flush(); } LOGGER.info("Found %d reactions with no proteins of %d reactions that might have been deleted", noProteinReactions, srcIds.size()); } }
From source file:com.p6spy.engine.spy.option.P6TestOptionDefaults.java
private void assertP6FactoryClassesEqual(List<Class<? extends P6Factory>> expected,
        Collection<P6Factory> factories) {
    // Tick off the class of each provided factory; every expected class must get consumed.
    final Set<Class<? extends P6Factory>> remaining = new HashSet<Class<? extends P6Factory>>(expected);
    for (P6Factory factory : factories) {
        remaining.remove(factory.getClass());
    }
    Assert.assertTrue(remaining.isEmpty());
}
From source file:com.searchbox.engine.solr.EmbeddedSolr.java
private boolean addCopyFields(Collection collection, Field field, Set<String> copyFields) { SolrCore core = coreContainer.getCore(collection.getName()); IndexSchema schema = core.getLatestSchema(); for (CopyField copyField : schema.getCopyFieldsList(field.getKey())) { copyFields.remove(copyField.getDestination().getName()); }//from w w w. ja v a 2 s.c o m Map<String, java.util.Collection<String>> copyFieldsMap = new HashMap<String, java.util.Collection<String>>(); copyFieldsMap.put(field.getKey(), copyFields); schema = schema.addCopyFields(copyFieldsMap); core.setLatestSchema(schema); return true; }
From source file:de.blizzy.documentr.access.DocumentrPermissionEvaluator.java
public boolean isLastAdminRole(String roleName) { try {//w ww .j a va 2 s . c om if (userStore.getRole(roleName).getPermissions().contains(Permission.ADMIN)) { Set<String> roles = Sets.newHashSet(userStore.listRoles()); roles.remove(roleName); // find all roles containing the ADMIN permission Set<String> adminRoles = Sets.newHashSet(); for (String role : roles) { Role r = userStore.getRole(role); if (r.getPermissions().contains(Permission.ADMIN)) { adminRoles.add(role); } } // check whether any of the admin roles is granted to any user on the "application" object if (!adminRoles.isEmpty()) { List<String> users = userStore.listUsers(); for (String user : users) { List<RoleGrantedAuthority> authorities = userStore.getUserAuthorities(user); for (RoleGrantedAuthority rga : authorities) { for (String role : adminRoles) { if (rga.getRoleName().equals(role) && rga.getTarget().equals(GrantedAuthorityTarget.APPLICATION)) { return false; } } } } } return true; } } catch (IOException e) { throw new RuntimeException(e); } return false; }
From source file:net.dv8tion.jda.core.handle.GuildEmojisUpdateHandler.java
@Override protected Long handleInternally(JSONObject content) { final long guildId = content.getLong("guild_id"); if (api.getGuildLock().isLocked(guildId)) return guildId; GuildImpl guild = (GuildImpl) api.getGuildMap().get(guildId); if (guild == null) { api.getEventCache().cache(EventCache.Type.GUILD, guildId, () -> handle(responseNumber, allContent)); return null; }/*from w ww. j a v a 2 s .c o m*/ JSONArray array = content.getJSONArray("emojis"); TLongObjectMap<Emote> emoteMap = guild.getEmoteMap(); List<Emote> oldEmotes = new ArrayList<>(emoteMap.valueCollection()); //snapshot of emote cache List<Emote> newEmotes = new ArrayList<>(); for (int i = 0; i < array.length(); i++) { JSONObject current = array.getJSONObject(i); final long emoteId = current.getLong("id"); EmoteImpl emote = (EmoteImpl) emoteMap.get(emoteId); EmoteImpl oldEmote = null; if (emote == null) { emote = new EmoteImpl(emoteId, guild); newEmotes.add(emote); } else { // emote is in our cache which is why we don't want to remove it in cleanup later oldEmotes.remove(emote); oldEmote = emote.clone(); } emote.setName(current.getString("name")).setManaged(current.getBoolean("managed")); //update roles JSONArray roles = current.getJSONArray("roles"); Set<Role> newRoles = emote.getRoleSet(); Set<Role> oldRoles = new HashSet<>(newRoles); //snapshot of cached roles for (int j = 0; j < roles.length(); j++) { Role role = guild.getRoleById(roles.getString(j)); newRoles.add(role); oldRoles.remove(role); } //cleanup old cached roles that were not found in the JSONArray for (Role r : oldRoles) { // newRoles directly writes to the set contained in the emote newRoles.remove(r); } // finally, update the emote emoteMap.put(emote.getIdLong(), emote); // check for updated fields and fire events handleReplace(oldEmote, emote); } //cleanup old emotes that don't exist anymore for (Emote e : oldEmotes) { emoteMap.remove(e.getIdLong()); api.getEventManager().handle(new EmoteRemovedEvent(api, responseNumber, e)); } for 
(Emote e : newEmotes) { api.getEventManager().handle(new EmoteAddedEvent(api, responseNumber, e)); } return null; }
From source file:jp.co.iidev.subartifact1.divider1.mojo.ArtifactDividerMojo.java
public void execute() throws MojoExecutionException { Artifact projArt = project.getArtifact(); Map<Dependency, Artifact> artifactsForDep = Maps.newHashMap(); projArt = project.getArtifact();//from ww w .ja v a 2 s .c om { List<Dependency> dep = project.getDependencies(); Set<Artifact> arts = project.getDependencyArtifacts(); for (Dependency dx : dep) { String grpid = dx.getGroupId(); String artid = dx.getArtifactId(); String clsf = dx.getClassifier(); for (Artifact art : arts) { boolean a = StringUtils.equals(art.getArtifactId(), artid); boolean g = StringUtils.equals(art.getGroupId(), grpid); boolean c = StringUtils.equals(art.getClassifier(), clsf); if (a && g && c) { artifactsForDep.put(dx, art); } } } } { String version = project.getVersion(); String groupId = project.getGroupId(); LinkedHashMap<File, Dependency> compiletimeClasspath = Maps.newLinkedHashMap(); File rtjar = Paths.get(System.getProperty("java.home"), "lib", "rt.jar").toFile(); Dependency rtjar_dummyDep = new Dependency(); { rtjar_dummyDep.setGroupId(SystemUtils.JAVA_VENDOR.replace(" ", ".")); rtjar_dummyDep.setVersion(SystemUtils.JAVA_RUNTIME_VERSION); rtjar_dummyDep.setArtifactId(SystemUtils.JAVA_RUNTIME_NAME); } File targetJar = project.getArtifact().getFile(); Dependency targetJarDep = new Dependency(); { targetJarDep.setArtifactId(project.getArtifactId()); targetJarDep.setGroupId(project.getGroupId()); targetJarDep.setVersion(project.getVersion()); targetJarDep.setClassifier(projArt.getClassifier()); } compiletimeClasspath.put(rtjar, rtjar_dummyDep); compiletimeClasspath.put(targetJar, targetJarDep); artifactsForDep.forEach((d, a) -> { compiletimeClasspath.put(a.getFile(), d); }); LoggableFactory lf = new LoggableFactory() { @Override public Loggable createLoggable(Class cx) { return new Loggable() { Logger l = LoggerFactory.getLogger(cx); @Override public void warn(String text) { l.warn(text); } @Override public void info(String text) { l.info(text); } @Override public void error(String 
text) { l.error(text); } @Override public void debug(String text) { l.debug(text); } }; } }; try { LinkedHashMap<SubArtifactDefinition, SubArtifactDeployment> buildPlan = new DivisionExecutor( lf.createLoggable(DivisionExecutor.class)).planDivision(targetJar, rootSubArtifactId, Arrays.asList(subartifacts == null ? new SubArtifact[0] : subartifacts), compiletimeClasspath, not(in(ImmutableSet.of(rtjar, targetJar))), defaultRootTransitivePropagations, defaultRootSourceReferencePropagations, defaultSubartifactSourceReferencePropagations, globalReferencePropagations, lf); Set<File> usableJar = Sets.newLinkedHashSet(compiletimeClasspath.keySet()); usableJar.remove(targetJar); usableJar.remove(rtjar); int ix = 0; for (SubArtifact s : subartifacts) { for (Dependency d : s.getExtraDependencies()) { buildPlan.get(s).getJarDeps().put(new File("x_xx_xyx_duMmy" + (ix++) + ".jar"), d); } } new PomSetGenerator(project.getBasedir().toPath().resolve("pom.xml"), outputDirectory.toPath(), templateOutputDirectory.toPath(), lf).generate(groupId, version, this.subArtifactsParentArtifactId, buildPlan); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new MojoExecutionException("division process error", e); } } }
From source file:org.bedework.eventreg.web.forms.DeleteFieldController.java
@Override
public ModelAndView doRequest() throws Throwable {
    if (sessMan.getCurrentCalsuite() == null) {
        return errorReturn("No calsuite");
    }

    final String formName = req.getFormName();
    final FormDef form = sessMan.getFormDef(formName);
    if (form == null) {
        return errorReturn("No current form");
    }

    final String fieldName = req.getName();
    if (fieldName == null) {
        return errorReturn("No field name");
    }

    // Locate the field to delete within the form's field set.
    final Set<FieldDef> flds = form.getFields();
    FieldDef target = null;
    for (final FieldDef f : flds) {
        if (f.getName().equals(fieldName)) {
            target = f;
            break;
        }
    }
    if (target == null) {
        return errorReturn("Field not found " + fieldName);
    }

    // Remove the field and persist the updated form definition.
    flds.remove(target);
    sessMan.updateFormDef(form);
    sessMan.setCurrentFormName(formName);
    sessMan.setMessage("ok");

    return objModel(getForwardSuccess(), "form", form, "fields", new FormFields(flds));
}
From source file:com.bmwcarit.barefoot.markov.KState.java
protected void remove(C candidate, int index) {
    // Drop the candidate from the state vector at the given sequence position.
    Set<C> vector = sequence.get(index).one();
    counters.remove(candidate);
    vector.remove(candidate);

    // Decrement the predecessor's successor count and recursively prune it once orphaned.
    C predecessor = candidate.predecessor();
    if (predecessor != null) {
        int remaining = counters.get(predecessor) - 1;
        counters.put(predecessor, remaining);
        if (remaining == 0) {
            remove(predecessor, index - 1);
        }
    }
}