List of usage examples for java.util.Collections.disjoint
public static boolean disjoint(Collection<?> c1, Collection<?> c2)
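Collections.disjoint returns true when the two collections have no elements in common and false when they share at least one; the examples below typically negate the result (!Collections.disjoint(a, b)) to test whether two collections overlap. A minimal standalone sketch (class name and sample data are illustrative, not taken from any of the projects below):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class DisjointDemo {
    public static void main(String[] args) {
        List<String> a = Arrays.asList("alpha", "beta");
        List<String> b = Arrays.asList("gamma", "delta");
        List<String> c = Arrays.asList("beta", "gamma");

        System.out.println(Collections.disjoint(a, b)); // true: no common elements
        System.out.println(Collections.disjoint(a, c)); // false: "beta" is in both
    }
}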
From source file:de.appsolve.padelcampus.admin.controller.general.AdminGeneralModulesController.java
@Override
public ModelAndView postEditView(@ModelAttribute("Model") Module model, HttpServletRequest request,
        BindingResult result) {
    if (result.hasErrors()) {
        return getEditView(model);
    }
    if (model.getModuleType().equals(ModuleType.HomePage)) {
        model.setShowOnHomepage(Boolean.FALSE);
        model.setShowInMenu(Boolean.FALSE);
        model.setShowInFooter(Boolean.FALSE);
    }
    if (model.getModuleType().equals(ModuleType.Events) && model.getEventGroups() != null) {
        List<Module> eventModules = moduleDAO.findByModuleType(ModuleType.Events);
        eventModules.remove(model);
        for (Module existingModule : eventModules) {
            if (existingModule.getEventGroups() != null) {
                // reject the edit if any event group is already used by another Events module
                if (!Collections.disjoint(existingModule.getEventGroups(), model.getEventGroups())) {
                    result.addError(new ObjectError("*", msg.get("EventGroupIsAlreadyAssociatedWith",
                            new Object[] { existingModule.getTitle() })));
                    break;
                }
            }
        }
    }
    checkTitleRequirements(model, result);
    if (result.hasErrors()) {
        return getEditView(model);
    }
    keepSubModules(model);
    checkPosition(model);
    rewriteLinks(model);
    ModelAndView mav = super.postEditView(model, request, result);
    moduleUtil.reloadModules(request);
    return mav;
}
From source file:com.github.anba.es6draft.test262.Test262Info.java
/**
 * Returns {@code true} if the test configuration has the requested features.
 *
 * @param includeFeatures
 *            the set of include features, ignored if empty
 * @param excludeFeatures
 *            the set of exclude features, ignored if empty
 * @return {@code true} if the requested features are present
 */
public boolean hasFeature(Set<String> includeFeatures, Set<String> excludeFeatures) {
    if (!includeFeatures.isEmpty() && Collections.disjoint(includeFeatures, features)) {
        return false;
    }
    if (!excludeFeatures.isEmpty() && !Collections.disjoint(excludeFeatures, features)) {
        return false;
    }
    return true;
}
From source file:org.fao.geonet.api.GlobalExceptionController.java
/**
 * @param request the HTTP request object.
 * @return true if the content type is allowed to have a body when returning an error to the client,
 *         false if the response should contain an empty body.
 */
private boolean contentTypeNeedsBody(HttpServletRequest request) {
    boolean needsBody;
    List<MediaType> requestMediaTypes = resolveMediaTypes(new ServletWebRequest(request));
    Set<MediaType> allowedContentTypes = Sets.newHashSet(MediaType.APPLICATION_XML,
            MediaType.APPLICATION_XHTML_XML, MediaType.APPLICATION_JSON);
    needsBody = !Collections.disjoint(allowedContentTypes, requestMediaTypes);
    return needsBody;
}
From source file:de.uni_rostock.goodod.owl.normalization.SubsumptionMaterializationNormalizer.java
private void cleanClassHierarchy() {
    Set<OWLSubClassOfAxiom> axioms = ontology.getAxioms(AxiomType.SUBCLASS_OF, true);
    for (OWLSubClassOfAxiom ax : axioms) {
        OWLClassExpression subEx = ax.getSubClass();
        OWLClassExpression superEx = ax.getSuperClass();
        // Consider only named classes
        if ((subEx instanceof OWLClass) && (superEx instanceof OWLClass)) {
            /*
             * Get the subclasses of the superclass and the
             * superclasses of the subclass. If the resulting sets
             * share a member (modulo the classes in the axiom), the subclass
             * relation is entailed by transitivity alone and we can safely
             * remove the axiom.
             */
            Set<OWLClass> subs = transitiveSuperClasses(subEx.asOWLClass());
            Set<OWLClass> supers = transitiveSubClasses(superEx.asOWLClass());
            subs.remove(superEx);
            supers.remove(subEx);
            if (false == Collections.disjoint(subs, supers)) {
                changes.add(new RemoveAxiom(ontology, ax));
            }
        }
        /*
         * Special-case: If the superclass has "Test" (case-insensitively)
         * as its fragment, remove the axiom.
         */
        if (superEx instanceof OWLClass) {
            IRI superIRI = ((OWLClass) superEx).getIRI();
            String fragment = superIRI.getFragment();
            if (fragment.equalsIgnoreCase("TEST")) {
                RemoveAxiom removeAx = new RemoveAxiom(ontology, ax);
                if (false == changes.contains(removeAx)) {
                    changes.add(removeAx);
                }
            }
        }
    }
    logger.debug("Removed " + changes.size() + " redundant subsumptions.");
    commitChanges();
}
From source file:it.geosolutions.geostore.services.rest.impl.RESTStoredDataServiceImpl.java
@Override
public String get(SecurityContext sc, HttpHeaders headers, long id) throws NotFoundWebEx {
    if (id == -1)
        return "dummy payload";

    //
    // Authorization check.
    //
    boolean canRead = false;
    User authUser = extractAuthUser(sc);
    canRead = resourceAccessRead(authUser, id); // The ID is also the resource ID

    if (!canRead) {
        throw new ForbiddenErrorWebEx("This user cannot read this stored data !");
    }

    StoredData storedData;
    try {
        storedData = storedDataService.get(id);
    } catch (NotFoundServiceEx e) {
        throw new NotFoundWebEx("Data not found");
    }

    String data = storedData == null ? "" : storedData.getData();

    // prefer no transformation
    if (headers.getAcceptableMediaTypes().contains(MediaType.WILDCARD_TYPE)) {
        return data;
    } else if (!Collections.disjoint(GET_TEXT_MEDIA_TYPES, headers.getAcceptableMediaTypes())) {
        return data;
    } else if (!Collections.disjoint(GET_JSON_MEDIA_TYPES, headers.getAcceptableMediaTypes())) {
        return toJSON(data);
    } else if (!Collections.disjoint(GET_XML_MEDIA_TYPES, headers.getAcceptableMediaTypes())) {
        return toXML(data);
    } else
        throw new InternalErrorWebEx("Illegal state (" + headers.getAcceptableMediaTypes() + ")");
}
From source file:org.languagetool.rules.uk.TokenAgreementNounVerbRule.java
@Override
public final RuleMatch[] match(AnalyzedSentence sentence) {
    List<RuleMatch> ruleMatches = new ArrayList<>();
    AnalyzedTokenReadings[] tokens = sentence.getTokensWithoutWhitespace();

    List<AnalyzedToken> nounTokenReadings = new ArrayList<>();
    AnalyzedTokenReadings nounAnalyzedTokenReadings = null;

    for (int i = 1; i < tokens.length; i++) {
        AnalyzedTokenReadings tokenReadings = tokens[i];

        String posTag0 = tokenReadings.getAnalyzedToken(0).getPOSTag();

        //TODO: skip conj
        if (posTag0 == null) {
            nounTokenReadings.clear();
            continue;
        }

        if (nounTokenReadings.isEmpty()) {
            // no need to start checking on last token or if no noun
            if (i == tokens.length - 1)
                continue;

            if (!PosTagHelper.hasPosTag(tokenReadings, "noun.*:v_naz.*"))
                continue;

            for (AnalyzedToken token : tokenReadings) {
                String nounPosTag = token.getPOSTag();

                if (nounPosTag == null) { // can happen for words with \u0301 or \u00AD
                    continue;
                }

                // if( nounPosTag.startsWith("<") ) {
                //     nounTokenReadings.clear();
                //     break;
                // }

                if (nounPosTag.startsWith("noun") && nounPosTag.contains("v_naz")) {
                    nounTokenReadings.add(token);
                    nounAnalyzedTokenReadings = tokenReadings;
                }
                // else if ( nounPosTag.equals(JLanguageTool.SENTENCE_END_TAGNAME) ) {
                //     continue;
                // }
                else {
                    nounTokenReadings.clear();
                    break;
                }
            }
            continue;
        }

        // see if we get a following verb
        // System.err.println("Check for verb: " + tokenReadings);

        List<AnalyzedToken> verbTokenReadings = new ArrayList<>();
        for (AnalyzedToken token : tokenReadings) {
            String verbPosTag = token.getPOSTag();

            if (verbPosTag == null) { // can happen for words with \u0301 or \u00AD
                continue;
            }

            if (verbPosTag.startsWith("<")) {
                verbTokenReadings.clear();
                break;
            }

            if (verbPosTag.startsWith("verb")) {
                verbTokenReadings.add(token);
            } else if (verbPosTag.equals(JLanguageTool.SENTENCE_END_TAGNAME)) {
                continue;
            } else {
                verbTokenReadings.clear();
                break;
            }
        }

        // no slave token - restart
        if (verbTokenReadings.isEmpty()) {
            nounTokenReadings.clear();
            continue;
        }

        logger.debug("=== Checking\n\t{}\n\t{}", nounTokenReadings, verbTokenReadings);

        // perform the check
        List<Inflection> masterInflections = getNounInflections(nounTokenReadings);
        List<Inflection> slaveInflections = getVerbInflections(verbTokenReadings);

        logger.debug("\t\t{}\n\t{}", masterInflections, slaveInflections);

        if (Collections.disjoint(masterInflections, slaveInflections)) {
            if (TokenAgreementNounVerbExceptionHelper.isException(tokens, i, masterInflections,
                    slaveInflections, nounTokenReadings, verbTokenReadings)) {
                nounTokenReadings.clear();
                break;
            }

            if (logger.isDebugEnabled()) {
                logger.debug(MessageFormat.format("=== Found noun/verb mismatch\n\t{0}\n\t{1}",
                        nounAnalyzedTokenReadings.getToken() + ": " + masterInflections + " // "
                                + nounAnalyzedTokenReadings,
                        verbTokenReadings.get(0).getToken() + ": " + slaveInflections + " // "
                                + verbTokenReadings));
            }

            String msg = String.format("? ?: \"%s\" (%s) \"%s\" (%s)",
                    nounTokenReadings.get(0).getToken(), formatInflections(masterInflections, true),
                    verbTokenReadings.get(0).getToken(), formatInflections(slaveInflections, false));

            RuleMatch potentialRuleMatch = new RuleMatch(this, sentence,
                    nounAnalyzedTokenReadings.getStartPos(), tokenReadings.getEndPos(), msg, getShort());
            ruleMatches.add(potentialRuleMatch);
        }

        nounTokenReadings.clear();
    }

    return toRuleMatchArray(ruleMatches);
}
From source file:org.mskcc.cbio.portal.util.CancerStudyPermissionEvaluator.java
/**
 * Helper function to determine if given user has access to given cancer study.
 *
 * @param cancerStudy the cancer study to check access for
 * @param authentication Spring Authentication of the logged-in user.
 * @return boolean
 */
private boolean hasPermission(CancerStudy cancerStudy, Authentication authentication) {
    Set<String> grantedAuthorities = getGrantedAuthorities(authentication);
    String stableStudyID = cancerStudy.getCancerStudyStableId();

    if (log.isDebugEnabled()) {
        log.debug("hasPermission(), cancer study stable id: " + stableStudyID);
        log.debug("hasPermission(), user: " + authentication.getPrincipal().toString());
        for (String authority : grantedAuthorities) {
            log.debug("hasPermission(), authority: " + authority);
        }
    }

    // a user has permission to access the 'all' cancer study (everybody does)
    if (stableStudyID.equalsIgnoreCase(AccessControl.ALL_CANCER_STUDIES_ID)) {
        return true;
    }

    // if a user has access to 'all', simply return true
    if (grantedAuthorities.contains(AccessControl.ALL_CANCER_STUDIES_ID.toUpperCase())) {
        if (log.isDebugEnabled()) {
            log.debug("hasPermission(), user has access to ALL cancer studies, return true");
        }
        return true;
    }

    // if a user has access to 'all_tcga', simply return true for tcga studies
    if (grantedAuthorities.contains(AccessControl.ALL_TCGA_CANCER_STUDIES_ID.toUpperCase())
            && stableStudyID.toUpperCase().endsWith("_TCGA")) {
        if (log.isDebugEnabled()) {
            log.debug("hasPermission(), user has access to ALL_TCGA cancer studies return true");
        }
        return true;
    }

    // if a user has access to 'all_target', simply return true for target studies
    if (grantedAuthorities.contains(AccessControl.ALL_TARGET_CANCER_STUDIES_ID.toUpperCase())
            && (stableStudyID.toUpperCase().endsWith("_TARGET")
                    || stableStudyID.equalsIgnoreCase("ALL_TARGET_PHASE1")
                    || stableStudyID.equalsIgnoreCase("ALL_TARGET_PHASE2"))) {
        if (log.isDebugEnabled()) {
            log.debug("hasPermission(), user has access to ALL_NCI_TARGET cancer studies return true");
        }
        return true;
    }

    // for groups
    Set<String> groups = Collections.emptySet();
    try {
        groups = cancerStudy.getFreshGroups();
    } catch (DaoException e) {
        groups = cancerStudy.getGroups();
    }

    if (!Collections.disjoint(groups, grantedAuthorities)) {
        if (log.isDebugEnabled()) {
            log.debug("hasPermission(), user has access by groups return true");
        }
        return true;
    }

    // finally, check if the user has this study specifically listed in his 'groups' (a 'group' of this study only)
    boolean toReturn = grantedAuthorities.contains(stableStudyID.toUpperCase());

    if (log.isDebugEnabled()) {
        if (toReturn == true) {
            log.debug("hasPermission(), user has access to this cancer study: '" + stableStudyID
                    + "', returning true.");
        } else {
            log.debug("hasPermission(), user does not have access to the cancer study: '" + stableStudyID
                    + "', returning false.");
        }
    }

    // outta here
    return toReturn;
}
From source file:org.apache.atlas.typesystem.types.TypeSystemTest.java
@Test
public void testGetTraitsNames() throws Exception {
    HierarchicalTypeDefinition<TraitType> classificationTraitDefinition = TypesUtil.createTraitTypeDef(
            "Classification", ImmutableSet.<String>of(),
            TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
    HierarchicalTypeDefinition<TraitType> piiTrait = TypesUtil.createTraitTypeDef("PII",
            ImmutableSet.<String>of());
    HierarchicalTypeDefinition<TraitType> phiTrait = TypesUtil.createTraitTypeDef("PHI",
            ImmutableSet.<String>of());
    HierarchicalTypeDefinition<TraitType> pciTrait = TypesUtil.createTraitTypeDef("PCI",
            ImmutableSet.<String>of());
    HierarchicalTypeDefinition<TraitType> soxTrait = TypesUtil.createTraitTypeDef("SOX",
            ImmutableSet.<String>of());
    HierarchicalTypeDefinition<TraitType> secTrait = TypesUtil.createTraitTypeDef("SEC",
            ImmutableSet.<String>of());
    HierarchicalTypeDefinition<TraitType> financeTrait = TypesUtil.createTraitTypeDef("Finance",
            ImmutableSet.<String>of());

    getTypeSystem().defineTypes(ImmutableList.<EnumTypeDefinition>of(),
            ImmutableList.<StructTypeDefinition>of(),
            ImmutableList.of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait, soxTrait, secTrait,
                    financeTrait),
            ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());

    final ImmutableList<String> traitsNames = getTypeSystem()
            .getTypeNamesByCategory(DataTypes.TypeCategory.TRAIT);
    Assert.assertEquals(traitsNames.size(), 7);

    List traits = Arrays
            .asList(new String[] { "Classification", "PII", "PHI", "PCI", "SOX", "SEC", "Finance", });

    Assert.assertFalse(Collections.disjoint(traitsNames, traits));
}
From source file:org.languagetool.rules.uk.TokenAgreementAdjNounRule.java
@Override
public final RuleMatch[] match(AnalyzedSentence sentence) {
    List<RuleMatch> ruleMatches = new ArrayList<>();
    AnalyzedTokenReadings[] tokens = sentence.getTokensWithoutWhitespace();

    List<AnalyzedToken> adjTokenReadings = new ArrayList<>();
    AnalyzedTokenReadings adjAnalyzedTokenReadings = null;

    for (int i = 1; i < tokens.length; i++) {
        AnalyzedTokenReadings tokenReadings = tokens[i];

        String posTag0 = tokenReadings.getAnalyzedToken(0).getPOSTag();

        if (posTag0 == null) { // || posTag0.equals(JLanguageTool.SENTENCE_START_TAGNAME)
            adjTokenReadings.clear();
            continue;
        }

        // grab initial adjective inflections
        if (adjTokenReadings.isEmpty()) {
            // no need to start checking on last token or if no noun
            if (i == tokens.length - 1)
                continue;

            //TODO: nv still can be wrong if :np/:ns is present to it's not much gain for lots of work
            if (PosTagHelper.hasPosTagPart(tokens[i], PosTagHelper.NO_VIDMINOK_SUBSTR)
                    //TODO: turn back on when we can handle pron
                    || PosTagHelper.hasPosTagPart(tokens[i], "&pron")
                    || PosTagHelper.hasPosTagPart(tokens[i], "<"))
                continue;

            if (!PosTagHelper.hasPosTagPart(tokens[i + 1], "noun:")
                    || PosTagHelper.hasPosTagPart(tokens[i + 1], PosTagHelper.NO_VIDMINOK_SUBSTR)
                    || PosTagHelper.hasPosTagPart(tokens[i + 1], "&pron")
                    || PosTagHelper.hasPosTagPart(tokens[i + 1], "<"))
                continue;

            //TODO: TEMP?
            if (LemmaHelper.hasLemma(tokens[i], Arrays.asList("", "", "?", "?", ""), ":p:")
                    || LemmaHelper.hasLemma(tokens[i], Arrays.asList("", "", "", "", "", ""), ":n:")
                    || LemmaHelper.hasLemma(tokens[i], Arrays.asList(""), ":f:")) {
                adjTokenReadings.clear();
                break;
            }

            for (AnalyzedToken token : tokenReadings) {
                String adjPosTag = token.getPOSTag();

                if (adjPosTag == null) { // can happen for words with \u0301 or \u00AD
                    continue;
                }

                if (adjPosTag.startsWith("adj")) {
                    adjTokenReadings.add(token);
                    adjAnalyzedTokenReadings = tokenReadings;
                } else {
                    adjTokenReadings.clear();
                    break;
                }
            }
            continue;
        }

        List<AnalyzedToken> slaveTokenReadings = new ArrayList<>();
        for (AnalyzedToken token : tokenReadings) {
            String nounPosTag = token.getPOSTag();

            if (nounPosTag == null) { // can happen for words with \u0301 or \u00AD
                continue;
            }

            if (nounPosTag.startsWith("noun") && !nounPosTag.contains(PosTagHelper.NO_VIDMINOK_SUBSTR)) {
                slaveTokenReadings.add(token);
            } else if (nounPosTag.equals(JLanguageTool.SENTENCE_END_TAGNAME)
                    || nounPosTag.equals(JLanguageTool.PARAGRAPH_END_TAGNAME)) {
                continue;
            } else {
                slaveTokenReadings.clear();
                break;
            }
        }

        // no slave token - restart
        if (slaveTokenReadings.isEmpty()) {
            adjTokenReadings.clear();
            continue;
        }

        logger.debug("=== Checking:\n\t{}\n\t{}", adjTokenReadings, slaveTokenReadings);

        // perform the check
        List<InflectionHelper.Inflection> masterInflections = InflectionHelper
                .getAdjInflections(adjTokenReadings);
        List<InflectionHelper.Inflection> slaveInflections = InflectionHelper
                .getNounInflections(slaveTokenReadings, "v_zna:var");

        if (Collections.disjoint(masterInflections, slaveInflections)) {
            if (TokenAgreementAdjNounExceptionHelper.isException(tokens, i, masterInflections,
                    slaveInflections, adjTokenReadings, slaveTokenReadings)) {
                adjTokenReadings.clear();
                continue;
            }

            if (logger.isDebugEnabled()) {
                logger.debug(MessageFormat.format("=== Found:\n\t{0}\n\t",
                        adjAnalyzedTokenReadings.getToken() + ": " + masterInflections + " // "
                                + adjAnalyzedTokenReadings,
                        slaveTokenReadings.get(0).getToken() + ": " + slaveInflections + " // "
                                + slaveTokenReadings));
            }

            String msg = String.format(" : : \"%s\": [%s] \"%s\": [%s]",
                    adjTokenReadings.get(0).getToken(), formatInflections(masterInflections, true),
                    slaveTokenReadings.get(0).getToken(), formatInflections(slaveInflections, false));

            if (PosTagHelper.hasPosTagPart(adjTokenReadings, ":m:v_rod")
                    && tokens[i].getToken().matches(".*[]")
                    && PosTagHelper.hasPosTag(slaveTokenReadings, "noun.*?:m:v_dav.*")) {
                msg += ". , .. ? -/- ? -/-? ( ? ?? )?";
            } else if (adjAnalyzedTokenReadings.getToken().contains("-")
                    && Pattern.compile(".*([23]-|[02-9]-|[0-9]-)")
                            .matcher(adjAnalyzedTokenReadings.getToken()).matches()) {
                msg += ". , ? ?? ? ??";
            } else if (adjAnalyzedTokenReadings.getToken().startsWith("")
                    // TODO: && tag(adjAnalyzedTokenReadings.getToken().substring(2)) has adjp
                    && PosTagHelper.hasPosTag(slaveTokenReadings, "noun.*?:v_oru.*")) {
                msg += ". , ? ?";
            }

            RuleMatch potentialRuleMatch = new RuleMatch(this, sentence,
                    adjAnalyzedTokenReadings.getStartPos(), tokenReadings.getEndPos(), msg, getShort());

            Synthesizer ukrainianSynthesizer = ukrainian.getSynthesizer();

            List<String> suggestions = new ArrayList<>();
            try {
                for (Inflection adjInflection : masterInflections) {
                    String genderTag = ":" + adjInflection.gender + ":";
                    String vidmTag = adjInflection._case;

                    if (!adjInflection._case.equals("v_kly")
                            && (adjInflection.gender.equals("p")
                                    || PosTagHelper.hasPosTagPart(slaveTokenReadings, genderTag))) {
                        for (AnalyzedToken nounToken : slaveTokenReadings) {

                            if (adjInflection.animMatters()) {
                                if (!nounToken.getPOSTag().contains(":" + adjInflection.animTag))
                                    continue;
                            }

                            String newNounPosTag = nounToken.getPOSTag().replaceFirst(":.:v_...",
                                    genderTag + vidmTag);

                            String[] synthesized = ukrainianSynthesizer.synthesize(nounToken, newNounPosTag,
                                    false);

                            for (String s : synthesized) {
                                String suggestion = adjAnalyzedTokenReadings.getToken() + " " + s;
                                if (!suggestions.contains(suggestion)) {
                                    suggestions.add(suggestion);
                                }
                            }
                        }
                    }
                }

                for (Inflection nounInflection : slaveInflections) {
                    String genderTag = ":" + nounInflection.gender + ":";
                    String vidmTag = nounInflection._case;

                    if (nounInflection.animMatters()) {
                        vidmTag += ":r" + nounInflection.animTag;
                    }

                    for (AnalyzedToken adjToken : adjTokenReadings) {
                        String newAdjTag = adjToken.getPOSTag().replaceFirst(":.:v_...(:r(in)?anim)?",
                                genderTag + vidmTag);

                        String[] synthesized = ukrainianSynthesizer.synthesize(adjToken, newAdjTag, false);

                        for (String s : synthesized) {
                            String suggestion = s + " " + tokenReadings.getToken();
                            if (!suggestions.contains(suggestion)) {
                                suggestions.add(suggestion);
                            }
                        }
                    }
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }

            // System.err.println("### " + suggestions);

            if (suggestions.size() > 0) {
                potentialRuleMatch.setSuggestedReplacements(suggestions);
            }

            ruleMatches.add(potentialRuleMatch);
        }

        adjTokenReadings.clear();
    }

    return toRuleMatchArray(ruleMatches);
}
From source file:com.numenta.taurus.service.TaurusNotificationService.java
/**
 * Generate a list of Pending Notifications for the given period and frequency
 *
 * @param from The initial timestamp of the period to check (inclusive)
 * @param to The end timestamp of the period to check (inclusive)
 * @param frequency The frequency in which to fire notifications in milliseconds
 * @return pending notifications matching the criteria
 */
List<Notification> getPendingNotifications(long from, long to, long frequency) {
    TaurusDatabase database = TaurusApplication.getDatabase();
    if (database == null) {
        return Collections.emptyList();
    }
    ArrayList<Notification> results = new ArrayList<Notification>();
    HashMap<String, Pair<Long, AnomalyValue>> anomalies = new HashMap<String, Pair<Long, AnomalyValue>>();
    EnumSet<MetricType> mask;

    // Get all anomalies for favorite instances
    for (String instance : TaurusApplication.getFavoriteInstances()) {
        // Check last time a notification was fired for this instance
        long lastFired = TaurusApplication.getLastNotificationTime(instance);
        if (lastFired > to - frequency) {
            // This instance already fired a notification for this period
            continue;
        }

        // Check for anomalies
        List<Pair<Long, AnomalyValue>> data = database.getInstanceData(instance, from, to);
        for (Pair<Long, AnomalyValue> value : data) {
            if (value.second == null) {
                continue;
            }
            // Check for "red" anomalies
            float logScale = (float) DataUtils.logScale(Math.abs(value.second.anomaly));
            if (logScale >= TaurusApplication.getRedBarFloor()) {
                // Check if found new stock related anomaly
                mask = MetricType.fromMask(value.second.metricMask);
                if (!anomalies.containsKey(instance) && !Collections.disjoint(mask, MetricType.STOCK_TYPES)) {
                    anomalies.put(instance, value);
                }
            }
        }
    }

    // Create notifications based on anomalies filtered for the period
    TaurusDataFactory factory = database.getDataFactory();
    long timestamp;
    String instance;
    String text;
    AnomalyValue value;
    for (Map.Entry<String, Pair<Long, AnomalyValue>> entry : anomalies.entrySet()) {
        timestamp = entry.getValue().first;
        value = entry.getValue().second;
        mask = MetricType.fromMask(value.metricMask);
        instance = entry.getKey();
        text = formatAnomalyTitle(instance, mask, timestamp);
        results.add(factory.createNotification(instance, timestamp, text));
    }
    return results;
}