List of usage examples for java.util.Set.retainAll
boolean retainAll(Collection<?> c);
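Before the per-project examples, a minimal sketch of the contract (a hypothetical standalone demo, assuming Java 9+ for Set.of): retainAll mutates the receiver into its intersection with the argument and returns true if the receiver changed.

import java.util.HashSet;
import java.util.Set;

public class RetainAllDemo {
    public static void main(String[] args) {
        Set<String> a = new HashSet<>(Set.of("x", "y", "z"));
        Set<String> b = Set.of("y", "z", "w");

        // Keeps only elements of a that are also in b; returns true because "x" is removed.
        boolean changed = a.retainAll(b);

        System.out.println(changed); // true
        System.out.println(a);       // [y, z] (iteration order unspecified for HashSet)
    }
}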
From source file:org.languagetool.rules.de.AgreementRule.java
@NotNull
private Set<String> retainCommonCategories(AnalyzedTokenReadings token1,
        AnalyzedTokenReadings token2, AnalyzedTokenReadings token3) {
    Set<GrammarCategory> categoryToRelaxSet = Collections.emptySet();
    Set<String> set1 = getAgreementCategories(token1, categoryToRelaxSet, true);
    if (set1 == null) {
        return Collections.emptySet();  // word not known, assume it's correct
    }
    boolean skipSol = !VIELE_WENIGE_LOWERCASE.contains(token1.getToken().toLowerCase());
    Set<String> set2 = getAgreementCategories(token2, categoryToRelaxSet, skipSol);
    if (set2 == null) {
        return Collections.emptySet();
    }
    Set<String> set3 = getAgreementCategories(token3, categoryToRelaxSet, true);
    if (set3 == null) {
        return Collections.emptySet();
    }
    set1.retainAll(set2);
    set1.retainAll(set3);
    return set1;
}
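The method above chains retainAll to intersect three category sets, mutating set1 in place; that is only safe if getAgreementCategories hands back a set the caller owns. A generic distillation of the pattern (a hypothetical helper, not part of LanguageTool), copying first so the inputs survive:

import java.util.HashSet;
import java.util.Set;

final class SetIntersection {
    // Intersect three sets without mutating any input (illustrative names).
    static <T> Set<T> intersectionOfThree(Set<T> first, Set<T> second, Set<T> third) {
        Set<T> result = new HashSet<>(first); // work on a copy so the inputs survive
        result.retainAll(second);
        result.retainAll(third);
        return result;
    }
}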
From source file:org.languagetool.rules.de.AgreementRule.java
@Nullable
private RuleMatch checkDetNounAgreement(AnalyzedTokenReadings token1,
        AnalyzedTokenReadings token2, AnalyzedSentence sentence) {
    // TODO: remove "-".equals(token2.getToken()) after the bug fix
    // see Daniel's comment from 20.12.2016 at https://github.com/languagetool-org/languagetool/issues/635
    if (token2.isImmunized() || NOUNS_TO_BE_IGNORED.contains(token2.getToken())
            || "-".equals(token2.getToken())) {
        return null;
    }
    Set<String> set1 = null;
    if (token1.getReadings().size() == 1
            && token1.getReadings().get(0).getPOSTag() != null
            && token1.getReadings().get(0).getPOSTag().endsWith(":STV")) {
        // catch the error in "Meiner Chef raucht."
        set1 = Collections.emptySet();
    } else {
        set1 = getAgreementCategories(token1);
    }
    if (set1 == null) {
        return null;  // word not known, assume it's correct
    }
    Set<String> set2 = getAgreementCategories(token2);
    if (set2 == null) {
        return null;
    }
    set1.retainAll(set2);
    RuleMatch ruleMatch = null;
    if (set1.isEmpty() && !isException(token1, token2)) {
        List<String> errorCategories = getCategoriesCausingError(token1, token2);
        String errorDetails = errorCategories.isEmpty()
                ? "Kasus, Genus oder Numerus"
                : String.join(" und ", errorCategories);
        String msg = "Möglicherweise fehlende grammatische Übereinstimmung zwischen Artikel und Nomen "
                + "bezüglich " + errorDetails + ".";
        String shortMsg = "Möglicherweise keine Übereinstimmung bezüglich " + errorDetails;
        ruleMatch = new RuleMatch(this, sentence, token1.getStartPos(), token2.getEndPos(), msg, shortMsg);
        /*try {
            // this will not give a match for compounds that are not in the dictionary...
            ruleMatch.setUrl(new URL("https://www.korrekturen.de/flexion/deklination/" + token2.getToken() + "/"));
        } catch (MalformedURLException e) {
            throw new RuntimeException(e);
        }*/
        AgreementSuggestor suggestor = new AgreementSuggestor(language.getSynthesizer(), token1, token2);
        List<String> suggestions = suggestor.getSuggestions();
        ruleMatch.setSuggestedReplacements(suggestions);
    }
    return ruleMatch;
}
From source file:com.redhat.rhn.domain.user.legacy.UserImpl.java
/** {@inheritDoc} */
public Set<Role> getRoles() {
    Set<Role> userRoles = new HashSet<Role>();
    for (Iterator<UserGroupMembers> i = groupMembers.iterator(); i.hasNext();) {
        UserGroupMembers ugm = i.next();
        userRoles.add(ugm.getUserGroup().getRole());
    }
    if (userRoles.contains(RoleFactory.ORG_ADMIN)) {
        Set<Role> orgRoles = org.getRoles();
        Set<Role> localImplied = new HashSet<Role>();
        localImplied.addAll(UserFactory.IMPLIEDROLES);
        localImplied.retainAll(orgRoles);
        userRoles.addAll(localImplied);
    }
    return Collections.unmodifiableSet(userRoles);
}
From source file:com.redhat.rhn.domain.user.legacy.UserImpl.java
/** {@inheritDoc} */
public Set<Role> getTemporaryRoles() {
    Set<Role> userRoles = new HashSet<Role>();
    for (Iterator<UserGroupMembers> i = groupMembers.iterator(); i.hasNext();) {
        UserGroupMembers ugm = i.next();
        if (ugm.getTemporary()) {
            userRoles.add(ugm.getUserGroup().getRole());
        }
    }
    if (userRoles.contains(RoleFactory.ORG_ADMIN)) {
        Set<Role> orgRoles = org.getRoles();
        Set<Role> localImplied = new HashSet<Role>();
        localImplied.addAll(UserFactory.IMPLIEDROLES);
        localImplied.retainAll(orgRoles);
        userRoles.addAll(localImplied);
    }
    return Collections.unmodifiableSet(userRoles);
}
From source file:com.redhat.rhn.domain.user.legacy.UserImpl.java
/** {@inheritDoc} */
public Set<Role> getPermanentRoles() {
    Set<Role> userRoles = new HashSet<Role>();
    for (Iterator<UserGroupMembers> i = groupMembers.iterator(); i.hasNext();) {
        UserGroupMembers ugm = i.next();
        if (!ugm.getTemporary()) {
            userRoles.add(ugm.getUserGroup().getRole());
        }
    }
    if (userRoles.contains(RoleFactory.ORG_ADMIN)) {
        Set<Role> orgRoles = org.getRoles();
        Set<Role> localImplied = new HashSet<Role>();
        localImplied.addAll(UserFactory.IMPLIEDROLES);
        localImplied.retainAll(orgRoles);
        userRoles.addAll(localImplied);
    }
    return Collections.unmodifiableSet(userRoles);
}
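All three UserImpl methods share the same closing step: copy the statically implied roles, intersect them with what the organization actually grants, then merge. The copy matters because retainAll mutates its receiver and IMPLIEDROLES is shared state. A minimal generic sketch of that step (a hypothetical helper, names illustrative):

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

final class ImpliedMerge {
    // Add only those implied items that are also available.
    static <T> Set<T> withImplied(Set<T> direct, Set<T> implied, Set<T> available) {
        Set<T> result = new HashSet<>(direct);
        Set<T> granted = new HashSet<>(implied); // copy, so the shared implied set stays intact
        granted.retainAll(available);            // keep only what is actually available
        result.addAll(granted);
        return Collections.unmodifiableSet(result);
    }
}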
From source file:beast.evolution.tree.SimpleRandomTree.java
public void doTheWork() {
    // find taxon sets we are dealing with
    taxonSets = new ArrayList<>();
    m_bounds = new ArrayList<>();
    distributions = new ArrayList<>();
    taxonSetIDs = new ArrayList<>();
    List<Boolean> onParent = new ArrayList<>();
    lastMonophyletic = 0;

    if (taxaInput.get() != null) {
        sTaxa.addAll(taxaInput.get().getTaxaNames());
    } else {
        sTaxa.addAll(m_taxonset.get().asStringList());
    }

    // pick up constraints from outputs, m_initial input tree and output tree, if any
    List<MRCAPrior> calibrations = new ArrayList<MRCAPrior>();
    calibrations.addAll(calibrationsInput.get());
    // pick up constraints in m_initial tree
    for (final Object plugin : getOutputs()) {
        if (plugin instanceof MRCAPrior && !calibrations.contains(plugin)) {
            calibrations.add((MRCAPrior) plugin);
        }
    }
    if (m_initial.get() != null) {
        for (final Object plugin : m_initial.get().getOutputs()) {
            if (plugin instanceof MRCAPrior && !calibrations.contains(plugin)) {
                calibrations.add((MRCAPrior) plugin);
            }
        }
    }

    for (final MRCAPrior prior : calibrations) {
        final TaxonSet taxonSet = prior.taxonsetInput.get();
        if (taxonSet != null && !prior.onlyUseTipsInput.get()) {
            final Set<String> bTaxa = new LinkedHashSet<>();
            if (taxonSet.asStringList() == null) {
                taxonSet.initAndValidate();
            }
            for (final String sTaxonID : taxonSet.asStringList()) {
                if (!sTaxa.contains(sTaxonID)) {
                    throw new IllegalArgumentException("Taxon <" + sTaxonID
                            + "> could not be found in list of taxa. Choose one of "
                            + Arrays.toString(sTaxa.toArray(new String[sTaxa.size()])));
                }
                bTaxa.add(sTaxonID);
            }
            final ParametricDistribution distr = prior.distInput.get();
            final Bound bounds = new Bound();
            if (distr != null) {
                List<BEASTInterface> plugins = new ArrayList<>();
                distr.getPredecessors(plugins);
                for (int i = plugins.size() - 1; i >= 0; i--) {
                    plugins.get(i).initAndValidate();
                }
                try {
                    final double offset = distr.offsetInput.get();
                    bounds.lower = Math.max(distr.inverseCumulativeProbability(0.0) + offset, 0.0);
                    bounds.upper = distr.inverseCumulativeProbability(1.0) + offset;
                    assert bounds.lower <= bounds.upper;
                } catch (MathException e) {
                    Log.warning.println("Could not set bounds in SimpleRandomTree::doTheWork : " + e.getMessage());
                }
            }
            if (prior.isMonophyleticInput.get() || bTaxa.size() == 1) {
                // add any monophyletic constraint
                boolean isDuplicate = false;
                for (int k = 0; k < lastMonophyletic; ++k) {
                    // assert prior.useOriginateInput.get().equals(onParent.get(k)) == (prior.useOriginateInput.get() == onParent.get(k));
                    if (bTaxa.size() == taxonSets.get(k).size() && bTaxa.equals(taxonSets.get(k))
                            && prior.useOriginateInput.get().equals(onParent.get(k))) {
                        if (distr != null) {
                            if (distributions.get(k) == null) {
                                distributions.set(k, distr);
                                m_bounds.set(k, bounds);
                                taxonSetIDs.set(k, prior.getID());
                            }
                        }
                        isDuplicate = true;
                    }
                }
                if (!isDuplicate) {
                    taxonSets.add(lastMonophyletic, bTaxa);
                    distributions.add(lastMonophyletic, distr);
                    onParent.add(lastMonophyletic, prior.useOriginateInput.get());
                    m_bounds.add(lastMonophyletic, bounds);
                    taxonSetIDs.add(lastMonophyletic, prior.getID());
                    lastMonophyletic++;
                }
            } else {
                // only calibrations with finite bounds are added
                if (!Double.isInfinite(bounds.lower) || !Double.isInfinite(bounds.upper)) {
                    taxonSets.add(bTaxa);
                    distributions.add(distr);
                    m_bounds.add(bounds);
                    taxonSetIDs.add(prior.getID());
                    onParent.add(prior.useOriginateInput.get());
                }
            }
        }
    }

    if (ICC) {
        for (int i = 0; i < lastMonophyletic; i++) {
            final Set<String> ti = taxonSets.get(i);
            for (int j = i + 1; j < lastMonophyletic; j++) {
                final Set<String> tj = taxonSets.get(j);
                boolean i_in_j = tj.containsAll(ti);
                boolean j_in_i = ti.containsAll(tj);
                if (i_in_j || j_in_i) {
                    boolean ok = true;
                    if (i_in_j && j_in_i) {
                        ok = (boolean) (onParent.get(i)) != (boolean) onParent.get(j);
                    }
                    assert ok : "" + i + ' ' + j + ' ' + ' ' + taxonSetIDs.get(i) + ' ' + taxonSetIDs.get(j);
                } else {
                    Set<String> tmp = new HashSet<>(tj);
                    tmp.retainAll(ti);
                    assert tmp.isEmpty();
                }
            }
        }
    }

    // assume all calibration constraints are Monophyletic
    // TODO: verify that this is a reasonable assumption
    lastMonophyletic = taxonSets.size();

    // sort constraints in increasing set inclusion order, i.e. such that if taxon set i is subset of taxon set j, then i < j
    for (int i = 0; i < lastMonophyletic; i++) {
        for (int j = i + 1; j < lastMonophyletic; j++) {
            final Set<String> taxai = taxonSets.get(i);
            final Set<String> taxaj = taxonSets.get(j);
            Set<String> intersection = new LinkedHashSet<>(taxai);
            intersection.retainAll(taxaj);
            if (intersection.size() > 0) {
                final boolean bIsSubset = taxai.containsAll(taxaj);
                final boolean bIsSubset2 = taxaj.containsAll(taxai);
                // sanity check: make sure either
                // o taxonset1 is subset of taxonset2 OR
                // o taxonset1 is superset of taxonset2 OR
                // o taxonset1 does not intersect taxonset2
                if (!(bIsSubset || bIsSubset2)) {
                    throw new IllegalArgumentException(
                            "333: Don't know how to generate a Random Tree for taxon sets that intersect, "
                                    + "but are not inclusive. Taxonset "
                                    + (taxonSetIDs.get(i) == null ? taxai : taxonSetIDs.get(i)) + " and "
                                    + (taxonSetIDs.get(j) == null ? taxaj : taxonSetIDs.get(j)));
                }
                // swap i & j if b1 subset of b2. If equal sub-sort on 'useOriginate'
                if (bIsSubset && (!bIsSubset2 || (onParent.get(i) && !onParent.get(j)))) {
                    swap(taxonSets, i, j);
                    swap(distributions, i, j);
                    swap(m_bounds, i, j);
                    swap(taxonSetIDs, i, j);
                    swap(onParent, i, j);
                }
            }
        }
    }

    if (ICC) {
        for (int i = 0; i < lastMonophyletic; i++) {
            final Set<String> ti = taxonSets.get(i);
            for (int j = i + 1; j < lastMonophyletic; j++) {
                final Set<String> tj = taxonSets.get(j);
                boolean ok = tj.containsAll(ti);
                if (ok) {
                    ok = !tj.equals(ti) || (!onParent.get(i) && onParent.get(j));
                    assert ok : "" + i + ' ' + j + ' ' + tj.equals(ti) + ' ' + taxonSetIDs.get(i) + ' ' + taxonSetIDs.get(j);
                } else {
                    Set<String> tmp = new HashSet<>(tj);
                    tmp.retainAll(ti);
                    assert tmp.isEmpty();
                }
            }
        }
    }

    for (int i = 0; i < lastMonophyletic; i++) {
        if (onParent.get(i)) {
            // make sure it is after constraint on node itself, if such exists
            assert (!(i + 1 < lastMonophyletic && taxonSets.get(i).equals(taxonSets.get(i + 1))
                    && onParent.get(i) && !onParent.get(i + 1)));
            // find something to attach to ....
            // find enclosing clade, if any. pick a non-intersecting clade in the enclosed without an onParent
            // constraint, or one whose onParent constraint is overlapping.
            final Set<String> iTaxa = taxonSets.get(i);
            int j = i + 1;
            Set<String> enclosingTaxa = sTaxa;
            {
                String someTaxon = iTaxa.iterator().next();
                for (/**/; j < lastMonophyletic; j++) {
                    if (taxonSets.get(j).contains(someTaxon)) {
                        enclosingTaxa = taxonSets.get(j);
                        break;
                    }
                }
            }
            final int enclosingIndex = (j == lastMonophyletic) ? j : j;
            Set<String> candidates = new HashSet<>(enclosingTaxa);
            candidates.removeAll(iTaxa);
            Set<Integer> candidateClades = new HashSet<>(5);
            List<String> canTaxa = new ArrayList<>();
            for (String c : candidates) {
                for (int k = enclosingIndex - 1; k >= 0; --k) {
                    if (taxonSets.get(k).contains(c)) {
                        if (!candidateClades.contains(k)) {
                            if (onParent.get(k)) {
                                if (!intersecting(m_bounds.get(k), m_bounds.get(i))) {
                                    break;
                                }
                            } else {
                                if (!(m_bounds.get(k).lower <= m_bounds.get(i).lower)) {
                                    break;
                                }
                            }
                            candidateClades.add(k);
                        }
                        break;
                    }
                    if (k == 0) {
                        canTaxa.add(c);
                    }
                }
            }
            final int sz1 = canTaxa.size();
            final int sz2 = candidateClades.size();
            if (sz1 + sz2 == 0 && i + 1 == enclosingIndex) {
                final Bound ebound = m_bounds.get(enclosingIndex);
                ebound.restrict(m_bounds.get(i));
            } else {
                assert sz1 + sz2 > 0;
                // prefer taxa over clades (less chance of clades useOriginate clashing)
                final int k = Randomizer.nextInt(sz1 > 0 ? sz1 : sz2);
                Set<String> connectTo;
                int insertPoint;
                if (k < sz1) {
                    // from taxa
                    connectTo = new HashSet<>(1);
                    connectTo.add(canTaxa.get(k));
                    insertPoint = i + 1;
                } else {
                    // from clade
                    final Iterator<Integer> it = candidateClades.iterator();
                    for (j = 0; j < k - sz1 - 1; ++j) {
                        it.next();
                    }
                    insertPoint = it.next();
                    connectTo = new HashSet<>(taxonSets.get(insertPoint));
                    insertPoint = Math.max(insertPoint, i) + 1;
                }
                final HashSet<String> cc = new HashSet<String>(connectTo);
                connectTo.addAll(taxonSets.get(i));
                if (!connectTo.equals(enclosingTaxa) || enclosingTaxa == sTaxa) {
                    // equal when clade already exists
                    taxonSets.add(insertPoint, connectTo);
                    distributions.add(insertPoint, distributions.get(i));
                    onParent.add(insertPoint, false);
                    m_bounds.add(insertPoint, m_bounds.get(i));
                    final String tid = taxonSetIDs.get(i);
                    taxonSetIDs.add(insertPoint, tid);
                    lastMonophyletic += 1;
                } else {
                    // we lose distribution i :(
                    final Bound ebound = m_bounds.get(enclosingIndex);
                    ebound.restrict(m_bounds.get(i));
                }
            }
            if (true) {
                taxonSets.set(i, new HashSet<>());
                distributions.set(i, null);
                m_bounds.set(i, new Bound());
                final String tid = taxonSetIDs.get(i);
                if (tid != null) {
                    taxonSetIDs.set(i, "was-" + tid);
                }
            }
        }
    }

    {
        int icur = 0;
        for (int i = 0; i < lastMonophyletic; ++i, ++icur) {
            final Set<String> ti = taxonSets.get(i);
            if (ti.isEmpty()) {
                icur -= 1;
            } else {
                if (icur < i) {
                    taxonSets.set(icur, taxonSets.get(i));
                    distributions.set(icur, distributions.get(i));
                    m_bounds.set(icur, m_bounds.get(i));
                    taxonSetIDs.set(icur, taxonSetIDs.get(i));
                    onParent.set(icur, onParent.get(i));
                }
            }
        }
        taxonSets.subList(icur, lastMonophyletic).clear();
        distributions.subList(icur, lastMonophyletic).clear();
        m_bounds.subList(icur, lastMonophyletic).clear();
        taxonSetIDs.subList(icur, lastMonophyletic).clear();
        onParent.subList(icur, lastMonophyletic).clear();
        lastMonophyletic = icur;
    }

    if (ICC) {
        for (int i = 0; i < lastMonophyletic; i++) {
            final Set<String> ti = taxonSets.get(i);
            for (int j = i + 1; j < lastMonophyletic; j++) {
                final Set<String> tj = taxonSets.get(j);
                boolean ok = tj.containsAll(ti);
                if (ok) {
                    ok = !tj.equals(ti) || (!onParent.get(i) && onParent.get(j));
                    assert ok : "" + i + ' ' + j + ' ' + taxonSetIDs.get(i) + ' ' + taxonSetIDs.get(j);
                } else {
                    Set<String> tmp = new HashSet<>(tj);
                    tmp.retainAll(ti);
                    assert tmp.isEmpty();
                }
            }
        }
    }

    // map parent child relationships between mono clades. nParent[i] is the immediate parent clade of i,
    // if any. An immediate parent is the smallest superset of i; children[i] is a list of all clades which
    // have i as a parent. The last one, standing for the virtual "root" of all monophyletic clades, is not
    // associated with any actual clade.
    final int[] nParent = new int[lastMonophyletic];
    children = new List[lastMonophyletic + 1];
    for (int i = 0; i < lastMonophyletic + 1; i++) {
        children[i] = new ArrayList<>();
    }
    for (int i = 0; i < lastMonophyletic; i++) {
        int j = i + 1;
        while (j < lastMonophyletic && !taxonSets.get(j).containsAll(taxonSets.get(i))) {
            j++;
        }
        nParent[i] = j;
        children[j].add(i);
    }

    // make sure the upper bound of a child does not exceed the upper bound of its parent
    for (int i = lastMonophyletic - 1; i >= 0; --i) {
        if (nParent[i] < lastMonophyletic) {
            if (m_bounds.get(i).upper > m_bounds.get(nParent[i]).upper) {
                m_bounds.get(i).upper = m_bounds.get(nParent[i]).upper - 1e-100;
                assert m_bounds.get(i).lower <= m_bounds.get(i).upper : i;
            }
        }
    }

    nodeCount = 2 * sTaxa.size() - 1;
    boundPerNode = new Bound[nodeCount];
    distPerNode = new ParametricDistribution[nodeCount];

    buildTree(sTaxa);
    assert nextNodeNr == nodeCount : "" + nextNodeNr + ' ' + nodeCount;

    double bm = branchMeanInput.get();
    if (bm < 0) {
        double maxMean = 0;
        for (ParametricDistribution distr : distPerNode) {
            if (distr != null) {
                double m = distr.getMean();
                if (maxMean < m)
                    maxMean = m;
            }
        }
        if (maxMean > 0) {
            double s = 0;
            for (int i = 2; i <= nodeCount; ++i) {
                s += 1.0 / i;
            }
            bm = s / maxMean;
        }
    }

    double rate = 1 / (bm < 0 ? 1 : bm);
    boolean succ = false;
    int ntries = 6;
    final double epsi = 0.01 / rate;
    double clamp = 1 - clampInput.get();
    while (!succ && ntries > 0) {
        try {
            succ = setHeights(rate, false, epsi, clamp);
        } catch (ConstraintViolatedException e) {
            throw new RuntimeException("Constraint failed: " + e.getMessage());
        }
        --ntries;
        rate *= 2;
        clamp /= 2;
    }
    if (!succ) {
        try {
            succ = setHeights(rate, true, 0, 0);
        } catch (ConstraintViolatedException e) {
            throw new RuntimeException("Constraint failed: " + e.getMessage());
        }
    }
    assert succ;

    internalNodeCount = sTaxa.size() - 1;
    leafNodeCount = sTaxa.size();

    HashMap<String, Integer> taxonToNR = null;
    // preserve node numbers where possible
    if (m_initial.get() != null) {
        taxonToNR = new HashMap<>();
        for (Node n : m_initial.get().getExternalNodes()) {
            taxonToNR.put(n.getID(), n.getNr());
        }
    }
    // re-assign node numbers
    setNodesNrs(root, 0, new int[1], taxonToNR);

    initArrays();
}
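The ICC consistency checks above use retainAll only to test disjointness: copy one set, intersect, assert empty. Distilled as a hypothetical helper; note that java.util.Collections.disjoint(a, b) performs the same test without the intermediate copy.

import java.util.HashSet;
import java.util.Set;

final class DisjointCheck {
    // Disjointness test via retainAll on a defensive copy (illustrative name).
    static <T> boolean areDisjoint(Set<T> a, Set<T> b) {
        Set<T> tmp = new HashSet<>(a); // copy, so neither input is mutated
        tmp.retainAll(b);
        return tmp.isEmpty();
    }
}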
From source file:org.rhq.enterprise.server.core.plugin.ProductPluginDeployer.java
/**
 * Registers newly detected plugins and their types.
 *
 * Only call this method when {@link #isReady} is true. This is a no-op if we are not ready.
 */
public void registerPlugins() {
    if (!this.isReady) {
        return;
    }

    for (Iterator<String> it = this.namesOfPluginsToBeRegistered.iterator(); it.hasNext();) {
        String pluginName = it.next();
        if (!isNewOrUpdated(pluginName)) {
            log.debug("Plugin [" + pluginName + "] has not been updated.");
            it.remove();
        }
    }

    if (this.namesOfPluginsToBeRegistered.isEmpty()) {
        log.debug("All agent plugins were already up to date in the database.");
        return;
    }

    Set<String> pluginsToBeRegistered = new HashSet<String>(this.namesOfPluginsToBeRegistered);
    log.info("Deploying [" + pluginsToBeRegistered.size() + "] new or updated agent plugins: "
            + pluginsToBeRegistered);

    PluginDependencyGraph dependencyGraph = buildDependencyGraph();
    StringBuilder errorBuffer = new StringBuilder();
    if (!dependencyGraph.isComplete(errorBuffer)) {
        log.error(errorBuffer.toString());
        if (log.isDebugEnabled()) {
            log.debug(dependencyGraph.toString());
        }
        // reduce the graph down to only those plugins and their deps that exist and only register those
        dependencyGraph = dependencyGraph.reduceGraph();
        pluginsToBeRegistered.retainAll(dependencyGraph.getPlugins());
    }

    if (pluginsToBeRegistered.size() > 0) {
        registerPlugins(dependencyGraph, pluginsToBeRegistered);
    }

    log.info("Plugin metadata updates are complete for [" + pluginsToBeRegistered.size() + "] plugins: "
            + pluginsToBeRegistered);
    this.namesOfPluginsToBeRegistered.removeAll(pluginsToBeRegistered);

    // load resource facets cache
    try {
        ResourceTypeManagerLocal typeManager = LookupUtil.getResourceTypeManager();
        typeManager.reloadResourceFacetsCache();
    } catch (Throwable t) {
        log.error("Could not load ResourceFacets cache", t);
    }

    // Trigger vacuums on some tables as the initial deployment might have changed a lot of things.
    // There are probably more tables involved though.
    // First wait to give Hibernate a chance to close all transactions etc.
    try {
        Thread.sleep(2000L);
    } catch (InterruptedException ignored) {
    }

    Subject superuser = LookupUtil.getSubjectManager().getOverlord();
    SystemManagerLocal systemManager = LookupUtil.getSystemManager();
    systemManager.vacuum(superuser, new String[] { "RHQ_MEASUREMENT_DEF", "RHQ_CONFIG_DEF",
            "RHQ_RESOURCE_TYPE", "RHQ_RESOURCE_TYPE_PARENTS", Plugin.TABLE_NAME });

    return;
}
From source file:annis.gui.SearchUI.java
public void evaluateCitation(String relativeUri) {
    Matcher m = citationPattern.matcher(relativeUri);
    if (m.matches()) {
        // AQL
        String aql = "";
        if (m.group(1) != null) {
            aql = m.group(1);
        }

        // CIDS
        Set<String> selectedCorpora = new HashSet<String>();
        if (m.group(2) != null) {
            String[] cids = m.group(2).split(",");
            selectedCorpora.addAll(Arrays.asList(cids));
        }
        // filter by actually available user corpora in order not to get any exception later
        WebResource res = Helper.getAnnisWebResource();
        List<AnnisCorpus> userCorpora = res.path("query").path("corpora").get(new AnnisCorpusListType());
        LinkedList<String> userCorporaStrings = new LinkedList<String>();
        for (AnnisCorpus c : userCorpora) {
            userCorporaStrings.add(c.getName());
        }
        selectedCorpora.retainAll(userCorporaStrings);

        // CLEFT and CRIGHT
        if (m.group(4) != null && m.group(6) != null) {
            int cleft = 0;
            int cright = 0;
            try {
                cleft = Integer.parseInt(m.group(4));
                cright = Integer.parseInt(m.group(6));
            } catch (NumberFormatException ex) {
                log.error("could not parse context value", ex);
            }
            queryController.setQuery(new PagedResultQuery(cleft, cright, 0, 10, null, aql, selectedCorpora));
        } else {
            queryController.setQuery(new Query(aql, selectedCorpora));
        }

        // remove all currently opened sub-windows
        Set<Window> all = new HashSet<Window>(getWindows());
        for (Window w : all) {
            removeWindow(w);
        }
    } else {
        showNotification("Invalid citation", Notification.Type.WARNING_MESSAGE);
    }
}
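Two details worth noting in this example: retainAll acts as a whitelist filter on the user's selection, and its parameter type is Collection<?>, so the argument need not be a Set (here a LinkedList is passed). In miniature (a hypothetical demo with illustrative values, assuming Java 9+ for List.of):

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class WhitelistFilterDemo {
    public static void main(String[] args) {
        Set<String> selected = new HashSet<>(List.of("corpusA", "corpusB", "corpusC"));
        List<String> available = List.of("corpusB", "corpusC", "corpusD");

        selected.retainAll(available); // the argument may be any Collection, not just a Set
        System.out.println(selected);  // [corpusB, corpusC] in some order
    }
}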
From source file:com.castlabs.csf.cff.CreateStreamingDeliveryTargetFileset.java
private FragmentIntersectionFinder getFragmentStartSamples(Map<Track, String> trackOriginalFilename)
        throws CommandAbortException {
    Set<Long> syncSamples = null;
    int numSamples = -1;
    for (Track track : trackOriginalFilename.keySet()) {
        if (numSamples < 0) {
            numSamples = track.getSamples().size();
        }
        if (numSamples != track.getSamples().size()) {
            throw new CommandAbortException("All Tracks need the same number of samples");
        }
    }
    for (Track track : trackOriginalFilename.keySet()) {
        if (track.getSyncSamples() != null && track.getSyncSamples().length > 0) {
            if (syncSamples == null) {
                syncSamples = new HashSet<Long>();
                for (long l : track.getSyncSamples()) {
                    syncSamples.add(l);
                }
            } else {
                Set<Long> syncSamples2 = new HashSet<Long>();
                for (long l : track.getSyncSamples()) {
                    syncSamples2.add(l);
                }
                syncSamples.retainAll(syncSamples2);
            }
        }
    }
    if (syncSamples != null) {
        List<Long> syncSampleList = new ArrayList<Long>();
        syncSampleList.addAll(syncSamples);
        Collections.sort(syncSampleList);
        final long[] fragmentStartSamples = new long[syncSamples.size()];
        for (int i = 0; i < fragmentStartSamples.length; i++) {
            fragmentStartSamples[i] = syncSampleList.get(i);
        }
        return new FragmentIntersectionFinder() {
            @Override
            public long[] sampleNumbers(Track track) {
                return fragmentStartSamples;
            }
        };
    } else {
        // they have all the same amount of samples ... easy
        Track t = trackOriginalFilename.keySet().iterator().next();
        double durationInSeconds = t.getDuration() / t.getTrackMetaData().getTimescale();
        int numberOfSamples = t.getSamples().size();
        int numberOfSamplesPer5Seconds = (int) Math.ceil(numberOfSamples / durationInSeconds * 5);
        final long[] fragmentStartSamples = new long[(int) Math
                .ceil(numberOfSamples / numberOfSamplesPer5Seconds) + 1];
        for (int i = 0; i < fragmentStartSamples.length; i++) {
            fragmentStartSamples[i] = i * numberOfSamplesPer5Seconds + 1;
        }
        return new FragmentIntersectionFinder() {
            @Override
            public long[] sampleNumbers(Track track) {
                return fragmentStartSamples;
            }
        };
    }
}
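The sync-sample logic above is an incremental intersection: the first track seeds the set, every later track narrows it with retainAll. A hedged sketch of just that step (a hypothetical helper with an illustrative signature):

import java.util.HashSet;
import java.util.List;
import java.util.Set;

final class SyncSampleIntersection {
    // Intersect the sync-sample numbers of several tracks.
    static Set<Long> commonSyncSamples(List<long[]> syncSamplesPerTrack) {
        Set<Long> common = null;
        for (long[] samples : syncSamplesPerTrack) {
            Set<Long> current = new HashSet<>();
            for (long s : samples) {
                current.add(s); // box each sample number into the working set
            }
            if (common == null) {
                common = current;          // first track seeds the result
            } else {
                common.retainAll(current); // each later track narrows it
            }
        }
        return common == null ? new HashSet<>() : common;
    }
}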
From source file:org.jactr.modules.pm.common.memory.map.DefaultFINSTFeatureMap.java
/**
 * @see org.jactr.modules.pm.common.memory.map.IFeatureMap#getCandidateRealObjects(ChunkTypeRequest, Set)
 */
public void getCandidateRealObjects(ChunkTypeRequest request, Set<IIdentifier> container) {
    FastSet<IIdentifier> tmp = FastSet.newInstance();
    boolean firstInsertion = true;
    for (IConditionalSlot slot : request.getConditionalSlots())
        if (slot.getName().equalsIgnoreCase(_attendedSlotName)) {
            tmp.clear();
            Object value = slot.getValue();
            switch (slot.getCondition()) {
            case IConditionalSlot.NOT_EQUALS:
                not(value, tmp);
                break;
            default:
                equals(value, tmp);
                break;
            }
            if (firstInsertion) {
                container.addAll(tmp);
                firstInsertion = false;
            } else
                container.retainAll(tmp);
        }
    FastSet.recycle(tmp);
}
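The firstInsertion flag is the same seed-then-narrow idiom as in the previous example, applied across matching slots: addAll for the first result set, retainAll for every one after. A minimal generic sketch (a hypothetical helper, not part of jACT-R):

import java.util.List;
import java.util.Set;

final class IntersectionAccumulator {
    // Accumulate the intersection of several result sets into container (illustrative).
    static <T> void intersectInto(Set<T> container, List<Set<T>> resultSets) {
        boolean first = true;
        for (Set<T> results : resultSets) {
            if (first) {
                container.addAll(results);    // seed with the first result set
                first = false;
            } else {
                container.retainAll(results); // narrow with each subsequent one
            }
        }
    }
}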