List of usage examples for java.util Collection remove
boolean remove(Object o);
From source file:org.apache.geode.cache.query.internal.index.AbstractIndex.java
/**
 * Removes {@code value} from {@code results} while evaluating a union or
 * intersection.
 *
 * When intermediate results are being tracked and this is an intersection, a
 * value actually present in {@code results} is moved into
 * {@code intermediateResults}; in every other case the value is simply dropped
 * from {@code results}.
 */
private void removeFromResultsWithUnionOrIntersection(Collection results, SelectResults intermediateResults, boolean isIntersection, Object value) {
    // No intermediate tracking, or a plain union: just drop the value.
    if (intermediateResults == null || !isIntersection) {
        results.remove(value);
        return;
    }
    // Intersection: only a value that is actually present gets moved across.
    if (((SelectResults) results).occurrences(value) > 0) {
        results.remove(value);
        intermediateResults.add(value);
    }
}
From source file:org.xwiki.tool.xar.XARMojo.java
/** * Adds files from a specific directory to an archive. It uses an existing package.xml to filter the files to be * added.//from w w w . j a va 2s. c o m * * @param archiver the archive in which the files will be added * @param sourceDir the directory whose contents will be added to the archive * @param packageXml the corresponding package.xml file * @throws Exception if the files cannot be added to the archive */ private void addFilesToArchive(ZipArchiver archiver, File sourceDir, File packageXml) throws Exception { Collection<String> documentNames; getLog().info(String.format("Using the existing package.xml descriptor at [%s]", packageXml.getPath())); try { documentNames = getDocumentNamesFromXML(packageXml); } catch (Exception e) { getLog().error(String.format("The existing [%s] is invalid.", PACKAGE_XML)); throw e; } // Next, we scan the hole directory and subdirectories for documents. Queue<File> fileQueue = new LinkedList<File>(); addContentsToQueue(fileQueue, sourceDir); while (!fileQueue.isEmpty() && !documentNames.isEmpty()) { File currentFile = fileQueue.poll(); if (currentFile.isDirectory()) { addContentsToQueue(fileQueue, currentFile); } else { String documentReference = XWikiDocument.getReference(currentFile); if (documentNames.contains(documentReference)) { // building the path the current file will have within the archive // // Note: DO NOT USE String.split since it requires a regexp. 
Under Windows XP, the FileSeparator is // '\' when not escaped is a special character of the regexp // String archivedFilePath = // currentFile.getAbsolutePath().split(sourceDir.getAbsolutePath() + File.separator)[1]; String archivedFilePath = currentFile.getAbsolutePath() .substring((sourceDir.getAbsolutePath() + File.separator).length()); archivedFilePath = archivedFilePath.replace(File.separatorChar, '/'); archiver.addFile(currentFile, archivedFilePath); documentNames.remove(documentReference); } } } if (!documentNames.isEmpty()) { StringBuilder errorMessage = new StringBuilder("The following documents could not be found: "); for (String name : documentNames) { errorMessage.append(name); errorMessage.append(" "); } throw new Exception(errorMessage.toString()); } archiver.addFile(packageXml, PACKAGE_XML); }
From source file:jdao.JDAO.java
public void insertList(String table, List<Map> colList, String pkField) throws Exception { this.checkReadOnly(); if (pkField == null) { JDAO.insertList(this.dbType, this.conn, this.queryRunner, table, colList, false, null); } else {//from w ww .j a v a 2 s. c om for (Map row : colList) { Collection updateFields = row.keySet(); updateFields.remove(pkField); JDAO.insert(this.dbType, this.conn, this.queryRunner, table, row, true, updateFields); } } }
From source file:jdao.JDAO.java
public <T> List<T> insertListWithPK(String table, List<Map> colList, String pkField, Class<T> clazz) throws Exception { this.checkReadOnly(); if (pkField == null) { return this.insertListWithPK(table, colList, false, null, clazz); } else {// ww w. java 2s. c o m ArrayList<T> res = new ArrayList<T>(); for (Map row : colList) { Collection updateFields = row.keySet(); updateFields.remove(pkField); res.add(JDAO.insertWithPK(this.dbType, this.conn, this.queryRunner, table, row, true, updateFields, clazz)); } return res; } }
From source file:uk.ac.ebi.intact.editor.services.curate.cvobject.CvObjectService.java
private void initialiseParents(IntactCvTerm child, Collection<OntologyTerm> parents) { List<OntologyTerm> originalParents = new ArrayList<OntologyTerm>(parents); for (OntologyTerm r : originalParents) { if (r instanceof IntactCvTerm && !isCvParentFullyLoaded((IntactCvTerm) r)) { IntactCvTerm reloaded = getIntactDao().getEntityManager().find(IntactCvTerm.class, ((IntactCvTerm) r).getAc()); if (reloaded != null) { initialiseXrefs(reloaded.getDbXrefs()); initialiseAnnotations(reloaded.getDbAnnotations()); Hibernate.initialize(reloaded.getChildren()); // detach reloaded object so it is not changed getIntactDao().getEntityManager().detach(reloaded); if (reloaded != r) { parents.remove(r); child.addParent(reloaded); }/*from ww w . ja va 2 s .c o m*/ } } } }
From source file:org.castor.cpa.test.test88.TestLazyLoading.java
/**
 * Tests iterating over a lazy-loaded Collection that has had data deleted:
 * a deleted element must not reappear during iteration, and every remaining
 * element must match the pre-delete snapshot.
 */
public void testIterWithDelete() throws PersistenceException {
    LOG.info("Running testIterWithDelete...");
    // Tests iterating over a lazy-loaded Collection that has
    // had data deleted
    ArrayList<LazyAddress> masterData = new ArrayList<LazyAddress>();
    Identity fullname = new Identity("First", "Person");
    LazyEmployee loadPerson;
    // First add a record, then commit
    _db.begin();
    loadPerson = _db.load(LazyEmployee.class, fullname);
    Collection<LazyAddress> addresses = loadPerson.getAddress();
    LazyAddress la = new LazyAddress();
    la.setId(999);
    la.setStreet("Rogue Street");
    la.setCity("Rogue City");
    la.setState("RS");
    la.setZip("10666");
    la.setPerson(loadPerson);
    addresses.add(la);
    _db.commit();
    // New transaction
    _db.begin();
    // test java.util.Collection.clear() for lazy loading (bug 801)
    loadPerson = _db.load(LazyEmployee.class, fullname);
    addresses = loadPerson.getAddress();
    // Store the list in the database at the start of the transaction,
    // for comparison purposes. Select a victim for deletion.
    Iterator<LazyAddress> it = addresses.iterator();
    // victim is last element to test bug 1022
    int victim = addresses.size() - 1;
    int recNo = 0;
    LazyAddress victimAddr = null;
    // Snapshot every element except the victim into masterData.
    while (it.hasNext()) {
        LazyAddress addr = it.next();
        if (recNo++ == victim) {
            victimAddr = addr;
        } else {
            masterData.add(addr);
        }
    }
    _db.rollback();
    // Now start over, and add something to the collection. Then try
    // iterating and clearing the collection
    _db.begin();
    loadPerson = _db.load(LazyEmployee.class, fullname);
    addresses = loadPerson.getAddress();
    addresses.remove(victimAddr);
    // The deleted victim must not show up; everything else must have been
    // present in the pre-delete snapshot.
    Iterator<LazyAddress> it2 = addresses.iterator();
    while (it2.hasNext()) {
        LazyAddress addr = it2.next();
        if (addr.equals(victimAddr)) {
            LOG.error("Error: Deleted record should not show up in iteration");
            fail("Error: Deleted record should not show up in iteration");
        } else if (!masterData.remove(addr)) {
            LOG.error("Error: unrecognized element from list in testIterWithDelete");
            fail("Error: unrecognized element from list in testIterWithDelete");
        }
    }
    _db.rollback();
    // Every snapshot element must have been consumed by the loop above.
    if (!masterData.isEmpty()) {
        LOG.error("Error: iteration/deletion failed in testIterWithDelete");
        fail("Error: iteration/deletion failed in testIterWithDelete");
    }
}
From source file:org.brandroid.openmanager.util.EventHandler.java
public void copyFile(final Collection<OpenPath> files, final OpenPath newPath, final Context mContext, final boolean copyOnly) { // for (OpenPath file : files) // copyFile(file, newPath.getChild(file.getName()), mContext); final EventType type = copyOnly ? COPY_TYPE : CUT_TYPE; for (final OpenPath file : files.toArray(new OpenPath[files.size()])) { if (checkDestinationExists(file, newPath, mContext, type)) files.remove(file); else//from w ww . j av a2 s . co m execute(new BackgroundWork(type, mContext, newPath), file); } }
From source file:org.hyperledger.fabric.sdkintegration.End2endIT.java
/**
 * Constructs, joins peers to, and initialises a Fabric channel, then
 * round-trips it through serialization to verify it can be persisted and
 * restored.
 */
Channel constructChannel(String name, HFClient client, SampleOrg sampleOrg) throws Exception {
    ////////////////////////////
    //Construct the channel
    //
    out("Constructing channel %s", name);
    //boolean doPeerEventing = false;
    boolean doPeerEventing = !testConfig.isRunningAgainstFabric10() && BAR_CHANNEL_NAME.equals(name);
    // boolean doPeerEventing = !testConfig.isRunningAgainstFabric10() && FOO_CHANNEL_NAME.equals(name);
    //Only peer Admin org
    SampleUser peerAdmin = sampleOrg.getPeerAdmin();
    client.setUserContext(peerAdmin);
    Collection<Orderer> orderers = new LinkedList<>();
    for (String orderName : sampleOrg.getOrdererNames()) {
        Properties ordererProperties = testConfig.getOrdererProperties(orderName);
        //example of setting keepAlive to avoid timeouts on inactive http2 connections.
        // Under 5 minutes would require changes to server side to accept faster ping rates.
        ordererProperties.put("grpc.NettyChannelBuilderOption.keepAliveTime", new Object[] { 5L, TimeUnit.MINUTES });
        ordererProperties.put("grpc.NettyChannelBuilderOption.keepAliveTimeout", new Object[] { 8L, TimeUnit.SECONDS });
        ordererProperties.put("grpc.NettyChannelBuilderOption.keepAliveWithoutCalls", new Object[] { true });
        orderers.add(client.newOrderer(orderName, sampleOrg.getOrdererLocation(orderName), ordererProperties));
    }
    //Just pick the first orderer in the list to create the channel.
    Orderer anOrderer = orderers.iterator().next();
    // Removed here so the "add remaining orderers" loop below doesn't add it twice.
    orderers.remove(anOrderer);
    String path = TEST_FIXTURES_PATH + "/sdkintegration/e2e-2Orgs/" + testConfig.getFabricConfigGenVers() + "/" + name + ".tx";
    ChannelConfiguration channelConfiguration = new ChannelConfiguration(new File(path));
    //Create channel that has only one signer that is this orgs peer admin. If channel creation policy needed more signature they would need to be added too.
    Channel newChannel = client.newChannel(name, anOrderer, channelConfiguration, client.getChannelConfigurationSignature(channelConfiguration, peerAdmin));
    out("Created channel %s", name);
    boolean everyother = true; //test with both cases when doing peer eventing.
    for (String peerName : sampleOrg.getPeerNames()) {
        String peerLocation = sampleOrg.getPeerLocation(peerName);
        Properties peerProperties = testConfig.getPeerProperties(peerName); //test properties for peer.. if any.
        if (peerProperties == null) {
            peerProperties = new Properties();
        }
        //Example of setting specific options on grpc's NettyChannelBuilder
        peerProperties.put("grpc.NettyChannelBuilderOption.maxInboundMessageSize", 9000000);
        Peer peer = client.newPeer(peerName, peerLocation, peerProperties);
        if (testConfig.isFabricVersionAtOrAfter("1.3")) {
            newChannel.joinPeer(peer, createPeerOptions().setPeerRoles(EnumSet.of(PeerRole.ENDORSING_PEER, PeerRole.LEDGER_QUERY, PeerRole.CHAINCODE_QUERY, PeerRole.EVENT_SOURCE))); //Default is all roles.
        } else {
            if (doPeerEventing && everyother) {
                newChannel.joinPeer(peer, createPeerOptions().setPeerRoles(EnumSet.of(PeerRole.ENDORSING_PEER, PeerRole.LEDGER_QUERY, PeerRole.CHAINCODE_QUERY, PeerRole.EVENT_SOURCE))); //Default is all roles.
            } else {
                // Set peer to not be all roles but eventing.
                newChannel.joinPeer(peer, createPeerOptions().setPeerRoles(EnumSet.of(PeerRole.ENDORSING_PEER, PeerRole.LEDGER_QUERY, PeerRole.CHAINCODE_QUERY)));
            }
        }
        out("Peer %s joined channel %s", peerName, name);
        everyother = !everyother;
    }
    //just for testing ...
    if (doPeerEventing || testConfig.isFabricVersionAtOrAfter("1.3")) {
        // Make sure there is one of each type peer at the very least.
        assertFalse(newChannel.getPeers(EnumSet.of(PeerRole.EVENT_SOURCE)).isEmpty());
        assertFalse(newChannel.getPeers(PeerRole.NO_EVENT_SOURCE).isEmpty());
    }
    for (Orderer orderer : orderers) { //add remaining orderers if any.
        newChannel.addOrderer(orderer);
    }
    for (String eventHubName : sampleOrg.getEventHubNames()) {
        final Properties eventHubProperties = testConfig.getEventHubProperties(eventHubName);
        eventHubProperties.put("grpc.NettyChannelBuilderOption.keepAliveTime", new Object[] { 5L, TimeUnit.MINUTES });
        eventHubProperties.put("grpc.NettyChannelBuilderOption.keepAliveTimeout", new Object[] { 8L, TimeUnit.SECONDS });
        EventHub eventHub = client.newEventHub(eventHubName, sampleOrg.getEventHubLocation(eventHubName), eventHubProperties);
        newChannel.addEventHub(eventHub);
    }
    newChannel.initialize();
    out("Finished initialization channel %s", name);
    //Just checks if channel can be serialized and deserialized .. otherwise this is just a waste :)
    byte[] serializedChannelBytes = newChannel.serializeChannel();
    newChannel.shutdown(true);
    return client.deSerializeChannel(serializedChannelBytes).initialize();
}
From source file:uk.ac.ebi.mdk.apps.tool.Align2Reference.java
/**
 * Aligns a query reconstruction against a reference reconstruction: first the
 * metabolomes (on a dedicated thread, via a stack of hash-code and name
 * matchers), then the reactomes (transport reactions skipped). Unmatched and
 * mismatched metabolites are dumped to temp TSV files.
 */
@Override
public void process() {
    System.out.print("Reading query...");
    final Reconstruction query = getReconstruction("query");
    System.out.println("done");
    System.out.print("Reading reference...");
    final Reconstruction reference = getReconstruction("reference");
    System.out.println("done");
    System.out.printf(" Query reconstruction %20s %6s,%6s\n", query.getAccession(), query.getMetabolome().size(), query.getReactome().size());
    System.out.printf("Reference reconstruction %20s %6s,%6s\n", reference.getAccession(), reference.getMetabolome().size(), reference.getReactome().size());
    if (has("profile")) {
        // break point for starting visual vm
        Scanner scanner = new Scanner(System.in);
        System.out.print("Ready to go? [y/n]:\n");
        while (!scanner.nextLine().equalsIgnoreCase("y")) {
            // await signal
            System.out.println("Okay, let me know");
            System.out.print("Ready to go? [y/n]:");
        }
    }
    MolecularHashFactory.getInstance().setDepth(1);
    final EntityAligner<Metabolite> aligner = new MappedEntityAligner<Metabolite>(reference.getMetabolome().toList(), false);
    // Matchers kept in a list as well, so the mismatch report below can show
    // the per-matcher hash codes for both sides.
    final List<MetaboliteHashCodeMatcher> hashCodeMatchers = new ArrayList<MetaboliteHashCodeMatcher>();
    hashCodeMatchers.add(new MetaboliteHashCodeMatcher(AtomicNumberSeed.class, BondOrderSumSeed.class, StereoSeed.class, ConnectedAtomSeed.class, ChargeSeed.class));
    hashCodeMatchers.add(new MetaboliteHashCodeMatcher(AtomicNumberSeed.class, BondOrderSumSeed.class, ConnectedAtomSeed.class, StereoSeed.class));
    hashCodeMatchers.add(new MetaboliteHashCodeMatcher(AtomicNumberSeed.class, BondOrderSumSeed.class, ConnectedAtomSeed.class, ChargeSeed.class));
    // Matchers are pushed from most to least strict; the aligner tries them in order.
    aligner.push(new DirectMatcher<Metabolite>());
    aligner.push(new MetaboliteHashCodeMatcher(AtomicNumberSeed.class, BondOrderSumSeed.class, StereoSeed.class, ConnectedAtomSeed.class, ChargeSeed.class));
    aligner.push(new MetaboliteHashCodeMatcher(AtomicNumberSeed.class, BondOrderSumSeed.class, ConnectedAtomSeed.class, StereoSeed.class));
    aligner.push(new MetaboliteHashCodeMatcher(AtomicNumberSeed.class, BondOrderSumSeed.class, ConnectedAtomSeed.class, ChargeSeed.class));
    aligner.push(new MetaboliteHashCodeMatcher(AtomicNumberSeed.class, BondOrderSumSeed.class, ConnectedAtomSeed.class));
    aligner.push(new NameMatcher<Metabolite>());
    aligner.push(new NameMatcher<Metabolite>(true, true));
    final EntityMatcher<Metabolite, ?> nameMatcher = new NameMatcher<Metabolite>(true, true);
    // Metabolome alignment runs on its own (named) thread, then is joined
    // immediately — presumably for profiling/thread naming; verify.
    Thread t = new Thread(new Runnable() {
        @Override
        public void run() {
            Collection<Metabolite> unmatched = new ArrayList<Metabolite>();
            Collection<Multimap<Metabolite, Metabolite>> mismatched = new ArrayList<Multimap<Metabolite, Metabolite>>();
            int matched = 0;
            long start = System.currentTimeMillis();
            for (Metabolite m : query.getMetabolome()) {
                List<Metabolite> matches = aligner.getMatches(m);
                matched += matches.isEmpty() ? 0 : 1;
                if (matches.isEmpty()) {
                    unmatched.add(m);
                }
                // for (Metabolite r : matches) {
                //     if (!nameMatcher.matches(m, r)) {
                //         Multimap multimap = HashMultimap.create();
                //         multimap.putAll(m, matches);
                //         mismatched.add(multimap);
                //         break;
                //     }
                // }
            }
            long end = System.currentTimeMillis();
            System.out.println("Completed in " + (end - start) + " ms");
            System.out.println("Matched " + matched + "/" + query.getMetabolome().size() + " entities");
            System.out.println("Structure mismatch " + mismatched.size());
            // Dump unmatched metabolites to a temp TSV for inspection.
            try {
                File tmp = File.createTempFile("unmatched", ".tsv");
                CSVWriter writer = new CSVWriter(new FileWriter(tmp), '\t');
                for (Metabolite m : unmatched) {
                    writer.writeNext(new String[] { m.getAccession(), m.getName(), m.getAnnotationsExtending(CrossReference.class).toString() });
                }
                writer.close();
                System.out.println("Unmatched entries written to: " + tmp);
            } catch (IOException e) {
                e.printStackTrace();
            }
            // Dump mismatched pairs (always empty while the block above is
            // commented out) with per-matcher hash codes for both sides.
            try {
                File tmp = File.createTempFile("miss-matched", ".tsv");
                CSVWriter writer = new CSVWriter(new FileWriter(tmp), '\t');
                for (Multimap<Metabolite, Metabolite> emap : mismatched) {
                    for (Map.Entry<Metabolite, Metabolite> e : emap.entries()) {
                        List<Set<Integer>> qh = new ArrayList<Set<Integer>>();
                        List<Set<Integer>> rh = new ArrayList<Set<Integer>>();
                        for (MetaboliteHashCodeMatcher matcher : hashCodeMatchers) {
                            qh.add(matcher.calculatedMetric(e.getKey()));
                        }
                        for (MetaboliteHashCodeMatcher matcher : hashCodeMatchers) {
                            rh.add(matcher.calculatedMetric(e.getValue()));
                        }
                        writer.writeNext(new String[] { e.getKey().getAccession(), e.getKey().getName(), e.getKey().getAnnotationsExtending(CrossReference.class).toString(), e.getValue().getAccession(), e.getValue().getName(), e.getValue().getAnnotationsExtending(CrossReference.class).toString(), "", Joiner.on(", ").join(qh), Joiner.on(", ").join(rh) });
                    }
                }
                writer.close();
                System.out.println("Miss-matched entries written to: " + tmp);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    });
    t.setName("METABOLOME ALIGNMENT");
    t.start();
    try {
        t.join();
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
    // Count how many reactions each query metabolite participates in and print
    // the most common ones (threshold 40 is arbitrary diagnostic output).
    final Map<Metabolite, Integer> countMap = new HashMap<Metabolite, java.lang.Integer>();
    Reactome reactome = query.getReactome();
    for (Metabolite m : query.getMetabolome()) {
        countMap.put(m, reactome.participatesIn(m).size());
    }
    System.out.println("Most common metabolites:");
    for (Map.Entry<Metabolite, Integer> e : entriesSortedByValues(countMap)) {
        if (e.getValue() > 40) {
            System.out.println(e.getKey() + ":" + e.getKey().hashCode() + ":" + e.getValue());
        }
    }
    // "Currency" metabolites (H+, water, CO2, ...) are ignored when comparing
    // reaction participants below.
    Set<Metabolite> queryCurrencyMetabolites = new HashSet<Metabolite>();
    Set<Metabolite> referenceCurrencyMetabolites = new HashSet<Metabolite>();
    queryCurrencyMetabolites.addAll(query.getMetabolome().ofName("H+"));
    queryCurrencyMetabolites.addAll(query.getMetabolome().ofName("H2O"));
    queryCurrencyMetabolites.addAll(query.getMetabolome().ofName("CO2"));
    queryCurrencyMetabolites.addAll(query.getMetabolome().ofName("ammonium"));
    queryCurrencyMetabolites.addAll(query.getMetabolome().ofName("ammonia"));
    referenceCurrencyMetabolites.addAll(reference.getMetabolome().ofName("H+"));
    referenceCurrencyMetabolites.addAll(reference.getMetabolome().ofName("H2O"));
    referenceCurrencyMetabolites.addAll(reference.getMetabolome().ofName("CO2"));
    referenceCurrencyMetabolites.addAll(reference.getMetabolome().ofName("ammonium"));
    referenceCurrencyMetabolites.addAll(reference.getMetabolome().ofName("ammonia"));
    referenceCurrencyMetabolites.addAll(reference.getMetabolome().ofName("Phosphate"));
    int count = 0;
    int transport = 0;
    System.out.println();
    System.out.println("| REACTOME ALIGNMENT |");
    // Reaction matchers pushed from most to least strict, progressively
    // ignoring direction and currency metabolites.
    EntityAligner<MetabolicReaction> reactionAligner = new MappedEntityAligner<MetabolicReaction>(reference.reactome().toList());
    reactionAligner.push(new ReactionMatcher(aligner));
    reactionAligner.push(new ReactionMatcher(aligner, false));
    reactionAligner.push(new ReactionMatcher(aligner, true, Collections.singleton(reference.getMetabolome().ofName("H+").iterator().next())));
    reactionAligner.push(new ReactionMatcher(aligner, false, Collections.singleton(reference.getMetabolome().ofName("H+").iterator().next())));
    reactionAligner.push(new ReactionMatcher(aligner, false, new HashSet<Metabolite>(Arrays.asList(reference.getMetabolome().ofName("H+").iterator().next(), reference.getMetabolome().ofName("H2O").iterator().next()))));
    reactionAligner.push(new ReactionMatcher(aligner, false, referenceCurrencyMetabolites));
    for (MetabolicReaction reaction : reactome) {
        // skip transport reactsions for now
        if (TransportReactionUtil.isTransport(reaction)) {
            transport++;
            continue;
        }
        System.out.println(reaction.getIdentifier() + ": " + reaction);
        Collection<MetabolicReaction> matches = reactionAligner.getMatches(reaction);
        for (MetabolicReaction rxnMatch : matches) {
            System.out.println("\t" + rxnMatch);
        }
        count += matches.isEmpty() ? 1 : 0;
        // Deliberate debug gate: everything below in this loop is disabled.
        // (The `if (true)` form keeps the compiler from flagging the rest as
        // unreachable.) It is an older participant-overlap matching strategy.
        if (true)
            continue;
        // Count, per reference reaction, how many of this reaction's
        // participants map to one of its metabolites.
        Map<Identifier, MutableInt> reactionReferences = new HashMap<Identifier, MutableInt>();
        for (Participant<Metabolite, ?> p : reaction.getParticipants()) {
            Metabolite m = p.getMolecule();
            System.out.print("\t" + m.getName() + " == ");
            for (Metabolite r : aligner.getMatches(m)) {
                System.out.print(r + " ");
                for (Reaction rxnRef : reference.participatesIn(r)) {
                    Identifier identifier = rxnRef.getIdentifier();
                    if (!reactionReferences.containsKey(identifier)) {
                        reactionReferences.put(identifier, new MutableInt());
                    }
                    reactionReferences.get(identifier).increment();
                }
            }
            System.out.println();
        }
        Map<Identifier, MetabolicReaction> idToReaction = new HashMap<Identifier, MetabolicReaction>();
        for (MetabolicReaction r : reference.reactome()) {
            idToReaction.put(r.getIdentifier(), r);
        }
        System.out.println("Candidate matches for " + reaction);
        for (Map.Entry<Identifier, MutableInt> e : reactionReferences.entrySet()) {
            int nParticipants = e.getValue().toInteger();
            if (nParticipants >= adjustedCount(reaction, queryCurrencyMetabolites)) {
                Collection<MetabolicParticipant> refps = adjustedParticipants(idToReaction.get(e.getKey()), referenceCurrencyMetabolites);
                boolean show = true;
                MetabolicReaction referenceReaction = idToReaction.get(e.getKey());
                System.out.println(referenceReaction);
                // A candidate only "shows" when every (non-currency) query
                // participant maps 1:1 onto a distinct reference participant.
                for (Participant<Metabolite, ?> p : adjustedParticipants(reaction, queryCurrencyMetabolites)) {
                    List<Metabolite> referenceMetabolites = aligner.getMatches(p.getMolecule());
                    if (referenceMetabolites.isEmpty()) {
                        // missing reference
                        show = false;
                        break;
                    }
                    if (referenceMetabolites.size() > 1) {
                        // complex case
                        show = false;
                        break;
                    }
                    Metabolite r = referenceMetabolites.get(0);
                    boolean found = false;
                    MetabolicParticipant remove = null;
                    for (MetabolicParticipant rp : refps) {
                        if (rp.getMolecule().equals(r)) {
                            found = true;
                            remove = rp;
                            break;
                        }
                    }
                    if (!found) {
                        show = false;
                    } else {
                        refps.remove(remove);
                    }
                }
                // matches
                if (show && refps.isEmpty()) {
                    System.out.println("\t [match] " + referenceReaction);
                    // MetabolicReaction rxn1 = m2.calculatedMetric(reaction).iterator().readNext();
                    // MetabolicReaction rxn2 = m2.calculatedMetric(referenceReaction).iterator().readNext();
                    // System.out.println(rxn1.hashCode());
                    // System.out.println(rxn2.hashCode());
                    // System.out.println(rxn1.equals(rxn2));
                }
            }
        }
    }
    System.out.println(count + "/" + query.getReactome().size() + " were not matched (transport reactions skipped by default) n transport reactions = " + transport);
}
From source file:org.apache.flex.compiler.internal.targets.SWCTarget.java
/** * For compilation units with the same absolute source path, filter based on * the source path. The compilation unit found on the highest priority * source path wins. The rest of the compilation units with qnames are * discared. If a unit is not on the source path or does not have a qname or * more than one qname, then let it thru the filter. * /*from ww w . j a v a 2 s . c o m*/ * @param compilationUnitsForFile list of compilation units to filter. * @return filtered compilation units. * @throws InterruptedException */ private Collection<ICompilationUnit> filterUnitsBasedOnSourcePath( Collection<ICompilationUnit> compilationUnitsForFile) throws InterruptedException { List<ICompilationUnit> sourcePathUnits = new ArrayList<ICompilationUnit>(compilationUnitsForFile); boolean foundHighestPriorityUnit = false; for (File sourcePath : flexProject.getSourcePath()) { for (ICompilationUnit unit : sourcePathUnits) { // We only care about filtering units on the source path // that follow the single definition rule. UnitType unitType = unit.getCompilationUnitType(); if (unitType == UnitType.AS_UNIT || unitType == UnitType.FXG_UNIT || unitType == UnitType.MXML_UNIT || unitType == UnitType.CSS_UNIT) { Collection<String> qnames = unit.getQualifiedNames(); if (qnames.size() > 1) continue; String unitQname = qnames.isEmpty() ? "" : qnames.iterator().next(); String computedQname = SourcePathManager.computeQName(sourcePath, new File(unit.getAbsoluteFilename())); if (unitQname.equals(computedQname)) { // We found a unit on the source path. Only keep the // first unit found on the source path and remove the // others. if (foundHighestPriorityUnit) compilationUnitsForFile.remove(unit); foundHighestPriorityUnit = true; break; // should only be one compilation unit on a source path } } } } return compilationUnitsForFile; }