Usage examples for java.util.HashSet removeAll
boolean removeAll(Collection<?> c);
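removeAll removes from the set every element that is also contained in the argument collection and returns true if the set changed as a result of the call. A minimal, self-contained sketch of that behavior (illustrative names only):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RemoveAllBasics {
    public static void main(String[] args) {
        Set<String> ids = new HashSet<>(Arrays.asList("a", "b", "c", "d"));
        Set<String> toDrop = new HashSet<>(Arrays.asList("b", "d", "x"));

        // Removes "b" and "d"; "x" is not in 'ids' and is silently ignored.
        boolean changed = ids.removeAll(toDrop);

        System.out.println(changed); // true
        System.out.println(ids);     // "a" and "c" remain (iteration order is unspecified)
    }
}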
From source file:it.iit.genomics.cru.igb.bundles.mi.business.MIWorker.java
@Override public ArrayList<MIResult> doInBackground() { // Display the log tab MIView.getInstance().getResultsTabbedPan().setSelectedIndex(0); UniprotkbUtils uniprotUtil = UniprotkbUtils.getInstance(query.getTaxid()); // Create a new Symmetry manager for each query symManager = new MISymManager(this.trackId); Collection<SeqSymmetry> selectedSyms = query.getSelectedSymmetries(); // Initialize progress property. progressBar.setIndeterminate(false); ProgressManager progressManager = new ProgressManager(5); setProgress(progressManager.getProgress()); Set<String> queryUniprotAcs = new HashSet<>(); Set<String> targetUniprotAcs = new HashSet<>(); // interactions MapOfMap<String, String> uniprotAc2uniprotAcs; // Interaction found ArrayList<MIResult> resultsInBackground = new ArrayList<>(); logAndPublish("map selection to genome and proteins.."); // Step 1//ww w .jav a 2 s. c o m progressManager.nextMajorStep(selectedSyms.size()); // Get gene Symmetries covered by query symmetries // don't create container at the moment: many genes syms for a single // gene may be present // Order list of syms SymListOrderer list = new SymListOrderer(); for (SeqSymmetry querySym : selectedSyms) { list.addSymmetry(querySym); } for (BioSeq chr : list.getSequences()) { ArrayList<SeqSymmetry> querySyms = list.getSymmetries(chr); getGenes(querySyms, chr, true); // for (MIGene gene : getGenes(querySyms, chr, true)) { // // load exons (we only need this for the selected ones) // miGene2selectedSyms.add(gene, querySym); // } progressManager.nextStep(); setProgress(progressManager.getProgress()); } // Step 2 progressManager.nextMajorStep(miGene2selectedSyms.keySet().size()); // Associate selected residues for (MIGene gene : miGene2selectedSyms.keySet()) { logAndPublish("Associate residues to " + gene.getID()); MoleculeEntry protein = gene.getProtein(); if (protein != null) { queryUniprotAcs.add(protein.getUniprotAc()); MISymContainer container = symManager.getByProtein(protein); symManager.addGeneSymmetry(container, gene); symManager.addSelectedSymmetry(container, gene); } else { igbLogger.getLogger().warn("No protein for {0}", gene.getID()); } progressManager.nextStep(); setProgress(progressManager.getProgress()); } // Step 3 progressManager.nextMajorStep(queryUniprotAcs.size()); // Get interactors uniprotAc2uniprotAcs = new MapOfMap<>(queryUniprotAcs); logAndPublish("get interactions"); HashMap<String, MoleculeEntry> targetUniprotEntries = new HashMap<>(); InteractionManager interactors = new InteractionManager(); for (String ac : queryUniprotAcs) { logAndPublish("Get interactions for " + ac); if (ac == null) { continue; } try { if (false == PsicquicInitWorker.nullServer.equals(query.getPsiquicServer()) && null != query.getPsiquicServer()) { for (Interaction interaction : PsicquicUtils.getInstance() .getInteractors(query.getPsiquicServer(), ac)) { interactors.merge(interaction); } } } catch (BridgesRemoteAccessException be) { igbLogger.severe("Cannot access PSICQUIC server!"); break; } // // Add interactors from User structures? // if (null != query.getUserStructuresPath() && query.searchUserStructures()) { // Interactome3DLocalRepository userStructures = UserStructuresManager.getInstance() // .getUserRepository(query.getUserStructuresPath()); // for (String interactorAc : userStructures.getInteractors(ac)) { // interactors.getOrCreateInteraction(ac, interactorAc).addType(INTERACTION_TYPE_I3D); // uniprotNeedMapping.add(interactorAc); // } // } // Add interactors from I3D? 
if (query.searchInteractome3D() || query.searchDSysMap()) { // Check or download I3D interaction file // get it from local repository? Interactome3DLocalRepository userStructures; // System.out.println("I3D cache: " + MIBundleConfiguration.getInstance().getI3DStructuresDirectory()); if (null != MIBundleConfiguration.getInstance().getI3DStructuresDirectory()) { userStructures = UserStructuresManager.getInstance() .getUserRepository(MIBundleConfiguration.getInstance().getI3DStructuresDirectory()); } else { I3DDownload download = new I3DDownload(MIBundleConfiguration.getInstance().getCachePath()); if (false == download.isDatDownloaded(query.getTaxid())) { logAndPublish("download interactions from Interactome3D"); download.downloadDat(query.getTaxid()); } // get interactions userStructures = UserStructuresManager.getInstance() .getUserRepository(download.getI3DdatPath(query.getTaxid())); } for (String interactorAc : userStructures.getInteractors(ac)) { interactors.getOrCreateInteraction(ac, interactorAc) .addType("direct interaction (Interactome3D)"); uniprotNeedMapping.add(interactorAc); } } // add interactors from PDB structures if (query.searchPDB() || query.searchPDBLocal() || query.searchEPPIC()) { MoleculeEntry entry = symManager.getByProteinAc(ac).getEntry(); PDBWSClient client = new PDBWSClient(); // Do only 10 by 10 List<String> pdbs = new ArrayList<>(); pdbs.addAll(entry.getPdbs()); while (false == pdbs.isEmpty()) { List<String> subset = pdbs.subList(0, Math.min(10, pdbs.size())); pdbs = pdbs.subList(Math.min(10, pdbs.size()), pdbs.size()); if (query.searchPPI() || query.searchNucleicAcid()) { MoleculeDescription molDesc; try { molDesc = client.getDescription(subset); } catch (BridgesRemoteAccessException be) { igbLogger.severe("Cannot access PDB!"); break; } if (molDesc != null) { for (StructureID structureId : molDesc.getStructureId()) { for (Polymer polymer : structureId.getPolymers()) { if (polymer.getPolymerDescription() == null) { igbLogger.severe("No description for " + structureId.getId()); } if (null != polymer.getType()) { switch (polymer.getType()) { case "protein": if (query.searchPPI() && null != polymer.getMacromolecule()) { String proteinAc = polymer.getMacromolecule().getAccession().get(0); if (false == proteinAc.equals(entry.getUniprotAc()) || polymer.getChains().size() > 1) { interactors.getOrCreateInteraction(ac, proteinAc) .addType(INTERACTION_TYPE_PDB); uniprotNeedMapping.add(ac); } } break; case "dna": if (false == query.searchNucleicAcid()) { break; } // Merge all DNA entries, use "DNA // as name rather that the // desciption MISymContainer dnaSym = symManager .getByProteinAc(MoleculeEntry.TAXID_DNA); uniprotNeedMapping.add(ac); interactors.getOrCreateInteraction(ac, MoleculeEntry.TAXID_DNA) .addType(INTERACTION_TYPE_PDB); if (dnaSym == null) { MoleculeEntry dnaEntry = new MoleculeEntry(MoleculeEntry.TAXID_DNA); dnaEntry.setSequence(""); dnaEntry.setTaxid(MoleculeEntry.TAXID_DNA); targetUniprotEntries.put(MoleculeEntry.TAXID_DNA, dnaEntry); dnaEntry.addGeneName(MoleculeEntry.TAXID_DNA); dnaSym = symManager.getByProtein(dnaEntry); } MoleculeEntry dnaEntry = dnaSym.getEntry(); for (Chain chain : polymer.getChains()) { ChainMapping chainMapping = new ChainMapping(structureId.getId(), chain.getId(), 0, 0); dnaEntry.addChain(structureId.getId(), chainMapping, "unspecified"); } break; case "rna": if (false == query.searchNucleicAcid()) { break; } uniprotNeedMapping.add(ac); // Merge all RNA entries, use "RNA // as name rather that the // desciption MISymContainer 
rnaSym = symManager .getByProteinAc(MoleculeEntry.TAXID_RNA); interactors.getOrCreateInteraction(ac, MoleculeEntry.TAXID_RNA) .addType(INTERACTION_TYPE_PDB); if (rnaSym == null) { MoleculeEntry rnaEntry = new MoleculeEntry(MoleculeEntry.TAXID_RNA); rnaEntry.setSequence(""); rnaEntry.setTaxid(MoleculeEntry.TAXID_RNA); targetUniprotEntries.put(MoleculeEntry.TAXID_RNA, rnaEntry); rnaEntry.addGeneName(MoleculeEntry.TAXID_RNA); rnaSym = symManager.getByProtein(rnaEntry); } MoleculeEntry rnaEntry = rnaSym.getEntry(); for (Chain chain : polymer.getChains()) { ChainMapping chainMapping = new ChainMapping(structureId.getId(), chain.getId(), 0, 0); rnaEntry.addChain(structureId.getId(), chainMapping, "unspecified"); } break; } } } } } } if (query.searchLigands() && false == query.searchEPPIC()) { try { for (Ligand ligand : client.getLigands(subset)) { /** * Only non polymer ligands */ if (false == ligand.isNonPolymer()) { continue; } int numAtoms = 0; for (String atom : ligand.getFormula().split(" ")) { String num = atom.replaceAll("\\D+", "").trim(); if ("".equals(num)) { numAtoms++; } else { numAtoms += Integer.parseInt(num); } } if (numAtoms <= 10) { igbLogger.info("Skip ligand: " + ligand.getFormula()); continue; } uniprotNeedMapping.add(ac); MISymContainer misym = symManager.getByProteinAc(ligand.getChemicalName()); interactors.getOrCreateInteraction(ac, ligand.getChemicalName()) .addType(INTERACTION_TYPE_PDB); if (misym == null) { MoleculeEntry ligandEntry = new MoleculeEntry(ligand.getChemicalName()); ligandEntry.setSequence(""); ligandEntry.setTaxid(MoleculeEntry.TAXID_LIGAND); ligandEntry.addGeneName(ligand.getChemicalId()); targetUniprotEntries.put(ligand.getChemicalName(), ligandEntry); misym = symManager.getByProtein(ligandEntry); } MoleculeEntry ligandEntry = misym.getEntry(); ChainMapping chainMapping = new ChainMapping(ligand.getStructureId(), "ligand", 0, 0); ligandEntry.addChain(ligand.getStructureId(), chainMapping, "unspecified"); } } catch (BridgesRemoteAccessException be) { igbLogger.severe("Cannot access PDB!"); break; } } } } if (query.searchModifications()) { MoleculeEntry entry = symManager.getByProteinAc(ac).getEntry(); for (ModifiedResidue modification : entry.getModifications()) { MISymContainer misym = symManager.getByProteinAc(modification.getDescription()); uniprotNeedMapping.add(ac); if (misym == null) { interactors.getOrCreateInteraction(ac, modification.getDescription()) .addType("direct interaction (Uniprot)"); // interactors.add(modification.getDescription(), // "association"); MoleculeEntry ligandEntry = new MoleculeEntry(modification.getDescription()); ligandEntry.setSequence(""); ligandEntry.setTaxid(MoleculeEntry.TAXID_MODIFICATION); ligandEntry.addGeneName(modification.getDescription()); targetUniprotEntries.put(modification.getDescription(), ligandEntry); symManager.getByProtein(ligandEntry); } } } Collection<String> interactorUniprotAcs = interactors.getInteractors(); for (String interactorUniprotAc : interactorUniprotAcs) { // Skip interaction if we the type of query is INTRA (i.e. 
only // interactions between selected genes) // and one of the protein was not selected if (QueryType.EXTRA.equals(query.getQueryType()) || queryUniprotAcs.contains(interactorUniprotAc)) { uniprotAc2uniprotAcs.add(ac, interactorUniprotAc); targetUniprotAcs.add(interactorUniprotAc); // String key = ac + "#" + interactorUniprotAc; // interactionTypes.addAll(key, // interactors.get(interactorUniprotAc)); // At this point we may not have created the symmetry } } progressManager.nextStep(); setProgress(progressManager.getProgress()); } // Only look for uniprot Acs for which we don't have an entry yet HashSet<String> uniprotAcToSearch = new HashSet<>(); uniprotAcToSearch.addAll(targetUniprotAcs); uniprotAcToSearch.removeAll(symManager.getProteinAcs()); // Allow proteins from other species try { targetUniprotEntries .putAll(uniprotUtil.getUniprotEntriesFromUniprotAccessions(uniprotAcToSearch, false)); } catch (BridgesRemoteAccessException be) { igbLogger.severe("Cannot access Uniprot!"); return resultsInBackground; } for (MoleculeEntry entry : targetUniprotEntries.values()) { MISymContainer container = symManager.getByProtein(entry); if (container == null) { } } // missing ones? Collection<String> missingUniprotAcs = new ArrayList<>(); missingUniprotAcs.addAll(uniprotAcToSearch); missingUniprotAcs.removeAll(targetUniprotEntries.keySet()); for (String missingAc : missingUniprotAcs) { MICommons.getInstance().addProteinToBlackList(missingAc); } for (MISymContainer container : symManager.getQueryContainers()) { if (null != container.getEntry()) { targetUniprotEntries.put(container.getEntry().getUniprotAc(), container.getEntry()); } } // Do I need it if I don't need symmetries? // Step 4 progressManager.nextMajorStep(targetUniprotEntries.values().size()); for (MoleculeEntry uniprotEntry : targetUniprotEntries.values()) { logAndPublish("create symmetry for " + uniprotEntry.getUniprotAc()); // Get symmetry, it has not been necessarily created MISymContainer container = symManager.getByProtein(uniprotEntry); Collection<String> geneIds; // Check if we are using Ensembl web service or QuickLoad. 
if (EnsemblGeneManager.class.isInstance(geneManager)) { geneIds = uniprotEntry.getEnsemblGenes(); } else { geneIds = new HashSet<>(); geneIds.addAll(uniprotEntry.getGeneNames()); geneIds.addAll(uniprotEntry.getRefseqs()); geneIds.addAll(uniprotEntry.getEnsemblGenes()); } SimpleSymWithProps overlappingSym = new SimpleSymWithProps(); overlappingSym.setProperty(TrackLineParser.ITEM_RGB, Color.RED); overlappingSym.setID(this.trackId + "-" + uniprotEntry.getGeneName()); for (String geneId : geneIds) { Collection<MIGene> genes = geneManager.getByID(geneId); // For each gene create a "result symmetry", which will be // displayed in the interaction track if (genes.isEmpty()) { continue; } RangeMerger merger = new RangeMerger(); for (MIGene gene : genes) { if (null == gene) { continue; } if (null != uniprotEntry.getVarSpliceAC(gene.getID())) { gene.getUniprotAcs().add(uniprotEntry.getUniprotAc()); gene.setProtein(uniprotEntry); symManager.addGeneSymmetry(container, gene); BioSeq chromosome = geneManager.getSequence(gene.getChromosomeName()); if (chromosome == null) { igbLogger.severe("Unavailable sequence: " + gene.getChromosomeName() + ", there may be a network problem."); continue; } merger.addRange(chromosome.getId(), new Range(gene.getMin(), gene.getMax())); } } for (String seq : merger.getSequences()) { BioSeq chromosome = geneManager.getSequence(seq); if (chromosome == null) { igbLogger.severe("No sequence for chromosome: " + seq); } for (Range range : merger.getRanges(seq)) { SeqSpan span = new SimpleSeqSpan(range.getMin(), range.getMax(), chromosome); // Check if it has already this span boolean hasSpan = false; for (int i = 0; i < overlappingSym.getSpanCount(); i++) { SeqSpan otherSpan = overlappingSym.getSpan(i); if (otherSpan.getMin() == span.getMin() && otherSpan.getMax() == span.getMax()) { hasSpan = true; break; } } if (false == hasSpan) { overlappingSym.addSpan(span); } } } if (false == genes.isEmpty()) { // we found it break; } } symManager.setResultSym(container, overlappingSym); progressManager.nextStep(); setProgress(progressManager.getProgress()); } for (String ac : uniprotNeedMapping) { MISymContainer proteinContainer = symManager.getByProteinAc(ac); for (MIGene gene : proteinContainer.getMiGenes()) { if (false == miGene2selectedSyms.containsKey(gene)) { continue; } for (SeqSymmetry selectedSym : miGene2selectedSyms.get(gene)) { logAndPublish("Load residues for " + gene.getID()); geneManager.loadTranscriptSequence(selectedSym.getSpanSeq(0), gene); // Maybe the protein was already assigned to the gene. 
// In order to be sure we are working on the right one, // Don't use the protein variable, but get it fromthe gene ArrayList<AAPosition> aaPositions = new ArrayList<>(); // symmetry are 0-based exclusive, // use max -1 to have inclusive coordinates Collection<AAPosition> positions = AAPositionManager.getAAPositionManager(query.getLabel()) .getAAPositions(gene, selectedSym.getSpan(0).getMin(), selectedSym.getSpan(0).getMax() - 1); aaPositions.addAll(positions); for (AAPosition aa : aaPositions) { gene2pos.add(gene, aa); } symManager.addSelectedResidues(gene.getProtein(), aaPositions); } } } // Step 5 // don't add twice the same interaction HashSet<String> interactionsDone = new HashSet<>(); progressManager.nextMajorStep(symManager.getQueryContainers().size()); for (MISymContainer container : symManager.getQueryContainers()) { logAndPublish(container.getEntry().getGeneName()); if (null == container.getEntry()) { continue; } if (null == container.getResultSym()) { continue; } String queryUniprotAc = container.getEntry().getUniprotAc(); if (null == uniprotAc2uniprotAcs.get(queryUniprotAc)) { continue; } if (MICommons.getInstance().isBlackListed(queryUniprotAc)) { continue; } for (String targetUniprotAc : uniprotAc2uniprotAcs.get(queryUniprotAc)) { if (MICommons.getInstance().isBlackListed(targetUniprotAc)) { continue; } // An interaction may be slected twice, as A-B and B-A, // avoid this. if (interactionsDone.contains(targetUniprotAc + "#" + queryUniprotAc) || interactionsDone.contains(queryUniprotAc + "#" + targetUniprotAc)) { continue; } interactionsDone.add(queryUniprotAc + "#" + targetUniprotAc); MISymContainer targetContainer = symManager.getByProteinAc(targetUniprotAc); if (targetContainer == null) { continue; } if (targetContainer.getEntry() == null) { continue; } if (targetContainer.getResultSym() == null) { continue; } MIResult result = new MIResult(trackId, container, targetContainer, interactors.getOrCreateInteraction(container.getEntry().getUniprotAc(), targetContainer.getEntry().getUniprotAc()), query, symManager); resultsInBackground.add(result); miSymmetries.add(targetContainer.getResultSym()); } progressManager.nextStep(); setProgress(progressManager.getProgress()); } AAPositionManager.removeManager(query.getLabel()); return resultsInBackground; }
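The uniprotAcToSearch and missingUniprotAcs steps above follow a common "fetch only what we don't already have" pattern: copy the candidate accessions, subtract the ones already cached, query the remote service, then subtract the resolved keys to see what is still missing. A simplified sketch of that flow, with hypothetical stand-ins for the cache and the remote lookup:

import java.util.*;

public class FetchOnlyUnknowns {
    public static void main(String[] args) {
        Set<String> targetAcs = new HashSet<>(Arrays.asList("P12345", "Q67890", "P99999"));
        Set<String> alreadyKnown = new HashSet<>(Collections.singletonList("P12345"));

        // Only look for accessions we don't have an entry for yet.
        Set<String> toSearch = new HashSet<>(targetAcs);
        toSearch.removeAll(alreadyKnown);

        // Hypothetical remote lookup; here it pretends only Q67890 resolves.
        Map<String, String> fetched = new HashMap<>();
        if (toSearch.contains("Q67890")) {
            fetched.put("Q67890", "entry for Q67890");
        }

        // Whatever was searched for but not returned is missing (e.g. obsolete ids).
        Set<String> missing = new HashSet<>(toSearch);
        missing.removeAll(fetched.keySet());

        System.out.println("searched: " + toSearch); // Q67890, P99999
        System.out.println("missing:  " + missing);  // P99999
    }
}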
From source file:com.pinterest.arcee.aws.EC2HostInfoDAOImpl.java
@Override
public Set<String> getTerminatedHosts(Set<String> staleIds) throws Exception {
    HashSet<String> ids = new HashSet<>(staleIds);
    HashSet<String> terminatedHosts = new HashSet<>();
    while (!ids.isEmpty()) {
        DescribeInstancesRequest request = new DescribeInstancesRequest();
        request.setInstanceIds(ids);
        try {
            do {
                DescribeInstancesResult results = ec2Client.describeInstances(request);
                List<Reservation> reservations = results.getReservations();
                for (Reservation reservation : reservations) {
                    for (Instance instance : reservation.getInstances()) {
                        int stateCode = instance.getState().getCode();
                        String id = instance.getInstanceId();
                        if (stateCode == TERMINATED_CODE || stateCode == STOPPED_CODE) {
                            LOG.info(String.format("Instance %s has already been terminated or stopped.", id));
                            terminatedHosts.add(id);
                        }
                        ids.remove(id);
                    }
                }
                if (results.getNextToken() == null || results.getNextToken().isEmpty()) {
                    break;
                }
                request = new DescribeInstancesRequest();
                request.setInstanceIds(ids);
                request.setNextToken(results.getNextToken());
            } while (true);
            LOG.debug("Cannot find the following ids in AWS:", ids);
            terminatedHosts.addAll(ids);
            return terminatedHosts;
        } catch (AmazonServiceException ex) {
            Collection<String> invalidHostIds = handleInvalidInstanceId(ex);
            ids.removeAll(invalidHostIds);
            // add invalid host ids to the terminated host list.
            terminatedHosts.addAll(invalidHostIds);
        } catch (AmazonClientException ex) {
            LOG.error(String.format("Get AmazonClientException, exit with terminiatedHost %s",
                    terminatedHosts.toString()), ex);
            return terminatedHosts;
        }
    }
    return terminatedHosts;
}
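Here removeAll (together with ids.remove(id)) is what guarantees the outer while (!ids.isEmpty()) loop makes progress: each pass either resolves instance ids or strips the invalid ones reported by the AmazonServiceException. A stripped-down sketch of that "shrink the work set each round" pattern, with a hypothetical validator in place of the AWS client:

import java.util.*;

public class ShrinkingWorkSet {
    public static void main(String[] args) {
        Set<String> pending = new HashSet<>(Arrays.asList("i-1", "i-2", "i-bad", "i-3"));
        Set<String> done = new HashSet<>();

        while (!pending.isEmpty()) {
            // Hypothetical batch call that rejects ids containing "bad".
            Set<String> invalid = new HashSet<>();
            for (String id : pending) {
                if (id.contains("bad")) {
                    invalid.add(id);
                }
            }
            if (!invalid.isEmpty()) {
                // Drop the rejected ids and retry with the rest,
                // mirroring ids.removeAll(invalidHostIds) above.
                pending.removeAll(invalid);
                done.addAll(invalid);
                continue;
            }
            done.addAll(pending);
            pending.clear();
        }
        System.out.println(done); // all four ids, each handled exactly once
    }
}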
From source file:com.impetus.ankush2.framework.monitor.AbstractMonitor.java
/**
 * Method to save graph view data.
 *
 * @param legends
 *            the legends
 * @param publicIp
 *            the public ip
 * @param add
 *            the add
 */
private void saveGraphViewData(List<String> legends, String publicIp, boolean add) {
    // get current user.
    User userDetails = getCurrentUser();
    // username.
    String userName = userDetails.getUsername();
    // monitoring manager object.
    MonitoringManager monitoringManager = new MonitoringManager();
    // node monitoring object.
    NodeMonitoring nodeMonitoring = monitoringManager.getMonitoringData(publicIp);
    HashMap userGraphViewMap = nodeMonitoring.getGraphViewData();
    // hash set of saved legends.
    HashSet graphViewData = (HashSet) userGraphViewMap.get(userName);
    // if the set is null, assign a new hash set.
    if (graphViewData == null) {
        graphViewData = new HashSet();
    }
    if (add) {
        // adding new legends.
        graphViewData.addAll(legends);
    } else {
        // removing legends.
        graphViewData.removeAll(legends);
    }
    userGraphViewMap.put(userName, graphViewData);
    // setting graphViewData.
    nodeMonitoring.setGraphViewData(userGraphViewMap);
    // saving in database
    monitoringManager.save(nodeMonitoring);
}
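The add flag turns the same call site into an enable/disable toggle: addAll to show legends, removeAll to hide them. A small sketch of that toggle, independent of the monitoring classes above:

import java.util.*;

public class LegendToggle {
    private final Set<String> visibleLegends = new HashSet<>();

    void toggle(List<String> legends, boolean add) {
        if (add) {
            visibleLegends.addAll(legends);    // show
        } else {
            visibleLegends.removeAll(legends); // hide; unknown legends are ignored
        }
    }

    public static void main(String[] args) {
        LegendToggle view = new LegendToggle();
        view.toggle(Arrays.asList("cpu", "memory"), true);
        view.toggle(Collections.singletonList("cpu"), false);
        System.out.println(view.visibleLegends); // [memory]
    }
}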
From source file:org.openanzo.jdbc.container.query.FindInferred.java
/**
 * Find all statements in container that match the provided parameters
 *
 * @param container
 *            source of data
 * @param subj
 *            Subject resource to match, or wildcard if null
 * @param prop
 *            Predicate uri to match, or wildcard if null
 * @param obj
 *            Object value to match, or wildcard if null
 * @param contexts
 *            Context values to match, or wildcard if null
 * @return Iterable set of quads containing results of find operation
 * @throws AnzoException
 *             if there was an error finding statements
 */
@SuppressWarnings("unchecked")
public static Iterable<Quad> findStatements(RDBQuadStore container, Resource subj, URI prop, Value obj,
        URI... contexts) throws AnzoException {
    Long namedGraphId = null;
    String graphTable = null;
    int metadataGraph = -1;
    if (contexts != null && contexts.length == 1) {
        URI context = (URI) StatementUtils.convertUriToAny(contexts[0]);
        if (context != null) {
            namedGraphId = container.getNodeLayout().fetchId(context, container.getConnection());
            if (namedGraphId == null) {
                return Collections.EMPTY_LIST; // required node is not even in db
            }
            metadataGraph = UriGenerator.isMetadataGraphUri(context) ? 1 : 0;
        }
    } else {
        HashSet<Value> set = new HashSet<Value>();
        boolean includeAllNamedGraphs = false, includeAllMetadataGraphs = false;
        Map<Value, Long> graphIds = null;
        if (contexts != null && contexts.length > 0) {
            for (Resource context : contexts) {
                if (context.equals(GRAPHS.ALL_GRAPHS))
                    includeAllNamedGraphs = includeAllMetadataGraphs = true;
                else if (context.equals(GRAPHS.ALL_NAMEDGRAPHS))
                    includeAllNamedGraphs = true;
                else if (context.equals(GRAPHS.ALL_METADATAGRAPHS))
                    includeAllMetadataGraphs = true;
                else
                    set.add(context);
            }
            if (set.size() > 0) {
                graphIds = container.getNodeLayout().resolveStoredNodes(set, false, container.getConnection(), -1);
                if (graphIds.size() < set.size()) {
                    set.removeAll(graphIds.keySet());
                    log.debug(LogUtils.RDB_MARKER, "findStatementsInferred",
                            new UnknownGraphException(StringUtils.join(set.iterator(), ", ")));
                    if (graphIds.size() == 0 && !includeAllNamedGraphs && !includeAllMetadataGraphs) {
                        return Collections.EMPTY_LIST; // required node is not even in db
                    }
                }
                if (graphIds.size() > 0) {
                    graphTable = SQLQueryConstants.defaultGraphsTempTable;
                    //container.populateValidGraphs(graphIds, includeAllNamedGraphs, includeAllMetadataGraphs, graphTable);
                    //container.populateValidGraphs(graphIds, includeAllNamedGraphs, graphTable);
                }
            } else if (includeAllNamedGraphs || includeAllMetadataGraphs) {
                metadataGraph = includeAllNamedGraphs ? (includeAllMetadataGraphs ? -1 : 0) : 1;
            }
        }
    }
    return findStatements(container, subj, prop, obj, namedGraphId, metadataGraph, graphTable);
}
From source file:org.openanzo.jdbc.container.query.FindInferred.java
/**
 * Find all statements in container that match the provided parameters including inferred statements
 *
 * @param container
 *            source of data
 * @param subj
 *            Subject resource to match, or wildcard if null
 * @param prop
 *            Predicate uri to match, or wildcard if null
 * @param obj
 *            Object value to match, or wildcard if null
 * @param contexts
 *            Context values to match, or wildcard if null
 * @param ontology
 *            URI of ontology that contains inferred property and object definitions
 * @return Iterable set of quads containing results of find operation
 * @throws AnzoException
 *             if there was an error finding statements
 */
public static Iterable<Quad> findStatementsInferred(RDBQuadStore container, Resource subj, URI prop, Value obj,
        Resource[] contexts, Resource ontology) throws AnzoException {
    //try {
    Long namedGraphId = null;
    String graphTable = null;
    int metadataGraph = -1;
    if (contexts != null && contexts.length == 1) {
        Resource context = (Resource) StatementUtils.convertUriToAny(contexts[0]);
        if (context != null) {
            namedGraphId = container.getNodeLayout().fetchId(context, container.getConnection());
            if (namedGraphId == null) {
                return Collections.<Quad>emptySet(); // required node is not even in db
            }
            metadataGraph = context.toString().startsWith(NAMESPACES.METADATAGRAPH_PREFIX) ? 1 : 0;
        }
    } else {
        HashSet<Value> set = new HashSet<Value>();
        boolean includeAllNamedGraphs = false, includeAllMetadataGraphs = false;
        Map<Value, Long> graphIds = null;
        if (contexts != null && contexts.length > 0) {
            for (Resource context : contexts) {
                if (context.equals(GRAPHS.ALL_GRAPHS))
                    includeAllNamedGraphs = includeAllMetadataGraphs = true;
                else if (context.equals(GRAPHS.ALL_NAMEDGRAPHS))
                    includeAllNamedGraphs = true;
                else if (context.equals(GRAPHS.ALL_METADATAGRAPHS))
                    includeAllMetadataGraphs = true;
                else
                    set.add(context);
            }
            if (set.size() > 0) {
                graphIds = container.getNodeLayout().resolveStoredNodes(set, false, container.getConnection(), -1);
                if (graphIds.size() < set.size()) {
                    set.removeAll(graphIds.keySet());
                    log.debug(LogUtils.DATASOURCE_MARKER, "findStatementsInferred",
                            new UnknownGraphException(StringUtils.join(set.iterator(), ", ")));
                    if (graphIds.size() == 0 && !includeAllNamedGraphs && !includeAllMetadataGraphs) {
                        return Collections.<Quad>emptyList(); // required node is not even in db
                    }
                }
                if (graphIds.size() > 0) {
                    graphTable = SQLQueryConstants.defaultGraphsTempTable;
                    //container.populateValidGraphs(graphIds, includeAllNamedGraphs, includeAllMetadataGraphs, graphTable);
                    //container.populateValidGraphs(graphIds, includeAllNamedGraphs, graphTable);
                }
            } else if (includeAllNamedGraphs || includeAllMetadataGraphs) {
                metadataGraph = includeAllNamedGraphs ? (includeAllMetadataGraphs ? -1 : 0) : 1;
            }
        }
    }
    return findStatementsInferred(container, subj, prop, obj, namedGraphId, metadataGraph, graphTable, ontology);
    /*} catch (SQLException e) {
        throw new AnzoException(ExceptionConstants.ERROR_TAGS.CORE_ERROR | ExceptionConstants.ERROR_TAGS.RDB_ERROR,
                ExceptionConstants.CLIENT.FAILED_CONTAINER_FIND_STATEMENTS, e);
    }*/
}
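In both variants, set.removeAll(graphIds.keySet()) turns the requested-contexts set into the set of contexts the store could not resolve, which is then logged via an UnknownGraphException. The same "requested minus resolved" idiom in isolation, with hypothetical graph names:

import java.util.*;

public class UnresolvedGraphs {
    public static void main(String[] args) {
        Set<String> requested = new HashSet<>(Arrays.asList("graphA", "graphB", "graphC"));
        Map<String, Long> resolved = new HashMap<>();
        resolved.put("graphA", 1L);
        resolved.put("graphC", 3L);

        if (resolved.size() < requested.size()) {
            // Keep only the requested graphs that were not resolved to an id.
            requested.removeAll(resolved.keySet());
            System.out.println("unknown graphs: " + requested); // [graphB]
        }
    }
}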
From source file:dao.DirectoryAuthorDaoDb.java
/**
 * getBlockedDirectories - gets the list of blocked directories
 * @param authorResult - list of author directories
 * @param memberResult - list of member directories
 * @return boolean - true if the author directories overlap with this member's directories (i.e. the author can block this member)
 */
public boolean getBlockedDirectories(List authorResult, List memberResult) {
    HashSet hs1 = new HashSet(authorResult);
    HashSet hs2 = new HashSet(memberResult);
    boolean isblocked = hs1.removeAll(hs2);
    return isblocked;
}
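This example relies on removeAll's return value rather than its side effect: true means hs1 and hs2 had at least one element in common. Note that the check destroys hs1's contents; when the sets are only needed for the test, java.util.Collections.disjoint performs the same overlap check without mutation (returning true when there is no overlap). A short sketch contrasting the two:

import java.util.*;

public class OverlapCheck {
    public static void main(String[] args) {
        List<String> authorDirs = Arrays.asList("dirA", "dirB");
        List<String> memberDirs = Arrays.asList("dirB", "dirC");

        // removeAll returns true because "dirB" was removed, i.e. the lists overlap.
        HashSet<String> copy = new HashSet<>(authorDirs);
        boolean overlaps = copy.removeAll(new HashSet<>(memberDirs));

        // Non-mutating equivalent.
        boolean overlaps2 = !Collections.disjoint(authorDirs, memberDirs);

        System.out.println(overlaps + " " + overlaps2); // true true
    }
}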
From source file:dao.DirectoryAuthorDaoDb.java
/**
 * Adds the users in directory as dir authors
 * @param memberList memberList
 * @param directoryId directoryId
 * @param userId the user Login is used to check if this user has the permission to add authors
 * @param userLogin the user Login is used to check if this user has the permission to add authors
 * @return List - list of users who are not members
 * @throws BaseDaoException
 */
public String addAuthors(List memberList, String directoryId, String userId, String userLogin)
        throws BaseDaoException {
    if ((memberList == null) || RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userId)
            || RegexStrUtil.isNull(userLogin)) {
        throw new BaseDaoException("params are null");
    }

    /* get existing authors */
    HashSet authorList = listAuthorsOfDirectory(directoryId, userId, userLogin, DbConstants.READ_FROM_SLAVE);
    List userList = new ArrayList(authorList);
    List existingUserList = null;
    if (userList != null) {
        existingUserList = new ArrayList();
        for (int i = 0; i < userList.size(); i++) {
            if ((Directory) userList.get(i) != null) {
                existingUserList.add(((Directory) userList.get(i)).getValue(DbConstants.LOGIN));
            }
        }
    }

    /* remove existing authors from the memberList */
    List idList = null;
    if ((existingUserList != null) && (existingUserList.size() > 0)) {
        HashSet hs1 = new HashSet(memberList);
        HashSet hs2 = new HashSet(existingUserList);
        if (hs1.removeAll(hs2)) {
            idList = new ArrayList(hs1);
        } else {
            idList = memberList;
        }
    } else {
        idList = memberList;
    }

    /* add only new valid users in the directory */
    StringBuffer notMembers = new StringBuffer();
    try {
        if (idList != null && idList.size() > 0) {
            for (int i = 0; i < idList.size(); i++) {
                if (idList.get(i) != null) {
                    String mLogin = (String) idList.get(i);
                    Hdlogin hdlogin = getLoginid(mLogin);
                    if (hdlogin == null) {
                        notMembers.append(mLogin);
                        notMembers.append(" ");
                    } else {
                        addAuthor(directoryId, mLogin, userId, userLogin);
                    }
                }
            }
        }
    } catch (BaseDaoException e) {
        throw new BaseDaoException(
                "Exception occured in addAuthor(), DirectoryAuthorDao for userLogin " + userLogin, e);
    }
    return notMembers.toString();
}
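The removeAll call here filters the incoming member list down to logins that are not already authors; only when nothing overlaps does the code fall back to the original list. A compact sketch of that filtering step, with hypothetical logins:

import java.util.*;

public class NewAuthorsOnly {
    public static void main(String[] args) {
        List<String> memberList = Arrays.asList("alice", "bob", "carol");
        List<String> existingAuthors = Collections.singletonList("bob");

        List<String> toAdd;
        HashSet<String> candidates = new HashSet<>(memberList);
        if (candidates.removeAll(new HashSet<>(existingAuthors))) {
            // At least one member was already an author; keep only the new ones.
            toAdd = new ArrayList<>(candidates);
        } else {
            toAdd = memberList;
        }
        System.out.println(toAdd); // alice and carol, in some order
    }
}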
From source file:gedi.riboseq.inference.orf.OrfFinder.java
/** * Coordinates are in codonsRegion space! * @param index/*from w ww .j a v a 2 s .c o m*/ * @param sequence * @param sg * @param codonsRegion * @return */ public ArrayList<OrfWithCodons> findOrfs(int index, String sequence, SpliceGraph sg, ImmutableReferenceGenomicRegion<IntervalTreeSet<Codon>> codonsRegion) { SimpleDirectedGraph<Codon> fg = new SimpleDirectedGraph<Codon>("Codongraph"); // if (!codonsRegion.getReference().toString().equals("chr4+") || !codonsRegion.getRegion().contains(140_283_087)) // return 0; LeftMostInFrameAndClearList buff = new LeftMostInFrameAndClearList(); IntervalTreeSet<Codon> codons = codonsRegion.getData(); codons.removeIf(c -> c.getTotalActivity() < minCodonActivity); if (codons.size() == 0) return new ArrayList<OrfWithCodons>(); // add stop codons for easy orf inference HashSet<Codon> stopCodons = new HashSet<Codon>(); Trie<String> stop = new Trie<String>(); stop.put("TAG", "TAG"); stop.put("TGA", "TGA"); stop.put("TAA", "TAA"); stop.iterateAhoCorasick(sequence) .map(r -> new Codon(new ArrayGenomicRegion(r.getStart(), r.getEnd()), r.getValue())) .toCollection(stopCodons); for (Intron intr : sg.iterateIntrons().loop()) { ArrayGenomicRegion reg = new ArrayGenomicRegion(intr.getStart() - 2, intr.getStart(), intr.getEnd(), intr.getEnd() + 1); String cod = stop.get(SequenceUtils.extractSequence(reg, sequence)); if (cod != null) stopCodons.add(new Codon(reg, cod)); reg = new ArrayGenomicRegion(intr.getStart() - 1, intr.getStart(), intr.getEnd(), intr.getEnd() + 2); cod = stop.get(SequenceUtils.extractSequence(reg, sequence)); if (cod != null) stopCodons.add(new Codon(reg, cod)); } stopCodons.removeAll(codons); codons.addAll(stopCodons); ArrayList<OrfWithCodons> re = new ArrayList<OrfWithCodons>(); HashSet<Codon> usedForAnno = new HashSet<Codon>(); if (assembleAnnotationFirst) { // new: first use annotated transcripts in a greedy fashion ArrayList<ImmutableReferenceGenomicRegion<Transcript>> transcripts = annotation.ei(codonsRegion) .filter(t -> t.getData().isCoding()).map(t -> codonsRegion.induce(t, "T")).list(); int acount = 0; LinkedList<OrfWithCodons> orfs = new LinkedList<OrfWithCodons>(); GenomicRegion best; HashSet<Codon> aremoved = new HashSet<Codon>(); do { best = null; double bestSum = 0; for (ImmutableReferenceGenomicRegion<Transcript> tr : transcripts) { double[] a = new double[tr.getRegion().getTotalLength()]; for (Codon c : codons) { if (tr.getRegion().containsUnspliced(c)) { int p = tr.induce(c.getStart()); assert a[p] == 0; if (!aremoved.contains(c)) a[p] = c.totalActivity; if (c.isStop()) a[p] = -1; } } for (int f = 0; f < 3; f++) { int s = -1; double sum = 0; for (int p = f; p < a.length; p += 3) { if (a[p] == -1) {//stop if (sum > bestSum) { bestSum = sum; best = tr.getRegion().map(new ArrayGenomicRegion(s, p + 3)); } s = -1; sum = 0; } else sum += a[p]; if (a[p] > 0 && s == -1) s = p; } } } if (best != null) { ArrayList<Codon> cods = new ArrayList<>(); int uniqueCodons = 0; double uniqueActivity = 0; double totalActivity = 0; for (Codon c : codons) { if (best.containsUnspliced(c) && best.induce(c.getStart()) % 3 == 0) { if (aremoved.add(c)) { uniqueActivity += c.totalActivity; uniqueCodons++; } totalActivity += c.totalActivity; if (c.totalActivity > 0) cods.add(c); } } // System.out.println(codonsRegion.map(best)); if ((uniqueCodons >= minUniqueCodons || uniqueCodons == cods.size()) && uniqueActivity > minUniqueActivity && totalActivity > minOrfTotalActivity) { Collections.sort(cods); usedForAnno.addAll(cods); OrfWithCodons orf = new 
OrfWithCodons(index, 0, acount++, best.toArrayGenomicRegion(), cods, true); orfs.add(orf); } } } while (best != null); if (orfs.size() > 1) { // they are not necessarily connected! LinkedList<OrfWithCodons>[] connected = findConnectedOrfs(orfs); orfs.clear(); for (LinkedList<OrfWithCodons> corfs : connected) { for (boolean changed = true; changed && corfs.size() > 1;) { changed = false; if (useEM) inferOverlappingOrfActivitiesEM(corfs); else overlapUniqueCoverage(corfs); Iterator<OrfWithCodons> it = corfs.iterator(); while (it.hasNext()) { OrfWithCodons orf = it.next(); if (orf.getEstimatedTotalActivity() < minOrfTotalActivity) { it.remove(); changed = true; } } } if (corfs.size() > 1) distributeCodons(corfs); orfs.addAll(corfs); } } re.addAll(orfs); } // as edges only are represented in the splice graph, singleton codons are discarded (which does make sense anyway) for (Codon c : codons) { if (!c.isStop()) { // find unspliced successors (can be more than one, when the successor codon itself is spliced! all of them have the same start!) int max = c.getEnd() + maxAminoDist * 3; for (Codon n : codons .getIntervalsIntersecting(c.getEnd(), c.getEnd() + maxAminoDist * 3, buff.startAndClear(c)) .get()) { if (!containsInframeStop(sequence.substring(c.getEnd(), n.getStart()))) fg.addInteraction(c, n); max = n.getStart() + 2; } // find all spliced successors for each splice junction that comes before n or maxAminoDist sg.forEachIntronStartingBetween(c.getEnd(), max + 1, intron -> { for (Codon n : codons.getIntervalsIntersecting(intron.getEnd(), intron.getEnd() + maxAminoDist * 3 - (intron.getStart() - c.getEnd()), buff.startAndClear(c, intron)).get()) if (!containsInframeStop(SequenceUtils.extractSequence(new ArrayGenomicRegion(c.getStart(), intron.getStart(), intron.getEnd(), n.getStart()), sequence))) fg.addInteraction(c, n, intron); }); } } int cc = 1; for (SimpleDirectedGraph<Codon> g : fg.getWeaklyConnectedComponents()) { if (EI.wrap(g.getSources()).mapToDouble(c -> c.getTotalActivity()).sum() == 0) continue; // iterate longest paths in g LinkedList<Codon> topo = g.getTopologicalOrder(); HashSet<Codon> remInTopo = new HashSet<Codon>(topo); remInTopo.removeIf(c -> !stopCodons.contains(c) && !usedForAnno.contains(c)); HashSet<Codon> removed = new HashSet<Codon>(remInTopo); // double maxPathScore = 0; LinkedList<OrfWithCodons> orfs = new LinkedList<OrfWithCodons>(); int count = 0; while (removed.size() < topo.size()) { HashMap<Codon, MutablePair<GenomicRegion, Double>> longestPrefixes = new HashMap<Codon, MutablePair<GenomicRegion, Double>>(); for (Codon c : topo) longestPrefixes.put(c, new MutablePair<GenomicRegion, Double>(c, removed.contains(c) ? 0 : (c.getTotalActivity()))); Codon longestEnd = null; HashMap<Codon, Codon> backtracking = new HashMap<Codon, Codon>(); for (Codon c : topo) { // if (codonsRegion.map(c).getStart()==100_466_118) // System.out.println(c); // // if (codonsRegion.map(c).getStart()==100_465_842) // System.out.println(c); double len = longestPrefixes.get(c).Item2; for (AdjacencyNode<Codon> n = g.getTargets(c); n != null; n = n.next) { MutablePair<GenomicRegion, Double> pref = longestPrefixes.get(n.node); double nnact = removed.contains(n.node) ? 
0 : (n.node.getTotalActivity()); if (pref.Item2 <= len + nnact) { pref.set(extendFullPath(longestPrefixes.get(c).Item1, c, n.node, n.getLabel()), len + nnact); backtracking.put(n.node, c); } } if (longestEnd == null || longestPrefixes.get(longestEnd).Item2 <= len) longestEnd = c; } // determine longest path by backtracking and mark all codons on the path as removed ArrayList<Codon> orfCodons = new ArrayList<Codon>(); double totalActivity = 0; double uniqueActivity = 0; int uniqueCodons = 0; for (Codon c = longestEnd; c != null; c = backtracking.get(c)) { if (removed.add(c) && c.getTotalActivity() > 0) { uniqueCodons++; uniqueActivity += c.getTotalActivity(); } if (c.getTotalActivity() > 0) // to remove dummy stop codons orfCodons.add(c); totalActivity += c.getTotalActivity(); } // System.out.println(codonsRegion.map(longestPrefixes.get(longestEnd).Item1)); if ((uniqueCodons >= minUniqueCodons || uniqueCodons == orfCodons.size()) && uniqueActivity > minUniqueActivity && totalActivity > minOrfTotalActivity) { Collections.reverse(orfCodons); MutablePair<GenomicRegion, Double> triple = longestPrefixes.get(longestEnd); ArrayGenomicRegion region = triple.Item1.toArrayGenomicRegion(); String lastCodon = SequenceUtils.extractSequence( region.map( new ArrayGenomicRegion(region.getTotalLength() - 3, region.getTotalLength())), sequence); OrfWithCodons orf = new OrfWithCodons(index, cc, count++, region, orfCodons, stop.containsKey(lastCodon)); orfs.add(orf); } // maxPathScore = Math.max(maxPathScore,totalActivity); } if (orfs.size() > 1) { // they are not necessarily connected! LinkedList<OrfWithCodons>[] connected = findConnectedOrfs(orfs); orfs.clear(); for (LinkedList<OrfWithCodons> corfs : connected) { for (boolean changed = true; changed && corfs.size() > 1;) { changed = false; if (useEM) inferOverlappingOrfActivitiesEM(corfs); else overlapUniqueCoverage(corfs); Iterator<OrfWithCodons> it = corfs.iterator(); while (it.hasNext()) { OrfWithCodons orf = it.next(); if (orf.getEstimatedTotalActivity() < minOrfTotalActivity) { it.remove(); changed = true; } } } if (corfs.size() > 1) distributeCodons(corfs); orfs.addAll(corfs); } } re.addAll(orfs); cc++; } return re; }
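In the block above, stopCodons.removeAll(codons) runs right before codons.addAll(stopCodons): the synthetic stop codons built from the sequence are first reduced to those not already present among the observed codons, and only then merged in. A minimal sketch of that remove-then-merge idiom with plain strings (the codon values are illustrative only):

import java.util.*;

public class MergeWithoutDuplicates {
    public static void main(String[] args) {
        Set<String> observed = new HashSet<>(Arrays.asList("ATG", "TGA"));
        Set<String> synthetic = new HashSet<>(Arrays.asList("TAG", "TGA", "TAA"));

        // Trim the synthetic set down to entries not already observed, then merge.
        synthetic.removeAll(observed);
        observed.addAll(synthetic);

        System.out.println(synthetic); // TAG, TAA
        System.out.println(observed);  // ATG, TGA, TAG, TAA (order unspecified)
    }
}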
From source file:dao.CarryonDaoDb.java
public HashSet getUniqueTags(List photos) {
    HashSet allSet = new HashSet();
    if (photos != null && photos.size() > 0) {
        for (int i = 0; i < photos.size(); i++) {
            if ((Photo) photos.get(i) != null) {
                String usertags = ((Photo) photos.get(i)).getValue(DbConstants.USER_TAGS);
                String[] yourkeys = usertags.split(",");
                if (i == 0) {
                    for (int k = 0; k < yourkeys.length; k++) {
                        if (!RegexStrUtil.isNull(yourkeys[k])) {
                            allSet.add(yourkeys[k]);
                        }
                    }
                } else {
                    HashSet h1 = new HashSet();
                    for (int k = 0; k < yourkeys.length; k++) {
                        if (!RegexStrUtil.isNull(yourkeys[k])) {
                            h1.add(yourkeys[k]);
                        }
                    }
                    allSet.removeAll(h1);
                    //logger.info("removeAll h1 = " + h1.toString());
                    //logger.info("removeAll allSet = " + allSet.toString());
                    allSet.addAll(h1);
                    //logger.info("addAll h1 = " + h1.toString());
                    //logger.info("addAll allSet = " + allSet.toString());
                } //else
            } //if
        } // for
    } // if
    return allSet;
}
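The removeAll/addAll pair above accumulates a running union of tag strings. For value-equal elements the net content is the same as calling addAll alone, because Set.add keeps the element that is already stored; removing first only becomes observable when you want the set to hold the newly supplied instances rather than the previously stored (equal) ones. A small sketch of that distinction, using an illustrative Tag type:

import java.util.*;

public class ReplaceEqualElements {
    static final class Tag {
        final String name;
        final int weight;
        Tag(String name, int weight) { this.name = name; this.weight = weight; }
        @Override public boolean equals(Object o) {
            return o instanceof Tag && ((Tag) o).name.equals(name);
        }
        @Override public int hashCode() { return name.hashCode(); }
        @Override public String toString() { return name + ":" + weight; }
    }

    public static void main(String[] args) {
        Set<Tag> all = new HashSet<>(Collections.singletonList(new Tag("sunset", 1)));
        List<Tag> fresh = Collections.singletonList(new Tag("sunset", 5));

        // addAll alone keeps the stored instance: still sunset:1
        all.addAll(fresh);
        System.out.println(all);

        // removeAll then addAll swaps in the new instance: sunset:5
        all.removeAll(fresh);
        all.addAll(fresh);
        System.out.println(all);
    }
}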
From source file:org.easysoa.registry.indicators.rest.IndicatorsController.java
private Map<String, IndicatorValue> computeIndicators(CoreSession session, String subprojectId, String visibility)
        throws Exception {
    //CoreSession session = SessionFactory.getSession(request);
    List<IndicatorProvider> computedProviders = new ArrayList<IndicatorProvider>();
    List<IndicatorProvider> pendingProviders = new ArrayList<IndicatorProvider>();
    Map<String, IndicatorValue> computedIndicators = new HashMap<String, IndicatorValue>();
    //Map<String, Map<String, IndicatorValue>> indicatorsByCategory = new HashMap<String, Map<String, IndicatorValue>>();
    HashSet<String> pendingRequiredIndicators = new HashSet<String>(indicatorProviders.size());
    int previousComputedProvidersCount = -1;

    // Compute indicators in several passes, with respect to dependencies
    while (computedProviders.size() != previousComputedProvidersCount) {
        previousComputedProvidersCount = computedProviders.size();
        /*for (Entry<String, List<IndicatorProvider>> indicatorProviderCategory : indicatorProviders.entrySet()) {*/
        // Start or continue indicator category
        /*Map<String, IndicatorValue> categoryIndicators = indicatorsByCategory.get(indicatorProviderCategory.getKey());
        if (categoryIndicators == null) {
            categoryIndicators = new HashMap<String, IndicatorValue>();
        }*/

        // Browse all providers
        //for (IndicatorProvider indicatorProvider : indicatorProviderCategory.getValue()) {
        for (IndicatorProvider indicatorProvider : indicatorProviders) {
            if (!computedProviders.contains(indicatorProvider)) {
                // Compute indicator only if the dependencies are already computed
                List<String> requiredIndicators = indicatorProvider.getRequiredIndicators();
                boolean allRequirementsSatisfied = true;
                if (requiredIndicators != null) {
                    for (String requiredIndicator : requiredIndicators) {
                        if (!computedIndicators.containsKey(requiredIndicator)) {
                            allRequirementsSatisfied = false;
                            pendingRequiredIndicators.add(requiredIndicator);
                            break;
                        }
                    }
                }
                // Actual indicator calculation
                if (allRequirementsSatisfied) {
                    Map<String, IndicatorValue> indicators = null;
                    try {
                        indicators = indicatorProvider.computeIndicators(session, subprojectId, computedIndicators,
                                visibility);
                    } catch (Exception e) {
                        logger.error("Failed to compute indicator '" + indicatorProvider.toString() + "': "
                                + e.getMessage(), e);
                    }
                    if (indicators != null) {
                        //categoryIndicators.putAll(indicators);
                        computedIndicators.putAll(indicators);
                        pendingRequiredIndicators.removeAll(indicators.entrySet()); // just in case there had been required
                    }
                    computedProviders.add(indicatorProvider);
                    pendingProviders.remove(indicatorProvider);
                } else {
                    pendingProviders.add(indicatorProvider);
                }
            }
        }
        //indicatorsByCategory.put(indicatorProviderCategory.getKey(), categoryIndicators);
    }
    /*}*/

    // Warn if some indicators have been left pending
    for (IndicatorProvider pendingProvider : pendingProviders) {
        logger.warn(pendingProvider.getClass().getName() + " provider dependencies could not be satisfied ("
                + pendingProvider.getRequiredIndicators() + ")");
    }
    if (!pendingRequiredIndicators.isEmpty()) {
        logger.warn("Pending required indicators : " + pendingRequiredIndicators);
    }
    //return indicatorsByCategory;
    return computedIndicators;
}
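One detail worth noting: removeAll takes a Collection<?>, so pendingRequiredIndicators.removeAll(indicators.entrySet()) compiles even though the set holds Strings and entrySet() yields Map.Entry objects; since a String is never equal to a Map.Entry, that call leaves the set unchanged. Removing keySet() instead would clear the indicator names that have just been computed. A sketch of the difference, with hypothetical indicator names:

import java.util.*;

public class RemoveAllTypeMismatch {
    public static void main(String[] args) {
        Set<String> pending = new HashSet<>(Arrays.asList("cpu.count", "disk.size"));
        Map<String, Integer> computed = new HashMap<>();
        computed.put("cpu.count", 8);

        // Compiles because removeAll accepts Collection<?>, but a Map.Entry is
        // never equal to a String, so nothing is removed here.
        pending.removeAll(computed.entrySet());
        System.out.println(pending); // still contains cpu.count and disk.size

        // Removing by key clears the names that have just been computed.
        pending.removeAll(computed.keySet());
        System.out.println(pending); // [disk.size]
    }
}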