List of usage examples for java.util.HashSet.addAll
boolean addAll(Collection<? extends E> c);
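The project examples below are easier to follow with a minimal, self-contained illustration of the call first: addAll copies every element of the argument collection into the set and returns true only if the set actually changed. The class name and values here are invented for the demonstration.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class HashSetAddAllDemo {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<>(Arrays.asList("red", "green"));
        List<String> more = Arrays.asList("green", "blue");

        // addAll is a set union: the duplicate "green" is ignored.
        boolean changed = colors.addAll(more);

        System.out.println(changed); // true, because "blue" was new
        System.out.println(colors);  // e.g. [red, green, blue] (iteration order is not guaranteed)
    }
}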
From source file: org.freebxml.omar.common.BindingUtility.java

/**
 * Gets the composed RegistryObjects within specified RegistryObject.
 * Based on scanning rim.xsd for </sequence>.
 *
 * @param registryObjects specifies the RegistryObjects whose composed objects are being sought.
 * @param depth specifies depth of fetch. -1 implies fetch all levels. 1 implies fetch immediate composed objects.
 */
public Set getComposedRegistryObjects(Collection registryObjects, int depth) {
    HashSet composedObjects = new HashSet();

    Iterator iter = registryObjects.iterator();
    while (iter.hasNext()) {
        RegistryObjectType ro = (RegistryObjectType) iter.next();
        composedObjects.addAll(getComposedRegistryObjects(ro, depth));
    }

    return composedObjects;
}
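The pattern above uses addAll to fold the result of a recursive call into one accumulator set, letting HashSet discard duplicates. A minimal sketch of the same idea under invented names (a hypothetical Node class stands in for RegistryObjectType):

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class Node {
    String name;
    List<Node> composed = new ArrayList<>();

    Node(String name) { this.name = name; }

    /** Collects this node's composed nodes, recursively, without duplicates. */
    Set<Node> composedNodes() {
        Set<Node> result = new HashSet<>();
        for (Node child : composed) {
            result.add(child);
            // addAll merges the recursively collected grandchildren into the accumulator.
            result.addAll(child.composedNodes());
        }
        return result;
    }

    public static void main(String[] args) {
        Node root = new Node("root");
        Node child = new Node("child");
        Node grandChild = new Node("grandChild");
        root.composed.add(child);
        child.composed.add(grandChild);
        System.out.println(root.composedNodes().size()); // 2
    }
}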
From source file: org.imsglobal.lti.toolProvider.ResourceLink.java

/**
 * Send a service request to the tool consumer.
 *
 * @param string type Message type value
 * @param string url  URL to send request to
 * @param string xml  XML of message request
 *
 * @return boolean True if the request successfully obtained a response
 */
private boolean doLTI11Service(String type, String url, String xml) {
    boolean ok = false;
    this.extRequest = null;
    this.extRequestHeaders = null;
    this.extResponse = null;
    this.extResponseHeaders = null;
    if (StringUtils.isNotEmpty(url)) {
        String messageId = UUID.randomUUID().toString();
        String xmlRequest = "<?xml version = \"1.0\" encoding = \"UTF-8\"?>\n"
                + "<imsx_POXEnvelopeRequest xmlns = \"http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0\">\n"
                + "  <imsx_POXHeader>\n"
                + "    <imsx_POXRequestHeaderInfo>\n"
                + "      <imsx_version>V1.0</imsx_version>\n"
                + "      <imsx_messageIdentifier>" + id + "</imsx_messageIdentifier>\n"
                + "    </imsx_POXRequestHeaderInfo>\n"
                + "  </imsx_POXHeader>\n"
                + "  <imsx_POXBody>\n"
                + "    <" + type + "Request>\n"
                + xml
                + "    </" + type + "Request>\n"
                + "  </imsx_POXBody>\n"
                + "</imsx_POXEnvelopeRequest>\n";

        // Calculate body hash
        String hash = Base64.encodeBase64String(DigestUtils.sha1(xmlRequest.toString()));
        Map<String, String> params = new HashMap<String, String>();
        params.put("oauth_body_hash", hash);
        HashSet<Map.Entry<String, String>> httpParams = new HashSet<Map.Entry<String, String>>();
        httpParams.addAll(params.entrySet());

        // Check for query parameters which need to be included in the signature
        Map<String, String> queryParams = new HashMap<String, String>();
        String urlNoQuery = url;
        try {
            URL uri = new URL(url);
            String query = uri.getQuery();
            if (query != null) {
                urlNoQuery = urlNoQuery.substring(0, urlNoQuery.length() - query.length() - 1);
                String[] queryItems = query.split("&");
                for (int i = 0; i < queryItems.length; i++) {
                    String[] queryItem = queryItems[i].split("=", 2);
                    if (queryItem.length > 1) {
                        queryParams.put(queryItem[0], queryItem[1]);
                    } else {
                        queryParams.put(queryItem[0], "");
                    }
                }
                httpParams.addAll(queryParams.entrySet());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }

        // Add OAuth signature
        Map<String, String> header = new HashMap<String, String>();
        OAuthMessage oAuthMessage = new OAuthMessage("POST", urlNoQuery, httpParams);
        OAuthConsumer oAuthConsumer = new OAuthConsumer("about:blank", this.consumer.getKey(),
                this.consumer.getSecret(), null);
        OAuthAccessor oAuthAccessor = new OAuthAccessor(oAuthConsumer);
        try {
            oAuthMessage.addRequiredParameters(oAuthAccessor);
            header.put("Authorization", oAuthMessage.getAuthorizationHeader(null));
            header.put("Content-Type", "application/xml");
        } catch (OAuthException e) {
        } catch (URISyntaxException e) {
        } catch (IOException e) {
        }

        try {
            StringEntity entity = new StringEntity(xmlRequest);

            // Connect to tool consumer
            this.extResponse = doPostRequest(url, LTIUtil.getHTTPParams(params), header, entity);

            // Parse XML response
            if (this.extResponse != null) {
                this.extDoc = LTIUtil.getXMLDoc(extResponse);
                ok = this.extDoc != null;
                if (ok) {
                    Element el = LTIUtil.getXmlChild(this.extDoc.getRootElement(), "imsx_statusInfo");
                    ok = el != null;
                    if (ok) {
                        String responseCode = LTIUtil.getXmlChildValue(el, "imsx_codeMajor");
                        ok = responseCode != null;
                        if (ok) {
                            ok = responseCode.equals("success");
                        }
                    }
                }
                if (!ok) {
                    this.extResponse = null;
                }
            }
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
    }
    return (this.extResponse != null);
}
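In this example the HashSet of Map.Entry values simply pools OAuth parameters from several maps before the request is signed. A stripped-down sketch of just that pooling step, with made-up parameter names (the OAuth signing itself is omitted):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class ParamPoolDemo {
    public static void main(String[] args) {
        Map<String, String> bodyParams = new HashMap<>();
        bodyParams.put("oauth_body_hash", "abc123");  // hypothetical hash value

        Map<String, String> queryParams = new HashMap<>();
        queryParams.put("user_id", "42");             // hypothetical query parameter

        // Pool entries from both maps into one set; addAll accepts any Collection,
        // so Map.entrySet() can be merged in directly.
        Set<Map.Entry<String, String>> httpParams = new HashSet<>();
        httpParams.addAll(bodyParams.entrySet());
        httpParams.addAll(queryParams.entrySet());

        System.out.println(httpParams.size()); // 2
    }
}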
From source file: org.apache.rya.rdftriplestore.inference.InferenceEngine.java

/**
 * Given some schema mapping types to (type, property) pairs that somehow imply the key type,
 * and given a particular type being queried for, expand the combinations of types and
 * properties that can imply the query type by including any pairs that could imply subtypes of
 * the query type (using the subclass graph), and by expanding each property into a set of all
 * subproperties that imply it (using the subproperty graph). Does not consider subtypes of
 * potential triggering types.
 * @param queryType The type whose possible derivations are needed
 * @param schemaMap Map of schema information such that each key represents a type that can
 *      somehow be derived from (other type x property) combinations, and the value provides
 *      those combinations that can be used for the implication.
 * @return Combinations of types and properties that can directly or indirectly imply the query
 *      type according to the schema provided and the subclass/superproperty graphs. Any
 *      individual type/property combination is sufficient. Returns an empty map if either
 *      parameter is {@code null}.
 */
private Map<Resource, Set<URI>> getTypePropertyImplyingType(final Resource queryType,
        final Map<Resource, Map<Resource, URI>> schemaMap) {
    final Map<Resource, Set<URI>> implications = new HashMap<>();
    if (schemaMap != null && queryType != null) {
        // Check for any subtypes which would in turn imply the type being queried for
        final HashSet<Resource> queryTypes = new HashSet<>();
        queryTypes.add(queryType);
        if (queryType instanceof URI) {
            queryTypes.addAll(getSubClasses((URI) queryType));
        }
        for (final Resource querySubType : queryTypes) {
            if (schemaMap.containsKey(querySubType)) {
                final Map<Resource, URI> otherTypeToProperty = schemaMap.get(querySubType);
                for (final Resource otherType : otherTypeToProperty.keySet()) {
                    if (!implications.containsKey(otherType)) {
                        implications.put(otherType, new HashSet<>());
                    }
                    final URI property = otherTypeToProperty.get(otherType);
                    if (property != null) {
                        implications.get(otherType).add(property);
                        // Also add subproperties that would in turn imply the property
                        implications.get(otherType).addAll(getSubProperties(property));
                    }
                }
            }
        }
    }
    return implications;
}
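Two addAll idioms appear here: seeding a set with one element and then widening it with a derived collection (the subclass closure), and building up per-key value sets inside a map. A small standalone sketch of the second idiom, with invented string keys and property names:

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class MultiMapAddAllDemo {
    public static void main(String[] args) {
        Map<String, Set<String>> implications = new HashMap<>();

        // Ensure a value set exists for the key, then merge several values at once.
        implications.computeIfAbsent("Person", k -> new HashSet<>())
                    .addAll(Arrays.asList("hasName", "hasBirthDate"));
        implications.computeIfAbsent("Person", k -> new HashSet<>())
                    .addAll(Arrays.asList("hasName", "hasEmail")); // "hasName" is not duplicated

        System.out.println(implications); // {Person=[hasName, hasBirthDate, hasEmail]} (order may vary)
    }
}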
From source file: it.iit.genomics.cru.igb.bundles.mi.business.MIWorker.java

@Override
public ArrayList<MIResult> doInBackground() {
    // Display the log tab
    MIView.getInstance().getResultsTabbedPan().setSelectedIndex(0);

    UniprotkbUtils uniprotUtil = UniprotkbUtils.getInstance(query.getTaxid());

    // Create a new Symmetry manager for each query
    symManager = new MISymManager(this.trackId);

    Collection<SeqSymmetry> selectedSyms = query.getSelectedSymmetries();

    // Initialize progress property.
    progressBar.setIndeterminate(false);
    ProgressManager progressManager = new ProgressManager(5);
    setProgress(progressManager.getProgress());

    Set<String> queryUniprotAcs = new HashSet<>();
    Set<String> targetUniprotAcs = new HashSet<>();

    // interactions
    MapOfMap<String, String> uniprotAc2uniprotAcs;

    // Interactions found
    ArrayList<MIResult> resultsInBackground = new ArrayList<>();

    logAndPublish("map selection to genome and proteins..");

    // Step 1
    progressManager.nextMajorStep(selectedSyms.size());

    // Get gene Symmetries covered by query symmetries.
    // Don't create containers at the moment: many gene syms for a single
    // gene may be present.

    // Order list of syms
    SymListOrderer list = new SymListOrderer();

    for (SeqSymmetry querySym : selectedSyms) {
        list.addSymmetry(querySym);
    }

    for (BioSeq chr : list.getSequences()) {
        ArrayList<SeqSymmetry> querySyms = list.getSymmetries(chr);
        getGenes(querySyms, chr, true);
        // for (MIGene gene : getGenes(querySyms, chr, true)) {
        //     // load exons (we only need this for the selected ones)
        //     miGene2selectedSyms.add(gene, querySym);
        // }

        progressManager.nextStep();
        setProgress(progressManager.getProgress());
    }

    // Step 2
    progressManager.nextMajorStep(miGene2selectedSyms.keySet().size());

    // Associate selected residues
    for (MIGene gene : miGene2selectedSyms.keySet()) {
        logAndPublish("Associate residues to " + gene.getID());

        MoleculeEntry protein = gene.getProtein();
        if (protein != null) {
            queryUniprotAcs.add(protein.getUniprotAc());

            MISymContainer container = symManager.getByProtein(protein);
            symManager.addGeneSymmetry(container, gene);
            symManager.addSelectedSymmetry(container, gene);
        } else {
            igbLogger.getLogger().warn("No protein for {0}", gene.getID());
        }
        progressManager.nextStep();
        setProgress(progressManager.getProgress());
    }

    // Step 3
    progressManager.nextMajorStep(queryUniprotAcs.size());

    // Get interactors
    uniprotAc2uniprotAcs = new MapOfMap<>(queryUniprotAcs);

    logAndPublish("get interactions");

    HashMap<String, MoleculeEntry> targetUniprotEntries = new HashMap<>();

    InteractionManager interactors = new InteractionManager();

    for (String ac : queryUniprotAcs) {
        logAndPublish("Get interactions for " + ac);

        if (ac == null) {
            continue;
        }

        try {
            if (false == PsicquicInitWorker.nullServer.equals(query.getPsiquicServer())
                    && null != query.getPsiquicServer()) {
                for (Interaction interaction : PsicquicUtils.getInstance()
                        .getInteractors(query.getPsiquicServer(), ac)) {
                    interactors.merge(interaction);
                }
            }
        } catch (BridgesRemoteAccessException be) {
            igbLogger.severe("Cannot access PSICQUIC server!");
            break;
        }

        // // Add interactors from User structures?
        // if (null != query.getUserStructuresPath() && query.searchUserStructures()) {
        //     Interactome3DLocalRepository userStructures = UserStructuresManager.getInstance()
        //             .getUserRepository(query.getUserStructuresPath());
        //     for (String interactorAc : userStructures.getInteractors(ac)) {
        //         interactors.getOrCreateInteraction(ac, interactorAc).addType(INTERACTION_TYPE_I3D);
        //         uniprotNeedMapping.add(interactorAc);
        //     }
        // }

        // Add interactors from I3D?
        if (query.searchInteractome3D() || query.searchDSysMap()) {
            // Check or download I3D interaction file.
            // Get it from the local repository?
            Interactome3DLocalRepository userStructures;
            // System.out.println("I3D cache: " + MIBundleConfiguration.getInstance().getI3DStructuresDirectory());
            if (null != MIBundleConfiguration.getInstance().getI3DStructuresDirectory()) {
                userStructures = UserStructuresManager.getInstance()
                        .getUserRepository(MIBundleConfiguration.getInstance().getI3DStructuresDirectory());
            } else {
                I3DDownload download = new I3DDownload(MIBundleConfiguration.getInstance().getCachePath());

                if (false == download.isDatDownloaded(query.getTaxid())) {
                    logAndPublish("download interactions from Interactome3D");
                    download.downloadDat(query.getTaxid());
                }

                // get interactions
                userStructures = UserStructuresManager.getInstance()
                        .getUserRepository(download.getI3DdatPath(query.getTaxid()));
            }

            for (String interactorAc : userStructures.getInteractors(ac)) {
                interactors.getOrCreateInteraction(ac, interactorAc)
                        .addType("direct interaction (Interactome3D)");
                uniprotNeedMapping.add(interactorAc);
            }
        }

        // add interactors from PDB structures
        if (query.searchPDB() || query.searchPDBLocal() || query.searchEPPIC()) {
            MoleculeEntry entry = symManager.getByProteinAc(ac).getEntry();

            PDBWSClient client = new PDBWSClient();

            // Do only 10 by 10
            List<String> pdbs = new ArrayList<>();
            pdbs.addAll(entry.getPdbs());

            while (false == pdbs.isEmpty()) {
                List<String> subset = pdbs.subList(0, Math.min(10, pdbs.size()));
                pdbs = pdbs.subList(Math.min(10, pdbs.size()), pdbs.size());

                if (query.searchPPI() || query.searchNucleicAcid()) {
                    MoleculeDescription molDesc;
                    try {
                        molDesc = client.getDescription(subset);
                    } catch (BridgesRemoteAccessException be) {
                        igbLogger.severe("Cannot access PDB!");
                        break;
                    }

                    if (molDesc != null) {
                        for (StructureID structureId : molDesc.getStructureId()) {
                            for (Polymer polymer : structureId.getPolymers()) {
                                if (polymer.getPolymerDescription() == null) {
                                    igbLogger.severe("No description for " + structureId.getId());
                                }

                                if (null != polymer.getType()) {
                                    switch (polymer.getType()) {
                                    case "protein":
                                        if (query.searchPPI() && null != polymer.getMacromolecule()) {
                                            String proteinAc = polymer.getMacromolecule().getAccession().get(0);

                                            if (false == proteinAc.equals(entry.getUniprotAc())
                                                    || polymer.getChains().size() > 1) {
                                                interactors.getOrCreateInteraction(ac, proteinAc)
                                                        .addType(INTERACTION_TYPE_PDB);
                                                uniprotNeedMapping.add(ac);
                                            }
                                        }
                                        break;
                                    case "dna":
                                        if (false == query.searchNucleicAcid()) {
                                            break;
                                        }
                                        // Merge all DNA entries, use "DNA" as
                                        // name rather than the description
                                        MISymContainer dnaSym = symManager
                                                .getByProteinAc(MoleculeEntry.TAXID_DNA);
                                        uniprotNeedMapping.add(ac);
                                        interactors.getOrCreateInteraction(ac, MoleculeEntry.TAXID_DNA)
                                                .addType(INTERACTION_TYPE_PDB);

                                        if (dnaSym == null) {
                                            MoleculeEntry dnaEntry = new MoleculeEntry(MoleculeEntry.TAXID_DNA);
                                            dnaEntry.setSequence("");
                                            dnaEntry.setTaxid(MoleculeEntry.TAXID_DNA);
                                            targetUniprotEntries.put(MoleculeEntry.TAXID_DNA, dnaEntry);
                                            dnaEntry.addGeneName(MoleculeEntry.TAXID_DNA);
                                            dnaSym = symManager.getByProtein(dnaEntry);
                                        }

                                        MoleculeEntry dnaEntry = dnaSym.getEntry();

                                        for (Chain chain : polymer.getChains()) {
                                            ChainMapping chainMapping = new ChainMapping(structureId.getId(),
                                                    chain.getId(), 0, 0);
                                            dnaEntry.addChain(structureId.getId(), chainMapping, "unspecified");
                                        }
                                        break;
                                    case "rna":
                                        if (false == query.searchNucleicAcid()) {
                                            break;
                                        }
                                        uniprotNeedMapping.add(ac);
                                        // Merge all RNA entries, use "RNA" as
                                        // name rather than the description
                                        MISymContainer rnaSym = symManager
                                                .getByProteinAc(MoleculeEntry.TAXID_RNA);
                                        interactors.getOrCreateInteraction(ac, MoleculeEntry.TAXID_RNA)
                                                .addType(INTERACTION_TYPE_PDB);
                                        if (rnaSym == null) {
                                            MoleculeEntry rnaEntry = new MoleculeEntry(MoleculeEntry.TAXID_RNA);
                                            rnaEntry.setSequence("");
                                            rnaEntry.setTaxid(MoleculeEntry.TAXID_RNA);
                                            targetUniprotEntries.put(MoleculeEntry.TAXID_RNA, rnaEntry);
                                            rnaEntry.addGeneName(MoleculeEntry.TAXID_RNA);
                                            rnaSym = symManager.getByProtein(rnaEntry);
                                        }
                                        MoleculeEntry rnaEntry = rnaSym.getEntry();

                                        for (Chain chain : polymer.getChains()) {
                                            ChainMapping chainMapping = new ChainMapping(structureId.getId(),
                                                    chain.getId(), 0, 0);
                                            rnaEntry.addChain(structureId.getId(), chainMapping, "unspecified");
                                        }
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }

                if (query.searchLigands() && false == query.searchEPPIC()) {
                    try {
                        for (Ligand ligand : client.getLigands(subset)) {
                            /**
                             * Only non polymer ligands
                             */
                            if (false == ligand.isNonPolymer()) {
                                continue;
                            }

                            int numAtoms = 0;
                            for (String atom : ligand.getFormula().split(" ")) {
                                String num = atom.replaceAll("\\D+", "").trim();
                                if ("".equals(num)) {
                                    numAtoms++;
                                } else {
                                    numAtoms += Integer.parseInt(num);
                                }
                            }

                            if (numAtoms <= 10) {
                                igbLogger.info("Skip ligand: " + ligand.getFormula());
                                continue;
                            }
                            uniprotNeedMapping.add(ac);
                            MISymContainer misym = symManager.getByProteinAc(ligand.getChemicalName());
                            interactors.getOrCreateInteraction(ac, ligand.getChemicalName())
                                    .addType(INTERACTION_TYPE_PDB);
                            if (misym == null) {
                                MoleculeEntry ligandEntry = new MoleculeEntry(ligand.getChemicalName());
                                ligandEntry.setSequence("");
                                ligandEntry.setTaxid(MoleculeEntry.TAXID_LIGAND);
                                ligandEntry.addGeneName(ligand.getChemicalId());
                                targetUniprotEntries.put(ligand.getChemicalName(), ligandEntry);

                                misym = symManager.getByProtein(ligandEntry);
                            }
                            MoleculeEntry ligandEntry = misym.getEntry();

                            ChainMapping chainMapping = new ChainMapping(ligand.getStructureId(), "ligand", 0, 0);
                            ligandEntry.addChain(ligand.getStructureId(), chainMapping, "unspecified");
                        }
                    } catch (BridgesRemoteAccessException be) {
                        igbLogger.severe("Cannot access PDB!");
                        break;
                    }
                }
            }
        }

        if (query.searchModifications()) {
            MoleculeEntry entry = symManager.getByProteinAc(ac).getEntry();
            for (ModifiedResidue modification : entry.getModifications()) {
                MISymContainer misym = symManager.getByProteinAc(modification.getDescription());
                uniprotNeedMapping.add(ac);
                if (misym == null) {
                    interactors.getOrCreateInteraction(ac, modification.getDescription())
                            .addType("direct interaction (Uniprot)");
                    // interactors.add(modification.getDescription(), "association");
                    MoleculeEntry ligandEntry = new MoleculeEntry(modification.getDescription());
                    ligandEntry.setSequence("");
                    ligandEntry.setTaxid(MoleculeEntry.TAXID_MODIFICATION);
                    ligandEntry.addGeneName(modification.getDescription());
                    targetUniprotEntries.put(modification.getDescription(), ligandEntry);
                    symManager.getByProtein(ligandEntry);
                }
            }
        }

        Collection<String> interactorUniprotAcs = interactors.getInteractors();

        for (String interactorUniprotAc : interactorUniprotAcs) {
            // Skip the interaction if the query type is INTRA (i.e. only
            // interactions between selected genes) and one of the proteins
            // was not selected
            if (QueryType.EXTRA.equals(query.getQueryType())
                    || queryUniprotAcs.contains(interactorUniprotAc)) {
                uniprotAc2uniprotAcs.add(ac, interactorUniprotAc);
                targetUniprotAcs.add(interactorUniprotAc);

                // String key = ac + "#" + interactorUniprotAc;
                // interactionTypes.addAll(key, interactors.get(interactorUniprotAc));
                // At this point we may not have created the symmetry
            }
        }

        progressManager.nextStep();
        setProgress(progressManager.getProgress());
    }

    // Only look for uniprot Acs for which we don't have an entry yet
    HashSet<String> uniprotAcToSearch = new HashSet<>();
    uniprotAcToSearch.addAll(targetUniprotAcs);
    uniprotAcToSearch.removeAll(symManager.getProteinAcs());

    // Allow proteins from other species
    try {
        targetUniprotEntries
                .putAll(uniprotUtil.getUniprotEntriesFromUniprotAccessions(uniprotAcToSearch, false));
    } catch (BridgesRemoteAccessException be) {
        igbLogger.severe("Cannot access Uniprot!");
        return resultsInBackground;
    }

    for (MoleculeEntry entry : targetUniprotEntries.values()) {
        MISymContainer container = symManager.getByProtein(entry);
        if (container == null) {
        }
    }

    // missing ones?
    Collection<String> missingUniprotAcs = new ArrayList<>();
    missingUniprotAcs.addAll(uniprotAcToSearch);
    missingUniprotAcs.removeAll(targetUniprotEntries.keySet());

    for (String missingAc : missingUniprotAcs) {
        MICommons.getInstance().addProteinToBlackList(missingAc);
    }

    for (MISymContainer container : symManager.getQueryContainers()) {
        if (null != container.getEntry()) {
            targetUniprotEntries.put(container.getEntry().getUniprotAc(), container.getEntry());
        }
    }

    // Do I need it if I don't need symmetries?
    // Step 4
    progressManager.nextMajorStep(targetUniprotEntries.values().size());

    for (MoleculeEntry uniprotEntry : targetUniprotEntries.values()) {
        logAndPublish("create symmetry for " + uniprotEntry.getUniprotAc());

        // Get symmetry, it has not necessarily been created yet
        MISymContainer container = symManager.getByProtein(uniprotEntry);

        Collection<String> geneIds;

        // Check if we are using the Ensembl web service or QuickLoad.
        if (EnsemblGeneManager.class.isInstance(geneManager)) {
            geneIds = uniprotEntry.getEnsemblGenes();
        } else {
            geneIds = new HashSet<>();
            geneIds.addAll(uniprotEntry.getGeneNames());
            geneIds.addAll(uniprotEntry.getRefseqs());
            geneIds.addAll(uniprotEntry.getEnsemblGenes());
        }

        SimpleSymWithProps overlappingSym = new SimpleSymWithProps();
        overlappingSym.setProperty(TrackLineParser.ITEM_RGB, Color.RED);
        overlappingSym.setID(this.trackId + "-" + uniprotEntry.getGeneName());

        for (String geneId : geneIds) {
            Collection<MIGene> genes = geneManager.getByID(geneId);

            // For each gene create a "result symmetry", which will be
            // displayed in the interaction track
            if (genes.isEmpty()) {
                continue;
            }

            RangeMerger merger = new RangeMerger();

            for (MIGene gene : genes) {
                if (null == gene) {
                    continue;
                }

                if (null != uniprotEntry.getVarSpliceAC(gene.getID())) {
                    gene.getUniprotAcs().add(uniprotEntry.getUniprotAc());
                    gene.setProtein(uniprotEntry);
                    symManager.addGeneSymmetry(container, gene);

                    BioSeq chromosome = geneManager.getSequence(gene.getChromosomeName());
                    if (chromosome == null) {
                        igbLogger.severe("Unavailable sequence: " + gene.getChromosomeName()
                                + ", there may be a network problem.");
                        continue;
                    }
                    merger.addRange(chromosome.getId(), new Range(gene.getMin(), gene.getMax()));
                }
            }

            for (String seq : merger.getSequences()) {
                BioSeq chromosome = geneManager.getSequence(seq);
                if (chromosome == null) {
                    igbLogger.severe("No sequence for chromosome: " + seq);
                }
                for (Range range : merger.getRanges(seq)) {
                    SeqSpan span = new SimpleSeqSpan(range.getMin(), range.getMax(), chromosome);

                    // Check if it already has this span
                    boolean hasSpan = false;
                    for (int i = 0; i < overlappingSym.getSpanCount(); i++) {
                        SeqSpan otherSpan = overlappingSym.getSpan(i);
                        if (otherSpan.getMin() == span.getMin() && otherSpan.getMax() == span.getMax()) {
                            hasSpan = true;
                            break;
                        }
                    }
                    if (false == hasSpan) {
                        overlappingSym.addSpan(span);
                    }
                }
            }

            if (false == genes.isEmpty()) {
                // we found it
                break;
            }
        }

        symManager.setResultSym(container, overlappingSym);

        progressManager.nextStep();
        setProgress(progressManager.getProgress());
    }

    for (String ac : uniprotNeedMapping) {
        MISymContainer proteinContainer = symManager.getByProteinAc(ac);
        for (MIGene gene : proteinContainer.getMiGenes()) {

            if (false == miGene2selectedSyms.containsKey(gene)) {
                continue;
            }
            for (SeqSymmetry selectedSym : miGene2selectedSyms.get(gene)) {
                logAndPublish("Load residues for " + gene.getID());
                geneManager.loadTranscriptSequence(selectedSym.getSpanSeq(0), gene);

                // Maybe the protein was already assigned to the gene.
                // In order to be sure we are working on the right one,
                // don't use the protein variable, but get it from the gene.
                ArrayList<AAPosition> aaPositions = new ArrayList<>();

                // Symmetries are 0-based exclusive,
                // use max - 1 to have inclusive coordinates
                Collection<AAPosition> positions = AAPositionManager.getAAPositionManager(query.getLabel())
                        .getAAPositions(gene, selectedSym.getSpan(0).getMin(),
                                selectedSym.getSpan(0).getMax() - 1);
                aaPositions.addAll(positions);

                for (AAPosition aa : aaPositions) {
                    gene2pos.add(gene, aa);
                }
                symManager.addSelectedResidues(gene.getProtein(), aaPositions);
            }
        }
    }

    // Step 5
    // don't add the same interaction twice
    HashSet<String> interactionsDone = new HashSet<>();

    progressManager.nextMajorStep(symManager.getQueryContainers().size());

    for (MISymContainer container : symManager.getQueryContainers()) {
        logAndPublish(container.getEntry().getGeneName());

        if (null == container.getEntry()) {
            continue;
        }
        if (null == container.getResultSym()) {
            continue;
        }
        String queryUniprotAc = container.getEntry().getUniprotAc();

        if (null == uniprotAc2uniprotAcs.get(queryUniprotAc)) {
            continue;
        }
        if (MICommons.getInstance().isBlackListed(queryUniprotAc)) {
            continue;
        }

        for (String targetUniprotAc : uniprotAc2uniprotAcs.get(queryUniprotAc)) {

            if (MICommons.getInstance().isBlackListed(targetUniprotAc)) {
                continue;
            }

            // An interaction may be selected twice, as A-B and B-A; avoid this.
            if (interactionsDone.contains(targetUniprotAc + "#" + queryUniprotAc)
                    || interactionsDone.contains(queryUniprotAc + "#" + targetUniprotAc)) {
                continue;
            }
            interactionsDone.add(queryUniprotAc + "#" + targetUniprotAc);

            MISymContainer targetContainer = symManager.getByProteinAc(targetUniprotAc);

            if (targetContainer == null) {
                continue;
            }
            if (targetContainer.getEntry() == null) {
                continue;
            }
            if (targetContainer.getResultSym() == null) {
                continue;
            }
            MIResult result = new MIResult(trackId, container, targetContainer,
                    interactors.getOrCreateInteraction(container.getEntry().getUniprotAc(),
                            targetContainer.getEntry().getUniprotAc()),
                    query, symManager);
            resultsInBackground.add(result);
            miSymmetries.add(targetContainer.getResultSym());
        }
        progressManager.nextStep();
        setProgress(progressManager.getProgress());
    }

    AAPositionManager.removeManager(query.getLabel());

    return resultsInBackground;
}
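Several HashSet.addAll idioms appear in this worker: building a union of gene identifiers from several sources, and computing a set difference with addAll followed by removeAll (accessions to search = targets minus those already known). A compact sketch of the difference idiom, with invented accession strings:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class SetDifferenceDemo {
    public static void main(String[] args) {
        Set<String> targetAcs = new HashSet<>(Arrays.asList("P12345", "Q67890", "O11111"));
        Set<String> alreadyKnown = new HashSet<>(Arrays.asList("Q67890"));

        // Copy the targets, then subtract what is already known.
        Set<String> toSearch = new HashSet<>();
        toSearch.addAll(targetAcs);
        toSearch.removeAll(alreadyKnown);

        System.out.println(toSearch); // [P12345, O11111] (order may vary)
    }
}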
From source file: com.vmware.bdd.manager.ClusterManager.java

private void checkExtraRequiredPackages() {
    logger.info("check if extra required packages(mailx and wsdl4j) have been installed for Ironfan.");
    if (!extraPackagesExisted) {
        File yumRepoPath = new File(Constants.SERENGETI_YUM_REPO_PATH);

        // use hs to record the packages that have not been added
        final HashSet<String> hs = new HashSet<String>();
        hs.addAll(extraRequiredPackages);

        // scan the files under the serengeti yum repo directory
        File[] rpmList = yumRepoPath.listFiles(new FileFilter() {
            @Override
            public boolean accept(File f) {
                String fname = f.getName();
                int idx = fname.indexOf("-");
                if (idx > 0) {
                    String packName = fname.substring(0, idx);
                    if (extraRequiredPackages.contains(packName)) {
                        String regx = packName + commRegex;
                        Pattern pat = Pattern.compile(regx);
                        if (pat.matcher(fname).matches()) {
                            hs.remove(packName);
                            return true;
                        }
                    }
                }
                return false;
            }
        });

        if (!hs.isEmpty()) {
            logger.info("cannot find all the needed packages, stop and return error now. ");
            throw BddException.EXTRA_PACKAGES_NOT_FOUND(hs.toString());
        }

        logger.info("the check is successful: all needed packages are there.");
        extraPackagesExisted = true;
    }
}
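The addAll call here copies the full list of required package names into a working set, from which found packages are removed as the directory scan progresses; whatever remains afterwards is missing. A minimal standalone sketch of that bookkeeping, with made-up package and file names:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class MissingPackagesDemo {
    public static void main(String[] args) {
        List<String> required = Arrays.asList("mailx", "wsdl4j");
        List<String> filesInRepo = Arrays.asList("mailx-12.4-8.el6.x86_64.rpm", "httpd-2.4.6.rpm");

        // Start with every required package, then cross off the ones we find.
        Set<String> missing = new HashSet<>();
        missing.addAll(required);
        for (String fname : filesInRepo) {
            int idx = fname.indexOf('-');
            if (idx > 0) {
                missing.remove(fname.substring(0, idx));
            }
        }

        System.out.println(missing); // [wsdl4j]
    }
}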
From source file: org.epics.archiverappliance.retrieval.DataRetrievalServlet.java

/**
 * If the pv is hosted on another appliance, proxy retrieval requests from that appliance.
 * We expect to return immediately after this method.
 * @param req
 * @param resp
 * @param pvName
 * @param useChunkedEncoding
 * @param dataRetrievalURLForPV
 * @throws IOException
 */
private void proxyRetrievalRequest(HttpServletRequest req, HttpServletResponse resp, String pvName,
        boolean useChunkedEncoding, String dataRetrievalURLForPV) throws IOException {
    try {
        // TODO add some intelligent business logic to determine if redirect/proxy.
        // It may be beneficial to support both and choose based on where the client is calling from,
        // or perhaps from a header?
        boolean redirect = false;
        if (redirect) {
            logger.debug("Data for pv " + pvName + "is elsewhere. Redirecting to appliance "
                    + dataRetrievalURLForPV);
            URI redirectURI = new URI(dataRetrievalURLForPV + "/" + req.getPathInfo());
            String redirectURIStr = redirectURI.normalize().toString() + "?" + req.getQueryString();
            logger.debug("URI for redirect is " + redirectURIStr);
            resp.sendRedirect(redirectURIStr);
            return;
        } else {
            logger.debug("Data for pv " + pvName + "is elsewhere. Proxying appliance " + dataRetrievalURLForPV);
            URI redirectURI = new URI(dataRetrievalURLForPV + "/" + req.getPathInfo());
            String redirectURIStr = redirectURI.normalize().toString() + "?" + req.getQueryString();
            logger.debug("URI for proxying is " + redirectURIStr);

            // if(useChunkedEncoding) {
            //     resp.addHeader("Transfer-Encoding", "chunked");
            // }

            CloseableHttpClient httpclient = HttpClients.createDefault();
            HttpGet getMethod = new HttpGet(redirectURIStr);
            // https://www.nuxeo.com/blog/using-httpclient-properly-avoid-closewait-tcp-connections/
            getMethod.addHeader("Connection", "close");
            try (CloseableHttpResponse response = httpclient.execute(getMethod)) {
                if (response.getStatusLine().getStatusCode() == 200) {
                    HttpEntity entity = response.getEntity();

                    HashSet<String> proxiedHeaders = new HashSet<String>();
                    proxiedHeaders.addAll(Arrays.asList(MimeResponse.PROXIED_HEADERS));
                    Header[] headers = response.getAllHeaders();
                    for (Header header : headers) {
                        if (proxiedHeaders.contains(header.getName())) {
                            logger.debug("Adding headerName " + header.getName() + " and value "
                                    + header.getValue() + " when proxying request");
                            resp.addHeader(header.getName(), header.getValue());
                        }
                    }

                    if (entity != null) {
                        logger.debug("Obtained a HTTP entity of length " + entity.getContentLength());
                        try (OutputStream os = resp.getOutputStream();
                                InputStream is = new BufferedInputStream(entity.getContent())) {
                            byte buf[] = new byte[10 * 1024];
                            int bytesRead = is.read(buf);
                            while (bytesRead > 0) {
                                os.write(buf, 0, bytesRead);
                                resp.flushBuffer();
                                bytesRead = is.read(buf);
                            }
                        }
                    } else {
                        throw new IOException("HTTP response did not have an entity associated with it");
                    }
                } else {
                    logger.error("Invalid status code " + response.getStatusLine().getStatusCode()
                            + " when connecting to URL " + redirectURIStr + ". Sending the errorstream across");
                    try (ByteArrayOutputStream os = new ByteArrayOutputStream()) {
                        try (InputStream is = new BufferedInputStream(response.getEntity().getContent())) {
                            byte buf[] = new byte[10 * 1024];
                            int bytesRead = is.read(buf);
                            while (bytesRead > 0) {
                                os.write(buf, 0, bytesRead);
                                bytesRead = is.read(buf);
                            }
                        }
                        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                        resp.sendError(response.getStatusLine().getStatusCode(), new String(os.toByteArray()));
                    }
                }
            }
        }
        return;
    } catch (URISyntaxException ex) {
        throw new IOException(ex);
    }
}
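addAll(Arrays.asList(...)) is the idiomatic way to turn an array of allowed header names into a HashSet, so the membership check while copying headers is constant time. A trimmed-down sketch of that whitelist check, using an invented header array in place of MimeResponse.PROXIED_HEADERS:

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class HeaderWhitelistDemo {
    // Stand-in for MimeResponse.PROXIED_HEADERS in the servlet above.
    private static final String[] PROXIED_HEADERS = { "Content-Type", "Content-Disposition" };

    public static void main(String[] args) {
        Set<String> proxiedHeaders = new HashSet<>();
        proxiedHeaders.addAll(Arrays.asList(PROXIED_HEADERS));

        Map<String, String> upstreamHeaders = new HashMap<>();
        upstreamHeaders.put("Content-Type", "application/json");
        upstreamHeaders.put("Set-Cookie", "session=abc"); // should not be forwarded

        // Copy only the whitelisted headers.
        upstreamHeaders.forEach((name, value) -> {
            if (proxiedHeaders.contains(name)) {
                System.out.println("forwarding " + name + ": " + value);
            }
        });
    }
}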
From source file: com.pari.reports.request.handlers.ManageExportHandlerImpl.java

private HashSet<Object> getMembers(LWObjectGroup group) {
    HashSet<Object> deviceIds = new HashSet<Object>();
    if (group.getMemberIds() != null && group.getMemberIds().length > 0) {
        deviceIds.addAll(Arrays.asList(group.getMemberIds()));
    }
    if (group.getChildren() != null) {
        for (LWObjectGroup child : group.getChildren()) {
            deviceIds.addAll(getMembers(child));
        }
    }
    if (getRequestDetails().getReportId().equals("manage_interface_groups")) {
        return deviceIds;
    }
    return getGroupMembers(deviceIds.toArray());
}
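This example combines two earlier idioms: addAll(Arrays.asList(array)) for the group's own member IDs, and addAll on a recursive call for the children's IDs, so the whole subtree is collected without duplicates. A self-contained sketch with a hypothetical Group class in place of LWObjectGroup:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class Group {
    Integer[] memberIds;
    List<Group> children;

    Group(Integer[] memberIds, List<Group> children) {
        this.memberIds = memberIds;
        this.children = children;
    }

    Set<Integer> collectMemberIds() {
        Set<Integer> ids = new HashSet<>();
        if (memberIds != null) {
            ids.addAll(Arrays.asList(memberIds));     // own members
        }
        if (children != null) {
            for (Group child : children) {
                ids.addAll(child.collectMemberIds()); // members of the whole subtree
            }
        }
        return ids;
    }

    public static void main(String[] args) {
        Group leaf = new Group(new Integer[] { 2, 3 }, null);
        Group root = new Group(new Integer[] { 1, 2 }, Arrays.asList(leaf));
        System.out.println(root.collectMemberIds()); // [1, 2, 3] (order may vary)
    }
}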
From source file: it.cnr.icar.eric.common.BindingUtility.java

/**
 * Gets the composed RegistryObjects within specified RegistryObject.
 * Based on scanning rim.xsd for </sequence>.
 *
 * @param registryObjects specifies the RegistryObjects whose composed objects are being sought.
 * @param depth specifies depth of fetch. -1 implies fetch all levels. 1 implies fetch immediate composed objects.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public Set getComposedRegistryObjects(Collection<?> registryObjects, int depth) {
    HashSet composedObjects = new HashSet();

    Iterator<?> iter = registryObjects.iterator();
    while (iter.hasNext()) {
        RegistryObjectType ro = (RegistryObjectType) iter.next();
        composedObjects.addAll(getComposedRegistryObjects(ro, depth));
    }

    return composedObjects;
}
From source file: org.alfresco.repo.security.person.PersonServiceImpl.java

private NodeRef handleDuplicates(List<NodeRef> refs, String searchUserName) {
    if (processDuplicates) {
        NodeRef best = findBest(refs);
        HashSet<NodeRef> toHandle = new HashSet<NodeRef>();
        toHandle.addAll(refs);
        toHandle.remove(best);
        addDuplicateNodeRefsToHandle(toHandle);
        return best;
    } else {
        String userNameSensitivity = " (user name is case-"
                + (userNameMatcher.getUserNamesAreCaseSensitive() ? "sensitive" : "insensitive") + ")";
        String domainNameSensitivity = "";
        if (!userNameMatcher.getDomainSeparator().equals("")) {
            domainNameSensitivity = " (domain name is case-"
                    + (userNameMatcher.getDomainNamesAreCaseSensitive() ? "sensitive" : "insensitive") + ")";
        }

        throw new AlfrescoRuntimeException("Found more than one user for " + searchUserName
                + userNameSensitivity + domainNameSensitivity);
    }
}
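The addAll/remove pair builds the set "all candidates except the chosen one" that still needs handling. A minimal sketch of that pattern with plain strings in place of NodeRefs:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class DuplicateHandlingDemo {
    public static void main(String[] args) {
        List<String> duplicates = Arrays.asList("node-1", "node-2", "node-3");
        String best = "node-2"; // however the "best" candidate is chosen

        // Everything except the winner still has to be dealt with.
        Set<String> toHandle = new HashSet<>();
        toHandle.addAll(duplicates);
        toHandle.remove(best);

        System.out.println(toHandle); // [node-1, node-3] (order may vary)
    }
}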
From source file: org.hyperic.hq.measurement.server.session.AvailabilityManagerImpl.java

@SuppressWarnings("unchecked")
/**
 * get AvailabilityDataRLEs for the given DataPoints' Measurement IDs, with endData within the last 7 days.
 * If several AvailabilityDataRLEs exist for the same Measurement, they are listed in ascending order.
 * @param outOfOrderAvail
 * @param updateList
 * @return
 */
private Map<Integer, TreeSet<AvailabilityDataRLE>> createCurrAvails(final List<DataPoint> outOfOrderAvail,
        final List<DataPoint> updateList) {
    Map<Integer, TreeSet<AvailabilityDataRLE>> currAvails = null;
    final StopWatch watch = new StopWatch();
    try {
        if (outOfOrderAvail.size() == 0 && updateList.size() == 0) {
            currAvails = Collections.EMPTY_MAP;
        }

        long now = TimingVoodoo.roundDownTime(System.currentTimeMillis(), 60000);

        HashSet<Integer> mids = getMidsWithinAllowedDataWindow(updateList, now);
        mids.addAll(getMidsWithinAllowedDataWindow(outOfOrderAvail, now));
        if (mids.size() <= 0) {
            currAvails = Collections.EMPTY_MAP;
        }

        Integer[] mIds = (Integer[]) mids.toArray(new Integer[0]);
        currAvails = availabilityDataDAO.getHistoricalAvailMap(mIds, now - MAX_DATA_BACKLOG_TIME, false);
        return currAvails;
    } finally {
        if (log.isDebugEnabled()) {
            log.debug("AvailabilityInserter setCurrAvails: " + watch + ", size=" + currAvails.size());
        }
    }
}
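Here addAll merges the measurement IDs derived from two separate lists into a single HashSet, so the subsequent database query runs once over the union with no repeated IDs. A compact sketch of that union step, with an invented helper standing in for getMidsWithinAllowedDataWindow:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class MeasurementIdUnionDemo {
    // Stand-in for getMidsWithinAllowedDataWindow: here it just copies the ids.
    static Set<Integer> idsWithinWindow(List<Integer> ids) {
        return new HashSet<>(ids);
    }

    public static void main(String[] args) {
        List<Integer> updateList = Arrays.asList(101, 102);
        List<Integer> outOfOrder = Arrays.asList(102, 103);

        // Union of both ID sets; 102 appears only once.
        Set<Integer> mids = idsWithinWindow(updateList);
        mids.addAll(idsWithinWindow(outOfOrder));

        Integer[] mIds = mids.toArray(new Integer[0]);
        System.out.println(Arrays.toString(mIds)); // e.g. [101, 102, 103]
    }
}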