List of usage examples for java.util.HashMap keySet()
public Set<K> keySet()
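keySet() returns a Set view of the keys in the map: the set is backed by the map, so removing a key through the set removes the corresponding entry, while add/addAll are unsupported. Before the examples from real projects below, a minimal self-contained sketch of typical usage (class and variable names are illustrative):

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

public class KeySetDemo {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();
        counts.put("a", 1);
        counts.put("b", 2);

        // the set is a live view of the map's keys
        Set<String> keys = counts.keySet();
        for (String k : keys) {
            System.out.println(k + " -> " + counts.get(k));
        }

        // removing through the view also removes the mapping
        keys.remove("a");
        System.out.println(counts.containsKey("a")); // prints: false
    }
}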
From source file:com.act.reachables.Network.java
public static JSONObject edgeObj(Edge e, HashMap<Node, Integer> order) throws JSONException {
    JSONObject eo = new JSONObject();
    if (order != null) {
        // 1. when printing a graph (and not a tree), the source and target nodeMapping are identified
        //    by the array index they appear in within the nodeMapping JSONArray. Those indices are
        //    contained in the order-map.
        // 2. such an ordering is not required when we are working with trees, so these fields are
        //    not output there.
        eo.put("source", order.get(e.src)); // required, and have to look up its order in the node spec
        eo.put("target", order.get(e.dst)); // required, and have to look up its order in the node spec
    }
    // eo.put("source_id", e.src.id); // only informational
    // eo.put("target_id", e.dst.id); // only informational
    // eo.put("value", 1); // weight of edge: not really needed
    HashMap<String, Serializable> attr = e.getAttr();
    for (String k : attr.keySet()) {
        // only output the fields relevant to the reachables tree structure
        if (k.equals("under_root") || k.equals("functionalCategory") || k.equals("importantAncestor"))
            eo.put(k, attr.get(k).toString());
    }
    return eo;
}
From source file:com.act.reachables.Network.java
public static JSONObject nodeObj(MongoDB db, Node n) throws JSONException {
    Chemical thisChemical = db.getChemicalFromChemicalUUID(n.id);
    JSONObject no = thisChemical == null ? new JSONObject()
            : new JSONObject(ComputeReachablesTree.getExtendedChemicalInformationJSON(thisChemical));
    no.put("id", n.id);
    HashMap<String, Serializable> attr = n.getAttr();
    for (String k : attr.keySet()) {
        // only output the fields relevant to the reachables tree structure
        if (k.equals("NameOfLen20") || k.equals("ReadableName") || k.equals("Synonyms") || k.equals("InChI")
                || k.equals("InChiKEY") || k.equals("parent") || k.equals("under_root")
                || k.equals("num_children") || k.equals("subtreeVendorsSz") || k.equals("subtreeSz")
                || k.equals("SMILES"))
            no.put(k, attr.get(k).toString());
        if (k.equals("has"))
            no.put(k, attr.get(k));
    }
    // Object v;
    // String label = "" + ((v = n.getAttribute("canonical")) != null ? v : n.id);
    // no.put("name", label); // required
    // String layer = "" + ((v = n.getAttribute("globalLayer")) != null ? v : 1);
    // no.put("group", layer); // required: node color by group
    return no;
}
From source file:com.likya.myra.jef.utils.JobQueueOperations.java
public static ArrayList<AbstractJobType> getSubset(HashMap<String, String> jobIdList) {
    ArrayList<AbstractJobType> abstractJobTypeList = new ArrayList<AbstractJobType>();
    for (String jobId : jobIdList.keySet()) {
        AbstractJobType tmpAbstractJobType = CoreFactory.getInstance().getMonitoringOperations().getJobQueue()
                .get(jobId).getAbstractJobType();
        abstractJobTypeList.add(tmpAbstractJobType);
    }
    return abstractJobTypeList;
}
From source file:org.grameenfoundation.consulteca.utils.HttpHelpers.java
private static void addHeaders(HttpGet httpGet, HashMap<String, String> headers) {
    if (headers == null) {
        return;
    }
    Set<String> keys = headers.keySet();
    for (String key : keys) {
        httpGet.addHeader(key, headers.get(key));
    }
}
From source file:org.grameenfoundation.consulteca.utils.HttpHelpers.java
private static void addHeaders(HttpPost httpPost, HashMap<String, String> headers) {
    if (headers == null) {
        return;
    }
    Set<String> keys = headers.keySet();
    for (String key : keys) {
        httpPost.addHeader(key, headers.get(key));
    }
}
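Both addHeaders variants above iterate keySet() and then call get(key) for every key; when key and value are both needed, entrySet() avoids the second hash lookup. A minimal sketch of the same method written over entries (HttpPost is Apache HttpClient's class, as in the example above; this is an alternative shape, not the original source):

import java.util.HashMap;
import java.util.Map;
import org.apache.http.client.methods.HttpPost;

class HttpHelpersSketch {
    static void addHeaders(HttpPost httpPost, HashMap<String, String> headers) {
        if (headers == null) {
            return;
        }
        // each entry carries key and value together, so no get() per key
        for (Map.Entry<String, String> entry : headers.entrySet()) {
            httpPost.addHeader(entry.getKey(), entry.getValue());
        }
    }
}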
From source file:lineage.LineageEngine.java
/**
 * The main pipeline for reconstructing the cell lineage trees
 */
public static void buildLineage(Args args) {
    // 1. load SNV data
    SNVDataStore db = new SNVDataStore(args.inputFileName, args.clustersFileName, args.normalSampleId);

    // 2. get the SNVs partitioned by group tag and create the appropriate SNV group objects
    HashMap<String, ArrayList<SNVEntry>> snvsByTag = db.getSomaticGroups();
    ArrayList<SNVGroup> groups = new ArrayList<SNVGroup>();
    for (String groupTag : snvsByTag.keySet()) {
        groups.add(new SNVGroup(groupTag, snvsByTag.get(groupTag), db.isRobustGroup(groupTag)));
    }
    if (groups.size() == 0) {
        logger.warning("All SNV groups have been filtered out.");
        return;
    }

    // 3. cluster SNVs in each group
    AAFClusterer clusterer = new AAFClusterer();
    for (SNVGroup group : groups) {
        if (args.clustersFileName == null) {
            Cluster[] clusters = clusterer.clusterSubPopulations(group, ClusteringAlgorithms.EM, 1);
            logger.fine("Clustering results for group: " + group.getTag());
            for (Cluster c : clusters) {
                logger.fine(c.toString());
            }
            group.setSubPopulations(clusters);
        } else {
            ArrayList<Cluster> groupClusters = db.getClusters().get(group.getTag());
            group.subPopulations = new Cluster[groupClusters.size()];
            group.subPopulations = groupClusters.toArray(group.subPopulations);
        }
    }

    // 4. construct the constraint network
    PHYNetwork constrNetwork = new PHYNetwork(groups, db.getNumSamples());
    logger.fine(constrNetwork.toString());

    // 5. find all the lineage trees that pass the VAF constraints
    ArrayList<PHYTree> spanningTrees = constrNetwork.getLineageTrees();
    logger.info("Found " + spanningTrees.size() + " valid tree(s)");

    if (spanningTrees.size() == 0) {
        logger.info("Adjusting the network...");
        // if no valid trees were found, fix the network:
        // remove group nodes that are not robust, complete edges
        int delta = 0;
        do {
            int numNodes = constrNetwork.numNodes;
            constrNetwork = constrNetwork.fixNetwork();
            spanningTrees = constrNetwork.getLineageTrees();
            delta = numNodes - constrNetwork.numNodes;
        } while ((delta != 0) && (spanningTrees.size() <= 0));
        if (spanningTrees.size() <= 0) {
            Parameters.ALL_EDGES = true;
            constrNetwork = new PHYNetwork(groups, db.getNumSamples());
            spanningTrees = constrNetwork.getLineageTrees();
        }
        logger.info("Found " + spanningTrees.size() + " valid trees after network adjustments");
    }

    // 6. evaluate/rank the trees
    if (spanningTrees.size() > 0) {
        constrNetwork.evaluateLineageTrees();
        logger.fine("Top tree\nError score: " + spanningTrees.get(0).getErrorScore());
        logger.fine(spanningTrees.get(0).toString());
    }

    // 7. result visualization
    if (args.showNetwork) {
        constrNetwork.displayNetwork();
    }
    if (spanningTrees.size() > 0) {
        for (int i = 0; i < args.numShow; i++) {
            if (spanningTrees.size() > i) {
                constrNetwork.displayTree(spanningTrees.get(i), db.getSampleNames(), null, null);
            } else {
                break;
            }
        }
        // 8. persistent storage
        if (args.numSave > 0) {
            writeTreesToTxtFile(constrNetwork, spanningTrees, db.getSampleNames(), args);
        }
    }
}
From source file:es.pode.soporte.auditoria.registrar.Registrar.java
/**
 * From the intercepted data, recovers the search type: advanced, simple, curricular tree, or thesaurus.
 * @param tabla the intercepted data as a HashMap
 * @return the intercepted values as a String
 */
private static String getValoresInterceptados(HashMap tabla) {
    StringBuffer valores = new StringBuffer();
    try {
        for (Iterator it = tabla.keySet().iterator(); it.hasNext();) {
            String s = (String) it.next();
            String s1 = (String) tabla.get(s);
            valores.append(s + ": " + s1 + " ");
        }
    } catch (Exception e) {
        log.error("Error captura valores interceptados: " + e);
        valores.append(VACIO);
    }
    return valores.toString();
}
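The raw HashMap above forces casts inside the loop. Assuming String keys and values (which is what the casts imply; the original declaration is not shown), the same traversal can be written without the explicit Iterator or casts, for example:

import java.util.HashMap;
import java.util.Map;

class ValoresSketch {
    static String getValoresInterceptados(HashMap<String, String> tabla) {
        StringBuilder valores = new StringBuilder();
        // entries expose typed key and value; no casts needed
        for (Map.Entry<String, String> e : tabla.entrySet()) {
            valores.append(e.getKey()).append(": ").append(e.getValue()).append(" ");
        }
        return valores.toString();
    }
}

This sketch drops the original's try/catch fallback to VACIO; keep that wrapper if callers rely on it.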
From source file:com.likya.myra.jef.utils.JobQueueOperations.java
public static HashMap<String, AbstractJobType> toAbstractJobTypeList(HashMap<String, JobImpl> jobQueue) {
    HashMap<String, AbstractJobType> tmpList = new HashMap<String, AbstractJobType>();
    Iterator<String> jobsIterator = jobQueue.keySet().iterator();
    while (jobsIterator.hasNext()) {
        String jobKey = jobsIterator.next();
        tmpList.put(jobKey, jobQueue.get(jobKey).getAbstractJobType());
    }
    return tmpList;
}
From source file:LineageSimulator.java
public static void simulateLineageTrees(Args args) {
    int totalNumNodes = 0;
    // --- grow lineage trees --- //
    for (int t = 0; t < Parameters.NUM_TREES; t++) {
        // create the directory to store the results for each generated tree
        File treeDir = new File(args.simPath + "/tree" + "_" + t);
        treeDir.mkdirs();
        // initial tree (only contains the root)
        SimulatedTree lineageTree = new SimulatedTree();
        // -- expand the tree --
        int iter = 0;
        while (iter < Parameters.NUM_ITERATIONS || /* there must be a min number of undead nodes */
                lineageTree.getNumNodes() < lineageTree.getNumDeadNodes() + Parameters.MIN_NUM_NODES + 1) {
            if (lineageTree.getNumNodes() >= lineageTree.getNumDeadNodes() + Parameters.MAX_NUM_NODES + 1) {
                break;
            }
            lineageTree.grow();
            iter++;
        }
        writeOutputFile(treeDir.getAbsolutePath() + "/TREE_plain.txt", lineageTree.toString());
        if (args.generateDOT) {
            writeOutputFile(treeDir.getAbsolutePath() + "/TREE.dot", lineageTree.toDOT());
        }
        logger.fine("Generated tree " + t + " with " + lineageTree.getNumNodes() + " nodes.");
        totalNumNodes += lineageTree.getNumNodes();

        // --- sampling --- //
        for (int s = 0; s < Parameters.NUM_SAMPLES_ARRAY.length; s++) {
            int numSamples = Parameters.NUM_SAMPLES_ARRAY[s];
            ArrayList<TumorSample> samples = new ArrayList<TumorSample>();
            HashSet<CellPopulation> subclones = new HashSet<CellPopulation>();
            HashMap<Mutation.SNV, double[]> multiSampleFrequencies = new HashMap<Mutation.SNV, double[]>();

            // --- collect the samples from the tree ---
            if (Parameters.LOCALIZED_SAMPLING) {
                samples = lineageTree.getKLocalizedSamples(numSamples - 1);
            } else { // randomized
                for (int i = 1; i < numSamples; i++) {
                    samples.add(lineageTree.getSample());
                }
            }
            if (args.generateSampledDOT) {
                writeOutputFile(treeDir.getAbsolutePath() + "/TREE_s" + numSamples + ".dot",
                        lineageTree.toColoredDOT(samples));
            }
            lineageTree.resetColors();

            // --- populate the SNV VAFs for each sample ---
            for (int i = 1; i < numSamples; i++) { // + default normal sample 0
                TumorSample sample = samples.get(i - 1);
                HashMap<Mutation.SNV, Double> freqMap = sample.getSNVFrequencies();
                for (Mutation.SNV snv : freqMap.keySet()) {
                    if (multiSampleFrequencies.containsKey(snv)) {
                        multiSampleFrequencies.get(snv)[i] = freqMap.get(snv);
                    } else {
                        multiSampleFrequencies.put(snv, new double[numSamples]);
                        multiSampleFrequencies.get(snv)[i] = freqMap.get(snv);
                    }
                }
                subclones.addAll(sample.cellPopulationCounts.keySet());
            }
            HashMap<Mutation.SNV, String> binaryProfiles = null;
            if (args.outputSampleProfile) {
                binaryProfiles = getBinaryProfile(multiSampleFrequencies, numSamples);
            }

            // --- store true VAFs ---
            String VAFFileName = treeDir.getAbsolutePath() + "/VAF_s" + numSamples + "_true.txt";
            writeVAFsToFile(VAFFileName, multiSampleFrequencies, binaryProfiles, numSamples);

            // --- generate VAFs with simulated coverage and sequencing error ---
            for (int c = 0; c < Parameters.COVERAGE_ARRAY.length; c++) {
                int coverage = Parameters.COVERAGE_ARRAY[c];
                VAFFileName = treeDir.getAbsolutePath() + "/VAF_s" + numSamples + "_" + coverage + "X.txt";
                HashMap<Mutation.SNV, double[]> noisyMultiSampleFrequencies = addNoise(multiSampleFrequencies,
                        coverage, numSamples);
                writeVAFsToFile(VAFFileName, noisyMultiSampleFrequencies, binaryProfiles, numSamples);
            }

            // --- store subclone information for evaluation ---
            String lineageFileName = treeDir.getAbsolutePath() + "/SUBCLONES_s" + numSamples + ".txt";
            writeSubclonesToFile(lineageFileName, subclones);
        }
        if ((t + 1) % 1 == 0)
            logger.info("[PROGRESS] Simulated " + (t + 1) + " trees.");
    }
    logger.info("[SUMMARY] Simulated " + Parameters.NUM_TREES + " trees. Average number of nodes / tree = "
            + (double) totalNumNodes / (Parameters.NUM_TREES));
}
From source file:bookChapter.theoretical.AnalyzeTheoreticalMSMSCalculation.java
private static String result(MSnSpectrum msms, double precursorTolerance, HashSet<DBEntry> peptideAndMass,
        double fragmentTolerance, int correctionFactor, boolean hasAllPossCharge)
        throws IllegalArgumentException, IOException, MzMLUnmarshallerException {
    String res = "";
    // select peptides within a given precursor tolerance
    HashMap<Peptide, Boolean> allSelectedPeps = getSelectedTheoPeps(msms, precursorTolerance, peptideAndMass);
    int scoredPeps = allSelectedPeps.size();
    ArrayList<Identify> sequestResults = new ArrayList<Identify>(),
            andromedaResults = new ArrayList<Identify>();
    // for every peptide... calculate each score...
    for (Peptide selectedPep : allSelectedPeps.keySet()) {
        Identify toCalculateSequest = new Identify(msms, selectedPep, fragmentTolerance, true,
                allSelectedPeps.get(selectedPep), scoredPeps, correctionFactor, hasAllPossCharge),
                toCalculateAndromeda = new Identify(msms, selectedPep, fragmentTolerance, false,
                        allSelectedPeps.get(selectedPep), scoredPeps, correctionFactor, hasAllPossCharge);
        if (toCalculateSequest.getScore() != Double.NEGATIVE_INFINITY) {
            sequestResults.add(toCalculateSequest);
            andromedaResults.add(toCalculateAndromeda);
        }
    }
    if (!sequestResults.isEmpty()) {
        HashSet<Identify> theBestSEQUESTResults = getBestResult(sequestResults),
                theBestAndromedaResults = getBestResult(andromedaResults);
        res = printInfo(theBestAndromedaResults, theBestSEQUESTResults);
    }
    return res;
}