List of usage examples for java.util.HashMap.containsKey
public boolean containsKey(Object key)
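Before the source-file examples, here is a minimal, self-contained sketch of the method (the map and key names are illustrative, not taken from any example below). containsKey reports whether a mapping for the given key exists, which is the reliable membership test when a map may store null values, where get(key) == null is ambiguous.

import java.util.HashMap;

public class ContainsKeyDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> scores = new HashMap<>();
        scores.put("alice", 3);
        scores.put("bob", null); // a present key mapped to null

        System.out.println(scores.containsKey("alice")); // true
        System.out.println(scores.containsKey("carol")); // false

        // get() alone cannot distinguish "absent" from "mapped to null":
        System.out.println(scores.get("bob"));          // null
        System.out.println(scores.containsKey("bob"));  // true
    }
}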
From source file:frequencyanalysis.FrequencyAnalysis.java
public static List<Item> findRepeatedFreq(String input) {
    // Counts occurrences of each repeated adjacent character pair in the input.
    HashMap<String, Integer> repeatedCount = new HashMap<String, Integer>();
    for (int i = 0; i < input.length(); i++) {
        if (i + 1 < input.length() && input.charAt(i) == input.charAt(i + 1)) {
            String key = String.valueOf(input.charAt(i)) + String.valueOf(input.charAt(i + 1));
            if (!repeatedCount.containsKey(key)) {
                repeatedCount.put(key, 1);
            } else {
                int tempCount = (int) repeatedCount.get(key);
                tempCount++;
                repeatedCount.put(key, tempCount);
            }
        }
    }
    return sortByValue(repeatedCount);
}
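The containsKey-then-put branching above is the classic pre-Java-8 counting idiom. As a design note, on Java 8+ the same update can be written without the explicit containsKey check using Map.merge; this is a hedged sketch with illustrative class and variable names, not code from the source file:

import java.util.HashMap;
import java.util.Map;

class PairCounter {
    // Counts repeated adjacent character pairs without an explicit containsKey check.
    static Map<String, Integer> countRepeatedPairs(String input) {
        HashMap<String, Integer> counts = new HashMap<>();
        for (int i = 0; i + 1 < input.length(); i++) {
            if (input.charAt(i) == input.charAt(i + 1)) {
                String key = "" + input.charAt(i) + input.charAt(i + 1);
                // merge: store 1 if the key is absent, otherwise add 1 to the current count
                counts.merge(key, 1, Integer::sum);
            }
        }
        return counts;
    }
}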
From source file:com.ibm.bi.dml.hops.globalopt.GDFEnumOptimizer.java
private static void rSetRuntimePlanConfig(Plan p, HashMap<Long, Plan> memo) {
    ExecType CLUSTER = OptimizerUtils.isSparkExecutionMode() ? ExecType.SPARK : ExecType.MR;

    //basic memoization including containment check
    if (memo.containsKey(p.getNode().getID())) {
        Plan pmemo = memo.get(p.getNode().getID());
        if (!p.getInterestingProperties().equals(pmemo.getInterestingProperties())) {
            //replace plan in memo with new plan
            //TODO this would require additional cleanup in special cases
            if (_resolve.resolveMismatch(pmemo.getRewriteConfig(), p.getRewriteConfig()))
                memo.put(p.getNode().getID(), p);

            //logging of encountered plan mismatch
            LOG.warn("Configuration mismatch on shared node (" + p.getNode().getHop().getHopID()
                    + "). Falling back to heuristic '" + _resolve.getName() + "'.");
            LOG.warn(p.getInterestingProperties().toString());
            LOG.warn(memo.get(p.getNode().getID()).getInterestingProperties());
            _planMismatches++;
            return;
        }
    }

    //set plan configuration
    Hop hop = p.getNode().getHop();
    if (hop != null) {
        RewriteConfig rc = p.getRewriteConfig();
        //set exec type
        hop.setForcedExecType(rc.getExecType());
        //set blocksizes and reblock
        hop.setRowsInBlock(rc.getBlockSize());
        hop.setColsInBlock(rc.getBlockSize());
        if (rc.getExecType() == CLUSTER) //after blocksize update
        {
            //TODO double check dataop condition - side effect from plan validity
            boolean reblock = HopRewriteUtils.alwaysRequiresReblock(hop)
                    || (hop.hasMatrixInputWithDifferentBlocksizes() && !(hop instanceof DataOp));
            hop.setRequiresReblock(reblock);
        } else
            hop.setRequiresReblock(false);
    }

    //process childs
    if (p.getChilds() != null)
        for (Plan c : p.getChilds())
            rSetRuntimePlanConfig(c, memo);

    //memoization (mark as processed)
    memo.put(p.getNode().getID(), p);
}
From source file:com.act.reachables.Network.java
public static JSONArray get(MongoDB db, Set<Node> nodes, Set<Edge> edges) throws JSONException {
    // init the json object with structure:
    // {
    //   "nodeMapping":[
    //     { "name":"Myriel", "group":1 }, ...
    //   ],
    //   "links":[
    //     { "source":1, "target":0, "value":1 }, ...
    //   ]
    // }
    // nodeMapping.group specifies the node color
    // links.value specifies the edge weight
    JSONArray json = new JSONArray();

    HashMap<Long, Set<Node>> treenodes = new HashMap<Long, Set<Node>>();
    HashMap<Long, Set<Edge>> treeedges = new HashMap<Long, Set<Edge>>();

    for (Node n : nodes) {
        Long k = (Long) n.getAttribute("under_root");
        if (!treenodes.containsKey(k)) {
            treenodes.put(k, new HashSet<Node>());
            treeedges.put(k, new HashSet<Edge>());
        }
        treenodes.get(k).add(n);
    }

    for (Edge e : edges) {
        Long k = (Long) e.getAttribute("under_root");
        if (!treeedges.containsKey(k)) {
            throw new RuntimeException("Fatal: Edge found rooted under a tree (under_root) that has no node!");
        }
        treeedges.get(k).add(e);
    }

    for (Long root : treenodes.keySet()) {
        JSONObject tree = new JSONObject();
        HashMap<Node, Integer> nodeOrder = new HashMap<Node, Integer>();
        tree.put("nodeMapping", nodeListObj(db, treenodes.get(root), nodeOrder /*inits this ordering*/));
        tree.put("links", edgeListObj(treeedges.get(root), nodeOrder /* uses the ordering */));
        json.put(tree);
    }

    return json;
}
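The containsKey guard above initializes a per-key collection before the first add. As a design note, Java 8's computeIfAbsent collapses that check-and-initialize step into one call; the following is a hedged sketch with illustrative types (not the Node/Edge classes from the source file):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

class Grouper {
    // Groups values under a key, creating the bucket on first use
    // without an explicit containsKey check.
    static HashMap<Long, List<String>> groupByRoot(List<String> items) {
        HashMap<Long, List<String>> groups = new HashMap<>();
        for (String item : items) {
            long root = item.length() % 3; // stand-in for getAttribute("under_root")
            groups.computeIfAbsent(root, k -> new ArrayList<>()).add(item);
        }
        return groups;
    }
}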
From source file:com.example.common.ApiRequestFactory.java
/**
 * Generate the API XML request body
 */
@SuppressWarnings("unchecked")
private static String generateXmlRequestBody(Object params) {
    if (params == null) {
        return "<request version=\"2\"></request>";
    }
    HashMap<String, Object> requestParams;
    if (params instanceof HashMap) {
        requestParams = (HashMap<String, Object>) params;
    } else {
        return "<request version=\"2\"></request>";
    }

    final StringBuilder buf = new StringBuilder();
    // TODO: add local_version parameter if exist
    // 2010/12/29 update version to 2 to get comments from bbs
    buf.append("<request version=\"2\"");
    if (requestParams.containsKey("local_version")) {
        buf.append(" local_version=\"" + requestParams.get("local_version") + "\" ");
        requestParams.remove("local_version");
    }
    buf.append(">");

    // add parameter node
    final Iterator<String> keySet = requestParams.keySet().iterator();
    while (keySet.hasNext()) {
        final String key = keySet.next();
        if ("upgradeList".equals(key)) {
            buf.append("<products>");
            List<PackageInfo> productsList = (List<PackageInfo>) requestParams.get(key);
            for (PackageInfo info : productsList) {
                buf.append("<product package_name=\"").append(info.packageName);
                buf.append("\" version_code=\"").append(info.versionCode).append("\"/>");
            }
            buf.append("</products>");
            continue;
        } else if ("appList".equals(key)) {
            buf.append("<apps>");
            List<UpgradeInfo> productsList = (List<UpgradeInfo>) requestParams.get(key);
            for (UpgradeInfo info : productsList) {
                buf.append("<app package_name=\"").append(info.pkgName);
                buf.append("\" version_code=\"").append(info.versionCode);
                buf.append("\" version_name=\"").append(info.versionName);
                buf.append("\" app_name=\"").append(wrapText(info.name));
                // buf.append("\" md5=\"").append(info.md5);
                buf.append("\"/>");
            }
            buf.append("</apps>");
            continue;
        }
        buf.append("<").append(key).append(">");
        buf.append(requestParams.get(key));
        buf.append("</").append(key).append(">");
    }

    // add the enclosing tag
    buf.append("</request>");
    return buf.toString();
}
From source file:org.hfoss.posit.web.Communicator.java
/**
 * cleanup the item key,value pairs so that we can receive and save to the internal database
 * @param rMap
 */
public static void cleanupOnReceive(HashMap<String, Object> rMap) {
    rMap.put(PositDbHelper.FINDS_SYNCED, PositDbHelper.FIND_IS_SYNCED);
    rMap.put(PositDbHelper.FINDS_GUID, rMap.get("barcode_id"));
    //rMap.put(PositDbHelper.FINDS_GUID, rMap.get("barcode_id"));
    rMap.put(PositDbHelper.FINDS_PROJECT_ID, projectId);
    if (rMap.containsKey("add_time")) {
        rMap.put(PositDbHelper.FINDS_TIME, rMap.get("add_time"));
        rMap.remove("add_time");
    }
    if (rMap.containsKey("images")) {
        if (Utils.debug)
            Log.d(TAG, "contains image key");
        rMap.put(PositDbHelper.PHOTOS_IMAGE_URI, rMap.get("images"));
        rMap.remove("images");
    }
}
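A small design note on the rename steps above (containsKey, then get, then remove): Map.remove(key) already returns the previous value, or null when the key was absent, so the same move can be expressed with a single lookup when the map does not store null values. A hedged sketch with illustrative names:

import java.util.HashMap;

class KeyRenamer {
    // Moves a value from one key to another in a single lookup;
    // remove() returns null when the source key is absent.
    // Note: this shortcut is only safe if the map never stores null values.
    static void rename(HashMap<String, Object> map, String from, String to) {
        Object value = map.remove(from);
        if (value != null) {
            map.put(to, value);
        }
    }
}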
From source file:edu.illinois.cs.cogcomp.transliteration.CSPTransliteration.java
public static double GetBestProbability(int position, String originalWord1, String word1, String word2,
        CSPModel model, HashMap<Triple<Integer, String, String>, Double> memoizationTable) {
    double result;
    Triple<Integer, String, String> v = new Triple<Integer, String, String>(position, word1, word2);
    if (memoizationTable.containsKey(v)) {
        return memoizationTable.get(v); //we've been down this road before
    }

    result = 0;

    if (word1.length() == 0 && word2.length() == 0)
        return 1; //perfect null-to-null alignment

    int maxSubstringLength1f = Math.min(word1.length(), model.maxSubstringLength);
    int maxSubstringLength2f = Math.min(word2.length(), model.maxSubstringLength);

    String[] leftContexts = WikiTransliteration.GetLeftFallbackContexts(originalWord1, position,
            Math.max(model.segContextSize, model.productionContextSize));

    double minProductionProbability1 = 1;
    for (int i = 1; i <= maxSubstringLength1f; i++) //for each possible substring in the first word...
    {
        minProductionProbability1 *= model.minProductionProbability;

        String substring1 = word1.substring(0, i);
        String[] rightContexts = WikiTransliteration.GetRightFallbackContexts(originalWord1, position + i,
                Math.max(model.segContextSize, model.productionContextSize));

        double segProb;
        if (model.segProbs.size() == 0)
            segProb = 1;
        else {
            segProb = 0;
            for (int k = model.productionContextSize; k >= 0; k--) {
                Triple<String, String, String> v5 = new Triple<>(leftContexts[k], substring1, rightContexts[k]);
                if (model.segProbs.containsKey(v5)) {
                    segProb = model.segProbs.get(v5);
                    break;
                }
            }
        }

        double minProductionProbability2 = 1;
        for (int j = 1; j <= maxSubstringLength2f; j++) //foreach possible substring in the second
        {
            minProductionProbability2 *= model.minProductionProbability;

            if ((word1.length() - i) * model.maxSubstringLength >= word2.length() - j
                    && (word2.length() - j) * model.maxSubstringLength >= word1.length() - i) //if we get rid of these characters, can we still cover the remainder of word2?
            {
                double minProductionProbability;
                if (model.smoothMode == CSPModel.SmoothMode.BySource)
                    minProductionProbability = minProductionProbability1;
                else if (model.smoothMode == CSPModel.SmoothMode.ByMax)
                    minProductionProbability = Math.min(minProductionProbability1, minProductionProbability2);
                else //if (model.smoothMode == SmoothMode.BySum)
                    minProductionProbability = minProductionProbability1 * minProductionProbability2;

                String substring2 = word2.substring(0, j);
                //Pair<Triple<String, String, String>, String> production = new Pair<Triple<String, String, String>, String>(new Triple<String, String, String>(leftProductionContext, substring1, rightProductionContext), substring2);

                double prob;
                if (model.productionProbs.size() == 0)
                    prob = 1;
                else {
                    prob = 0;
                    for (int k = model.productionContextSize; k >= 0; k--) {
                        Pair<Triple<String, String, String>, String> v4 = new Pair<>(
                                new Triple<>(leftContexts[k], substring1, rightContexts[k]), substring2);
                        if (model.productionProbs.containsKey(v4)) {
                            prob = model.productionProbs.get(v4);
                            break;
                        }
                    }

                    prob = Math.max(prob, minProductionProbability);
                }

                double remainder = prob * GetBestProbability(position + i, originalWord1, word1.substring(i),
                        word2.substring(j), model, memoizationTable);

                if (remainder > result)
                    result = remainder; //maximize

                //record this remainder in our results
                //result.x += remainder.x * prob * segProb;
                //result.y += remainder.y * segProb;
            }
        }
    }

    memoizationTable.put(new Triple<>(position, word1, word2), result);

    return result;
}
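Both transliteration methods in this file use the same containsKey-guarded memoization pattern: look the arguments up in a table, return the cached value on a hit, otherwise compute, store, and return. A minimal, self-contained sketch of that pattern, using an illustrative recursive function rather than the CSP model:

import java.util.HashMap;

class MemoDemo {
    private static final HashMap<Integer, Long> memo = new HashMap<>();

    // containsKey-guarded memoized recursion: return the cached result if present,
    // otherwise compute it, cache it, and return it.
    static long fib(int n) {
        if (memo.containsKey(n)) {
            return memo.get(n);
        }
        long result = (n < 2) ? n : fib(n - 1) + fib(n - 2);
        memo.put(n, result);
        return result;
    }

    public static void main(String[] args) {
        System.out.println(fib(50)); // 12586269025
    }
}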
From source file:org.opendatakit.survey.android.provider.SubmissionProvider.java
@SuppressWarnings("unchecked")
private static final void putElementValue(HashMap<String, Object> dataMap, ColumnDefinition defn,
        Object value) {
    List<ColumnDefinition> nesting = new ArrayList<ColumnDefinition>();
    ColumnDefinition cur = defn.getParent();
    while (cur != null) {
        nesting.add(cur);
        cur = cur.getParent();
    }

    HashMap<String, Object> elem = dataMap;
    for (int i = nesting.size() - 1; i >= 0; --i) {
        cur = nesting.get(i);
        if (elem.containsKey(cur.getElementName())) {
            elem = (HashMap<String, Object>) elem.get(cur.getElementName());
        } else {
            elem.put(cur.getElementName(), new HashMap<String, Object>());
            elem = (HashMap<String, Object>) elem.get(cur.getElementName());
        }
    }
    elem.put(defn.getElementName(), value);
}
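The loop above walks a chain of nested maps, using containsKey to decide between descending and creating a level. As a hedged sketch, the same descend-or-create step can be written with computeIfAbsent; the class, method, and parameter names here are illustrative, not from the source file:

import java.util.HashMap;

class NestedMaps {
    // Descends into a chain of nested maps, creating each level on demand,
    // then stores the value at the final key.
    @SuppressWarnings("unchecked")
    static void putPath(HashMap<String, Object> root, String[] path, String leafKey, Object value) {
        HashMap<String, Object> elem = root;
        for (String segment : path) {
            // create the intermediate map only if the key is absent
            elem = (HashMap<String, Object>) elem.computeIfAbsent(segment, k -> new HashMap<String, Object>());
        }
        elem.put(leafKey, value);
    }
}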
From source file:net.doubledoordev.backend.webserver_old.methods.Post.java
/**
 * Handle post requests from the login page
 */
private static void handleLogin(HashMap<String, Object> dataObject, NanoHTTPD.HTTPSession session,
        Map<String, String> map) {
    if (map.containsKey("username") && map.containsKey("password")) {
        User user = Settings.getUserByName(map.get("username"));
        if (user != null && user.verify(map.get("password"))) {
            session.getCookies().set(COOKIE_KEY, user.getUsername() + "|" + user.getPasshash(), 30);
            dataObject.put("user", user);
        } else
            dataObject.put("message", "Login failed.");
    } else if (map.containsKey("logout")) {
        session.getCookies().delete(COOKIE_KEY);
        dataObject.remove("user");
    } else if (dataObject.containsKey("user") && map.containsKey("oldPassword")
            && map.containsKey("newPassword")) {
        User user = (User) dataObject.get("user");
        if (user.updatePassword(map.get("oldPassword"), map.get("newPassword"))) {
            session.getCookies().set(COOKIE_KEY, user.getUsername() + "|" + user.getPasshash(), 30);
        } else
            dataObject.put("message", "Old password was wrong.");
    } else
        dataObject.put("message", "Form error.");
}
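When several keys must all be present, as in the login and password-change branches above, the individual containsKey calls can also be expressed as a single containsAll check on the key set. A small hedged sketch; the parameter names are illustrative:

import java.util.List;
import java.util.Map;

class FormValidation {
    // True only if every required parameter name is a key of the submitted form map.
    static boolean hasAll(Map<String, String> form, List<String> required) {
        return form.keySet().containsAll(required);
    }

    // Example call: hasAll(form, List.of("username", "password"))  (List.of requires Java 9+)
}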
From source file:gov.nih.nci.rembrandt.web.helper.PCAAppletHelper.java
public static String generateParams(String sessionId, String taskId) {
    String htm = "";
    DecimalFormat nf = new DecimalFormat("0.0000");
    try {
        //retrieve the Finding from cache and build the list of PCAData points
        PrincipalComponentAnalysisFinding principalComponentAnalysisFinding = (PrincipalComponentAnalysisFinding) businessTierCache
                .getSessionFinding(sessionId, taskId);
        ArrayList<PrincipalComponentAnalysisDataPoint> pcaData = new ArrayList();
        Collection<ClinicalFactorType> clinicalFactors = new ArrayList<ClinicalFactorType>();
        List<String> sampleIds = new ArrayList();
        Map<String, PCAresultEntry> pcaResultMap = new HashMap<String, PCAresultEntry>();

        List<PCAresultEntry> pcaResults = principalComponentAnalysisFinding.getResultEntries();
        for (PCAresultEntry pcaEntry : pcaResults) {
            sampleIds.add(pcaEntry.getSampleId());
            pcaResultMap.put(pcaEntry.getSampleId(), pcaEntry);
        }

        Collection<SampleResultset> validatedSampleResultset = ClinicalDataValidator
                .getValidatedSampleResultsetsFromSampleIDs(sampleIds, clinicalFactors);

        if (validatedSampleResultset != null) {
            String id;
            PCAresultEntry entry;
            for (SampleResultset rs : validatedSampleResultset) {
                id = rs.getBiospecimen().getSpecimenName();
                entry = pcaResultMap.get(id);
                PrincipalComponentAnalysisDataPoint pcaPoint = new PrincipalComponentAnalysisDataPoint(id,
                        entry.getPc1(), entry.getPc2(), entry.getPc3());
                String diseaseName = rs.getDisease().getValueObject();
                if (diseaseName != null) {
                    pcaPoint.setDiseaseName(diseaseName);
                } else {
                    pcaPoint.setDiseaseName(DiseaseType.NON_TUMOR.name());
                }
                GenderDE genderDE = rs.getGenderCode();
                if (genderDE != null) {
                    String gt = genderDE.getValueObject();
                    if (gt != null) {
                        GenderType genderType = GenderType.valueOf(gt);
                        if (genderType != null) {
                            pcaPoint.setGender(genderType);
                        }
                    }
                }
                Long survivalLength = rs.getSurvivalLength();
                if (survivalLength != null) {
                    //survival length is stored in days in the DB so divide by 30 to get the
                    //approx survival in months
                    double survivalInMonths = survivalLength.doubleValue() / 30.0;
                    pcaPoint.setSurvivalInMonths(survivalInMonths);
                }
                pcaData.add(pcaPoint);
            }
        }

        //make a hashmap
        // [key=group] hold the array of double[][]s
        HashMap<String, ArrayList> hm = new HashMap();

        //now we should have a collection of PCADataPts
        double[][] pts = new double[pcaData.size()][3];
        for (int i = 0; i < pcaData.size(); i++) {
            //just create a large 1 set for now
            //are we breaking groups by gender or disease?
            PrincipalComponentAnalysisDataPoint pd = pcaData.get(i);
            pts[i][0] = pd.getPc1value();
            pts[i][1] = pd.getPc2value();
            pts[i][2] = pd.getPc3value();

            ArrayList<double[]> al;
            try {
                if (hm.containsKey(pd.getDiseaseName())) {
                    //already has it, so add this one
                    al = (ArrayList) hm.get(pd.getDiseaseName());
                } else {
                    al = new ArrayList();
                    hm.put(pd.getDiseaseName(), new ArrayList());
                }
                if (!al.contains(pts[i])) {
                    al.add(pts[i]);
                }
                hm.put(pd.getDiseaseName(), al);
            } catch (Exception e) {
                System.out.print(e.toString());
            }
        }

        int r = hm.size();
        if (r == 1) {
        }

        //hm should now contain a hashmap of all the disease groups
        //generate the param tags
        htm += "<param name=\"key\" value=\"" + taskId + "\" >\n";
        htm += "<param name=\"totalPts\" value=\"" + pts.length + "\" >\n";
        htm += "<param name=\"totalGps\" value=\"" + hm.size() + "\" >\n";

        int ii = 0;
        for (Object k : hm.keySet()) {
            String key = k.toString();
            //for each group
            Color diseaseColor = Color.GRAY;
            if (DiseaseType.valueOf(key) != null) {
                DiseaseType disease = DiseaseType.valueOf(key);
                diseaseColor = disease.getColor();
            }
            ArrayList<double[]> al = hm.get(key);
            htm += "<param name=\"groupLabel_" + ii + "\" value=\"" + key + "\" >\n";
            htm += "<param name=\"groupCount_" + ii + "\" value=\"" + al.size() + "\" >\n";
            htm += "<param name=\"groupColor_" + ii + "\" value=\"" + diseaseColor.getRGB() + "\" >\n";
            int jj = 0;
            for (double[] d : al) {
                String comm = nf.format(d[0]) + "," + nf.format(d[1]) + "," + nf.format(d[2]);
                String h = "<param name=\"pt_" + ii + "_" + jj + "\" value=\"" + comm + "\">\n";
                htm += h;
                jj++;
            }
            ii++;
        }

        /*
        //for bulk rendering
        for(int i=0; i<pts.length; i++) {
            String comm = String.valueOf(pts[i][0]) + "," + String.valueOf(pts[i][1]) + "," + String.valueOf(pts[i][2]);
            String h = "<param name=\"pt_"+i+"\" value=\""+ comm +"\">\n";
            //htm += h;
        }
        */
    } //try
    catch (Exception e) {
    }
    return htm;
}
From source file:edu.illinois.cs.cogcomp.transliteration.CSPTransliteration.java
public static Pair<Double, Double> GetProbability(int position, String originalWord1, String word1,
        String word2, CSPModel model,
        HashMap<Triple<Integer, String, String>, Pair<Double, Double>> memoizationTable) {
    Pair<Double, Double> result;
    Triple<Integer, String, String> v = new Triple<>(position, word1, word2);
    if (memoizationTable.containsKey(v)) {
        return memoizationTable.get(v);
    }

    result = new Pair<>(0.0, 0.0);

    if (word1.length() == 0 && word2.length() == 0) //record probabilities
    {
        result.setFirst(1.0); //null -> null is always a perfect alignment
        result.setSecond(1.0);
        return result; //end of the line
    }

    int maxSubstringLength1f = Math.min(word1.length(), model.maxSubstringLength);
    int maxSubstringLength2f = Math.min(word2.length(), model.maxSubstringLength);

    String[] leftContexts = WikiTransliteration.GetLeftFallbackContexts(originalWord1, position,
            Math.max(model.segContextSize, model.productionContextSize));

    double minProductionProbability1 = 1;
    for (int i = 1; i <= maxSubstringLength1f; i++) //for each possible substring in the first word...
    {
        minProductionProbability1 *= model.minProductionProbability;

        String substring1 = word1.substring(0, i);
        String[] rightContexts = WikiTransliteration.GetRightFallbackContexts(originalWord1, position + i,
                Math.max(model.segContextSize, model.productionContextSize));

        double segProb;
        if (model.segProbs.size() == 0)
            segProb = 1;
        else {
            segProb = 0;
            for (int k = model.productionContextSize; k >= 0; k--) {
                Triple<String, String, String> v2 = new Triple<>(leftContexts[k], substring1, rightContexts[k]);
                if (model.segProbs.containsKey(v2)) {
                    segProb = model.segProbs.get(v2);
                    break;
                }
            }
        }

        double minProductionProbability2 = 1;
        for (int j = 1; j <= maxSubstringLength2f; j++) //foreach possible substring in the second
        {
            minProductionProbability2 *= model.minProductionProbability;

            if ((word1.length() - i) * model.maxSubstringLength >= word2.length() - j
                    && (word2.length() - j) * model.maxSubstringLength >= word1.length() - i) //if we get rid of these characters, can we still cover the remainder of word2?
            {
                double minProductionProbability;
                if (model.smoothMode == CSPModel.SmoothMode.BySource)
                    minProductionProbability = minProductionProbability1;
                else if (model.smoothMode == CSPModel.SmoothMode.ByMax)
                    minProductionProbability = Math.min(minProductionProbability1, minProductionProbability2);
                else //if (model.smoothMode == SmoothMode.BySum)
                    minProductionProbability = minProductionProbability1 * minProductionProbability2;

                String substring2 = word2.substring(0, j);
                //Pair<Triple<String, String, String>, String> production = new Pair<Triple<String, String, String>, String>(new Triple<String, String, String>(leftProductionContext, substring1, rightProductionContext), substring2);

                double prob;
                if (model.productionProbs.size() == 0)
                    prob = 1;
                else {
                    prob = 0;
                    for (int k = model.productionContextSize; k >= 0; k--) {
                        Pair<Triple<String, String, String>, String> v3 = new Pair<>(
                                new Triple<>(leftContexts[k], substring1, rightContexts[k]), substring2);
                        if (model.productionProbs.containsKey(v3)) {
                            prob = model.productionProbs.get(v3);
                            break;
                        }
                    }

                    prob = Math.max(prob, minProductionProbability);
                }

                Pair<Double, Double> remainder = GetProbability(position + i, originalWord1, word1.substring(i),
                        word2.substring(j), model, memoizationTable);

                //record this remainder in our results
                result.setFirst(result.getFirst() + remainder.getFirst() * prob * segProb);
                result.setSecond(result.getSecond() + remainder.getSecond() * segProb);
            }
        }
    }

    memoizationTable.put(new Triple<>(position, word1, word2), result);

    return result;
}