List of usage examples for java.util.HashMap containsKey
public boolean containsKey(Object key)
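containsKey returns true if the map contains a mapping for the given key (compared via hashCode/equals) and false otherwise; a key mapped to a null value still counts as present. Before the real-world examples below, here is a minimal, self-contained sketch of the typical usage; the class and variable names are illustrative only and do not come from any of the source files listed afterwards.

import java.util.HashMap;

public class ContainsKeyExample {
    public static void main(String[] args) {
        HashMap<String, Integer> wordCounts = new HashMap<>();
        wordCounts.put("apple", 3);
        wordCounts.put("banana", null); // a key mapped to null is still "contained"

        System.out.println(wordCounts.containsKey("apple"));  // true
        System.out.println(wordCounts.containsKey("banana")); // true, even though the value is null
        System.out.println(wordCounts.containsKey("cherry")); // false

        // Check-then-update pattern that recurs in the examples below
        String key = "apple";
        if (wordCounts.containsKey(key)) {
            wordCounts.put(key, wordCounts.get(key) + 1);
        } else {
            wordCounts.put(key, 1);
        }
        System.out.println(wordCounts.get("apple")); // 4
    }
}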
From source file:com.krawler.spring.profileHandler.profileHandlerDAOImpl.java
public void saveUserLogin(HashMap<String, Object> requestParams) throws ServiceException {
    String userLoginId = "";
    UserLogin userLogin = null;
    try {
        if (requestParams.containsKey("userloginid") && requestParams.get("userloginid") != null) {
            userLoginId = requestParams.get("userloginid").toString();
            userLogin = (UserLogin) get(UserLogin.class, userLoginId);
        } else {
            userLogin = new UserLogin();
        }
        userLogin.setLastActivityDate(new Date());
        if (requestParams.containsKey("userName") && requestParams.get("userName") != null) {
            String userName = requestParams.get("userName").toString();
            userLogin.setUserName(userName);
        }
        if (requestParams.containsKey("password") && requestParams.get("password") != null) {
            String password = requestParams.get("password").toString();
            userLogin.setPassword(password);
        }
        saveOrUpdate(userLogin);
    } catch (Exception e) {
        throw ServiceException.FAILURE("profileHandlerDAOImpl.saveUserLogin", e);
    }
}
From source file:GeneticAlgorithm.SystemToSolve.java
private boolean is_there_fluxes(HashMap data) {
    boolean is_it_there = false;
    int count = 0;
    for (ModelReaction reaction : Reactions) {
        String reaction_ID = reaction.getReactionID();
        if (data.containsKey(reaction_ID)) {
            count++;
        }
    }
    if (count > 0) {
        is_it_there = true;
    }
    return is_it_there;
}
From source file:org.zaproxy.zap.extension.multiFuzz.impl.http.HttpFuzzResultDialog.java
private void updateValues() {
    stateSet = new DefaultPieDataset();
    resultSet = new DefaultPieDataset();
    sizeSet = new DefaultCategoryDataset();
    rttSet = new DefaultCategoryDataset();
    HashMap<String, Integer> statesMap = new HashMap<>();
    HashMap<String, Integer> resultMap = new HashMap<>();
    for (HttpFuzzRecord r : model.getEntries()) {
        if (r.isIncluded() && r instanceof HttpFuzzRequestRecord) {
            if (statesMap.containsKey(r.getReason())) {
                statesMap.put(r.getReason(), statesMap.get(r.getReason()) + 1);
            } else {
                statesMap.put(r.getReason(), 1);
            }
            if (resultMap.containsKey(r.getResult().first)) {
                resultMap.put(r.getResult().first, resultMap.get(r.getResult().first) + 1);
            } else {
                resultMap.put(r.getResult().first, 1);
            }
            sizeSet.addValue(r.getSize(), "Row 1", r.getName());
            rttSet.addValue(r.getRTT(), "Row 1", r.getName());
        } else if (r.isIncluded() && r instanceof HttpFuzzRecordGroup) {
            updateValues(((HttpFuzzRecordGroup) r).getMembers(), statesMap, resultMap);
        }
    }
    for (String key : statesMap.keySet()) {
        stateSet.setValue(key, statesMap.get(key));
    }
    for (String key : resultMap.keySet()) {
        resultSet.setValue(key, resultMap.get(key));
    }
}
From source file:edu.cornell.mannlib.semservices.service.impl.AgrovocService.java
protected String getDbpediaDescription(String uri) throws Exception {
    String descriptionSource = " (Source: DBpedia)";
    String description = new String();
    String qs = ""
            + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> \n"
            + "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> \n"
            + "PREFIX foaf: <http://xmlns.com/foaf/0.1/> \n"
            + "PREFIX dbpedia-owl: <http://dbpedia.org/ontology/>\n"
            + "SELECT DISTINCT ?description WHERE { \n"
            + "<" + uri + "> rdfs:comment ?description . \n"
            + "FILTER (LANG(?description)='en' ) \n"
            + "}";
    List<HashMap> resultList = new ArrayList<HashMap>();
    QueryExecution qexec = null;
    try {
        Query query = QueryFactory.create(qs);
        qexec = QueryExecutionFactory.sparqlService(this.dbpedia_endpoint, query);
        resultList = new ArrayList<HashMap>();
        ResultSet resultSet = qexec.execSelect();
        int resultSetSize = 0;
        while (resultSet.hasNext()) {
            resultSetSize++;
            QuerySolution solution = resultSet.nextSolution();
            Iterator varnames = solution.varNames();
            HashMap<String, String> hm = new HashMap<String, String>();
            while (varnames.hasNext()) {
                String name = (String) varnames.next();
                RDFNode rdfnode = solution.get(name);
                if (rdfnode.isLiteral()) {
                    Literal literal = rdfnode.asLiteral();
                    String nodeval = literal.getString();
                    hm.put(name, nodeval);
                } else if (rdfnode.isResource()) {
                    Resource resource = rdfnode.asResource();
                    String nodeval = resource.toString();
                    hm.put(name, nodeval);
                }
            }
            resultList.add(hm);
        }
        description = "";
        for (HashMap map : resultList) {
            if (map.containsKey("description")) {
                description = (String) map.get("description");
            }
        }
    } catch (Exception ex) {
        throw ex;
    } finally {
        // Release the remote query execution and its HTTP connection.
        if (qexec != null) {
            qexec.close();
        }
    }
    // Adding source so it is clear that this description comes from DBpedia
    return description + descriptionSource;
}
From source file:org.bungeni.ext.integration.bungeniportal.BungeniServiceAccess.java
public boolean hasDocumentBeeenEditedByTheEditor(HashMap<String, String> docPropsMap, boolean rootSectionExists) {
    if (docPropsMap.containsKey("BungeniDocType") && docPropsMap.containsKey("DocSource")
            && docPropsMap.containsKey("DocEditor") && rootSectionExists) {
        if (docPropsMap.get("DocSource").equals("BungeniPortal")
                && docPropsMap.get("DocEditor").equals("BungeniEditor")) {
            return true;
        }
    }
    return false;
}
From source file:com.dbmojo.QueryExecutor.java
/** Serialize the result set to JSON. */
private String serializeToJson(ArrayList<HashMap> resList) throws JSONException {
    JSONArray resArray = new JSONArray();
    final int rlen = resList.size();
    for (int i = 0; i < rlen; i++) {
        JSONObject jObj = new JSONObject();
        final HashMap tHashMap = resList.get(i);
        if (tHashMap.containsKey("message")) {
            jObj.put("message", (String) tHashMap.get("message"));
        }
        if (tHashMap.containsKey("status")) {
            jObj.put("status", (String) tHashMap.get("status"));
        }
        if (tHashMap.containsKey("types")) {
            jObj.put("types", new JSONArray(((ArrayList) tHashMap.get("types")).toArray()));
        }
        if (tHashMap.containsKey("cols")) {
            jObj.put("cols", new JSONArray(((ArrayList) tHashMap.get("cols")).toArray()));
        }
        if (tHashMap.containsKey("rows")) {
            JSONArray tJarr = new JSONArray();
            final ArrayList rows = (ArrayList) tHashMap.get("rows");
            final int tlen = rows.size();
            for (int v = 0; v < tlen; v++) {
                tJarr.put(new JSONArray(((ArrayList) rows.get(v)).toArray()));
            }
            jObj.put("rows", tJarr);
        }
        resArray.put(jObj);
    }
    return resArray.toString();
}
From source file:annis.gui.flatquerybuilder.ValueField.java
@Override
public void textChange(TextChangeEvent event) {
    ReducingStringComparator rsc = sq.getRSC();
    String fm = sq.getFilterMechanism();
    if (!"generic".equals(fm)) {
        ConcurrentSkipListSet<String> notInYet = new ConcurrentSkipListSet<>();
        String txt = event.getText();
        if (!txt.equals("")) {
            scb.removeAllItems();
            for (Iterator<String> it = values.keySet().iterator(); it.hasNext();) {
                String s = it.next();
                if (rsc.compare(s, txt, fm) == 0) {
                    scb.addItem(s);
                } else {
                    notInYet.add(s);
                }
            }
            // startsWith
            for (String s : notInYet) {
                if (rsc.startsWith(s, txt, fm)) {
                    scb.addItem(s);
                    notInYet.remove(s);
                }
            }
            // contains
            for (String s : notInYet) {
                if (rsc.contains(s, txt, fm)) {
                    scb.addItem(s);
                }
            }
        } else {
            buildValues(this.vm);
        }
    } else {
        String txt = event.getText();
        HashMap<Integer, Collection> levdistvals = new HashMap<>();
        if (txt.length() > 1) {
            scb.removeAllItems();
            for (String s : values.keySet()) {
                Integer d = StringUtils.getLevenshteinDistance(removeAccents(txt).toLowerCase(),
                        removeAccents(s).toLowerCase());
                if (levdistvals.containsKey(d)) {
                    levdistvals.get(d).add(s);
                } else {
                    Set<String> newc = new TreeSet<>();
                    newc.add(s);
                    levdistvals.put(d, newc);
                }
            }
            SortedSet<Integer> keys = new TreeSet<>(levdistvals.keySet());
            for (Integer k : keys.subSet(0, 10)) {
                List<String> valueList = new ArrayList(levdistvals.get(k));
                Collections.sort(valueList, String.CASE_INSENSITIVE_ORDER);
                for (String v : valueList) {
                    scb.addItem(v);
                }
            }
        }
    }
}
From source file:com.krawler.spring.profileHandler.profileHandlerDAOImpl.java
public KwlReturnObject getAllManagers(HashMap<String, Object> requestParams) throws ServiceException {
    List ll = new ArrayList();
    int dl = 0;
    String companyid = "";
    try {
        if (requestParams.containsKey("companyid") && requestParams.get("companyid") != null) {
            companyid = requestParams.get("companyid").toString();
        }
        String role = " and ( bitwise_and( roleID , 2 ) = 2 ) ";
        String SELECT_USER_INFO = "from User u where company.companyID=? and deleteflag=0 " + role;
        ll = executeQuery(SELECT_USER_INFO, new Object[] { companyid });
        dl = ll.size();
    } catch (Exception e) {
        throw ServiceException.FAILURE("profileHandlerDAOImpl.getAllManagers", e);
    }
    return new KwlReturnObject(true, KWLErrorMsgs.S01, "", ll, dl);
}
From source file:LineageSimulator.java
public static void simulateLineageTrees(Args args) {
    int totalNumNodes = 0;
    // --- grow lineage trees --- //
    for (int t = 0; t < Parameters.NUM_TREES; t++) {
        // create the directory to store the results for each generated tree
        File treeDir = new File(args.simPath + "/tree" + "_" + t);
        treeDir.mkdirs();
        // initial tree (only contains the root)
        SimulatedTree lineageTree = new SimulatedTree();
        // -- expand the tree --
        int iter = 0;
        while (iter < Parameters.NUM_ITERATIONS
                /* there must be a min number of undead nodes */
                || lineageTree.getNumNodes() < lineageTree.getNumDeadNodes() + Parameters.MIN_NUM_NODES + 1) {
            if (lineageTree.getNumNodes() >= lineageTree.getNumDeadNodes() + Parameters.MAX_NUM_NODES + 1) {
                break;
            }
            lineageTree.grow();
            iter++;
        }
        writeOutputFile(treeDir.getAbsolutePath() + "/TREE_plain.txt", lineageTree.toString());
        if (args.generateDOT) {
            writeOutputFile(treeDir.getAbsolutePath() + "/TREE.dot", lineageTree.toDOT());
        }
        logger.fine("Generated tree " + t + " with " + lineageTree.getNumNodes() + " nodes.");
        totalNumNodes += lineageTree.getNumNodes();
        // --- sampling --- //
        for (int s = 0; s < Parameters.NUM_SAMPLES_ARRAY.length; s++) {
            int numSamples = Parameters.NUM_SAMPLES_ARRAY[s];
            ArrayList<TumorSample> samples = new ArrayList<TumorSample>();
            HashSet<CellPopulation> subclones = new HashSet<CellPopulation>();
            HashMap<Mutation.SNV, double[]> multiSampleFrequencies = new HashMap<Mutation.SNV, double[]>();
            // --- collect the samples from the tree ---
            if (Parameters.LOCALIZED_SAMPLING) {
                samples = lineageTree.getKLocalizedSamples(numSamples - 1);
            } else { // randomized
                for (int i = 1; i < numSamples; i++) {
                    samples.add(lineageTree.getSample());
                }
            }
            if (args.generateSampledDOT) {
                writeOutputFile(treeDir.getAbsolutePath() + "/TREE_s" + numSamples + ".dot",
                        lineageTree.toColoredDOT(samples));
            }
            lineageTree.resetColors();
            // --- populate the SNV VAFs for each sample ---
            for (int i = 1; i < numSamples; i++) { // + default normal sample 0
                TumorSample sample = samples.get(i - 1);
                HashMap<Mutation.SNV, Double> freqMap = sample.getSNVFrequencies();
                for (Mutation.SNV snv : freqMap.keySet()) {
                    if (multiSampleFrequencies.containsKey(snv)) {
                        multiSampleFrequencies.get(snv)[i] = freqMap.get(snv);
                    } else {
                        multiSampleFrequencies.put(snv, new double[numSamples]);
                        multiSampleFrequencies.get(snv)[i] = freqMap.get(snv);
                    }
                }
                subclones.addAll(sample.cellPopulationCounts.keySet());
            }
            HashMap<Mutation.SNV, String> binaryProfiles = null;
            if (args.outputSampleProfile) {
                binaryProfiles = getBinaryProfile(multiSampleFrequencies, numSamples);
            }
            // --- store true VAFs ---
            String VAFFileName = treeDir.getAbsolutePath() + "/VAF_s" + numSamples + "_true.txt";
            writeVAFsToFile(VAFFileName, multiSampleFrequencies, binaryProfiles, numSamples);
            // --- generate VAFs with simulated coverage and sequencing error ---
            for (int c = 0; c < Parameters.COVERAGE_ARRAY.length; c++) {
                int coverage = Parameters.COVERAGE_ARRAY[c];
                VAFFileName = treeDir.getAbsolutePath() + "/VAF_s" + numSamples + "_" + coverage + "X.txt";
                HashMap<Mutation.SNV, double[]> noisyMultiSampleFrequencies = addNoise(multiSampleFrequencies,
                        coverage, numSamples);
                writeVAFsToFile(VAFFileName, noisyMultiSampleFrequencies, binaryProfiles, numSamples);
            }
            // --- store subclone information for evaluation ---
            String lineageFileName = treeDir.getAbsolutePath() + "/SUBCLONES_s" + numSamples + ".txt";
            writeSubclonesToFile(lineageFileName, subclones);
        }
        if ((t + 1) % 1 == 0)
            logger.info("[PROGRESS] Simulated " + (t + 1) + " trees.");
    }
    logger.info("[SUMMARY] Simulated " + Parameters.NUM_TREES + " trees. Average number of nodes / tree = "
            + (double) totalNumNodes / (Parameters.NUM_TREES));
}
From source file:com.mcongrove.pebble.TitaniumPebbleModule.java
@Kroll.method
private void addReceivers() {
    if (isListeningToPebble) {
        if (connectedReceiver == null) {
            connectedReceiver = new BroadcastReceiver() {
                @Override
                public void onReceive(Context context, Intent intent) {
                    Log.d(LCAT, "watchDidConnect");
                    setConnectedCount(0);
                    fireEvent("watchConnected", new Object[] {});
                }
            };
            PebbleKit.registerPebbleConnectedReceiver(getApplicationContext(), connectedReceiver);
        }
        if (disconnectedReceiver == null) {
            disconnectedReceiver = new BroadcastReceiver() {
                @Override
                public void onReceive(Context context, Intent intent) {
                    Log.d(LCAT, "watchDidDisconnect");
                    setConnectedCount(0);
                    fireEvent("watchDisconnected", new Object[] {});
                }
            };
            PebbleKit.registerPebbleDisconnectedReceiver(getApplicationContext(), disconnectedReceiver);
        }
        if (dataReceiver == null) {
            dataReceiver = new PebbleKit.PebbleDataReceiver(uuid) {
                @Override
                public void receiveData(final Context context, final int transactionId,
                        final PebbleDictionary data) {
                    if (!data.contains(0)) {
                        Log.e(LCAT, "listenToConnectedWatch: Received message, data corrupt");
                        PebbleKit.sendNackToPebble(context, transactionId);
                        return;
                    }
                    PebbleKit.sendAckToPebble(context, transactionId);
                    try {
                        JSONArray jsonArray = new JSONArray(data.toJsonString());
                        if (jsonArray.length() > 0) {
                            JSONObject jsonObject = jsonArray.getJSONObject(0);
                            if (jsonObject.has("value")) {
                                Log.i(LCAT, "listenToConnectedWatch: Received message");
                                HashMap message = new HashMap();
                                message.put("message", jsonObject.getString("value"));
                                fireEvent("update", message);
                            }
                        }
                    } catch (Throwable e) {
                        Log.e(LCAT, "listenToConnectedWatch: Received message, data corrupt");
                    }
                }
            };
            PebbleKit.registerReceivedDataHandler(getApplicationContext(), dataReceiver);
        }
        if (ackReceiver == null) {
            ackReceiver = new PebbleKit.PebbleAckReceiver(uuid) {
                @Override
                public void receiveAck(Context context, int transactionId) {
                    Log.i(LCAT, "Received ACK for transaction: " + transactionId);
                    if (callbacks.containsKey(transactionId)) {
                        HashMap callbackArray = (HashMap) callbacks.get(transactionId);
                        if (callbackArray.containsKey("success")) {
                            KrollFunction successCallback = (KrollFunction) callbackArray.get("success");
                            successCallback.call(getKrollObject(), new Object[] {});
                        }
                    }
                }
            };
            PebbleKit.registerReceivedAckHandler(getApplicationContext(), ackReceiver);
        }
        if (nackReceiver == null) {
            nackReceiver = new PebbleKit.PebbleNackReceiver(uuid) {
                @Override
                public void receiveNack(Context context, int transactionId) {
                    Log.e(LCAT, "Received NACK for transaction: " + transactionId);
                    if (callbacks.containsKey(transactionId)) {
                        HashMap callbackArray = (HashMap) callbacks.get(transactionId);
                        if (callbackArray.containsKey("error")) {
                            KrollFunction errorCallback = (KrollFunction) callbackArray.get("error");
                            errorCallback.call(getKrollObject(), new Object[] {});
                        }
                    }
                }
            };
            PebbleKit.registerReceivedNackHandler(getApplicationContext(), nackReceiver);
        }
    }
}