List of usage examples for java.util.HashMap#containsKey
public boolean containsKey(Object key)
From source file:com.klarna.checkout.stubs.HttpClientStub.java
/**
 * Uppercase data to simulate it having passed on to the server.
 *
 * <p>For POST requests only: reads the request entity as a string, parses it
 * as JSON, uppercases the value under the "test" key (if present) into
 * {@code this.data}, and installs the serialized {@code this.data} as the
 * last response entity.
 *
 * @throws IOException in case of an IO error.
 */
private void fixData() throws IOException {
    if (this.httpUriReq.getMethod().equals("POST")) {
        HttpEntityEnclosingRequest h = (HttpEntityEnclosingRequest) this.httpUriReq;
        InputStream is = h.getEntity().getContent();
        // "\\A" makes the scanner return the whole stream as one token.
        // try-with-resources closes the scanner and the underlying stream
        // (the original leaked both); StringBuilder avoids O(n^2) concat.
        String str;
        try (java.util.Scanner s = new java.util.Scanner(is).useDelimiter("\\A")) {
            StringBuilder sb = new StringBuilder();
            while (s.hasNext()) {
                sb.append(s.next());
            }
            str = sb.toString();
        }
        JSONParser jsonParser = new JSONParser();
        HashMap<String, String> obj = null;
        try {
            // json-simple returns a raw container; cast mirrors original usage.
            obj = (HashMap<String, String>) jsonParser.parse(str);
        } catch (ParseException ex) {
            // Best-effort: unparseable body leaves obj null and data untouched.
            Logger.getLogger(HttpClientStub.class.getName()).log(Level.SEVERE, null, ex);
        }
        if (obj != null && obj.containsKey("test")) {
            str = obj.get("test").toUpperCase();
            this.data.put("test", str);
        }
        ByteArrayInputStream in = new ByteArrayInputStream(JSONObject.toJSONString(this.data).getBytes());
        this.lastResponse.setEntity(new InputStreamEntity(in, in.available()));
    }
}
From source file:edu.cornell.mannlib.vitro.webapp.controller.freemarker.ManageGrantsForIndividualController.java
HashMap<String, List<Map<String, String>>> getGrants(String subjectUri, VitroRequest vreq) { VClassDao vcDao = vreq.getUnfilteredAssertionsWebappDaoFactory().getVClassDao(); String queryStr = QueryUtils.subUriForQueryVar(GRANT_QUERY, "subject", subjectUri); log.debug("queryStr = " + queryStr); HashMap<String, List<Map<String, String>>> subclassToGrants = new HashMap<String, List<Map<String, String>>>(); try {// www .ja va 2s .c o m ResultSet results = QueryUtils.getQueryResults(queryStr, vreq); while (results.hasNext()) { QuerySolution soln = results.nextSolution(); RDFNode subclassUri = soln.get("subclass"); if (subclassUri != null) { String subclassUriStr = soln.get("subclass").toString(); VClass vClass = vcDao.getVClassByURI(subclassUriStr); String subclass = ((vClass.getName() == null) ? subclassUriStr : vClass.getName()); if (!subclassToGrants.containsKey(subclass)) { subclassToGrants.put(subclass, new ArrayList<Map<String, String>>()); //list of grant information } List<Map<String, String>> grantsList = subclassToGrants.get(subclass); grantsList.add(QueryUtils.querySolutionToStringValueMap(soln)); } } } catch (Exception e) { log.error(e, e); } return subclassToGrants; }
From source file:edu.cornell.mannlib.vitro.webapp.controller.freemarker.ManagePeopleForOrganizationController.java
HashMap<String, List<Map<String, String>>> getPeople(String subjectUri, VitroRequest vreq) { VClassDao vcDao = vreq.getUnfilteredAssertionsWebappDaoFactory().getVClassDao(); String queryStr = QueryUtils.subUriForQueryVar(PEOPLE_QUERY, "subject", subjectUri); log.debug("queryStr = " + queryStr); HashMap<String, List<Map<String, String>>> subclassToPeople = new HashMap<String, List<Map<String, String>>>(); try {// w w w . j a v a2 s .c o m ResultSet results = QueryUtils.getQueryResults(queryStr, vreq); while (results.hasNext()) { QuerySolution soln = results.nextSolution(); RDFNode subclassUri = soln.get("subclass"); if (subclassUri != null) { String subclassUriStr = soln.get("subclass").toString(); VClass vClass = vcDao.getVClassByURI(subclassUriStr); String subclass = ((vClass.getName() == null) ? subclassUriStr : vClass.getName()); if (!subclassToPeople.containsKey(subclass)) { subclassToPeople.put(subclass, new ArrayList<Map<String, String>>()); } List<Map<String, String>> peopleList = subclassToPeople.get(subclass); peopleList.add(QueryUtils.querySolutionToStringValueMap(soln)); } } } catch (Exception e) { log.error(e, e); } return subclassToPeople; }
From source file:jp.ac.tohoku.ecei.sb.metabolome.lims.gui.MainWindowController.java
public JFreeChart getChartForCompound(CompoundImpl compound) { IntensityMatrixImpl intensityMatrix = dataManager.getIntensityMatrix(); //DefaultCategoryDataset data = new DefaultCategoryDataset(); XYSeriesCollection data = new XYSeriesCollection(); HashMap<Plate, XYSeries> dataMap = new HashMap<>(); for (Injection injection : intensityMatrix.getColumnKeys()) { Plate p = injection.getPlate();/*from w w w . j a v a 2 s . c om*/ if (!dataMap.containsKey(p)) { XYSeries oneSeries = new XYSeries(p.getName()); data.addSeries(oneSeries); dataMap.put(p, oneSeries); } dataMap.get(p).add(injection.getRunIndex(), intensityMatrix.get(compound, injection)); } List<Sample> globalQC = intensityMatrix.getGlobalQCSamples(); if (globalQC.size() > 0) { XYSeries globalQCData = new XYSeries("GlobalQC"); Sample globalQCSample = globalQC.get(0); for (Injection injection : intensityMatrix.getInjectionsBySample(globalQCSample)) { globalQCData.add(injection.getRunIndex(), intensityMatrix.get(compound, injection)); } data.addSeries(globalQCData); } return ChartFactory.createXYLineChart(compound.toString(), "Injection", "Intensity", data, PlotOrientation.VERTICAL, true, false, false); }
From source file:eu.lod2.DeleteGraphs.java
private WebAPIResult parse_graph_api_result(String result) throws Exception { ObjectMapper mapper = new ObjectMapper(); // can reuse, share globally TypeReference<HashMap<String, Object>> typeRef = new TypeReference<HashMap<String, Object>>() { };/*from ww w. j av a 2 s . co m*/ HashMap<String, Object> userData = mapper.readValue(result, typeRef); WebAPIResult graphs = null; // List<String> graphs = null; if (userData.containsKey("graphs")) { Object ographs = userData.get("graphs"); try { HashMap<String, Object> oographs = (HashMap<String, Object>) ographs; if (oographs.containsKey("resultList")) { Object graphsList = oographs.get("resultList"); graphs = new WebAPIResult((List<String>) graphsList); Object more = oographs.get("more"); graphs.nextquery = (Boolean) more; Object start = oographs.get("start"); Object lsize = oographs.get("listSize"); Integer istart = (Integer) start; Integer ilsize = (Integer) lsize; int from = istart.intValue() + ilsize.intValue(); graphs.nextquery_params = "from=" + from; } ; } catch (Exception e) { System.err.println(e.getMessage()); } ; } ; return graphs; }
From source file:de.hbz.lobid.helper.CompareJsonMaps.java
public boolean writeFileAndTestJson(final JsonNode actual, final JsonNode expected) { // generated data to map final HashMap<String, String> actualMap = new HashMap<>(); extractFlatMapFromJsonNode(actual, actualMap); // expected data to map final HashMap<String, String> expectedMap = new HashMap<>(); extractFlatMapFromJsonNode(expected, expectedMap); CompareJsonMaps.logger.debug("\n##### remove good entries ###"); Iterator<String> it = actualMap.keySet().iterator(); removeContext(it);/* ww w.ja va 2s . com*/ it = expectedMap.keySet().iterator(); removeContext(it); for (final Entry<String, String> e : expectedMap.entrySet()) { CompareJsonMaps.logger.debug("Trying to remove " + e.getKey() + "..."); if (!actualMap.containsKey(e.getKey())) { CompareJsonMaps.logger.warn("At least this element is missing in actual: " + e.getKey()); return false; } if (e.getKey().endsWith("Order]")) { handleOrderedValues(actualMap, e); } else { handleUnorderedValues(actualMap, e); } } if (!actualMap.isEmpty()) { CompareJsonMaps.logger.warn("Fail - no Equality! These keys/values were NOT expected:"); actualMap.forEach((key, val) -> CompareJsonMaps.logger.warn("KEY=" + key + " VALUE=" + val)); } else CompareJsonMaps.logger.info("Succeeded - resources are equal"); return actualMap.size() == 0; }
From source file:android.syncml.pim.vcard.VCardComposer.java
/** Loop append TEL property. */ private void appendPhoneStr(List<ContactStruct.PhoneData> phoneList, int version) { HashMap<String, String> numMap = new HashMap<String, String>(); String joinMark = version == VERSION_VCARD21_INT ? ";" : ","; for (ContactStruct.PhoneData phone : phoneList) { String type;// w ww. jav a 2 s. com if (!isNull(phone.data)) { type = getPhoneTypeStr(phone); if (version == VERSION_VCARD30_INT && type.indexOf(";") != -1) { type = type.replace(";", ","); } if (numMap.containsKey(phone.data)) { type = numMap.get(phone.data) + joinMark + type; } numMap.put(phone.data, type); } } for (Map.Entry<String, String> num : numMap.entrySet()) { if (version == VERSION_VCARD21_INT) { mResult.append("TEL;"); } else { // vcard3.0 mResult.append("TEL;TYPE="); } mResult.append(num.getValue()).append(":").append(num.getKey()).append(mNewline); } }
From source file:de.tudarmstadt.tk.statistics.importer.ExternalResultsReader.java
/**
 * Splits a SampleData aggregate into one SampleData per value of the fixed
 * independent variable (classifier or feature set, per config), so each
 * split varies only the other variable.
 *
 * <p>If only one of the two variables actually varies, the input is
 * returned unsplit. Within each split, the baseline model (when baseline
 * evaluation is active) is moved to index 0 of the metadata and of every
 * per-measure sample list, keeping the parallel lists index-aligned.
 * Exits the JVM with an error if a required baseline model is missing.
 *
 * @param data   the aggregate sample data to split
 * @param config supplies which independent variable is held fixed
 * @return the list of per-fixed-value SampleData objects
 */
public static List<SampleData> splitData(SampleData data, StatsConfig config) {
    List<SampleData> splitted = new ArrayList<SampleData>();
    //Use lists instead of sets to maintain order of model metadata
    ArrayList<String> featureSets = new ArrayList<String>();
    ArrayList<String> classifiers = new ArrayList<String>();
    for (Pair<String, String> metadata : data.getModelMetadata()) {
        // Metadata pairs are (classifier, featureSet); collect distinct values of each.
        if (!classifiers.contains(metadata.getLeft())) {
            classifiers.add(metadata.getLeft());
        }
        if (!featureSets.contains(metadata.getRight())) {
            featureSets.add(metadata.getRight());
        }
    }
    //Only separate data if there's more than one independent variable
    if (!(featureSets.size() > 1 && classifiers.size() > 1)) {
        splitted.add(data);
        return splitted;
    }
    // Iterate over the values of the variable that is held fixed per split.
    List<String> it = (config
            .getFixIndependentVariable() == StatsConfigConstants.INDEPENDENT_VARIABLES_VALUES.Classifier)
                    ? classifiers
                    : featureSets;
    for (String fixed : it) {
        ArrayList<Pair<String, String>> modelMetadata = new ArrayList<Pair<String, String>>();
        HashMap<String, ArrayList<ArrayList<Double>>> samples = new HashMap<String, ArrayList<ArrayList<Double>>>();
        HashMap<String, ArrayList<Double>> sampleAverages = new HashMap<String, ArrayList<Double>>();
        // Gather every model (and its per-measure samples, by shared index i)
        // whose fixed-variable side equals the current value.
        for (int i = 0; i < data.getModelMetadata().size(); i++) {
            Pair<String, String> model = data.getModelMetadata().get(i);
            boolean eq = (config
                    .getFixIndependentVariable() == StatsConfigConstants.INDEPENDENT_VARIABLES_VALUES.Classifier)
                            ? model.getLeft().equals(fixed)
                            : model.getRight().equals(fixed);
            if (eq) {
                modelMetadata.add(model);
                for (String measure : data.getSamples().keySet()) {
                    if (!samples.containsKey(measure)) {
                        samples.put(measure, new ArrayList<ArrayList<Double>>());
                        sampleAverages.put(measure, new ArrayList<Double>());
                    }
                    samples.get(measure).add(data.getSamples().get(measure).get(i));
                    sampleAverages.get(measure).add(data.getSamplesAverage().get(measure).get(i));
                }
            }
        }
        ArrayList<Pair<String, String>> baselineModelData = new ArrayList<Pair<String, String>>();
        if (data.isBaselineEvaluation()) {
            // Find the baseline model matching the current fixed value.
            Pair<String, String> baselineModel = null;
            for (int i = 0; i < data.getBaselineModelMetadata().size(); i++) {
                boolean eq = (config
                        .getFixIndependentVariable() == StatsConfigConstants.INDEPENDENT_VARIABLES_VALUES.Classifier)
                                ? data.getBaselineModelMetadata().get(i).getLeft().equals(fixed)
                                : data.getBaselineModelMetadata().get(i).getRight().equals(fixed);
                if (eq) {
                    baselineModel = data.getBaselineModelMetadata().get(i);
                    break;
                }
            }
            if (baselineModel != null) {
                baselineModelData.add(baselineModel);
                // Move the baseline model to the front of the metadata AND of
                // every per-measure list, preserving index alignment.
                int modelIndex = modelMetadata.indexOf(baselineModel);
                modelMetadata.remove(modelIndex);
                modelMetadata.add(0, baselineModel);
                for (String measure : data.getSamples().keySet()) {
                    ArrayList<Double> s = samples.get(measure).get(modelIndex);
                    samples.get(measure).remove(modelIndex);
                    samples.get(measure).add(0, s);
                    double a = sampleAverages.get(measure).get(modelIndex);
                    sampleAverages.get(measure).remove(modelIndex);
                    sampleAverages.get(measure).add(0, a);
                }
            } else {
                // Hard failure: the statistics pipeline cannot proceed without a baseline.
                logger.log(Level.ERROR,
                        "Missing baseline model! Please check if baseline indicators are set correctly in the input file, and if they correspond correctly to the fixIndependentVariable property in the configuration. In case of both varying feature sets and classifiers, baseline indicators have to be set multiple times.");
                System.err.println(
                        "Missing baseline model! Please check if baseline indicators are set correctly in the input file, and if they correspond correctly to the fixIndependentVariable property in the configuration. In case of both varying feature sets and classifiers, baseline indicators have to be set multiple times.");
                System.exit(1);
            }
        }
        SampleData newData = new SampleData(null, samples, sampleAverages, data.getDatasetNames(), modelMetadata,
                baselineModelData, data.getPipelineType(), data.getnFolds(), data.getnRepetitions());
        splitted.add(newData);
    }
    return splitted;
}
From source file:fr.paris.lutece.plugins.document.service.attributes.DefaultManager.java
/** * Get parameters map//from www .j av a2 s .com * @param nAttributeId the attribute identifier * @param locale The current locale * @return a map of parameters */ protected Map<String, List<String>> getParameters(int nAttributeId, Locale locale) { HashMap<String, List<String>> mapParameters = new HashMap<String, List<String>>(); Collection<AttributeTypeParameter> listParameters = getAttributeParametersValues(nAttributeId, locale); for (AttributeTypeParameter parameter : listParameters) { mapParameters.put(parameter.getName(), parameter.getValueList()); } // Set all missing parameters with their default values for (AttributeTypeParameter parameter : getExtraParameters(locale)) { if (!mapParameters.containsKey(parameter.getName())) { mapParameters.put(parameter.getName(), parameter.getDefaultValue()); } } return mapParameters; }
From source file:com.linkedin.databus.core.TestDbusEventBufferPersistence.java
/**
 * Verifies meta-file lifecycle across multiple close/recreate cycles of a
 * DbusEventBufferMult: meta files are written and valid on close, stale
 * session dirs and .info files are moved to the .BAK directory, good files
 * are left in place, and the .BAK directory is recreated on demand.
 */
@Test
public void testMetaFileCloseMult() throws Exception {
    int maxEventBufferSize = 1144;
    int maxIndividualBufferSize = 500;
    // Number of individual buffers needed to cover the total size (round up).
    int bufNum = maxEventBufferSize / maxIndividualBufferSize;
    if (maxEventBufferSize % maxIndividualBufferSize > 0)
        bufNum++;
    DbusEventBuffer.StaticConfig config = getConfig(maxEventBufferSize, maxIndividualBufferSize, 100, 500,
            AllocationPolicy.MMAPPED_MEMORY, _mmapDirStr, true);
    // create buffer mult
    DbusEventBufferMult bufMult = createBufferMult(config);
    // Save all the files and validate the meta files.
    bufMult.close();
    for (DbusEventBuffer dbusBuf : bufMult.bufIterable()) {
        File metaFile = new File(_mmapDir, dbusBuf.metaFileName());
        // check that we don't have the files
        // NOTE(review): comment above looks stale — the assertion checks the
        // meta file DOES exist after close.
        Assert.assertTrue(metaFile.exists());
        validateFiles(metaFile, bufNum);
    }
    File[] entries = _mmapDir.listFiles();
    // When we create a new multi-buffer, we should get renamed files as well as new files.
    bufMult = createBufferMult(config);
    entries = _mmapDir.listFiles();
    // Has session dirs and renamed meta files.
    // Create an info file for one buffer.
    DbusEventBuffer buf = bufMult.bufIterable().iterator().next();
    buf.saveBufferMetaInfo(true);
    File infoFile = new File(_mmapDir, buf.metaFileName() + ".info");
    Assert.assertTrue(infoFile.exists());
    // Create a session directory that has one file in it.
    File badSes1 = new File(_mmapDir, DbusEventBuffer.getSessionPrefix() + "m");
    badSes1.mkdir();
    badSes1.deleteOnExit();
    File junkFile = new File(badSes1.getAbsolutePath() + "/junkFile");
    junkFile.createNewFile();
    junkFile.deleteOnExit();
    // Create a directory that is empty
    File badSes2 = new File(_mmapDir, DbusEventBuffer.getSessionPrefix() + "n");
    badSes2.mkdir();
    badSes2.deleteOnExit();
    // Create a good file under mmap directory that we don't want to see removed.
    final String goodFile = "GoodFile";
    File gf = new File(_mmapDir, goodFile);
    gf.createNewFile();
    // Now close the multibuf, and see that the new files are still there.
    // We should have deleted the unused sessions and info files.
    bufMult.close();
    // Collect the names that are allowed to remain in the mmap dir:
    // each buffer's meta file and its session directory, plus the good file.
    HashSet<String> validEntries = new HashSet<String>(bufNum);
    for (DbusEventBuffer dbusBuf : bufMult.bufIterable()) {
        File metaFile = new File(_mmapDir, dbusBuf.metaFileName());
        // check that we don't have the files
        Assert.assertTrue(metaFile.exists());
        validateFiles(metaFile, bufNum);
        validEntries.add(metaFile.getName());
        DbusEventBufferMetaInfo mi = new DbusEventBufferMetaInfo(metaFile);
        mi.loadMetaInfo();
        validEntries.add(mi.getSessionId());
    }
    validEntries.add(goodFile);
    // Now we should be left with meta files, and session dirs and nothing else.
    entries = _mmapDir.listFiles();
    for (File f : entries) {
        Assert.assertTrue(validEntries.contains(f.getName()));
        validEntries.remove(f.getName());
    }
    Assert.assertTrue(validEntries.isEmpty());
    // And everything else should have moved to the .BAK directory
    entries = _mmapBakDir.listFiles();
    HashMap<String, File> fileHashMap = new HashMap<String, File>(entries.length);
    for (File f : entries) {
        fileHashMap.put(f.getName(), f);
    }
    // The junk session dir moved to .BAK with its single file intact.
    Assert.assertTrue(fileHashMap.containsKey(badSes1.getName()));
    Assert.assertTrue(fileHashMap.get(badSes1.getName()).isDirectory());
    Assert.assertEquals(fileHashMap.get(badSes1.getName()).listFiles().length, 1);
    Assert.assertEquals(fileHashMap.get(badSes1.getName()).listFiles()[0].getName(), junkFile.getName());
    fileHashMap.remove(badSes1.getName());
    // The empty session dir moved to .BAK as well.
    Assert.assertTrue(fileHashMap.containsKey(badSes2.getName()));
    Assert.assertTrue(fileHashMap.get(badSes2.getName()).isDirectory());
    Assert.assertEquals(fileHashMap.get(badSes2.getName()).listFiles().length, 0);
    fileHashMap.remove(badSes2.getName());
    // We should have the renamed meta files in the hash now.
    for (File f : entries) {
        if (f.getName().startsWith(DbusEventBuffer.getMmapMetaInfoFileNamePrefix())) {
            Assert.assertTrue(fileHashMap.containsKey(f.getName()));
            Assert.assertTrue(f.isFile());
            fileHashMap.remove(f.getName());
        }
    }
    Assert.assertTrue(fileHashMap.isEmpty());
    // One more test to make sure we create the BAK directory dynamically if it does not exist.
    FileUtils.deleteDirectory(_mmapBakDir);
    bufMult = createBufferMult(config);
    entries = _mmapDir.listFiles();
    // Create an info file for one buffer.
    buf = bufMult.bufIterable().iterator().next();
    buf.saveBufferMetaInfo(true);
    infoFile = new File(_mmapDir, buf.metaFileName() + ".info");
    Assert.assertTrue(infoFile.exists());
    bufMult.close();
    // After close, the info file must have been moved into the recreated .BAK dir.
    entries = _mmapBakDir.listFiles();
    fileHashMap = new HashMap<String, File>(entries.length);
    for (File f : entries) {
        fileHashMap.put(f.getName(), f);
    }
    Assert.assertTrue(fileHashMap.containsKey(infoFile.getName()));
    Assert.assertTrue(fileHashMap.get(infoFile.getName()).isFile());
}