List of usage examples for java.util HashMap get
public V get(Object key)
Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
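Before the examples from real projects below, here is a minimal self-contained sketch of the get contract (all names in it are illustrative): get returns the mapped value when the key is present and null when it is absent, which is what the cache-lookup idiom in several of the snippets below relies on.

import java.util.HashMap;

public class HashMapGetDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> ages = new HashMap<>();
        ages.put("alice", 31);

        // Present key: get returns the mapped value
        Integer known = ages.get("alice"); // 31

        // Absent key: get returns null, so guard before unboxing
        Integer missing = ages.get("bob"); // null

        // The cache idiom used in the examples below:
        // look up, and populate the map on a miss
        Integer age = ages.get("bob");
        if (age == null) {
            age = 25; // stands in for an expensive lookup
            ages.put("bob", age);
        }

        System.out.println(known + ", " + missing + ", " + age);
    }
}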
From source file:com.act.lcms.db.analysis.StandardIonAnalysis.java
/**
 * This function returns the best SNR values and their times for each metlin ion based on the StandardIonResult
 * datastructure and plots diagnostics.
 * @param lcmsDir The directory where the LCMS scan data can be found.
 * @param db The DB connection to query.
 * @param positiveStandardWell This is the positive standard well against which the snr comparison is done.
 * @param negativeStandardWells These are the negative standard wells which are used for benchmarking.
 * @param plateCache A hash of Plates already accessed from the DB.
 * @param chemical This is the chemical of interest we are running ion analysis against.
 * @param plottingDir This is the directory where the plotting diagnostics will live.
 * @param restrictedTimeWindows This map of ion to list of doubles represents time windows over which the analysis
 *                              has to be done.
 * @return The StandardIonResult datastructure which contains the standard ion analysis results.
 * @throws Exception
 */
public static StandardIonResult getSnrResultsForStandardWellComparedToValidNegativesAndPlotDiagnostics(
        File lcmsDir, DB db, StandardWell positiveStandardWell, List<StandardWell> negativeStandardWells,
        HashMap<Integer, Plate> plateCache, String chemical, String plottingDir,
        Map<String, List<Double>> restrictedTimeWindows) throws Exception {
    // Check the cache first; fall back to the DB and populate the cache on a miss
    Plate plate = plateCache.get(positiveStandardWell.getPlateId());
    if (plate == null) {
        plate = Plate.getPlateById(db, positiveStandardWell.getPlateId());
        plateCache.put(plate.getId(), plate);
    }

    List<Pair<String, Double>> searchMZs;
    Pair<String, Double> searchMZ = Utils.extractMassFromString(db, chemical);
    if (searchMZ != null) {
        searchMZs = Collections.singletonList(searchMZ);
    } else {
        throw new RuntimeException("Could not find Mass Charge value for " + chemical);
    }

    List<StandardWell> allWells = new ArrayList<>();
    allWells.add(positiveStandardWell);
    allWells.addAll(negativeStandardWells);

    ChemicalToMapOfMetlinIonsToIntensityTimeValues peakData = AnalysisHelper.readStandardWellScanData(db,
            lcmsDir, searchMZs, ScanData.KIND.STANDARD, plateCache, allWells, false, null, null,
            USE_SNR_FOR_LCMS_ANALYSIS, positiveStandardWell.getChemical());

    if (peakData == null || peakData.getIonList().size() == 0) {
        return null;
    }

    LinkedHashMap<String, XZ> snrResults = WaveformAnalysis
            .performSNRAnalysisAndReturnMetlinIonsRankOrderedBySNR(peakData, chemical, restrictedTimeWindows);

    String bestMetlinIon = AnalysisHelper.getBestMetlinIonFromPossibleMappings(snrResults);

    Map<String, String> plottingFileMappings = peakData
            .plotPositiveAndNegativeControlsForEachMetlinIon(searchMZ, plottingDir, chemical, allWells);

    StandardIonResult result = new StandardIonResult();
    result.setChemical(chemical);
    result.setAnalysisResults(snrResults);
    result.setStandardWellId(positiveStandardWell.getId());
    result.setPlottingResultFilePaths(plottingFileMappings);
    result.setBestMetlinIon(bestMetlinIon);
    return result;
}
From source file:com.ibm.bi.dml.runtime.matrix.WriteCSVMR.java
public static JobReturn runJob(MRJobInstruction inst, String[] inputs, InputInfo[] inputInfos, long[] rlens,
        long[] clens, int[] brlens, int[] bclens, String csvWriteInstructions, int numReducers, int replication,
        byte[] resultIndexes, String[] outputs) throws Exception {
    JobConf job = new JobConf(WriteCSVMR.class);
    job.setJobName("WriteCSV-MR");

    byte[] realIndexes = new byte[inputs.length];
    for (byte b = 0; b < realIndexes.length; b++)
        realIndexes[b] = b;

    //set up the input files and their format information
    MRJobConfiguration.setUpMultipleInputs(job, realIndexes, inputs, inputInfos, brlens, bclens, true,
            ConvertTarget.CSVWRITE);

    //set up the dimensions of input matrices
    MRJobConfiguration.setMatricesDimensions(job, realIndexes, rlens, clens);

    //set up the block size
    MRJobConfiguration.setBlocksSizes(job, realIndexes, brlens, bclens);

    MRJobConfiguration.setCSVWriteInstructions(job, csvWriteInstructions);

    //set up the replication factor for the results
    job.setInt("dfs.replication", replication);

    //set up preferred custom serialization framework for binary block format
    if (MRJobConfiguration.USE_BINARYBLOCK_SERIALIZATION)
        MRJobConfiguration.addBinaryBlockSerializationFramework(job);

    long maxRlen = 0;
    for (long rlen : rlens)
        if (rlen > maxRlen)
            maxRlen = rlen;

    //set up the number of reducers (according to output size)
    int numRed = determineNumReducers(rlens, clens,
            ConfigurationManager.getConfig().getIntValue(DMLConfig.NUM_REDUCERS), (int) maxRlen);
    job.setNumReduceTasks(numRed);

    byte[] resultDimsUnknown = new byte[resultIndexes.length];
    MatrixCharacteristics[] stats = new MatrixCharacteristics[resultIndexes.length];
    OutputInfo[] outputInfos = new OutputInfo[outputs.length];
    HashMap<Byte, Integer> indexmap = new HashMap<Byte, Integer>();
    for (int i = 0; i < stats.length; i++) {
        indexmap.put(resultIndexes[i], i);
        resultDimsUnknown[i] = (byte) 0;
        stats[i] = new MatrixCharacteristics();
        outputInfos[i] = OutputInfo.CSVOutputInfo;
    }
    CSVWriteInstruction[] ins = MRInstructionParser.parseCSVWriteInstructions(csvWriteInstructions);
    for (CSVWriteInstruction in : ins)
        stats[indexmap.get(in.output)].set(rlens[in.input], clens[in.input], -1, -1);

    // Print the complete instruction
    if (LOG.isTraceEnabled())
        inst.printCompleteMRJobInstruction(stats);

    //set up what matrices are needed to pass from the mapper to reducer
    MRJobConfiguration.setUpOutputIndexesForMapper(job, realIndexes, "", "", csvWriteInstructions,
            resultIndexes);

    //set up the multiple output files, and their format information
    MRJobConfiguration.setUpMultipleOutputs(job, resultIndexes, resultDimsUnknown, outputs, outputInfos, true,
            true);

    // configure mapper and the mapper output key value pairs
    job.setMapperClass(CSVWriteMapper.class);
    job.setMapOutputKeyClass(TaggedFirstSecondIndexes.class);
    job.setMapOutputValueClass(MatrixBlock.class);

    //configure reducer
    job.setReducerClass(CSVWriteReducer.class);
    job.setOutputKeyComparatorClass(TaggedFirstSecondIndexes.Comparator.class);
    job.setPartitionerClass(TaggedFirstSecondIndexes.FirstIndexRangePartitioner.class);
    //job.setOutputFormat(UnPaddedOutputFormat.class);

    MatrixCharacteristics[] inputStats = new MatrixCharacteristics[inputs.length];
    for (int i = 0; i < inputs.length; i++) {
        inputStats[i] = new MatrixCharacteristics(rlens[i], clens[i], brlens[i], bclens[i]);
    }

    //set unique working dir
    MRJobConfiguration.setUniqueWorkingDir(job);

    RunningJob runjob = JobClient.runJob(job);

    /* Process different counters */
    Group group = runjob.getCounters().getGroup(MRJobConfiguration.NUM_NONZERO_CELLS);
    for (int i = 0; i < resultIndexes.length; i++) {
        // number of non-zeros
        stats[i].setNonZeros(group.getCounter(Integer.toString(i)));
    }

    return new JobReturn(stats, outputInfos, runjob.isSuccessful());
}
From source file:de.adesso.referencer.search.helper.ElasticConfig.java
public static String buildSearchQuery(HashMap<String, String> fieldvalue) {
    if (fieldvalue == null)
        return null;
    if (fieldvalue.size() <= 0)
        return null;
    StringBuilder result = new StringBuilder("{\"query\": { \"bool\": {\"must\": [");
    boolean hasMatches = false;
    for (String s : fieldvalue.keySet()) {
        String matchString = buildMatchString(s, fieldvalue.get(s));
        if (matchString != null) {
            result.append("\n").append(matchString).append(",");
            hasMatches = true;
        }
    }
    // Strip the trailing comma only when at least one match clause was appended;
    // otherwise the opening '[' would be chopped off instead
    if (hasMatches)
        result.setLength(result.length() - 1);
    result.append("\n]}}}");
    return result.toString();
}
From source file:net.mutil.util.HttpUtil.java
public static String connServerForResultPost(String strUrl, HashMap<String, String> entityMap)
        throws ClientProtocolException, IOException {
    String strResult = "";
    URL url = new URL(HttpUtil.getPCURL() + strUrl);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");

    // Build the form-encoded body from the key/value pairs
    StringBuilder entitySb = new StringBuilder();
    Object[] entityKeys = entityMap.keySet().toArray();
    for (int i = 0; i < entityKeys.length; i++) {
        String key = (String) entityKeys[i];
        if (i == 0) {
            entitySb.append(key).append("=").append(entityMap.get(key));
        } else {
            entitySb.append("&").append(key).append("=").append(entityMap.get(key));
        }
    }
    byte[] entity = entitySb.toString().getBytes("UTF-8");
    System.out.println(url.toString() + entitySb.toString());

    conn.setConnectTimeout(5000);
    conn.setDoOutput(true);
    conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
    conn.setRequestProperty("Content-Length", String.valueOf(entity.length));
    conn.getOutputStream().write(entity);

    if (conn.getResponseCode() == 200) {
        InputStream inputstream = conn.getInputStream();
        // Buffer the raw bytes first so multi-byte UTF-8 characters are not
        // corrupted when they straddle a read() boundary
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        byte[] b = new byte[4096];
        for (int n; (n = inputstream.read(b)) != -1;) {
            baos.write(b, 0, n);
        }
        strResult = baos.toString("UTF-8");
    }
    return strResult;
}
From source file:org.hfoss.posit.web.Communicator.java
/**
 * Clean up the item key/value pairs so that we can receive and save to the internal database.
 * @param rMap the raw map of values received from the server
 */
public static void cleanupOnReceive(HashMap<String, Object> rMap) {
    rMap.put(PositDbHelper.FINDS_SYNCED, PositDbHelper.FIND_IS_SYNCED);
    rMap.put(PositDbHelper.FINDS_GUID, rMap.get("barcode_id"));
    rMap.put(PositDbHelper.FINDS_PROJECT_ID, projectId);
    if (rMap.containsKey("add_time")) {
        rMap.put(PositDbHelper.FINDS_TIME, rMap.get("add_time"));
        rMap.remove("add_time");
    }
    if (rMap.containsKey("images")) {
        if (Utils.debug)
            Log.d(TAG, "contains image key");
        rMap.put(PositDbHelper.PHOTOS_IMAGE_URI, rMap.get("images"));
        rMap.remove("images");
    }
}
From source file:com.likya.myra.jef.utils.JobQueueOperations.java
public static HashMap<String, AbstractJobType> toAbstractJobTypeList(HashMap<String, JobImpl> jobQueue) {
    HashMap<String, AbstractJobType> tmpList = new HashMap<String, AbstractJobType>();
    Iterator<String> jobsIterator = jobQueue.keySet().iterator();
    while (jobsIterator.hasNext()) {
        String jobKey = jobsIterator.next();
        tmpList.put(jobKey, jobQueue.get(jobKey).getAbstractJobType());
    }
    return tmpList;
}
From source file:Main.java
public static HashMap<String, List<Object[]>> getUnitDataMapList(List<Object[]> list, int[] indexnum) {
    HashMap<String, List<Object[]>> dataMap = new HashMap<String, List<Object[]>>();
    for (int i = 0; i < list.size(); i++) {
        // Build the grouping key from the selected columns, e.g. "unit(sub1)(sub2)"
        StringBuffer returnStringBuffer = new StringBuffer();
        for (int ai = 0; ai < indexnum.length; ai++) {
            int index = indexnum[ai];
            Object obj = list.get(i)[index];
            String gunit = obj.toString();
            if (ai == 0) {
                returnStringBuffer.append(gunit);
            } else {
                returnStringBuffer.append("(" + gunit + ")");
            }
        }
        String unit = returnStringBuffer.toString();
        // Append to the existing group if present, otherwise start a new one
        if (dataMap.containsKey(unit)) {
            dataMap.get(unit).add(list.get(i));
        } else {
            ArrayList<Object[]> rowdata = new ArrayList<Object[]>();
            rowdata.add(list.get(i));
            dataMap.put(unit, rowdata);
        }
    }
    return dataMap;
}
From source file:com.nubits.nubot.trading.TradeUtils.java
/**
 * Builds a URL-encoded query string from the given argument map.
 *
 * @param args the parameter name/value pairs to encode
 * @param encoding the character encoding to use (e.g. "UTF-8")
 * @return the encoded query string, e.g. "a=1&b=2"
 */
public static String buildQueryString(HashMap<String, String> args, String encoding) {
    String result = new String();
    for (String hashkey : args.keySet()) {
        if (result.length() > 0) {
            result += '&';
        }
        try {
            result += URLEncoder.encode(hashkey, encoding) + "="
                    + URLEncoder.encode(args.get(hashkey), encoding);
        } catch (Exception ex) {
            LOG.severe(ex.toString());
        }
    }
    return result;
}
From source file:jsave.Utils.java
public static HashMap<String, Object> read_arraydesc(final RandomAccessFile raf) throws IOException, Exception {
    HashMap<String, Object> arraydesc = new HashMap<>();
    arraydesc.put("arrstart", read_long(raf));
    int arrStart = (int) arraydesc.get("arrstart");
    switch (arrStart) {
    case 8:
        skip_bytes(raf, 4);
        arraydesc.put("nbytes", read_long(raf));
        arraydesc.put("nelements", read_long(raf));
        arraydesc.put("ndims", read_long(raf));
        skip_bytes(raf, 8);
        arraydesc.put("nmax", read_long(raf));
        int length = (int) arraydesc.get("nmax");
        int[] dims = new int[length];
        for (int i = 0; i < length; i++) {
            dims[i] = read_long(raf);
        }
        arraydesc.put("dims", dims);
        break;
    case 18:
        throw new UnsupportedOperationException("arrstart=18 is not supported");
        //warnings.warn("Using experimental 64-bit array read")
        //_skip_bytes(f, 8)
        //arraydesc['nbytes'] = _read_uint64(f)
        //arraydesc['nelements'] = _read_uint64(f)
        //arraydesc['ndims'] = _read_long(f)
        //_skip_bytes(f, 8)
        //arraydesc['nmax'] = 8
        //arraydesc['dims'] = []
        //for d in range(arraydesc['nmax']):
        //    v = _read_long(f)
        //    if v != 0:
        //        raise Exception("Expected a zero in ARRAY_DESC")
        //    arraydesc['dims'].append(_read_long(f))
        //break;
    default:
        throw new Exception("Unknown ARRSTART: " + arraydesc.get("arrstart"));
    }
    return arraydesc;
}
From source file:fr.lissi.belilif.om2m.rest.WebServiceActions.java
/**
 * Do post.
 *
 * @param uri the uri
 * @param body the body
 * @param headers the headers
 * @return the HTTP status code of the response
 * @throws ClientProtocolException the client protocol exception
 * @throws IOException Signals that an I/O exception has occurred.
 * @throws HttpResponseException the http response exception
 */
public static int doPost(String uri, String body, HashMap<String, String> headers)
        throws ClientProtocolException, IOException, HttpResponseException {
    CloseableHttpClient httpclient = HttpClients.createDefault();
    int resp = -1;
    try {
        HttpPost httpPost = new HttpPost(uri);
        for (String key : headers.keySet()) {
            httpPost.addHeader(key, headers.get(key));
        }
        httpPost.setEntity(new StringEntity(body));
        CloseableHttpResponse response = httpclient.execute(httpPost);
        try {
            resp = response.getStatusLine().getStatusCode();
            if (resp != HttpStatus.SC_CREATED) {
                throw new HttpResponseException(response.getStatusLine().getStatusCode(),
                        response.getStatusLine().getReasonPhrase());
            }
        } finally {
            // Close the response on both the success and the error path
            response.close();
        }
    } finally {
        httpclient.close();
    }
    return resp;
}