List of usage examples for java.util.HashMap#containsKey(Object)
public boolean containsKey(Object key)
From source file:DIA_Umpire_Quant.DIA_Umpire_ProtQuant.java
private static void SaintOutput(LCMSID protID, LCMSID IDsummary, FragmentSelection fragselection, FileWriter interactionfile, String filename, String samplename, HashMap<String, String> PreyID, int quanttype) throws IOException { for (String key : protID.ProteinList.keySet()) { if (IDsummary.ProteinList.containsKey(key)) { ProtID protein = IDsummary.ProteinList.get(key); float abundance = 0f; if (quanttype == 1) { abundance = protein.GetAbundanceByMS1_IBAQ(); } else if (quanttype == 2) { abundance = protein.GetAbundanceByTopCorrFragAcrossSample( fragselection.TopPeps.get(protein.getAccNo()), fragselection.TopFrags); }//from w w w .j a v a 2s. c om if (abundance > 0) { interactionfile.write(FilenameUtils.getBaseName(filename) + "\t" + samplename + "\t" + protein.getAccNo() + "\t" + abundance + "\n"); if (!PreyID.containsKey(protein.getAccNo())) { PreyID.put(protein.getAccNo(), /*protein.Sequence.length()+"\t"+*/ protein.GetGeneName()); } } } } }
From source file:com.bluexml.side.forms.generator.alfresco.chiba.FormGenerator.java
/**
 * Decides whether generation should run for the given models.
 *
 * @param modelsInfo   map of metamodel namespace URI -> model files
 * @param id_metamodel unused by this check
 * @return true when either the Clazz or the Form metamodel is present
 */
public boolean shouldGenerate(HashMap<String, List<IFile>> modelsInfo, String id_metamodel) {
    boolean hasClazzModel = modelsInfo.containsKey(ClazzPackage.eNS_URI);
    boolean hasFormModel = modelsInfo.containsKey(FormPackage.eNS_URI);
    return hasClazzModel || hasFormModel;
}
From source file:com.concursive.connect.web.modules.calendar.portlets.main.SaveEventInviteesAction.java
private boolean scheduleDimdimMeeting(Connection db, MeetingInviteesBean meetingInviteesBean) throws SQLException { Meeting meeting = meetingInviteesBean.getMeeting(); if (!StringUtils.hasText(meeting.getDimdimUrl()) || !StringUtils.hasText(meeting.getDimdimUsername()) || !StringUtils.hasText(meeting.getDimdimPassword())) { meeting.addError("inviteesError", "Missing Dimdim credentials"); return false; }// w w w . jav a 2 s. co m HashMap<String, String> resultMap = DimDimUtils.processDimdimMeeting(meetingInviteesBean, null); if (resultMap.containsKey(DimDimUtils.DIMDIM_CODE_SUCCESS)) { String dimdimMeetingId = resultMap.get(DimDimUtils.DIMDIM_CODE_SUCCESS); meeting.setDimdimMeetingId(dimdimMeetingId); meeting.update(db); return true; } meeting.addError("inviteesError", resultMap.get(resultMap.keySet().toArray()[0])); return false; }
From source file:com.twosigma.beakerx.chart.ChartDetails.java
protected GraphicsActionObject getDetailsFromMessage(HashMap content) { GraphicsActionObject ret = null;// www . j a v a2s . c o m if (content.containsKey("params")) { HashMap params = (HashMap) content.get("params"); if (params.containsKey("type")) { String type = (String) params.get("type"); switch (type) { case "CategoryGraphicsActionObject": { ret = new CategoryGraphicsActionObject(); CategoryGraphicsActionObject retObject = (CategoryGraphicsActionObject) ret; if (params.containsKey("category")) { retObject.setCategory((int) params.get("category")); } if (params.containsKey("series")) { retObject.setSeries((int) params.get("series")); } } break; case "CombinedPlotActionObject": { ret = new CombinedPlotActionObject(); CombinedPlotActionObject retObject = (CombinedPlotActionObject) ret; if (params.containsKey("subplotIndex")) { retObject.setSubplotIndex((int) params.get("subplotIndex")); } if (params.containsKey("index")) { retObject.setIndex((int) params.get("index")); } } break; case "XYGraphicsActionObject": { ret = new XYGraphicsActionObject(); XYGraphicsActionObject retObject = (XYGraphicsActionObject) ret; if (params.containsKey("index")) { retObject.setIndex((int) params.get("index")); } } break; } if (params.containsKey("actionType")) { CommActions value = CommActions.getByAction((String) params.get("actionType")); ret.setActionType(value); } if (params.containsKey("tag")) { ret.setTag((String) params.get("tag")); } if (params.containsKey("key")) { ret.setKey((String) params.get("key")); } } } return ret; }
From source file:controller.PresentatieController.java
@RequestMapping(value = { "/presentatie/inschrijven" }, method = RequestMethod.POST) public @ResponseBody String register(@RequestBody Long id) throws JSONException { System.out.println(id);//from ww w . ja va2 s .c o m JSONObject output = new JSONObject(); output.put("success", false); Presentation p = presentationRepository.findOne(id); if (p == null) { output.put("message", "Kan de presentatie niet vinden."); return output.toString(); } //check for free spots if (p.getAttendees().size() == p.getLocation().getCapacity()) { output.put("message", "Er is geen plaats meer vrij"); return output.toString(); } //check if user is already attending presentation at this timeframe Authentication auth = SecurityContextHolder.getContext().getAuthentication(); User user = (User) auth.getPrincipal(); if (!(user instanceof Student)) { output.put("message", "Enkel studenten kunnen zich inschrijven voor een presentatie"); return output.toString(); } Student student = (Student) user; HashMap<Date, List<TimeFrame>> timeframes = new HashMap<>(); for (GuestRequest gr : student.getGuestRequests()) { Presentation pr = gr.getPresentation(); if (!timeframes.containsKey(pr.getDate())) { timeframes.put(pr.getDate(), new ArrayList<TimeFrame>()); timeframes.get(pr.getDate()).add(pr.getTimeFrame()); } if (!timeframes.get(pr.getDate()).contains(pr.getTimeFrame())) { timeframes.get(pr.getDate()).add(pr.getTimeFrame()); } else { output.put("message", "U heeft zich al ingeschreven voor een presentatie op hetzelfde tijdstip"); return output.toString(); } } GuestRequest n = new GuestRequest(); n.setPresentation(p); n.setStudent(student); student.getGuestRequests().add(n); //p.getAttendees().add(student); //presentationRepository.flush(); guestRequestRepository.saveAndFlush(n); output.put("success", true); return output.toString(); }
From source file:de.tudarmstadt.tk.statistics.importer.ExternalResultsReader.java
public static void readMUGCCV(String filePath) { String outFileName = "AggregatedTrainTest.csv"; logger.log(Level.INFO, String.format("Importing data from directory %s.", filePath)); // Method requires input directory. Check this condition. File directory = new File(filePath); if (directory.isDirectory()) { System.err.println("Please specify a file. Aborting."); return;/*from w ww . ja v a 2 s . co m*/ } //Empty previous output file, if there was one File outputFile = new File(directory.getParentFile(), outFileName); if (outputFile.exists()) { outputFile.delete(); } try { String header = "Train;Test;Classifier;FeatureSet;Measure;Value"; PrintWriter out = new PrintWriter(new FileWriter(outputFile, true)); out.println(header); out.close(); } catch (IOException e) { System.err.println("Error while writing aggregated Train-Test file."); e.printStackTrace(); } ArrayList<String> outputRows = new ArrayList<String>(); // iterate all rows List<String[]> inputRowsFirstFile = new ArrayList<>(); inputRowsFirstFile = readAndCheckCSV(filePath, ';'); // first: order by train set ArrayList<ExternalResults> extResults = new ArrayList<>(); for (int i = 0; i < inputRowsFirstFile.size(); i++) { ExternalResults results = new ExternalResults(); // identify current train/test split String[] datasetNames = inputRowsFirstFile.get(i)[0].split(","); results.trainSetName = datasetNames[0].replace("CV: ", "").replace(" ", ""); // set classifier name results.classifierParameters = inputRowsFirstFile.get(i)[1]; // read feature set results.featureSetName = inputRowsFirstFile.get(i)[2]; // read classification results results.recall = Double.parseDouble(inputRowsFirstFile.get(i)[3]); results.fMeasure = Double.parseDouble(inputRowsFirstFile.get(i)[4]); results.precision = Double.parseDouble(inputRowsFirstFile.get(i)[5]); results.accuracy = Double.parseDouble(inputRowsFirstFile.get(i)[10]) / 100; extResults.add(results); } HashMap<String, ArrayList<ExternalResults>> extResultsByTrainTestFeature = new 
HashMap<>(); // order by test set for (ExternalResults result : extResults) { String IdKey = result.trainSetName + result.testSetName + result.featureSetName; if (extResultsByTrainTestFeature.containsKey(IdKey)) { extResultsByTrainTestFeature.get(IdKey).add(result); } else { extResultsByTrainTestFeature.put(IdKey, new ArrayList<ExternalResults>()); extResultsByTrainTestFeature.get(IdKey).add(result); } } ArrayList<ExternalResults> aggregatedResults = new ArrayList<>(); // aggregate results or keep as are for (Entry<String, ArrayList<ExternalResults>> trainTestSplit : extResultsByTrainTestFeature.entrySet()) { ExternalResults aggrResult = new ExternalResults(); double recall = 0; double fMeasure = 0; double precision = 0; double accuracy = 0; int nrClassifiers = 0; // for all entries that are from the same train/test split and use the same feature set -> aggregate results for (ExternalResults result : trainTestSplit.getValue()) { aggrResult.testSetName = result.testSetName; aggrResult.trainSetName = result.trainSetName; aggrResult.classifierParameters = result.classifierParameters; aggrResult.featureSetName = result.featureSetName; recall += result.recall; fMeasure += result.fMeasure; precision += result.precision; accuracy += result.accuracy; nrClassifiers++; } aggrResult.accuracy = (accuracy / nrClassifiers); aggrResult.fMeasure = (fMeasure / nrClassifiers); aggrResult.recall = (recall / nrClassifiers); aggrResult.precision = (precision / nrClassifiers); aggregatedResults.add(aggrResult); } // write values of measure for (ExternalResults result : aggregatedResults) { String outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0", result.featureSetName, "Percent Correct", result.accuracy); outputRows.add(outputRow); outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0", result.featureSetName, "Weighted Precision", result.precision); outputRows.add(outputRow); outputRow = 
String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0", result.featureSetName, "Weighted Recall", result.recall); outputRows.add(outputRow); outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0", result.featureSetName, "Weighted F-Measure", result.fMeasure); outputRows.add(outputRow); } // Write aggregated data to a new file try { PrintWriter out = new PrintWriter(new FileWriter(outputFile, true)); for (String s : outputRows) { out.println(s); } out.close(); } catch (IOException e) { System.err.println("Error while writing aggregated Train-Test file."); e.printStackTrace(); } logger.log(Level.INFO, String.format("Finished import. The aggregated data was written to %s.", outFileName)); }
From source file:de.tudarmstadt.tk.statistics.importer.ExternalResultsReader.java
public static void readMUGCTrainTest(String filePath) { String outFileName = "AggregatedTrainTest.csv"; logger.log(Level.INFO, String.format("Importing data from directory %s.", filePath)); // Method requires input directory. Check this condition. File directory = new File(filePath); if (directory.isDirectory()) { System.err.println("Please specify a file. Aborting."); return;/*from w w w .ja v a2 s. c o m*/ } //Empty previous output file, if there was one File outputFile = new File(directory.getParentFile(), outFileName); if (outputFile.exists()) { outputFile.delete(); } try { String header = "Train;Test;Classifier;FeatureSet;Measure;Value"; PrintWriter out = new PrintWriter(new FileWriter(outputFile, true)); out.println(header); out.close(); } catch (IOException e) { System.err.println("Error while writing aggregated Train-Test file."); e.printStackTrace(); } ArrayList<String> outputRows = new ArrayList<String>(); // iterate all rows List<String[]> inputRowsFirstFile = new ArrayList<>(); inputRowsFirstFile = readAndCheckCSV(filePath, ';'); // first: order by train set ArrayList<ExternalResults> extResults = new ArrayList<>(); for (int i = 0; i < inputRowsFirstFile.size(); i++) { ExternalResults results = new ExternalResults(); // identify current train/test split String[] datasetNames = inputRowsFirstFile.get(i)[0].replace("TRAIN:", "").replace("TEST:", "") .split(","); results.trainSetName = datasetNames[0].replace(" ", ""); results.testSetName = datasetNames[1].replace(" ", ""); // set classifier name results.classifierParameters = inputRowsFirstFile.get(i)[1]; // read feature set results.featureSetName = inputRowsFirstFile.get(i)[2]; // read classification results results.recall = Double.parseDouble(inputRowsFirstFile.get(i)[3]); results.fMeasure = Double.parseDouble(inputRowsFirstFile.get(i)[4]); results.precision = Double.parseDouble(inputRowsFirstFile.get(i)[5]); results.accuracy = Double.parseDouble(inputRowsFirstFile.get(i)[10]) / 100; 
extResults.add(results); } HashMap<String, ArrayList<ExternalResults>> extResultsByTrainTestFeature = new HashMap<>(); // order by test set for (ExternalResults result : extResults) { String IdKey = result.trainSetName + result.testSetName + result.featureSetName; if (extResultsByTrainTestFeature.containsKey(IdKey)) { extResultsByTrainTestFeature.get(IdKey).add(result); } else { extResultsByTrainTestFeature.put(IdKey, new ArrayList<ExternalResults>()); extResultsByTrainTestFeature.get(IdKey).add(result); } } ArrayList<ExternalResults> aggregatedResults = new ArrayList<>(); // aggregate results or keep as are for (Entry<String, ArrayList<ExternalResults>> trainTestSplit : extResultsByTrainTestFeature.entrySet()) { ExternalResults aggrResult = new ExternalResults(); double recall = 0; double fMeasure = 0; double precision = 0; double accuracy = 0; int nrClassifiers = 0; // for all entries that are from the same train/test split and use the same feature set -> aggregate results for (ExternalResults result : trainTestSplit.getValue()) { aggrResult.testSetName = result.testSetName; aggrResult.trainSetName = result.trainSetName; aggrResult.classifierParameters = result.classifierParameters; aggrResult.featureSetName = result.featureSetName; recall += result.recall; fMeasure += result.fMeasure; precision += result.precision; accuracy += result.accuracy; nrClassifiers++; } aggrResult.accuracy = (accuracy / nrClassifiers); aggrResult.fMeasure = (fMeasure / nrClassifiers); aggrResult.recall = (recall / nrClassifiers); aggrResult.precision = (precision / nrClassifiers); aggregatedResults.add(aggrResult); } // write values of measure for (ExternalResults result : aggregatedResults) { String outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0", result.featureSetName, "Percent Correct", result.accuracy); outputRows.add(outputRow); outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0", 
result.featureSetName, "Weighted Precision", result.precision); outputRows.add(outputRow); outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0", result.featureSetName, "Weighted Recall", result.recall); outputRows.add(outputRow); outputRow = String.format("%s;%s;%s;%s;%s;%s", result.trainSetName, result.testSetName, "0", result.featureSetName, "Weighted F-Measure", result.fMeasure); outputRows.add(outputRow); } // Write aggregated data to a new file try { PrintWriter out = new PrintWriter(new FileWriter(outputFile, true)); for (String s : outputRows) { out.println(s); } out.close(); } catch (IOException e) { System.err.println("Error while writing aggregated Train-Test file."); e.printStackTrace(); } logger.log(Level.INFO, String.format("Finished import. The aggregated data was written to %s.", outFileName)); }
From source file:hu.ppke.itk.nlpg.purepos.decoder.BeamedViterbi.java
private void update(HashMap<NGram<Integer>, Node> beam, NGram<Integer> newState, Double newWeight, Node fromNode) {//from w w w .j a v a 2s. c o m if (!beam.containsKey(newState)) { // logger.trace("\t\t\tAS: " + newNGram + " from " + context // + " with " + newValue); beam.put(newState, new Node(newState, newWeight, fromNode)); } else if (beam.get(newState).getWeight() < newWeight) { // logger.trace("\t\t\tUS: " + old + " to " + newNGram + " from " // + context + " with " + newValue); beam.get(newState).setPrevious(fromNode); beam.get(newState).setWeight(newWeight); } else { // logger.trace("\t\t\tNU: " + old + " to " + newNGram + " from " // + context + " with " + newValue); } }
From source file:de.hybris.platform.mpintgordermanagement.interceptor.OrderEntryPrepareInterceptor.java
@Override public void onPrepare(OrderEntryModel orderEntrymodel, InterceptorContext ctx) throws InterceptorException { String threadLocalKey = "entriedOnce-" + orderEntrymodel.getEntryNumber(); HashMap<String, String> contextThreadLocalMap = contextThreadLocal.get(); if (contextThreadLocalMap.containsKey(threadLocalKey)) { return;// w w w. j a v a2s .c o m } contextThreadLocalMap.put(threadLocalKey, "true"); if (orderEntrymodel instanceof TmallOrderEntryModel) { final TmallOrderEntryModel tmallOrderEntryModel = (TmallOrderEntryModel) orderEntrymodel; try { onPrepareTmallOrderEntry(tmallOrderEntryModel); } catch (Exception e) { log.error("Handle order entry status mapping service failed for " + orderEntrymodel, e); logMessage(Boolean.FALSE, "Sync status to hybris", tmallOrderEntryModel, "Sync status to hybris failed due to unknown reason:" + e.getMessage()); } } contextThreadLocalMap.remove(threadLocalKey); }
From source file:com.dalamar.watcher.TrainStationWatcher.java
public void diffWithDB() { ArrayList<LateTrain> trainsInDB = (ArrayList<LateTrain>) ltd.clear().betweenDates(date, date).station(name) .executeQuery();/*w ww .jav a2s . c o m*/ LateTrain[] trains = null; try { trains = fetchLateTrains(); } catch (IOException ex) { Logger.getLogger(TrainStationWatcher.class.getName()).log(Level.SEVERE, null, ex); } if (trains == null) return; if (trainsInDB.isEmpty()) { for (LateTrain tr : trains) { tr.setToStation(name); tr.setDateCaptured(date); // temporary for test. please fix later to Calendar.getInstance().getTime(); ltd.save(tr); } } else { HashMap<String, LateTrain> trainNumToTrain = getTrainMap(trainsInDB); for (LateTrain tr : trains) { if (trainNumToTrain.containsKey(tr.getTrainNumber())) { diffSingleTrain(tr, trainNumToTrain); } else { tr.setToStation(this.name); tr.setDateCaptured(date);// temporary for test. please fix later to Calendar.getInstance().getTime(); ltd.save(tr); } } } rectifyInDB(); }