List of usage examples for java.util LinkedHashMap size
int size();
From source file:Simulator.PerformanceCalculation.java
public JPanel waitTime2() { LinkedHashSet no = new LinkedHashSet(); LinkedHashMap<Integer, ArrayList<Double>> wait1 = new LinkedHashMap<>(); for (Map.Entry<Integer, TraceObject> entry : l.getLocalTrace().entrySet()) { TraceObject traceObject = entry.getValue(); if (wait1.get(traceObject.getSurgeonId()) == null) { ArrayList details = new ArrayList(); details.add(traceObject.getWaitTime2()); wait1.put(traceObject.getSurgeonId(), details); } else {/*from w ww .j av a2 s . c o m*/ wait1.get(traceObject.getSurgeonId()).add(traceObject.getWaitTime2()); } no.add(traceObject.getSurgeonId()); } String[] column = new String[no.size()]; String series1 = "Wait Time 2"; for (int i = 0; i < no.size(); i++) { column[i] = "Surgeon " + (i + 1); } DefaultCategoryDataset dataset = new DefaultCategoryDataset(); LinkedHashMap<Integer, Double> average = new LinkedHashMap<>(); for (Map.Entry<Integer, ArrayList<Double>> entry : wait1.entrySet()) { Integer integer = entry.getKey(); ArrayList<Double> arrayList = entry.getValue(); double total = 0; for (Double double1 : arrayList) { total += double1; } average.put(integer, total / arrayList.size()); } for (int i = 1; i <= average.size(); i++) { dataset.addValue(Math.round(average.get(i) / 600), series1, column[i - 1]); } JFreeChart chart = ChartFactory.createBarChart("Wait Time 2", // chart title "Surgeon ID", // domain axis label "Days", // range axis label dataset, // data PlotOrientation.VERTICAL, // orientation true, // include legend true, // tooltips? false // URLs? ); return new ChartPanel(chart); }
From source file:Simulator.PerformanceCalculation.java
public JPanel waitTime() { LinkedHashSet no = new LinkedHashSet(); LinkedHashMap<Integer, ArrayList<Double>> wait1 = new LinkedHashMap<>(); for (Map.Entry<Integer, TraceObject> entry : l.getLocalTrace().entrySet()) { TraceObject traceObject = entry.getValue(); if (wait1.get(traceObject.getSurgeonId()) == null) { ArrayList details = new ArrayList(); details.add(traceObject.getWaitTime1()); details.add(traceObject.getWaitTime2()); wait1.put(traceObject.getSurgeonId(), details); } else {//from w ww .jav a2 s. c o m wait1.get(traceObject.getSurgeonId()).add(traceObject.getWaitTime1()); wait1.get(traceObject.getSurgeonId()).add(traceObject.getWaitTime2()); } no.add(traceObject.getSurgeonId()); } String[] column = new String[no.size()]; String series1 = "Wait Time"; for (int i = 0; i < no.size(); i++) { column[i] = "Surgeon " + (i + 1); } DefaultCategoryDataset dataset = new DefaultCategoryDataset(); LinkedHashMap<Integer, Double> average = new LinkedHashMap<>(); for (Map.Entry<Integer, ArrayList<Double>> entry : wait1.entrySet()) { Integer integer = entry.getKey(); ArrayList<Double> arrayList = entry.getValue(); double total = 0; for (Double double1 : arrayList) { total += double1; } average.put(integer, total / (arrayList.size() / 2)); } for (int i = 1; i <= average.size(); i++) { dataset.addValue(Math.round(average.get(i) / 600), series1, column[i - 1]); } JFreeChart chart = ChartFactory.createBarChart("Wait Time", // chart title "Surgeon ID", // domain axis label "Days", // range axis label dataset, // data PlotOrientation.VERTICAL, // orientation true, // include legend true, // tooltips? false // URLs? ); return new ChartPanel(chart); }
From source file:org.peerfact.impl.network.gnp.topology.GnpSpace.java
/**
 * Calculates good positions for all hosts in the map by running the downhill
 * simplex placement for each end host and storing the resulting coordinate.
 *
 * Side effects: clears and repopulates {@code coordinateIndex}, and drives the
 * static progress fields {@code GnpSpace.calculationStepStatus},
 * {@code GnpSpace.calculationProgressStatus} and
 * {@code GnpSpace.calculationInProgress} (presumably read by a monitoring UI —
 * TODO confirm against their readers).
 *
 * @param monitorResheduling
 *            number of reschedulings of the downhill simplex
 */
private void insertCoordinates(int monitorResheduling) {
    GnpSpace.calculationStepStatus = 2; // signal "positioning" phase
    coordinateIndex.clear();
    LinkedHashMap<Integer, Host> peers = this.getMapRef().getHostIndex();
    int c = 0; // count of end hosts positioned so far
    for (Host host : peers.values()) {
        GnpSpace.calculationProgressStatus = c;
        // Only end hosts (HOST type) are positioned; monitors/routers are skipped.
        if (host.getHostType() == Host.HOST) {
            GnpPosition coord = this.insertCoordinateDownhillSimplex(host, monitorResheduling);
            coordinateIndex.put(host.getIpAddress(), coord);
            c++;
            // Periodic progress logging; peers.size() includes non-HOST entries,
            // so the "of" total may overstate the number actually positioned.
            if (c % 1000 == 0) {
                log.debug(c + " of " + peers.size() + " are positioned in gnp");
            }
        }
        // Cooperative cancellation: bail out without resetting the status fields.
        if (!calculationInProgress) {
            return;
        }
    }
    GnpSpace.calculationStepStatus = 0; // signal "idle"
    GnpSpace.calculationInProgress = false;
}
From source file:com.chiorichan.database.DatabaseEngine.java
/**
 * Convenience wrapper around {@code select} that returns only the first row.
 *
 * @param table the table to query
 * @param where the selection criteria passed through to {@code select}
 * @return the first result row (stored under key {@code "0"}), or {@code null}
 *         if the query produced no rows
 * @throws SQLException if the underlying select fails
 */
@SuppressWarnings("unchecked")
public LinkedHashMap<String, Object> selectOne(String table, Object where) throws SQLException {
    LinkedHashMap<String, Object> rows = select(table, where);
    if (rows == null || rows.isEmpty()) {
        return null;
    }
    // select() keys its rows by stringified index; "0" is the first row.
    return (LinkedHashMap<String, Object>) rows.get("0");
}
From source file:com.google.gwt.emultest.java.util.LinkedHashMapTest.java
/**
 * Verifies isEmpty() through the add/remove lifecycle: a putAll from an empty
 * map leaves the destination empty, adding an entry makes it non-empty, and
 * removing that entry restores both isEmpty() and size() == 0.
 */
public void testIsEmpty() {
    LinkedHashMap<String, String> srcMap = new LinkedHashMap<String, String>();
    checkEmptyLinkedHashMapAssumptions(srcMap);
    LinkedHashMap<String, String> dstMap = new LinkedHashMap<String, String>();
    checkEmptyLinkedHashMapAssumptions(dstMap);

    // Copying an empty map must not create entries.
    dstMap.putAll(srcMap);
    assertTrue(dstMap.isEmpty());

    dstMap.put(KEY_KEY, VALUE_VAL);
    assertFalse(dstMap.isEmpty());

    dstMap.remove(KEY_KEY);
    assertTrue(dstMap.isEmpty());
    // Fixed argument order: JUnit's convention is assertEquals(expected, actual),
    // so a failure reports "expected 0" rather than the reversed message.
    assertEquals(0, dstMap.size());
}
From source file:org.openmeetings.app.remote.MainService.java
/**
 * This method is just for testing; the corresponding client function lives in
 * xmlcrm/auth/checkLoginData.lzx.
 *
 * @param myObject2 the object received from the client, expected to be a
 *            {@code LinkedHashMap}
 * @return the size of the received map, or {@code -1} if the cast or any
 *         lookup fails
 */
public int testObject(Object myObject2) {
    try {
        @SuppressWarnings("rawtypes")
        LinkedHashMap received = (LinkedHashMap) myObject2;
        // Probe both an integer key and a string key to inspect what arrived.
        log.debug("testObject " + received.size());
        log.debug("testObject " + received.get(1));
        log.debug("testObject " + received.get("stringObj"));
        return received.size();
    } catch (Exception e) {
        // Boundary method: report any failure to the log and signal it via -1.
        log.error("ex: ", e);
        return -1;
    }
}
From source file:Simulator.PerformanceCalculation.java
public JPanel costAnaylsis() { LinkedHashSet no = new LinkedHashSet(); LinkedHashMap<Integer, ArrayList<Double>> wait1 = new LinkedHashMap<>(); for (Map.Entry<Integer, TraceObject> entry : l.getLocalTrace().entrySet()) { TraceObject traceObject = entry.getValue(); if (wait1.get(traceObject.getSurgeonId()) == null) { ArrayList details = new ArrayList(); details.add(traceObject.getWaitTime1()); details.add(traceObject.getWaitTime2()); wait1.put(traceObject.getSurgeonId(), details); } else {//from ww w . j av a 2 s. c om wait1.get(traceObject.getSurgeonId()).add(traceObject.getWaitTime1()); wait1.get(traceObject.getSurgeonId()).add(traceObject.getWaitTime2()); } no.add(traceObject.getSurgeonId()); } String[] column = new String[no.size()]; String series1 = "Cost"; for (int i = 0; i < no.size(); i++) { column[i] = "Surgeon " + (i + 1); } DefaultCategoryDataset dataset = new DefaultCategoryDataset(); int totalCost = 0; LinkedHashMap<Integer, Double> average = new LinkedHashMap<>(); for (Map.Entry<Integer, ArrayList<Double>> entry : wait1.entrySet()) { Integer integer = entry.getKey(); ArrayList<Double> arrayList = entry.getValue(); double total = 0; for (Double double1 : arrayList) { total += double1; } totalCost += total * Configuration.costOfPatientWaiting; average.put(integer, total / 600); } for (int i = 1; i <= average.size(); i++) { dataset.addValue(Math.round(average.get(i) * Configuration.costOfPatientWaiting), series1, column[i - 1]); } totalCostClass = totalCost; JFreeChart chart = ChartFactory.createBarChart("Cost", // chart title "Surgeon ID", // domain axis label "$", // range axis label dataset, // data PlotOrientation.VERTICAL, // orientation true, // include legend true, // tooltips? false // URLs? ); return new ChartPanel(chart); }
From source file:org.apache.tez.mapreduce.input.TestMultiMRInput.java
/**
 * Verifies that MultiMRInput with grouping disabled consumes a single MR split:
 * one split event must yield exactly one KeyValueReader that returns every
 * record written to the staged sequence file, and nothing more.
 */
@Test(timeout = 5000)
public void testSingleSplit() throws Exception {
    // Stage the input directory and configure a SequenceFile input format.
    Path workDir = new Path(TEST_ROOT_DIR, "testSingleSplit");
    JobConf jobConf = new JobConf(defaultConf);
    jobConf.setInputFormat(org.apache.hadoop.mapred.SequenceFileInputFormat.class);
    FileInputFormat.setInputPaths(jobConf, workDir);

    // Serialize the job conf into the Tez user payload, grouping disabled so
    // each MR split maps 1:1 to a reader.
    MRInputUserPayloadProto.Builder builder = MRInputUserPayloadProto.newBuilder();
    builder.setGroupingEnabled(false);
    builder.setConfigurationBytes(TezUtils.createByteStringFromConf(jobConf));
    byte[] payload = builder.build().toByteArray();

    InputContext inputContext = createTezInputContext(payload);
    MultiMRInput input = new MultiMRInput(inputContext, 1);
    input.initialize();

    List<Event> eventList = new ArrayList<Event>();

    // Write 10 records; data1 doubles as the expected-records checklist below.
    String file1 = "file1";
    LinkedHashMap<LongWritable, Text> data1 = createInputData(localFs, workDir, jobConf, file1, 0, 10);
    SequenceFileInputFormat<LongWritable, Text> format = new SequenceFileInputFormat<LongWritable, Text>();
    InputSplit[] splits = format.getSplits(jobConf, 1);
    assertEquals(1, splits.length);

    // Deliver the single split to the input as a serialized split event.
    MRSplitProto splitProto = MRInputHelpers.createSplitProto(splits[0]);
    InputDataInformationEvent event = InputDataInformationEvent.createWithSerializedPayload(0,
            splitProto.toByteString().asReadOnlyByteBuffer());
    eventList.clear();
    eventList.add(event);
    input.handleEvents(eventList);

    // Drain the readers, checking each record off against data1; an empty
    // checklist with records remaining means the input produced extras.
    int readerCount = 0;
    for (KeyValueReader reader : input.getKeyValueReaders()) {
        readerCount++;
        while (reader.next()) {
            if (data1.size() == 0) {
                fail("Found more records than expected");
            }
            Object key = reader.getCurrentKey();
            Object val = reader.getCurrentValue();
            // remove(key) both verifies the value and marks the record consumed.
            assertEquals(val, data1.remove(key));
        }
    }
    // Exactly one split delivered => exactly one reader expected.
    assertEquals(1, readerCount);
}
From source file:at.ac.tuwien.inso.subcat.miner.SvnMiner.java
/**
 * Persists one SVN revision: records the commit itself, then applies each
 * path's change (add / delete / modify / rename) to the model and to the
 * miner-wide {@code fileCache}.
 *
 * NOTE(review): author and committer are resolved from the same SVN username —
 * SVN logs carry no separate committer identity.
 *
 * @param aAuthor   SVN username of the committing author
 * @param aDate     commit timestamp
 * @param pathslist changed paths in log order, mapped to their change stats
 * @param aMsg      raw commit message (lemmatized before storage)
 * @param revision  SVN revision identifier
 * @throws SQLException if a model write fails
 * @throws IOException  if lemmatization fails
 */
private void processCommit(String aAuthor, Date aDate, LinkedHashMap<String, FileStats> pathslist,
        String aMsg, String revision) throws SQLException, IOException {
    //TODO: Errorhandling to indicate that the SVN Log is not complete, e.g. Revisions for files are missing to have a complete file history
    Identity author = resolveIdentity(aAuthor);
    Identity committer = resolveIdentity(aAuthor);
    Date date = aDate;
    String msg = aMsg;

    // Lemmatize input - delivers String List, convert it to string
    msg = StringUtils.join(lemmatizer.lemmatize(msg), ", ");

    int filecount = pathslist.size();
    assert (date != null || msg != null || pathslist != null);

    Commit commit = model.addCommit(revision, project, author, committer, date, msg, filecount, 0, 0);

    // Files already touched within THIS commit, to avoid duplicate change rows.
    Map<String, ManagedFile> commitFileCache = new HashMap();

    for (Map.Entry<String, FileStats> item : pathslist.entrySet()) {
        // stopped is the miner's cooperative cancellation flag.
        if (stopped == true) {
            break;
        }

        FileStats stats = item.getValue();
        String path = item.getKey();

        switch (stats.type) {
        case ADD:
            // New file: register it and record a zero-metric change row.
            ManagedFile addedFile = model.addManagedFile(project, path);
            model.addFileChange(commit, addedFile, 0, 0, 0, 0, 0);
            fileCache.put(path, addedFile);
            commitFileCache.put(path, addedFile);
            break;
        case DELETE:
            //TODO: Evaluate whether a file may be added and deleted in the same commit and add commitfilecache logic
            //TODO: Identify reason, why SVN logs sometimes contain modifies and deletes to files that do not exist and are not renamed either
            if (!stats.isFile) {
                // Directory delete: remove every cached file under this prefix.
                removeBulk(path, commit);
            } else {
                if (fileCache.get(path) != null) {
                    ManagedFile deletedFile = fileCache.get(path);
                    model.addFileDeletion(deletedFile, commit);
                    // NOTE(review): removes stats.oldPath, not path — for a plain
                    // delete oldPath may be null/different; TODO confirm intended.
                    fileCache.remove(stats.oldPath);
                } else {
                    reporter.warning(this.getName(),
                            "could not find: " + path + " to delete in Revision " + revision + " @ SvnMiner");
                }
            }
            break;
        case MODIFY:
            if (stats.isFile) {
                //TODO: Identify reason, why SVN logs sometimes contain modifies and deletes to files that do not exist and are not renamed either
                if (fileCache.get(path) != null) {
                    ManagedFile modifiedFile = fileCache.get(path);
                    //Check if file exists in the commit already and skip the file, if it does
                    if (!commitFileCache.containsKey(path)) {
                        model.addFileChange(commit, modifiedFile, 0, 0, 0, 0, 0);
                        commitFileCache.put(path, modifiedFile);
                    }
                } else {
                    reporter.warning(this.getName(),
                            "could not find: " + path + " to modify in Revision " + revision + " @ SvnMiner");
                }
            }
            break;
        case RENAME:
            //TODO: SVN ADD and DELETE in the same commit is used as rename in SVN - this differentiation has not been implemented yet
            if (!stats.isFile) {
                // Directory rename: re-key every cached file under the old prefix.
                renameBulk(stats.oldPath, path, commit, commitFileCache);
            } else {
                ManagedFile copyFile = model.addManagedFile(project, path);
                model.addFileChange(commit, copyFile, 0, 0, 0, 0, 0);
                fileCache.put(path, copyFile);
                commitFileCache.put(path, copyFile);
                //we dont remove during rename in SVN, since RENAME might also mean a copy command
                //fileCache.remove (stats.oldPath);
            }
            break;
        default:
            // Unreachable for a well-formed FileStats.type.
            assert (false);
        }
    }
    emitTasksProcessed(1);
}
From source file:ubic.gemma.core.visualization.ExperimentalDesignVisualizationServiceImpl.java
/** * Test method for now, shows how this can be used. * * @param e ee//from ww w . j a v a 2s. c o m */ @SuppressWarnings("unused") // Test method for now, shows how this can be used. protected void plotExperimentalDesign(ExpressionExperiment e) { LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> layout = this .getExperimentalDesignLayout(e); List<String> efStrings = new ArrayList<>(); List<String> baStrings = new ArrayList<>(); List<double[]> rows = new ArrayList<>(); boolean first = true; int i = 0; for (BioAssayValueObject ba : layout.keySet()) { baStrings.add(ba.getName()); int j = 0; for (ExperimentalFactor ef : layout.get(ba).keySet()) { if (first) { double[] nextRow = new double[layout.size()]; rows.add(nextRow); efStrings.add(ef.getName() + " ( id=" + ef.getId() + ")"); // make sure they are unique. } double d = layout.get(ba).get(ef); rows.get(j)[i] = d; j++; } i++; first = false; } double[][] mat = rows.toArray(new double[][] {}); DoubleMatrix<String, String> data = DoubleMatrixFactory.dense(mat); data.setRowNames(efStrings); data.setColumnNames(baStrings); ColorMatrix<String, String> cm = new ColorMatrix<>(data, ColorMap.GREENRED_COLORMAP, Color.GRAY); try { this.writeImage(cm, File.createTempFile(e.getShortName() + "_", ".png")); } catch (IOException e1) { throw new RuntimeException(e1); } }