List of usage examples for java.util TreeMap keySet
public Set<K> keySet()
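Before the collected examples, a minimal self-contained sketch of the call (the class name and map contents here are illustrative only, not taken from any of the source files below): keySet() returns a live view of the keys, and on a TreeMap it iterates in ascending key order.

import java.util.Set;
import java.util.TreeMap;

public class TreeMapKeySetDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> ages = new TreeMap<>();
        ages.put("carol", 35);
        ages.put("alice", 30);
        ages.put("bob", 25);

        // keySet() on a TreeMap is a view in ascending key order: alice, bob, carol
        Set<String> names = ages.keySet();
        for (String name : names) {
            System.out.println(name + " -> " + ages.get(name));
        }

        // The set is backed by the map: removing a key removes the mapping too.
        names.remove("bob");
        System.out.println(ages); // {alice=30, carol=35}
    }
}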
From source file:com.sfs.whichdoctor.dao.GroupDAOImpl.java
/**
 * Load identities.
 *
 * @param items the group's items
 *
 * @return the items map with each item's identity populated
 */
private TreeMap<String, ItemBean> loadIdentities(final TreeMap<String, ItemBean> items) {
    for (String key : items.keySet()) {
        ItemBean item = items.get(key);

        WhichDoctorCoreIdentityBean identity = new WhichDoctorCoreIdentityBean();
        identity.setGUID(item.getObject2GUID());
        identity.setDescription(item.getName());

        // Load the contact details for this identity
        try {
            // Load the first work address (or primary if none)
            identity.setAddress(this.getAddressDAO().load(identity.getGUID(), false, "Work", "Postal"));
        } catch (WhichDoctorDaoException wde) {
            dataLogger.error("Error loading identity addresses: " + wde.getMessage());
        }
        try {
            // Load all the phone numbers (mobile, work phone + fax)
            identity.setPhone(this.getPhoneDAO().load(identity.getGUID(), true));
        } catch (WhichDoctorDaoException wde) {
            dataLogger.error("Error loading identity phones: " + wde.getMessage());
        }
        try {
            // Load the work email (or primary if none)
            identity.setEmail(
                    this.getEmailDAO().load(identity.getGUID(), false, "Unsecured Email", "Work Email"));
        } catch (WhichDoctorDaoException wde) {
            dataLogger.error("Error loading identity emails: " + wde.getMessage());
        }

        item.setIdentity(identity);
        items.put(key, item);
    }
    return items;
}
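An editorial aside on the pattern above (an observation, not part of the source): when both the key and the value are needed, iterating entrySet() avoids the extra get() lookup per key, and entry.setValue() replaces the put-back. A minimal sketch with hypothetical types:

import java.util.Map;
import java.util.TreeMap;

public class EntrySetSketch {
    public static void main(String[] args) {
        TreeMap<String, StringBuilder> items = new TreeMap<>();
        items.put("a", new StringBuilder("first"));
        items.put("b", new StringBuilder("second"));

        // Equivalent to iterating keySet() and calling get()/put() per key,
        // but with a single lookup per entry.
        for (Map.Entry<String, StringBuilder> entry : items.entrySet()) {
            StringBuilder updated = entry.getValue().append(" (visited)");
            entry.setValue(updated); // put-back through the entry view
        }
        System.out.println(items); // {a=first (visited), b=second (visited)}
    }
}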
From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java
/**
 * @param a_SAR_File
 * @param a_CAS_File
 * @return Object[] result where;
 *         result[0] is a String[] of variable names
 *         result[1] is a double[number of variables][no of data items] of a_SAR_data
 *         result[2] is a double[number of variables][no of data items] of a_CAS_data
 * @throws IOException
 */
protected static Object[] loadDataISARHP_ISARCEP(File a_SAR_File, File a_CAS_File) throws IOException {
    Object[] result = new Object[3];
    TreeMap<String, double[]> a_SAROptimistaionConstraints_TreeMap = loadCASOptimistaionConstraints(a_SAR_File);
    TreeMap<String, double[]> a_CASOptimistaionConstraints_TreeMap = loadCASOptimistaionConstraints(a_CAS_File);
    Vector<String> variables = GeneticAlgorithm_ISARHP_ISARCEP.getVariableList();
    variables.add(0, "Zone_Code");
    String[] variableNames = new String[0];
    variableNames = variables.toArray(variableNames);
    result[0] = variableNames;
    // Format (Flip) data
    double[][] a_SAR_Data = new double[variables.size() - 1][a_SAROptimistaionConstraints_TreeMap.size()];
    double[][] a_CAS_Data = new double[variables.size() - 1][a_SAROptimistaionConstraints_TreeMap.size()];
    String oa;
    double[] a_SARExpectedRow;
    double[] a_CASObservedRow;
    int j = 0;
    Iterator<String> iterator_String = a_SAROptimistaionConstraints_TreeMap.keySet().iterator();
    while (iterator_String.hasNext()) {
        oa = iterator_String.next();
        a_SARExpectedRow = a_SAROptimistaionConstraints_TreeMap.get(oa);
        a_CASObservedRow = a_CASOptimistaionConstraints_TreeMap.get(oa);
        if (a_SARExpectedRow == null) {
            System.out.println(
                    "Warning a_SARExpectedRow == null in loadDataISARHP_ISARCEP(File,File) for OA " + oa);
        } else {
            if (a_CASObservedRow == null) {
                System.out.println(
                        "Warning a_CASObservedRow == null in loadDataISARHP_ISARCEP(File,File) for OA " + oa);
            } else {
                for (int i = 0; i < variables.size() - 1; i++) {
                    a_SAR_Data[i][j] = a_SARExpectedRow[i];
                    a_CAS_Data[i][j] = a_CASObservedRow[i];
                }
            }
        }
        j++;
    }
    result[1] = a_SAR_Data;
    result[2] = a_CAS_Data;
    return result;
}
From source file:net.triptech.buildulator.DataParser.java
/**
 * Parses the text data.
 *
 * @param text the text
 *
 * @return the parsed data as a two-dimensional String array (rows of tokens)
 */
public static String[][] parseTextData(final String text) {
    TreeMap<Integer, TreeMap<Integer, String>> rowData = new TreeMap<Integer, TreeMap<Integer, String>>();

    // This counter holds the maximum number of columns provided
    int maxNumberOfTokens = 0;

    if (text != null) {
        BufferedReader in = new BufferedReader(new StringReader(text));
        String line;
        int lineCounter = 0;
        try {
            while ((line = in.readLine()) != null) {
                TreeMap<Integer, String> parsedLine = new TreeMap<Integer, String>();
                SmartTokenizer tabTokenizer = new SmartTokenizer(line, "\t");
                if (tabTokenizer.countTokens() > 1) {
                    parsedLine = tokenizerToMap(tabTokenizer);
                } else {
                    SmartTokenizer commaTokenizer = new SmartTokenizer(line, ",");
                    parsedLine = tokenizerToMap(commaTokenizer);
                }
                if (parsedLine.size() > maxNumberOfTokens) {
                    maxNumberOfTokens = parsedLine.size();
                }
                rowData.put(lineCounter, parsedLine);
                lineCounter++;
            }
        } catch (IOException ioe) {
            // Error reading string
        }
    }

    String[][] parsedData = new String[rowData.size()][];

    // Now cycle through all the parsed data
    // Ensure that each row has the same (max) number of tokens
    for (int rowIndex : rowData.keySet()) {
        TreeMap<Integer, String> parsedLine = rowData.get(rowIndex);

        // This map holds the final values
        TreeMap<Integer, String> columnTokens = new TreeMap<Integer, String>();
        for (int i = 0; i < maxNumberOfTokens; i++) {
            String value = "";
            if (parsedLine.containsKey(i)) {
                value = parsedLine.get(i);
            }
            columnTokens.put(i, value);
        }
        parsedData[rowIndex] = new String[columnTokens.size()];
        for (int columnIndex : columnTokens.keySet()) {
            String value = columnTokens.get(columnIndex);
            parsedData[rowIndex][columnIndex] = value;
        }
    }
    return parsedData;
}
From source file:samza.samza_test.SamzaCountWindow.java
@SuppressWarnings("unchecked") @Override// w w w . j a va 2 s . c om public void process(IncomingMessageEnvelope envelope, MessageCollector collector, TaskCoordinator coordinator) { try { String input = mapper.readValue((byte[]) envelope.getMessage(), String.class); String[] parts = input.split(" "); long timestamp = Long.parseLong(parts[1]); //long timestamp = System.currentTimeMillis(); if (timestamp < timeStart) { timeStart = timestamp; } if (timestamp > timeEnd) { timeEnd = timestamp; } if (Integer.parseInt(parts[2]) == 0) { startLast = timestamp; startLastFlows = totalFlows; } //////////////////////////////////////// EMPTY FRAMEWORK //////////////////////////////////////// if (parts[0].equals("empty")) { totalFlows += Integer.parseInt(parts[2]); if (totalFlows == windowLimit) { long postProcessingTime = System.currentTimeMillis(); if (timeEnd < postProcessingTime) { timeEnd = postProcessingTime; } long speed = windowLimit / (timeEnd - timeStart); //rychlost v tocich za milisekundu = prumer v tisicich toku za vterinu String msg = "CountWindow se dopocital na hodnotu " + String.valueOf(windowLimit) + " toku :), prumerna rychlost zpracovani byla " + String.valueOf(speed) + "k toku za vterinu"; collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"), mapper.writeValueAsBytes(msg))); cleanVars(); } if (totalFlows > windowLimit) { String msg = "Chyba zpracovani, soucet toku nesedi do count okna!"; collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"), mapper.writeValueAsBytes(msg))); cleanVars(); } } //////////////////////////////////////// TEST FILTER //////////////////////////////////////// if (parts[0].equals("filter")) { totalFlows += Integer.parseInt(parts[2]); filtered += Integer.parseInt(parts[3]); String IP = parts[4]; if (totalFlows == windowLimit) { long postProcessingTime = System.currentTimeMillis(); if (timeEnd < postProcessingTime) { timeEnd = postProcessingTime; } long speed = windowLimit / (timeEnd - timeStart); //rychlost v tocich za milisekundu = prumer v tisicich toku za vterinu String msg = "CountWindow se dopocital na hodnotu " + String.valueOf(windowLimit) + " toku :), IP adresa " + IP + " mela " + String.valueOf(filtered) + " toku. Prumerna rychlost zpracovani byla " + String.valueOf(speed) + "k toku za vterinu"; collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"), mapper.writeValueAsBytes(msg))); cleanVars(); } if (totalFlows > windowLimit) { String msg = "Chyba zpracovani, soucet toku nesedi do count okna!"; collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"), mapper.writeValueAsBytes(msg))); cleanVars(); } } //////////////////////////////////////// TEST COUNT //////////////////////////////////////// if (parts[0].equals("count")) { totalFlows += Integer.parseInt(parts[2]); packets += Integer.parseInt(parts[3]); String IP = parts[4]; if (totalFlows == windowLimit) { long postProcessingTime = System.currentTimeMillis(); if (timeEnd < postProcessingTime) { timeEnd = postProcessingTime; } long speed = windowLimit / (timeEnd - timeStart); //rychlost v tocich za milisekundu = prumer v tisicich toku za vterinu String msg = "CountWindow se dopocital na hodnotu " + String.valueOf(windowLimit) + " toku :), IP adresa " + IP + " mela " + String.valueOf(packets) + " paketu. 
Prumerna rychlost zpracovani byla " + String.valueOf(speed) + "k toku za vterinu"; collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"), mapper.writeValueAsBytes(msg))); //speed = (windowLimit-startLastFlows)/(timeEnd-startLast); //msg = "Mereni od startu posledniho: , IP adresa " + IP + " mela " + String.valueOf(packets) +" paketu. Prumerna rychlost zpracovani byla "+String.valueOf(speed)+"k toku za vterinu"; //collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "samza-stats"), mapper.writeValueAsBytes(msg))); cleanVars(); } if (totalFlows > windowLimit) { String msg = "Chyba zpracovani, soucet toku nesedi do count okna!"; collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"), mapper.writeValueAsBytes(msg))); cleanVars(); } } //////////////////////////////////////// TEST AGGREGATE //////////////////////////////////////// if (parts[0].equals("aggregate")) { totalFlows += Integer.parseInt(parts[2]); for (String field : parts) { String[] divided = field.split("="); if (divided.length > 1) { String IP = divided[0]; if (IP.charAt(0) == '{') { IP = IP.substring(1); } int packetsCount = Integer.parseInt(divided[1].substring(0, divided[1].length() - 1)); if (top.containsKey(IP)) { int packetsFromMap = top.get(IP); top.put(IP, packetsFromMap + packetsCount); } else { top.put(IP, packetsCount); } } } if (totalFlows == windowLimit) { Iterator<String> it = top.keySet().iterator(); StringBuilder sb = new StringBuilder(); while (it.hasNext()) { String key = it.next(); sb.append(key).append(" ").append(String.valueOf(top.get(key))).append(", "); } long postProcessingTime = System.currentTimeMillis(); if (timeEnd < postProcessingTime) { timeEnd = postProcessingTime; } long speed = windowLimit / (timeEnd - timeStart); //rychlost v tocich za milisekundu = prumer v tisicich toku za vterinu //String msg = "CountWindow se dopocital na hodnotu "+String.valueOf(windowLimit)+" toku :). Prumerna rychlost zpracovani byla "+String.valueOf(speed)+"k toku za vterinu. Vypis agregace: "+sb.toString(); String msg = "CountWindow se dopocital na hodnotu " + String.valueOf(windowLimit) + " toku :). Prumerna rychlost zpracovani byla " + String.valueOf(speed) + "k toku za vterinu. 
Vypis agregace: ne v testovacim rezimu, pro IP 141.57.244.116 je paketu:" + String.valueOf(top.get("141.57.244.116")); collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"), mapper.writeValueAsBytes(msg))); cleanVars(); } if (totalFlows > windowLimit) { String msg = "Chyba zpracovani, soucet toku nesedi do count okna!"; collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"), mapper.writeValueAsBytes(msg))); cleanVars(); } } //////////////////////////////////////// TEST TOP N //////////////////////////////////////// if (parts[0].equals("topn")) { totalFlows += Integer.parseInt(parts[2]); for (String field : parts) { String[] divided = field.split("="); if (divided.length > 1) { String IP = divided[0]; if (IP.charAt(0) == '{') { IP = IP.substring(1); } int packetsCount = Integer.parseInt(divided[1].substring(0, divided[1].length() - 1)); if (top.containsKey(IP)) { int packetsFromMap = top.get(IP); top.put(IP, packetsFromMap + packetsCount); } else { top.put(IP, packetsCount); } } } if (totalFlows == windowLimit) { ValueComparator bvc = new ValueComparator(top); TreeMap<String, Integer> sorted = new TreeMap<>(bvc); sorted.putAll(top); Iterator<String> it = sorted.keySet().iterator(); int i = 1; StringBuilder sb = new StringBuilder(); while (it.hasNext()) { String key = it.next(); sb.append(String.valueOf(i)).append(" ").append(key).append(" ") .append(String.valueOf(top.get(key))).append(", "); i++; if (i > 10) { break; } } long postProcessingTime = System.currentTimeMillis(); if (timeEnd < postProcessingTime) { timeEnd = postProcessingTime; } long speed = windowLimit / (timeEnd - timeStart); //rychlost v tocich za milisekundu = prumer v tisicich toku za vterinu String msg = "CountWindow se dopocital na hodnotu " + String.valueOf(windowLimit) + " toku :). Prumerna rychlost zpracovani byla " + String.valueOf(speed) + "k toku za vterinu. 
Vypis TOP 10: " + sb.toString(); collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"), mapper.writeValueAsBytes(msg))); cleanVars(); } if (totalFlows > windowLimit) { String msg = "Chyba zpracovani, soucet toku nesedi do count okna!"; collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"), mapper.writeValueAsBytes(msg))); cleanVars(); } } //////////////////////////////////////// TEST SYN SCAN //////////////////////////////////////// if (parts[0].equals("scan")) { totalFlows += Integer.parseInt(parts[2]); for (String field : parts) { String[] divided = field.split("="); if (divided.length > 1) { String IP = divided[0]; if (IP.charAt(0) == '{') { IP = IP.substring(1); } int packetsCount = Integer.parseInt(divided[1].substring(0, divided[1].length() - 1)); if (top.containsKey(IP)) { int packetsFromMap = top.get(IP); top.put(IP, packetsFromMap + packetsCount); } else { top.put(IP, packetsCount); } } } if (totalFlows == windowLimit) { ValueComparator bvc = new ValueComparator(top); TreeMap<String, Integer> sorted = new TreeMap<>(bvc); sorted.putAll(top); Iterator<String> it = sorted.keySet().iterator(); int i = 1; StringBuilder sb = new StringBuilder(); while (it.hasNext()) { String key = it.next(); sb.append(String.valueOf(i)).append(" ").append(key).append(" ") .append(String.valueOf(top.get(key))).append(", "); i++; if (i > 100) { break; } } long postProcessingTime = System.currentTimeMillis(); if (timeEnd < postProcessingTime) { timeEnd = postProcessingTime; } long speed = windowLimit / (timeEnd - timeStart); //rychlost v tocich za milisekundu = prumer v tisicich toku za vterinu String msg = "CountWindow se dopocital na hodnotu " + String.valueOf(windowLimit) + " toku :). Prumerna rychlost zpracovani byla " + String.valueOf(speed) + "k toku za vterinu. Vypis TOP 10: " + sb.toString(); collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"), mapper.writeValueAsBytes(msg))); cleanVars(); } if (totalFlows > windowLimit) { String msg = "Chyba zpracovani, soucet toku nesedi do count okna!"; collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"), mapper.writeValueAsBytes(msg))); cleanVars(); } } } catch (IOException | NumberFormatException e) { Logger.getLogger(SamzaCountWindow.class.getName()).log(Level.SEVERE, null, e); } }
From source file:it.uniroma2.sag.kelp.learningalgorithm.clustering.kernelbasedkmeans.KernelBasedKMeansEngine.java
/**
 * Count the reassignments as a stopping criterion for the algorithm.
 *
 * @param exampleIdToClusterMap
 *            The map of assignment for the previous iteration
 * @param clusterList
 *            The actual clusters
 * @return the number of examples whose cluster assignment changed
 */
private int countReassigment(TreeMap<Long, Integer> exampleIdToClusterMap, List<Cluster> clusterList) {
    int reassignment = 0;

    TreeMap<Long, Integer> currentExampleIdToClusterMap = new TreeMap<Long, Integer>();

    int clusterId = 0;
    for (Cluster cluster : clusterList) {
        for (ClusterExample clusterExample : cluster.getExamples()) {
            currentExampleIdToClusterMap.put(clusterExample.getExample().getId(), clusterId);
        }
        clusterId++;
    }

    for (Long currentExId : currentExampleIdToClusterMap.keySet()) {
        if (exampleIdToClusterMap.get(currentExId).intValue() != currentExampleIdToClusterMap.get(currentExId)
                .intValue())
            reassignment++;
    }

    return reassignment;
}
From source file:com.npower.wurfl.ListManager.java
public TreeMap<String, WurflDevice> getSpecialActualDeviceElementsList() {
    TreeMap<String, WurflDevice> specialActualDeviceElementsList = new TreeMap<String, WurflDevice>();
    CapabilityMatrix cm = this.getObjectsManager().getCapabilityMatrixInstance();
    TreeMap<String, Element> actualXOMDevices = wu.getActualDeviceElementsList();
    Iterator<String> keys = actualXOMDevices.keySet().iterator();
    while (keys.hasNext()) {
        String key = keys.next();
        Element el = actualXOMDevices.get(key);
        WurflDevice wd = new WurflDevice(el);
        String bn = cm.getCapabilityForDevice(key, "brand_name");
        String mn = cm.getCapabilityForDevice(key, "model_name");
        wd.setBrandName(bn);
        wd.setModelName(mn);
        specialActualDeviceElementsList.put(key, wd);
    }
    return specialActualDeviceElementsList;
}
From source file:com.npower.wurfl.ListManager.java
/**
 * Return a TreeMap of TreeMaps: brand -> model name -> WurflDevice.
 */
public TreeMap<String, TreeMap<String, WurflDevice>> getDeviceGroupedByBrand() {
    if (actualDevicesByBrand.isEmpty()) {
        TreeMap<String, WurflDevice> act_devices = getActualDeviceElementsList();
        Iterator<String> keys = act_devices.keySet().iterator();
        while (keys.hasNext()) {
            String key = keys.next();
            WurflDevice wd = act_devices.get(key);
            String bn = wd.getBrandName();
            if (actualDevicesByBrand.get(bn) == null) {
                // new brand
                TreeMap<String, WurflDevice> hm = new TreeMap<String, WurflDevice>();
                hm.put(wd.getModelName(), wd);
                actualDevicesByBrand.put(bn, hm);
            } else {
                // add to existing brand
                TreeMap<String, WurflDevice> hm = actualDevicesByBrand.get(bn);
                hm.put(wd.getModelName(), wd);
            }
        }
    }
    return actualDevicesByBrand;
}
From source file:com.npower.wurfl.ListManager.java
/**
 * Return an ordered ArrayList of brand names.
 */
public ArrayList<String> getDeviceBrandList() {
    if (brandList.isEmpty()) {
        TreeMap<String, TreeMap<String, WurflDevice>> lol = getDeviceGroupedByBrand();
        brandList = new ArrayList<String>(lol.keySet());
        Collections.sort(brandList);
    }
    return brandList;
}
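A usage note on the example above (an editorial observation, not from the source): because the grouped map is a TreeMap using the natural String ordering, keySet() already yields the brand names in ascending order, so the list is sorted on construction and the explicit Collections.sort is effectively a no-op kept for safety. A minimal sketch with hypothetical data:

import java.util.ArrayList;
import java.util.Collections;
import java.util.TreeMap;

public class BrandListSketch {
    public static void main(String[] args) {
        TreeMap<String, Integer> perBrandCounts = new TreeMap<>(); // hypothetical data
        perBrandCounts.put("Nokia", 3);
        perBrandCounts.put("Apple", 5);

        ArrayList<String> brands = new ArrayList<>(perBrandCounts.keySet());
        System.out.println(brands); // [Apple, Nokia] -- already in ascending key order

        Collections.sort(brands);   // changes nothing here; keySet() was already sorted
        System.out.println(brands); // [Apple, Nokia]
    }
}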
From source file:model.plate.ANATestResult.java
public void diagnose2(double control) {
    //titer,positivity,r2,pattern
    final Comparator<DiagnosisConstant.ANA_Titer> titerComparator = new Comparator<DiagnosisConstant.ANA_Titer>() {
        @Override
        public int compare(DiagnosisConstant.ANA_Titer t, DiagnosisConstant.ANA_Titer t1) {
            if (t.getId() < 0) {
                throw new RuntimeException("Titer: " + t.name());
            }
            if (t1.getId() < 0) {
                throw new RuntimeException("Titer: " + t.name());
            }
            if (t.getId() > 6) {
                throw new RuntimeException("Titer: " + t.name());
            }
            if (t1.getId() > 6) {
                throw new RuntimeException("Titer: " + t.name());
            }
            return t.getId() < t1.getId() ? -1 : t.getId() == t1.getId() ? 0 : 1;
        }
    };
    TreeMap<DiagnosisConstant.ANA_Titer, Double> decreasingSignals = new TreeMap<>(titerComparator);
    decreasingSignals.putAll(signals);

    SimpleRegression regression = new SimpleRegression();
    Iterator<DiagnosisConstant.ANA_Titer> it = decreasingSignals.keySet().iterator();
    DiagnosisConstant.ANA_Titer t;
    Double signal;
    while (it.hasNext()) {
        t = it.next();
        signal = decreasingSignals.get(t);
        if (signal == null)
            continue;
        // posCtrl=signal>posCtrl?signal:posCtrl; ??1:40,
        regression.addData((double) t.getId(), signal);
        if (signal > control) { // * PlateConstants.PositiveCutOffRatio
            titer = t;
        }
    }
    r2 = regression.getRSquare();
    if (r2 < PlateConstants.R2_TH) {
        warningMessage.add(WarningMessage.SampleLinearity.getId());
    }

    if (titer == null)
        titer = DiagnosisConstant.ANA_Titer.ANA_LESS_1_40;

    if (DiagnosisConstant.ANA_Titer.ANA_LESS_1_40.equals(titer) || titer.getId() < 2) { //1:40
        System.out.println();
        for (DiagnosisConstant.ANA_Titer t1 : decreasingSignals.keySet()) {
            System.out.println(
                    this.julien_barcode + " Sample vs Control (th=" + PlateConstants.PositiveCutOffRatio + ")");
            System.out.println(t1 + ": signal=" + decreasingSignals.get(t1) + "\tv.s.\tcontrol=" + control
                    + " (" + decreasingSignals.get(t1) / control + ")");
        }
        System.out.println();
        positivity = DiagnosisConstant.ANA_Result.NEGATIVE;
        warningMessage.add(WarningMessage.WeakPositive.getId());
    } else {
        positivity = DiagnosisConstant.ANA_Result.POSITIVE;
    }
}
From source file:chatbot.Chatbot.java
/*****************************************************************************************************
 *
 * @param input
 * @return
 */
public String matchBestInput(String input) {
    ArrayList<String> result = new ArrayList<>();
    TreeMap<Float, ArrayList<Integer>> sortedSim = matchInputFull(input);
    if (sortedSim == null || sortedSim.keySet().size() < 1 || sortedSim.lastKey() < .1) {
        return "I don't know";
    }
    Object[] floats = sortedSim.keySet().toArray();
    int numClusters = 3;
    if (floats.length < numClusters)
        numClusters = floats.length;
    float[] floatarray = new float[floats.length];
    for (int i = 0; i < floats.length; i++)
        floatarray[i] = (float) floats[i];
    ArrayList<ArrayList<Float>> res = KMeans.run(floatarray.length, floatarray, numClusters);
    ArrayList<Float> topCluster = res.get(res.size() - 2);
    while (res.get(res.size() - 2).size() > 3 && numClusters < floats.length) {
        numClusters++;
        res = KMeans.run(floatarray.length, floatarray, numClusters);
        topCluster = res.get(res.size() - 2);
        //System.out.println("Info in TFIDF.matchBestInput(): " + res);
        //System.out.println("Info in TFIDF.matchBestInput(): " + topCluster);
    }
    for (int i = 0; i < topCluster.size(); i++) {
        ArrayList<Integer> temp = sortedSim.get(topCluster.get(i));
        for (int j = 0; j < temp.size(); j++)
            result.add(lines.get(temp.get(j).intValue()));
    }
    ArrayList<String> resultNoProfanity = profanityFilter(result);
    ArrayList<String> rankedResponses = rankResponses(resultNoProfanity, input);
    return chooseBestResponse(rankedResponses);
}