List of usage examples for java.util.Map.clear()
void clear();
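The project examples below are fairly involved, so here is first a minimal, self-contained sketch of the contract of clear(): it removes every mapping from the map (an optional operation), leaving the same, still-usable instance empty. The class name and the sample keys and values are made up for illustration.

import java.util.HashMap;
import java.util.Map;

public class MapClearExample {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();
        counts.put("apples", 3);
        counts.put("oranges", 5);
        System.out.println(counts.size());        // 2

        // clear() removes all entries but keeps the map instance usable
        counts.clear();
        System.out.println(counts.isEmpty());     // true
        System.out.println(counts.get("apples")); // null

        // the cleared map can be repopulated and reused
        counts.put("pears", 1);
        System.out.println(counts);               // {pears=1}
    }
}

Several of the examples below rely on exactly this reuse pattern: a single map (queryConstraints, requestParams, valuesMap, and so on) is cleared and refilled between queries or iterations instead of allocating a new instance each time.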
From source file: com.esd.ps.RegListController.java

/**
 * Exports registrations for the current district to an Excel file and returns its download URL.
 *
 * @param beginDate
 * @param endDate
 * @param request
 * @param session
 * @return
 */
@RequestMapping(value = "/export", method = RequestMethod.POST)
@ResponseBody
public Map<String, Object> export(String beginDate, String endDate, HttpServletRequest request,
        HttpSession session) {
    Map<String, Object> map = new HashMap<>();
    boolean b = true;
    String FileDownloadPath = "null";
    int districtId = Integer.parseInt(session.getAttribute(Constants.ID).toString());
    SimpleDateFormat sdf = new SimpleDateFormat(Constants.DATE_FORMAT);
    SimpleDateFormat formatter = new SimpleDateFormat(Constants.DATE_FORMAT_HAVE_LINE);
    try {
        String url = request.getSession().getServletContext().getRealPath("/");
        File downloadPath = new File(url + Constants.EXCELTEMP);
        if (!(downloadPath.exists())) {
            downloadPath.mkdir();
        }
        String uuid = UUID.randomUUID().toString();
        String pinyin = session.getAttribute(Constants.PINYIN).toString();
        String exportPath = null, fileName = null;
        // check for null before calling isEmpty() to avoid a NullPointerException
        if (beginDate == null || beginDate.isEmpty() || beginDate.trim().length() == 0) {
            fileName = pinyin + "_" + sdf.format(new Date());
        } else {
            fileName = sdf.format(sdf.parse(beginDate)) + "-" + sdf.format(sdf.parse(endDate)) + "_"
                    + pinyin + "_" + sdf.format(new Date());
            Date myDate = formatter.parse(endDate);
            Calendar c = Calendar.getInstance();
            c.setTime(myDate);
            c.add(Calendar.DATE, 1);
            myDate = c.getTime();
            endDate = sdf.format(myDate);
        }
        exportPath = downloadPath + File.separator + fileName + ".xls";
        List<Registration> list = registrationService.getAllByTimeAndDistrictId(districtId, beginDate, endDate);
        b = PoiCreateExcel.createRegistrationExcel(exportPath, list);
        if (b) {
            String destPath = request.getLocalAddr() + ":" + request.getLocalPort() + request.getContextPath();
            FileDownloadPath = "http://" + destPath + "/" + Constants.EXCELTEMP + "/" + fileName + ".xls";
        }
    } catch (ParseException e) {
        e.printStackTrace();
    }
    map.clear();
    map.put(Constants.WRONGPATH, FileDownloadPath);
    return map;
}
From source file: net.sf.taverna.t2.provenance.lineageservice.EventProcessor.java

/**
 * fills in the VBs for the global inputs -- this removes the need for explicit events
 * that account for these value bindings...
 */
public void patchTopLevelnputs() {

    // for each input I to topLevelDataflow:
    // pick first outgoing arc with sink P:X
    // copy value X to I -- this can be a collection, so copy everything

    // get all global input vars

    // logger.info("\n\n BACKPATCHING GLOBAL INPUTS with dataflowDepth = "+dataflowDepth+"*******\n");

    List<Var> inputs = null;
    try {
        inputs = getPq().getInputVars(topLevelDataflowName, topLevelDataflowID, getWfInstanceID());

        for (Var input : inputs) {
            // logger.info("global input: "+input.getVName());
            Map<String, String> queryConstraints = new HashMap<String, String>();

            queryConstraints.put("sourceVarNameRef", input.getVName());
            queryConstraints.put("sourcePNameRef", input.getPName());

            List<Arc> outgoingArcs = getPq().getArcs(queryConstraints);

            // any arc will do, use the first
            String targetPname = outgoingArcs.get(0).getSinkPnameRef();
            String targetVname = outgoingArcs.get(0).getSinkVarNameRef();

            // logger.info("copying values from ["+targetPname+":"+targetVname+"] for instance ID: ["+wfInstanceID+"]");

            queryConstraints.clear();
            queryConstraints.put("varNameRef", targetVname);
            queryConstraints.put("V.pNameRef", targetPname);
            queryConstraints.put("VB.wfInstanceRef", getWfInstanceID());
            queryConstraints.put("V.wfInstanceRef", topLevelDataflowID);

            List<VarBinding> VBs = getPq().getVarBindings(queryConstraints);

            // logger.info("found the following VBs:");
            for (VarBinding vb : VBs) {
                // logger.info(vb.getValue());

                // insert VarBinding back into VB with the global input varname
                vb.setPNameRef(input.getPName());
                vb.setVarNameRef(input.getVName());

                getPw().addVarBinding(vb);

                // logger.info("added");
            }
        }
    } catch (SQLException e) {
        logger.warn("Patch top level inputs problem for provenance", e);
    } catch (IndexOutOfBoundsException e) {
        logger.error("Could not patch top level", e);
    }
}
From source file: com.krawler.spring.crm.leadModule.crmLeadCommonController.java

public JSONObject getModuleRecord(HttpServletRequest request, String moduleName, String companyid,
        JSONObject leadjObj, Map<String, Object> moduleFields, Map<String, Object> ColumnMappedList)
        throws ServiceException, JSONException, DataInvalidateException, SessionExpiredException {
    KwlReturnObject kmsg = null;
    JSONArray leadcustomFieldJArray = leadjObj.getJSONArray("customfield");
    JSONArray moduleCustomfieldArray = new JSONArray();
    JSONObject modulejObj = new JSONObject();
    moduleFields.clear();
    ColumnMappedList.clear();
    try {
        ArrayList filter_params = new ArrayList();
        ArrayList filter_names = new ArrayList();
        filter_names.add("m.company.companyID");
        filter_params.add(companyid);
        filter_names.add("m.modulefield.moduleName");
        filter_params.add(moduleName);
        HashMap<String, Object> requestParams = new HashMap<String, Object>();
        requestParams = new HashMap<String, Object>();
        requestParams.put("filter_names", filter_names);
        requestParams.put("filter_params", filter_params);
        kmsg = crmCommonDAOObj.getMappedHeaders(requestParams);
        HashMap<String, String> customFieldMap = new HashMap<String, String>();
        for (int cnt = 0; cnt < leadcustomFieldJArray.length(); cnt++) {
            JSONObject custfindObj = leadcustomFieldJArray.getJSONObject(cnt);
            String fieldname = custfindObj.getString(Constants.Crm_custom_field);
            String fielddbname = custfindObj.getString(fieldname);
            customFieldMap.put(custfindObj.getString("filedid"), custfindObj.getString(fielddbname));
        }
        List list = kmsg.getEntityList();
        Iterator ite = list.iterator();
        StringBuilder refNotFoundStr = new StringBuilder();
        while (ite.hasNext()) {
            boolean refNotFoundFlag = false;
            LeadConversionMappings mapobj = (LeadConversionMappings) ite.next();
            DefaultHeader leadDefault = mapobj.getLeadfield();
            DefaultHeader moduleDefault = mapobj.getModulefield();
            String newVal = "";
            if (leadDefault.isCustomflag()) {
                String fieldId = leadDefault.getPojoheadername();
                newVal = customFieldMap.get(fieldId);
                if (leadDefault.getXtype().equals("4")) { // normal custom combo
                    ArrayList filterNames = new ArrayList<String>();
                    ArrayList filterValues = new ArrayList<Object>();
                    filterNames.add("id");
                    filterValues.add(newVal);
                    filterNames.add("fieldid");
                    filterValues.add(fieldId);
                    filterNames.add(LeadConstants.Crm_deleteflag);
                    filterValues.add(0);
                    requestParams.clear();
                    requestParams.put("companyid", companyid);
                    requestParams.put("addMissingMaster", "false");
                    List li = importDao.getCustomComboID(requestParams, "value", filterNames, filterValues);
                    if (li.size() > 0) {
                        newVal = li.get(0).toString();
                    } else {
                        refNotFoundFlag = true;
                    }
                } else if (leadDefault.getXtype().equals("7")) {
                    String multiVal = StringUtil.getmultiSelectedColumnValue(newVal);
                    newVal = "";
                    ArrayList filterNames = new ArrayList<String>();
                    ArrayList filterValues = new ArrayList<Object>();
                    filterNames.add("INid");
                    filterValues.add(multiVal);
                    filterNames.add("fieldid");
                    filterValues.add(fieldId);
                    filterNames.add(LeadConstants.Crm_deleteflag);
                    filterValues.add(0);
                    requestParams.clear();
                    requestParams.put("companyid", companyid);
                    requestParams.put("addMissingMaster", "false");
                    List<String> li = importDao.getCustomComboID(requestParams, "value", filterNames, filterValues);
                    StringBuilder newValbuild = new StringBuilder();
                    for (String s : li) {
                        newValbuild.append(s).append(",");
                    }
                    if (newValbuild.length() > 0) {
                        newVal = newValbuild.substring(0, newValbuild.length() - 1);
                    } else {
                        refNotFoundFlag = true;
                    }
                } else if (leadDefault.getXtype().equals("8")) { // reference custom combo
                    ArrayList filterNames = new ArrayList<String>();
                    ArrayList filterValues = new ArrayList<Object>();
                    filterNames.add(leadDefault.getRefFetchColumn_HbmName());
                    filterValues.add(newVal);
                    requestParams.clear();
                    requestParams.put("companyid", companyid);
                    requestParams.put("defaultheader", moduleDefault.getDefaultHeader().trim());
                    requestParams.put("addMissingMaster", "false");
                    List<String> li = importDao.getRefModuleData(requestParams,
                            leadDefault.getRefModule_PojoClassName(), leadDefault.getRefDataColumn_HbmName(),
                            leadDefault.getConfigid(), filterNames, filterValues);
                    if (li.size() > 0) {
                        newVal = li.get(0);
                    } else {
                        refNotFoundFlag = true;
                    }
                }
            } else {
                newVal = leadjObj.getString(leadDefault.getRecordname());
                if (!StringUtil.isNullOrEmpty(newVal)) { // get value from default column
                    if (leadDefault.getXtype().equals("4")) { // Single-Select drop down case handled : Kuldeep Singh
                        ArrayList filterNames = new ArrayList<String>();
                        ArrayList filterValues = new ArrayList<Object>();
                        filterNames.add(leadDefault.getRefFetchColumn_HbmName());
                        filterValues.add(newVal);
                        requestParams.clear();
                        requestParams.put("companyid", companyid);
                        requestParams.put("defaultheader", moduleDefault.getDefaultHeader().trim());
                        List<String> li = importDao.getRefModuleData(requestParams,
                                leadDefault.getRefModule_PojoClassName(), leadDefault.getRefDataColumn_HbmName(),
                                leadDefault.getConfigid(), filterNames, filterValues);
                        if (li.size() > 0) {
                            newVal = li.get(0);
                        } else {
                            refNotFoundFlag = true;
                        }
                    } else if (leadDefault.getXtype().equals("7")) { // Multi-Select drop down case handled : Kuldeep Singh
                        String multiVal = StringUtil.getmultiSelectedColumnValue(newVal);
                        newVal = "";
                        ArrayList filterNames = new ArrayList<String>();
                        ArrayList filterValues = new ArrayList<Object>();
                        filterNames.add("IN" + leadDefault.getRefFetchColumn_HbmName());
                        filterValues.add(multiVal);
                        requestParams.clear();
                        requestParams.put("companyid", companyid);
                        requestParams.put("defaultheader", moduleDefault.getDefaultHeader().trim());
                        List<String> li = importDao.getRefModuleData(requestParams,
                                leadDefault.getRefModule_PojoClassName(), leadDefault.getRefDataColumn_HbmName(),
                                leadDefault.getConfigid(), filterNames, filterValues);
                        StringBuilder newValbuild = new StringBuilder();
                        for (String s : li) {
                            newValbuild.append(s).append(",");
                        }
                        if (newValbuild.length() > 0) {
                            newVal = newValbuild.substring(0, newValbuild.length() - 1);
                        } else {
                            refNotFoundFlag = true;
                        }
                    }
                }
            }
            if (moduleDefault.isCustomflag()) {
                if (!StringUtil.isNullOrEmpty(newVal)) {
                    String moduleDefaultfieldId = moduleDefault.getPojoheadername();
                    if (moduleDefault.getXtype().equals("4")) { // default to normal custom column
                        ArrayList filterNames = new ArrayList<String>();
                        ArrayList filterValues = new ArrayList<Object>();
                        filterNames.add("value");
                        filterValues.add(newVal);
                        filterNames.add("fieldid");
                        filterValues.add(moduleDefaultfieldId);
                        filterNames.add(LeadConstants.Crm_deleteflag);
                        filterValues.add(0);
                        requestParams.clear();
                        requestParams.put("companyid", companyid);
                        requestParams.put("addMissingMaster", "false");
                        List<String> li = importDao.getCustomComboID(requestParams, "id", filterNames, filterValues);
                        if (li.size() > 0) {
                            newVal = li.get(0);
                        } else {
                            refNotFoundFlag = true;
                        }
                    } else if (moduleDefault.getXtype().equals("7")) {
                        String multiVal = StringUtil.getmultiSelectedColumnValue(newVal);
                        newVal = "";
                        ArrayList filterNames = new ArrayList<String>();
                        ArrayList filterValues = new ArrayList<Object>();
                        filterNames.add("INvalue");
                        filterValues.add(multiVal);
                        filterNames.add("fieldid");
                        filterValues.add(moduleDefaultfieldId);
                        filterNames.add(LeadConstants.Crm_deleteflag);
                        filterValues.add(0);
                        requestParams.clear();
                        requestParams.put("companyid", companyid);
                        requestParams.put("addMissingMaster", "false");
                        List<String> li = importDao.getCustomComboID(requestParams, "id", filterNames, filterValues);
                        StringBuilder newValbuild = new StringBuilder();
                        for (String s : li) {
                            newValbuild.append(s).append(",");
                        }
                        if (newValbuild.length() > 0) {
                            newVal = newValbuild.substring(0, newValbuild.length() - 1);
                        } else {
                            refNotFoundFlag = true;
                        }
                    } else if (moduleDefault.getXtype().equals("8")) { // For default to reference custom combo
                        ArrayList filterNames = new ArrayList<String>();
                        ArrayList filterValues = new ArrayList<Object>();
                        filterNames.add(moduleDefault.getRefDataColumn_HbmName());
                        filterValues.add(newVal);
                        requestParams.clear();
                        requestParams.put("companyid", companyid);
                        requestParams.put("defaultheader", moduleDefault.getDefaultHeader().trim());
                        requestParams.put("addMissingMaster", "false");
                        List li = importDao.getRefModuleData(requestParams,
                                moduleDefault.getRefModule_PojoClassName(), moduleDefault.getRefFetchColumn_HbmName(),
                                moduleDefault.getConfigid(), filterNames, filterValues);
                        if (li.size() > 0) {
                            newVal = li.get(0).toString();
                        } else {
                            refNotFoundFlag = true;
                        }
                    }
                    if (refNotFoundStr.length() == 0 && !refNotFoundFlag) {
                        JSONObject tempJobj = new JSONObject();
                        tempJobj.put("filedid", moduleDefaultfieldId);
                        tempJobj.put(Constants.Crm_custom_field, moduleDefault.getDefaultHeader());
                        tempJobj.put(moduleDefault.getDefaultHeader(), moduleDefault.getDbcolumnname());
                        tempJobj.put(moduleDefault.getDbcolumnname(), newVal);
                        tempJobj.put("refcolumn_name", Constants.Custom_Column_Prefix + moduleDefault.getDbcolumnrefname());
                        tempJobj.put("xtype", moduleDefault.getXtype());
                        moduleCustomfieldArray.put(tempJobj);
                    } else {
                        if (refNotFoundFlag) {
                            refNotFoundStr.append(moduleDefault.getDefaultHeader()).append(": ").append(newVal)
                                    .append(", <BR />");
                        }
                    }
                }
                moduleFields.put(moduleDefault.getRecordname(), newVal);
            } else {
                if (!StringUtil.isNullOrEmpty(newVal)) {
                    if (moduleDefault.getXtype().equals("4")) { // Single-Select drop down case handled : Kuldeep Singh
                        ArrayList filterNames = new ArrayList<String>();
                        ArrayList filterValues = new ArrayList<Object>();
                        filterNames.add(moduleDefault.getRefDataColumn_HbmName());
                        filterValues.add(newVal);
                        requestParams.clear();
                        requestParams.put("companyid", companyid);
                        requestParams.put("defaultheader", moduleDefault.getDefaultHeader().trim());
                        requestParams.put("addMissingMaster", "false");
                        List li = importDao.getRefModuleData(requestParams,
                                moduleDefault.getRefModule_PojoClassName(), moduleDefault.getRefFetchColumn_HbmName(),
                                moduleDefault.getConfigid(), filterNames, filterValues);
                        if (li.size() > 0) {
                            newVal = li.get(0).toString();
                        } else {
                            refNotFoundFlag = true;
                        }
                    } else if (moduleDefault.getXtype().equals("7")) { // Multi-Select drop down case handled : Kuldeep Singh
                        String multiVal = StringUtil.getmultiSelectedColumnValue(newVal);
                        newVal = "";
                        ArrayList filterNames = new ArrayList<String>();
                        ArrayList filterValues = new ArrayList<Object>();
                        filterNames.add("IN" + moduleDefault.getRefDataColumn_HbmName());
                        filterValues.add(multiVal);
                        requestParams.clear();
                        requestParams.put("companyid", companyid);
                        requestParams.put("defaultheader", moduleDefault.getDefaultHeader().trim());
                        requestParams.put("addMissingMaster", "false");
                        List<String> li = importDao.getRefModuleData(requestParams,
                                moduleDefault.getRefModule_PojoClassName(), moduleDefault.getRefFetchColumn_HbmName(),
                                moduleDefault.getConfigid(), filterNames, filterValues);
                        StringBuilder newValbuild = new StringBuilder();
                        for (String s : li) {
                            newValbuild.append(s).append(",");
                        }
                        if (newValbuild.length() > 0) {
                            newVal = newValbuild.substring(0, newValbuild.length() - 1);
                        } else {
                            refNotFoundFlag = true;
                        }
                    }
                }
                moduleFields.put(moduleDefault.getRecordname(), newVal);
                if (refNotFoundStr.length() == 0 && !refNotFoundFlag) {
                    modulejObj.put(moduleDefault.getRecordname(), newVal);
                } else {
                    if (refNotFoundFlag) {
                        refNotFoundStr.append(moduleDefault.getDefaultHeader()).append(": ").append(newVal)
                                .append(", <BR />");
                    }
                }
            }
            ColumnMappedList.put(moduleDefault.getRecordname(), leadDefault); // mapped module record name with lead Display name
        }
        if (refNotFoundStr.length() == 0) {
            modulejObj.put("customfield", moduleCustomfieldArray);
            modulejObj.put("success", true);
        } else {
            modulejObj.put("success", false);
            modulejObj.put("msg", refNotFoundStr.substring(0, refNotFoundStr.length() - 1));
        }
    } catch (Exception ex) {
        logger.warn(ex.getMessage(), ex);
    }
    return modulejObj;
}
From source file: com.sldeditor.test.unit.tool.vector.VectorToolTest.java

@Test
public void testVectorToolDBDataSource() {
    TestMissingSLDAttributes testAttribute = new TestMissingSLDAttributes();
    List<CheckAttributeInterface> checkList = new ArrayList<CheckAttributeInterface>();
    checkList.add(testAttribute);
    CheckAttributeFactory.setOverideCheckList(checkList);

    String testsldfile = "/polygon/sld/polygon_polygonwithdefaultlabel.sld";

    TestSLDEditor testSLDEditor = null;
    try {
        testSLDEditor = TestSLDEditor.createAndShowGUI2(null, null, true, null);
    } catch (Exception e) {
        e.printStackTrace();
    }

    RenderPanelImpl.setUnderTest(true);

    InputStream inputStream = VectorToolTest.class.getResourceAsStream(testsldfile);

    if (inputStream == null) {
        Assert.assertNotNull("Failed to find sld test file : " + testsldfile, inputStream);
    } else {
        File f = null;
        try {
            f = stream2file(inputStream);
            try {
                testSLDEditor.openFile(f.toURI().toURL());
            } catch (NullPointerException nullException) {
                nullException.printStackTrace();
                StackTraceElement[] stackTraceElements = nullException.getStackTrace();
                System.out.println(stackTraceElements[0].getMethodName());
            }
            f.delete();
        } catch (IOException e1) {
            e1.printStackTrace();
        }
    }

    // Fields extracted from the SLD file
    DataSourceInterface dataSource = DataSourceFactory.createDataSource(null);
    Collection<PropertyDescriptor> propertyList = dataSource.getPropertyDescriptorList();
    assertEquals(2, propertyList.size());
    Map<String, PropertyDescriptor> map = new HashMap<String, PropertyDescriptor>();
    for (PropertyDescriptor property : propertyList) {
        map.put(property.getName().getLocalPart(), property);
    }
    AttributeDescriptor name = (AttributeDescriptor) map.get("name");
    assertNotNull(name);
    GeometryDescriptor geometry = (GeometryDescriptor) map.get("geom");
    assertNotNull(geometry);

    File tempFolder = Files.createTempDir();
    TestVectorTool vectorTool = new TestVectorTool(testSLDEditor);
    try {
        InputStream gpkgInputStream = VectorToolTest.class
                .getResourceAsStream("/test/sld_cookbook_polygon.gpkg");

        final File gpkgFile = new File(tempFolder, "sld_cookbook_polygon.gpkg");
        try (FileOutputStream out = new FileOutputStream(gpkgFile)) {
            IOUtils.copy(gpkgInputStream, out);
        }

        DatabaseConnection databaseConnection = DatabaseConnectionFactory
                .getConnection(gpkgFile.getAbsolutePath());
        DatabaseFeatureClassNode dbFCTreeNode = new DatabaseFeatureClassNode(null, databaseConnection,
                "sld_cookbook_polygon");

        DatabaseConnectionManager.getInstance().addNewConnection(null, databaseConnection);
        vectorTool.testSetDataSource(dbFCTreeNode);

        dataSource = DataSourceFactory.createDataSource(null);
        propertyList = dataSource.getPropertyDescriptorList();

        assertEquals(3, propertyList.size());
        map.clear();
        for (PropertyDescriptor property : propertyList) {
            map.put(property.getName().getLocalPart(), property);
        }
        name = (AttributeDescriptor) map.get("name");
        assertNotNull(name);
        geometry = (GeometryDescriptor) map.get("geometry");
        assertNotNull(geometry);
        AttributeDescriptor pop = (AttributeDescriptor) map.get("pop");
        assertNotNull(pop);

        // Create SLD from geopackage layer
        vectorTool.testImportFeatureClass(dbFCTreeNode);
        dataSource = DataSourceFactory.createDataSource(null);
        propertyList = dataSource.getPropertyDescriptorList();

        assertEquals(3, propertyList.size());
        map.clear();
        for (PropertyDescriptor property : propertyList) {
            map.put(property.getName().getLocalPart(), property);
        }
        name = (AttributeDescriptor) map.get("name");
        assertNotNull(name);
        geometry = (GeometryDescriptor) map.get("geometry");
        assertNotNull(geometry);
        pop = (AttributeDescriptor) map.get("pop");
        assertNotNull(pop);

        // Release locks
        dataSource.reset();
    } catch (IOException e) {
        e.printStackTrace();
        fail();
    }

    // Tidy up so the remaining unit tests are ok
    JFrame frame = testSLDEditor.getApplicationFrame();
    frame.dispatchEvent(new WindowEvent(frame, WindowEvent.WINDOW_CLOSING));
    testSLDEditor = null;
    clearDown();

    // Delete the shape files we extracted
    purgeDirectory(tempFolder);
}
From source file: com.joliciel.csvLearner.maxent.MaxentBestFeatureObserver.java

@Override
public void onTerminate() {
    bestFeaturesPerOutcome = new TreeMap<String, List<NameValuePair>>();
    totalPerOutcome = new TreeMap<String, Double>();
    bestFeatureTotalPerOutcome = new TreeMap<String, Double>();
    filePercentagePerOutcome = new TreeMap<String, Map<String, Double>>();
    fileNames = new TreeSet<String>();
    for (Entry<String, Map<String, Double>> entry : featureMap.entrySet()) {
        String outcome = entry.getKey();
        LOG.debug("outcome: " + outcome);
        Map<String, Double> featureTotals = entry.getValue();
        Map<String, Double> fileTotals = new TreeMap<String, Double>();
        PriorityQueue<NameValuePair> heap = new PriorityQueue<NameValuePair>(featureTotals.size(),
                new NameValueDescendingComparator());
        double grandTotal = 0.0;
        for (Entry<String, Double> featureTotal : featureTotals.entrySet()) {
            NameValuePair pair = new NameValuePair(featureTotal.getKey(), featureTotal.getValue());
            heap.add(pair);
            grandTotal += featureTotal.getValue();
            String featureKey = featureTotal.getKey();
            if (featureKey.contains(CSVLearner.NOMINAL_MARKER))
                featureKey = featureKey.substring(0, featureKey.indexOf(CSVLearner.NOMINAL_MARKER));
            String fileName = this.featureToFileMap.get(featureKey);
            Double fileTotalObj = fileTotals.get(fileName);
            double fileTotal = fileTotalObj == null ? 0 : fileTotalObj.doubleValue();
            fileTotals.put(fileName, fileTotal + featureTotal.getValue());
        }
        List<NameValuePair> bestFeatures = new ArrayList<NameValuePair>();
        double bestFeatureTotal = 0.0;
        for (int i = 0; i < n; i++) {
            NameValuePair pair = heap.poll();
            if (pair == null)
                break;
            LOG.debug("Feature: " + pair.getName() + ", Total: " + pair.getValue());
            bestFeatures.add(pair);
            bestFeatureTotal += pair.getValue();
        }
        bestFeaturesPerOutcome.put(outcome, bestFeatures);
        totalPerOutcome.put(outcome, grandTotal);
        bestFeatureTotalPerOutcome.put(outcome, bestFeatureTotal);

        // convert the file totals to percentages
        for (Entry<String, Double> fileTotal : fileTotals.entrySet()) {
            double filePercentage = fileTotal.getValue() / grandTotal;
            fileTotal.setValue(filePercentage);
            fileNames.add(fileTotal.getKey());
        }
        filePercentagePerOutcome.put(outcome, fileTotals);

        featureTotals.clear();
    }
    featureMap.clear();
    featureMap = null;
}
From source file: com.datatorrent.lib.logs.MultiWindowDimensionAggregation.java

@Override
public void beginWindow(long arg0) {
    Map<String, Map<String, Number>> currentWindowMap = cacheOject.get(currentWindow);
    if (currentWindowMap == null) {
        currentWindowMap = new HashMap<String, Map<String, Number>>();
    } else {
        for (Map.Entry<String, Map<String, Number>> tupleEntry : currentWindowMap.entrySet()) {
            String tupleKey = tupleEntry.getKey();
            Map<String, Number> tupleValue = tupleEntry.getValue();
            int currentPattern = 0;
            for (Pattern pattern : patternList) {
                Matcher matcher = pattern.matcher(tupleKey);
                if (matcher.matches()) {
                    String currentPatternString = dimensionArrayString.get(currentPattern);
                    Map<String, KeyValPair<MutableDouble, Integer>> currentPatternMap = outputMap
                            .get(currentPatternString);
                    if (currentPatternMap != null) {
                        StringBuilder builder = new StringBuilder(matcher.group(2));
                        for (int i = 1; i < dimensionArray.get(currentPattern).length; i++) {
                            builder.append("," + matcher.group(i + 2));
                        }
                        KeyValPair<MutableDouble, Integer> currentDimensionKeyValPair = currentPatternMap
                                .get(builder.toString());
                        if (currentDimensionKeyValPair != null) {
                            currentDimensionKeyValPair.getKey()
                                    .add(0 - tupleValue.get(dimensionKeyVal).doubleValue());
                            currentDimensionKeyValPair.setValue(currentDimensionKeyValPair.getValue() - 1);
                            if (currentDimensionKeyValPair.getKey().doubleValue() == 0.0) {
                                currentPatternMap.remove(builder.toString());
                            }
                        }
                    }
                    break;
                }
                currentPattern++;
            }
        }
    }
    currentWindowMap.clear();
    if (patternList == null || patternList.isEmpty()) {
        setUpPatternList();
    }
}
From source file: com.pironet.tda.SunJDKParser.java

private int[] dumpBlockingMonitors(DefaultMutableTreeNode catLockingTree, MonitorMap mmap) {
    final Map<String, DefaultMutableTreeNode> directChildMap = new HashMap<>(); // Top level of our display model

    //******************************************************************
    // Figure out what threads are blocking and what threads are blocked
    //******************************************************************
    int blockedThreads = fillBlockingThreadMaps(mmap, directChildMap);
    int contendedLocks = directChildMap.size();

    //********************************************************************
    // Renormalize this from a flat tree (depth==1) into a structured tree
    //********************************************************************
    reNormalizeBlockingThreadTree(mmap, directChildMap);

    //********************************************************************
    // Recalculate the number of blocked threads and add remaining top-level threads to our display model
    //********************************************************************
    for (final Object o : directChildMap.entrySet()) {
        DefaultMutableTreeNode threadNode = (DefaultMutableTreeNode) ((Map.Entry) o).getValue();
        updateChildCount(threadNode, true);
        ((Category) catLockingTree.getUserObject()).addToCatNodes(threadNode);
    }

    directChildMap.clear();
    return new int[] { contendedLocks, blockedThreads };
}
From source file: com.inmobi.databus.partition.TestAbstractClusterReader.java

public void testReadFromStartTimeWithinStream() throws Exception {
    initializeMinList();
    initializePartitionCheckpointList();
    Map<Integer, PartitionCheckpoint> expectedDeltaPck = new HashMap<Integer, PartitionCheckpoint>();
    Calendar cal = Calendar.getInstance();
    cal.setTime(DatabusStreamWaitingReader.getDateFromStreamDir(streamDir, databusFiles[0].getParent()));
    cal.add(Calendar.MINUTE, 1);
    String fsUri = fs.getUri().toString();
    PartitionReaderStatsExposer prMetrics = new PartitionReaderStatsExposer(testStream, "c1",
            partitionId.toString(), consumerNumber, fsUri);
    preader = new PartitionReader(partitionId, partitionCheckpointList, fs, buffer, streamDir, conf,
            inputFormatClass, cal.getTime(), 1000, isDatabusData(), prMetrics, true, partitionMinList, null);
    preader.init();
    Assert.assertTrue(buffer.isEmpty());
    Assert.assertEquals(preader.getReader().getClass().getName(), ClusterReader.class.getName());
    Assert.assertEquals(((ClusterReader) preader.getReader()).getReader().getClass().getName(),
            DatabusStreamWaitingReader.class.getName());
    preader.execute();

    Date fromTime = cal.getTime();
    Date toTime = getTimeStampFromFile(databusFiles[1]);
    TestUtil.prepareExpectedDeltaPck(fromTime, toTime, expectedDeltaPck, null, streamDir, partitionMinList,
            partitionCheckpointList, true, false);
    TestUtil.assertBuffer(DatabusStreamWaitingReader.getHadoopStreamFile(fs.getFileStatus(databusFiles[1])),
            2, 0, 100, partitionId, buffer, isDatabusData(), expectedDeltaPck);
    expectedDeltaPck.clear();

    fromTime = getTimeStampFromFile(databusFiles[1]);
    toTime = getTimeStampFromFile(databusFiles[2]);
    TestUtil.prepareExpectedDeltaPck(fromTime, toTime, expectedDeltaPck, fs.getFileStatus(databusFiles[1]),
            streamDir, partitionMinList, partitionCheckpointList, false, false);
    TestUtil.assertBuffer(DatabusStreamWaitingReader.getHadoopStreamFile(fs.getFileStatus(databusFiles[2])),
            3, 0, 100, partitionId, buffer, isDatabusData(), expectedDeltaPck);

    Assert.assertTrue(buffer.take().getMessage() instanceof EOFMessage);
    Assert.assertTrue(buffer.isEmpty());
    Assert.assertNotNull(preader.getReader());
    Assert.assertEquals(preader.getReader().getClass().getName(), ClusterReader.class.getName());
    Assert.assertEquals(((ClusterReader) preader.getReader()).getReader().getClass().getName(),
            DatabusStreamWaitingReader.class.getName());
    Assert.assertEquals(prMetrics.getHandledExceptions(), 0);
    Assert.assertEquals(prMetrics.getMessagesReadFromSource(), 200);
    Assert.assertEquals(prMetrics.getMessagesAddedToBuffer(), 200);
    Assert.assertEquals(prMetrics.getWaitTimeUnitsNewFile(), 0);
    Assert.assertTrue(prMetrics.getCumulativeNanosForFetchMessage() > 0);
}
From source file: net.sf.appstatus.web.pages.StatusPage.java

public void doGetHTML(StatusWebHandler webHandler, HttpServletRequest req, HttpServletResponse resp)
        throws UnsupportedEncodingException, IOException {
    setup(resp, "text/html");
    ServletOutputStream os = resp.getOutputStream();

    Map<String, String> valuesMap = new HashMap<String, String>();
    List<ICheckResult> results = webHandler.getAppStatus().checkAll(req.getLocale());
    Collections.sort(results);
    boolean statusOk = true;
    int statusCode = 200;
    for (ICheckResult r : results) {
        if (r.getCode() != ICheckResult.OK && r.isFatal()) {
            resp.setStatus(500);
            statusCode = 500;
            statusOk = false;
            break;
        }
    }
    valuesMap.put("statusOk", String.valueOf(statusOk));
    valuesMap.put("statusCode", String.valueOf(statusCode));

    // STATUS TABLE
    StrBuilder sbStatusTable = new StrBuilder();
    if (HtmlUtils.generateBeginTable(sbStatusTable, results.size())) {
        HtmlUtils.generateHeaders(sbStatusTable, "", "Group", "Name", "Description", "Code", "Resolution");
        for (ICheckResult r : results) {
            HtmlUtils.generateRow(sbStatusTable, getStatus(r), r.getGroup(), r.getProbeName(),
                    r.getDescription(), String.valueOf(r.getCode()), r.getResolutionSteps());
        }
        HtmlUtils.generateEndTable(sbStatusTable, results.size());
    }
    valuesMap.put("statusTable", sbStatusTable.toString());

    // PROPERTIES TABLE
    StrBuilder sbPropertiesTable = new StrBuilder();
    Map<String, Map<String, String>> properties = webHandler.getAppStatus().getProperties();
    if (HtmlUtils.generateBeginTable(sbPropertiesTable, properties.size())) {
        HtmlUtils.generateHeaders(sbPropertiesTable, "", "Group", "Name", "Value");
        for (Entry<String, Map<String, String>> cat : properties.entrySet()) {
            String category = cat.getKey();
            for (Entry<String, String> r : cat.getValue().entrySet()) {
                HtmlUtils.generateRow(sbPropertiesTable, Resources.STATUS_PROP, category, r.getKey(),
                        r.getValue());
            }
        }
        HtmlUtils.generateEndTable(sbPropertiesTable, properties.size());
    }
    valuesMap.put("propertiesTable", sbPropertiesTable.toString());

    String content = HtmlUtils.applyLayout(valuesMap, PAGECONTENTLAYOUT);
    valuesMap.clear();
    valuesMap.put("content", content);
    os.write(getPage(webHandler, valuesMap).getBytes(ENCODING));
}