List of usage examples for java.util LinkedHashMap get
public V get(Object key)
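The examples below come from real projects and are fairly long; as a warm-up, here is a minimal, self-contained sketch of the method's contract: get returns the value mapped to the key (or null when the key is absent), and plain get calls do not disturb a LinkedHashMap's insertion order unless the map was constructed with accessOrder set to true.

import java.util.LinkedHashMap;
import java.util.Map;

public class LinkedHashMapGetDemo {
    public static void main(String[] args) {
        Map<String, Integer> map = new LinkedHashMap<>();
        map.put("first", 1);
        map.put("second", 2);

        // get returns the mapped value, or null when the key is absent
        System.out.println(map.get("second"));   // 2
        System.out.println(map.get("missing"));  // null

        // iteration order is insertion order; plain get calls do not change it
        System.out.println(map.keySet());        // [first, second]
    }
}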
From source file:org.cyberoam.iview.charts.Chart.java
/**
 * Writes the charts of a given report group to a PDF stream.
 *
 * @param out           the output stream the generated PDF is written to
 * @param reportGroupID specifies the chart group to be generated
 * @param startDate     specifies the start date
 * @param endDate       specifies the end date
 * @param limit         specifies the number of records per report to be written
 */
public static void generatePDFReportGroup(OutputStream out, int reportGroupID, String applianceID,
        String startDate, String endDate, String limit, int[] deviceIDs, HttpServletRequest request,
        LinkedHashMap paramMap) throws Exception {
    float width = 768;
    float height = 1024;
    float rec_hieght = 470;
    Rectangle pagesize = new Rectangle(768, 1024);
    Document document = new Document(pagesize, 30, 30, 30, 30);
    JFreeChart chart = null;
    SqlReader sqlReader = new SqlReader(false);
    //CyberoamLogger.sysLog.debug("pdf:"+pdfFileName);
    CyberoamLogger.sysLog.debug("reportGroupID:" + reportGroupID);
    CyberoamLogger.sysLog.debug("applianceID:" + applianceID);
    CyberoamLogger.sysLog.debug("startDate:" + startDate);
    CyberoamLogger.sysLog.debug("endDate:" + endDate);
    CyberoamLogger.sysLog.debug("limit:" + limit);
    try {
        //PdfWriter writer = PdfWriter.getInstance(document, response!=null ? response.getOutputStream():new FileOutputStream(pdfFileName));
        PdfWriter writer = PdfWriter.getInstance(document, out);
        writer.setPageEvent(new Chart());
        document.addAuthor("iView");
        document.addSubject("iView Report");
        document.open();
        PdfContentByte contentByte = writer.getDirectContent();
        ReportGroupBean reportGroupBean = ReportGroupBean.getRecordbyPrimarykey(reportGroupID);
        ArrayList reportList = reportGroupBean.getReportIdByReportGroupId(reportGroupID);
        ReportBean reportBean;
        ResultSetWrapper rsw = null;
        String seperator = System.getProperty("file.separator");
        //String path=System.getProperty("catalina.home") +seperator+"webapps" +seperator+"ROOT" + seperator + "images" + seperator + "iViewPDF.jpg";
        String path = InitServlet.contextPath + seperator + "images" + seperator + "iViewPDF.jpg";
        Image iViewImage = Image.getInstance(path);
        iViewImage.scaleAbsolute(750, 900);
        //iViewImage.scaleAbsolute(600,820);
        iViewImage.setAbsolutePosition(10, 10);
        document.add(iViewImage);
        document.add(new Paragraph("\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"));

        /*
         * Generating Table on the First Page of Report providing summary of Content
         */
        PdfPTable frontPageTable = new PdfPTable(2);
        PdfPCell dataCell;
        ReportGroupRelationBean reportGroupRelationBean;
        String reportName = "";
        Color tableHeadBackColor = new Color(150, 174, 190);
        Color tableContentBackColor = new Color(229, 232, 237);
        Color tableBorderColor = new Color(229, 232, 237);

        dataCell = new PdfPCell(new Phrase(new Chunk("Report Profile",
                FontFactory.getFont(FontFactory.HELVETICA_BOLD, 16, Font.PLAIN, new Color(255, 255, 255)))));
        dataCell.setBackgroundColor(tableHeadBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        /*
         * Getting dynamic title.
         */
        String title = "";
        if (paramMap != null) {
            title = paramMap.get("title").toString();
            paramMap.remove("title");
        }
        if (request != null)
            title = getFormattedTitle(request, reportGroupBean, true);

        dataCell = new PdfPCell(new Phrase(new Chunk(title,
                FontFactory.getFont(FontFactory.HELVETICA_BOLD, 16, Font.PLAIN, new Color(255, 255, 255)))));
        dataCell.setBackgroundColor(tableHeadBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        dataCell = new PdfPCell(new Phrase(new Chunk("Start Date",
                FontFactory.getFont(FontFactory.HELVETICA_BOLD, 12, Font.PLAIN, new Color(0, 0, 0)))));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        dataCell = new PdfPCell(new Phrase(startDate));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        dataCell = new PdfPCell(new Phrase(new Chunk("End Date",
                FontFactory.getFont(FontFactory.HELVETICA_BOLD, 12, Font.PLAIN, new Color(0, 0, 0)))));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        dataCell = new PdfPCell(new Phrase(endDate));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        dataCell = new PdfPCell(new Phrase(new Chunk("iView Server Time",
                FontFactory.getFont(FontFactory.HELVETICA_BOLD, 12, Font.PLAIN, new Color(0, 0, 0)))));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        java.util.Date currentDate = new java.util.Date();
        dataCell = new PdfPCell(new Phrase(currentDate.toString()));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        dataCell = new PdfPCell(new Phrase(new Chunk("Reports",
                FontFactory.getFont(FontFactory.HELVETICA_BOLD, 12, Font.PLAIN, new Color(0, 0, 0)))));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        int len = reportList.size();
        for (int k = 0; k < len; k++) {
            reportGroupRelationBean = (ReportGroupRelationBean) reportList.get(k);
            reportName += " " + (k + 1) + ". "
                    + ReportBean.getRecordbyPrimarykey(reportGroupRelationBean.getReportId()).getTitle() + "\n";
        }
        dataCell = new PdfPCell(new Phrase("\n" + reportName + "\n"));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        dataCell = new PdfPCell(new Phrase(new Chunk("Device Names (IP Address)",
                FontFactory.getFont(FontFactory.HELVETICA_BOLD, 12, Font.PLAIN, new Color(0, 0, 0)))));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        DeviceBean deviceBean = null;
        String deviceNameWithIP = "";
        if (deviceIDs != null) {
            for (int i = 0; i < deviceIDs.length; i++) {
                deviceBean = DeviceBean.getRecordbyPrimarykey(deviceIDs[i]);
                if (deviceBean != null) {
                    deviceNameWithIP += " " + (i + 1) + ". " + deviceBean.getName() + " (" + deviceBean.getIp()
                            + ")\n";
                }
            }
        }
        dataCell = new PdfPCell(new Phrase("\n" + deviceNameWithIP + "\n"));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        /*
         * Adding Table to PDF
         */
        document.add(frontPageTable);

        /*
         * Adding Charts and Table to PDF
         */
        for (int i = 0; i < reportList.size(); i++) {
            document.newPage();
            reportBean = ReportBean
                    .getRecordbyPrimarykey(((ReportGroupRelationBean) reportList.get(i)).getReportId());
            String query = null;
            if (request == null) {
                query = PrepareQuery.getQuery(reportBean, startDate, endDate, applianceID, null, null, "0",
                        limit, paramMap);
            } else {
                PrepareQuery prepareQuery = new PrepareQuery();
                query = prepareQuery.getQuery(reportBean, request);
            }
            CyberoamLogger.sysLog.debug("PDF:ReportID:" + reportBean.getReportId() + "Query->" + query);
            try {
                rsw = sqlReader.getInstanceResultSetWrapper(query);
            } catch (org.postgresql.util.PSQLException e) {
                if (query.indexOf("5min_ts_20") > -1) {
                    query = query.substring(0, query.indexOf("5min_ts_20")) + "4hr"
                            + query.substring(query.indexOf("5min_ts_20") + 16, query.length());
                    CyberoamLogger.appLog.debug("New query : " + query);
                    rsw = sqlReader.getInstanceResultSetWrapper(query);
                } else {
                    CyberoamLogger.appLog.error("Exception in AjaxController.java " + e, e);
                }
            } catch (Exception e) {
                CyberoamLogger.appLog.error("Exception in AjaxController.java " + e, e);
                rsw.close();
            }

            /*
             * PDF Rendering work starts here
             */
            for (int j = 0; j < (int) (rec_hieght / 16) + 1; j++) {
                document.add(new Paragraph("\n"));
            }
            // This fix is to resolve the problems associated with reports which don't have graphs.
            // If there is no graph associated with the report, there is no need to generate a chart for it.
            GraphBean graphBean = GraphBean.getRecordbyPrimarykey(reportBean.getReportId());
            //if(graphBean!=null)
            if (reportBean.getReportFormatId() != 2) {
                chart = Chart.getChart(reportBean.getReportId(), rsw, null);
                PdfTemplate pdfTemplate = contentByte.createTemplate(width, height);
                Graphics2D graphics2D = pdfTemplate.createGraphics(width, height);
                Rectangle2D rectangle = new Rectangle2D.Double(100, 85, 540, rec_hieght);
                chart.draw(graphics2D, rectangle);
                graphics2D.dispose();
                contentByte.addTemplate(pdfTemplate, 0, 0);
            } else {
                Paragraph p = new Paragraph(reportBean.getTitle() + "\n\n",
                        FontFactory.getFont(FontFactory.HELVETICA, 18, Font.BOLD));
                p.setAlignment("center");
                document.add(p);
            }
            // Retrieving PdfPTable
            PdfPTable pdfTable = getPdfPTable(reportBean, rsw);
            rsw.close();
            /*
             * Adding Table to PDF
             */
            document.add(pdfTable);
        }
        CyberoamLogger.appLog.info("*************Finishing Chart****************");
    } catch (Exception e) {
        CyberoamLogger.sysLog.debug("Chart.writeChartToPDF.e" + e.getMessage(), e);
    } finally {
        sqlReader.close();
    }
    document.close();
}
From source file:gate.util.reporting.DocTimeReporter.java
/**
 * Prints benchmark report in text format.
 *
 * @param reportContainer
 *          An Object of type LinkedHashMap&lt;String, Object&gt; containing the
 *          document names (with time in milliseconds) in hierarchical
 *          structure.
 * @param outputFile
 *          An object of type File representing the output report file.
 */
private void printToText(Object reportContainer, File outputFile) {
    ArrayList<String> printLines = new ArrayList<String>();
    @SuppressWarnings("unchecked")
    LinkedHashMap<String, Object> rcHash = (LinkedHashMap<String, Object>) reportContainer;
    String docs = "";
    if (maxDocumentInReport != ALL_DOCS) {
        if (allDocs.size() < maxDocumentInReport) {
            docs = Integer.toString(allDocs.size());
        } else {
            docs = Integer.toString(maxDocumentInReport);
        }
    } else {
        docs = "All";
    }
    printLines.add("=============================================================" + NL);
    if (PRMatchingRegex.equals(MATCH_ALL_PR_REGEX)) {
        printLines.add("Top " + docs + " expensive documents matching All PRs in " + pipelineName + NL);
    } else {
        if (matchingPRs.size() > 0) {
            printLines.add("Top " + docs + " expensive documents matching following PRs in " + pipelineName + NL);
            for (String pr : matchingPRs) {
                printLines.add("\t" + pr + NL);
            }
        } else {
            printLines.add("No PRs matched to search string \"" + getPRMatchingRegex() + "\"" + " in " + pipelineName);
            printLines.add(NL);
            printLines.add("=============================================================" + NL);
        }
    }
    if (allDocs.size() > 0) {
        printLines.add("=============================================================" + NL);
        printLines.add("Document Name" + "\t" + "Time (in seconds)" + "\t" + "%" + NL);
        printLines.add("-------------------------------------------------------------" + NL);
    }
    Iterator<String> i = rcHash.keySet().iterator();
    int count = 0;
    // Iterating over the report container
    while (i.hasNext()) {
        Object key = i.next();
        if (!((String) key).equals("total")) {
            int value = Integer.parseInt((String) rcHash.get(key));
            if (maxDocumentInReport == ALL_DOCS)
                printLines.add(key + "\t" + value / 1000.0 + "\t"
                        + Math.round(((value / globalTotal) * 100) * 10) / 10.0 + NL);
            else if (count < maxDocumentInReport)
                printLines.add(key + "\t" + value / 1000.0 + "\t"
                        + Math.round(((value / globalTotal) * 100) * 10) / 10.0 + NL);
        }
        count++;
    }
    if (allDocs.size() > 0) {
        printLines.add("-------------------------------------------------------------" + NL);
        printLines.add("Pipeline Total" + "\t" + globalTotal / 1000.0 + "\t" + 100 + NL + NL + NL);
    }
    BufferedWriter out = null;
    try {
        // Writing to report file
        out = new BufferedWriter(new FileWriter(outputFile, true));
        for (String line : printLines) {
            out.write(line);
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        try {
            if (out != null) {
                out.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
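The loop above iterates rcHash.keySet() and calls get once per key; because LinkedHashMap iterates in insertion order, the report rows come out in the order the documents were recorded. A minimal sketch of the same traversal using entrySet, which preserves that ordering while avoiding the per-key get:

import java.util.LinkedHashMap;
import java.util.Map;

public class ReportOrderDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> rcHash = new LinkedHashMap<>();
        rcHash.put("doc-b.xml", "1500");
        rcHash.put("doc-a.xml", "900");
        rcHash.put("total", "2400");

        // Same order as the keySet()/get() loop, one traversal instead of two
        for (Map.Entry<String, Object> e : rcHash.entrySet()) {
            if (!e.getKey().equals("total")) {
                int millis = Integer.parseInt((String) e.getValue());
                System.out.println(e.getKey() + "\t" + millis / 1000.0);
            }
        }
    }
}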
From source file:com.web.server.WebServer.java
/**
 * This method obtains the content executor which executes the executor services.
 *
 * @param deployDirectory
 * @param resource
 * @param httpHeaderClient
 * @param serverdigester
 * @return byte[]
 */
public byte[] ObtainContentExecutor(String deployDirectory, String resource, HttpHeaderClient httpHeaderClient,
        Digester serverdigester, Hashtable urlClassLoaderMap, ConcurrentHashMap servletMapping,
        com.web.server.HttpSessionServer session) {
    //System.out.println("In content Executor");
    String[] resourcepath = resource.split("/");
    //System.out.println("createDigester1");
    Method method = null;
    //System.out.println("createDigester2");
    com.web.server.Executors serverconfig;
    if (resourcepath.length > 1) {
        ////System.out.println(resource);
        try {
            ClassLoader oldCL = null;
            String urlresource = ObtainUrlFromResource(resourcepath);
            try {
                //System.out.println(servletMapping);
                //System.out.println(deployDirectory+"/"+resourcepath[1]);
                HttpSessionServer httpSession;
                logger.info(deployDirectory + "/" + resourcepath[1] + " "
                        + servletMapping.get(deployDirectory + "/" + resourcepath[1]));
                if (servletMapping.get(deployDirectory + "/" + resourcepath[1]) != null) {
                    WebAppConfig webAppConfig = (WebAppConfig) servletMapping
                            .get(deployDirectory + "/" + resourcepath[1]);
                    webAppConfig = webAppConfig.clone();
                    webAppConfig.setWebApplicationAbsolutePath(deployDirectory + "/" + resourcepath[1]);
                    WebClassLoader customClassLoader = null;
                    Class customClass = null;
                    customClassLoader = (WebClassLoader) urlClassLoaderMap
                            .get(deployDirectory + "/" + resourcepath[1]);
                    oldCL = Thread.currentThread().getContextClassLoader();
                    Thread.currentThread().setContextClassLoader(customClassLoader);
                    ConcurrentHashMap servletMappingsURL = webAppConfig.getServletMappingURL();
                    Enumeration urlPattern = servletMappingsURL.keys();
                    while (urlPattern.hasMoreElements()) {
                        String pattern = (String) urlPattern.nextElement();
                        Pattern r = Pattern.compile(pattern.replace("*", "(.*)"));
                        Matcher m = r.matcher(urlresource);
                        if (m.find()) {
                            urlresource = pattern;
                            break;
                        }
                    }
                    LinkedHashMap<String, Vector<FilterMapping>> filterMappings = webAppConfig
                            .getFilterMappingURL();
                    Set<String> filterMappingKeys = filterMappings.keySet();
                    Iterator<String> filterMappingRoller = filterMappingKeys.iterator();
                    Vector<FilterMapping> filterMapping = null;
                    while (filterMappingRoller.hasNext()) {
                        String pattern = (String) filterMappingRoller.next();
                        Pattern r = Pattern.compile(pattern.replace("*", "(.*)"));
                        Matcher m = r.matcher(urlresource);
                        if (m.find()) {
                            filterMapping = filterMappings.get(pattern);
                            break;
                        }
                    }
                    if (servletMappingsURL.get(urlresource) != null) {
                        ServletMapping servletMappings = (ServletMapping) servletMappingsURL.get(urlresource);
                        ConcurrentHashMap servlets = webAppConfig.getServlets();
                        Servlets servlet = (Servlets) servlets.get(servletMappings.getServletName());
                        HttpServlet httpServlet = null;
                        System.out.println("Session " + session);
                        if (session.getAttribute("SERVLETNAME:" + deployDirectory + "/" + resourcepath[1]
                                + servletMappings.getServletName()) != null) {
                            httpServlet = (HttpServlet) session.getAttribute("SERVLETNAME:" + deployDirectory
                                    + "/" + resourcepath[1] + servletMappings.getServletName());
                            httpServlet.init();
                        } else {
                            Class servletClass = customClassLoader.loadClass(servlet.getServletClass());
                            httpServlet = (HttpServlet) servletClass.newInstance();
                            httpServlet.init(new WebServletConfig(servlet.getServletName().trim(), webAppConfig,
                                    customClassLoader));
                            httpServlet.init();
                            session.setAttribute("SERVLETNAME:" + deployDirectory + "/" + resourcepath[1]
                                    + servletMappings.getServletName(), httpServlet);
                            //ClassLoaderUtil.closeClassLoader(customClassLoader);
                        }
                        if (httpHeaderClient.getHttpMethod().trim().toUpperCase().equals("GET")
                                || httpHeaderClient.getHttpMethod().trim().toUpperCase().equals("POST")) {
                            Response response = new Response(httpHeaderClient);
                            StringBuffer servletPath = new StringBuffer();
                            if (resourcepath.length > 1) {
                                int pathcount = 0;
                                for (String servPath : resourcepath) {
                                    if (pathcount > 1) {
                                        servletPath.append("/");
                                        servletPath.append(servPath);
                                    }
                                    pathcount++;
                                }
                            }
                            String servletpath = servletPath.toString();
                            if (servletpath.length() == 0)
                                servletpath = "/";
                            Request request = new Request(httpHeaderClient, session, servletpath,
                                    customClassLoader);
                            if (filterMapping != null) {
                                WebFilterChain webFilterChain = new WebFilterChain(httpServlet, webAppConfig,
                                        filterMapping, customClassLoader);
                                webFilterChain.doFilter(request, response);
                            } else {
                                httpServlet.service(request, response);
                            }
                            //System.out.println("RESPONSE="+new String(response.getResponse()));
                            //httpServlet.destroy();
                            response.flushBuffer();
                            return response.getResponse();
                        }
                        //httpServlet.destroy();
                    } else {
                        if (customClassLoader != null) {
                            Map map = customClassLoader.classMap;
                            if (map.get(urlresource) != null) {
                                Class jspBaseCls = customClassLoader.loadClass((String) map.get(urlresource));
                                HttpJspBase jspBase = (HttpJspBase) jspBaseCls.newInstance();
                                WebServletConfig servletConfig = new WebServletConfig();
                                servletConfig.getServletContext().setAttribute(
                                        "org.apache.tomcat.InstanceManager",
                                        new WebInstanceManager(urlresource));
                                //servletConfig.getServletContext().setAttribute(org.apache.tomcat.InstanceManager, arg1);
                                jspBase.init(servletConfig);
                                jspBase._jspInit();
                                Response response = new Response(httpHeaderClient);
                                StringBuffer servletPath = new StringBuffer();
                                if (resourcepath.length > 1) {
                                    int pathcount = 0;
                                    for (String servPath : resourcepath) {
                                        if (pathcount > 1) {
                                            servletPath.append("/");
                                            servletPath.append(servPath);
                                        }
                                        pathcount++;
                                    }
                                }
                                String servletpath = servletPath.toString();
                                if (servletpath.length() == 0)
                                    servletpath = "/";
                                jspBase._jspService(
                                        new Request(httpHeaderClient, session, servletpath, customClassLoader),
                                        response);
                                jspBase.destroy();
                                response.flushBuffer();
                                return response.getResponse();
                            }
                        }
                    }
                }
            } catch (Exception ex) {
                ex.printStackTrace();
            } finally {
                if (oldCL != null) {
                    Thread.currentThread().setContextClassLoader(oldCL);
                }
            }
            File file = new File(deployDirectory + "/" + resourcepath[1] + "/WEB-INF/executor-config.xml");
            if (!file.exists()) {
                return null;
            }
            WebClassLoader customClassLoader = (WebClassLoader) urlClassLoaderMap
                    .get(deployDirectory + "/" + resourcepath[1]);
            Class customClass = null;
            if ((file.isFile() && file.exists())) {
                synchronized (serverdigester) {
                    serverconfig = (com.web.server.Executors) serverdigester.parse(file);
                }
                ConcurrentHashMap urlMap = serverconfig.getExecutorMap();
                //System.out.println("ObtainUrlFromResource1");
                //logger.info("urlresource"+urlresource);
                Executor executor = (Executor) urlMap.get(urlresource);
                //System.out.println("ObtainUrlFromResource2"+executor);
                //System.out.println("custom class Loader1"+urlClassLoaderMap);
                //System.out.println("custom class Loader2"+customClassLoader);
                //System.out.println("CUSTOM CLASS lOADER path"+deployDirectory+"/"+resourcepath[1]);
                ////System.out.println("custom class loader" +customClassLoader);
                if (executor != null && customClassLoader != null) {
                    customClass = customClassLoader.loadClass(executor.getExecutorclass());
                    ExecutorInterface executorInstance = (ExecutorInterface) customClass.newInstance();
                    Object buffer = null;
                    if (httpHeaderClient.getHttpMethod().trim().toUpperCase().equals("GET")) {
                        buffer = executorInstance.doGet(httpHeaderClient);
                    } else if (httpHeaderClient.getHttpMethod().trim().toUpperCase().equals("POST")) {
                        buffer = executorInstance.doPost(httpHeaderClient);
                    }
                    if (executor.getResponseResource() != null) {
                        httpHeaderClient.setExecutorBuffer(buffer);
                        //System.out.println("Method:"+httpHeaderClient.getHttpMethod());
                        String resourceClass = (String) customClassLoader.getClassMap()
                                .get(executor.getResponseResource().trim());
                        customClass = customClassLoader.loadClass(resourceClass);
                        HttpJspBase jspBase = (HttpJspBase) customClass.newInstance();
                        WebServletConfig servletConfig = new WebServletConfig();
                        servletConfig.getServletContext().setAttribute("org.apache.tomcat.InstanceManager",
                                new WebInstanceManager(urlresource));
                        //servletConfig.getServletContext().setAttribute(org.apache.tomcat.InstanceManager, arg1);
                        jspBase.init(servletConfig);
                        jspBase._jspInit();
                        Response response = new Response(httpHeaderClient);
                        jspBase._jspService(new Request(httpHeaderClient, session, null, customClassLoader),
                                response);
                        jspBase.destroy();
                        response.flushBuffer();
                        return response.getResponse();
                    }
                    return buffer.toString().getBytes();
                }
            } else if (customClassLoader != null) {
                //System.out.println("url resource"+urlresource);
                String resourceClass = (String) customClassLoader.getClassMap().get(urlresource);
                //System.out.println(resourceClass);
                //System.out.println(customClassLoader.getClassMap());
                if (resourceClass == null)
                    return null;
                customClass = customClassLoader.loadClass(resourceClass);
                ExecutorInterface executorInstance = (ExecutorInterface) customClass.newInstance();
                Object buffer = executorInstance.doGet(httpHeaderClient);
                return buffer.toString().getBytes();
            }
            ////System.out.println("executor resource 1");
            //Object buffer = method.invoke(customClass.newInstance(), new Object[]{httpHeaderClient});
            //logger.info(buffer.toString());
        } catch (IOException | SAXException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (IllegalArgumentException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } /*catch (InvocationTargetException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (NoSuchMethodException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } */ catch (SecurityException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (InstantiationException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
    return null;
}
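The server above keeps filter mappings in a LinkedHashMap&lt;String, Vector&lt;FilterMapping&gt;&gt; and scans the keys with regular expressions, calling get on the first pattern that matches; because LinkedHashMap preserves insertion order, the first registered pattern wins. A stripped-down sketch of that first-match lookup (the patterns and filter names are illustrative):

import java.util.LinkedHashMap;
import java.util.List;
import java.util.regex.Pattern;

public class UrlPatternMatchDemo {
    public static void main(String[] args) {
        // Insertion order decides which pattern wins when several could match
        LinkedHashMap<String, List<String>> filterMappings = new LinkedHashMap<>();
        filterMappings.put("/admin/*", List.of("authFilter"));
        filterMappings.put("/*", List.of("loggingFilter"));

        String urlresource = "/admin/users";
        List<String> matched = null;
        for (String pattern : filterMappings.keySet()) {
            Pattern r = Pattern.compile(pattern.replace("*", "(.*)"));
            if (r.matcher(urlresource).find()) {
                matched = filterMappings.get(pattern); // fetch the chain for the first hit
                break;
            }
        }
        System.out.println(matched); // [authFilter]
    }
}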
From source file:com.mothsoft.alexis.dao.DocumentDaoImpl.java
@SuppressWarnings("unchecked")
private List<ImportantTerm> getImportantTerms(FullTextQuery fullTextQuery, int count, boolean filterStopWords) {
    final Long start = System.currentTimeMillis();
    final List<Object[]> results = fullTextQuery.list();
    final LinkedHashMap<String, Tuple<Integer, Float>> termCountMap = new LinkedHashMap<String, Tuple<Integer, Float>>();
    final FullTextSession fullTextSession = Search.getFullTextSession((Session) this.em.getDelegate());
    final SearchFactory searchFactory = fullTextSession.getSearchFactory();
    final IndexReaderAccessor ira = searchFactory.getIndexReaderAccessor();
    final IndexReader reader = ira.open(com.mothsoft.alexis.domain.Document.class);
    final IndexSearcher searcher = new IndexSearcher(reader);
    final List<ImportantTerm> importantTerms;
    final int numDocs;
    try {
        numDocs = reader.numDocs();
        Term luceneTerm = new Term(CONTENT_TEXT_FIELD_NAME);
        if (logger.isDebugEnabled()) {
            logger.debug(String.format("Found %d matching Lucene documents of %d in reader", results.size(),
                    numDocs));
        }
        // loop over all the matching documents
        for (final Object[] ith : results) {
            int docId = ((Number) ith[0]).intValue();
            final TermFreqVector tfv = reader.getTermFreqVector(docId, CONTENT_TEXT_FIELD_NAME);
            if (tfv == null) {
                continue;
            }
            final String[] terms = tfv.getTerms();
            final int[] freqs = tfv.getTermFrequencies();
            // total document size
            int size = 0;
            for (int freq : freqs) {
                size += freq;
            }
            if (logger.isDebugEnabled()) {
                logger.debug(String.format("Lucene document %d has %d terms, to be merged with running count %d",
                        docId, size, termCountMap.size()));
            }
            // loop over the terms and aggregate the counts and tf-idf
            int i = 0;
            for (final String term : terms) {
                if (StopWords.ENGLISH.contains(term)) {
                    continue;
                }
                luceneTerm = luceneTerm.createTerm(term);
                final int termCount = freqs[i++];
                final Tuple<Integer, Float> countScore;
                if (termCountMap.containsKey(term)) {
                    countScore = termCountMap.get(term);
                    countScore.t1 += termCount;
                    countScore.t2 += (TFIDF.score(term, termCount, size, numDocs, searcher.docFreq(luceneTerm)));
                } else {
                    countScore = new Tuple<Integer, Float>();
                    countScore.t1 = termCount;
                    countScore.t2 = (TFIDF.score(term, termCount, size, numDocs, searcher.docFreq(luceneTerm)));
                    termCountMap.put(term, countScore);
                }
            }
        }
        if (logger.isDebugEnabled()) {
            logger.debug("Completed Lucene document processing.");
        }
        importantTerms = new ArrayList<ImportantTerm>(termCountMap.size());
        // find max TF-IDF
        float maxTfIdf = 0.0f;
        for (final Tuple<Integer, Float> ith : termCountMap.values()) {
            if (ith.t2 > maxTfIdf) {
                maxTfIdf = ith.t2;
            }
        }
        for (final Map.Entry<String, Tuple<Integer, Float>> entry : termCountMap.entrySet()) {
            final int ithCount = entry.getValue().t1;
            final float ithTfIdf = entry.getValue().t2;
            importantTerms.add(new ImportantTerm(entry.getKey(), ithCount, ithTfIdf, maxTfIdf));
        }
        if (logger.isDebugEnabled()) {
            logger.debug("Completed term aggregation, will clear term map");
        }
        termCountMap.clear();
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        try {
            searcher.close();
        } catch (IOException e) {
            logger.warn("Failed to close searcher: " + e, e);
        }
        ira.close(reader);
    }
    if (logger.isDebugEnabled()) {
        logger.debug("Sorting terms");
    }
    Collections.sort(importantTerms, new Comparator<ImportantTerm>() {
        @Override
        public int compare(ImportantTerm term1, ImportantTerm term2) {
            return -1 * term1.getTfIdf().compareTo(term2.getTfIdf());
        }
    });
    if (logger.isDebugEnabled()) {
        logger.debug("Term sort complete");
    }
    if (importantTerms.isEmpty() || importantTerms.size() < count) {
        if (logger.isDebugEnabled()) {
            logger.debug("Will return full list.");
        }
        logger.debug("Timer: " + (System.currentTimeMillis() - start));
        return importantTerms;
    } else {
        if (logger.isDebugEnabled()) {
            logger.debug("Will return sublist containing " + count + " of " + importantTerms.size() + " terms.");
        }
        logger.debug("Timer: " + (System.currentTimeMillis() - start));
        return importantTerms.subList(0, count);
    }
}
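The core LinkedHashMap usage in this example is the containsKey/get/put accumulation: the first time a term is seen it is put (fixing its position in the map), and subsequent occurrences fetch and mutate the existing entry via get. A minimal sketch of the same idiom with a plain integer counter instead of the Tuple&lt;Integer, Float&gt;:

import java.util.LinkedHashMap;

public class TermAggregationDemo {
    public static void main(String[] args) {
        String[] terms = {"lucene", "index", "lucene", "query", "index", "lucene"};

        // First occurrence fixes the position; get() updates the running count
        LinkedHashMap<String, Integer> termCountMap = new LinkedHashMap<>();
        for (String term : terms) {
            if (termCountMap.containsKey(term)) {
                termCountMap.put(term, termCountMap.get(term) + 1);
            } else {
                termCountMap.put(term, 1);
            }
        }
        System.out.println(termCountMap); // {lucene=3, index=2, query=1}
    }
}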
From source file:jp.ac.u.tokyo.m.pig.udf.eval.group.InnerGroup.java
@Override
public Tuple exec(Tuple aInput) throws IOException {
    if (aInput == null)
        return null;
    TupleFactory tTupleFactory = TupleFactory.getInstance();
    BagFactory tBagFactory = DefaultBagFactory.getInstance();
    // processing target
    DataBag tTarget = DataType.toBag(aInput.get(0));
    DataBag tGroupColumn = DataType.toBag(aInput.get(1));
    // InnerGroup container
    GroupFilterFormat tGroupFilterFormat = mGroupFilterFormat;
    List<GroupFilterFormatGroup> tGroupList = tGroupFilterFormat.getGroupList();
    LinkedHashMap<GroupFilterFormatGroup, List<Tuple>> tInnerGroupMap = new LinkedHashMap<GroupFilterFormatGroup, List<Tuple>>();
    for (GroupFilterFormatGroup tCurrentGFFGroup : tGroupList) {
        tInnerGroupMap.put(tCurrentGFFGroup, new ArrayList<Tuple>());
    }
    // Grouping
    Iterator<Tuple> tTargetIterator = tTarget.iterator();
    Iterator<Tuple> tGroupColumnIterator = tGroupColumn.iterator();
    int tOriginalColumnNum = mOriginalColumnNum;
    while (tGroupColumnIterator.hasNext()) {
        String tCurrentGroupColumnValue = (String) tGroupColumnIterator.next().get(0);
        Tuple tCurrentTargetTupleOrigin = tTargetIterator.next();
        // For some reason, when Pig executes an eval UDF that takes a Bag in parallel,
        // the contents of the Bags influence each other.
        // As a workaround, the number of columns from the schema of the original Bag is
        // recorded at outputSchema time, and only that number of columns is carried over.
        // With this, at least for this UDF, the interference of values disappears, so it
        // is safe to execute InnerGroup in parallel.
        ArrayList<Object> tOriginBagColumns = new ArrayList<Object>();
        Iterator<Object> tColumnIterator = tCurrentTargetTupleOrigin.getAll().iterator();
        for (int tColumnCount = 0; tColumnCount < tOriginalColumnNum; tColumnCount++) {
            if (tColumnIterator.hasNext())
                tOriginBagColumns.add(tColumnIterator.next());
            else
                tOriginBagColumns.add(null);
        }
        int[] tJoinedValueBagIndexes = mJoinedValueBagIndexes;
        int[] tJoinedValueBagSizes = mJoinedValueBagSizes;
        // When the target JoinedValueBag does not exist, overwrite tModeOfTakeOutJoinedValue with ALL_GROUP.
        ModeOfTakeOutJoinedValue tModeOfTakeOutJoinedValue = tJoinedValueBagIndexes == null
                ? ModeOfTakeOutJoinedValue.ALL_GROUP
                : mModeOfTakeOutJoinedValue;
        for (GroupFilterFormatGroup tCurrentGFFGroup : tGroupList) {
            if (tCurrentGFFGroup.isMember(tCurrentGroupColumnValue)) {
                tInnerGroupMap.get(tCurrentGFFGroup)
                        .add(tTupleFactory.newTupleNoCopy(
                                composeTransformedTuple(tCurrentGFFGroup, tModeOfTakeOutJoinedValue,
                                        tOriginBagColumns, tJoinedValueBagIndexes, tJoinedValueBagSizes)));
            }
        }
    }
    // create return value
    ArrayList<Object> tResultTupleList = new ArrayList<Object>();
    for (GroupFilterFormatGroup tCurrentGFFGroup : tGroupList) {
        tResultTupleList.add(tCurrentGFFGroup.getGroupName());
        tResultTupleList.add(tBagFactory.newDefaultBag(tInnerGroupMap.get(tCurrentGFFGroup)));
    }
    return tTupleFactory.newTupleNoCopy(tResultTupleList);
}
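This UDF pre-seeds the LinkedHashMap with an empty list per group before processing any rows, so every later tInnerGroupMap.get(...) is guaranteed non-null and the output preserves the configured group order. The same idiom in miniature (group and row names are made up):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

public class PreSeededGroupsDemo {
    public static void main(String[] args) {
        List<String> groups = List.of("groupA", "groupB");

        // Seed every group up front so get() below can never return null
        LinkedHashMap<String, List<String>> innerGroupMap = new LinkedHashMap<>();
        for (String g : groups) {
            innerGroupMap.put(g, new ArrayList<>());
        }

        innerGroupMap.get("groupA").add("row1");
        innerGroupMap.get("groupB").add("row2");
        innerGroupMap.get("groupA").add("row3");

        System.out.println(innerGroupMap); // {groupA=[row1, row3], groupB=[row2]}
    }
}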
From source file:com.vmware.bdd.cli.commands.ClusterCommands.java
private void prettyOutputClusterInfo(ClusterRead cluster, boolean detail) {
    Map<String, Map<String, String>> infraCfg = cluster.getInfrastructure_config();
    Map<String, String> userMgmtCfg = null;
    if (MapUtils.isNotEmpty(infraCfg)) {
        userMgmtCfg = infraCfg.get(UserMgmtConstants.LDAP_USER_MANAGEMENT);
    }
    TopologyType topology = cluster.getTopologyPolicy();
    printSeperator();
    // list cluster level params
    LinkedHashMap<String, String> clusterParams = new LinkedHashMap<String, String>();
    clusterParams.put("CLUSTER NAME", cluster.getName());
    clusterParams.put("AGENT VERSION", cluster.getVersion());
    clusterParams.put("APP MANAGER", cluster.getAppManager());
    clusterParams.put("DISTRO", cluster.getDistro());
    clusterParams.put("NODE TEMPLATE", cluster.getTemplateName());
    String cloneType = cluster.getClusterCloneType();
    if (!CommandsUtils.isBlank(cloneType)) {
        clusterParams.put("CLUSTER CLONE TYPE", cloneType.toUpperCase());
    }
    if (topology != null && topology != TopologyType.NONE) {
        clusterParams.put("TOPOLOGY", topology.toString());
    }
    clusterParams.put("IO SHARES", cluster.getIoShares() == null ? "" : cluster.getIoShares().toString());
    clusterParams.put("STATUS", cluster.getStatus() == null ? "" : cluster.getStatus().toString());
    if (cluster.getExternalHDFS() != null && !cluster.getExternalHDFS().isEmpty()) {
        clusterParams.put("EXTERNAL HDFS", cluster.getExternalHDFS());
    }
    // Burst out
    if (!CommandsUtils.isBlank(cluster.getExternalMapReduce())) {
        clusterParams.put("EXTERNAL MAPREDUCE", cluster.getExternalMapReduce());
    }
    clusterParams.put("AD/LDAP ENABLED", Boolean.toString(MapUtils.isNotEmpty(userMgmtCfg)));
    for (String key : clusterParams.keySet()) {
        System.out.printf(Constants.OUTPUT_INDENT + "%-26s:" + Constants.OUTPUT_INDENT + "%s\n", key,
                clusterParams.get(key));
    }
    System.out.println();
    LinkedHashMap<String, List<String>> ngColumnNamesWithGetMethodNames = new LinkedHashMap<String, List<String>>();
    List<NodeGroupRead> nodegroups = cluster.getNodeGroups();
    if (nodegroups != null) {
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_GROUP_NAME, Arrays.asList("getName"));
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_ROLES, Arrays.asList("getRoles"));
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_INSTANCE,
                Arrays.asList("getInstanceNum"));
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_CPU, Arrays.asList("getCpuNum"));
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_MEM,
                Arrays.asList("getMemCapacityMB"));
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_TYPE,
                Arrays.asList("getStorage", "getType"));
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_SIZE,
                Arrays.asList("getStorage", "getSizeGB"));
        try {
            if (detail) {
                prettyOutputDetailNodegroups(topology, ngColumnNamesWithGetMethodNames, nodegroups);
            } else
                CommandsUtils.printInTableFormat(ngColumnNamesWithGetMethodNames, nodegroups.toArray(),
                        Constants.OUTPUT_INDENT);
        } catch (Exception e) {
            CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_LIST,
                    Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
        }
        if (detail) {
            prettyOutputDetailedUserMgmt(cluster.getName(), userMgmtCfg);
        }
    }
}
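Here the LinkedHashMap is effectively an ordered key/value report: rows print in exactly the order they were put. A minimal sketch of that keySet/get print loop (the parameter values are made up):

import java.util.LinkedHashMap;
import java.util.Map;

public class ClusterParamsDemo {
    public static void main(String[] args) {
        // LinkedHashMap keeps the rows in the order they are put(),
        // so the printout matches the intended report layout
        Map<String, String> clusterParams = new LinkedHashMap<>();
        clusterParams.put("CLUSTER NAME", "hadoop-prod");
        clusterParams.put("DISTRO", "bigtop");
        clusterParams.put("STATUS", "RUNNING");

        for (String key : clusterParams.keySet()) {
            System.out.printf("   %-26s:   %s%n", key, clusterParams.get(key));
        }
    }
}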
From source file:de.ingrid.importer.udk.strategy.v32.IDCStrategy3_2_0.java
/**
 * Also drops all old values (if syslist already exists)!
 *
 * @param listId id of syslist
 * @param deleteOldValues pass true if all old syslist values should be deleted before adding new ones!
 * @param syslistMap_de german entries
 * @param syslistMap_en english entries
 * @param defaultEntry_de pass key of GERMAN default entry or -1 if no default entry!
 * @param defaultEntry_en pass key of ENGLISH default entry or -1 if no default entry!
 * @param syslistMap_descr_de pass null if no GERMAN description available
 * @param syslistMap_descr_en pass null if no ENGLISH description available
 * @throws Exception
 */
private void writeNewSyslist(int listId, boolean deleteOldValues,
        LinkedHashMap<Integer, String> syslistMap_de, LinkedHashMap<Integer, String> syslistMap_en,
        int defaultEntry_de, int defaultEntry_en, LinkedHashMap<Integer, String> syslistMap_descr_de,
        LinkedHashMap<Integer, String> syslistMap_descr_en) throws Exception {

    if (syslistMap_descr_de == null) {
        syslistMap_descr_de = new LinkedHashMap<Integer, String>();
    }
    if (syslistMap_descr_en == null) {
        syslistMap_descr_en = new LinkedHashMap<Integer, String>();
    }

    if (deleteOldValues) {
        // clean up, to guarantee no old values !
        sqlStr = "DELETE FROM sys_list where lst_id = " + listId;
        jdbc.executeUpdate(sqlStr);
    }

    String psSql = "INSERT INTO sys_list (id, lst_id, entry_id, lang_id, name, maintainable, is_default, description) "
            + "VALUES (?,?,?,?,?,?,?,?)";
    PreparedStatement psInsert = jdbc.prepareStatement(psSql);

    Iterator<Integer> itr = syslistMap_de.keySet().iterator();
    while (itr.hasNext()) {
        int key = itr.next();

        // german version
        String isDefault = "N";
        if (key == defaultEntry_de) {
            isDefault = "Y";
        }
        psInsert.setLong(1, getNextId());
        psInsert.setInt(2, listId);
        psInsert.setInt(3, key);
        psInsert.setString(4, "de");
        psInsert.setString(5, syslistMap_de.get(key));
        psInsert.setInt(6, 0);
        psInsert.setString(7, isDefault);
        psInsert.setString(8, syslistMap_descr_de.get(key));
        psInsert.executeUpdate();

        // english version
        isDefault = "N";
        if (key == defaultEntry_en) {
            isDefault = "Y";
        }
        psInsert.setLong(1, getNextId());
        psInsert.setString(4, "en");
        psInsert.setString(5, syslistMap_en.get(key));
        psInsert.setString(7, isDefault);
        psInsert.setString(8, syslistMap_descr_en.get(key));
        psInsert.executeUpdate();
    }
    psInsert.close();
}
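The method iterates the keys of the German map and uses the same key to get from the parallel English and description maps; a map with no entry for a key simply yields null from get (as the optional description maps do above). A small sketch of that parallel-map lookup:

import java.util.LinkedHashMap;

public class SyslistDemo {
    public static void main(String[] args) {
        LinkedHashMap<Integer, String> syslistMap_de = new LinkedHashMap<>();
        LinkedHashMap<Integer, String> syslistMap_en = new LinkedHashMap<>();
        syslistMap_de.put(1, "Ja");
        syslistMap_en.put(1, "Yes");
        syslistMap_de.put(2, "Nein");
        syslistMap_en.put(2, "No");

        // An empty description map: get() returns null for every key
        LinkedHashMap<Integer, String> syslistMap_descr_de = new LinkedHashMap<>();

        // Drive the iteration from one map, look up the others with the same key
        for (Integer key : syslistMap_de.keySet()) {
            System.out.println(key + ": " + syslistMap_de.get(key) + " / "
                    + syslistMap_en.get(key) + " / " + syslistMap_descr_de.get(key));
        }
    }
}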
From source file:com.ikanow.aleph2.analytics.services.DeduplicationService.java
@Override
public void onObjectBatch(final Stream<Tuple2<Long, IBatchRecord>> batch, final Optional<Integer> batch_size,
        final Optional<JsonNode> grouping_key) {
    if (_deduplication_is_disabled.get()) {
        // no deduplication, generally shouldn't be here...
        // ...but if we are, do the best we can
        batch.forEach(t2 -> _context.get().emitImmutableObject(t2._1(), t2._2().getJson(), Optional.empty(),
                Optional.empty(), Optional.empty()));
        return;
    }

    // Create big query
    final Tuple3<QueryComponent<JsonNode>, List<Tuple2<JsonNode, Tuple2<Long, IBatchRecord>>>, Either<String, List<String>>> fieldinfo_dedupquery_keyfields = getDedupQuery(
            batch, _dedup_fields.get(), _db_mapper.get());

    // Get duplicate results
    final Tuple2<List<String>, Boolean> fields_include = getIncludeFields(_policy.get(), _dedup_fields.get(),
            _timestamp_field.get());

    final CompletableFuture<Iterator<JsonNode>> dedup_res = fieldinfo_dedupquery_keyfields._2().isEmpty()
            ? CompletableFuture.completedFuture(Collections.<JsonNode>emptyList().iterator())
            : _dedup_context.get().getObjectsBySpec(fieldinfo_dedupquery_keyfields._1(), fields_include._1(),
                    fields_include._2()).thenApply(cursor -> cursor.iterator());

    // Wait for it to finish
    // (create handy results structure if so)
    final LinkedHashMap<JsonNode, LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>>> mutable_obj_map = fieldinfo_dedupquery_keyfields
            ._2().stream()
            .collect(Collector.of(
                    () -> new LinkedHashMap<JsonNode, LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>>>(),
                    (acc, t2) -> {
                        // (ie only the first element is added, duplicate elements are removed)
                        final Tuple3<Long, IBatchRecord, ObjectNode> t3 = Tuples._3T(t2._2()._1(), t2._2()._2(),
                                _mapper.createObjectNode());
                        acc.compute(t2._1(), (k, v) -> {
                            final LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>> new_list = (null == v)
                                    ? new LinkedList<>()
                                    : v;
                            new_list.add(t3);
                            return new_list;
                        });
                    }, (map1, map2) -> {
                        map1.putAll(map2);
                        return map1;
                    }));

    //TODO (ALEPH-20): add timestamps to annotation
    //TODO (ALEPH-20): support different timestamp fields for the different buckets
    //TODO (ALEPH-20): really need to support >1 current enrichment job
    // ^^(Really really longer term you should be able to decide what objects you want and what you don't <- NOTE: don't remember what i meant here)

    final Iterator<JsonNode> cursor = dedup_res.join();

    // Handle the results
    final Stream<JsonNode> records_to_delete = Lambdas.get(() -> {
        if (isCustom(_doc_schema.get().deduplication_policy())
                || _doc_schema.get().delete_unhandled_duplicates()) {
            return Optionals.streamOf(cursor, true)
                    .collect(Collectors.groupingBy(
                            ret_obj -> getKeyFieldsAgain(ret_obj, fieldinfo_dedupquery_keyfields._3())))
                    .entrySet().stream().<JsonNode>flatMap(kv -> {
                        final Optional<JsonNode> maybe_key = kv.getKey();
                        final Optional<LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>>> matching_records = maybe_key
                                .map(key -> mutable_obj_map.get(key));

                        // Stats:
                        _mutable_stats.duplicate_keys++;
                        _mutable_stats.duplicates_existing += kv.getValue().size();
                        _mutable_stats.duplicates_incoming += matching_records.map(l -> l.size()).orElse(0);

                        //DEBUG
                        //System.out.println("?? " + kv.getValue().size() + " vs " + maybe_key + " vs " + matching_records.map(x -> Integer.toString(x.size())).orElse("(no match)"));

                        return matching_records
                                .<Stream<JsonNode>>map(records -> handleDuplicateRecord(_doc_schema.get(),
                                        _custom_handler.optional().map(
                                                handler -> Tuples._2T(handler, this._custom_context.get())),
                                        _timestamp_field.get(), records, kv.getValue(), maybe_key.get(),
                                        mutable_obj_map))
                                .orElse(Stream.empty());
                    });
        } else {
            Optionals.streamOf(cursor, true).forEach(ret_obj -> {
                final Optional<JsonNode> maybe_key = getKeyFieldsAgain(ret_obj,
                        fieldinfo_dedupquery_keyfields._3());
                final Optional<LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>>> matching_records = maybe_key
                        .map(key -> mutable_obj_map.get(key));

                //DEBUG
                //System.out.println("?? " + ret_obj + " vs " + maybe_key + " vs " + matching_record.map(x -> x._2().getJson().toString()).orElse("(no match)"));

                // Stats:
                _mutable_stats.duplicate_keys++;
                _mutable_stats.duplicates_existing++;
                _mutable_stats.duplicates_incoming += matching_records.map(l -> l.size()).orElse(0);

                matching_records.ifPresent(records -> handleDuplicateRecord(_doc_schema.get(),
                        _custom_handler.optional()
                                .map(handler -> Tuples._2T(handler, this._custom_context.get())),
                        _timestamp_field.get(), records, Arrays.asList(ret_obj), maybe_key.get(),
                        mutable_obj_map));
            });
            return Stream.<JsonNode>empty();
        }
    });

    final List<Object> ids = records_to_delete.map(j -> jsonToObject(j)).filter(j -> null != j)
            .collect(Collectors.toList());

    if (!ids.isEmpty()) { // fire a bulk deletion request
        mutable_uncompleted_deletes.add(
                _dedup_context.get().deleteObjectsBySpec(CrudUtils.allOf().withAny(AnnotationBean._ID, ids)));

        _mutable_stats.deleted += ids.size();

        //(quickly see if we can reduce the number of outstanding requests)
        final Iterator<CompletableFuture<Long>> it = mutable_uncompleted_deletes.iterator();
        while (it.hasNext()) {
            final CompletableFuture<Long> cf = it.next();
            if (cf.isDone()) {
                it.remove();
            } else
                break; // (ie stop as soon as we hit one that isn't complete)
        }
    }

    _mutable_stats.nonduplicate_keys += mutable_obj_map.size();

    if (Optional.ofNullable(_doc_schema.get().custom_finalize_all_objects()).orElse(false)) {
        mutable_obj_map.entrySet().stream()
                .forEach(kv -> handleCustomDeduplication(
                        _custom_handler.optional()
                                .map(handler -> Tuples._2T(handler, this._custom_context.get())),
                        kv.getValue(), Collections.emptyList(), kv.getKey()));
    } else { // Just emit the last element of each grouped object set
        mutable_obj_map.values().stream().map(t -> t.peekLast())
                .forEach(t -> _context.get().emitImmutableObject(t._1(), t._2().getJson(), Optional.of(t._3()),
                        Optional.empty(), Optional.empty()));
    }
}
From source file:org.cyberoam.iview.charts.Chart.java
public static void generatePDFReport(OutputStream out, int reportID, String applianceID, String startDate,
        String endDate, String limit, int[] deviceIDs, HttpServletRequest request, int reportGroupID,
        LinkedHashMap paramMap) throws Exception {
    float width = 768;
    float height = 1024;
    float rec_hieght = 470;
    Rectangle pagesize = new Rectangle(768, 1024);
    Document document = new Document(pagesize, 30, 30, 30, 30);
    IndexManager indexManager = null;
    JFreeChart chart = null;
    SqlReader sqlReader = new SqlReader(false);
    CyberoamLogger.sysLog.debug("reportID:" + reportID);
    CyberoamLogger.sysLog.debug("applianceID:" + applianceID);
    CyberoamLogger.sysLog.debug("startDate:" + startDate);
    CyberoamLogger.sysLog.debug("endDate:" + endDate);
    CyberoamLogger.sysLog.debug("limit:" + limit);
    try {
        //PdfWriter writer = PdfWriter.getInstance(document, response!=null ? response.getOutputStream():new FileOutputStream(pdfFileName));
        PdfWriter writer = PdfWriter.getInstance(document, out);
        writer.setPageEvent(new Chart());
        document.addAuthor("iView");
        document.addSubject("iView Report");
        document.open();
        PdfContentByte contentByte = writer.getDirectContent();
        //ReportGroupBean reportGroupBean=ReportGroupBean.getRecordbyPrimarykey(reportGroupID);
        //ArrayList reportList=reportGroupBean.getReportIdByReportGroupId(reportGroupID);
        ReportBean reportBean;
        ResultSetWrapper rsw = null;
        String seperator = System.getProperty("file.separator");
        // String path=System.getProperty("catalina.home") +seperator+"webapps" +seperator+"ROOT" + seperator + "images" + seperator;
        String path = InitServlet.contextPath + seperator + "images" + seperator + "iViewPDF.jpg";

        /*
         * Loading Image to add into PDF
         */
        Image iViewImage = Image.getInstance(path);
        iViewImage.scaleAbsolute(750, 900);
        //iViewImage.scaleAbsolute(600,820);
        iViewImage.setAbsolutePosition(10, 10);

        /*Image headerImage= Image.getInstance(path+ "iViewPDFHeader.jpg");
        PdfPTable headerTable = new PdfPTable(2);
        PdfPCell cell = new PdfPCell(headerImage);
        headerTable.addCell(cell);
        HeaderFooter docHeader=null;
        //document.setHeader(new HeaderFooter(new Phrase(new Chunk())), true);
        */
        document.add(iViewImage);
        document.add(new Paragraph("\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"));

        /*
         * Generating Table on the First Page of Report providing summary of Content
         */
        PdfPTable frontPageTable = new PdfPTable(2);
        PdfPCell dataCell;
        String reportName = "";
        Color tableHeadBackColor = new Color(150, 174, 190);
        Color tableContentBackColor = new Color(229, 232, 237);
        Color tableBorderColor = new Color(229, 232, 237);

        dataCell = new PdfPCell(new Phrase(new Chunk("Report Profile",
                FontFactory.getFont(FontFactory.HELVETICA_BOLD, 16, Font.PLAIN, new Color(255, 255, 255)))));
        dataCell.setBackgroundColor(tableHeadBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        if (paramMap != null) {
            reportName = paramMap.get("title").toString();
            paramMap.remove("title");
        }
        if (request != null) {
            ReportGroupBean reportGroupBean = ReportGroupBean.getRecordbyPrimarykey(reportGroupID);
            reportName = getFormattedTitle(request, reportGroupBean, true);
        }

        dataCell = new PdfPCell();
        dataCell.addElement(new Phrase(new Chunk(reportName,
                FontFactory.getFont(FontFactory.HELVETICA_BOLD, 12, Font.PLAIN, new Color(255, 255, 255)))));
        //dataCell.addElement(new Phrase(new Chunk(ReportBean.getRecordbyPrimarykey(reportID).getTitle(), FontFactory.getFont(FontFactory.HELVETICA_BOLD, 11, Font.PLAIN, new Color(10,10,10)))));
        if (request != null) {
            dataCell.addElement(new Phrase(new Chunk(reportName + " >> ",
                    FontFactory.getFont(FontFactory.HELVETICA_BOLD, 12, Font.PLAIN, new Color(255, 255, 255)))));
            dataCell.addElement(new Phrase(new Chunk(ReportBean.getRecordbyPrimarykey(reportID).getTitle(),
                    FontFactory.getFont(FontFactory.HELVETICA_BOLD, 12, Font.PLAIN, new Color(255, 255, 255)))));
        }
        dataCell.setBackgroundColor(tableHeadBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        dataCell = new PdfPCell(new Phrase(new Chunk("Start Date",
                FontFactory.getFont(FontFactory.HELVETICA_BOLD, 12, Font.PLAIN, new Color(0, 0, 0)))));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        dataCell = new PdfPCell(new Phrase(startDate));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        dataCell = new PdfPCell(new Phrase(new Chunk("End Date",
                FontFactory.getFont(FontFactory.HELVETICA_BOLD, 12, Font.PLAIN, new Color(0, 0, 0)))));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        dataCell = new PdfPCell(new Phrase(endDate));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        dataCell = new PdfPCell(new Phrase(new Chunk("iView Server Time",
                FontFactory.getFont(FontFactory.HELVETICA_BOLD, 12, Font.PLAIN, new Color(0, 0, 0)))));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        java.util.Date currentDate = new java.util.Date();
        dataCell = new PdfPCell(new Phrase(currentDate.toString()));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        dataCell = new PdfPCell(new Phrase(new Chunk("Device Names (IP Address)",
                FontFactory.getFont(FontFactory.HELVETICA_BOLD, 12, Font.PLAIN, new Color(0, 0, 0)))));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        DeviceBean deviceBean = null;
        String deviceNameWithIP = "";
        if (deviceIDs != null) {
            for (int i = 0; i < deviceIDs.length; i++) {
                deviceBean = DeviceBean.getRecordbyPrimarykey(deviceIDs[i]);
                if (deviceBean != null) {
                    deviceNameWithIP += " " + (i + 1) + ". " + deviceBean.getName() + " (" + deviceBean.getIp()
                            + ")\n";
                }
            }
        }
        dataCell = new PdfPCell(new Phrase("\n" + deviceNameWithIP + "\n"));
        dataCell.setBackgroundColor(tableContentBackColor);
        dataCell.setBorderColor(tableBorderColor);
        frontPageTable.addCell(dataCell);

        /*
         * Adding Table to PDF
         */
        document.add(frontPageTable);

        /*
         * Adding Charts and Table to PDF
         */
        document.newPage();
        reportBean = ReportBean.getRecordbyPrimarykey(reportID);
        String query = null;
        if (request == null) {
            query = PrepareQuery.getQuery(reportBean, startDate, endDate, applianceID, null, null, "0", limit,
                    paramMap);
        } else {
            PrepareQuery prepareQuery = new PrepareQuery();
            query = prepareQuery.getQuery(reportBean, request);
        }
        String searchQuery = "";
        if (request == null) {
            searchQuery = null;
        } else {
            searchQuery = request.getParameter("searchquery");
        }
        if (searchQuery != null && !"".equalsIgnoreCase(searchQuery)) {
            query = query.replaceFirst("where", "where " + searchQuery + " and");
        }
        CyberoamLogger.sysLog.debug("PDF:ReportID:" + reportBean.getReportId() + "Query->" + query);
        try {
            if (query.indexOf("select") == -1 && query.indexOf("SELECT") == -1) {
                indexManager = new IndexManager();
                rsw = indexManager.getSearch(query);
                //rsw=indexManager.getResutSetFromArrayList(searchRecord);
            } else {
                rsw = sqlReader.getInstanceResultSetWrapper(query);
            }
        } catch (org.postgresql.util.PSQLException e) {
            if (query.indexOf("5min_ts_20") > -1) {
                query = query.substring(0, query.indexOf("5min_ts_20")) + "4hr"
                        + query.substring(query.indexOf("5min_ts_20") + 16, query.length());
                CyberoamLogger.appLog.debug("New query : " + query);
                rsw = sqlReader.getInstanceResultSetWrapper(query);
            } else {
                CyberoamLogger.appLog.error("Exception in AjaxController.java " + e, e);
            }
        } catch (Exception e) {
            CyberoamLogger.appLog.error("Exception in AjaxController.java " + e, e);
            rsw.close();
        }

        /*
         * PDF Rendering work starts here
         */
        //if(Integer.parseInt(limit)<=10 && query.indexOf("where")>-1){
        if (reportBean.getReportFormatId() != 2) {
            chart = Chart.getChart(reportBean.getReportId(), rsw, null);
            PdfTemplate pdfTemplate = contentByte.createTemplate(width, height);
            Graphics2D graphics2D = pdfTemplate.createGraphics(width, height);
            Rectangle2D rectangle = new Rectangle2D.Double(100, 85, 540, rec_hieght);
            chart.draw(graphics2D, rectangle);
            graphics2D.dispose();
            contentByte.addTemplate(pdfTemplate, 0, 0);
            for (int j = 0; j < (int) (rec_hieght / 16) + 1; j++) {
                document.add(new Paragraph("\n"));
            }
        } else
            document.add(new Paragraph("\n"));

        // Retrieving PdfPTable
        PdfPTable pdfTable = getPdfPTable(reportBean, rsw);
        rsw.close();
        document.add(pdfTable);
        CyberoamLogger.appLog.info("*************Finishing PDF Work****************");
    } catch (Exception e) {
        CyberoamLogger.sysLog.debug("Chart.writeChartToPDF.e" + e.getMessage(), e);
    } finally {
        sqlReader.close();
    }
    document.close();
}