List of usage examples for java.io PrintWriter append
public PrintWriter append(char c)
From source file:edu.uci.ics.crawler4j.asos.BasicCrawler.java
/**
 * This function is called when a page is fetched and ready to be processed
 * by your program.
 *
 * Logs the fetched page's metadata, and for ASOS product pages (URLs
 * containing "pgeproduct") scrapes product properties, sizes, and stock via
 * Selenium, appending one CSV row per size to {@code bw}.
 *
 * NOTE(review): relies on instance fields {@code i} and {@code j} declared
 * elsewhere in this class (counters, not visible here) — confirm their intent.
 */
@Override
public void visit(Page page, PrintWriter bw, WebDriver driver) {
    int docid = page.getWebURL().getDocid();
    String url = page.getWebURL().getURL();
    String domain = page.getWebURL().getDomain();
    String path = page.getWebURL().getPath();
    String subDomain = page.getWebURL().getSubDomain();
    String parentUrl = page.getWebURL().getParentUrl();
    String anchor = page.getWebURL().getAnchor();
    System.out.println("Docid: " + docid);
    System.out.println("URL: " + url);
    System.out.println("Domain: '" + domain + "'");
    System.out.println("Sub-domain: '" + subDomain + "'");
    System.out.println("Path: '" + path + "'");
    System.out.println("Parent page: " + parentUrl);
    System.out.println("Anchor text: " + anchor);
    if (page.getParseData() instanceof HtmlParseData) {
        HtmlParseData htmlParseData = (HtmlParseData) page.getParseData();
        String text = htmlParseData.getText();
        String html = htmlParseData.getHtml();
        List<WebURL> links = htmlParseData.getOutgoingUrls();
        System.out.println("Text length: " + text.length());
        System.out.println("Html length: " + html.length());
        System.out.println("Number of outgoing links: " + links.size());
    }
    // NOTE(review): a new SimpleDateFormat is built on every visit; it could be
    // hoisted to a field only if access stays single-threaded (it is not thread-safe).
    DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
    System.out.println(i + ": " + url);
    if (page.getParseData() instanceof HtmlParseData) {
        // Only product detail pages are scraped.
        if (url.toLowerCase().contains("pgeproduct")) {
            String[] propertyString = null;
            try {
                driver.get(url);
                System.out.println(url);
                propertyString = Asos.extractProperties(driver);
                if (propertyString != null) {
                    // Index 4 holds the size slot; presumably null means "no size data" — TODO confirm.
                    if (propertyString[4] != null) {
                        String[] sizeString = null;
                        sizeString = Asos.getSizes(driver);
                        String[] stockString = null;
                        stockString = Asos.getStock(driver);
                        int sizes = sizeString.length;
                        // Swap slots 2 and 8 so the sale price ends up in slot 8 — TODO confirm column order.
                        if (!propertyString[8].contains("sale")) {
                            String sale = propertyString[2];
                            propertyString[2] = propertyString[8];
                            propertyString[8] = sale;
                        }
                        // Emit one CSV row per available size.
                        for (int k = 0; k < sizes; k++) {
                            if (sizes > 1) {
                                propertyString[4] = sizeString[k];
                                propertyString[5] = stockString[k];
                            } else {
                                propertyString[4] = "-";
                                propertyString[5] = "-";
                            }
                            // This loop's j shadows the class field j incremented below.
                            for (int j = 0; j < propertyString.length; j++) {
                                bw.append(propertyString[j] + ",");
                            }
                            Date date = new Date();
                            bw.append(dateFormat.format(date) + ",");
                            bw.append(url);
                            bw.append("\r\n");
                        }
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                // NOTE(review): finally block is intentionally empty in the original.
            }
            j++;
        }
    }
    i++;
}
From source file:com.ephesoft.dcma.gwt.uploadbatch.server.UploadBatchImageServlet.java
private void uploadFile(HttpServletRequest req, HttpServletResponse resp, BatchSchemaService batchSchemaService, String currentBatchUploadFolderName) throws IOException { PrintWriter printWriter = resp.getWriter(); File tempFile = null;//from www. j a va2 s. com InputStream instream = null; OutputStream out = null; String uploadBatchFolderPath = batchSchemaService.getUploadBatchFolder(); String uploadFileName = ""; if (ServletFileUpload.isMultipartContent(req)) { FileItemFactory factory = new DiskFileItemFactory(); ServletFileUpload upload = new ServletFileUpload(factory); uploadFileName = ""; String uploadFilePath = ""; List<FileItem> items; try { items = upload.parseRequest(req); for (FileItem item : items) { if (!item.isFormField()) { // && "uploadFile".equals(item.getFieldName())) { uploadFileName = item.getName(); if (uploadFileName != null) { uploadFileName = uploadFileName .substring(uploadFileName.lastIndexOf(File.separator) + 1); } uploadFilePath = uploadBatchFolderPath + File.separator + currentBatchUploadFolderName + File.separator + uploadFileName; try { instream = item.getInputStream(); tempFile = new File(uploadFilePath); out = new FileOutputStream(tempFile); byte buf[] = new byte[1024]; int len = instream.read(buf); while (len > 0) { out.write(buf, 0, len); len = instream.read(buf); } } catch (FileNotFoundException e) { LOG.error("Unable to create the upload folder." + e, e); printWriter.write("Unable to create the upload folder.Please try again."); } catch (IOException e) { LOG.error("Unable to read the file." + e, e); printWriter.write("Unable to read the file.Please try again."); } finally { if (out != null) { out.close(); } if (instream != null) { instream.close(); } } } } } catch (FileUploadException e) { LOG.error("Unable to read the form contents." 
+ e, e); printWriter.write("Unable to read the form contents.Please try again."); } } else { LOG.error("Request contents type is not supported."); printWriter.write("Request contents type is not supported."); } printWriter.write("currentBatchUploadFolderName:" + currentBatchUploadFolderName); printWriter.append("|"); printWriter.append("fileName:").append(uploadFileName); printWriter.append("|"); printWriter.flush(); }
From source file:org.openhealthtools.mdht.uml.cda.core.util.CDAModelUtil.java
private static void appendPropertyRules(PrintWriter writer, Property property, Map<String, List<Constraint>> constraintMap, Map<Constraint, List<Constraint>> subConstraintMap, List<Constraint> unprocessedConstraints, boolean markup) { String[] ol;// ww w . j av a 2s .c o m String[] li; if (markup) { ol = OL; li = LI; } else { ol = NOOL; li = NOLI; } // association typeCode and property type String assocConstraints = ""; if (property.getAssociation() != null) { assocConstraints = CDAModelUtil.computeAssociationConstraints(property, true); } StringBuffer ruleConstraints = new StringBuffer(); List<Constraint> rules = constraintMap.get(property.getName()); if (rules != null && !rules.isEmpty()) { for (Constraint constraint : rules) { unprocessedConstraints.remove(constraint); ruleConstraints.append(li[0] + CDAModelUtil.computeConformanceMessage(constraint, true)); appendSubConstraintRules(ruleConstraints, constraint, subConstraintMap, unprocessedConstraints, markup); // List<Constraint> subConstraints = subConstraintMap.get(constraint); // if (subConstraints != null && subConstraints.size() > 0) { // ruleConstraints.append(OL[0]); // for (Constraint subConstraint : subConstraints) { // unprocessedConstraints.remove(subConstraint); // ruleConstraints.append("\n<li>" + CDAModelUtil.computeConformanceMessage(subConstraint, true) + "</li>"); // } // ruleConstraints.append("</ol>"); // } ruleConstraints.append(li[1]); } } if (assocConstraints.length() > 0 || ruleConstraints.length() > 0) { // writer.append(", such that "); // writer.append(property.upperBound()==1 ? "it" : "each"); writer.append(ol[0]); writer.append(assocConstraints); writer.append(ruleConstraints); writer.append(ol[1]); } }
From source file:edu.stanford.epad.epadws.handlers.dicom.DSOUtil.java
public static boolean handleCreateDSO(String projectID, String subjectID, String studyUID, String seriesUID, HttpServletRequest httpRequest, PrintWriter responseStream, String username) { // See http://www.tutorialspoint.com/servlets/servlets-file-uploading.htm boolean uploadError = false; log.info("Received DSO create request for series " + seriesUID); try {// w w w .j av a 2 s. c o m ServletFileUpload servletFileUpload = new ServletFileUpload(); FileItemIterator fileItemIterator = servletFileUpload.getItemIterator(httpRequest); DSOEditRequest dsoEditRequest = null; String editedFrameNumbers = httpRequest.getParameter("editedFrameNumbers"); if (editedFrameNumbers == null || editedFrameNumbers.length() == 0) { dsoEditRequest = extractDSOEditRequest(fileItemIterator); } else { log.info("Uploaded mask frame numbers:" + editedFrameNumbers); String[] frameNumbers = editedFrameNumbers.split(","); List<Integer> numbers = new ArrayList<Integer>(); for (String frameNumber : frameNumbers) { if (frameNumber.trim().length() == 0) continue; numbers.add(new Integer(frameNumber.trim())); } dsoEditRequest = new DSOEditRequest(projectID, subjectID, studyUID, seriesUID, "", "", numbers); } //need to pass this all the way to segmentation writer, put into edit request String property = httpRequest.getParameter("property"); String color = httpRequest.getParameter("color"); dsoEditRequest.property = property; dsoEditRequest.color = color; log.info("DSOCreateRequest, seriesUID:" + dsoEditRequest.seriesUID + " imageUID:" + dsoEditRequest.imageUID + " aimID:" + dsoEditRequest.aimID + " number Frames:" + dsoEditRequest.editedFrameNumbers.size()); if (dsoEditRequest != null) { List<File> framesPNGMaskFiles = HandlerUtil.extractFiles(fileItemIterator, "DSOFrame", ".PNG"); if (framesPNGMaskFiles.isEmpty()) { log.warning("No PNG masks supplied in DSO create request for series " + seriesUID); uploadError = true; } else { framesPNGMaskFiles = framesPNGMaskFiles.subList(0, 
dsoEditRequest.editedFrameNumbers.size()); log.info("Extracted " + framesPNGMaskFiles.size() + " file mask(s) for DSO create for series " + seriesUID); String name = httpRequest.getParameter("name"); DSOEditResult dsoEditResult = DSOUtil.createNewDSO(name, dsoEditRequest, framesPNGMaskFiles, projectID, username); if (dsoEditResult != null) { responseStream.append(dsoEditResult.toJSON()); } else { log.info("Null return from createNewDSO"); uploadError = true; } } } else { log.warning("Invalid JSON header in DSO edit request for series " + seriesUID); uploadError = true; } } catch (IOException e) { log.warning("IO exception handling DSO edits for series " + seriesUID, e); uploadError = true; } catch (FileUploadException e) { log.warning("File upload exception handling DSO edits for series " + seriesUID, e); uploadError = true; } if (!uploadError) log.info("DSO successfully created ..."); return uploadError; }
From source file:com.ephesoft.gxt.uploadbatch.server.UploadBatchImageServlet.java
private void uploadFile(HttpServletRequest req, HttpServletResponse resp, BatchSchemaService batchSchemaService, String currentBatchUploadFolderName) throws IOException { PrintWriter printWriter = resp.getWriter(); File tempFile = null;//from www. jav a2 s . c o m InputStream instream = null; OutputStream out = null; String uploadBatchFolderPath = batchSchemaService.getUploadBatchFolder(); String uploadFileName = ""; if (ServletFileUpload.isMultipartContent(req)) { FileItemFactory factory = new DiskFileItemFactory(); ServletFileUpload upload = new ServletFileUpload(factory); uploadFileName = ""; String uploadFilePath = ""; List<FileItem> items; try { items = upload.parseRequest(req); for (FileItem item : items) { if (!item.isFormField()) { // && "uploadFile".equals(item.getFieldName())) { uploadFileName = item.getName(); if (uploadFileName != null) { uploadFileName = uploadFileName .substring(uploadFileName.lastIndexOf(File.separator) + 1); } uploadFilePath = uploadBatchFolderPath + File.separator + currentBatchUploadFolderName + File.separator + uploadFileName; try { instream = item.getInputStream(); tempFile = new File(uploadFilePath); out = new FileOutputStream(tempFile); byte buf[] = new byte[1024]; int len; while ((len = instream.read(buf)) > 0) { out.write(buf, 0, len); } } catch (FileNotFoundException e) { log.error("Unable to create the upload folder." + e, e); printWriter.write("Unable to create the upload folder.Please try again."); tempFile.delete(); resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "unable to upload. please see server logs for more details."); } catch (IOException e) { log.error("Unable to read the file." + e, e); printWriter.write("Unable to read the file.Please try again."); tempFile.delete(); resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "unable to upload. 
please see server logs for more details."); } finally { if (out != null) { out.close(); } if (instream != null) { instream.close(); } } } } } catch (FileUploadException e) { log.error("Unable to read the form contents." + e, e); printWriter.write("Unable to read the form contents.Please try again."); resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "unable to upload. please see server logs for more details."); } } else { log.error("Request contents type is not supported."); printWriter.write("Request contents type is not supported."); } printWriter.write("currentBatchUploadFolderName:" + currentBatchUploadFolderName); printWriter.append("|"); printWriter.append("fileName:").append(uploadFileName); printWriter.append("|"); printWriter.flush(); }
From source file:com.ephesoft.gxt.systemconfig.server.ImportPluginUploadServlet.java
private void attachFile(HttpServletRequest req, HttpServletResponse resp, BatchSchemaService batchSchemaService) throws IOException { String errorMessageString = EMPTY_STRING; PrintWriter printWriter = resp.getWriter(); File tempZipFile = null;//from w w w . j ava 2s .c om InputStream instream = null; OutputStream out = null; ZipInputStream zipInputStream = null; List<ZipEntry> zipEntries = null; if (ServletFileUpload.isMultipartContent(req)) { FileItemFactory factory = new DiskFileItemFactory(); ServletFileUpload upload = new ServletFileUpload(factory); String exportSerailizationFolderPath = EMPTY_STRING; try { Properties allProperties = ApplicationConfigProperties.getApplicationConfigProperties() .getAllProperties(META_INF_APPLICATION_PROPERTIES); exportSerailizationFolderPath = allProperties.getProperty(PLUGIN_UPLOAD_PROPERTY_NAME); } catch (IOException e) { } File exportSerailizationFolder = new File(exportSerailizationFolderPath); if (!exportSerailizationFolder.exists()) { exportSerailizationFolder.mkdir(); } List<FileItem> items; try { items = upload.parseRequest(req); for (FileItem item : items) { if (!item.isFormField()) { zipFileName = item.getName(); if (zipFileName != null) { zipFileName = zipFileName.substring(zipFileName.lastIndexOf(File.separator) + 1); } zipPathname = exportSerailizationFolderPath + File.separator + zipFileName; try { instream = item.getInputStream(); tempZipFile = new File(zipPathname); if (tempZipFile.exists()) { tempZipFile.delete(); } out = new FileOutputStream(tempZipFile); byte buf[] = new byte[1024]; int len; while ((len = instream.read(buf)) > 0) { out.write(buf, 0, len); } } catch (FileNotFoundException e) { log.error("Unable to create the export folder." + e, e); printWriter.write("Unable to create the export folder.Please try again."); } catch (IOException e) { log.error("Unable to read the file." 
+ e, e); printWriter.write("Unable to read the file.Please try again."); } finally { if (out != null) { try { out.close(); } catch (IOException ioe) { log.info("Could not close stream for file." + tempZipFile); } } if (instream != null) { try { instream.close(); } catch (IOException ioe) { log.info("Could not close stream for file." + zipFileName); } } } } } } catch (FileUploadException e) { log.error("Unable to read the form contents." + e, e); printWriter.write("Unable to read the form contents.Please try again."); } // Unnecessary code to unzip the attached file removed. zipInputStream = new ZipInputStream(new FileInputStream(zipPathname)); zipEntries = new ArrayList<ZipEntry>(); ZipEntry nextEntry = zipInputStream.getNextEntry(); while (nextEntry != null) { zipEntries.add(nextEntry); nextEntry = zipInputStream.getNextEntry(); } errorMessageString = processZipFileContents(zipEntries, zipPathname); } else { log.error("Request contents type is not supported."); printWriter.write("Request contents type is not supported."); } // Temp file is now not created. if (validZipContent) { String zipFileNameWithoutExtension = zipPathname.substring(0, zipPathname.lastIndexOf('.')); printWriter.write(SystemConfigConstants.PLUGIN_NAME + zipFileNameWithoutExtension); printWriter.append(RESULT_SEPERATOR); printWriter.append(SystemConfigConstants.JAR_FILE_PATH).append(jarFilePath); printWriter.append(RESULT_SEPERATOR); printWriter.append(SystemConfigConstants.XML_FILE_PATH).append(xmlFilePath); printWriter.append(RESULT_SEPERATOR); printWriter.flush(); } else { printWriter.write("Error while importing.Please try again." + CAUSE + errorMessageString); } }
From source file:com.ephesoft.gxt.systemconfig.server.ImportPoolServlet.java
/** * Unzip the attached zipped file./*from w w w . j a va 2 s . c o m*/ * * @param req {@link HttpServletRequest} * @param resp {@link HttpServletResponse} * @param batchSchemaService {@link BatchSchemaService} * @throws IOException */ private void attachFile(final HttpServletRequest req, final HttpServletResponse resp, final BatchSchemaService batchSchemaService) throws IOException { final PrintWriter printWriter = resp.getWriter(); File tempZipFile = null; InputStream instream = null; OutputStream out = null; String tempOutputUnZipDir = CoreCommonConstant.EMPTY_STRING; if (ServletFileUpload.isMultipartContent(req)) { final FileItemFactory factory = new DiskFileItemFactory(); final ServletFileUpload upload = new ServletFileUpload(factory); final String exportSerailizationFolderPath = batchSchemaService.getBatchExportFolderLocation(); final File exportSerailizationFolder = new File(exportSerailizationFolderPath); if (!exportSerailizationFolder.exists()) { exportSerailizationFolder.mkdir(); } String zipFileName = CoreCommonConstant.EMPTY_STRING; String zipPathname = CoreCommonConstant.EMPTY_STRING; List<FileItem> items; try { items = upload.parseRequest(req); for (final FileItem item : items) { if (!item.isFormField()) { zipFileName = item.getName(); if (zipFileName != null) { zipFileName = zipFileName.substring(zipFileName.lastIndexOf(File.separator) + 1); } zipPathname = exportSerailizationFolderPath + File.separator + zipFileName; // get only the file name not whole path if (zipFileName != null) { zipFileName = FilenameUtils.getName(zipFileName); } try { instream = item.getInputStream(); tempZipFile = new File(zipPathname); if (tempZipFile.exists()) { tempZipFile.delete(); } out = new FileOutputStream(tempZipFile); final byte buf[] = new byte[1024]; int len; while ((len = instream.read(buf)) > 0) { out.write(buf, 0, len); } } catch (final FileNotFoundException fileNotFoundException) { log.error("Unable to create the export folder." 
+ fileNotFoundException, fileNotFoundException); printWriter.write("Unable to create the export folder.Please try again."); } catch (final IOException ioException) { log.error("Unable to read the file." + ioException, ioException); printWriter.write("Unable to read the file.Please try again."); } finally { if (out != null) { try { out.close(); } catch (final IOException ioException) { log.info("Could not close stream for file." + tempZipFile); } } if (instream != null) { try { instream.close(); } catch (final IOException ioException) { log.info("Could not close stream for file." + zipFileName); } } } } } } catch (final FileUploadException fileUploadException) { log.error("Unable to read the form contents." + fileUploadException, fileUploadException); printWriter.write("Unable to read the form contents. Please try again."); } tempOutputUnZipDir = exportSerailizationFolderPath + File.separator + zipFileName.substring(0, zipFileName.lastIndexOf(CoreCommonConstant.DOT)) + System.nanoTime(); try { FileUtils.unzip(tempZipFile, tempOutputUnZipDir); } catch (final Exception exception) { log.error("Unable to unzip the file." + exception, exception); printWriter.write("Unable to unzip the file. Please try again."); tempZipFile.delete(); } } else { log.error("Request contents type is not supported."); printWriter.write("Request contents type is not supported."); } if (tempZipFile != null) { tempZipFile.delete(); } printWriter.append(SystemConfigSharedConstants.FILE_PATH).append(tempOutputUnZipDir); //printWriter.append("filePath:").append(tempOutputUnZipDir); printWriter.append(CoreCommonConstant.PIPE); printWriter.flush(); }
From source file:org.jitsi.hammer.stats.HammerStats.java
/** * Keep track, collect and update the stats of all the * <tt>MediaStreamStats</tt> this <tt>HammerStats</tt> handles. * * Also write the results in the stats files. */// w w w . j a v a 2s .c om public void run() { PrintWriter writer = null; StringBuilder allBldr = new StringBuilder(); String delim; String delim_ = ""; synchronized (this) { threadStop = false; } logger.info("Running the main loop"); while (!threadStop) { synchronized (this) { if (overallStatsLogging || allStatsLogging || summaryStatsLogging) { if (allStatsLogging || summaryStatsLogging) { if (writer == null) { try { writer = new PrintWriter(allStatsFile, "UTF-8"); writer.print("[\n"); } catch (FileNotFoundException e) { logger.fatal("HammerStats stopping due to FileNotFound", e); stop(); } catch (UnsupportedEncodingException e) { logger.fatal("HammerStats stopping due to " + "UnsupportedEncoding", e); } } //Clear the StringBuilder allBldr.setLength(0); writer.print(delim_ + '\n'); delim_ = ","; writer.print("{\n"); writer.print(" \"timestamp\":" + System.currentTimeMillis() + ",\n"); } delim = ""; logger.info("Updating the MediaStreamStats"); for (FakeUserStats stats : fakeUserStatsList) { //We update the stats before using/reading them. 
stats.updateStats(); } for (FakeUserStats stats : fakeUserStatsList) { if (allStatsLogging) { allBldr.append(delim + stats.getStatsJSON(2) + '\n'); delim = ","; } if (summaryStatsLogging || overallStatsLogging) { logger.info("Adding stats values from the" + " MediaStreamStats to their" + " HammerSummaryStats objects"); audioSummaryStats.add(stats.getMediaStreamStats(MediaType.AUDIO)); videoSummaryStats.add(stats.getMediaStreamStats(MediaType.VIDEO)); } } if (allStatsLogging) { logger.info("Writing all stats to file"); writer.print(" \"users\":\n"); writer.print(" [\n"); writer.print(allBldr.toString()); writer.print(" ]"); if (summaryStatsLogging) writer.print(','); writer.print('\n'); } if (summaryStatsLogging) { logger.info("Writing summary stats to file"); writer.print(" \"summary\":\n"); writer.print(" {\n"); writer.print(" \"max\":\n"); writer.print(" {\n"); writer.print(" \"audio\":"); writer.print(audioSummaryStats.getMaxJSON() + ",\n"); writer.print(" \"video\":"); writer.print(videoSummaryStats.getMaxJSON() + '\n'); writer.print(" },\n"); writer.print(" \"mean\":\n"); writer.print(" {\n"); writer.print(" \"audio\":"); writer.print(audioSummaryStats.getMeanJSON() + ",\n"); writer.print(" \"video\":"); writer.print(videoSummaryStats.getMeanJSON() + '\n'); writer.print(" },\n"); writer.print(" \"min\":\n"); writer.print(" {\n"); writer.print(" \"audio\":"); writer.print(audioSummaryStats.getMinJSON() + ",\n"); writer.print(" \"video\":"); writer.print(videoSummaryStats.getMinJSON() + '\n'); writer.print(" },\n"); writer.print(" \"standard_deviation\":\n"); writer.print(" {\n"); writer.print(" \"audio\":"); writer.print(audioSummaryStats.getStandardDeviationJSON() + ",\n"); writer.print(" \"video\":"); writer.print(videoSummaryStats.getStandardDeviationJSON() + '\n'); writer.print(" }\n"); writer.print(" }\n"); } if (allStatsLogging || summaryStatsLogging) { writer.append("}"); writer.flush(); } } if (summaryStatsLogging || overallStatsLogging) { 
logger.info("Clearing the HammerSummaryStats by creating new" + " SummaryStats objects for each watched stats"); audioSummaryStats.clear(); videoSummaryStats.clear(); } } try { Thread.sleep(timeBetweenUpdate * 1000); } catch (InterruptedException e) { logger.fatal("Error during sleep in main loop : " + e); stop(); } } logger.info("Exiting the main loop"); if (writer != null) { writer.print("]\n"); writer.close(); } if (overallStatsLogging) writeOverallStats(); }
From source file:com.ephesoft.gxt.admin.server.ImportBatchClassUploadServlet.java
private void attachFile(HttpServletRequest req, HttpServletResponse resp, BatchSchemaService batchSchemaService, BatchClassService bcService, ImportBatchService imService) throws IOException { PrintWriter printWriter = resp.getWriter(); File tempZipFile = null;//from w w w. ja v a 2s . c om InputStream instream = null; OutputStream out = null; String zipWorkFlowName = "", tempOutputUnZipDir = "", zipWorkflowDesc = "", zipWorkflowPriority = ""; BatchClass importBatchClass = null; if (ServletFileUpload.isMultipartContent(req)) { FileItemFactory factory = new DiskFileItemFactory(); ServletFileUpload upload = new ServletFileUpload(factory); String exportSerailizationFolderPath = batchSchemaService.getBatchExportFolderLocation(); File exportSerailizationFolder = new File(exportSerailizationFolderPath); if (!exportSerailizationFolder.exists()) { exportSerailizationFolder.mkdir(); } String zipFileName = ""; String zipPathname = ""; List<FileItem> items; try { items = upload.parseRequest(req); for (FileItem item : items) { if (!item.isFormField()) {//&& "importFile".equals(item.getFieldName())) { zipFileName = item.getName(); if (zipFileName != null) { zipFileName = zipFileName.substring(zipFileName.lastIndexOf(File.separator) + 1); } zipPathname = exportSerailizationFolderPath + File.separator + zipFileName; // get only the file name not whole path if (zipFileName != null) { zipFileName = FilenameUtils.getName(zipFileName); } try { instream = item.getInputStream(); tempZipFile = new File(zipPathname); if (tempZipFile.exists()) { tempZipFile.delete(); } out = new FileOutputStream(tempZipFile); byte buf[] = new byte[1024]; int len; while ((len = instream.read(buf)) > 0) { out.write(buf, 0, len); } } catch (FileNotFoundException e) { log.error("Unable to create the export folder." + e, e); printWriter.write("Unable to create the export folder.Please try again."); } catch (IOException e) { log.error("Unable to read the file." 
+ e, e); printWriter.write("Unable to read the file.Please try again."); } finally { if (out != null) { try { out.close(); } catch (IOException ioe) { log.info("Could not close stream for file." + tempZipFile); } } if (instream != null) { try { instream.close(); } catch (IOException ioe) { log.info("Could not close stream for file." + zipFileName); } } } } } } catch (FileUploadException e) { log.error("Unable to read the form contents." + e, e); printWriter.write("Unable to read the form contents.Please try again."); } tempOutputUnZipDir = exportSerailizationFolderPath + File.separator + zipFileName.substring(0, zipFileName.lastIndexOf('.')) + System.nanoTime(); try { FileUtils.unzip(tempZipFile, tempOutputUnZipDir); } catch (Exception e) { log.error("Unable to unzip the file." + e, e); printWriter.write("Unable to unzip the file.Please try again."); tempZipFile.delete(); } String serializableFilePath = FileUtils.getFileNameOfTypeFromFolder(tempOutputUnZipDir, SERIALIZATION_EXT); InputStream serializableFileStream = null; try { serializableFileStream = new FileInputStream(serializableFilePath); importBatchClass = (BatchClass) SerializationUtils.deserialize(serializableFileStream); zipWorkFlowName = importBatchClass.getName(); zipWorkflowDesc = importBatchClass.getDescription(); zipWorkflowPriority = "" + importBatchClass.getPriority(); } catch (Exception e) { tempZipFile.delete(); log.error("Error while importing" + e, e); printWriter.write("Error while importing.Please try again."); } finally { if (serializableFileStream != null) { try { serializableFileStream.close(); } catch (IOException ioe) { log.info("Could not close stream for file." 
+ serializableFilePath); } } } } else { log.error("Request contents type is not supported."); printWriter.write("Request contents type is not supported."); } if (tempZipFile != null) { tempZipFile.delete(); } List<String> uncList = bcService.getAssociatedUNCList(zipWorkFlowName); DeploymentService deploymentService = this.getSingleBeanOfType(DeploymentService.class); boolean isWorkflowDeployed = deploymentService.isDeployed(zipWorkFlowName); if (null != importBatchClass) { boolean isWorkflowEqual = imService.isImportWorkflowEqualDeployedWorkflow(importBatchClass, importBatchClass.getName()); printWriter.write(AdminSharedConstants.WORK_FLOW_NAME + zipWorkFlowName); printWriter.append("|"); printWriter.write(AdminSharedConstants.WORK_FLOW_DESC + zipWorkflowDesc); printWriter.append("|"); printWriter.write(AdminSharedConstants.WORK_FLOW_PRIORITY + zipWorkflowPriority); printWriter.append("|"); printWriter.append(AdminSharedConstants.FILE_PATH).append(tempOutputUnZipDir); printWriter.append("|"); printWriter.write(AdminSharedConstants.WORKFLOW_DEPLOYED + isWorkflowDeployed); printWriter.append("|"); printWriter.write(AdminSharedConstants.WORKFLOW_EQUAL + isWorkflowEqual); printWriter.append("|"); printWriter.write(AdminSharedConstants.WORKFLOW_EXIST_IN_BATCH_CLASS + ((uncList == null || uncList.size() == 0) ? false : true)); printWriter.append("|"); } printWriter.flush(); }
From source file:edu.stanford.epad.epadws.handlers.dicom.DSOUtil.java
/**
 * Handles a multipart DSO frame-edit request: resolves "*" placeholders for
 * series/study UIDs, validates that imageUID belongs to seriesUID, builds a
 * DSOEditRequest (from an uploaded JSON header or the "editedFrameNumbers"
 * parameter), checks the caller's permission on the target AIM, creates the
 * edited DSO, copies the edited PNG masks into the mask directory, and streams
 * the result JSON to responseStream.
 *
 * @return true if an upload/processing error occurred, false on success
 */
public static boolean handleDSOFramesEdit(String projectID, String subjectID, String studyUID, String seriesUID,
        String imageUID, HttpServletRequest httpRequest, PrintWriter responseStream) {
    // See http://www.tutorialspoint.com/servlets/servlets-file-uploading.htm
    boolean uploadError = false;
    log.info("Received DSO edit request for series " + seriesUID);
    String confirm = dcm4CheeDatabaseOperations.getSeriesUIDForImage(imageUID);
    //ml if ui do not know series uid (new dso)
    if (seriesUID.equals("*")) {
        seriesUID = confirm;
    }
    //ml if ui do not know study uid (new dso)
    if (studyUID.equals("*")) {
        studyUID = dcm4CheeDatabaseOperations.getStudyUIDForSeries(seriesUID);
    }
    // Reject requests whose imageUID does not belong to the claimed series.
    if (!confirm.equals(seriesUID)) {
        log.warning("Invalid ImageUID for series:" + seriesUID);
        return true;
    }
    try {
        ServletFileUpload servletFileUpload = new ServletFileUpload();
        FileItemIterator fileItemIterator = servletFileUpload.getItemIterator(httpRequest);
        DSOEditRequest dsoEditRequest = null;
        String editedFrameNumbers = httpRequest.getParameter("editedFrameNumbers");
        if (editedFrameNumbers == null || editedFrameNumbers.length() == 0) {
            // No frame list in the query string: expect a JSON header part in the upload.
            dsoEditRequest = extractDSOEditRequest(fileItemIterator);
            //ui doesn't send editedFrameNumbers, but the series uid is *
            if (dsoEditRequest.seriesUID.equals("*")) {
                dsoEditRequest.seriesUID = confirm;
            }
            if (dsoEditRequest.studyUID.equals("*")) {
                dsoEditRequest.studyUID = dcm4CheeDatabaseOperations.getStudyUIDForSeries(seriesUID);
            }
        } else {
            log.info("Uploaded mask frame numbers:" + editedFrameNumbers);
            String[] frameNumbers = editedFrameNumbers.split(",");
            List<Integer> numbers = new ArrayList<Integer>();
            for (String frameNumber : frameNumbers) {
                if (frameNumber.trim().length() == 0)
                    continue;
                // NOTE(review): new Integer(String) is deprecated; Integer.valueOf
                // would be the modern equivalent.
                numbers.add(new Integer(frameNumber.trim()));
            }
            dsoEditRequest = new DSOEditRequest(projectID, subjectID, studyUID, seriesUID, imageUID,
                    httpRequest.getParameter("aimID"), numbers);
        }
        if (dsoEditRequest != null) {
            //need to pass this all the way to segmentation writer, put into edit request
            String property = httpRequest.getParameter("property");
            String color = httpRequest.getParameter("color");
            dsoEditRequest.property = property;
            dsoEditRequest.color = color;
            log.info("DSOEditRequest, imageUID:" + dsoEditRequest.imageUID + " aimID:" + dsoEditRequest.aimID
                    + " number Frames:" + dsoEditRequest.editedFrameNumbers.size());
            EpadDatabaseOperations epadDatabaseOperations = EpadDatabase.getInstance()
                    .getEPADDatabaseOperations();
            String username = httpRequest.getParameter("username");
            EPADAIM aim = epadDatabaseOperations.getAIM(dsoEditRequest.aimID);
            // Only admins, the AIM's owner, or the project owner may update the AIM.
            if (aim != null && username != null) {
                EpadProjectOperations projectOperations = DefaultEpadProjectOperations.getInstance();
                if (!projectOperations.isAdmin(username) && !username.equals(aim.userName)
                        && !projectOperations.isOwner(username, projectID)) {
                    log.warning("No permissions to update AIM:" + aim.aimID + " for user " + username);
                    throw new Exception("No permissions to update AIM:" + aim.aimID + " for user " + username);
                }
            }
            List<File> editedFramesPNGMaskFiles = HandlerUtil.extractFiles(fileItemIterator, "DSOEditedFrame",
                    ".PNG");
            if (editedFramesPNGMaskFiles.isEmpty()) {
                log.warning("No PNG masks supplied in DSO edit request for image " + imageUID + " in series "
                        + seriesUID);
                uploadError = true;
            } else {
                log.info("Extracted " + editedFramesPNGMaskFiles.size() + " file mask(s) for DSO edit for image "
                        + imageUID + " in series " + seriesUID);
                if (editedFramesPNGMaskFiles.size() != dsoEditRequest.editedFrameNumbers.size())
                    throw new IOException("Number of files and frames number do not match");
                // if (aim != null && (aim.dsoFrameNo == 0 || aim.dsoFrameNo < dsoEditRequest.editedFrameNumbers.get(0))) {
                //     aim.dsoFrameNo = dsoEditRequest.editedFrameNumbers.get(0);
                //     epadDatabaseOperations.updateAIMDSOFrameNo(aim.aimID, aim.dsoFrameNo);
                // }
                // NOTE(review): aim may be null here (only checked above together with
                // username); aim.seriesUID would then NPE — caught by the generic
                // Exception handler below, but worth an explicit guard. TODO confirm.
                DSOEditResult dsoEditResult = DSOUtil.createEditedDSO(dsoEditRequest, editedFramesPNGMaskFiles,
                        aim.seriesUID);
                if (dsoEditResult != null) {
                    log.info("Copying edited frame pngs: " + dsoEditRequest.editedFrameNumbers.size());
                    for (int i = 0; i < dsoEditRequest.editedFrameNumbers.size(); i++) {
                        Integer frameNumber = dsoEditRequest.editedFrameNumbers.get(i);
                        String pngMaskDirectoryPath = baseDicomDirectory + "/studies/" + studyUID + "/series/"
                                + seriesUID + "/images/" + imageUID + "/masks/";
                        String pngMaskFilePath = pngMaskDirectoryPath + frameNumber + ".png";
                        EPADFileUtils.copyFile(editedFramesPNGMaskFiles.get(i), new File(pngMaskFilePath));
                        editedFramesPNGMaskFiles.get(i).delete();
                    }
                    if (dsoEditResult.aimID != null && dsoEditResult.aimID.length() > 0) {
                        if (dsoEditResult.firstFrame != null) {
                            log.info("update aim table dso first frame with " + dsoEditResult.firstFrame
                                    + "for aim " + dsoEditResult.aimID);
                            epadDatabaseOperations.updateAIMDSOFrameNo(dsoEditResult.aimID,
                                    dsoEditResult.firstFrame);
                        }
                        List<ImageAnnotation> aims = AIMQueries.getAIMImageAnnotations(
                                AIMSearchType.ANNOTATION_UID, dsoEditResult.aimID, "admin");
                        if (aims.size() > 0) {
                            log.info("DSO Annotation: " + dsoEditResult.aimID);
                            // String sessionID = XNATSessionOperations.getJSessionIDFromRequest(httpRequest);
                            // ImageAnnotation imageAnnotation = aims.get(0);
                            // PluginAIMUtil.addSegmentToImageAnnotation(imageAnnotation.getSegmentationCollection().getSegmentationList().get(0).getSopClassUID(), dsoEditResult.imageUID, imageAnnotation.getSegmentationCollection().getSegmentationList().get(0).getReferencedSopInstanceUID(),
                            // imageAnnotation);
                            // DICOMImageReference dsoDICOMImageReference = PluginAIMUtil.createDICOMImageReference(dsoEditResult.studyUID, dsoEditResult.seriesUID,
                            // dsoEditResult.imageUID);
                            // imageAnnotation.addImageReference(dsoDICOMImageReference);
                            // try {
                            //     AIMUtil.saveImageAnnotationToServer(imageAnnotation, sessionID);
                            // } catch (AimException e) {
                            //     // TODO Auto-generated catch block
                            //     e.printStackTrace();
                            // } catch (edu.stanford.hakan.aim4api.base.AimException e) {
                            //     // TODO Auto-generated catch block
                            //     e.printStackTrace();
                            // }
                        }
                    }
                    responseStream.append(dsoEditResult.toJSON());
                } else {
                    log.info("Null return from createEditDSO");
                    uploadError = true;
                }
            }
        } else {
            log.warning("Invalid JSON header in DSO edit request for image " + imageUID + " in series "
                    + seriesUID);
            uploadError = true;
        }
    } catch (IOException e) {
        log.warning("IO exception handling DSO edits for series " + seriesUID, e);
        uploadError = true;
    } catch (FileUploadException e) {
        log.warning("File upload exception handling DSO edits for series " + seriesUID, e);
        uploadError = true;
    } catch (Exception e) {
        log.warning("Exception handling DSO edits for series " + seriesUID, e);
        uploadError = true;
    }
    if (!uploadError)
        log.info("DSO successfully edited");
    return uploadError;
}