List of usage examples for java.util.zip.ZipInputStream.close()
public void close() throws IOException
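close() closes the zip input stream and releases any system resources associated with it. The examples below either call it in a finally block or close it directly at the end of the method; the more idiomatic modern pattern is try-with-resources, which guarantees the call even when an entry fails to read. A minimal, self-contained sketch of that pattern (the archive name archive.zip and the output directory extracted are hypothetical placeholders):

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

public class ZipCloseExample {
    public static void main(String[] args) throws IOException {
        Path zip = Paths.get("archive.zip");   // hypothetical input archive
        Path destDir = Paths.get("extracted"); // hypothetical output directory
        Files.createDirectories(destDir);

        // try-with-resources calls zis.close() automatically, even on exceptions
        try (ZipInputStream zis = new ZipInputStream(
                new BufferedInputStream(Files.newInputStream(zip)))) {
            ZipEntry entry;
            byte[] buffer = new byte[8192];
            while ((entry = zis.getNextEntry()) != null) {
                Path target = destDir.resolve(entry.getName()).normalize();
                if (entry.isDirectory()) {
                    Files.createDirectories(target);
                } else {
                    Files.createDirectories(target.getParent());
                    try (OutputStream out = Files.newOutputStream(target)) {
                        int n;
                        while ((n = zis.read(buffer)) != -1) {
                            out.write(buffer, 0, n);
                        }
                    }
                }
                zis.closeEntry();
            }
        } // zis.close() runs here and also closes the underlying file stream
    }
}

Because ZipInputStream wraps the stream it is constructed with, closing it also closes that underlying stream, which is why several of the examples below close only the ZipInputStream.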
From source file:com.googlecode.jsfFlex.shared.tasks.sdk.UnzipTask.java
protected void performTask() {
    BufferedOutputStream bufferOutputStream = null;
    ZipInputStream zipInputStream = new ZipInputStream(new BufferedInputStream(_file));
    ZipEntry entry;
    try {
        while ((entry = zipInputStream.getNextEntry()) != null) {
            ensureDirectoryExists(entry.getName(), entry.isDirectory());
            bufferOutputStream = new BufferedOutputStream(new FileOutputStream(_dest + entry.getName()),
                    BUFFER_SIZE);
            int currRead = 0;
            byte[] dataRead = new byte[BUFFER_SIZE];
            while ((currRead = zipInputStream.read(dataRead, 0, BUFFER_SIZE)) != -1) {
                bufferOutputStream.write(dataRead, 0, currRead);
            }
            bufferOutputStream.flush();
            bufferOutputStream.close();
        }
        _log.debug("UnzipTask performTask has been completed with " + toString());
    } catch (IOException ioExcept) {
        StringBuilder errorMessage = new StringBuilder();
        errorMessage.append("Error in Unzip's performTask with following fields \n");
        errorMessage.append(toString());
        throw new ComponentBuildException(errorMessage.toString(), ioExcept);
    } finally {
        try {
            zipInputStream.close();
            if (bufferOutputStream != null) {
                bufferOutputStream.close();
            }
        } catch (IOException innerIOExcept) {
            _log.info("Error while closing the streams within UnzipTask's finally block");
        }
    }
}
From source file:org.freebxml.omar.common.Utility.java
/**
 * Extracts Zip file contents relative to baseDir
 *
 * @return An ArrayList containing the File instances for each unzipped file
 */
public static ArrayList unZip(String baseDir, InputStream is) throws IOException {
    ArrayList files = new ArrayList();
    ZipInputStream zis = new ZipInputStream(is);
    while (true) {
        // Get the next zip entry. Break out of the loop if there are no more.
        ZipEntry zipEntry = zis.getNextEntry();
        if (zipEntry == null)
            break;
        String entryName = zipEntry.getName();
        if (FILE_SEPARATOR.equalsIgnoreCase("\\")) {
            // Convert '/' to Windows file separator
            entryName = entryName.replaceAll("/", "\\\\");
        }
        String fileName = baseDir + FILE_SEPARATOR + entryName;
        // Make sure that the directory exists.
        String dirName = fileName.substring(0, fileName.lastIndexOf(FILE_SEPARATOR));
        File dir = new File(dirName);
        dir.mkdirs();
        // Entry could be a directory
        if (!(zipEntry.isDirectory())) {
            // Entry is a file, not a directory.
            // Write out the content of the entry to a file.
            File file = new File(fileName);
            files.add(file);
            FileOutputStream fos = new FileOutputStream(file);
            // Read data from the zip entry. The read() method returns
            // -1 when there is no more data to read.
            byte[] buffer = new byte[1000];
            int n;
            while ((n = zis.read(buffer)) > -1) {
                fos.write(buffer, 0, n);
            }
            zis.closeEntry();
            fos.close();
        } else {
            zis.closeEntry();
        }
    }
    zis.close();
    return files;
}
From source file:org.geoserver.wfs.response.ShapeZipTest.java
private void checkShapefileIntegrity(String[] typeNames, final InputStream in) throws IOException {
    ZipInputStream zis = new ZipInputStream(in);
    ZipEntry entry = null;
    final String[] extensions = new String[] { ".shp", ".shx", ".dbf", ".prj", ".cst" };
    Set names = new HashSet();
    for (String name : typeNames) {
        for (String extension : extensions) {
            names.add(name + extension);
        }
    }
    while ((entry = zis.getNextEntry()) != null) {
        final String name = entry.getName();
        if (name.endsWith(".txt")) {
            // not part of the shapefile, it's the request dump
            continue;
        }
        assertTrue("Missing " + name, names.contains(name));
        names.remove(name);
        zis.closeEntry();
    }
    zis.close();
}
From source file:com.aurel.track.exchange.track.importer.TrackImportAction.java
/**
 * Render the import page
 */
@Override
/**
 * Save the zip file and import the data
 * @return
 */
public String execute() {
    LOGGER.info("Import started");
    InputStream inputStream;
    try {
        inputStream = new FileInputStream(uploadFile);
    } catch (FileNotFoundException e) {
        LOGGER.error("Getting the input stream for the zip failed with " + e.getMessage());
        LOGGER.debug(ExceptionUtils.getStackTrace(e));
        JSONUtility.encodeJSON(servletResponse,
                JSONUtility.encodeJSONFailure(getText("admin.actions.importTp.err.failed")));
        return null;
    }
    /**
     * delete the old temporary attachment directory if it exists from previous imports
     */
    String tempAttachmentDirectory = AttachBL.getAttachDirBase() + File.separator + AttachBL.tmpAttachments;
    AttachBL.deleteDirectory(new File(tempAttachmentDirectory));
    /**
     * extract the zip to a temporary directory
     */
    ZipInputStream zipInputStream = new ZipInputStream(new BufferedInputStream(inputStream));
    final int BUFFER = 2048;
    File unzipTempDirectory = new File(tempAttachmentDirectory);
    if (!unzipTempDirectory.exists()) {
        unzipTempDirectory.mkdirs();
    }
    BufferedOutputStream dest = null;
    ZipEntry zipEntry;
    try {
        while ((zipEntry = zipInputStream.getNextEntry()) != null) {
            File destFile = new File(unzipTempDirectory, zipEntry.getName());
            // grab file's parent directory structure
            int count;
            byte data[] = new byte[BUFFER];
            // write the files to the disk
            FileOutputStream fos = new FileOutputStream(destFile);
            dest = new BufferedOutputStream(fos, BUFFER);
            while ((count = zipInputStream.read(data, 0, BUFFER)) != -1) {
                dest.write(data, 0, count);
            }
            dest.flush();
            dest.close();
        }
        zipInputStream.close();
    } catch (Exception e) {
        LOGGER.error("Extracting the zip to the temporary directory failed with " + e.getMessage());
        LOGGER.debug(ExceptionUtils.getStackTrace(e));
        JSONUtility.encodeJSON(servletResponse,
                JSONUtility.encodeJSONFailure(getText("admin.actions.importTp.err.failed")), false);
        return null;
    }
    /**
     * get the data file (the only file from the zip which is not an attachment)
     */
    File importDataFile = new File(tempAttachmentDirectory, ExchangeFieldNames.EXCHANGE_ZIP_ENTRY);
    if (!importDataFile.exists()) {
        LOGGER.error("The file " + ExchangeFieldNames.EXCHANGE_ZIP_ENTRY + " not found in the zip");
        JSONUtility.encodeJSON(servletResponse,
                JSONUtility.encodeJSONFailure(getText("admin.actions.importTp.err.failed")), false);
        return null;
    }
    /**
     * field parser
     */
    LOGGER.debug("Parsing the fields");
    List<ISerializableLabelBean> customFieldsBeans = new ImporterFieldParser().parse(importDataFile);
    Map<Integer, Integer> fieldsMatcherMap = null;
    try {
        fieldsMatcherMap = TrackImportBL.getFieldMatchMap(customFieldsBeans);
    } catch (ImportExceptionList importExceptionList) {
        LOGGER.error("Getting the field match map failed ");
        JSONUtility.encodeJSON(servletResponse,
                ImportJSON.importErrorMessageListJSON(
                        ErrorHandlerJSONAdapter.handleErrorList(importExceptionList.getErrorDataList(), locale),
                        null, true));
        return null;
    }
    /**
     * dropdown parser
     */
    LOGGER.debug("Parsing the external dropdowns");
    SortedMap<String, List<ISerializableLabelBean>> externalDropdowns = new ImporterDropdownParser()
            .parse(importDataFile, fieldsMatcherMap);
    /**
     * data parser
     */
    LOGGER.debug("Parsing the items");
    List<ExchangeWorkItem> externalReportBeansList = new ImporterDataParser().parse(importDataFile,
            fieldsMatcherMap);
    try {
        LOGGER.debug("Importing the items");
        ImportCounts importCounts = TrackImportBL.importWorkItems(externalReportBeansList, externalDropdowns,
                fieldsMatcherMap, personID, locale);
        LOGGER.debug("Imported " + importCounts.getNoOfCreatedIssues() + " new issues " + " modified "
                + importCounts.getNoOfUpdatedIssues());
        JSONUtility.encodeJSON(servletResponse,
                ImportJSON.importMessageJSON(true,
                        getText("admin.actions.importTp.lbl.result",
                                new String[] { Integer.valueOf(importCounts.getNoOfCreatedIssues()).toString(),
                                        Integer.valueOf(importCounts.getNoOfUpdatedIssues()).toString() }),
                        true, locale),
                false);
    } catch (ImportExceptionList importExceptionList) {
        JSONUtility.encodeJSON(servletResponse,
                ImportJSON.importErrorMessageListJSON(
                        ErrorHandlerJSONAdapter.handleErrorList(importExceptionList.getErrorDataList(), locale),
                        null, true),
                false);
        return null;
    } catch (ImportException importException) {
        JSONUtility.encodeJSON(servletResponse,
                ImportJSON.importMessageJSON(false, getText(importException.getMessage()), true, locale), false);
        return null;
    } catch (Exception e) {
        JSONUtility.encodeJSON(servletResponse,
                ImportJSON.importMessageJSON(false, getText("admin.actions.importTp.err.failed"), true, locale),
                false);
        return null;
    }
    LOGGER.info("Import done");
    return null;
}
From source file:de.knowwe.revisions.upload.UploadRevisionZip.java
@SuppressWarnings("unchecked")
@Override
public void execute(UserActionContext context) throws IOException {
    HashMap<String, String> pages = new HashMap<>();
    List<FileItem> items = null;
    String zipname = null;
    try {
        items = new ServletFileUpload(new DiskFileItemFactory()).parseRequest(context.getRequest());
    } catch (FileUploadException e) {
        throw new IOException("error during processing upload", e);
    }
    for (FileItem item : items) {
        zipname = item.getName();
        InputStream filecontent = item.getInputStream();
        ZipInputStream zin = new ZipInputStream(filecontent);
        ZipEntry ze;
        while ((ze = zin.getNextEntry()) != null) {
            String name = ze.getName();
            if (!name.contains("/")) {
                // this is an article
                String title = Strings.decodeURL(name);
                title = title.substring(0, title.length() - 4);
                String content = IOUtils.toString(zin, "UTF-8");
                zin.closeEntry();
                pages.put(title, content);
            } else {
                // TODO: what to do here?
                // this is an attachment
                // String[] splittedName = name.split("/");
                // String title = Strings.decodeURL(splittedName[0]);
                // String filename = Strings.decodeURL(splittedName[1]);
                //
                // System.out.println("Attachment: " + name);
                // String content = IOUtils.toString(zin, "UTF-8");
                // Environment.getInstance().getWikiConnector().storeAttachment(title,
                //         filename, context.getUserName(), zin);
                zin.closeEntry();
            }
        }
        zin.close();
        filecontent.close();
    }
    if (zipname != null) {
        UploadedRevision rev = new UploadedRevision(context.getWeb(), pages, zipname);
        RevisionManager.getRM(context).setUploadedRevision(rev);
    }
    context.sendRedirect("../Wiki.jsp?page=" + context.getTitle());
}
From source file:hoot.services.controllers.ogr.OgrAttributesResource.java
/**
 * This rest endpoint uploads multipart data from the UI and then generates attribute output.
 * Example: http://localhost:8080//hoot-services/ogr/info/upload?INPUT_TYPE=DIR
 * Output: {"jobId":"e43feae4-0644-47fd-a23c-6249e6e7f7fb"}
 *
 * After getting the jobId, one can track the progress through the job status rest endpoint.
 * Example: http://localhost:8080/hoot-services/job/status/e43feae4-0644-47fd-a23c-6249e6e7f7fb
 * Output: {"jobId":"e43feae4-0644-47fd-a23c-6249e6e7f7fb","statusDetail":null,"status":"complete"}
 *
 * Once status is "complete", the resulting attributes can be obtained through
 * Example: http://localhost:8080/hoot-services/ogr/info/e43feae4-0644-47fd-a23c-6249e6e7f7fb
 * Output: JSON of attributes
 *
 * @param inputType : [FILE | DIR] where FILE represents zip, shp or OMS and DIR represents FGDB
 * @param request
 * @return
 */
@POST
@Path("/upload")
@Produces(MediaType.TEXT_PLAIN)
public Response processUpload(@QueryParam("INPUT_TYPE") final String inputType,
        @Context HttpServletRequest request) {
    JSONObject res = new JSONObject();
    String jobId = UUID.randomUUID().toString();
    try {
        log.debug("Starting file upload for ogr attribute Process");
        Map<String, String> uploadedFiles = new HashMap<String, String>();
        Map<String, String> uploadedFilesPaths = new HashMap<String, String>();

        MultipartSerializer ser = new MultipartSerializer();
        ser.serializeUpload(jobId, inputType, uploadedFiles, uploadedFilesPaths, request);

        List<String> filesList = new ArrayList<String>();
        List<String> zipList = new ArrayList<String>();

        Iterator it = uploadedFiles.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry pairs = (Map.Entry) it.next();
            String fName = pairs.getKey().toString();
            String ext = pairs.getValue().toString();
            String inputFileName = uploadedFilesPaths.get(fName);

            // If it is a zip file then we crack it open to see if it contains an FGDB.
            // If so then we add the folder location and the desired output name, which is the fgdb name in the zip.
            if (ext.equalsIgnoreCase("ZIP")) {
                zipList.add(fName);
                String zipFilePath = homeFolder + "/upload/" + jobId + "/" + inputFileName;

                ZipInputStream zis = new ZipInputStream(new FileInputStream(zipFilePath));
                ZipEntry ze = zis.getNextEntry();
                while (ze != null) {
                    String zipName = ze.getName();
                    if (ze.isDirectory()) {
                        if (zipName.toLowerCase().endsWith(".gdb/") || zipName.toLowerCase().endsWith(".gdb")) {
                            String fgdbZipName = zipName;
                            if (zipName.toLowerCase().endsWith(".gdb/")) {
                                fgdbZipName = zipName.substring(0, zipName.length() - 1);
                            }
                            filesList.add("\"" + fName + "/" + fgdbZipName + "\"");
                        }
                    } else {
                        if (zipName.toLowerCase().endsWith(".shp")) {
                            filesList.add("\"" + fName + "/" + zipName + "\"");
                        }
                    }
                    ze = zis.getNextEntry();
                }
                zis.closeEntry();
                zis.close();
            } else {
                filesList.add("\"" + inputFileName + "\"");
            }
        }

        String mergeFilesList = StringUtils.join(filesList.toArray(), ' ');
        String mergedZipList = StringUtils.join(zipList.toArray(), ';');

        JSONArray params = new JSONArray();
        JSONObject param = new JSONObject();
        param.put("INPUT_FILES", mergeFilesList);
        params.add(param);
        param = new JSONObject();
        param.put("INPUT_ZIPS", mergedZipList);
        params.add(param);

        String argStr = createPostBody(params);
        postJobRquest(jobId, argStr);
    } catch (Exception ex) {
        ResourceErrorHandler.handleError("Failed upload: " + ex.toString(), Status.INTERNAL_SERVER_ERROR, log);
    }
    res.put("jobId", jobId);
    return Response.ok(res.toJSONString(), MediaType.APPLICATION_JSON).build();
}
From source file:com.polyvi.xface.extension.zip.XZipExt.java
/**
 * Unzips a zip archive read from an input stream into the destination directory.
 *
 * @param dstFileUri the URI of the destination directory
 * @param is         the input stream of the zip archive
 * @throws IOException
 * @throws FileNotFoundException
 * @throws IllegalArgumentException
 */
private void unzipFileFromStream(Uri dstFileUri, InputStream is)
        throws IOException, FileNotFoundException, IllegalArgumentException {
    if (null == dstFileUri || null == is) {
        XLog.e(CLASS_NAME, "Method unzipFileFromStream: params is null");
        throw new IllegalArgumentException();
    }
    ZipInputStream zis = new ZipInputStream(is);
    ZipEntry entry = null;
    Uri unZipUri = null;
    while (null != (entry = zis.getNextEntry())) {
        File unZipFile = new File(dstFileUri.getPath() + File.separator + entry.getName());
        unZipUri = Uri.fromFile(unZipFile);
        if (entry.isDirectory()) {
            if (!unZipFile.exists()) {
                unZipFile.mkdirs();
            }
        } else {
            // prepare the target location before writing the entry out
            prepareForZipDir(unZipUri);
            OutputStream fos = mResourceApi.openOutputStream(unZipUri);
            int readLen = 0;
            byte buffer[] = new byte[XConstant.BUFFER_LEN];
            while (-1 != (readLen = zis.read(buffer))) {
                fos.write(buffer, 0, readLen);
            }
            fos.close();
        }
    }
    zis.close();
    is.close();
}
From source file:com.seer.datacruncher.services.ftp.FTPPollJobProcessor.java
@Override
public void process(Exchange exchange) throws Exception {
    String result = "";
    GenericFile file = (GenericFile) exchange.getIn().getBody();
    Message message = exchange.getOut();
    String inputFileName = file.getFileName();
    Map<String, byte[]> resultMap = new HashMap<String, byte[]>();
    if (isValidFileName(inputFileName)) {
        long schemaId = getSchemaIdUsingFileName(inputFileName);
        long userId = getUserIdFromFileName(inputFileName);
        if (!usersDao.isUserAssoicatedWithSchema(userId, schemaId)) {
            result = "User not authorized";
        } else {
            resultMap.put(inputFileName, file.getBody().toString().getBytes());
            SchemaEntity schemaEntity = schemasDao.find(schemaId);
            if (schemaEntity == null) {
                result = "No schema found in database with Id [" + schemaId + "]";
            } else {
                if (inputFileName.endsWith(FileExtensionType.ZIP.getAbbreviation())) {
                    // Case 1: When the user uploads a ZIP file - all ZIP entries should be validated one by one
                    ZipInputStream inStream = null;
                    try {
                        inStream = new ZipInputStream(new ByteArrayInputStream(resultMap.get(inputFileName)));
                        ZipEntry entry;
                        while (!(isStreamClose(inStream)) && (entry = inStream.getNextEntry()) != null) {
                            if (!entry.isDirectory()) {
                                DatastreamsInput datastreamsInput = new DatastreamsInput();
                                datastreamsInput.setUploadedFileName(entry.getName());
                                byte[] byteInput = IOUtils.toByteArray(inStream);
                                result += datastreamsInput.datastreamsInput(new String(byteInput), schemaId,
                                        byteInput);
                            }
                            inStream.closeEntry();
                        }
                    } catch (IOException ex) {
                        result = "Error occurred while fetching records from ZIP file.";
                    } finally {
                        if (inStream != null)
                            inStream.close();
                    }
                } else {
                    DatastreamsInput datastreamsInput = new DatastreamsInput();
                    datastreamsInput.setUploadedFileName(inputFileName);
                    result = datastreamsInput.datastreamsInput(new String(resultMap.get(inputFileName)),
                            schemaId, resultMap.get(inputFileName));
                }
            }
        }
    } else {
        result = "File Name not in specified format.";
    }
    // Store the result at the FTP location
    CamelContext context = exchange.getContext();
    FtpComponent component = context.getComponent("ftp", FtpComponent.class);
    FtpEndpoint<?> endpoint = (FtpEndpoint<?>) component.createEndpoint(getFTPEndPoint());
    Exchange outExchange = endpoint.createExchange();
    outExchange.getIn().setBody(result);
    outExchange.getIn().setHeader("CamelFileName", getFileNameWithoutExtensions(inputFileName) + ".txt");
    Producer producer = endpoint.createProducer();
    producer.start();
    producer.process(outExchange);
    producer.stop();
}
From source file:net.firejack.platform.service.content.broker.collection.ImportCollectionArchiveFileBroker.java
@Override
protected ServiceResponse perform(ServiceRequest<NamedValues<InputStream>> request) throws Exception {
    InputStream inputStream = request.getData().get("inputStream");
    try {
        Long uploadFileTime = new Date().getTime();
        String randomName = SecurityHelper.generateRandomSequence(16);
        String temporaryUploadFileName = randomName + "." + uploadFileTime;
        OPFEngine.FileStoreService.upload(OpenFlame.FILESTORE_BASE, temporaryUploadFileName, inputStream,
                helper.getTemp());

        String contentXmlUploadedFile = null;
        String resourceZipUploadedFile = null;
        ZipInputStream zipFile = new ZipInputStream(OPFEngine.FileStoreService
                .download(OpenFlame.FILESTORE_BASE, temporaryUploadFileName, helper.getTemp()));
        try {
            ZipEntry entry;
            while ((entry = zipFile.getNextEntry()) != null) {
                if (PackageFileType.CONTENT_XML.getOfrFileName().equals(entry.getName())) {
                    contentXmlUploadedFile = PackageFileType.CONTENT_XML.name() + randomName + "."
                            + uploadFileTime;
                    OPFEngine.FileStoreService.upload(OpenFlame.FILESTORE_BASE, contentXmlUploadedFile, zipFile,
                            helper.getTemp());
                } else if (PackageFileType.RESOURCE_ZIP.getOfrFileName().equals(entry.getName())) {
                    resourceZipUploadedFile = PackageFileType.RESOURCE_ZIP.name() + randomName + "."
                            + uploadFileTime;
                    OPFEngine.FileStoreService.upload(OpenFlame.FILESTORE_BASE, resourceZipUploadedFile, zipFile,
                            helper.getTemp());
                }
            }
        } catch (IOException e) {
            throw new BusinessFunctionException(e.getMessage());
        } finally {
            zipFile.close();
        }

        InputStream contentXml = OPFEngine.FileStoreService.download(OpenFlame.FILESTORE_BASE,
                contentXmlUploadedFile, helper.getTemp());
        InputStream resourceZip = OPFEngine.FileStoreService.download(OpenFlame.FILESTORE_BASE,
                resourceZipUploadedFile, helper.getTemp());

        importContentProcessor.importContent(contentXml, resourceZip);

        IOUtils.closeQuietly(contentXml);
        IOUtils.closeQuietly(resourceZip);
    } catch (IOException e) {
        throw new BusinessFunctionException(e.getMessage());
    } catch (JAXBException e) {
        throw new BusinessFunctionException(e.getMessage());
    }
    return new ServiceResponse("Content Archive has uploaded successfully.", true);
}