List of usage examples for the java.util.zip ZipEntry constructor
public ZipEntry(ZipEntry e)
From source file:com.mweagle.tereus.commands.evaluation.common.LambdaUtils.java
protected void createStableZip(ZipOutputStream zipOS, Path parentDirectory, Path archiveRoot, MessageDigest md) throws IOException { // Sort & zip files final List<Path> childDirectories = new ArrayList<>(); final List<Path> childFiles = new ArrayList<>(); DirectoryStream<Path> dirStream = Files.newDirectoryStream(parentDirectory); for (Path eachChild : dirStream) { if (Files.isDirectory(eachChild)) { childDirectories.add(eachChild); } else {/*from w ww . j ava2 s. com*/ childFiles.add(eachChild); } } final int archiveRootLength = archiveRoot.toAbsolutePath().toString().length() + 1; childFiles.stream().sorted().forEach(eachPath -> { final String zeName = eachPath.toAbsolutePath().toString().substring(archiveRootLength); try { final ZipEntry ze = new ZipEntry(zeName); zipOS.putNextEntry(ze); Files.copy(eachPath, zipOS); md.update(Files.readAllBytes(eachPath)); zipOS.closeEntry(); } catch (IOException ex) { throw new RuntimeException(ex.getMessage()); } }); childDirectories.stream().sorted().forEach(eachPath -> { try { createStableZip(zipOS, eachPath, archiveRoot, md); } catch (IOException ex) { throw new RuntimeException(ex.getMessage()); } }); }
From source file:au.org.ala.layers.web.IntersectService.java
@RequestMapping(value = WS_INTERSECT_BATCH_DOWNLOAD, method = RequestMethod.GET) public void batchDownload(@PathVariable("id") Long id, @RequestParam(value = "csv", required = false, defaultValue = "false") Boolean csv, HttpServletRequest request, HttpServletResponse response) { BatchConsumer.start(layerIntersectDao, userProperties.getProperty("batch_path")); try {//from www . ja va2 s.c o m Map map = new HashMap(); BatchProducer.addInfoToMap(userProperties.getProperty("batch_path"), String.valueOf(id), map); if (map.get("finished") != null) { OutputStream os = response.getOutputStream(); BufferedInputStream bis = new BufferedInputStream( new FileInputStream(userProperties.getProperty("batch_path") + File.separator + id + File.separator + "sample.csv")); if (!csv) { ZipOutputStream zip = new ZipOutputStream(os); zip.putNextEntry(new ZipEntry("sample.csv")); os = zip; } byte[] buffer = new byte[4096]; int size; while ((size = bis.read(buffer)) > 0) { os.write(buffer, 0, size); } bis.close(); os.close(); } } catch (Exception e) { e.printStackTrace(); } return; }
From source file:io.fabric8.maven.generator.springboot.SpringBootGenerator.java
/**
 * Builds a STORED (uncompressed) ZipEntry for {@code file} under the archive
 * path {@code fullPath}. STORED entries must declare size, compressed size and
 * CRC-32 up front, so the file is scanned once to compute them.
 *
 * @param file     file whose size/CRC are measured (contents are not written here)
 * @param fullPath entry name inside the archive
 * @return a fully-populated entry ready for ZipOutputStream.putNextEntry
 * @throws IOException if the file cannot be read
 */
private ZipEntry createZipEntry(File file, String fullPath) throws IOException {
    ZipEntry entry = new ZipEntry(fullPath);
    byte[] buffer = new byte[8192];
    CRC32 crc = new CRC32();
    // long accumulator: the original used int, which overflows for files >= 2 GiB
    // and would stamp a corrupt size on the STORED entry
    long size = 0;
    try (InputStream is = new FileInputStream(file)) {
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1) {
            crc.update(buffer, 0, bytesRead);
            size += bytesRead;
        }
    }
    entry.setSize(size);
    // STORED means no compression, so compressed size equals the raw size
    entry.setCompressedSize(size);
    entry.setCrc(crc.getValue());
    entry.setMethod(ZipEntry.STORED);
    return entry;
}
From source file:com.github.sampov2.OneJarMojo.java
/**
 * Assembles the one-jar archive layout into {@code out}: the built main jar
 * under main/, dependency jars (including transitive, excluding system scope)
 * under lib/, system-scope dependency jars also under lib/, configured native
 * libraries under binlib/, and an optional splash-screen image at the root.
 *
 * @param out the jar output stream being assembled
 * @throws IOException            on any read/write failure
 * @throws MojoExecutionException if a splash screen is configured in the pom
 *                                but the image file does not exist
 */
private void addFilesToArchive(JarOutputStream out) throws IOException, MojoExecutionException {
    final List<File> dependencyJars = Collections.unmodifiableList(extractDependencyFiles(artifacts));
    final List<File> systemDependencyJars = Collections
            .unmodifiableList(extractSystemDependencyFiles(dependencies));

    // Main jar
    debug("Adding main jar main/[%s]", mainJarFilename);
    addToZip(new File(outputDirectory, mainJarFilename), "main/", out);

    // Add all dependencies, including transient dependencies, but excluding system scope dependencies
    debug("Adding [%s] dependency libraries...", dependencyJars.size());
    for (File jar : dependencyJars) {
        addToZip(jar, "lib/", out);
    }

    // Add system scope dependencies
    debug("Adding [%s] system dependency libraries...", systemDependencyJars.size());
    for (File jar : systemDependencyJars) {
        addToZip(jar, "lib/", out);
    }

    // Add native libraries
    if (binlibs != null) {
        for (FileSet eachFileSet : binlibs) {
            List<File> includedFiles = toFileList(eachFileSet);
            debug("Adding [%s] native libraries...", includedFiles.size());
            for (File eachIncludedFile : includedFiles) {
                addToZip(eachIncludedFile, "binlib/", out);
            }
        }
    }

    // Add splash screen image
    if (splashScreen != null) {
        File splashFile = new File(project.getBasedir(), splashScreen);
        if (splashFile.exists()) {
            debug("Adding splash screen image [%s]", splashScreen);
            // NOTE(review): this FileInputStream is handed to addToZip; presumably
            // that overload closes it — confirm, otherwise it leaks.
            addToZip(out, new ZipEntry(splashScreen), new FileInputStream(splashFile));
        } else {
            throw new MojoExecutionException("Could not find splash screen image defined in pom.");
        }
    }
}
From source file:com.panet.imeta.trans.steps.xmloutput.XMLOutput.java
/**
 * Opens the next XML output file for this step, optionally zipped, writes the
 * XML declaration and the opening main element, and leaves {@code data.writer}
 * ready for row output. Registers the file in the transformation's result
 * files when configured. Increments {@code data.splitnr} whether or not the
 * open succeeded.
 *
 * @return true if the file was opened and the header written; false if any
 *         exception occurred (the error is logged, not rethrown)
 */
public boolean openNewFile() {
    boolean retval = false;
    data.writer = null;
    try {
        FileObject file = KettleVFS.getFileObject(buildFilename(true));

        if (meta.isAddToResultFiles()) {
            // Add this to the result file names...
            ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, file, getTransMeta().getName(),
                    getStepname());
            resultFile.setComment("This file was created with a xml output step");
            addResultFile(resultFile);
        }

        OutputStream outputStream;
        if (meta.isZipped()) {
            // Wrap the target in a zip stream containing a single entry whose
            // name is the unsplit filename (buildFilename(false)).
            OutputStream fos = KettleVFS.getOutputStream(file, false);
            data.zip = new ZipOutputStream(fos);
            File entry = new File(buildFilename(false));
            ZipEntry zipentry = new ZipEntry(entry.getName());
            zipentry.setComment("Compressed by Kettle");
            data.zip.putNextEntry(zipentry);
            outputStream = data.zip;
        } else {
            OutputStream fos = KettleVFS.getOutputStream(file, false);
            outputStream = fos;
        }

        // Choose the writer encoding: configured encoding if present,
        // otherwise the JVM default writer with Kettle's default XML header.
        if (meta.getEncoding() != null && meta.getEncoding().length() > 0) {
            log.logBasic(toString(), "Opening output stream in encoding: " + meta.getEncoding());
            data.writer = new OutputStreamWriter(outputStream, meta.getEncoding());
            data.writer.write(XMLHandler.getXMLHeader(meta.getEncoding()).toCharArray());
        } else {
            log.logBasic(toString(), "Opening output stream in default encoding : " + Const.XML_ENCODING);
            // NOTE(review): header claims Const.XML_ENCODING but the writer uses
            // the platform default charset — they may disagree; confirm intent.
            data.writer = new OutputStreamWriter(outputStream);
            data.writer.write(XMLHandler.getXMLHeader(Const.XML_ENCODING).toCharArray());
        }

        // Add the name space if defined
        StringBuffer nameSpace = new StringBuffer();
        if ((meta.getNameSpace() != null) && (!"".equals(meta.getNameSpace()))) {
            nameSpace.append(" xmlns=\"");
            nameSpace.append(meta.getNameSpace());
            nameSpace.append("\"");
        }

        // OK, write the header & the parent element:
        data.writer.write(("<" + meta.getMainElement() + nameSpace.toString() + ">" + Const.CR).toCharArray());

        retval = true;
    } catch (Exception e) {
        logError("Error opening new file : " + e.toString());
    }
    data.splitnr++;
    return retval;
}
From source file:io.frictionlessdata.datapackage.Package.java
private void saveZip(String outputFilePath) throws IOException, DataPackageException { try (FileOutputStream fos = new FileOutputStream(outputFilePath)) { try (BufferedOutputStream bos = new BufferedOutputStream(fos)) { try (ZipOutputStream zos = new ZipOutputStream(bos)) { // File is not on the disk, test.txt indicates // only the file name to be put into the zip. ZipEntry entry = new ZipEntry("datapackage.json"); zos.putNextEntry(entry); zos.write(this.getJson().toString(JSON_INDENT_FACTOR).getBytes()); zos.closeEntry();//from w w w . j ava 2 s .c o m } } } }
From source file:ZipImploder.java
/**
 * Recursively walks {@code dir}, optionally emitting a directory entry for it
 * (when {@code includeDirs} is set and the path is below {@code baseDir}),
 * then dispatching children to {@link #processDir} or {@code processFile}.
 * Increments {@code dirCount} once per directory visited.
 *
 * @param zos archive being written
 * @param dir directory to walk
 * @throws IOException if the path cannot be canonicalized or listed
 */
public void processDir(ZipOutputStream zos, File dir) throws IOException {
    String path = dir.getCanonicalPath();
    // Normalize Windows separators so entry names always use '/'
    path = path.replace('\\', '/');
    if (includeDirs) {
        if (baseDir == null || path.length() > baseDir.length()) {
            String xpath = removeDrive(removeLead(path));
            if (xpath.length() > 0) {
                // Trailing '/' marks the entry as a directory in the zip format
                xpath += '/';
                ZipEntry ze = new ZipEntry(xpath);
                zos.putNextEntry(ze);
            }
        }
    }
    dirCount++;
    String[] files = dir.list();
    if (files == null) {
        // File.list() returns null on I/O error or if the directory vanished;
        // the original dereferenced it and threw a bare NullPointerException.
        throw new IOException("Unable to list directory: " + path);
    }
    for (String file : files) {
        File f = new File(dir, file);
        if (f.isDirectory()) {
            processDir(zos, f);
        } else {
            processFile(zos, f);
        }
    }
}
From source file:ezbake.deployer.publishers.EzAzkabanPublisher.java
/**
 * This will publish the artifact to Azkaban for scheduled running.
 * <p/>
 * The artifact at this point in time will already have included the SSL certs.
 * <p/>
 * Its up to the publisher to reorganize the tar file if needed for its PaaS
 * <p/>
 * Flow: authenticate to Azkaban; unzip the artifact; build an upload .zip
 * (configs at the top level, bin/ jars rewritten into lib/ with SSL certs and
 * an adjusted application.properties embedded in each jar, plus a generated
 * .job file when none is present); create-or-reuse the Azkaban project;
 * upload the .zip; schedule the flow.
 *
 * @param artifact The artifact to deploy
 * @param callerToken - The token of the user or application that initiated this call
 * @throws DeploymentException - On any exceptions
 */
@Override
public void publish(DeploymentArtifact artifact, EzSecurityToken callerToken) throws DeploymentException {
    File unzippedPack = null;
    File azkabanZip = null;
    ZipOutputStream zipOutputStream = null;
    String flowName;
    final BatchJobInfo jobInfo = artifact.getMetadata().getManifest().getBatchJobInfo();

    // Get the Azkaban authentication token
    final AuthenticationResult authenticatorResult;
    try {
        authenticatorResult = new AuthenticationManager(new URI(azConf.getAzkabanUrl()), azConf.getUsername(),
                azConf.getPassword()).login();
    } catch (URISyntaxException e) {
        // NOTE(review): cause is dropped here (message only) — stack trace is lost.
        throw new DeploymentException(e.getMessage());
    }

    if (authenticatorResult.hasError()) {
        log.error("Could not log into Azkaban: " + authenticatorResult.getError());
        throw new DeploymentException(authenticatorResult.getError());
    }

    log.info("Successfully logged into Azkaban. Now creating .zip to upload");

    try {
        // Unzip the artifact
        unzippedPack = UnzipUtil.unzip(new File(unzipDir), ByteBuffer.wrap(artifact.getArtifact()));
        log.info("Unzipped artifact to: " + unzippedPack.getAbsolutePath());

        // Create a .zip file to submit to Azkaban
        azkabanZip = File.createTempFile("ezbatch_", ".zip");
        log.info("Created temporary zip file: " + azkabanZip.getCanonicalPath());
        zipOutputStream = new ZipOutputStream(new FileOutputStream(azkabanZip));

        // Copy the configs from the artifact to the top level of the zip.
        // This should contain the Azkaban .jobs and .properties
        final String configDir = UnzipUtil.getConfDirectory(unzippedPack).get();
        final File configDirFile = new File(configDir);
        for (File f : FileUtils.listFiles(configDirFile, TrueFileFilter.TRUE, TrueFileFilter.TRUE)) {
            zipOutputStream.putNextEntry(new ZipArchiveEntry(f.getCanonicalPath().replaceFirst(configDir, "")));
            // NOTE(review): these FileInputStreams are never closed — handle leak.
            IOUtils.copy(new FileInputStream(f), zipOutputStream);
            zipOutputStream.closeEntry();
        }
        log.info("Copied configs to the .zip");

        // Copy the jars from bin/ in the artifact to lib/ in the .zip file and
        // other things to the jar as needed
        final String dirPrefix = unzippedPack.getAbsolutePath() + "/bin/";
        for (File f : FileUtils.listFiles(new File(dirPrefix), TrueFileFilter.TRUE, TrueFileFilter.TRUE)) {
            zipOutputStream
                    .putNextEntry(new ZipArchiveEntry(f.getCanonicalPath().replaceFirst(dirPrefix, "lib/")));
            // Re-write each bin/ jar entry-by-entry so extra files can be appended.
            final JarInputStream jarInputStream = new JarInputStream(new FileInputStream(f));
            final JarOutputStream jarOutputStream = new JarOutputStream(zipOutputStream);
            JarEntry je;
            while ((je = jarInputStream.getNextJarEntry()) != null) {
                jarOutputStream.putNextEntry(je);
                IOUtils.copy(jarInputStream, jarOutputStream);
                jarOutputStream.closeEntry();
            }
            log.info("Created Jar file");

            // Add the SSL certs to the jar
            final String sslPath = UnzipUtil.getSSLPath(configDirFile).get();
            for (File sslFile : FileUtils.listFiles(new File(sslPath), TrueFileFilter.TRUE,
                    TrueFileFilter.TRUE)) {
                if (sslFile.isFile()) {
                    jarOutputStream.putNextEntry(new JarArchiveEntry("ssl/" + sslFile.getName()));
                    IOUtils.copy(new FileInputStream(sslFile), jarOutputStream);
                    jarOutputStream.closeEntry();
                }
            }
            log.info("Added SSL certs to jar");

            // Add the application.properties to the jar file so the jobs can read it
            final File appProps = new File(configDir, "application.properties");
            final Properties adjustedProperties = new Properties();
            adjustedProperties.load(new FileInputStream(appProps));
            // Certs live at /ssl/ inside the rewritten jar, so point the config there.
            adjustedProperties.setProperty("ezbake.security.ssl.dir", "/ssl/");
            jarOutputStream.putNextEntry(new JarArchiveEntry("application.properties"));
            adjustedProperties.store(jarOutputStream, null);
            jarOutputStream.closeEntry();
            // finish() (not close()) so the enclosing zipOutputStream stays open.
            jarOutputStream.finish();
            zipOutputStream.closeEntry();
        }

        // Check to see if there are any .job files. If there aren't, this is an
        // external job and we need to create one for the .zip file
        final Collection<File> jobFiles = FileUtils.listFiles(configDirFile, new String[] { "job" }, false);
        if (jobFiles.isEmpty()) {
            // If there are no job files present then we need to create one for the user
            final StringBuilder sb = new StringBuilder(
                    "type=hadoopJava\n" + "job.class=ezbatch.amino.api.EzFrameworkDriver\n"
                            + "classpath=./lib/*\n" + "main.args=-d /ezbatch/amino/config");
            for (File xmlConfig : FileUtils.listFiles(configDirFile, new String[] { "xml" }, false)) {
                sb.append(" -c ").append(xmlConfig.getName());
            }
            zipOutputStream.putNextEntry(new ZipEntry("Analytic.job"));
            IOUtils.copy(new StringReader(sb.toString()), zipOutputStream);
            zipOutputStream.closeEntry();
            log.info("There was no .job file so one was created for the .zip");
            flowName = "Analytic";
        } else {
            flowName = jobInfo.getFlowName();
            if (flowName == null) {
                log.warn("Manifest did not contain flow_name. Guessing what it should be");
                flowName = FilenameUtils.getBaseName(jobFiles.toArray(new File[jobFiles.size()])[0].getName());
                log.info("Guessing the flow name should be:" + flowName);
            }
        }

        zipOutputStream.finish();
        log.info("Finished creating .zip");

        // Now that we've created the zip to upload, attempt to create a project for
        // it to be uploaded to. Every .zip file needs to be uploaded to a project,
        // and the project may or may not already exist.
        final String projectName = ArtifactHelpers.getAppId(artifact) + "_"
                + ArtifactHelpers.getServiceId(artifact);
        final ProjectManager projectManager = new ProjectManager(authenticatorResult.getSessionId(),
                new URI(azConf.getAzkabanUrl()));
        final ManagerResult managerResult = projectManager.createProject(projectName, "EzBatch Deployed");

        // If the project already exists, it will return an error, but really it's not a problem
        if (managerResult.hasError()) {
            if (!managerResult.getMessage().contains("already exists")) {
                log.error("Could not create project: " + managerResult.getMessage());
                throw new DeploymentException(managerResult.getMessage());
            } else {
                log.info("Reusing the existing project: " + projectName);
            }
        } else {
            log.info("Created new project: " + projectName);
            log.info("Path: " + managerResult.getPath());
        }

        // Upload the .zip file to the project
        final UploadManager uploader = new UploadManager(authenticatorResult.getSessionId(),
                azConf.getAzkabanUrl(), projectName, azkabanZip);
        final UploaderResult uploaderResult = uploader.uploadZip();
        if (uploaderResult.hasError()) {
            log.error("Could not upload the zip file: " + uploaderResult.getError());
            throw new DeploymentException(uploaderResult.getError());
        }
        log.info("Successfully submitted zip file to Azkaban");

        // Schedule the jar to run. If the start times aren't provided, it will run in 2 minutes
        final ScheduleManager scheduler = new ScheduleManager(authenticatorResult.getSessionId(),
                new URI(azConf.getAzkabanUrl()));

        // Add the optional parameters if they are present
        if (jobInfo.isSetStartDate()) {
            scheduler.setScheduleDate(jobInfo.getStartDate());
        }

        if (jobInfo.isSetStartTime()) {
            scheduler.setScheduleTime(jobInfo.getStartTime());
        }

        if (jobInfo.isSetRepeat()) {
            scheduler.setPeriod(jobInfo.getRepeat());
        }

        final SchedulerResult schedulerResult = scheduler.scheduleFlow(projectName, flowName,
                uploaderResult.getProjectId());
        if (schedulerResult.hasError()) {
            log.error("Failure to schedule job: " + schedulerResult.getError());
            throw new DeploymentException(schedulerResult.getError());
        }

        log.info("Successfully scheduled flow: " + flowName);
    } catch (Exception ex) {
        log.error("No Nos!", ex);
        // NOTE(review): cause dropped again — only the message survives.
        throw new DeploymentException(ex.getMessage());
    } finally {
        IOUtils.closeQuietly(zipOutputStream);
        FileUtils.deleteQuietly(azkabanZip);
        FileUtils.deleteQuietly(unzippedPack);
    }
}
From source file:ZipTransformTest.java
public void testStringZipEntryTransformerInStream() throws IOException { final String name = "foo"; String FILE_CONTENTS = "bar"; final byte[] contents = FILE_CONTENTS.getBytes(); File file1 = File.createTempFile("temp", null); File file2 = File.createTempFile("temp", null); try {/*from w w w. ja va 2s. c o m*/ // Create the ZIP file ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(file1)); try { zos.putNextEntry(new ZipEntry(name)); zos.write(contents); zos.closeEntry(); } finally { IOUtils.closeQuietly(zos); } // Transform the ZIP file FileInputStream in = null; FileOutputStream out = null; try { in = new FileInputStream(file1); out = new FileOutputStream(file2); ZipUtil.transformEntry(in, name, new StringZipEntryTransformer("UTF-8") { protected String transform(ZipEntry zipEntry, String input) throws IOException { return input.toUpperCase(); } }, out); } finally { IOUtils.closeQuietly(in); IOUtils.closeQuietly(out); } // Test the ZipUtil byte[] actual = ZipUtil.unpackEntry(file2, name); assertEquals(FILE_CONTENTS.toUpperCase(), new String(actual)); } finally { FileUtils.deleteQuietly(file1); FileUtils.deleteQuietly(file2); } }
From source file:com.googlecode.clearnlp.component.AbstractStatisticalComponent.java
/**
 * Called by {@link AbstractStatisticalComponent#saveModels(ZipOutputStream)}.
 * Serializes each statistical model into its own zip entry, named
 * {@code entryName} followed by the model's index ("name0", "name1", ...).
 */
protected void saveStatisticalModels(ZipOutputStream zout, String entryName) throws Exception {
    for (int index = 0; index < s_models.length; index++) {
        zout.putNextEntry(new ZipEntry(entryName + index));
        PrintStream modelOut = UTOutput.createPrintBufferedStream(zout);
        s_models[index].save(modelOut);
        // Flush (not close): closing the PrintStream would close the shared zip stream.
        modelOut.flush();
        zout.closeEntry();
    }
}