List of usage examples for java.util.zip ZipInputStream read
public int read(byte b[]) throws IOException — Reads up to b.length bytes of data from this input stream into an array of bytes. From source file: org.pentaho.hadoop.shim.common.DistributedCacheUtilImpl.java
/** * Extract a zip archive to a directory. * * @param archive Zip archive to extract * @param dest Destination directory. This must not exist! * @return Directory the zip was extracted into * @throws IllegalArgumentException when the archive file does not exist or the destination directory already exists * @throws IOException/*from w w w.j a v a2 s. c om*/ * @throws KettleFileException */ public FileObject extract(FileObject archive, FileObject dest) throws IOException, KettleFileException { if (!archive.exists()) { throw new IllegalArgumentException("archive does not exist: " + archive.getURL().getPath()); } if (dest.exists()) { throw new IllegalArgumentException("destination already exists"); } dest.createFolder(); try { byte[] buffer = new byte[DEFAULT_BUFFER_SIZE]; int len = 0; ZipInputStream zis = new ZipInputStream(archive.getContent().getInputStream()); try { ZipEntry ze; while ((ze = zis.getNextEntry()) != null) { FileObject entry = KettleVFS.getFileObject(dest + Const.FILE_SEPARATOR + ze.getName()); FileObject parent = entry.getParent(); if (parent != null) { parent.createFolder(); } if (ze.isDirectory()) { entry.createFolder(); continue; } OutputStream os = KettleVFS.getOutputStream(entry, false); try { while ((len = zis.read(buffer)) > 0) { os.write(buffer, 0, len); } } finally { if (os != null) { os.close(); } } } } finally { if (zis != null) { zis.close(); } } } catch (Exception ex) { // Try to clean up the temp directory and all files if (!deleteDirectory(dest)) { throw new KettleFileException("Could not clean up temp dir after error extracting", ex); } throw new KettleFileException("error extracting archive", ex); } return dest; }
From source file:org.wso2.carbon.server.util.Utils.java
public static void extractFromStream(InputStream inputStream, String extractDir) throws IOException { ZipInputStream zin = null; try {//w w w. j a v a 2s . com File unzipped = new File(extractDir); // Open the ZIP file zin = new ZipInputStream(inputStream); if (!unzipped.mkdirs()) { throw new IOException("Fail to create the directory: " + unzipped.getAbsolutePath()); } ZipEntry entry; while ((entry = zin.getNextEntry()) != null) { String entryName = entry.getName(); File f = new File(extractDir + File.separator + entryName); if (entryName.endsWith("/") && !f.exists()) { // this is a // directory if (!f.mkdirs()) { throw new IOException("Fail to create the directory: " + f.getAbsolutePath()); } else { continue; } } // This is a file. Carry out File processing int lastIndexOfSlash = entryName.lastIndexOf('/'); String dirPath = ""; if (lastIndexOfSlash != -1) { dirPath = entryName.substring(0, lastIndexOfSlash); File dir = new File(extractDir + File.separator + dirPath); if (!dir.exists() && !dir.mkdirs()) { throw new IOException("Failed to create the directory: " + dir.getAbsoluteFile()); } } if (!f.isDirectory()) { OutputStream out = new FileOutputStream(f); byte[] buf = new byte[40960]; // Transfer bytes from the ZIP file to the output file int len; while ((len = zin.read(buf)) > 0) { out.write(buf, 0, len); } out.close(); } } } catch (IOException e) { String msg = "Cannot unzip archive. It is probably corrupt"; System.out.println(msg); throw e; } finally { try { if (zin != null) { zin.close(); } } catch (IOException e) { e.printStackTrace(); } } }
From source file:au.org.ala.layers.dao.ObjectDAOImpl.java
@Override public void streamObjectsGeometryById(OutputStream os, String id, String geomtype) throws IOException { logger.info("Getting object info for id = " + id + " and geometry as " + geomtype); String sql = ""; if ("kml".equals(geomtype)) { sql = "SELECT ST_AsKml(the_geom) as geometry, name, \"desc\" as description FROM objects WHERE pid=?;"; } else if ("wkt".equals(geomtype)) { sql = "SELECT ST_AsText(the_geom) as geometry FROM objects WHERE pid=?;"; } else if ("geojson".equals(geomtype)) { sql = "SELECT ST_AsGeoJSON(the_geom) as geometry FROM objects WHERE pid=?;"; } else if ("shp".equals(geomtype)) { sql = "SELECT ST_AsText(the_geom) as geometry, name, \"desc\" as description FROM objects WHERE pid=?;"; }// ww w. j ava2 s . c om List<Objects> l = jdbcTemplate.query(sql, ParameterizedBeanPropertyRowMapper.newInstance(Objects.class), id); if (l.size() > 0) { if ("shp".equals(geomtype)) { String wkt = l.get(0).getGeometry(); File zippedShapeFile = SpatialConversionUtils.buildZippedShapeFile(wkt, id, l.get(0).getName(), l.get(0).getDescription()); FileUtils.copyFile(zippedShapeFile, os); } else if ("kml".equals(geomtype)) { os.write(KML_HEADER.replace("<name></name>", "<name><![CDATA[" + l.get(0).getName() + "]]></name>") .replace("<description></description>", "<description><![CDATA[" + l.get(0).getDescription() + "]]></description>") .getBytes()); os.write(l.get(0).getGeometry().getBytes()); os.write(KML_FOOTER.getBytes()); } else { os.write(l.get(0).getGeometry().getBytes()); } } else { // get grid classes if (id.length() > 0) { // grid class pids are, 'layerPid:gridClassNumber' try { String[] s = id.split(":"); if (s.length >= 2) { int n = Integer.parseInt(s[1]); IntersectionFile f = layerIntersectDao.getConfig().getIntersectionFile(s[0]); if (f != null && f.getClasses() != null) { GridClass gc = f.getClasses().get(n); if (gc != null && ("kml".equals(geomtype) || "wkt".equals(geomtype) || "geojson".equals(geomtype) || "shp".equals(geomtype))) { // TODO: 
enable for type 'a' after // implementation of fields table defaultLayer // field File file = new File( f.getFilePath() + File.separator + s[1] + "." + geomtype + ".zip"); if ((f.getType().equals("a") || s.length == 2) && file.exists()) { ZipInputStream zis = null; try { zis = new ZipInputStream(new FileInputStream(file)); zis.getNextEntry(); byte[] buffer = new byte[1024]; int size; while ((size = zis.read(buffer)) > 0) { os.write(buffer, 0, size); } } catch (Exception e) { logger.error(e.getMessage(), e); } finally { if (zis != null) { try { zis.close(); } catch (Exception e) { logger.error(e.getMessage(), e); } } } } else { // polygon BufferedInputStream bis = null; InputStreamReader isr = null; try { String[] cells = null; HashMap<String, Object> map = s.length == 2 ? null : getGridIndexEntry(f.getFilePath() + File.separator + s[1], s[2]); String wkt = null; if (map != null) { cells = new String[] { s[2], String.valueOf(map.get("charoffset")) }; if (cells != null) { // get polygon wkt string File file2 = new File( f.getFilePath() + File.separator + s[1] + ".wkt"); bis = new BufferedInputStream(new FileInputStream(file2)); isr = new InputStreamReader(bis); isr.skip(Long.parseLong(cells[1])); char[] buffer = new char[1024]; int size; StringBuilder sb = new StringBuilder(); sb.append("POLYGON"); int end = -1; while (end < 0 && (size = isr.read(buffer)) > 0) { sb.append(buffer, 0, size); end = sb.toString().indexOf("))"); } end += 2; wkt = sb.toString().substring(0, end); } } else { wkt = gc.getBbox(); } if (geomtype.equals("wkt")) { os.write(wkt.getBytes()); } else { WKTReader r = new WKTReader(); Geometry g = r.read(wkt); if (geomtype.equals("kml")) { os.write(KML_HEADER.getBytes()); Encoder encoder = new Encoder(new KMLConfiguration()); encoder.setIndenting(true); encoder.encode(g, KML.Geometry, os); os.write(KML_FOOTER.getBytes()); } else if (geomtype.equals("geojson")) { FeatureJSON fjson = new FeatureJSON(); final SimpleFeatureType TYPE = 
DataUtilities.createType("class", "the_geom:MultiPolygon,name:String"); SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder( TYPE); featureBuilder.add(g); featureBuilder.add(gc.getName()); fjson.writeFeature(featureBuilder.buildFeature(null), os); } else if (geomtype == "shp") { File zippedShapeFile = SpatialConversionUtils .buildZippedShapeFile(wkt, id, gc.getName(), null); FileUtils.copyFile(zippedShapeFile, os); } } } catch (Exception e) { logger.error(e.getMessage(), e); } finally { if (bis != null) { try { bis.close(); } catch (Exception e) { logger.error(e.getMessage(), e); } } if (isr != null) { try { isr.close(); } catch (Exception e) { logger.error(e.getMessage(), e); } } } } } } } } catch (Exception e) { logger.error(e.getMessage(), e); } } } }
From source file:org.wso2.carbon.pc.core.transfer.ProcessImport.java
/** * @param processZipInputStream process ZIP input stream * @param user username/*from w ww . ja v a 2 s. c o m*/ * @throws IOException * @throws RegistryException * @throws ProcessCenterException * @throws UserStoreException */ public String importProcesses(InputStream processZipInputStream, String user) throws IOException, RegistryException, ProcessCenterException, UserStoreException, JSONException, TransformerException, SAXException, ParserConfigurationException { JSONObject response = new JSONObject(); if (registryService != null) { reg = registryService.getGovernanceUserRegistry(user); this.user = user; //extract zip file stream to the system disk byte[] buffer = new byte[2048]; ZipInputStream zipInputStream = new ZipInputStream(processZipInputStream); File importsDir = new File(ProcessCenterConstants.IMPORTS_DIR); try { if (importsDir != null) { importsDir.mkdirs(); try { ZipEntry entry; while ((entry = zipInputStream.getNextEntry()) != null) { //counter++; String outpath = ProcessCenterConstants.IMPORTS_DIR + "/" + entry.getName(); String dirPath = outpath.substring(0, outpath.lastIndexOf("/")); new File(dirPath).mkdirs(); FileOutputStream fileOutputStream = null; try { fileOutputStream = new FileOutputStream(outpath); int len = 0; while ((len = zipInputStream.read(buffer)) > 0) { fileOutputStream.write(buffer, 0, len); } } finally { if (fileOutputStream != null) { fileOutputStream.close(); } } } } finally { zipInputStream.close(); } File folder = new File(ProcessCenterConstants.IMPORTS_DIR); File[] listOfFiles = folder.listFiles(); if (listOfFiles != null) { if (listOfFiles[0].isDirectory() && listOfFiles.length == 1) { String zipHomeDirectoryName = listOfFiles[0].getPath(); File zipFolder = new File(zipHomeDirectoryName); listOfProcessDirs = zipFolder.listFiles(); } } //else do the process importing for each process for (File processDir : listOfProcessDirs) { if (processDir.isDirectory()) { String processDirName = processDir.getName(); String 
processDirPath = processDir.getPath(); String processRxtPath = processDirPath + "/" + ProcessCenterConstants.EXPORTED_PROCESS_RXT_FILE; String processName = processDirName.substring(0, processDirName.lastIndexOf("-")); String processVersion = processDirName.substring(processDirName.lastIndexOf("-") + 1, processDirName.length()); String processAssetPath = ProcessCenterConstants.PROCESS_ASSET_ROOT + processName + "/" + processVersion; if (registryService != null) { UserRegistry reg = registryService.getGovernanceUserRegistry(user); String processPath = "processes/" + processName + "/" + processVersion; // Check whether process already exists with same name and version if (!reg.resourceExists(processPath)) { putProcessRxt(processRxtPath, processAssetPath); GovernanceUtils.associateAspect(processAssetPath, ProcessCenterConstants.DEFAULT_LIFECYCLE_NAME, reg); setImageThumbnail(processDirPath, processAssetPath); setProcessDocuments(processName, processVersion, processDirPath, processAssetPath); setProcessTags(processDirPath, processAssetPath); setProcessText(processName, processVersion, processDirPath, processAssetPath); setBPMN(processName, processVersion, processDirPath, processAssetPath); setFlowChart(processName, processVersion, processDirPath, processAssetPath); setProcessAssociations(processName, processVersion, processDirPath, processAssetPath); } } } } } else { String errMsg = "Process Importing failed due to failure in creating Imports directory"; throw new ProcessCenterException(errMsg); } } finally { //Finally remove the Imports folder if (importsDir.exists()) { FileUtils.deleteDirectory(importsDir); } } } else { String errMsg = "Process Importing failed due to unavailability of Registry Service"; throw new ProcessCenterException(errMsg); } return response.toString(); }
From source file:com.taobao.android.builder.tools.sign.LocalSignedJarBuilder.java
/** * Copies the content of a Jar/Zip archive into the receiver archive. * <p/>An optional {@link IZipEntryFilter} allows to selectively choose which files * to copy over.//www . j a v a2 s . c om * * @param input the {@link InputStream} for the Jar/Zip to copy. * @param filter the filter or <code>null</code> * @throws IOException * @throws SignedJarBuilder.IZipEntryFilter.ZipAbortException if the {@link IZipEntryFilter} filter indicated that the write * must be aborted. */ public void writeZip(InputStream input, IZipEntryFilter filter) throws IOException, IZipEntryFilter.ZipAbortException { ZipInputStream zis = new ZipInputStream(input); try { // loop on the entries of the intermediary package and put them in the final package. ZipEntry entry; while ((entry = zis.getNextEntry()) != null) { String name = entry.getName(); // do not take directories or anything inside a potential META-INF folder. if (entry.isDirectory()) { continue; } // ignore some of the content in META-INF/ but not all if (name.startsWith("META-INF/")) { // ignore the manifest file. String subName = name.substring(9); if ("MANIFEST.MF".equals(subName)) { int count; ByteArrayOutputStream out = new ByteArrayOutputStream(); while ((count = zis.read(buffer)) != -1) { out.write(buffer, 0, count); } ByteArrayInputStream swapStream = new ByteArrayInputStream(out.toByteArray()); Manifest manifest = new Manifest(swapStream); mManifest.getMainAttributes().putAll(manifest.getMainAttributes()); continue; } // special case for Maven meta-data because we really don't care about them in apks. if (name.startsWith("META-INF/maven/")) { continue; } // check for subfolder int index = subName.indexOf('/'); if (index == -1) { // no sub folder, ignores signature files. 
if (subName.endsWith(".SF") || name.endsWith(".RSA") || name.endsWith(".DSA")) { continue; } } } // if we have a filter, we check the entry against it if (filter != null && !filter.checkEntry(name)) { continue; } JarEntry newEntry; // Preserve the STORED method of the input entry. if (entry.getMethod() == JarEntry.STORED) { newEntry = new JarEntry(entry); } else { // Create a new entry so that the compressed len is recomputed. newEntry = new JarEntry(name); } writeEntry(zis, newEntry); zis.closeEntry(); } } finally { zis.close(); } }
From source file:org.apache.axis2.deployment.repository.util.ArchiveReader.java
/** * Creates service objects from wsdl file inside a service archive file. * * @param file <code>ArchiveFileData</code> * @throws DeploymentException <code>DeploymentException</code> *//* www . ja v a 2 s . co m*/ public HashMap<String, AxisService> processWSDLs(DeploymentFileData file) throws DeploymentException { File serviceFile = file.getFile(); // to store service come from wsdl files HashMap<String, AxisService> servicesMap = new HashMap<String, AxisService>(); boolean isDirectory = serviceFile.isDirectory(); if (isDirectory) { try { File metaInfFolder = new File(serviceFile, META_INF); if (!metaInfFolder.exists()) { metaInfFolder = new File(serviceFile, META_INF.toLowerCase()); if (!metaInfFolder.exists()) { throw new DeploymentException( Messages.getMessage(DeploymentErrorMsgs.META_INF_MISSING, serviceFile.getName())); } } processFilesInFolder(metaInfFolder, servicesMap); } catch (FileNotFoundException e) { throw new DeploymentException(e); } catch (IOException e) { throw new DeploymentException(e); } catch (XMLStreamException e) { throw new DeploymentException(e); } } else { ZipInputStream zin; FileInputStream fin; try { fin = new FileInputStream(serviceFile); zin = new ZipInputStream(fin); //TODO Check whether this WSDL is empty ZipEntry entry; byte[] buf = new byte[1024]; int read; ByteArrayOutputStream out; while ((entry = zin.getNextEntry()) != null) { String entryName = entry.getName().toLowerCase(); if (entryName.startsWith(META_INF.toLowerCase()) && entryName.endsWith(SUFFIX_WSDL)) { out = new ByteArrayOutputStream(); // we do not want to generate the services for the // imported wsdl of one file. if ((entryName.indexOf("/") != entryName.lastIndexOf("/")) || (entryName.indexOf("wsdl_") != -1)) { //only care abt the toplevel wsdl continue; } while ((read = zin.read(buf)) > 0) { out.write(buf, 0, read); } ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()); // now the question is which version of WSDL file this archive contains. 
// lets check the namespace of the root element and decide. But since we are // using axiom (dude, you are becoming handy here :)), we will not build the // whole thing. OMNamespace documentElementNS = ((OMElement) XMLUtils.toOM(in)).getNamespace(); if (documentElementNS != null) { WSDLToAxisServiceBuilder wsdlToAxisServiceBuilder; if (WSDL2Constants.WSDL_NAMESPACE.equals(documentElementNS.getNamespaceURI())) { // we have a WSDL 2.0 document here. wsdlToAxisServiceBuilder = new WSDL20ToAllAxisServicesBuilder( new ByteArrayInputStream(out.toByteArray())); wsdlToAxisServiceBuilder.setBaseUri(entryName); } else if (Constants.NS_URI_WSDL11.equals(documentElementNS.getNamespaceURI())) { wsdlToAxisServiceBuilder = new WSDL11ToAllAxisServicesBuilder( new ByteArrayInputStream(out.toByteArray())); ((WSDL11ToAxisServiceBuilder) wsdlToAxisServiceBuilder) .setDocumentBaseUri(entryName); } else { throw new DeploymentException(Messages.getMessage("invalidWSDLFound")); } List<AxisService> services = processWSDLFile(wsdlToAxisServiceBuilder, serviceFile, true, new ByteArrayInputStream(out.toByteArray()), entry.getName()); if (services != null) { for (AxisService axisService : services) { if (axisService != null) { servicesMap.put(axisService.getName(), axisService); } } } } } } try { zin.close(); } catch (IOException e) { log.info(e); } try { fin.close(); } catch (IOException e) { log.info(e); } } catch (FileNotFoundException e) { throw new DeploymentException(e); } catch (IOException e) { throw new DeploymentException(e); } catch (XMLStreamException e) { throw new DeploymentException(e); } } return servicesMap; }
From source file:org.craftercms.social.migration.controllers.MainController.java
protected void extractBuildInScripts(String application, ListView lstToAdd) throws MigrationException { CodeSource src = getClass().getProtectionDomain().getCodeSource(); List<String> list = new ArrayList<String>(); byte[] buffer = new byte[1024]; if (src != null) { try {/* w ww .ja va 2 s .c o m*/ URL jar = src.getLocation(); ZipInputStream zip = new ZipInputStream(jar.openStream()); ZipEntry ze = zip.getNextEntry(); if (ze == null) { //Running from IDE or Exploded Jar no need to extract! log.debug("Loading files from FS "); loadScripts(getClass().getResource("/" + application).getFile(), lstToAdd); } else { while (ze != null) { String entryName = ze.getName(); if (entryName.startsWith(application) && entryName.endsWith(".js")) { log.debug("Extracting {} ", entryName); final File extractFile = Paths.get(Paths .get(MigrationTool.systemProperties .getString("crafter" + "" + ".migration.profile.scripts")) .toFile().getParent(), entryName).toFile(); if (!extractFile.exists()) { extractFile.createNewFile(); FileOutputStream fos = new FileOutputStream(extractFile); int len; while ((len = zip.read(buffer)) > 0) { fos.write(buffer, 0, len); } fos.close(); } } ze = zip.getNextEntry(); } } } catch (IOException ex) { log.debug("Unable to load build in scripts", ex); } } else { loadBuildInScripts(application, lstToAdd); } }
From source file:org.wso2.carbon.appmgt.hostobjects.ThemeManagerHostObject.java
/** * Deploy the uploaded custom theme to the correct custom theme directory. Files with unsupported extensions will be * omitted./*from w w w. j a va 2 s .co m*/ * * @param themeFile Theme File in zip format * @param tenant Tenant Domain * @param themeType Theme Type (Default ,<assetType> e.g webapp) * @param whitelistedExt Whitelisted file extensions * @throws AppManagementException */ private static void deployCustomTheme(FileHostObject themeFile, String tenant, String themeType, Set<String> whitelistedExt) throws AppManagementException { if (log.isDebugEnabled()) { String msg = String.format("Deploy custom theme of type :%1s for tenant :%2s", themeType, tenant); log.debug(msg); } ZipInputStream zis = null; byte[] buffer = new byte[1024]; //check store theme directory exists Path themeDir = getStoreThemePath(); if (!Files.exists(themeDir)) { String msg = "Could not found directory :" + themeDir.toString(); handleException(msg); } Path themePath = getCustomThemePath(tenant, themeType); InputStream zipInputStream = null; try { zipInputStream = themeFile.getInputStream(); } catch (ScriptException e) { handleException("Error occurred while deploying custom theme file", e); } try { if (log.isDebugEnabled()) { String msg = String.format("Create custom theme dir :%s", themePath); log.debug(msg); } //create output directory if it is not exists if (!Files.exists(themePath)) { createDirectory(themePath); } if (log.isDebugEnabled()) { String msg = "Get zip file content and deploy"; log.debug(msg); } //get the zip file content zis = new ZipInputStream(zipInputStream); //get the zipped file list entry ZipEntry ze = zis.getNextEntry(); String ext = null; while (ze != null) { String fileName = ze.getName(); Path newFilePath = themePath.resolve(fileName); if (ze.isDirectory()) { if (!Files.exists(newFilePath)) { createDirectory(newFilePath); } } else { ext = FilenameUtils.getExtension(ze.getName()); if (whitelistedExt.contains(ext)) { //create all non exists folders //else you 
will hit FileNotFoundException for compressed folder Path parentDir = newFilePath.getParent(); if (!Files.exists(parentDir)) { createDirectory(parentDir); } FileOutputStream fos = new FileOutputStream(newFilePath.toFile()); int len; while ((len = zis.read(buffer)) > 0) { fos.write(buffer, 0, len); } fos.close(); } else { String msg = String.format( "Unsupported file is uploaded with custom theme by tenant %1s. File : %2s ", tenant, ze.getName()); log.warn(msg); } } ze = zis.getNextEntry(); } zis.closeEntry(); zis.close(); } catch (IOException e) { handleException("Failed to deploy custom theme", e); } finally { IOUtils.closeQuietly(zis); IOUtils.closeQuietly(zipInputStream); } }
From source file:org.jboss.tools.tycho.sitegenerator.FetchSourcesFromManifests.java
/**
 * Mojo entry point. For every project listed in {@code sourceFetchMap}, determines the upstream
 * git SHA — either by reading a buildinfo.json URL, or by extracting META-INF/MANIFEST.MF from a
 * matching plugin jar under target/repository/plugins and parsing its Eclipse-SourceReferences
 * {@code commitId} — then fetches the matching GitHub source zip and appends a revision line to
 * ALL_REVISIONS.txt. Unless {@code -DskipCheckSHAs=true}, it also verifies that each cached
 * buildinfo_*.json HEAD SHA matches a downloaded *_sources.zip (JBIDE-19467), combines the zips,
 * and writes build.properties.all.xml / build.properties.file.txt metadata to {@code outputFolder}.
 * Does nothing when {@code skip} is set.
 *
 * NOTE(review): the JBIDE-19467 skipCheckSHAs verification section appears twice below with
 * near-identical logic (only the failure-message wording differs) — this looks like copy/paste
 * duplication worth consolidating; confirm against upstream history before refactoring.
 * NOTE(review): source text is collapsed onto long lines; several {@code //} comments swallow the
 * code that follows them on the same physical line, and one string literal is split across lines.
 * Code below is preserved byte-for-byte; only this documentation was added.
 *
 * @throws MojoExecutionException on configuration or I/O errors (bad folders, unreadable
 *                                buildinfo.json, manifest extraction failure, download failure)
 * @throws MojoFailureException   when a buildinfo_*.json SHA does not match the sources that the
 *                                upstream plugin's Eclipse-SourceReferences commitId points at
 */
public void execute() throws MojoExecutionException, MojoFailureException { if (!skip) {//from ww w . ja va2s .c o m if (this.zipCacheFolder == null) { this.zipCacheFolder = new File(project.getBasedir() + File.separator + "cache" + File.separator); } if (this.zipCacheFolder != null && !this.zipCacheFolder.isDirectory()) { try { if (!this.zipCacheFolder.exists()) { this.zipCacheFolder.mkdirs(); } } catch (Exception ex) { throw new MojoExecutionException("'zipCacheFolder' must be a directory", ex); } } if (this.outputFolder == null) { this.outputFolder = new File(project.getBasedir() + File.separator + "zips" + File.separator); } if (this.outputFolder.equals(this.zipCacheFolder)) { throw new MojoExecutionException("zipCacheFolder and outputFolder can not be the same folder"); } zipsDirectory = new File(this.outputFolder, "all"); if (!zipsDirectory.exists()) { zipsDirectory.mkdirs(); } File digestFile = new File(this.outputFolder, "ALL_REVISIONS.txt"); FileWriter dfw; StringBuffer sb = new StringBuffer(); String branch = project.getProperties().getProperty("mvngit.branch"); sb.append("-=> " + project.getGroupId() + ":" + project.getArtifactId() + ":" + project.getVersion() + columnSeparator + branch + " <=-\n"); String pluginPath = project.getBasedir() + File.separator + "target" + File.separator + "repository" + File.separator + "plugins"; String sep = " " + columnSeparator + " "; if (sourceFetchMap == null) { getLog().warn( "No <sourceFetchMap> defined in pom. Can't fetch sources without a list of plugins. 
Did you forget to enable fetch-source-zips profile?"); } else { for (String projectName : this.sourceFetchMap.keySet()) { String pluginNameOrBuildInfoJsonUrl = this.sourceFetchMap.get(projectName); // jbosstools-base = org.jboss.tools.common getLog().debug("For project " + projectName + ": plugin name or buildinfo.json = " + pluginNameOrBuildInfoJsonUrl); String SHA = null; String qualifier = null; String SHASource = null; // if the value is a buildinfo.json URL, not a plugin name if ((pluginNameOrBuildInfoJsonUrl.startsWith("http") || pluginNameOrBuildInfoJsonUrl.startsWith("ftp")) && pluginNameOrBuildInfoJsonUrl.matches(".+buildinfo.*json")) { getLog().debug("Read JSON from: " + pluginNameOrBuildInfoJsonUrl); ModelNode obj; try { obj = ModelNode.fromJSONStream((new URL(pluginNameOrBuildInfoJsonUrl)).openStream()); } catch (IOException e) { throw new MojoExecutionException( "Problem occurred reading " + pluginNameOrBuildInfoJsonUrl, e); } SHA = getSHA(obj); getLog().debug("Found SHA = " + SHA); // create qualifier from buildinfo.json BUILD_ALIAS and ZIPSUFFIX qualifier = getProperty(obj, "BUILD_ALIAS") + "-" + getProperty(obj, "ZIPSUFFIX"); getLog().debug("Found qualifier = " + qualifier); SHASource = pluginNameOrBuildInfoJsonUrl; } else { // find the first matching plugin jar, eg., target/repository/plugins/org.jboss.tools.common_3.6.0.Alpha2-v20140304-0055-B440.jar File[] matchingFiles = listFilesMatching(new File(pluginPath), pluginNameOrBuildInfoJsonUrl + "_.+\\.jar"); // for (File file : matchingFiles) getLog().debug(file.toString()); if (matchingFiles.length < 1) { throw new MojoExecutionException("No matching plugin found in " + pluginPath + " for " + pluginNameOrBuildInfoJsonUrl + "_.+\\.jar.\nCheck your pom.xml for this line: <" + projectName + ">" + pluginNameOrBuildInfoJsonUrl + "</" + projectName + ">"); } File jarFile = matchingFiles[0]; File manifestFile = null; try { FileInputStream fin = new FileInputStream(jarFile); manifestFile = 
File.createTempFile(MANIFEST, ""); OutputStream out = new FileOutputStream(manifestFile); BufferedInputStream bin = new BufferedInputStream(fin); ZipInputStream zin = new ZipInputStream(bin); ZipEntry ze = null; while ((ze = zin.getNextEntry()) != null) { // getLog().debug(ze.getName()); if (ze.getName().equals("META-INF/" + MANIFEST)) { // getLog().debug("Found " + ze.getName() + " in " + // jarFile); byte[] buffer = new byte[8192]; int len; while ((len = zin.read(buffer)) != -1) { out.write(buffer, 0, len); } out.close(); break; } } zin.close(); // getLog().debug("Saved " + jarFile + "!/META-INF/" + MANIFEST); } catch (Exception ex) { throw new MojoExecutionException("Error extracting " + MANIFEST + " from " + jarFile, ex); } // retrieve the MANIFEST.MF file, eg., org.jboss.tools.usage_1.2.100.Alpha2-v20140221-1555-B437.jar!/META-INF/MANIFEST.MF Manifest manifest; try { manifest = new Manifest(new FileInputStream(manifestFile)); } catch (Exception ex) { throw new MojoExecutionException("Error while reading manifest file " + MANIFEST, ex); } // parse out the commitId from Eclipse-SourceReferences: // scm:git:https://github.com/jbosstools/jbosstools-base.git;path="usage/plugins/org.jboss.tools.usage";commitId=184e18cc3ac7c339ce406974b6a4917f73909cc4 Attributes attr = manifest.getMainAttributes(); String ESR = null; SHA = null; ESR = attr.getValue("Eclipse-SourceReferences"); // getLog().debug(ESR); if (ESR != null) { SHA = ESR.substring(ESR.lastIndexOf(";commitId=") + 10); // getLog().debug(SHA); } else { SHA = "UNKNOWN"; } // cleanup manifestFile.delete(); qualifier = getQualifier(pluginNameOrBuildInfoJsonUrl, jarFile.toString(), true); SHASource = removePrefix(jarFile.toString(), pluginPath) + " " + MANIFEST; } // fetch github source archive for that SHA, eg., https://github.com/jbosstools/jbosstools-base/archive/184e18cc3ac7c339ce406974b6a4917f73909cc4.zip // to jbosstools-base_184e18cc3ac7c339ce406974b6a4917f73909cc4_sources.zip String URL = ""; String 
outputZipName = ""; try { if (SHA == null || SHA.equals("UNKNOWN")) { getLog().warn("Cannot fetch " + projectName + " sources: no Eclipse-SourceReferences in " + SHASource); } else { URL = "https://github.com/jbosstools/" + projectName + "/archive/" + SHA + ".zip"; outputZipName = projectName + "_" + SHA + "_sources.zip"; fetchUpstreamSourcesZip(projectName, SHA); } } catch (Exception ex) { throw new MojoExecutionException("Error while downloading github source archive", ex); } // github project, plugin, version, SHA, origin/branch@SHA, remote zipfile, local zipfile String revisionLine = projectName + sep + pluginNameOrBuildInfoJsonUrl + sep + qualifier + sep + SHA + sep + "origin/" + branch + "@" + SHA + sep + URL + sep + outputZipName + "\n"; // getLog().debug(revisionLine); sb.append(revisionLine); } } /* JBIDE-19467 check if SHA in buildinfo_projectName.json matches projectName_65cb06bb81773714b9e3fc1c312e33aaa068dc33_sources.zip. Note: this may fail if you've built stuff locally because those plugins will use different SHAs (eg., from a pull-request topic branch) To test this is working via commandline shell equivalent cd jbosstools-build-sites/aggregate/site for j in target/buildinfo/buildinfo_jbosstools-*; do echo -n $j; k=${j##*_}; k=${k/.json}; echo " :: $k"; cat $j | grep HEAD | head -1 | sed "s#[\t\w\ ]\+\"HEAD\" : \"\(.\+\)\",#0: \1#"; ls cache/${k}_*_sources.zip | sed -e "s#cache/${k}_\(.\+\)_sources.zip#1: \1#"; echo ""; done */ if (skipCheckSHAs) { getLog().warn( "skipCheckSHAs=true :: Skip check that buildinfo_*.json HEAD SHA matches MANIFEST.MF Eclipse-SourceReferences commitId SHA."); } else { File buildinfoFolder = new File(this.project.getBuild().getDirectory(), "buildinfo"); if (buildinfoFolder.isDirectory()) { try { File[] buildInfoFiles = listFilesMatching(buildinfoFolder, "buildinfo_.+.json"); for (int i = 0; i < buildInfoFiles.length; i++) { InputStream in = null; ModelNode obj = null; String upstreamSHA = null; String upstreamProjectName = 
buildInfoFiles[i].toString() .replaceAll(".+buildinfo_(.+).json", "$1"); getLog().debug(i + ": " + buildInfoFiles[i].toString() + " :: " + upstreamProjectName); try { getLog().debug("Read JSON from: " + buildInfoFiles[i].toString()); in = new FileInputStream(buildInfoFiles[i]); obj = ModelNode.fromJSONStream(in); upstreamSHA = getSHA(obj); getLog().debug("Found SHA = " + upstreamSHA); // check if there's a file called upstreamProjectName_upstreamSHA_sources.zip String outputZipName = upstreamProjectName + "_" + upstreamSHA + "_sources.zip"; File outputZipFile = new File(zipsDirectory, outputZipName); if (!outputZipFile.isFile()) { getLog().debug("Check " + outputFolder.toString() + " for " + upstreamProjectName + "_.+_sources.zip"); // find the sources we DID download, eg., jbosstools-browsersim_9255a5b7c04fb10768c14942e60092e860881d6b_sources.zip File[] wrongZipFiles = listFilesMatching(zipsDirectory, upstreamProjectName + "_.+_sources.zip"); String wrongZips = ""; for (int j = 0; j < wrongZipFiles.length; j++) { getLog().debug(wrongZipFiles[j].toString()); wrongZips += (wrongZips.isEmpty() ? "" : ", ") + wrongZipFiles[j].toString() .replaceAll(".+" + upstreamProjectName + "_(.+)_sources.zip", "$1"); } if (!wrongZips.isEmpty()) { throw new MojoFailureException("\n\n" + buildInfoFiles[i].toString() + "\ncontains " + upstreamSHA + ", but upstream " + upstreamProjectName + " project's MANIFEST.MF has Eclipse-SourceReferences \n" + "commitId " + wrongZips + ". \n\n" + "If you have locally built projects which are being aggregated here, ensure \n" + "they are built from the latest SHA from HEAD, not a local topic branch. \n\n" + "It's also possible that some recent changes have not yet been built upstream. \n" + "If that's the case, trigger a build for the " + upstreamProjectName + " project \n" + "to ensure that the latest commits have been built and can be aggregated here. 
\n\n" + "Or, use -DskipCheckSHAs=true to bypass this check.\n\n"); // JBIDE-22808 } else { getLog().warn("\n" + buildInfoFiles[i].toString() + "\ncontains " + upstreamSHA + ", but upstream " + upstreamProjectName + " project's MANIFEST.MF has no Eclipse-SourceReferences commitId.\n" + "Using sources from " + upstreamSHA + "."); // fetch sources from upstreamProjectName's upstreamSHA (but do not log in the digestFile) fetchUpstreamSourcesZip(upstreamProjectName, upstreamSHA); } } } finally { IOUtils.closeQuietly(in); } } } catch (Exception ex) { throw new MojoExecutionException("Problem occurred checking upstream buildinfo.json files!", ex); } } } /* JBIDE-19467 check if SHA in buildinfo_projectName.json matches projectName_65cb06bb81773714b9e3fc1c312e33aaa068dc33_sources.zip. Note: this may fail if you've built stuff locally because those plugins will use different SHAs (eg., from a pull-request topic branch) To test this is working via commandline shell equivalent cd jbosstools-build-sites/aggregate/site for j in target/buildinfo/buildinfo_jbosstools-*; do echo -n $j; k=${j##*_}; k=${k/.json}; echo " :: $k"; cat $j | grep HEAD | head -1 | sed "s#[\t\w\ ]\+\"HEAD\" : \"\(.\+\)\",#0: \1#"; ls cache/${k}_*_sources.zip | sed -e "s#cache/${k}_\(.\+\)_sources.zip#1: \1#"; echo ""; done */ if (skipCheckSHAs) { getLog().warn( "skipCheckSHAs=true :: Skip check that buildinfo_*.json HEAD SHA matches MANIFEST.MF Eclipse-SourceReferences commitId SHA."); } else { File buildinfoFolder = new File(this.project.getBuild().getDirectory(), "buildinfo"); if (buildinfoFolder.isDirectory()) { try { File[] buildInfoFiles = listFilesMatching(buildinfoFolder, "buildinfo_.+.json"); for (int i = 0; i < buildInfoFiles.length; i++) { InputStream in = null; ModelNode obj = null; String upstreamSHA = null; String upstreamProjectName = buildInfoFiles[i].toString() .replaceAll(".+buildinfo_(.+).json", "$1"); getLog().debug(i + ": " + buildInfoFiles[i].toString() + " :: " + upstreamProjectName); 
try { getLog().debug("Read JSON from: " + buildInfoFiles[i].toString()); in = new FileInputStream(buildInfoFiles[i]); obj = ModelNode.fromJSONStream(in); upstreamSHA = getSHA(obj); getLog().debug("Found SHA = " + upstreamSHA); // check if there's a file called upstreamProjectName_upstreamSHA_sources.zip String outputZipName = upstreamProjectName + "_" + upstreamSHA + "_sources.zip"; File outputZipFile = new File(zipsDirectory, outputZipName); if (!outputZipFile.isFile()) { getLog().debug("Check " + outputFolder.toString() + " for " + upstreamProjectName + "_.+_sources.zip"); // find the sources we DID download, eg., jbosstools-browsersim_9255a5b7c04fb10768c14942e60092e860881d6b_sources.zip File[] wrongZipFiles = listFilesMatching(zipsDirectory, upstreamProjectName + "_.+_sources.zip"); String wrongZips = ""; for (int j = 0; j < wrongZipFiles.length; j++) { getLog().debug(wrongZipFiles[j].toString()); wrongZips += (wrongZips.isEmpty() ? "" : ", ") + wrongZipFiles[j].toString() .replaceAll(".+" + upstreamProjectName + "_(.+)_sources.zip", "$1"); } if (!wrongZips.isEmpty()) { throw new MojoFailureException("\n" + buildInfoFiles[i].toString() + "\ncontains " + upstreamSHA + ", but upstream " + upstreamProjectName + " project's MANIFEST.MF has Eclipse-SourceReferences\ncommitId " + wrongZips + ". 
\nIf you have locally built projects which are aggregated here, \nensure they are built from the latest SHA from HEAD, not a local topic branch.\n" + "Or, use -DskipCheckSHAs=true to bypass this check."); } else { getLog().warn("\n" + buildInfoFiles[i].toString() + "\ncontains " + upstreamSHA + ", but upstream " + upstreamProjectName + " project's MANIFEST.MF has no Eclipse-SourceReferences commitId.\n" + "Using sources from " + upstreamSHA + "."); // fetch sources from upstreamProjectName's upstreamSHA (but do not log in the digestFile) fetchUpstreamSourcesZip(upstreamProjectName, upstreamSHA); } } } finally { IOUtils.closeQuietly(in); } } } catch (Exception ex) { throw new MojoExecutionException("Problem occurred checking upstream buildinfo.json files!", ex); } } } // JBDS-3364 JBDS-3208 JBIDE-19467 when not using publish.sh, unpack downloaded source zips and combine them into a single zip createCombinedZipFile(zipsDirectory, zipFiles, CACHE_ZIPS); // getLog().debug("Generating aggregate site metadata"); try { { File buildPropertiesAllXml = new File(this.outputFolder, "build.properties.all.xml"); if (!buildPropertiesAllXml.exists()) { buildPropertiesAllXml.createNewFile(); } FileOutputStream xmlOut = new FileOutputStream(buildPropertiesAllXml); allBuildProperties.storeToXML(xmlOut, null); xmlOut.close(); } { File buildPropertiesFileTxt = new File(this.outputFolder, "build.properties.file.txt"); if (!buildPropertiesFileTxt.exists()) { buildPropertiesFileTxt.createNewFile(); } FileOutputStream textOut = new FileOutputStream(buildPropertiesFileTxt); allBuildProperties.store(textOut, null); textOut.close(); } } catch (Exception ex) { throw new MojoExecutionException("Error while creating 'metadata' files", ex); } try { dfw = new FileWriter(digestFile); dfw.write(sb.toString()); dfw.close(); } catch (Exception ex) { throw new MojoExecutionException("Error writing to " + digestFile.toString(), ex); } // getLog().debug("Written to " + digestFile.toString() + ":\n\n" + 
sb.toString()); } else { getLog().info("fetch-sources-from-manifests (fetch-sources) :: skipped."); } }
From source file:com.informatica.um.binge.api.impl.PluginsFactory.java
/** * Extract the zip file into the output folder * @param fileName - Name of the zip file * @param outFolderName - output folder name. * @throws Exception//w w w . j av a 2 s . c o m */ protected Multimap<String, File> extractZipFile(String fileName, String outFolderName) throws Exception { LOG.debug("Going to extract zip file {} to folder {}", fileName, outFolderName); Multimap<String, File> deps = HashMultimap.create(2, 5); File outFolder = new File(outFolderName); ZipInputStream zipInput = new ZipInputStream(new FileInputStream(new File(fileName))); ZipEntry ze = null; boolean status = outFolder.mkdirs(); if (status) LOG.info("Successfully created plugin folder {}", outFolder.getAbsolutePath()); else { throw new VDSException(VDSErrorCode.PLUGIN_FOLDER_CREATE_ERROR, outFolder.getAbsolutePath()); } byte[] buffer = new byte[1024]; while ((ze = zipInput.getNextEntry()) != null) { String name = ze.getName(); // process files only if (ze.isDirectory() == false) { File file = new File(name); File newFile = null; if (file.getParentFile() != null && file.getParentFile().getName().equals(NATIVE)) { newFile = new File(nativeLibs, file.getName()); } else if (file.getParentFile() != null && file.getParentFile().getName().equals(LIB)) { newFile = Paths.get(outFolderName, LIB, file.getName()).toFile(); deps.put(LIB, newFile); } else { newFile = new File(outFolderName, file.getName()); if (name.endsWith(".jar")) deps.put(LIB, newFile); } newFile.getParentFile().mkdirs(); FileOutputStream fos = new FileOutputStream(newFile); int len; while ((len = zipInput.read(buffer)) > 0) { fos.write(buffer, 0, len); } fos.close(); } } return deps; }