List of usage examples for java.util.zip ZipOutputStream setLevel
public void setLevel(int level)
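Before the project examples, here is a minimal self-contained sketch of the call (not drawn from any of the source files below; the output filename is only illustrative). setLevel takes a compression level from Deflater.NO_COMPRESSION (0) to Deflater.BEST_COMPRESSION (9) and applies to entries written after the call.

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.Deflater;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class SetLevelExample {
    public static void main(String[] args) throws IOException {
        // "example.zip" is a hypothetical output path used only for illustration.
        try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream("example.zip"))) {
            // Compression level for subsequent DEFLATED entries:
            // 0 = no compression ... 9 = best compression.
            zos.setLevel(Deflater.BEST_COMPRESSION);
            zos.putNextEntry(new ZipEntry("hello.txt"));
            zos.write("hello".getBytes(StandardCharsets.UTF_8));
            zos.closeEntry();
        }
    }
}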
From source file:mergedoc.core.MergeManager.java
/**
 * Executes the merge.
 *
 * @throws MergeDocException
 * @throws SAXException
 * @throws IOException
 */
public void execute() throws MergeDocException, SAXException, IOException {
    if (workingState.isCanceled()) {
        return;
    }
    ArchiveInputStream in = null;
    ZipOutputStream out = null;
    try {
        in = ArchiveInputStream.create(pref.getInputArchive());
        out = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(pref.getOutputArchive())));
        out.setLevel(Deflater.BEST_SPEED);
        long start = System.currentTimeMillis();
        merge(in, out);
        long end = System.currentTimeMillis();
        workingState.setWorkTime((end - start) / 1000);
    } finally {
        if (in != null) {
            in.close();
        }
        if (out != null) {
            out.close();
        }
    }
}
From source file:edu.stanford.epad.epadws.handlers.dicom.DownloadUtil.java
public static boolean ZipAndStreamFiles(OutputStream out, List<String> fileNames, String dirPath) {
    File dir_file = new File(dirPath);
    int dir_l = dir_file.getAbsolutePath().length();
    ZipOutputStream zipout = new ZipOutputStream(out);
    zipout.setLevel(1);
    for (int i = 0; i < fileNames.size(); i++) {
        File f = (File) new File(dirPath + fileNames.get(i));
        if (f.canRead()) {
            log.debug("Adding file: " + f.getAbsolutePath());
            try {
                zipout.putNextEntry(new ZipEntry(f.getAbsolutePath().substring(dir_l + 1)));
            } catch (Exception e) {
                log.warning("Error adding to zip file", e);
                return false;
            }
            BufferedInputStream fr;
            try {
                fr = new BufferedInputStream(new FileInputStream(f));
                byte buffer[] = new byte[0xffff];
                int b;
                while ((b = fr.read(buffer)) != -1)
                    zipout.write(buffer, 0, b);
                fr.close();
                zipout.closeEntry();
            } catch (Exception e) {
                log.warning("Error closing zip file", e);
                return false;
            }
        }
    }
    try {
        zipout.finish();
        out.flush();
    } catch (IOException e) {
        e.printStackTrace();
        return false;
    }
    return true;
}
From source file:org.opentestsystem.authoring.testitembank.service.impl.ApipZipInputFileExtractorService.java
private Map<String, ApipItemContent> getItemZipMap(final ApipManifest manifest, final ZipFile originalZip,
        final HashMap<String, ZipArchiveEntry> zipEntryMap) {
    final Map<String, ApipManifestResource> resourceMap = manifest.getResourceMap();
    final Map<String, ApipItemContent> itemContentMap = new HashMap<String, ApipItemContent>();
    final String apipRoot = findApipRootDirectoy(zipEntryMap);
    for (final ApipManifestResource resource : manifest.getResources()) {
        if (isItemResource(resource.getResourceType())) {
            final ApipItemContent content = new ApipItemContent();
            ZipOutputStream zipOut = null;
            ByteArrayOutputStream byteStream = null;
            byte[] bytes = null;
            try {
                byteStream = new ByteArrayOutputStream();
                zipOut = new ZipOutputStream(byteStream);
                zipOut.setLevel(ZipOutputStream.STORED);
                final ApipManifest itemManifest = new ApipManifest();
                itemManifest.setIdentifier(manifest.getIdentifier() + "_" + resource.getIdentifier());
                itemManifest.setSchema(manifest.getSchema());
                itemManifest.setSchemaVersion(manifest.getSchemaVersion());
                final List<ApipManifestResource> requiredResources = new ArrayList<ApipManifestResource>();
                requiredResources.add(resource);
                final ZipArchiveEntry itemEntry = zipEntryMap.get(apipRoot + resource.getFileReference().getHref());
                copyZipEntry(itemEntry, originalZip, zipOut);
                for (final ApipManifestDependency dependency : resource.getDependencies()) {
                    final ApipManifestResource dependencyResource = resourceMap.get(dependency.getIdentifier());
                    if (dependencyResource != null) {
                        final ZipArchiveEntry dependencyEntry = zipEntryMap
                                .get(apipRoot + dependencyResource.getFileReference().getHref());
                        if (dependencyEntry == null) {
                            if (ResourceTypePrefix.apipMetadata.isPrefixFor(dependencyResource.getResourceType())
                                    || ResourceTypePrefix.apipItem.isPrefixFor(dependencyResource.getResourceType())
                                    || ResourceTypePrefix.stimuli.isPrefixFor(dependencyResource.getResourceType())) {
                                content.addError("item.invalid.zip.missingResource",
                                        new String[] { resource.getIdentifier(), dependency.getIdentifier() });
                            } else {
                                LOGGER.warn(String.format("Zip missing resource: %s, %s ",
                                        resource.getIdentifier(), dependency.getIdentifier()));
                            }
                        } else {
                            requiredResources.add(dependencyResource);
                            copyZipEntry(dependencyEntry, originalZip, zipOut);
                        }
                    }
                }
                itemManifest.setResources(requiredResources);
                final ZipEntry entry = new ZipEntry(apipRoot + IMS_MANIFEST_NAME);
                zipOut.putNextEntry(entry);
                zipXMLService.writeToManifestToOutputStream(itemManifest, zipOut);
                zipOut.closeEntry();
                zipOut.flush();
                zipOut.close();
                bytes = byteStream.toByteArray();
                content.setItemZip(bytes);
            } catch (final Exception e) {
                LOGGER.error("creating item zip for " + resource.getIdentifier(), e);
            } finally {
                try {
                    if (zipOut != null) {
                        zipOut.flush();
                        zipOut.close();
                    }
                    if (byteStream != null) {
                        byteStream.flush();
                        byteStream.close();
                    }
                } catch (final IOException e) {
                    LOGGER.error("error closing zip.", e);
                }
            }
            itemContentMap.put(resource.getIdentifier(), content);
        }
    }
    return itemContentMap;
}
From source file:org.egov.wtms.web.controller.reports.SearchNoticeController.java
private ZipOutputStream addFilesToZip(final InputStream inputStream, final String noticeNo,
        final ZipOutputStream out) {
    final byte[] buffer = new byte[1024];
    try {
        out.setLevel(Deflater.DEFAULT_COMPRESSION);
        out.putNextEntry(new ZipEntry(noticeNo.replaceAll("/", "_")));
        int len;
        while ((len = inputStream.read(buffer)) > 0)
            out.write(buffer, 0, len);
        inputStream.close();
    } catch (final IllegalArgumentException iae) {
        LOGGER.error(EXCEPTION_IN_ADDFILESTOZIP, iae);
    } catch (final FileNotFoundException fnfe) {
        LOGGER.error(EXCEPTION_IN_ADDFILESTOZIP, fnfe);
    } catch (final IOException ioe) {
        LOGGER.error(EXCEPTION_IN_ADDFILESTOZIP, ioe);
    }
    return out;
}
From source file:it.greenvulcano.util.zip.ZipHelper.java
/**
 * Performs the <code>ZIP</code> compression of a file/directory, whose name
 * and parent directory are passed as arguments, on the local filesystem.
 * The result is written into a target file with the <code>zip</code>
 * extension.<br>
 * The source filename may contain a regular expression: in this
 * case, all the filenames matching the pattern will be compressed and put
 * in the same target <code>zip</code> file.<br>
 *
 * @param srcDirectory
 *            the source parent directory of the file/s to be zipped. Must
 *            be an absolute pathname.
 * @param fileNamePattern
 *            the name of the file to be zipped. May contain a regular expression,
 *            possibly matching multiple files/directories.
 *            If matching a directory, the directory is zipped with all its content as well.
 * @param targetDirectory
 *            the target parent directory of the created <code>zip</code>
 *            file. Must be an absolute pathname.
 * @param zipFilename
 *            the name of the zip file to be created. Cannot be
 *            <code>null</code>, and must have the <code>.zip</code>
 *            extension. If a target file already exists with the same name
 *            in the same directory, it will be overwritten.
 * @throws IOException
 *             If any error occurs during file compression.
 * @throws IllegalArgumentException
 *             if the arguments are invalid.
 */
public void zipFile(String srcDirectory, String fileNamePattern, String targetDirectory, String zipFilename)
        throws IOException {
    File srcDir = new File(srcDirectory);
    if (!srcDir.isAbsolute()) {
        throw new IllegalArgumentException(
                "The pathname of the source parent directory is NOT absolute: " + srcDirectory);
    }
    if (!srcDir.exists()) {
        throw new IllegalArgumentException(
                "Source parent directory " + srcDirectory + " NOT found on local filesystem.");
    }
    if (!srcDir.isDirectory()) {
        throw new IllegalArgumentException("Source parent directory " + srcDirectory + " is NOT a directory.");
    }
    File targetDir = new File(targetDirectory);
    if (!targetDir.isAbsolute()) {
        throw new IllegalArgumentException(
                "The pathname of the target parent directory is NOT absolute: " + targetDirectory);
    }
    if (!targetDir.exists()) {
        throw new IllegalArgumentException(
                "Target parent directory " + targetDirectory + " NOT found on local filesystem.");
    }
    if (!targetDir.isDirectory()) {
        throw new IllegalArgumentException(
                "Target parent directory " + targetDirectory + " is NOT a directory.");
    }
    if ((zipFilename == null) || (zipFilename.length() == 0)) {
        throw new IllegalArgumentException("Target zip file name is missing.");
    }
    ZipOutputStream zos = null;
    try {
        zos = new ZipOutputStream(new FileOutputStream(new File(targetDir, zipFilename)));
        zos.setLevel(compressionLevel);
        URI base = srcDir.toURI();
        File[] files = srcDir.listFiles(new RegExFilenameFilter(fileNamePattern));
        for (File file : files) {
            internalZipFile(file, zos, base);
        }
    } finally {
        try {
            if (zos != null) {
                zos.close();
            }
        } catch (Exception exc) {
            // Do nothing
        }
    }
}
From source file:fr.gael.dhus.service.job.SendLogsJob.java
@Override
protected void executeInternal(JobExecutionContext arg0) throws JobExecutionException {
    if (!configurationManager.getSendLogsCronConfiguration().isActive())
        return;
    long start = System.currentTimeMillis();
    logger.info("SCHEDULER : Send Administrative logs.");
    if (!DHuS.isStarted()) {
        logger.warn("SCHEDULER : Not run while system not fully initialized.");
        return;
    }
    String[] addresses = configurationManager.getSendLogsCronConfiguration().getAddresses().split(",");
    // Case of no addresses available: use system support
    if ((addresses == null) || (addresses.length == 0) || "".equals(addresses[0].trim())) {
        String email = configurationManager.getSupportConfiguration().getMail();
        if ((email == null) || "".equals(email)) {
            throw new MailException("Support e-mail not configured, " + "system logs will not be send");
        }
        addresses = new String[] { email };
    }
    RollingFileAppender rollingFileAppender = (RollingFileAppender) ((org.apache.logging.log4j.core.Logger) LogManager
            .getRootLogger()).getAppenders().get("RollingFile");
    if (rollingFileAppender == null) {
        throw new MailException("No rolling log file defined");
    }
    String logPath = rollingFileAppender.getFileName();
    if ((logPath == null) || logPath.trim().equals("")) {
        throw new MailException("Log file not defined");
    }
    File logs = new File(logPath);
    if (!logs.exists()) {
        throw new MailException("Log file not present : " + logs.getPath());
    }
    Date now = new Date();
    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'@'HH:mm:ss");
    df.setTimeZone(TimeZone.getTimeZone("GMT"));
    String docFilename = configurationManager.getNameConfiguration().getShortName().toLowerCase() + "-"
            + df.format(now);
    File zipLogs;
    try {
        zipLogs = File.createTempFile(docFilename, ".zip");
    } catch (IOException e) {
        throw new MailException("Cannot create temporary zip log file.", e);
    }
    // compress logs file to zip format
    FileOutputStream fos;
    ZipOutputStream zos = null;
    FileInputStream fis = null;
    try {
        int length;
        byte[] buffer = new byte[1024];
        ZipEntry entry = new ZipEntry(docFilename + ".txt");
        fos = new FileOutputStream(zipLogs);
        zos = new ZipOutputStream(fos);
        fis = new FileInputStream(logs);
        zos.setLevel(Deflater.BEST_COMPRESSION);
        zos.putNextEntry(entry);
        while ((length = fis.read(buffer)) > 0) {
            zos.write(buffer, 0, length);
        }
    } catch (IOException e) {
        throw new MailException("An error occurred during compression " + "logs file, cannot send logs !", e);
    } finally {
        try {
            if (fis != null) {
                fis.close();
            }
            if (zos != null) {
                zos.closeEntry();
                zos.close();
            }
        } catch (IOException e) {
            throw new MailException("An error occurred during compression " + "logs file, cannot send logs !", e);
        }
    }
    EmailAttachment attachment = new EmailAttachment();
    attachment.setDescription(
            configurationManager.getNameConfiguration().getShortName() + " Logs " + now.toString());
    attachment.setPath(zipLogs.getPath());
    attachment.setName(zipLogs.getName());
    // Prepare the addresses
    List<String> ads = new ArrayList<String>();
    for (String email : addresses) {
        StringTokenizer tk = new StringTokenizer(email, ", ");
        while (tk.hasMoreTokens()) {
            String token = tk.nextToken().trim();
            if (!token.isEmpty())
                ads.add(token);
        }
    }
    for (String email : ads) {
        try {
            String server = configurationManager.getServerConfiguration().getExternalHostname();
            String url = configurationManager.getServerConfiguration().getExternalUrl();
            mailServer.send(email, null, null,
                    "[" + configurationManager.getNameConfiguration().getShortName().toLowerCase() + "@"
                            + server + "] logs of " + df.format(now),
                    "Here is attached " + configurationManager.getNameConfiguration().getShortName()
                            + " logs of \"" + url + "\" host.\n\n" + "Kind Regards.\nThe "
                            + configurationManager.getNameConfiguration().getShortName() + " Team.",
                    attachment);
            logger.info("Logs Sent to " + email);
        } catch (EmailException e) {
            throw new MailException("Cannot send logs to " + email, e);
        }
    }
    if (!zipLogs.delete()) {
        logger.warn("Cannot remove mail attachment: " + zipLogs.getAbsolutePath());
    }
    logger.info("SCHEDULER : Send Administrative logs done - " + (System.currentTimeMillis() - start) + "ms");
}
From source file:edu.mayo.pipes.iterators.Compressor.java
/**
 * Create a single entry Zip archive, and prepare it for writing
 *
 * @throws IOException
 */
public BufferedWriter makeZipWriter() throws IOException {
    if (outFile == null)
        return null;
    FileOutputStream outFileStream = new FileOutputStream(outFile);
    ZipOutputStream zipWrite = new ZipOutputStream(outFileStream);
    ZipEntry zE;
    // Setup the zip writing things
    zipWrite.setMethod(ZipOutputStream.DEFLATED);
    zipWrite.setLevel(9); // Max compression
    zE = new ZipEntry("Default");
    zipWrite.putNextEntry(zE);
    // Now can attach the writer to write to this zip entry
    OutputStreamWriter wStream = new OutputStreamWriter(zipWrite);
    writer = new BufferedWriter(wStream);
    comp = kZipCompression;
    return writer;
}
From source file:fr.cirad.mgdb.exporting.markeroriented.EigenstratExportHandler.java
@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    // long before = System.currentTimeMillis();
    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);
    File snpFile = null;
    try {
        snpFile = File.createTempFile("snpFile", "");
        FileWriter snpFileWriter = new FileWriter(snpFile);
        ZipOutputStream zos = new ZipOutputStream(outputStream);
        if (ByteArrayOutputStream.class.isAssignableFrom(outputStream.getClass()))
            zos.setLevel(ZipOutputStream.STORED);
        if (readyToExportFiles != null)
            for (String readyToExportFile : readyToExportFiles.keySet()) {
                zos.putNextEntry(new ZipEntry(readyToExportFile));
                InputStream inputStream = readyToExportFiles.get(readyToExportFile);
                byte[] dataBlock = new byte[1024];
                int count = inputStream.read(dataBlock, 0, 1024);
                while (count != -1) {
                    zos.write(dataBlock, 0, count);
                    count = inputStream.read(dataBlock, 0, 1024);
                }
            }
        MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
        int markerCount = markerCursor.count();
        List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
        ArrayList<String> individualList = new ArrayList<String>();
        StringBuffer indFileContents = new StringBuffer();
        for (int i = 0; i < sampleIDs.size(); i++) {
            Individual individual = individuals.get(i);
            if (!individualList.contains(individual.getId())) {
                individualList.add(individual.getId());
                indFileContents.append(individual.getId() + "\t"
                        + getIndividualGenderCode(sModule, individual.getId()) + "\t"
                        + (individual.getPopulation() == null ? "." : individual.getPopulation())
                        + LINE_SEPARATOR);
            }
        }
        String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
        zos.putNextEntry(new ZipEntry(exportName + ".ind"));
        zos.write(indFileContents.toString().getBytes());
        zos.putNextEntry(new ZipEntry(exportName + ".eigenstratgeno"));
        int avgObjSize = (Integer) mongoTemplate
                .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats()
                .get("avgObjSize");
        int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
        short nProgress = 0, nPreviousProgress = 0;
        long nLoadedMarkerCount = 0;
        while (markerCursor.hasNext()) {
            int nLoadedMarkerCountInLoop = 0;
            Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>();
            boolean fStartingNewChunk = true;
            markerCursor.batchSize(nChunkSize);
            while (markerCursor.hasNext() && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) {
                DBObject exportVariant = markerCursor.next();
                DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
                markerChromosomalPositions.put((Comparable) exportVariant.get("_id"),
                        refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":"
                                + refPos.get(ReferencePosition.FIELDNAME_START_SITE));
                nLoadedMarkerCountInLoop++;
                fStartingNewChunk = false;
            }
            List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet());
            LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                    mongoTemplate, sampleIDs, currentMarkers, true,
                    null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes
            for (VariantData variant : variantsAndRuns.keySet()) // read data and write results into temporary files (one per sample)
            {
                Comparable variantId = variant.getId();
                List<String> chromAndPos = Helper.split(markerChromosomalPositions.get(variantId), ":");
                if (chromAndPos.size() == 0)
                    LOG.warn("Chromosomal position not found for marker " + variantId);
                // LOG.debug(marker + "\t" + (chromAndPos.length == 0 ? "0" : chromAndPos[0]) + "\t" + 0 + "\t" + (chromAndPos.length == 0 ? 0l : Long.parseLong(chromAndPos[1])) + LINE_SEPARATOR);
                if (markerSynonyms != null) {
                    Comparable syn = markerSynonyms.get(variantId);
                    if (syn != null)
                        variantId = syn;
                }
                snpFileWriter.write(variantId + "\t" + (chromAndPos.size() == 0 ? "0" : chromAndPos.get(0))
                        + "\t" + 0 + "\t" + (chromAndPos.size() == 0 ? 0l : Long.parseLong(chromAndPos.get(1)))
                        + LINE_SEPARATOR);
                Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>();
                Collection<VariantRunData> runs = variantsAndRuns.get(variant);
                if (runs != null)
                    for (VariantRunData run : runs)
                        for (Integer sampleIndex : run.getSampleGenotypes().keySet()) {
                            SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex);
                            String individualId = individuals
                                    .get(sampleIDs.indexOf(new SampleId(run.getId().getProjectId(), sampleIndex)))
                                    .getId();
                            Integer gq = null;
                            try {
                                gq = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ);
                            } catch (Exception ignored) {
                            }
                            if (gq != null && gq < nMinimumGenotypeQuality)
                                continue;
                            Integer dp = null;
                            try {
                                dp = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP);
                            } catch (Exception ignored) {
                            }
                            if (dp != null && dp < nMinimumReadDepth)
                                continue;
                            String gtCode = sampleGenotype.getCode();
                            List<String> storedIndividualGenotypes = individualGenotypes.get(individualId);
                            if (storedIndividualGenotypes == null) {
                                storedIndividualGenotypes = new ArrayList<String>();
                                individualGenotypes.put(individualId, storedIndividualGenotypes);
                            }
                            storedIndividualGenotypes.add(gtCode);
                        }
                for (int j = 0; j < individualList.size(); j++ /* we use this list because it has the proper ordering */) {
                    String individualId = individualList.get(j);
                    List<String> genotypes = individualGenotypes.get(individualId);
                    HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                    int highestGenotypeCount = 0;
                    String mostFrequentGenotype = null;
                    if (genotypes != null)
                        for (String genotype : genotypes) {
                            if (genotype.length() == 0)
                                continue; /* skip missing genotypes */
                            int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                            if (gtCount > highestGenotypeCount) {
                                highestGenotypeCount = gtCount;
                                mostFrequentGenotype = genotype;
                            }
                            genotypeCounts.put(genotype, gtCount);
                        }
                    List<String> alleles = mostFrequentGenotype == null ? new ArrayList<String>()
                            : variant.getAllelesFromGenotypeCode(mostFrequentGenotype);
                    int nOutputCode = 0;
                    if (mostFrequentGenotype == null)
                        nOutputCode = 9;
                    else
                        for (String all : Helper.split(mostFrequentGenotype, "/"))
                            if ("0".equals(all))
                                nOutputCode++;
                    if (j == 0 && variant.getKnownAlleleList().size() > 2)
                        warningFileWriter.write("- Variant " + variant.getId()
                                + " is multi-allelic. Make sure Eigenstrat genotype encoding specifications are suitable for you.\n");
                    zos.write(("" + nOutputCode).getBytes());
                    if (genotypeCounts.size() > 1 || alleles.size() > 2) {
                        if (genotypeCounts.size() > 1)
                            warningFileWriter.write("- Dissimilar genotypes found for variant "
                                    + (variantId == null ? variant.getId() : variantId) + ", individual "
                                    + individualId + ". Exporting most frequent: " + nOutputCode + "\n");
                        if (alleles.size() > 2)
                            warningFileWriter.write("- More than 2 alleles found for variant "
                                    + (variantId == null ? variant.getId() : variantId) + ", individual "
                                    + individualId + ". Exporting only the first 2 alleles.\n");
                    }
                }
                zos.write((LINE_SEPARATOR).getBytes());
            }
            if (progress.hasAborted())
                return;
            nLoadedMarkerCount += nLoadedMarkerCountInLoop;
            nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
            if (nProgress > nPreviousProgress) {
                // if (nProgress%5 == 0)
                //     LOG.info("============= exportData: " + nProgress + "% =============" + (System.currentTimeMillis() - before)/1000 + "s");
                progress.setCurrentStepProgress(nProgress);
                nPreviousProgress = nProgress;
            }
        }
        snpFileWriter.close();
        zos.putNextEntry(new ZipEntry(exportName + ".snp"));
        BufferedReader in = new BufferedReader(new FileReader(snpFile));
        String sLine;
        while ((sLine = in.readLine()) != null)
            zos.write((sLine + "\n").getBytes());
        in.close();
        warningFileWriter.close();
        if (warningFile.length() > 0) {
            zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
            int nWarningCount = 0;
            in = new BufferedReader(new FileReader(warningFile));
            while ((sLine = in.readLine()) != null) {
                zos.write((sLine + "\n").getBytes());
                nWarningCount++;
            }
            LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
            in.close();
        }
        warningFile.delete();
        zos.close();
        progress.setCurrentStepProgress((short) 100);
    } finally {
        if (snpFile != null && snpFile.exists())
            snpFile.delete();
    }
}
From source file:org.fao.geonet.services.resources.DownloadArchive.java
public Element exec(Element params, ServiceContext context) throws Exception {
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME);
    DataManager dm = gc.getBean(DataManager.class);
    Dbms dbms = (Dbms) context.getResourceManager().open(Geonet.Res.MAIN_DB);
    UserSession session = context.getUserSession();
    String id = Utils.getIdentifierFromParameters(params, context);
    String access = Util.getParam(params, Params.ACCESS, Params.Access.PUBLIC);

    //--- resource required is public (thumbnails)
    if (access.equals(Params.Access.PUBLIC)) {
        File dir = new File(Lib.resource.getDir(context, access, id));
        String fname = Util.getParam(params, Params.FNAME);
        if (fname.contains("..")) {
            throw new BadParameterEx("Invalid character found in resource name.", fname);
        }
        File file = new File(dir, fname);
        return BinaryFile.encode(200, file.getAbsolutePath(), false);
    }

    //--- from here on resource required is private datafile(s)
    //--- check if disclaimer for this metadata has been displayed
    Element elData = (Element) session.getProperty(Geonet.Session.FILE_DISCLAIMER);
    if (elData == null) {
        return new Element("response");
    } else {
        String idAllowed = elData.getChildText(Geonet.Elem.ID);
        if (idAllowed == null || !idAllowed.equals(id)) {
            return new Element("response");
        }
    }

    //--- check whether notify is required
    boolean doNotify = false;
    Lib.resource.checkPrivilege(context, id, AccessManager.OPER_DOWNLOAD);
    doNotify = true;

    //--- set username for emails and logs
    String username = session.getUsername();
    if (username == null)
        username = "internet";
    String userId = session.getUserId();

    //--- get feedback/reason for download info passed in & record in 'entered'
    // String name = Util.getParam(params, Params.NAME);
    // String org = Util.getParam(params, Params.ORG);
    // String email = Util.getParam(params, Params.EMAIL);
    // String comments = Util.getParam(params, Params.COMMENTS);
    Element entered = new Element("entered").addContent(params.cloneContent());

    //--- get logged in user details & record in 'userdetails'
    Element userDetails = new Element("userdetails");
    if (!username.equals("internet")) {
        Element elUser = dbms.select(
                "SELECT username, surname, name, address, state, zip, country, email, organisation FROM Users WHERE id=?",
                Integer.valueOf(userId));
        if (elUser.getChild("record") != null) {
            userDetails.addContent(elUser.getChild("record").cloneContent());
        }
    }

    //--- get metadata info
    MdInfo info = dm.getMetadataInfo(dbms, id);

    // set up zip output stream
    File zFile = File.createTempFile(username + "_" + info.uuid, ".zip");
    ZipOutputStream out = new ZipOutputStream(new FileOutputStream(zFile));

    //--- because often content has already been compressed
    out.setLevel(Deflater.NO_COMPRESSION);

    //--- now add the files chosen from the interface and record in 'downloaded'
    Element downloaded = new Element("downloaded");
    File dir = new File(Lib.resource.getDir(context, access, id));

    @SuppressWarnings("unchecked")
    List<Element> files = params.getChildren(Params.FNAME);
    for (Element elem : files) {
        String fname = elem.getText();
        if (fname.contains("..")) {
            continue;
        }
        File file = new File(dir, fname);
        if (!file.exists())
            throw new ResourceNotFoundEx(file.getAbsolutePath());
        Element fileInfo = new Element("file");
        Element details = BinaryFile.encode(200, file.getAbsolutePath(), false);
        String remoteURL = details.getAttributeValue("remotepath");
        if (remoteURL != null) {
            if (context.isDebug())
                context.debug("Downloading " + remoteURL + " to archive " + zFile.getName());
            fileInfo.setAttribute("size", "unknown");
            fileInfo.setAttribute("datemodified", "unknown");
            fileInfo.setAttribute("name", remoteURL);
            notifyAndLog(doNotify, id, info.uuid, access, username,
                    remoteURL + " (local config: " + file.getAbsolutePath() + ")", context);
            fname = details.getAttributeValue("remotefile");
        } else {
            if (context.isDebug())
                context.debug("Writing " + fname + " to archive " + zFile.getName());
            fileInfo.setAttribute("size", file.length() + "");
            fileInfo.setAttribute("name", fname);
            Date date = new Date(file.lastModified());
            fileInfo.setAttribute("datemodified", sdf.format(date));
            notifyAndLog(doNotify, id, info.uuid, access, username, file.getAbsolutePath(), context);
        }
        addFile(out, file.getAbsolutePath(), details, fname);
        downloaded.addContent(fileInfo);
    }

    //--- get metadata
    boolean forEditing = false, withValidationErrors = false, keepXlinkAttributes = false;
    Element elMd = dm.getMetadata(context, id, forEditing, withValidationErrors, keepXlinkAttributes);
    if (elMd == null)
        throw new MetadataNotFoundEx("Metadata not found - deleted?");

    //--- transform record into brief version
    String briefXslt = stylePath + Geonet.File.METADATA_BRIEF;
    Element elBrief = Xml.transform(elMd, briefXslt);

    //--- create root element for passing all the info we've gathered
    //--- to license annex xslt generator
    Element root = new Element("root");
    elBrief.setAttribute("changedate", info.changeDate);
    elBrief.setAttribute("currdate", now());
    root.addContent(elBrief);
    root.addContent(downloaded);
    root.addContent(entered);
    root.addContent(userDetails);
    if (context.isDebug())
        context.debug("Passed to metadata-license-annex.xsl:\n " + Xml.getString(root));

    //--- create the license annex html file using the info in root element and
    //--- add it to the zip stream
    String licenseAnnexXslt = stylePath + Geonet.File.LICENSE_ANNEX_XSL;
    File licenseAnnex = File.createTempFile(username + "_" + info.uuid, ".annex");
    FileOutputStream las = new FileOutputStream(licenseAnnex);
    Xml.transform(root, licenseAnnexXslt, las);
    las.close();
    InputStream in = null;
    try {
        in = new FileInputStream(licenseAnnex);
        addFile(out, Geonet.File.LICENSE_ANNEX, in);
    } finally {
        IOUtils.closeQuietly(in);
    }

    //--- if a license is specified include any license files mirrored locally
    //--- for inclusion
    includeLicenseFiles(context, out, root);

    //--- export the metadata as a partial mef/zip file and add that to the zip
    //--- stream FIXME: some refactoring required here to avoid metadata
    //--- being read yet again(!) from the database by the MEF exporter
    String outmef = MEFLib.doExport(context, info.uuid, MEFLib.Format.PARTIAL.toString(), false, true, true);
    FileInputStream in2 = null;
    try {
        in2 = new FileInputStream(outmef);
        addFile(out, "metadata.zip", in2);
    } finally {
        IOUtils.closeQuietly(in2);
    }

    //--- now close the zip file and send it out
    if (out != null)
        out.close();
    return BinaryFile.encode(200, zFile.getAbsolutePath(), true);
}