Usage examples for java.util.zip.ZipInputStream.close()
public void close() throws IOException
From source file:org.candlepin.sync.ExporterTest.java
private void verifyContent(File export, String name, Verify v) { ZipInputStream zis = null; try {//from ww w . j a v a2 s .com zis = new ZipInputStream(new FileInputStream(export)); ZipEntry entry = null; while ((entry = zis.getNextEntry()) != null) { byte[] buf = new byte[1024]; if (entry.getName().equals("consumer_export.zip")) { OutputStream os = new FileOutputStream("/tmp/consumer_export.zip"); int n; while ((n = zis.read(buf, 0, 1024)) > -1) { os.write(buf, 0, n); } os.flush(); os.close(); File exportdata = new File("/tmp/consumer_export.zip"); // open up the zip and look for the metadata verifyContent(exportdata, name, v); } else if (entry.getName().equals(name)) { v.verify(zis, buf); } zis.closeEntry(); } } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } finally { if (zis != null) { try { zis.close(); } catch (IOException e) { e.printStackTrace(); } } } }
From source file:org.forgerock.openicf.maven.DocBookResourceMojo.java
public void execute() throws MojoExecutionException, MojoFailureException { if (skip) {//from w w w. j av a 2s. c o m getLog().info("Skipping DocBook generation"); return; } if (!("pom".equalsIgnoreCase(project.getPackaging()))) { ArtifactHandler artifactHandler = project.getArtifact().getArtifactHandler(); if (!"java".equals(artifactHandler.getLanguage())) { getLog().info( "Not executing DocBook report as the project is not a Java classpath-capable package"); return; } } try { if (!docbkxDirectory.exists()) { getLog().info("Not executing DocBook report as the project does not have DocBook source"); return; } File rootDirectory = new File(buildDirectory, "openicf-docbkx/" + artifact.getArtifactId() + "-" + artifact.getVersion()); try { FileUtils.mkdir(rootDirectory.getAbsolutePath()); MavenResourcesExecution mre = new MavenResourcesExecution(); mre.setMavenProject(getMavenProject()); mre.setEscapeWindowsPaths(true); mre.setMavenSession(session); mre.setInjectProjectBuildFilters(true); List<FileUtils.FilterWrapper> filterWrappers = null; try { filterWrappers = fileFilter.getDefaultFilterWrappers(mre); } catch (MavenFilteringException e) { filterWrappers = Collections.emptyList(); } if (docbkxDirectory.exists()) { final List<String> includes = FileUtils.getFileAndDirectoryNames(docbkxDirectory, "**", StringUtils.join(DirectoryScanner.DEFAULTEXCLUDES, ",") + ",**/*.xml", true, false, true, true); org.apache.commons.io.FileUtils.copyDirectory(docbkxDirectory, rootDirectory, new FileFilter() { public boolean accept(File pathname) { return includes.contains(pathname.getPath()); } }); List<File> files = FileUtils.getFiles(docbkxDirectory, "**/*.xml", null); for (File file : files) { try { fileFilter.copyFile(file, new File(rootDirectory, file.getName()), true, filterWrappers, getSourceEncoding()); } catch (MavenFilteringException e) { throw new MojoExecutionException(e.getMessage(), e); } } } File sharedRoot = rootDirectory.getParentFile(); CodeSource src = 
getClass().getProtectionDomain().getCodeSource(); if (src != null) { final ZipInputStream zip = new ZipInputStream(src.getLocation().openStream()); ZipEntry entry = null; while ((entry = zip.getNextEntry()) != null) { String name = entry.getName(); if (entry.getName().startsWith("shared")) { File destination = new File(sharedRoot, name); if (entry.isDirectory()) { if (!destination.exists()) { destination.mkdirs(); } } else { if (!destination.exists()) { FileOutputStream output = null; try { output = new FileOutputStream(destination); IOUtil.copy(zip, output); } finally { IOUtil.close(output); } } } } } zip.closeEntry(); zip.close(); } } catch (IOException e) { throw new MojoExecutionException("Error copy DocBook resources.", e); } // Generate Config and Schema DocBook Chapter CurrentLocale.set(Locale.ENGLISH); ConnectorDocBuilder generator = new ConnectorDocBuilder(this); generator.executeReport(); // Look at the content of the resourcesDirectory and create a // manifest // of the files // so that velocity can easily process any resources inside the JAR // that // need to be processed. 
if (attach) { RemoteResourcesBundle remoteResourcesBundle = new RemoteResourcesBundle(); remoteResourcesBundle.setSourceEncoding(getSourceEncoding()); DirectoryScanner scanner = new DirectoryScanner(); scanner.setBasedir(new File(buildDirectory, "openicf-docbkx/")); // scanner.setIncludes(new String[] { docFolder + "/**" }); scanner.addDefaultExcludes(); scanner.scan(); List<String> includedFiles = Arrays.asList(scanner.getIncludedFiles()); if (resourcesDirectory.exists()) { scanner = new DirectoryScanner(); scanner.setBasedir(resourcesDirectory); if (includes != null && includes.length != 0) { scanner.setIncludes(includes); } else { scanner.setIncludes(DEFAULT_INCLUDES); } if (excludes != null && excludes.length != 0) { scanner.setExcludes(excludes); } scanner.addDefaultExcludes(); scanner.scan(); includedFiles.addAll(Arrays.asList(scanner.getIncludedFiles())); } for (String resource : includedFiles) { remoteResourcesBundle.addRemoteResource(StringUtils.replace(resource, '\\', '/')); } RemoteResourcesBundleXpp3Writer w = new RemoteResourcesBundleXpp3Writer(); try { File f = new File(buildDirectory, "openicf-docbkx/" + BundleRemoteResourcesMojo.RESOURCES_MANIFEST); FileUtils.mkdir(f.getParentFile().getAbsolutePath()); Writer writer = new FileWriter(f); w.write(writer, remoteResourcesBundle); } catch (IOException e) { throw new MojoExecutionException("Error creating remote resources manifest.", e); } File outputFile = generateArchive(new File(buildDirectory, "openicf-docbkx/"), finalName + "-docbkx.jar"); projectHelper.attachArtifact(project, "jar", "docbkx", outputFile); } else { getLog().info("NOT adding DocBook to attached artifacts list."); } } catch (ArchiverException e) { failOnError("ArchiverException: Error while creating archive", e); } catch (IOException e) { failOnError("IOException: Error while creating archive", e); } catch (RuntimeException e) { failOnError("RuntimeException: Error while creating archive", e); } }
From source file:net.wastl.webmail.config.ExtConfigListener.java
/**
 * Installs a brand-new XML data store under {@code baseDir}: writes the meta
 * properties file, then extracts {@code mimetypes.txt} and the contents of
 * {@code data.zip} (both bundled inside the web application) into place.
 *
 * @param baseDir
 *            Parent directory of metaFile
 * @param metaFile
 *            Properties file to be created. IT CAN NOT EXIST YET!
 * @throws IOException
 *             if fail to create new XML Storage system
 */
protected void installXmlStorage(File baseDir, File metaFile) throws IOException {
    log.warn("Will attempt install a brand new data store");
    final File dataDir = new File(baseDir, "data");
    if (dataDir.exists())
        throw new IOException("Target data path dir already exists: " + dataDir.getAbsolutePath());
    if (!baseDir.isDirectory()) {
        final File parentDir = baseDir.getParentFile();
        if (!parentDir.canWrite())
            throw new IOException("Cannot create base RT directory '" + baseDir.getAbsolutePath() + "'");
        if (!baseDir.mkdir())
            throw new IOException("Failed to create base RT directory '" + baseDir.getAbsolutePath() + "'");
        log.debug("Created base RT dir '" + baseDir.getAbsolutePath() + "'");
        mkLockFile();
    }
    if (!baseDir.canWrite())
        throw new IOException(
                "Do not have privilegest to create meta file '" + metaFile.getAbsolutePath() + "'");
    if (!dataDir.mkdir())
        throw new IOException("Failed to create data directory '" + dataDir.getAbsolutePath() + "'");
    log.debug("Created data dir '" + dataDir.getAbsolutePath() + "'");
    // In my experience, you can't trust the return values of the File.mkdir()
    // method. But the file creations or extractions will fail below in that
    // case, so that's no problem.
    // Could create a Properties object and save it, but why?
    final PrintWriter pw = new PrintWriter(new FileWriter(metaFile));
    try {
        pw.println("webmail.data.path: ${rtconfig.dir}/data");
        pw.println("webmail.mimetypes.filepath: " + "${rtconfig.dir}/mimetypes.txt");
        pw.flush();
    } finally {
        pw.close();
    }
    final InputStream zipFileStream = getClass().getResourceAsStream("/data.zip");
    if (zipFileStream == null)
        throw new IOException("Zip file 'data.zip' missing from web application");
    final InputStream mimeInStream = getClass().getResourceAsStream("/mimetypes.txt");
    if (mimeInStream == null)
        throw new IOException("Mime-types file 'mimetypes.txt' missing from web application");
    ZipEntry entry;
    File newNode;
    FileOutputStream fileStream;
    long fileSize, bytesRead;
    int i;
    final byte[] buffer = new byte[10240];
    final FileOutputStream mimeOutStream = new FileOutputStream(new File(baseDir, "mimetypes.txt"));
    try {
        try {
            while ((i = mimeInStream.read(buffer)) > 0) {
                mimeOutStream.write(buffer, 0, i);
            }
            mimeOutStream.flush();
        } finally {
            mimeOutStream.close();
        }
    } finally {
        // BUG FIX: the classpath input stream was never closed.
        mimeInStream.close();
    }
    log.debug("Extracted mime types file");
    // zipStream wraps zipFileStream, so closing it in the finally below also
    // closes the underlying classpath stream.
    final ZipInputStream zipStream = new ZipInputStream(zipFileStream);
    try {
        while ((entry = zipStream.getNextEntry()) != null) {
            newNode = new File(dataDir, entry.getName());
            if (entry.isDirectory()) {
                if (!newNode.mkdir())
                    throw new IOException(
                            "Failed to extract dir '" + entry.getName() + "' from 'data.zip' file");
                log.debug("Extracted dir '" + entry.getName() + "' to '" + newNode.getAbsolutePath() + "'");
                zipStream.closeEntry();
                continue;
            }
            fileSize = entry.getSize();
            fileStream = new FileOutputStream(newNode);
            try {
                bytesRead = 0;
                while ((i = zipStream.read(buffer)) > 0) {
                    fileStream.write(buffer, 0, i);
                    bytesRead += i;
                }
                fileStream.flush();
            } finally {
                fileStream.close();
            }
            zipStream.closeEntry();
            // BUG FIX: ZipEntry.getSize() returns -1 when the size is unknown;
            // only verify the byte count when a size was actually recorded.
            if (fileSize >= 0 && bytesRead != fileSize)
                throw new IOException("Expected " + fileSize + " bytes for '" + entry.getName()
                        + ", but extracted " + bytesRead + " bytes to '" + newNode.getAbsolutePath() + "'");
            log.debug("Extracted file '" + entry.getName() + "' to '" + newNode.getAbsolutePath() + "'");
        }
    } finally {
        zipStream.close();
    }
}
From source file:org.universAAL.itests.IntegrationTest.java
/** * Helper method for extracting zipped archive provided as input stream into * given directory.//from w w w. ja v a 2s. c o m * * @param is * @param destDirStr */ private void unzipInpuStream(final InputStream is, final String destDirStr) { try { File destDir = new File(destDirStr); final int BUFFER = 1024; BufferedOutputStream dest = null; ZipInputStream zis = new ZipInputStream(new BufferedInputStream(is)); ZipEntry entry; while ((entry = zis.getNextEntry()) != null) { System.out.println("Extracting: " + entry); if (entry.getName().startsWith("META-INF")) { // META-INF (which includes MANIFEST) should not be // unpacked. It should be just ignored continue; } if (entry.isDirectory()) { File newDir = new File(destDir, entry.getName()); newDir.mkdirs(); } else { int count; byte[] data = new byte[BUFFER]; // write the files to the disk FileOutputStream fos = new FileOutputStream(new File(destDir, entry.getName())); dest = new BufferedOutputStream(fos, BUFFER); while ((count = zis.read(data, 0, BUFFER)) != -1) { dest.write(data, 0, count); } dest.flush(); dest.close(); } } zis.close(); } catch (Exception e) { e.printStackTrace(); } }
From source file:au.org.ala.layers.dao.ObjectDAOImpl.java
@Override public void streamObjectsGeometryById(OutputStream os, String id, String geomtype) throws IOException { logger.info("Getting object info for id = " + id + " and geometry as " + geomtype); String sql = ""; if ("kml".equals(geomtype)) { sql = "SELECT ST_AsKml(the_geom) as geometry, name, \"desc\" as description FROM objects WHERE pid=?;"; } else if ("wkt".equals(geomtype)) { sql = "SELECT ST_AsText(the_geom) as geometry FROM objects WHERE pid=?;"; } else if ("geojson".equals(geomtype)) { sql = "SELECT ST_AsGeoJSON(the_geom) as geometry FROM objects WHERE pid=?;"; } else if ("shp".equals(geomtype)) { sql = "SELECT ST_AsText(the_geom) as geometry, name, \"desc\" as description FROM objects WHERE pid=?;"; }/*w w w. j ava 2 s . co m*/ List<Objects> l = jdbcTemplate.query(sql, ParameterizedBeanPropertyRowMapper.newInstance(Objects.class), id); if (l.size() > 0) { if ("shp".equals(geomtype)) { String wkt = l.get(0).getGeometry(); File zippedShapeFile = SpatialConversionUtils.buildZippedShapeFile(wkt, id, l.get(0).getName(), l.get(0).getDescription()); FileUtils.copyFile(zippedShapeFile, os); } else if ("kml".equals(geomtype)) { os.write(KML_HEADER.replace("<name></name>", "<name><![CDATA[" + l.get(0).getName() + "]]></name>") .replace("<description></description>", "<description><![CDATA[" + l.get(0).getDescription() + "]]></description>") .getBytes()); os.write(l.get(0).getGeometry().getBytes()); os.write(KML_FOOTER.getBytes()); } else { os.write(l.get(0).getGeometry().getBytes()); } } else { // get grid classes if (id.length() > 0) { // grid class pids are, 'layerPid:gridClassNumber' try { String[] s = id.split(":"); if (s.length >= 2) { int n = Integer.parseInt(s[1]); IntersectionFile f = layerIntersectDao.getConfig().getIntersectionFile(s[0]); if (f != null && f.getClasses() != null) { GridClass gc = f.getClasses().get(n); if (gc != null && ("kml".equals(geomtype) || "wkt".equals(geomtype) || "geojson".equals(geomtype) || "shp".equals(geomtype))) { // TODO: 
enable for type 'a' after // implementation of fields table defaultLayer // field File file = new File( f.getFilePath() + File.separator + s[1] + "." + geomtype + ".zip"); if ((f.getType().equals("a") || s.length == 2) && file.exists()) { ZipInputStream zis = null; try { zis = new ZipInputStream(new FileInputStream(file)); zis.getNextEntry(); byte[] buffer = new byte[1024]; int size; while ((size = zis.read(buffer)) > 0) { os.write(buffer, 0, size); } } catch (Exception e) { logger.error(e.getMessage(), e); } finally { if (zis != null) { try { zis.close(); } catch (Exception e) { logger.error(e.getMessage(), e); } } } } else { // polygon BufferedInputStream bis = null; InputStreamReader isr = null; try { String[] cells = null; HashMap<String, Object> map = s.length == 2 ? null : getGridIndexEntry(f.getFilePath() + File.separator + s[1], s[2]); String wkt = null; if (map != null) { cells = new String[] { s[2], String.valueOf(map.get("charoffset")) }; if (cells != null) { // get polygon wkt string File file2 = new File( f.getFilePath() + File.separator + s[1] + ".wkt"); bis = new BufferedInputStream(new FileInputStream(file2)); isr = new InputStreamReader(bis); isr.skip(Long.parseLong(cells[1])); char[] buffer = new char[1024]; int size; StringBuilder sb = new StringBuilder(); sb.append("POLYGON"); int end = -1; while (end < 0 && (size = isr.read(buffer)) > 0) { sb.append(buffer, 0, size); end = sb.toString().indexOf("))"); } end += 2; wkt = sb.toString().substring(0, end); } } else { wkt = gc.getBbox(); } if (geomtype.equals("wkt")) { os.write(wkt.getBytes()); } else { WKTReader r = new WKTReader(); Geometry g = r.read(wkt); if (geomtype.equals("kml")) { os.write(KML_HEADER.getBytes()); Encoder encoder = new Encoder(new KMLConfiguration()); encoder.setIndenting(true); encoder.encode(g, KML.Geometry, os); os.write(KML_FOOTER.getBytes()); } else if (geomtype.equals("geojson")) { FeatureJSON fjson = new FeatureJSON(); final SimpleFeatureType TYPE = 
DataUtilities.createType("class", "the_geom:MultiPolygon,name:String"); SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder( TYPE); featureBuilder.add(g); featureBuilder.add(gc.getName()); fjson.writeFeature(featureBuilder.buildFeature(null), os); } else if (geomtype == "shp") { File zippedShapeFile = SpatialConversionUtils .buildZippedShapeFile(wkt, id, gc.getName(), null); FileUtils.copyFile(zippedShapeFile, os); } } } catch (Exception e) { logger.error(e.getMessage(), e); } finally { if (bis != null) { try { bis.close(); } catch (Exception e) { logger.error(e.getMessage(), e); } } if (isr != null) { try { isr.close(); } catch (Exception e) { logger.error(e.getMessage(), e); } } } } } } } } catch (Exception e) { logger.error(e.getMessage(), e); } } } }
From source file:org.geoserver.kml.KMLReflectorTest.java
@Test public void testForceRasterKmz() throws Exception { final String requestUrl = "wms/kml?layers=" + getLayerId(MockData.BASIC_POLYGONS) + "&styles=&mode=download&KMSCORE=0&format=" + KMZMapOutputFormat.MIME_TYPE; MockHttpServletResponse response = getAsServletResponse(requestUrl); assertEquals(KMZMapOutputFormat.MIME_TYPE, response.getContentType()); assertEquals("attachment; filename=cite-BasicPolygons.kmz", response.getHeader("Content-Disposition")); ZipInputStream zis = new ZipInputStream(getBinaryInputStream(response)); try {/*from w ww . j av a 2 s.c o m*/ // first entry, the kml document itself ZipEntry entry = zis.getNextEntry(); assertEquals("wms.kml", entry.getName()); // we need to clone the input stream, as dom(is) closes the stream byte[] data = IOUtils.toByteArray(zis); Document dom = dom(new ByteArrayInputStream(data)); assertXpathEvaluatesTo("1", "count(//kml:Folder/kml:GroundOverlay)", dom); String href = XMLUnit.newXpathEngine().evaluate("//kml:Folder/kml:GroundOverlay/kml:Icon/kml:href", dom); assertEquals("images/layers_0.png", href); zis.closeEntry(); // the images folder entry = zis.getNextEntry(); assertEquals("images/", entry.getName()); zis.closeEntry(); // the ground overlay for the raster layer entry = zis.getNextEntry(); assertEquals("images/layers_0.png", entry.getName()); zis.closeEntry(); assertNull(zis.getNextEntry()); } finally { zis.close(); } }
From source file:io.gromit.geolite2.geonames.CountryFinder.java
/** * Read countries./*from w w w.j a va2s . c om*/ * * @param countriesLocationUrl the countries location url * @return the country finder */ private CountryFinder readCountries(String countriesLocationUrl) { ZipInputStream zipis = null; try { zipis = new ZipInputStream(new URL(countriesLocationUrl).openStream(), Charset.forName("UTF-8")); ZipEntry zipEntry = zipis.getNextEntry(); logger.info("reading " + zipEntry.getName()); if (crc == zipEntry.getCrc()) { logger.info("skipp, same CRC"); return this; } CsvParserSettings settings = new CsvParserSettings(); settings.setSkipEmptyLines(true); settings.trimValues(true); CsvFormat format = new CsvFormat(); format.setDelimiter('\t'); format.setLineSeparator("\n"); format.setCharToEscapeQuoteEscaping('\0'); format.setQuote('\0'); settings.setFormat(format); CsvParser parser = new CsvParser(settings); List<String[]> lines = parser.parseAll(new InputStreamReader(zipis, "UTF-8")); for (String[] entry : lines) { Country country = new Country(); country.setIso(entry[0]); country.setIso3(entry[1]); country.setName(entry[2]); country.setCapital(entry[3]); country.setContinent(entry[4]); country.setCurrencyCode(entry[5]); country.setCurrencyName(entry[6]); country.setPhone(entry[7]); country.setLanguage(StringUtils.substringBefore(entry[8], ",")); country.setGeonameId(NumberUtils.toInt(entry[9])); geonameMap.put(country.getGeonameId(), country); isoMap.put(country.getIso(), country); } } catch (Exception e) { throw new RuntimeException(e); } finally { try { zipis.close(); } catch (Exception e) { } ; } logger.info("loaded " + geonameMap.size() + " countries"); return this; }
From source file:org.apache.openmeetings.servlet.outputhandler.BackupImportController.java
public void performImport(InputStream is) throws Exception { File working_dir = OmFileHelper.getUploadImportDir(); if (!working_dir.exists()) { working_dir.mkdir();//from w w w . jav a 2 s. com } File f = OmFileHelper.getNewDir(working_dir, "import_" + CalendarPatterns.getTimeForStreamId(new Date())); log.debug("##### WRITE FILE TO: " + f); ZipInputStream zipinputstream = new ZipInputStream(is); ZipEntry zipentry = zipinputstream.getNextEntry(); while (zipentry != null) { String fName = zipentry.getName(); if (File.pathSeparatorChar != '\\' && fName.indexOf('\\') > -1) { fName = fName.replace('\\', '/'); } // for each entry to be extracted File fentry = new File(f, fName); File dir = fentry.isDirectory() ? fentry : fentry.getParentFile(); dir.mkdirs(); if (fentry.isDirectory()) { zipentry = zipinputstream.getNextEntry(); continue; } FileHelper.copy(zipinputstream, fentry); zipinputstream.closeEntry(); zipentry = zipinputstream.getNextEntry(); } zipinputstream.close(); /* * ##################### Import Organizations */ Serializer simpleSerializer = new Persister(); { List<Organisation> list = readList(simpleSerializer, f, "organizations.xml", "organisations", Organisation.class); for (Organisation o : list) { long oldId = o.getOrganisation_id(); o.setOrganisation_id(null); Long newId = organisationManager.addOrganisationObj(o); organisationsMap.put(oldId, newId); } } log.info("Organizations import complete, starting user import"); /* * ##################### Import Users */ { List<User> list = readUserList(f, "users.xml", "users"); for (User u : list) { OmTimeZone tz = u.getOmTimeZone(); if (tz == null || tz.getJname() == null) { String jNameTimeZone = configurationDao.getConfValue("default.timezone", String.class, "Europe/Berlin"); OmTimeZone omTimeZone = omTimeZoneDaoImpl.getOmTimeZone(jNameTimeZone); u.setOmTimeZone(omTimeZone); u.setForceTimeZoneCheck(true); } else { u.setForceTimeZoneCheck(false); } u.setStarttime(new Date()); long userId = u.getUser_id(); 
u.setUser_id(null); if (u.getSipUser() != null && u.getSipUser().getId() != 0) { u.getSipUser().setId(0); } usersDao.update(u, -1L); usersMap.put(userId, u.getUser_id()); } } log.info("Users import complete, starting room import"); /* * ##################### Import Rooms */ { Registry registry = new Registry(); Strategy strategy = new RegistryStrategy(registry); RegistryMatcher matcher = new RegistryMatcher(); //TODO need to be removed in the later versions Serializer serializer = new Persister(strategy, matcher); matcher.bind(Long.class, LongTransform.class); matcher.bind(Integer.class, IntegerTransform.class); registry.bind(User.class, new UserConverter(usersDao, usersMap)); registry.bind(RoomType.class, new RoomTypeConverter(roomManager)); List<Room> list = readList(serializer, f, "rooms.xml", "rooms", Room.class); for (Room r : list) { Long roomId = r.getRooms_id(); // We need to reset ids as openJPA reject to store them otherwise r.setRooms_id(null); if (r.getModerators() != null) { for (Iterator<RoomModerator> i = r.getModerators().iterator(); i.hasNext();) { RoomModerator rm = i.next(); if (rm.getUser().getUser_id() == null) { i.remove(); } } } r = roomDao.update(r, null); roomsMap.put(roomId, r.getRooms_id()); } } log.info("Room import complete, starting room organizations import"); /* * ##################### Import Room Organisations */ { Registry registry = new Registry(); Strategy strategy = new RegistryStrategy(registry); Serializer serializer = new Persister(strategy); registry.bind(Organisation.class, new OrganisationConverter(orgDao, organisationsMap)); registry.bind(Room.class, new RoomConverter(roomDao, roomsMap)); List<RoomOrganisation> list = readList(serializer, f, "rooms_organisation.xml", "room_organisations", RoomOrganisation.class); for (RoomOrganisation ro : list) { if (!ro.getDeleted()) { // We need to reset this as openJPA reject to store them otherwise ro.setRooms_organisation_id(null); roomManager.addRoomOrganisation(ro); } } } 
log.info("Room organizations import complete, starting appointement import"); /* * ##################### Import Appointements */ { Registry registry = new Registry(); Strategy strategy = new RegistryStrategy(registry); Serializer serializer = new Persister(strategy); registry.bind(AppointmentCategory.class, new AppointmentCategoryConverter(appointmentCategoryDaoImpl)); registry.bind(User.class, new UserConverter(usersDao, usersMap)); registry.bind(AppointmentReminderTyps.class, new AppointmentReminderTypeConverter(appointmentReminderTypDaoImpl)); registry.bind(Room.class, new RoomConverter(roomDao, roomsMap)); registry.bind(Date.class, DateConverter.class); List<Appointment> list = readList(serializer, f, "appointements.xml", "appointments", Appointment.class); for (Appointment a : list) { Long appId = a.getAppointmentId(); // We need to reset this as openJPA reject to store them otherwise a.setAppointmentId(null); if (a.getUserId() != null && a.getUserId().getUser_id() == null) { a.setUserId(null); } Long newAppId = appointmentDao.addAppointmentObj(a); appointmentsMap.put(appId, newAppId); } } log.info("Appointement import complete, starting meeting members import"); /* * ##################### Import MeetingMembers * * Reminder Invitations will be NOT send! 
*/ { Registry registry = new Registry(); Strategy strategy = new RegistryStrategy(registry); Serializer serializer = new Persister(strategy); registry.bind(User.class, new UserConverter(usersDao, usersMap)); registry.bind(Appointment.class, new AppointmentConverter(appointmentDao, appointmentsMap)); List<MeetingMember> list = readList(serializer, f, "meetingmembers.xml", "meetingmembers", MeetingMember.class); for (MeetingMember ma : list) { if (ma.getUserid() != null && ma.getUserid().getUser_id() == null) { ma.setUserid(null); } if (!ma.getDeleted()) { // We need to reset this as openJPA reject to store them otherwise ma.setMeetingMemberId(null); meetingMemberDao.addMeetingMemberByObject(ma); } } } log.info("Meeting members import complete, starting ldap config import"); /* * ##################### Import LDAP Configs */ { List<LdapConfig> list = readList(simpleSerializer, f, "ldapconfigs.xml", "ldapconfigs", LdapConfig.class, true); for (LdapConfig c : list) { ldapConfigDao.addLdapConfigByObject(c); } } log.info("Ldap config import complete, starting recordings import"); /* * ##################### Import Recordings */ { Registry registry = new Registry(); Strategy strategy = new RegistryStrategy(registry); RegistryMatcher matcher = new RegistryMatcher(); //TODO need to be removed in the later versions Serializer serializer = new Persister(strategy, matcher); matcher.bind(Long.class, LongTransform.class); matcher.bind(Integer.class, IntegerTransform.class); registry.bind(Date.class, DateConverter.class); List<FlvRecording> list = readList(serializer, f, "flvRecordings.xml", "flvrecordings", FlvRecording.class, true); for (FlvRecording fr : list) { fr.setFlvRecordingId(0); if (fr.getRoom_id() != null) { fr.setRoom_id(roomsMap.get(fr.getRoom_id())); } if (fr.getOwnerId() != null) { fr.setOwnerId(usersMap.get(fr.getOwnerId())); } if (fr.getFlvRecordingMetaData() != null) { for (FlvRecordingMetaData meta : fr.getFlvRecordingMetaData()) { 
meta.setFlvRecordingMetaDataId(0); meta.setFlvRecording(fr); } } flvRecordingDao.addFlvRecordingObj(fr); } } log.info("FLVrecording import complete, starting private message folder import"); /* * ##################### Import Private Message Folders */ { List<PrivateMessageFolder> list = readList(simpleSerializer, f, "privateMessageFolder.xml", "privatemessagefolders", PrivateMessageFolder.class, true); for (PrivateMessageFolder p : list) { Long folderId = p.getPrivateMessageFolderId(); PrivateMessageFolder storedFolder = privateMessageFolderDao.getPrivateMessageFolderById(folderId); if (storedFolder == null) { p.setPrivateMessageFolderId(0); Long newFolderId = privateMessageFolderDao.addPrivateMessageFolderObj(p); messageFoldersMap.put(folderId, newFolderId); } } } log.info("Private message folder import complete, starting user contacts import"); /* * ##################### Import User Contacts */ { Registry registry = new Registry(); Strategy strategy = new RegistryStrategy(registry); Serializer serializer = new Persister(strategy); registry.bind(User.class, new UserConverter(usersDao, usersMap)); List<UserContact> list = readList(serializer, f, "userContacts.xml", "usercontacts", UserContact.class, true); for (UserContact uc : list) { Long ucId = uc.getUserContactId(); UserContact storedUC = userContactsDao.getUserContacts(ucId); if (storedUC == null && uc.getContact() != null && uc.getContact().getUser_id() != null) { uc.setUserContactId(0); Long newId = userContactsDao.addUserContactObj(uc); userContactsMap.put(ucId, newId); } } } log.info("Usercontact import complete, starting private messages item import"); /* * ##################### Import Private Messages */ { Registry registry = new Registry(); Strategy strategy = new RegistryStrategy(registry); Serializer serializer = new Persister(strategy); registry.bind(User.class, new UserConverter(usersDao, usersMap)); registry.bind(Room.class, new RoomConverter(roomDao, roomsMap)); registry.bind(Date.class, 
DateConverter.class); List<PrivateMessage> list = readList(serializer, f, "privateMessages.xml", "privatemessages", PrivateMessage.class, true); for (PrivateMessage p : list) { p.setPrivateMessageId(0); p.setPrivateMessageFolderId(getNewId(p.getPrivateMessageFolderId(), Maps.MESSAGEFOLDERS)); p.setUserContactId(getNewId(p.getUserContactId(), Maps.USERCONTACTS)); if (p.getRoom() != null && p.getRoom().getRooms_id() == null) { p.setRoom(null); } if (p.getTo() != null && p.getTo().getUser_id() == null) { p.setTo(null); } if (p.getFrom() != null && p.getFrom().getUser_id() == null) { p.setFrom(null); } if (p.getOwner() != null && p.getOwner().getUser_id() == null) { p.setOwner(null); } privateMessagesDao.addPrivateMessageObj(p); } } log.info("Private message import complete, starting file explorer item import"); /* * ##################### Import File-Explorer Items */ { Registry registry = new Registry(); Strategy strategy = new RegistryStrategy(registry); RegistryMatcher matcher = new RegistryMatcher(); //TODO need to be removed in the later versions Serializer serializer = new Persister(strategy, matcher); matcher.bind(Long.class, LongTransform.class); matcher.bind(Integer.class, IntegerTransform.class); registry.bind(Date.class, DateConverter.class); List<FileExplorerItem> list = readList(serializer, f, "fileExplorerItems.xml", "fileExplorerItems", FileExplorerItem.class, true); for (FileExplorerItem file : list) { // We need to reset this as openJPA reject to store them otherwise file.setFileExplorerItemId(0); Long roomId = file.getRoom_id(); file.setRoom_id(roomsMap.containsKey(roomId) ? 
roomsMap.get(roomId) : null); fileExplorerItemDao.addFileExplorerItem(file); } } log.info("File explorer item import complete, starting file poll import"); /* * ##################### Import Room Polls */ { Registry registry = new Registry(); Strategy strategy = new RegistryStrategy(registry); Serializer serializer = new Persister(strategy); registry.bind(User.class, new UserConverter(usersDao, usersMap)); registry.bind(Room.class, new RoomConverter(roomDao, roomsMap)); registry.bind(PollType.class, new PollTypeConverter(pollManager)); registry.bind(Date.class, DateConverter.class); List<RoomPoll> list = readList(serializer, f, "roompolls.xml", "roompolls", RoomPoll.class, true); for (RoomPoll rp : list) { pollManager.savePollBackup(rp); } } log.info("Poll import complete, starting configs import"); /* * ##################### Import Configs */ { Registry registry = new Registry(); Strategy strategy = new RegistryStrategy(registry); RegistryMatcher matcher = new RegistryMatcher(); //TODO need to be removed in the later versions Serializer serializer = new Persister(strategy, matcher); matcher.bind(Long.class, LongTransform.class); registry.bind(Date.class, DateConverter.class); registry.bind(User.class, new UserConverter(usersDao, usersMap)); List<Configuration> list = readList(serializer, f, "configs.xml", "configs", Configuration.class, true); for (Configuration c : list) { Configuration cfg = configurationDao.get(c.getConf_key()); c.setConfiguration_id(cfg == null ? 
null : cfg.getConfiguration_id()); if (c.getUser() != null && c.getUser().getUser_id() == null) { c.setUser(null); } if ("crypt_ClassName".equals(c.getConf_key())) { try { Class.forName(c.getConf_value()); } catch (ClassNotFoundException e) { c.setConf_value(MD5Implementation.class.getCanonicalName()); } } configurationDao.update(c, -1L); } } log.info("Configs import complete, starting copy of files and folders"); /* * ##################### Import real files and folders */ importFolders(f); log.info("File explorer item import complete, clearing temp files"); FileHelper.removeRec(f); }
From source file:net.sourceforge.vulcan.core.support.AbstractFileStore.java
/**
 * Extracts a plugin bundle from the given zip stream into the plugins directory.
 * The archive must start with a single top-level directory entry, whose name (minus
 * slashes) becomes the plugin id. Entries are unpacked under a "tmp-" prefixed
 * staging area; after a successful version check the plugin config is created from
 * the final plugin directory. The staging directory is always cleaned up.
 *
 * @param is zip stream of the plugin bundle
 * @return metadata describing the installed plugin
 * @throws StoreException on invalid layout, duplicate id, version conflict or I/O error
 */
@Override
public final PluginMetaDataDto extractPlugin(InputStream is) throws StoreException {
    final File pluginsDir = getPluginsRoot();
    String toplevel = null;
    File tmpDir = null;
    if (!pluginsDir.exists()) {
        createDir(pluginsDir);
    }
    final ZipInputStream zis = new ZipInputStream(is);
    try {
        // The very first entry must be the plugin's top-level directory.
        ZipEntry zipEntry = zis.getNextEntry();
        if (zipEntry == null || !zipEntry.isDirectory()) {
            throw new InvalidPluginLayoutException();
        }
        toplevel = zipEntry.getName();
        tmpDir = new File(pluginsDir, "tmp-" + toplevel);
        if (!tmpDir.exists()) {
            createDir(tmpDir);
        }
        // Plugin id is the top-level directory name without the trailing slash(es).
        final String id = toplevel.replaceAll("/", "");
        while ((zipEntry = zis.getNextEntry()) != null) {
            // Every remaining entry must live under the top-level directory.
            if (!zipEntry.getName().startsWith(toplevel)) {
                throw new InvalidPluginLayoutException();
            }
            final File target = new File(pluginsDir, "tmp-" + zipEntry.getName());
            if (zipEntry.isDirectory()) {
                createDir(target);
                zis.closeEntry();
                continue;
            }
            final FileOutputStream fileOut = new FileOutputStream(target);
            try {
                IOUtils.copy(zis, fileOut);
            } finally {
                fileOut.close();
                zis.closeEntry();
            }
        }
        checkPluginVersion(pluginsDir, id);
        return createPluginConfig(new File(pluginsDir, toplevel));
    } catch (DuplicatePluginIdException e) {
        throw e;
    } catch (Exception e) {
        if (e instanceof StoreException) {
            throw (StoreException) e;
        }
        throw new StoreException(e.getMessage(), e);
    } finally {
        try {
            zis.close();
        } catch (IOException ignore) {
        }
        // Best-effort removal of the staging directory.
        if (tmpDir != null && tmpDir.exists()) {
            try {
                FileUtils.deleteDirectory(tmpDir);
            } catch (Exception ignore) {
            }
        }
    }
}
From source file:Main.java
public static boolean unzip(InputStream inputStream, String dest, boolean replaceIfExists) { final int BUFFER_SIZE = 4096; BufferedOutputStream bufferedOutputStream = null; boolean succeed = true; try {//w w w . j av a2s.c om ZipInputStream zipInputStream = new ZipInputStream(new BufferedInputStream(inputStream)); ZipEntry zipEntry; while ((zipEntry = zipInputStream.getNextEntry()) != null) { String zipEntryName = zipEntry.getName(); // if(!zipEntry.isDirectory()) { // File fil = new File(dest + zipEntryName); // fil.getParent() // } // file exists ? delete ? File file2 = new File(dest + zipEntryName); if (file2.exists()) { if (replaceIfExists) { try { boolean b = deleteDir(file2); if (!b) { Log.e("Haggle", "Unzip failed to delete " + dest + zipEntryName); } else { Log.d("Haggle", "Unzip deleted " + dest + zipEntryName); } } catch (Exception e) { Log.e("Haggle", "Unzip failed to delete " + dest + zipEntryName, e); } } } // extract File file = new File(dest + zipEntryName); if (file.exists()) { } else { if (zipEntry.isDirectory()) { file.mkdirs(); chmod(file, 0755); } else { // create parent file folder if not exists yet if (!file.getParentFile().exists()) { file.getParentFile().mkdirs(); chmod(file.getParentFile(), 0755); } byte buffer[] = new byte[BUFFER_SIZE]; bufferedOutputStream = new BufferedOutputStream(new FileOutputStream(file), BUFFER_SIZE); int count; while ((count = zipInputStream.read(buffer, 0, BUFFER_SIZE)) != -1) { bufferedOutputStream.write(buffer, 0, count); } bufferedOutputStream.flush(); bufferedOutputStream.close(); } } // enable standalone python if (file.getName().endsWith(".so") || file.getName().endsWith(".xml") || file.getName().endsWith(".py") || file.getName().endsWith(".pyc") || file.getName().endsWith(".pyo")) { chmod(file, 0755); } Log.d("Haggle", "Unzip extracted " + dest + zipEntryName); } zipInputStream.close(); } catch (FileNotFoundException e) { Log.e("Haggle", "Unzip error, file not found", e); succeed = false; } catch (Exception 
e) { Log.e("Haggle", "Unzip error: ", e); succeed = false; } return succeed; }