Usage examples for java.util.zip.GZIPInputStream.close()
public void close() throws IOException
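Calling close() releases the decompressor and closes the underlying input stream, so each example below closes its GZIPInputStream once reading is finished. A minimal, self-contained sketch (the file path "data.txt.gz" is a placeholder, not taken from the examples below) that uses try-with-resources so close() runs even if reading throws:

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;

public class GzipCloseExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes the reader, which in turn closes the GZIPInputStream
        // and the underlying FileInputStream ("data.txt.gz" is just a placeholder path).
        try (GZIPInputStream gzip = new GZIPInputStream(new FileInputStream("data.txt.gz"));
             BufferedReader reader = new BufferedReader(new InputStreamReader(gzip, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}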
From source file: org.openmrs.module.odkconnector.serialization.serializer.ListSerializerTest.java

@Test
public void serialize_shouldSerializePatientInformation() throws Exception {
    File file = File.createTempFile("PatientSerialization", "Example");
    GZIPOutputStream outputStream = new GZIPOutputStream(new BufferedOutputStream(new FileOutputStream(file)));
    log.info("Writing to: " + file.getAbsolutePath());

    Cohort cohort = new Cohort();
    cohort.addMember(6);
    cohort.addMember(7);
    cohort.addMember(8);

    List<Patient> patients = new ArrayList<Patient>();
    List<Obs> observations = new ArrayList<Obs>();
    List<Form> forms = new ArrayList<Form>();
    for (Integer patientId : cohort.getMemberIds()) {
        Patient patient = Context.getPatientService().getPatient(patientId);
        observations.addAll(Context.getObsService().getObservationsByPerson(patient));
        patients.add(patient);
    }

    Serializer serializer = HandlerUtil.getPreferredHandler(Serializer.class, List.class);
    serializer.write(outputStream, patients);
    serializer.write(outputStream, observations);
    serializer.write(outputStream, forms);
    outputStream.close();

    GZIPInputStream inputStream = new GZIPInputStream(new BufferedInputStream(new FileInputStream(file)));
    DataInputStream dataInputStream = new DataInputStream(inputStream);

    // total number of patients
    Integer patientCounter = dataInputStream.readInt();
    System.out.println("Patient Counter: " + patientCounter);
    for (int i = 0; i < patientCounter; i++) {
        System.out.println("=================Patient=====================");
        System.out.println("Patient Id: " + dataInputStream.readInt());
        System.out.println("Family Name: " + dataInputStream.readUTF());
        System.out.println("Middle Name: " + dataInputStream.readUTF());
        System.out.println("Last Name: " + dataInputStream.readUTF());
        System.out.println("Gender: " + dataInputStream.readUTF());
        System.out.println("Birth Date: " + dataInputStream.readLong());
        System.out.println("Identifier" + dataInputStream.readUTF());
    }

    Integer obsCounter = dataInputStream.readInt();
    for (int j = 0; j < obsCounter; j++) {
        System.out.println("==================Observation=================");
        System.out.println("Patient Id: " + dataInputStream.readInt());
        System.out.println("Concept Name: " + dataInputStream.readUTF());
        byte type = dataInputStream.readByte();
        if (type == ObsSerializer.TYPE_STRING)
            System.out.println("Value: " + dataInputStream.readUTF());
        else if (type == ObsSerializer.TYPE_INT)
            System.out.println("Value: " + dataInputStream.readInt());
        else if (type == ObsSerializer.TYPE_DOUBLE)
            System.out.println("Value: " + dataInputStream.readDouble());
        else if (type == ObsSerializer.TYPE_DATE)
            System.out.println("Value: " + dataInputStream.readLong());
        System.out.println("Time: " + dataInputStream.readLong());
    }

    Integer formCounter = dataInputStream.readInt();
    for (int j = 0; j < formCounter; j++) {
        System.out.println("==================Form=================");
        System.out.println("Form Id: " + dataInputStream.readInt());
    }
    System.out.println();

    inputStream.close();
}
From source file: org.apache.cxf.systest.jaxrs.JAXRSClientServerBookTest.java

@Test
public void testGetBookQueryGZIP() throws Exception {
    String address = "http://localhost:" + PORT + "/bookstore/";
    WebClient wc = WebClient.create(address);
    wc.acceptEncoding("gzip,deflate");
    wc.encoding("gzip");
    InputStream r = wc.get(InputStream.class);
    assertNotNull(r);
    GZIPInputStream in = new GZIPInputStream(r);
    String s = IOUtils.toString(in);
    in.close();
    assertTrue(s, s.contains("id>124"));
}
From source file: csiro.pidsvc.mappingstore.Manager.java

@SuppressWarnings("unchecked")
protected String unwrapCompressedBackupFile(HttpServletRequest request, ICallback callback) {
    java.util.List<FileItem> fileList = null;
    GZIPInputStream gis = null;
    String ret = null;
    try {
        DiskFileItemFactory fileItemFactory = new DiskFileItemFactory();

        // Set the size threshold, above which content will be stored on disk.
        fileItemFactory.setSizeThreshold(1 * 1024 * 1024); // 1 MB
        // fileItemFactory.setSizeThreshold(100 * 1024); // 100 KB

        // Set the temporary directory to store the uploaded files of size above threshold.
        fileItemFactory.setRepository(new File(System.getProperty("java.io.tmpdir")));

        ServletFileUpload uploadHandler = new ServletFileUpload(fileItemFactory);
        fileList = uploadHandler.parseRequest(request);
        for (FileItem item : fileList) {
            if (item.isFormField())
                continue;
            try {
                // Try to restore the backup file as it was in binary format.
                gis = new GZIPInputStream(item.getInputStream());
                ret = callback.process(gis);
                gis.close();
            } catch (IOException ex) {
                String msg = ex.getMessage();
                if (msg != null && msg.equalsIgnoreCase("Not in GZIP format")) {
                    // Try to restore the backup file as it was unzipped.
                    ret = callback.process(item.getInputStream());
                } else
                    throw ex;
            }

            // Process the first uploaded file only.
            return ret;
        }
    } catch (Exception ex) {
        String msg = ex.getMessage();
        Throwable linkedException = ex.getCause();
        _logger.warn(msg);
        if (linkedException != null)
            _logger.warn(linkedException.getMessage());

        if (msg != null && msg.equalsIgnoreCase("Not in GZIP format"))
            return "ERROR: Unknown file format.";
        else
            return "ERROR: " + (msg == null ? "Something went wrong."
                    : msg + (linkedException == null ? "" : " " + linkedException.getMessage()));
    } finally {
        try {
            // Close the stream.
            gis.close();
        } catch (Exception ex) {
        }
        if (fileList != null) {
            // Delete all uploaded files.
            for (FileItem item : fileList) {
                if (!item.isFormField() && !item.isInMemory())
                    ((DiskFileItem) item).delete();
            }
        }
    }
    _logger.trace("No file found.");
    return "ERROR: No file.";
}
From source file: com.meetup.memcached.MemcachedClient.java

/**
 * This method loads the data from cache into a Map.
 *
 * Pass a SockIO object which is ready to receive data and a HashMap<br/>
 * to store the results.
 *
 * @param sock socket waiting to pass back data
 * @param hm hashmap to store data into
 * @param asString if true, and if we are using NativehHandler, return string val
 * @throws IOException if io exception happens while reading from socket
 */
private void loadMulti(LineInputStream input, Map<String, Object> hm, boolean asString) throws IOException {
    while (true) {
        String line = input.readLine();
        if (log.isDebugEnabled())
            log.debug("++++ line: " + line);

        if (line.startsWith(VALUE)) {
            String[] info = line.split(" ");
            String key = info[1];
            int flag = Integer.parseInt(info[2]);
            int length = Integer.parseInt(info[3]);

            if (log.isDebugEnabled()) {
                log.debug("++++ key: " + key);
                log.debug("++++ flags: " + flag);
                log.debug("++++ length: " + length);
            }

            // read obj into buffer
            byte[] buf = new byte[length];
            input.read(buf);
            input.clearEOL();

            // ready object
            Object o;

            // check for compression
            if ((flag & F_COMPRESSED) == F_COMPRESSED) {
                try {
                    // read the input stream, and write to a byte array output stream since
                    // we have to read into a byte array, but we don't know how large it
                    // will need to be, and we don't want to resize it a bunch
                    GZIPInputStream gzi = new GZIPInputStream(new ByteArrayInputStream(buf));
                    ByteArrayOutputStream bos = new ByteArrayOutputStream(buf.length);

                    int count;
                    byte[] tmp = new byte[2048];
                    while ((count = gzi.read(tmp)) != -1) {
                        bos.write(tmp, 0, count);
                    }

                    // store uncompressed back to buffer
                    buf = bos.toByteArray();
                    gzi.close();
                } catch (IOException e) {
                    // if we have an errorHandler, use its hook
                    if (errorHandler != null)
                        errorHandler.handleErrorOnGet(this, e, key);

                    log.error("++++ IOException thrown while trying to uncompress input stream for key: " + key);
                    log.error(e.getMessage(), e);
                    throw new NestedIOException(
                            "++++ IOException thrown while trying to uncompress input stream for key: " + key, e);
                }
            }

            // we can only take out serialized objects
            if ((flag & F_SERIALIZED) != F_SERIALIZED) {
                if (primitiveAsString || asString) {
                    // pulling out string value
                    if (log.isInfoEnabled())
                        log.info("++++ retrieving object and stuffing into a string.");
                    o = new String(buf, defaultEncoding);
                } else {
                    // decoding object
                    try {
                        o = NativeHandler.decode(buf, flag);
                    } catch (Exception e) {
                        // if we have an errorHandler, use its hook
                        if (errorHandler != null)
                            errorHandler.handleErrorOnGet(this, e, key);

                        log.error("++++ Exception thrown while trying to deserialize for key: " + key, e);
                        throw new NestedIOException(e);
                    }
                }
            } else {
                // deserialize if the data is serialized
                ContextObjectInputStream ois = new ContextObjectInputStream(new ByteArrayInputStream(buf), classLoader);
                try {
                    o = ois.readObject();
                    if (log.isInfoEnabled())
                        log.info("++++ deserializing " + o.getClass());
                } catch (ClassNotFoundException e) {
                    // if we have an errorHandler, use its hook
                    if (errorHandler != null)
                        errorHandler.handleErrorOnGet(this, e, key);

                    log.error("++++ ClassNotFoundException thrown while trying to deserialize for key: " + key, e);
                    throw new NestedIOException("+++ failed while trying to deserialize for key: " + key, e);
                } finally {
                    ois.close();
                }
            }

            // store the object into the cache
            hm.put(key, o);
        } else if (END.equals(line)) {
            if (log.isDebugEnabled())
                log.debug("++++ finished reading from cache server");
            break;
        }
    }
}
From source file: org.pentaho.di.trans.steps.blockingstep.BlockingStep.java

private Object[] getBuffer() {
    Object[] retval;

    // Open all files at once and read one row from each file...
    if (data.files.size() > 0 && (data.dis.size() == 0 || data.fis.size() == 0)) {
        if (log.isBasic()) {
            logBasic(BaseMessages.getString(PKG, "BlockingStep.Log.Openfiles"));
        }

        try {
            FileObject fileObject = data.files.get(0);
            String filename = KettleVFS.getFilename(fileObject);
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "BlockingStep.Log.Openfilename1") + filename
                        + BaseMessages.getString(PKG, "BlockingStep.Log.Openfilename2"));
            }
            InputStream fi = KettleVFS.getInputStream(fileObject);
            DataInputStream di;
            data.fis.add(fi);
            if (meta.getCompress()) {
                GZIPInputStream gzfi = new GZIPInputStream(new BufferedInputStream(fi));
                di = new DataInputStream(gzfi);
                data.gzis.add(gzfi);
            } else {
                di = new DataInputStream(fi);
            }
            data.dis.add(di);

            // How long is the buffer?
            int buffersize = di.readInt();

            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "BlockingStep.Log.BufferSize1") + filename
                        + BaseMessages.getString(PKG, "BlockingStep.Log.BufferSize2") + buffersize + " "
                        + BaseMessages.getString(PKG, "BlockingStep.Log.BufferSize3"));
            }

            if (buffersize > 0) {
                // Read a row from temp-file
                data.rowbuffer.add(data.outputRowMeta.readData(di));
            }
        } catch (Exception e) {
            logError(BaseMessages.getString(PKG, "BlockingStepMeta.ErrorReadingFile") + e.toString());
            logError(Const.getStackTracker(e));
        }
    }

    if (data.files.size() == 0) {
        if (data.buffer.size() > 0) {
            retval = data.buffer.get(0);
            data.buffer.remove(0);
        } else {
            retval = null;
        }
    } else {
        if (data.rowbuffer.size() == 0) {
            retval = null;
        } else {
            retval = data.rowbuffer.get(0);
            data.rowbuffer.remove(0);

            // now get another
            FileObject file = data.files.get(0);
            DataInputStream di = data.dis.get(0);
            InputStream fi = data.fis.get(0);
            GZIPInputStream gzfi = (meta.getCompress()) ? data.gzis.get(0) : null;

            try {
                data.rowbuffer.add(0, data.outputRowMeta.readData(di));
            } catch (SocketTimeoutException e) {
                logError(BaseMessages.getString(PKG, "System.Log.UnexpectedError") + " : " + e.toString());
                logError(Const.getStackTracker(e));
                setErrors(1);
                stopAll();
            } catch (KettleFileException fe) {
                // empty file or EOF mostly
                try {
                    di.close();
                    fi.close();
                    if (gzfi != null) {
                        gzfi.close();
                    }
                    file.delete();
                } catch (IOException e) {
                    logError(BaseMessages.getString(PKG, "BlockingStepMeta.UnableDeleteFile") + file.toString());
                    setErrors(1);
                    stopAll();
                    return null;
                }

                data.files.remove(0);
                data.dis.remove(0);
                data.fis.remove(0);
                if (gzfi != null) {
                    data.gzis.remove(0);
                }
            }
        }
    }
    return retval;
}
From source file: com.panet.imeta.trans.steps.blockingstep.BlockingStep.java

private Object[] getBuffer() {
    Object[] retval;

    // Open all files at once and read one row from each file...
    if (data.files.size() > 0 && (data.dis.size() == 0 || data.fis.size() == 0)) {
        if (log.isBasic())
            logBasic(Messages.getString("BlockingStep.Log.Openfiles"));

        try {
            FileObject fileObject = (FileObject) data.files.get(0);
            String filename = KettleVFS.getFilename(fileObject);
            if (log.isDetailed())
                logDetailed(Messages.getString("BlockingStep.Log.Openfilename1") + filename
                        + Messages.getString("BlockingStep.Log.Openfilename2"));
            InputStream fi = KettleVFS.getInputStream(fileObject);
            DataInputStream di;
            data.fis.add(fi);
            if (meta.getCompress()) {
                GZIPInputStream gzfi = new GZIPInputStream(new BufferedInputStream(fi));
                di = new DataInputStream(gzfi);
                data.gzis.add(gzfi);
            } else {
                di = new DataInputStream(fi);
            }
            data.dis.add(di);

            // How long is the buffer?
            int buffersize = di.readInt();

            if (log.isDetailed())
                logDetailed(Messages.getString("BlockingStep.Log.BufferSize1") + filename
                        + Messages.getString("BlockingStep.Log.BufferSize2") + buffersize + " "
                        + Messages.getString("BlockingStep.Log.BufferSize3"));

            if (buffersize > 0) {
                // Read a row from temp-file
                data.rowbuffer.add(data.outputRowMeta.readData(di));
            }
        } catch (Exception e) {
            logError(Messages.getString("BlockingStepMeta.ErrorReadingFile") + e.toString());
            logError(Const.getStackTracker(e));
        }
    }

    if (data.files.size() == 0) {
        if (data.buffer.size() > 0) {
            retval = (Object[]) data.buffer.get(0);
            data.buffer.remove(0);
        } else {
            retval = null;
        }
    } else {
        if (data.rowbuffer.size() == 0) {
            retval = null;
        } else {
            retval = (Object[]) data.rowbuffer.get(0);
            data.rowbuffer.remove(0);

            // now get another
            FileObject file = (FileObject) data.files.get(0);
            DataInputStream di = (DataInputStream) data.dis.get(0);
            InputStream fi = (InputStream) data.fis.get(0);
            GZIPInputStream gzfi = (meta.getCompress()) ? (GZIPInputStream) data.gzis.get(0) : null;

            try {
                data.rowbuffer.add(0, data.outputRowMeta.readData(di));
            } catch (SocketTimeoutException e) {
                logError(Messages.getString("System.Log.UnexpectedError") + " : " + e.toString()); //$NON-NLS-1$ //$NON-NLS-2$
                logError(Const.getStackTracker(e));
                setErrors(1);
                stopAll();
            } catch (KettleFileException fe) { // empty file or EOF mostly
                try {
                    di.close();
                    fi.close();
                    if (gzfi != null)
                        gzfi.close();
                    file.delete();
                } catch (IOException e) {
                    logError(Messages.getString("BlockingStepMeta.UnableDeleteFile") + file.toString());
                    setErrors(1);
                    stopAll();
                    return null;
                }

                data.files.remove(0);
                data.dis.remove(0);
                data.fis.remove(0);
                if (gzfi != null)
                    data.gzis.remove(0);
            }
        }
    }
    return retval;
}
From source file: com.meetup.memcached.MemcachedClient.java

/**
 * Retrieve a key from the server, using a specific hash.
 *
 * If the data was compressed or serialized when compressed, it will automatically<br/>
 * be decompressed or serialized, as appropriate. (Inclusive or)<br/>
 * <br/>
 * Non-serialized data will be returned as a string, so explicit conversion to<br/>
 * numeric types will be necessary, if desired<br/>
 *
 * @param key key where data is stored
 * @param hashCode if not null, then the int hashcode to use
 * @param asString if true, then return string val
 * @return the object that was previously stored, or null if it was not previously stored
 */
public Object get(String key, Integer hashCode, boolean asString) {
    if (key == null) {
        log.error("key is null for get()");
        return null;
    }

    try {
        key = sanitizeKey(key);
    } catch (UnsupportedEncodingException e) {
        // if we have an errorHandler, use its hook
        if (errorHandler != null)
            errorHandler.handleErrorOnGet(this, e, key);

        log.error("failed to sanitize your key!", e);
        return null;
    }

    // get SockIO obj using cache key
    SockIOPool.SockIO sock = pool.getSock(key, hashCode);

    if (sock == null) {
        if (errorHandler != null)
            errorHandler.handleErrorOnGet(this, new IOException("no socket to server available"), key);
        return null;
    }

    try {
        String cmd = "get " + key + "\r\n";

        if (log.isDebugEnabled())
            log.debug("++++ memcache get command: " + cmd);

        sock.write(cmd.getBytes());
        sock.flush();

        // ready object
        Object o = null;

        while (true) {
            String line = sock.readLine();

            if (log.isDebugEnabled())
                log.debug("++++ line: " + line);

            if (line.startsWith(VALUE)) {
                String[] info = line.split(" ");
                int flag = Integer.parseInt(info[2]);
                int length = Integer.parseInt(info[3]);

                if (log.isDebugEnabled()) {
                    log.debug("++++ key: " + key);
                    log.debug("++++ flags: " + flag);
                    log.debug("++++ length: " + length);
                }

                // read obj into buffer
                byte[] buf = new byte[length];
                sock.read(buf);
                sock.clearEOL();

                if ((flag & F_COMPRESSED) == F_COMPRESSED) {
                    try {
                        // read the input stream, and write to a byte array output stream since
                        // we have to read into a byte array, but we don't know how large it
                        // will need to be, and we don't want to resize it a bunch
                        GZIPInputStream gzi = new GZIPInputStream(new ByteArrayInputStream(buf));
                        ByteArrayOutputStream bos = new ByteArrayOutputStream(buf.length);

                        int count;
                        byte[] tmp = new byte[2048];
                        while ((count = gzi.read(tmp)) != -1) {
                            bos.write(tmp, 0, count);
                        }

                        // store uncompressed back to buffer
                        buf = bos.toByteArray();
                        gzi.close();
                    } catch (IOException e) {
                        // if we have an errorHandler, use its hook
                        if (errorHandler != null)
                            errorHandler.handleErrorOnGet(this, e, key);

                        log.error("++++ IOException thrown while trying to uncompress input stream for key: " + key);
                        log.error(e.getMessage(), e);
                        throw new NestedIOException(
                                "++++ IOException thrown while trying to uncompress input stream for key: " + key, e);
                    }
                }

                // we can only take out serialized objects
                if ((flag & F_SERIALIZED) != F_SERIALIZED) {
                    if (primitiveAsString || asString) {
                        // pulling out string value
                        if (log.isInfoEnabled())
                            log.info("++++ retrieving object and stuffing into a string.");
                        o = new String(buf, defaultEncoding);
                    } else {
                        // decoding object
                        try {
                            o = NativeHandler.decode(buf, flag);
                        } catch (Exception e) {
                            // if we have an errorHandler, use its hook
                            if (errorHandler != null)
                                errorHandler.handleErrorOnGet(this, e, key);

                            log.error("++++ Exception thrown while trying to deserialize for key: " + key, e);
                            throw new NestedIOException(e);
                        }
                    }
                } else {
                    // deserialize if the data is serialized
                    ContextObjectInputStream ois = new ContextObjectInputStream(new ByteArrayInputStream(buf), classLoader);
                    try {
                        o = ois.readObject();
                        if (log.isInfoEnabled())
                            log.info("++++ deserializing " + o.getClass());
                    } catch (ClassNotFoundException e) {
                        // if we have an errorHandler, use its hook
                        if (errorHandler != null)
                            errorHandler.handleErrorOnGet(this, e, key);

                        log.error("++++ ClassNotFoundException thrown while trying to deserialize for key: " + key, e);
                        throw new NestedIOException("+++ failed while trying to deserialize for key: " + key, e);
                    } finally {
                        ois.close();
                    }
                }
            } else if (END.equals(line)) {
                if (log.isDebugEnabled())
                    log.debug("++++ finished reading from cache server");
                break;
            }
        }

        sock.close();
        sock = null;
        return o;
    } catch (IOException e) {
        // if we have an errorHandler, use its hook
        if (errorHandler != null)
            errorHandler.handleErrorOnGet(this, e, key);

        // exception thrown
        log.error("++++ exception thrown while trying to get object from cache for key: " + key);
        log.error(e.getMessage(), e);

        try {
            sock.trueClose();
        } catch (IOException ioe) {
            log.error("++++ failed to close socket : " + sock.toString());
        }
        sock = null;
    }

    if (sock != null)
        sock.close();

    return null;
}
From source file: net.creativeparkour.GameManager.java

static void jouer(Player p, String nomMap) throws IOException {
    long nano = System.nanoTime();
    CPMap m = getMap(nomMap);
    if (m != null && m.isPlayable())
        jouer(p, m, false, true);
    else {
        if (!nomMap.isEmpty())
            p.sendMessage(Config.prefix() + ChatColor.RED + Langues.getMessage("commands.play error"));
        nano = CPUtils.debugNanoTime("PLAY1", nano);

        // Import any maps placed in the "Automatically import maps" folder
        File folder = new File(CreativeParkour.getPlugin().getDataFolder(), "/Automatically import maps");
        folder.mkdirs();
        File dossierDest = new File(CreativeParkour.getPlugin().getDataFolder(), "/Not imported maps");
        File[] listOfFiles = folder.listFiles();
        for (int i = 0; i < listOfFiles.length; i++) {
            File f = listOfFiles[i];
            if (f.isFile() && f.getName().endsWith(".cpmap")) {
                GZIPInputStream gzip = new GZIPInputStream(new FileInputStream(f));
                try {
                    BufferedReader br = new BufferedReader(new InputStreamReader(gzip));
                    try {
                        JsonObject jsData = new JsonParser().parse(br.readLine()).getAsJsonObject();
                        if (mapExistante(jsData.get("uuidMap").getAsString())) {
                            if (p.isOp())
                                p.sendMessage(Config.prefix() + ChatColor.RED + Langues
                                        .getMessage("commands.import error 2").replace("%file", f.getName()));
                        } else {
                            CPMap map = construireMapTelechargee(jsData, CPMapState.PUBLISHED, p);
                            if (p.isOp()) {
                                if (map == null) {
                                    p.sendMessage(Config.prefix() + ChatColor.RED + Langues
                                            .getMessage("commands.import error").replace("%map", f.getName()));
                                } else {
                                    p.sendMessage(Config.prefix() + ChatColor.GREEN + Langues
                                            .getMessage("commands.import success").replace("%map", map.getName()));
                                    dossierDest = new File(CreativeParkour.getPlugin().getDataFolder(), "/Imported maps");
                                }
                            }
                        }
                    } finally {
                        br.close();
                    }
                } finally {
                    gzip.close();
                }
                dossierDest.mkdirs();
                Files.move(f, new File(dossierDest, f.getName()));
            }
        }
        nano = CPUtils.debugNanoTime("PLAY2", nano);
        selectionMap(p); // Open the inventory with the list of maps
    }

    // Keep the list of downloadable maps up to date if this is the first call
    if (!taskListe && Config.online() && p.hasPermission("creativeparkour.download")) {
        taskListe = true;
        Bukkit.getScheduler().runTaskTimer(CreativeParkour.getPlugin(), new Runnable() {
            public void run() {
                synchroWeb();
            }
        }, 20, 144000); // After 1 second, then every 2 hours
    }
}
From source file: net.cbtltd.rest.nextpax.A_Handler.java

/**
 * Get unzipped input stream for file name.
 *
 * @param fn the file name.
 * @return the input stream.
 * @throws Throwable the exception that can be thrown.
 */
private final synchronized InputStream ftp(String fn) throws Throwable {
    String urlname = "ftp://bookingnet:FEJvvn$LYGCUd-2_Vq4zI@secure.nextpax.com/" + fn + ".gz;type=i";
    URL url = new URL(urlname);
    URLConnection urlc = url.openConnection();
    byte[] buf = new byte[1024];
    GZIPInputStream zinstream = new GZIPInputStream(urlc.getInputStream());
    FileOutputStream outstream = new FileOutputStream(fn);
    int n;
    while ((n = zinstream.read(buf, 0, 1024)) > -1) {
        outstream.write(buf, 0, n);
    }
    outstream.close();
    zinstream.close();
    return new BufferedInputStream(new FileInputStream(fn));
}
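The ftp(...) helper above closes its streams only on the success path; if a read or write throws, both zinstream and outstream remain open. A minimal sketch of the same download-and-decompress step using try-with-resources (illustrative method name, URL, and credentials; assumes the same java.io, java.net, and java.util.zip imports as the method above):

// Sketch only: placeholder credentials and host, not the actual endpoint used above.
private InputStream downloadAndUnzip(String fn) throws IOException {
    URL url = new URL("ftp://user:password@example.com/" + fn + ".gz;type=i");
    // try-with-resources calls close() on both streams even when copying fails.
    try (GZIPInputStream zin = new GZIPInputStream(url.openConnection().getInputStream());
         FileOutputStream out = new FileOutputStream(fn)) {
        byte[] buf = new byte[1024];
        int n;
        // Copy the decompressed bytes to a local file.
        while ((n = zin.read(buf, 0, buf.length)) > -1) {
            out.write(buf, 0, n);
        }
    }
    return new BufferedInputStream(new FileInputStream(fn));
}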