List of usage examples for java.io.PushbackInputStream.read()
public int read() throws IOException
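As a quick orientation before the quoted sources below, here is a minimal, self-contained sketch of the read()/unread() probe pattern these examples rely on (the class name and sample data are illustrative only, not taken from any of the projects listed):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.PushbackInputStream;

public class PushbackReadSketch {
    public static void main(String[] args) throws IOException {
        // A pushback buffer of 1 byte is enough to "peek" at the next byte.
        PushbackInputStream in =
                new PushbackInputStream(new ByteArrayInputStream("abc".getBytes()), 1);

        int b = in.read();            // consume one byte ('a'), or -1 at end of stream
        if (b != -1) {
            in.unread(b);             // push the byte back so the next read() returns it again
        }

        System.out.println((char) in.read()); // prints 'a' again
    }
}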
From source file:com.handywedge.binarystore.store.gcs.BinaryStoreManagerImpl.java
@SuppressWarnings("unused") @Override// w w w. j a v a 2 s . c o m public BinaryInfo upload(StorageInfo storage, BinaryInfo binary, InputStream inStream) throws StoreException { logger.info("GCS update method: start."); logger.debug("" + storage.toString()); logger.debug("?" + binary.toString()); long startSingle = System.currentTimeMillis(); Storage gStorage = getGCSClient(binary.getBucketName(), true); File tempFile = null; logger.info("Uploading a new binary to GCS from a file\n"); BinaryInfo rtnBinary = new BinaryInfo(); BlobId blobId = BlobId.of(binary.getBucketName(), binary.getFileName()); List<Acl> acls = new ArrayList<>(); acls.add(Acl.of(Acl.User.ofAllUsers(), Acl.Role.READER)); BlobInfo blobInfo = BlobInfo.newBuilder(blobId).setContentType(binary.getContentType()) .setStorageClass(StorageClass.COLDLINE).setAcl(acls).build(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); long written = -1L; try { written = IOUtils.copyLarge(inStream, baos, 0, BINARY_PART_SIZE_5MB); } catch (IOException e) { logger.error("IOUtils.copyLarge ?"); throw new StoreException(HttpStatus.SC_INTERNAL_SERVER_ERROR, ErrorClassification.OUT_OF_RESOURCE, e); } byte[] data = baos.toByteArray(); InputStream gcsInputStream = new ByteArrayInputStream(data); if (written < BINARY_PART_SIZE_5MB) { Blob blob = gStorage.create(blobInfo, data); rtnBinary = createReturnBinaryInfo(blob); } else { int firstByte = 0; int partNumber = 1; Boolean isFirstChunck = true; Boolean overSizeLimit = false; InputStream firstChunck = new ByteArrayInputStream(data); PushbackInputStream chunckableInputStream = new PushbackInputStream(inStream, 1); try { tempFile = File.createTempFile(UUID.randomUUID().toString().concat(binary.getFileName()), "tmp"); } catch (IOException e) { logger.error("File.createTempFile ???={}", UUID.randomUUID().toString().concat(binary.getFileName())); throw new StoreException(HttpStatus.SC_INTERNAL_SERVER_ERROR, ErrorClassification.DISK_IO_ERROR, e, UUID.randomUUID().toString().concat(binary.getFileName())); } try { while (-1 != (firstByte = chunckableInputStream.read())) { long partSize = 0; chunckableInputStream.unread(firstByte); OutputStream os = null; try { os = new BufferedOutputStream(new FileOutputStream(tempFile.getAbsolutePath(), true)); if (isFirstChunck == true) { partSize = IOUtils.copyLarge(firstChunck, os, 0, (BINARY_PART_SIZE_5MB)); isFirstChunck = false; } else { partSize = IOUtils.copyLarge(chunckableInputStream, os, 0, (BINARY_PART_SIZE_5MB)); } written += partSize; if (written > BINARY_PART_SIZE_5MB * 1024) { // 5GB overSizeLimit = true; logger.error("OVERSIZED FILE ({}). 
STARTING ABORT", written); break; } } finally { IOUtils.closeQuietly(os); } Boolean isLastPart = -1 == (firstByte = chunckableInputStream.read()); if (!isLastPart) { chunckableInputStream.unread(firstByte); } } } catch (IOException e) { logger.error("??????={}", UUID.randomUUID().toString().concat(binary.getFileName())); throw new StoreException(HttpStatus.SC_INTERNAL_SERVER_ERROR, ErrorClassification.DISK_IO_ERROR, e, UUID.randomUUID().toString().concat(binary.getFileName())); } try { WriteChannel writer = gStorage.writer(blobInfo); byte[] buffer = new byte[1024]; InputStream input = new FileInputStream(tempFile); int limit; while ((limit = input.read(buffer)) >= 0) { try { writer.write(ByteBuffer.wrap(buffer, 0, limit)); } catch (Exception ex) { logger.error("?????"); throw ex; } } } catch (IOException e) { logger.error("Upload???={}", blobInfo.toString()); throw new StoreException(HttpStatus.SC_INTERNAL_SERVER_ERROR, ErrorClassification.UPLOAD_FAIL, e, blobInfo.toString()); } if (null != tempFile && tempFile.exists()) { tempFile.delete(); } } Blob blob = gStorage.get(blobInfo.getBlobId()); rtnBinary = createReturnBinaryInfo(blob); long endSingle = System.currentTimeMillis(); logger.info("{} Geted : {} ms\n", binary.getFileName(), (endSingle - startSingle)); logger.info("GCS update method: end."); return binary; }
From source file:net.stuxcrystal.simpledev.configuration.parser.generators.xml.XMLParser.java
/**
 * Checks if a stream is actually empty.
 * @param pis The input stream.
 * @return {@code true} if so.
 */
private static boolean isEmpty(PushbackInputStream pis) throws IOException {
    int b = pis.read();
    boolean empty = (b == -1);
    if (!empty) {
        // Push the probe byte back so callers still see the full stream.
        pis.unread(b);
    }
    return empty;
}
From source file:org.apache.james.mailbox.maildir.mail.model.MaildirMessage.java
/**
 * Return the position in the given {@link InputStream} at which the body of
 * the MailboxMessage starts.
 */
private int bodyStartOctet(InputStream msgIn) throws IOException {
    // we need to push back at most 3 bytes
    PushbackInputStream in = new PushbackInputStream(msgIn, 3);
    int localBodyStartOctet = in.available();
    int i;
    int count = 0;
    // Scan for the CRLFCRLF sequence that separates the headers from the body.
    while ((i = in.read()) != -1 && in.available() > 4) {
        if (i == 0x0D) {
            int a = in.read();
            if (a == 0x0A) {
                int b = in.read();
                if (b == 0x0D) {
                    int c = in.read();
                    if (c == 0x0A) {
                        localBodyStartOctet = count + 4;
                        break;
                    }
                    in.unread(c);
                }
                in.unread(b);
            }
            in.unread(a);
        }
        count++;
    }
    return localBodyStartOctet;
}
From source file:org.apache.pig.builtin.Utf8StorageConverter.java
private DataBag consumeBag(PushbackInputStream in, ResourceFieldSchema fieldSchema) throws IOException {
    if (fieldSchema == null) {
        throw new IOException("Schema is null");
    }
    ResourceFieldSchema[] fss = fieldSchema.getSchema().getFields();
    Tuple t;
    int buf;
    while ((buf = in.read()) != '{') {
        if (buf == -1) {
            throw new IOException("Unexpect end of bag");
        }
    }
    if (fss.length != 1)
        throw new IOException("Only tuple is allowed inside bag schema");
    ResourceFieldSchema fs = fss[0];
    DataBag db = DefaultBagFactory.getInstance().newDefaultBag();
    while (true) {
        t = consumeTuple(in, fs);
        if (t != null)
            db.add(t);
        while ((buf = in.read()) != '}' && buf != ',') {
            if (buf == -1) {
                throw new IOException("Unexpect end of bag");
            }
        }
        if (buf == '}')
            break;
    }
    return db;
}
From source file:org.apache.pig.builtin.Utf8StorageConverter.java
private Tuple consumeTuple(PushbackInputStream in, ResourceFieldSchema fieldSchema) throws IOException {
    if (fieldSchema == null) {
        throw new IOException("Schema is null");
    }
    int buf;
    ByteArrayOutputStream mOut;
    while ((buf = in.read()) != '(' || buf == '}') {
        if (buf == -1) {
            throw new IOException("Unexpect end of tuple");
        }
        if (buf == '}') {
            in.unread(buf);
            return null;
        }
    }
    Tuple t = TupleFactory.getInstance().newTuple();
    if (fieldSchema.getSchema() != null && fieldSchema.getSchema().getFields().length != 0) {
        ResourceFieldSchema[] fss = fieldSchema.getSchema().getFields();
        // Interpret item inside tuple one by one based on the inner schema
        for (int i = 0; i < fss.length; i++) {
            Object field;
            ResourceFieldSchema fs = fss[i];
            int delimit = ',';
            if (i == fss.length - 1)
                delimit = ')';
            if (DataType.isComplex(fs.getType())) {
                field = consumeComplexType(in, fs);
                while ((buf = in.read()) != delimit) {
                    if (buf == -1) {
                        throw new IOException("Unexpect end of tuple");
                    }
                }
            } else {
                mOut = new ByteArrayOutputStream(BUFFER_SIZE);
                while ((buf = in.read()) != delimit) {
                    if (buf == -1) {
                        throw new IOException("Unexpect end of tuple");
                    }
                    if (buf == delimit)
                        break;
                    mOut.write(buf);
                }
                field = parseSimpleType(mOut.toByteArray(), fs);
            }
            t.append(field);
        }
    } else {
        // No inner schema, treat everything inside tuple as bytearray
        // keep track of nested tuple/bag/map. We do not interpret, save them as bytearray
        Deque<Character> level = new LinkedList<Character>();
        mOut = new ByteArrayOutputStream(BUFFER_SIZE);
        while (true) {
            buf = in.read();
            if (buf == -1) {
                throw new IOException("Unexpect end of tuple");
            }
            if (buf == '[' || buf == '{' || buf == '(') {
                level.push((char) buf);
                mOut.write(buf);
            } else if (buf == ')' && level.isEmpty()) { // End of tuple
                DataByteArray value = new DataByteArray(mOut.toByteArray());
                t.append(value);
                break;
            } else if (buf == ',' && level.isEmpty()) {
                DataByteArray value = new DataByteArray(mOut.toByteArray());
                t.append(value);
                mOut.reset();
            } else if (buf == ']' || buf == '}' || buf == ')') {
                if (level.peek() == findStartChar((char) buf))
                    level.pop();
                else
                    throw new IOException("Malformed tuple");
                mOut.write(buf);
            } else {
                mOut.write(buf);
            }
        }
    }
    return t;
}
From source file:org.apache.pig.builtin.Utf8StorageConverter.java
private Map<String, Object> consumeMap(PushbackInputStream in, ResourceFieldSchema fieldSchema) throws IOException {
    int buf;
    boolean emptyMap = true;
    while ((buf = in.read()) != '[') {
        if (buf == -1) {
            throw new IOException("Unexpect end of map");
        }
    }
    HashMap<String, Object> m = new HashMap<String, Object>();
    ByteArrayOutputStream mOut = new ByteArrayOutputStream(BUFFER_SIZE);
    while (true) {
        // Read key (assume key can not contains special character such as #, (, [, {, }, ], )
        while ((buf = in.read()) != '#') {
            // end of map
            if (emptyMap && buf == ']') {
                return m;
            }
            if (buf == -1) {
                throw new IOException("Unexpect end of map");
            }
            emptyMap = false;
            mOut.write(buf);
        }
        String key = bytesToCharArray(mOut.toByteArray());
        if (key.length() == 0)
            throw new IOException("Map key can not be null");
        // Read value
        mOut.reset();
        // keep track of nested tuple/bag/map. We do not interpret, save them as bytearray
        Deque<Character> level = new LinkedList<Character>();
        while (true) {
            buf = in.read();
            if (buf == -1) {
                throw new IOException("Unexpect end of map");
            }
            if (buf == '[' || buf == '{' || buf == '(') {
                level.push((char) buf);
            } else if (buf == ']' && level.isEmpty()) {
                // End of map
                break;
            } else if (buf == ']' || buf == '}' || buf == ')') {
                if (level.isEmpty())
                    throw new IOException("Malformed map");
                if (level.peek() == findStartChar((char) buf))
                    level.pop();
            } else if (buf == ',' && level.isEmpty()) {
                // Current map item complete
                break;
            }
            mOut.write(buf);
        }
        Object value = null;
        if (fieldSchema != null && fieldSchema.getSchema() != null && mOut.size() > 0) {
            value = bytesToObject(mOut.toByteArray(), fieldSchema.getSchema().getFields()[0]);
        } else if (mOut.size() > 0) {
            // untyped map
            value = new DataByteArray(mOut.toByteArray());
        }
        m.put(key, value);
        mOut.reset();
        if (buf == ']')
            break;
    }
    return m;
}
From source file:org.apache.tika.parser.rtf.TextExtractor.java
private void extract(PushbackInputStream in) throws IOException, SAXException, TikaException {
    out.startDocument();

    while (true) {
        final int b = in.read();
        if (b == -1) {
            break;
        } else if (b == '\\') {
            parseControlToken(in);
        } else if (b == '{') {
            pushText();
            processGroupStart(in);
        } else if (b == '}') {
            pushText();
            processGroupEnd();
            if (groupStates.isEmpty()) {
                // parsed document closing brace
                break;
            }
        } else if (groupState.objdata == true || groupState.pictDepth == 1) {
            embObjHandler.writeHexChar(b);
        } else if (b != '\r' && b != '\n' && (!groupState.ignore || nextMetaData != null
                || groupState.sn == true || groupState.sv == true)) {
            // Linefeed and carriage return are not significant
            if (ansiSkip != 0) {
                ansiSkip--;
            } else {
                addOutputByte(b);
            }
        }
    }
    endParagraph(false);
    out.endDocument();
}
From source file:org.apache.tika.parser.rtf.TextExtractor.java
private void parseControlToken(PushbackInputStream in) throws IOException, SAXException, TikaException {
    int b = in.read();
    if (b == '\'') {
        // escaped hex char
        parseHexChar(in);
    } else if (isAlpha(b)) {
        // control word
        parseControlWord((char) b, in);
    } else if (b == '{' || b == '}' || b == '\\' || b == '\r' || b == '\n') {
        // escaped char
        addOutputByte(b);
    } else if (b != -1) {
        // control symbol, eg \* or \~
        processControlSymbol((char) b);
    }
}
From source file:org.apache.tika.parser.rtf.TextExtractor.java
private void parseHexChar(PushbackInputStream in) throws IOException, SAXException, TikaException {
    int hex1 = in.read();
    if (!isHexChar(hex1)) {
        // DOC ERROR (malformed hex escape): ignore
        in.unread(hex1);
        return;
    }

    int hex2 = in.read();
    if (!isHexChar(hex2)) {
        // TODO: log a warning here, somehow?
        // DOC ERROR (malformed hex escape): ignore
        in.unread(hex2);
        return;
    }

    if (ansiSkip != 0) {
        // Skip this ansi char since we are
        // still in the shadow of a unicode escape:
        ansiSkip--;
    } else {
        // Unescape:
        addOutputByte(16 * hexValue(hex1) + hexValue(hex2));
    }
}