List of usage examples for java.nio.channels.ReadableByteChannel.read(ByteBuffer)
public int read(ByteBuffer dst) throws IOException;
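This method reads a sequence of bytes from the channel into the given buffer and returns the number of bytes read, possibly zero, or -1 when the channel has reached end-of-stream. The snippet below is a minimal sketch of the standard fill-and-drain loop around read(); the drain method name and the Consumer callback are illustrative only and do not come from any of the sources listed here.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import java.util.function.Consumer;

public class ReadLoopSketch {
    // Minimal sketch: repeatedly fill the buffer from the channel and hand it to a consumer.
    static void drain(ReadableByteChannel channel, Consumer<ByteBuffer> process) throws IOException {
        ByteBuffer buffer = ByteBuffer.allocate(8 * 1024);
        // read() returns the number of bytes transferred, or -1 at end-of-stream
        while (channel.read(buffer) != -1) {
            buffer.flip();          // switch the buffer from filling to draining
            process.accept(buffer); // consume whatever was read so far
            buffer.compact();       // keep any unread bytes and make room for the next read
        }
    }
}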
From source file:org.apache.hadoop.ipc.ServerRpcSSLEngineImpl.java
@Override
public int read(ReadableByteChannel channel, ByteBuffer buffer, Server.Connection connection) throws IOException {
    int netRead = channel.read(clientNetBuffer);
    if (netRead == -1) {
        return -1;
    }
    int read = 0;
    SSLEngineResult unwrapResult;
    do {
        clientNetBuffer.flip();
        unwrapResult = sslEngine.unwrap(clientNetBuffer, clientAppBuffer);
        clientNetBuffer.compact();
        if (unwrapResult.getStatus().equals(SSLEngineResult.Status.OK)) {
            read += unwrapResult.bytesProduced();
            clientAppBuffer.flip();
            while (clientAppBuffer.hasRemaining()) {
                byte currentByte = clientAppBuffer.get();
                try {
                    buffer.put(currentByte);
                } catch (BufferOverflowException ex) {
                    if (buffer.capacity() < maxUnWrappedDataLength) {
                        buffer = enlargeUnwrappedBuffer(buffer, currentByte);
                        connection.setSslUnwrappedBuffer(buffer);
                    } else {
                        LOG.error("Buffer overflow clientAppBuffer position: " + clientAppBuffer.position()
                                + " but buffer capacity " + buffer.capacity(), ex);
                        throw ex;
                    }
                }
            }
            clientAppBuffer.compact();
        } else if (unwrapResult.getStatus().equals(SSLEngineResult.Status.BUFFER_UNDERFLOW)) {
            read += unwrapResult.bytesProduced();
            break;
        } else if (unwrapResult.getStatus().equals(SSLEngineResult.Status.BUFFER_OVERFLOW)) {
            clientAppBuffer = enlargeApplicationBuffer(clientAppBuffer);
        } else if (unwrapResult.getStatus().equals(SSLEngineResult.Status.CLOSED)) {
            sslEngine.closeOutbound();
            doHandshake();
            read = -1;
            break;
        } else {
            throw new IOException("SSLEngine UNWRAP invalid status: " + unwrapResult.getStatus());
        }
    } while (clientNetBuffer.position() != 0);
    return read;
}
From source file:org.sglover.nlp.EntityExtracter.java
private String getContent(ReadableByteChannel channel) throws IOException {
    StringBuilder sb = new StringBuilder();
    ByteBuffer bb = ByteBuffer.allocate(2048);
    while (channel.read(bb) != -1) {
        bb.flip();
        // append only the bytes actually read in this pass
        // (note: decoding chunk-by-chunk can still split multi-byte UTF-8 sequences)
        sb.append(new String(bb.array(), 0, bb.limit(), "UTF-8"));
        bb.clear();
    }
    String content = sb.toString();
    return content;
}
From source file:com.taobao.adfs.distributed.rpc.Server.java
/**
 * This is a wrapper around {@link ReadableByteChannel#read(ByteBuffer)}. If the amount of data is large,
 * it reads from the channel in smaller chunks. This is to avoid the JDK creating many direct buffers as
 * the size of the ByteBuffer increases. There should not be any performance degradation.
 *
 * @see ReadableByteChannel#read(ByteBuffer)
 */
private static int channelRead(ReadableByteChannel channel, ByteBuffer buffer) throws IOException {
    return (buffer.remaining() <= NIO_BUFFER_LIMIT) ? channel.read(buffer) : channelIO(channel, null, buffer);
}
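The channelIO helper referenced above is not included in this snippet. The sketch below illustrates the chunking idea the javadoc describes; the method name chunkedChannelRead and the 8 KB value for NIO_BUFFER_LIMIT are assumptions for illustration, not taken from the source.

// Hypothetical sketch of the chunked read described in the javadoc above;
// the real channelIO implementation is not shown in this example.
private static final int NIO_BUFFER_LIMIT = 8 * 1024; // assumed value

private static int chunkedChannelRead(ReadableByteChannel channel, ByteBuffer buffer) throws IOException {
    final int originalLimit = buffer.limit();
    int totalRead = 0;
    try {
        while (buffer.position() < originalLimit) {
            // cap the limit so each read() call transfers at most NIO_BUFFER_LIMIT bytes
            buffer.limit(Math.min(buffer.position() + NIO_BUFFER_LIMIT, originalLimit));
            int read = channel.read(buffer);
            if (read <= 0) {
                // 0 bytes (non-blocking channel with nothing available) or end-of-stream
                return (totalRead == 0 && read < 0) ? -1 : totalRead;
            }
            totalRead += read;
        }
    } finally {
        buffer.limit(originalLimit); // always restore the caller's limit
    }
    return totalRead;
}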
From source file:org.alfresco.repo.content.AbstractReadOnlyContentStoreTest.java
/**
 * Checks that the various methods of obtaining a reader are supported.
 */
@Test
public void testGetReaderForExistingContentUrl() throws Exception {
    ContentStore store = getStore();
    String contentUrl = getExistingContentUrl();
    if (contentUrl == null) {
        logger.warn("Store test testGetReaderForExistingContentUrl not possible on " + store.getClass().getName());
        return;
    }
    // Get the reader
    assertTrue("URL returned in set seems to no longer exist", store.exists(contentUrl));
    ContentReader reader = store.getReader(contentUrl);
    assertNotNull("Reader should never be null", reader);
    assertTrue("Reader says content doesn't exist", reader.exists());
    assertFalse("Reader should not be closed before a read", reader.isClosed());
    assertFalse("The reader channel should not be open yet", reader.isChannelOpen());
    // Open the channel
    ReadableByteChannel readChannel = reader.getReadableChannel();
    readChannel.read(ByteBuffer.wrap(new byte[500]));
    assertFalse("Reader should not be closed during a read", reader.isClosed());
    assertTrue("The reader channel should be open during a read", reader.isChannelOpen());
    // Close the channel
    readChannel.close();
    assertTrue("Reader should be closed after a read", reader.isClosed());
    assertFalse("The reader channel should be closed after a read", reader.isChannelOpen());
}
From source file:com.cloud.maint.UpgradeManagerImpl.java
public String deployNewAgent(String url) {
    s_logger.info("Updating agent with binary from " + url);
    final HttpClient client = new HttpClient(s_httpClientManager);
    final GetMethod method = new GetMethod(url);
    int response;
    File file = null;
    try {
        response = client.executeMethod(method);
        if (response != HttpURLConnection.HTTP_OK) {
            s_logger.warn("Retrieving the agent gives response code: " + response);
            return "Retrieving the file from " + url + " got response code: " + response;
        }
        final InputStream is = method.getResponseBodyAsStream();
        file = File.createTempFile("agent-", "-" + Long.toString(new Date().getTime()));
        file.deleteOnExit();
        s_logger.debug("Retrieving new agent into " + file.getAbsolutePath());
        final FileOutputStream fos = new FileOutputStream(file);
        final ByteBuffer buffer = ByteBuffer.allocate(2048);
        final ReadableByteChannel in = Channels.newChannel(is);
        final WritableByteChannel out = fos.getChannel();
        while (in.read(buffer) != -1) {
            buffer.flip();
            out.write(buffer);
            buffer.clear();
        }
        in.close();
        out.close();
        s_logger.debug("New Agent zip file is now retrieved");
    } catch (final HttpException e) {
        return "Unable to retrieve the file from " + url;
    } catch (final IOException e) {
        return "Unable to retrieve the file from " + url;
    } finally {
        method.releaseConnection();
    }
    file.delete();
    return "File will be deployed.";
}
From source file:org.alfresco.patch.PatchServiceImpl.java
@SuppressWarnings("resource") @Override/*from w ww . ja va 2 s . c o m*/ public PatchDocument getPatch(MultiPart resource) throws IOException { Integer blockSize = null; Integer matchCount = null; List<Integer> matchedBlocks = null; List<Patch> patches = new LinkedList<>(); // This will iterate the individual parts of the multipart response for (BodyPart bodyPart : resource.getBodyParts()) { if (bodyPart instanceof FormDataMultiPart) { System.out.printf("Multipart Body Part [Mime Type: %s]\n", bodyPart.getMediaType()); InputStream is = null; Integer size = null; Integer lastMatchIndex = null; FormDataMultiPart mp = (FormDataMultiPart) bodyPart; for (BodyPart bodyPart1 : mp.getBodyParts()) { ContentDisposition contentDisposition = bodyPart1.getContentDisposition(); if (contentDisposition instanceof FormDataContentDisposition) { FormDataContentDisposition cd = (FormDataContentDisposition) contentDisposition; String name = cd.getName(); if (name.equals("p_size")) { size = Integer.parseInt((String) bodyPart1.getEntity()); } else if (name.equals("p_last_match_idx")) { lastMatchIndex = Integer.parseInt((String) bodyPart1.getEntity()); } else if (name.equals("p_stream")) { is = (InputStream) bodyPart1.getEntity(); } } } ByteBuffer bb = ByteBuffer.allocate(1024 * 20); // TODO ReadableByteChannel channel = Channels.newChannel(is); channel.read(bb); bb.flip(); byte[] buffer = new byte[bb.limit()]; bb.get(buffer); Patch patch = new Patch(lastMatchIndex, size, buffer); patches.add(patch); } else { System.out.printf("Embedded Body Part [Mime Type: %s, Length: %s]\n", bodyPart.getMediaType(), bodyPart.getContentDisposition().getSize()); ContentDisposition contentDisposition = bodyPart.getContentDisposition(); if (contentDisposition instanceof FormDataContentDisposition) { FormDataContentDisposition cd = (FormDataContentDisposition) contentDisposition; String name = cd.getName(); if (name.equals("p_block_size")) { blockSize = Integer.parseInt((String) bodyPart.getEntity()); } else if (name.equals("p_match_count")) { matchCount = Integer.parseInt((String) bodyPart.getEntity()); } else if (name.equals("p_matched_blocks")) { String matchedBlocksStr = (String) bodyPart.getEntity(); List<String> l = Arrays.asList(matchedBlocksStr.split(",")); matchedBlocks = l.stream().filter(s -> s != null && !s.equals("")) .map(s -> Integer.parseInt(s)).collect(Collectors.toList()); } } } } PatchDocument patchDocument = new PatchDocument(blockSize, matchedBlocks, patches); return patchDocument; }
From source file:org.alfresco.cacheserver.dropwizard.resources.CacheServerResource.java
private void fastChannelCopy(final ReadableByteChannel src, final WritableByteChannel dest) throws IOException {
    final ByteBuffer buffer = ByteBuffer.allocateDirect(16 * 1024);
    while (src.read(buffer) != -1) {
        // prepare the buffer to be drained
        buffer.flip();
        // write to the channel, may block
        dest.write(buffer);
        // If partial transfer, shift remainder down
        // If buffer is empty, same as doing clear()
        buffer.compact();
    }
    // EOF will leave buffer in fill state
    buffer.flip();
    // make sure the buffer is fully drained
    while (buffer.hasRemaining()) {
        dest.write(buffer);
    }
}
From source file:org.alfresco.cacheserver.PatchServiceRESTTest.java
public PatchDocument getPatch(MultiPart resource) throws IOException {
    Integer blockSize = null;
    Integer matchCount = null;
    List<Integer> matchedBlocks = null;
    List<Patch> patches = new LinkedList<>();

    int c = 0;
    InputStream is = null;
    Integer size = null;
    Integer lastMatchIndex = null;

    // This will iterate the individual parts of the multipart response
    for (BodyPart bodyPart : resource.getBodyParts()) {
        ContentDisposition contentDisposition = bodyPart.getContentDisposition();
        Map<String, String> parameters = contentDisposition.getParameters();
        String name = parameters.get("name");
        MediaType mediaType = bodyPart.getMediaType();

        if (name.equals("p_size")) {
            String s = getAsString(bodyPart);
            size = Integer.parseInt(s);
            c++;
        } else if (name.equals("p_last_match_idx")) {
            String s = getAsString(bodyPart);
            lastMatchIndex = Integer.parseInt(s);
            c++;
        } else if (name.equals("p_stream")) {
            BodyPartEntity bpEntity = (BodyPartEntity) bodyPart.getEntity();
            is = bpEntity.getInputStream();
            c++;
        } else if (name.equals("p_block_size")) {
            String s = getAsString(bodyPart);
            blockSize = Integer.parseInt(s);
        } else if (name.equals("p_match_count")) {
            String s = getAsString(bodyPart);
            matchCount = Integer.parseInt(s);
        }

        // Once the size, last match index and stream parts have all been seen, build the next patch
        if (c >= 3) {
            c = 0;

            ByteBuffer bb = ByteBuffer.allocate(1024 * 20); // TODO
            ReadableByteChannel channel = Channels.newChannel(is);
            channel.read(bb);
            bb.flip();
            byte[] buffer = new byte[bb.limit()];
            bb.get(buffer);

            Patch patch = new Patch(lastMatchIndex, size, buffer);
            patches.add(patch);
        }
    }

    PatchDocument patchDocument = new PatchDocument(blockSize, matchedBlocks, patches);
    return patchDocument;
}
From source file:siddur.solidtrust.fault.FaultController.java
@RequestMapping(value = "/upload", method = RequestMethod.POST) public String handleFormUpload(@RequestParam("file") MultipartFile file, @RequestParam("version") int v, Model model, HttpSession session) throws Exception { IFaultPersister persister = getPersister(v); //upload//from ww w . j ava 2 s.c om log4j.info("Start uploading file: " + file.getName() + " with size: " + file.getSize()); File temp = File.createTempFile("data", ".csv"); log4j.info("Will save to " + temp.getAbsolutePath()); InputStream in = null; FileOutputStream fout = null; try { fout = new FileOutputStream(temp); FileChannel fcout = fout.getChannel(); in = file.getInputStream(); ReadableByteChannel cin = Channels.newChannel(in); ByteBuffer buf = ByteBuffer.allocate(1024 * 8); while (true) { buf.clear(); int r = cin.read(buf); if (r == -1) { break; } buf.flip(); fcout.write(buf); } } finally { if (in != null) { in.close(); } if (fout != null) { fout.close(); } } log4j.info("Uploading complete"); //fields BufferedReader br = null; int[] orders; try { in = new FileInputStream(temp); br = new BufferedReader(new InputStreamReader(in)); //first line for fields String firstLine = br.readLine(); orders = persister.validateTitle(firstLine); //persist persister.parseAndSave(br, orders, persister); } finally { if (br != null) { br.close(); } } return "redirect:upload.html"; }
From source file:siddur.solidtrust.newprice2.Newprice2Controller.java
@RequestMapping(value = "/upload", method = RequestMethod.POST) public String handleFormUpload(@RequestParam("file") MultipartFile file, Model model, HttpSession session) throws IOException { //upload/*from www . ja v a 2s .c o m*/ log4j.info("Start uploading file: " + file.getName() + " with size: " + file.getSize()); File temp = File.createTempFile("data", ".csv"); log4j.info("Will save to " + temp.getAbsolutePath()); InputStream in = null; FileOutputStream fout = null; try { fout = new FileOutputStream(temp); FileChannel fcout = fout.getChannel(); in = file.getInputStream(); ReadableByteChannel cin = Channels.newChannel(in); ByteBuffer buf = ByteBuffer.allocate(1024 * 8); while (true) { buf.clear(); int r = cin.read(buf); if (r == -1) { break; } buf.flip(); fcout.write(buf); } } finally { if (in != null) { in.close(); } if (fout != null) { fout.close(); } } FileStatus fs = new FileStatus(); fs.setFile(temp); log4j.info("Uploading complete"); //fields BufferedReader br = null; int[] orders; String[] fields; try { in = new FileInputStream(temp); br = new BufferedReader(new InputStreamReader(in)); //first line for fields String firstLine = br.readLine(); fields = StringUtils.split(firstLine, ";"); ; orders = new int[fields.length]; for (int i = 0; i < orders.length; i++) { orders[i] = ArrayUtils.indexOf(FIELDS, fields[i].trim()); } //count while (br.readLine() != null) { fs.next(); } } finally { if (br != null) { br.close(); } } fs.flip(); log4j.info("Total rows: " + fs.getTotalRow()); //persist carService.saveCars(fs, orders, carService); return "redirect:/v2/upload.html"; }