List of usage examples for java.nio.channels.ReadableByteChannel.read(ByteBuffer)
public int read(ByteBuffer dst) throws IOException;
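Before the project examples below, here is a minimal, self-contained sketch of the canonical read loop (the file name "example.txt" is a placeholder, not taken from any of the projects): read() fills the buffer and returns the number of bytes transferred, 0 if no space or data is available, and -1 at end-of-stream; flip() switches the buffer to drain mode and clear() prepares it for the next read.

import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;

public class ReadLoopExample {
    public static void main(String[] args) throws IOException {
        // "example.txt" is a placeholder path; substitute any readable file.
        try (ReadableByteChannel channel = Channels.newChannel(new FileInputStream("example.txt"))) {
            ByteBuffer buffer = ByteBuffer.allocate(8192);
            // read() returns -1 at end-of-stream.
            while (channel.read(buffer) != -1) {
                buffer.flip();      // switch to drain mode
                while (buffer.hasRemaining()) {
                    System.out.print((char) buffer.get());
                }
                buffer.clear();     // switch back to fill mode
            }
        }
    }
}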
From source file:com.saasovation.common.port.adapter.messaging.slothmq.SlothWorker.java
protected String receive() {
    SocketChannel socketChannel = null;
    try {
        socketChannel = this.socket.accept();
        if (socketChannel == null) {
            return null; // if non-blocking
        }
        ReadableByteChannel readByteChannel = Channels.newChannel(socketChannel.socket().getInputStream());
        ByteArrayOutputStream byteArray = new ByteArrayOutputStream();
        ByteBuffer readBuffer = ByteBuffer.allocate(8);
        while (readByteChannel.read(readBuffer) != -1) {
            readBuffer.flip();
            while (readBuffer.hasRemaining()) {
                byteArray.write(readBuffer.get());
            }
            readBuffer.clear();
        }
        return new String(byteArray.toByteArray());
    } catch (IOException e) {
        logger.error("Failed to receive because: {}: Continuing...", e.getMessage(), e);
        return null;
    } finally {
        if (socketChannel != null) {
            try {
                socketChannel.close();
            } catch (IOException e) {
                // ignore
            }
        }
    }
}
From source file:edu.usc.pgroup.floe.client.FloeClient.java
/**
 * Uploads the file to the coordinator.
 * The file is uploaded relative to the coordinator's scratch folder.
 *
 * @param fileName name of the file to be stored on the coordinator.
 * @return the base fileName which may be used for downloading the file
 * later.
 */
public final String uploadFileSync(final String fileName) {
    String baseFile = FilenameUtils.getName(fileName);
    try {
        int fid = getClient().beginFileUpload(baseFile);
        ReadableByteChannel inChannel = Channels.newChannel(new FileInputStream(fileName));
        ByteBuffer buffer = ByteBuffer.allocate(Utils.Constants.BUFFER_SIZE);
        while (inChannel.read(buffer) > 0) {
            buffer.flip();
            getClient().uploadChunk(fid, buffer);
            buffer.clear();
        }
        inChannel.close();
        getClient().finishUpload(fid);
    } catch (TException e) {
        LOGGER.error(e.getMessage());
        throw new RuntimeException(e);
    } catch (FileNotFoundException e) {
        LOGGER.error(e.getMessage());
        throw new RuntimeException(e);
    } catch (IOException e) {
        LOGGER.error(e.getMessage());
        throw new RuntimeException(e);
    }
    return baseFile;
}
From source file:siddur.solidtrust.classic.ClassicController.java
@RequestMapping(value = "/upload", method = RequestMethod.POST)
public String handleFormUpload(@RequestParam("file") MultipartFile file, Model model, HttpSession session)
        throws Exception {
    // upload
    log4j.info("Start uploading file: " + file.getName() + " with size: " + file.getSize());
    File temp = File.createTempFile("data", ".csv");
    log4j.info("Will save to " + temp.getAbsolutePath());

    InputStream in = null;
    FileOutputStream fout = null;
    try {
        fout = new FileOutputStream(temp);
        FileChannel fcout = fout.getChannel();
        in = file.getInputStream();
        ReadableByteChannel cin = Channels.newChannel(in);
        ByteBuffer buf = ByteBuffer.allocate(1024 * 8);
        while (true) {
            buf.clear();
            int r = cin.read(buf);
            if (r == -1) {
                break;
            }
            buf.flip();
            fcout.write(buf);
        }
    } finally {
        if (in != null) {
            in.close();
        }
        if (fout != null) {
            fout.close();
        }
    }
    log4j.info("Uploading complete");

    // fields
    BufferedReader br = null;
    int[] orders;
    try {
        in = new FileInputStream(temp);
        br = new BufferedReader(new InputStreamReader(in));
        // first line for fields
        String firstLine = br.readLine();
        orders = persister.validateTitle(firstLine);
        // persist
        persister.parseAndSave(br, orders, persister);
    } finally {
        if (br != null) {
            br.close();
        }
    }
    return "redirect:upload.html";
}
From source file:org.openhab.io.transport.cul.internal.network.CULNetworkHandlerImpl.java
private void processRead(SelectionKey key) throws Exception {
    ReadableByteChannel ch = (ReadableByteChannel) key.channel();
    int bytesOp = 0, bytesTotal = 0;
    while (readBuf.hasRemaining() && (bytesOp = ch.read(readBuf)) > 0) {
        bytesTotal += bytesOp;
    }
    logger.debug("Read {} bytes from network", bytesTotal);
    if (bytesTotal > 0) {
        readBuf.flip();
        onRead(readBuf);
        readBuf.compact();
    } else if (bytesOp == -1) {
        logger.info("peer closed read channel");
        ch.close();
    }
}
From source file:org.wso2.carbon.inbound.endpoint.protocol.hl7.core.MLLPSourceHandler.java
@Override
public void inputReady(IOSession session) {
    ReadableByteChannel ch = (ReadableByteChannel) session.channel();
    MLLPContext mllpContext = (MLLPContext) session.getAttribute(MLLPConstants.MLLP_CONTEXT);
    inputBuffer.clear();

    try {
        int read;
        while ((read = ch.read(inputBuffer.getByteBuffer())) > 0) {
            inputBuffer.flip();
            try {
                mllpContext.getCodec().decode(inputBuffer.getByteBuffer(), mllpContext);
            } catch (MLLProtocolException e) {
                handleException(session, mllpContext, e);
                clearInputBuffers(mllpContext);
                return;
            } catch (HL7Exception e) {
                handleException(session, mllpContext, e);
                if (mllpContext.isAutoAck()) {
                    mllpContext.setNackMode(true);
                    mllpContext.setHl7Message(HL7MessageUtils.createDefaultNack(e.getMessage()));
                    mllpContext.requestOutput();
                } else {
                    hl7Processor.processError(mllpContext, e);
                }
                return;
            } catch (IOException e) {
                shutdownConnection(session, mllpContext, e);
                return;
            }
        }

        if (mllpContext.getCodec().isReadComplete()) {
            if (mllpContext.isAutoAck()) {
                mllpContext.requestOutput();
                bufferFactory.release(inputBuffer);
                inputBuffer = bufferFactory.getBuffer();
            }
            try {
                hl7Processor.processRequest(mllpContext);
            } catch (Exception e) {
                shutdownConnection(session, mllpContext, e);
            }
        }

        if (read < 0) {
            clearInputBuffers(mllpContext);
            session.close();
        }
    } catch (IOException e) {
        shutdownConnection(session, mllpContext, e);
    }
}
From source file:com.doplgangr.secrecy.FileSystem.File.java
public java.io.File readFile(CryptStateListener listener) {
    decrypting = true;
    InputStream is = null;
    OutputStream out = null;
    java.io.File outputFile = null;
    try {
        outputFile = java.io.File.createTempFile("tmp" + name, "." + FileType, storage.getTempFolder());
        outputFile.mkdirs();
        outputFile.createNewFile();
        AES_Encryptor enc = new AES_Encryptor(key);
        is = new CipherInputStream(new FileInputStream(file), enc.decryptstream());
        listener.setMax((int) file.length());
        ReadableByteChannel inChannel = Channels.newChannel(is);
        FileChannel outChannel = new FileOutputStream(outputFile).getChannel();
        ByteBuffer byteBuffer = ByteBuffer.allocate(Config.bufferSize);
        while (inChannel.read(byteBuffer) >= 0 || byteBuffer.position() > 0) {
            byteBuffer.flip();
            outChannel.write(byteBuffer);
            byteBuffer.compact();
            listener.updateProgress((int) outChannel.size());
        }
        inChannel.close();
        outChannel.close();
        Util.log(outputFile.getName(), outputFile.length());
        return outputFile;
    } catch (FileNotFoundException e) {
        listener.onFailed(2);
        Util.log("Encrypted File is missing", e.getMessage());
    } catch (IOException e) {
        Util.log("IO Exception while decrypting", e.getMessage());
        if (e.getMessage().contains("pad block corrupted")) {
            listener.onFailed(1);
        } else {
            e.printStackTrace();
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        listener.Finished();
        decrypting = false;
        try {
            if (is != null) {
                is.close();
            }
            if (out != null) {
                out.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    // An error occurred. Too bad.
    if (outputFile != null) {
        storage.purgeFile(outputFile);
    }
    return null;
}
From source file:org.commoncrawl.hadoop.io.S3GetMetdataJob.java
@org.junit.Test
public void testMapper() throws Exception {
    final ArcFileReader reader = new ArcFileReader();
    Thread thread = new Thread(new Runnable() {
        public void run() {
            try {
                while (reader.hasMoreItems()) {
                    ArcFileItem item = new ArcFileItem();
                    reader.getNextItem(item);
                    map(new Text(item.getUri()), item, null, null);
                }
                LOG.info("NO MORE ITEMS... BYE");
            } catch (IOException e) {
                LOG.error(StringUtils.stringifyException(e));
            }
        }
    });
    // run the thread ...
    thread.start();

    File file = new File("/Users/rana/Downloads/1213886083018_0.arc.gz");
    ReadableByteChannel channel = Channels.newChannel(new FileInputStream(file));
    try {
        int totalBytesRead = 0;
        for (;;) {
            ByteBuffer buffer = ByteBuffer.allocate(ArcFileReader.DEFAULT_BLOCK_SIZE);
            int bytesRead = channel.read(buffer);
            LOG.info("Read " + bytesRead + " From File");
            if (bytesRead == -1) {
                reader.finished();
                break;
            } else {
                buffer.flip();
                totalBytesRead += buffer.remaining();
                reader.available(buffer);
            }
        }
    } finally {
        channel.close();
    }

    // now wait for thread to die ...
    LOG.info("Done Reading File.... Waiting for ArcFileThread to DIE");
    thread.join();
    LOG.info("Done Reading File.... ArcFileThread to DIED");
}
From source file:org.darkware.wpman.security.ChecksumDatabase.java
/**
 * Perform a checksum calculation on the given {@link ReadableByteChannel}. Other code should not create
 * implementations which are dependent on any particular characteristics of the checksum, but the checksum
 * is very likely to be based on a cryptographic-strength hash. The results of the checksum are encoded as
 * a base64 {@code String}.
 *
 * @param channel The {@code ReadableByteChannel} to read data from.
 * @return A Base64 encoded {@code String} representing the checksum.
 * @throws IOException If there was an error while reading data from the channel.
 * @see Base64#encodeBase64String(byte[])
 */
protected String doChecksum(ReadableByteChannel channel) throws IOException {
    Hasher hasher = Hashing.sha256().newHasher();

    final ByteBuffer block = ByteBuffer.allocate(4096);
    while (channel.isOpen()) {
        int bytesRead = channel.read(block);
        if (bytesRead > 0) {
            block.flip();
            hasher.putBytes(block.array(), 0, block.limit());
            block.clear();
        } else if (bytesRead == -1) {
            channel.close();
        }
    }

    return Base64.encodeBase64String(hasher.hash().asBytes());
}
From source file:net.bobah.mail.Dupes.java
@Override
public void run() {
    final Multimap<HashCode, File> dupes = Multimaps.newListMultimap(
            new HashMap<HashCode, Collection<File>>(), new Supplier<List<File>>() {
                @Override
                public List<File> get() {
                    return new LinkedList<File>();
                }
            });

    for (final File dir : dirs) {
        if (!dir.isDirectory()) {
            log.warn("{} does not exist or is not a directory, ignored", dir);
        }

        final Collection<File> files = findFiles(dir, "");
        log.info("found {} files in {}, submitting to analyzer", files.size(), dir.getAbsolutePath());

        for (final File file : files) {
            executor.submit(new Runnable() {
                @Override
                public void run() {
                    final ExecutionContext cxt = Dupes.this.cxt.get();
                    ReadableByteChannel ch = null;
                    try {
                        cxt.sw.start();
                        // map file, take just 1 meg of data to cxt.hash and calc the function
                        // final HashCode code = Files.hash(file, hashfunc);
                        ch = Channels.newChannel(new FileInputStream(file));
                        ByteBuffer buf = ByteBuffer.wrap(cxt.buf);
                        final int len = ch.read(buf);
                        if (len <= 0) {
                            return; // empty file or immediate end-of-stream (read returns -1)
                        }
                        final HashCode code = hashfunc.hashBytes(cxt.buf, 0, Ints.checkedCast(len));
                        synchronized (dupes) {
                            dupes.put(code, file);
                        }
                        cxt.sw.stop();
                        log.debug("{} -> {} ({}) - {} us", file, code,
                                DateFormat.getInstance().format(file.lastModified()),
                                cxt.sw.elapsed(TimeUnit.MILLISECONDS));
                    } catch (Exception e) {
                        log.debug("exception", e);
                    } finally {
                        cxt.recycle();
                        if (ch != null) {
                            try {
                                ch.close();
                            } catch (IOException unused) {
                            }
                        }
                    }
                }
            });
        }
        log.info("done submitting {} to analyzer", dir.getAbsolutePath());
    }

    try {
        shutdownExecutor(executor, log);
    } catch (InterruptedException e) {
        log.debug("exception", e);
    }

    for (Collection<File> filez : dupes.asMap().values()) {
        if (filez.size() == 1) {
            continue;
        }
        log.info("dupes found: {}", filez);
    }
}