List of usage examples for java.nio.channels.FileChannel#open(Path, OpenOption...)
public static FileChannel open(Path path, OpenOption... options) throws IOException
From source file:com.reactive.hzdfs.io.MemoryMappedChunkHandler.java
/**
 * Read mode. Opens the source file as a read-only channel, memory-maps its full
 * contents, and computes how many fixed-size chunks will be sent.
 *
 * @param f the source file to read
 * @param chunkSize size of each chunk in bytes
 * @throws IOException if the file cannot be opened or mapped
 */
public MemoryMappedChunkHandler(File f, int chunkSize) throws IOException {
    super(f);
    iStream = FileChannel.open(file.toPath(), StandardOpenOption.READ);
    readSize = chunkSize;
    // Round up: a trailing partial chunk still counts as one chunk.
    if (fileSize % readSize == 0) {
        chunks = (int) (fileSize / readSize);
    } else {
        chunks = (int) (fileSize / readSize + 1);
    }
    mapBuff = iStream.map(MapMode.READ_ONLY, 0, getFileSize());
    if (log.isDebugEnabled()) {
        debugInitialParams();
        log.debug("Reading source file. Expected chunks to send- " + chunks);
    }
}
From source file:io.druid.query.groupby.epinephelinae.LimitedTemporaryStorage.java
/** * Create a new temporary file. All methods of the returned output stream may throw * {@link TemporaryStorageFullException} if the temporary storage area fills up. * * @return output stream to the file/* w w w . j a va 2 s .c om*/ * * @throws TemporaryStorageFullException if the temporary storage area is full * @throws IOException if something goes wrong while creating the file */ public LimitedOutputStream createFile() throws IOException { if (bytesUsed.get() >= maxBytesUsed) { throw new TemporaryStorageFullException(maxBytesUsed); } synchronized (files) { if (closed) { throw new ISE("Closed"); } FileUtils.forceMkdir(storageDirectory); final File theFile = new File(storageDirectory, StringUtils.format("%08d.tmp", files.size())); final EnumSet<StandardOpenOption> openOptions = EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE); final FileChannel channel = FileChannel.open(theFile.toPath(), openOptions); files.add(theFile); return new LimitedOutputStream(theFile, Channels.newOutputStream(channel)); } }
From source file:io.pravega.segmentstore.storage.impl.extendeds3.S3FileSystemImpl.java
@Synchronized @Override//from www . j a v a 2s . co m public void putObject(String bucketName, String key, Range range, Object content) { Path path = Paths.get(this.baseDir, bucketName, key); try (FileChannel channel = FileChannel.open(path, StandardOpenOption.WRITE)) { long startOffset = range.getFirst(); long length = range.getLast() + 1 - range.getFirst(); do { long bytesTransferred = channel.transferFrom(Channels.newChannel((InputStream) content), range.getFirst(), range.getLast() + 1 - range.getFirst()); length -= bytesTransferred; startOffset += bytesTransferred; } while (length > 0); AclSize aclKey = aclMap.get(key); aclMap.put(key, aclKey.withSize(range.getLast() + 1)); } catch (IOException e) { throw new S3Exception("NoObject", 404, "NoSuchKey", key); } }
From source file:io.undertow.server.handlers.SenderTestCase.java
@BeforeClass public static void setup() { HttpHandler lotsOfSendsHandler = new HttpHandler() { @Override// w w w.j av a 2 s . com public void handleRequest(final HttpServerExchange exchange) throws Exception { boolean blocking = exchange.getQueryParameters().get("blocking").getFirst().equals("true"); if (blocking) { if (exchange.isInIoThread()) { exchange.startBlocking(); exchange.dispatch(this); return; } } final Sender sender = exchange.getResponseSender(); class SendClass implements Runnable, IoCallback { int sent = 0; @Override public void run() { sent++; sender.send("a", this); } @Override public void onComplete(final HttpServerExchange exchange, final Sender sender) { if (sent++ == SENDS) { sender.close(); return; } sender.send("a", this); } @Override public void onException(final HttpServerExchange exchange, final Sender sender, final IOException exception) { exception.printStackTrace(); exchange.endExchange(); } } new SendClass().run(); } }; HttpHandler lotsOfTransferHandler = new HttpHandler() { @Override public void handleRequest(final HttpServerExchange exchange) throws Exception { boolean blocking = exchange.getQueryParameters().get("blocking").getFirst().equals("true"); if (blocking) { if (exchange.isInIoThread()) { exchange.startBlocking(); exchange.dispatch(this); return; } } URI uri = SenderTestCase.class.getResource(SenderTestCase.class.getSimpleName() + ".class").toURI(); Path file = Paths.get(uri); final FileChannel channel = FileChannel.open(file, StandardOpenOption.READ); exchange.setResponseContentLength(channel.size() * TXS); final Sender sender = exchange.getResponseSender(); class SendClass implements Runnable, IoCallback { int sent = 0; @Override public void run() { sent++; try { channel.position(0); } catch (IOException e) { } sender.transferFrom(channel, this); } @Override public void onComplete(final HttpServerExchange exchange, final Sender sender) { if (sent++ == TXS) { sender.close(); return; } try { channel.position(0); } catch 
(IOException e) { } sender.transferFrom(channel, this); } @Override public void onException(final HttpServerExchange exchange, final Sender sender, final IOException exception) { exception.printStackTrace(); exchange.endExchange(); } } new SendClass().run(); } }; final HttpHandler fixedLengthSender = new HttpHandler() { @Override public void handleRequest(final HttpServerExchange exchange) throws Exception { exchange.getResponseSender().send(HELLO_WORLD); } }; PathHandler handler = new PathHandler().addPrefixPath("/lots", lotsOfSendsHandler) .addPrefixPath("/fixed", fixedLengthSender).addPrefixPath("/transfer", lotsOfTransferHandler); DefaultServer.setRootHandler(handler); }
From source file:com.oneops.daq.dao.PerfDao.java
/** * Sets the state filename and open a file channel for writing. * * @param filename file name of state/*from w w w .j av a 2 s . c o m*/ */ public void setStateFilename(String filename) { stateFilename = filename; File sFile = new File(stateFilename); try { if (!sFile.exists()) { sFile.createNewFile(); } logger.info("Creating the file channel for " + stateFilename); statChannel = FileChannel.open(Paths.get(stateFilename), StandardOpenOption.WRITE); } catch (Exception ex) { logger.error("Error setting stat file." + sFile.getAbsolutePath(), ex); System.exit(1); } }
From source file:edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator.java
/**
 * Opens the rotated ("transposed") image of a tabular data file and prepares to read
 * the requested subset of variables: for a single variable the channel is positioned
 * at its column; for multiple variables a read buffer is primed per column.
 *
 * @param datafile the tabular data file to subset (must be stored locally)
 * @param variables variables to include; must all belong to {@code datafile}
 * @throws IOException on non-tabular/non-local files or an invalid variable list
 */
public TabularSubsetGenerator(DataFile datafile, List<DataVariable> variables) throws IOException {
    if (!datafile.isTabularData()) {
        throw new IOException("DataFile is not tabular data.");
    }
    setVarCount(datafile.getDataTable().getVarQuantity().intValue());
    setCaseCount(datafile.getDataTable().getCaseQuantity().intValue());
    StorageIO<DataFile> dataAccess = datafile.getStorageIO();
    if (!dataAccess.isLocalFile()) {
        throw new IOException("Subsetting is supported on local files only!");
    }
    File tabfile = dataAccess.getFileSystemPath().toFile();
    File rotatedImageFile = getRotatedImage(tabfile, getVarCount(), getCaseCount());
    long[] columnEndOffsets = extractColumnOffsets(rotatedImageFile, getVarCount(), getCaseCount());
    fileChannel = FileChannel.open(Paths.get(rotatedImageFile.getAbsolutePath()), StandardOpenOption.READ);
    if (variables == null || variables.size() < 1 || variables.size() > getVarCount()) {
        throw new IOException("Illegal number of variables in the subset request");
    }
    subsetcount = variables.size();
    columnTotalOffsets = new long[subsetcount];
    columnTotalLengths = new long[subsetcount];
    columnByteBuffers = new ByteBuffer[subsetcount];
    if (subsetcount == 1) {
        DataVariable single = variables.get(0);
        if (!datafile.getDataTable().getId().equals(single.getDataTable().getId())) {
            throw new IOException("Variable in the subset request does not belong to the datafile.");
        }
        dbgLog.fine("single variable subset; setting fileChannel position to "
                + extractColumnOffset(columnEndOffsets, single.getFileOrder()));
        fileChannel.position(extractColumnOffset(columnEndOffsets, single.getFileOrder()));
        columnTotalLengths[0] = extractColumnLength(columnEndOffsets, single.getFileOrder());
        columnTotalOffsets[0] = 0;
    } else {
        columnEntries = new byte[subsetcount][];
        columnBufferSizes = new int[subsetcount];
        columnBufferOffsets = new int[subsetcount];
        columnStartOffsets = new long[subsetcount];
        int i = 0;
        for (DataVariable variable : variables) {
            if (!datafile.getDataTable().getId().equals(variable.getDataTable().getId())) {
                throw new IOException("Variable in the subset request does not belong to the datafile.");
            }
            columnByteBuffers[i] = ByteBuffer.allocate(MAX_COLUMN_BUFFER);
            columnTotalLengths[i] = extractColumnLength(columnEndOffsets, variable.getFileOrder());
            columnStartOffsets[i] = extractColumnOffset(columnEndOffsets, variable.getFileOrder());
            // Short columns: cap the buffer so the first read covers the whole column.
            if (columnTotalLengths[i] < MAX_COLUMN_BUFFER) {
                columnByteBuffers[i].limit((int) columnTotalLengths[i]);
            }
            fileChannel.position(columnStartOffsets[i]);
            columnBufferSizes[i] = fileChannel.read(columnByteBuffers[i]);
            columnBufferOffsets[i] = 0;
            columnTotalOffsets[i] = columnBufferSizes[i];
            i++;
        }
    }
}
From source file:com.github.jinahya.verbose.codec.BinaryCodecTest.java
protected final void encodeDecode(final ReadableByteChannel expectedChannel) throws IOException { if (expectedChannel == null) { throw new NullPointerException("null expectedChannel"); }/*from ww w . jav a 2 s . com*/ final Path encodedPath = Files.createTempFile("test", null); getRuntime().addShutdownHook(new Thread(() -> { try { Files.delete(encodedPath); } catch (final IOException ioe) { ioe.printStackTrace(System.err); } })); final WritableByteChannel encodedChannel = FileChannel.open(encodedPath, StandardOpenOption.WRITE); final ByteBuffer decodedBuffer = ByteBuffer.allocate(128); final ByteBuffer encodedBuffer = ByteBuffer.allocate(decodedBuffer.capacity() << 1); while (expectedChannel.read(decodedBuffer) != -1) { decodedBuffer.flip(); // limit -> position; position -> zero encoder.encode(decodedBuffer, encodedBuffer); encodedBuffer.flip(); encodedChannel.write(encodedBuffer); encodedBuffer.compact(); // position -> n + 1; limit -> capacity decodedBuffer.compact(); } decodedBuffer.flip(); while (decodedBuffer.hasRemaining()) { encoder.encode(decodedBuffer, encodedBuffer); encodedBuffer.flip(); encodedChannel.write(encodedBuffer); encodedBuffer.compact(); } encodedBuffer.flip(); while (encodedBuffer.hasRemaining()) { encodedChannel.write(encodedBuffer); } }
From source file:divconq.util.IOUtil.java
public static Memory readEntireFileToMemory(Path file) { try (FileChannel ch = FileChannel.open(file, StandardOpenOption.READ)) { Memory mem = new Memory(); // TODO improve mem to read right from channel... ByteBuffer bb = ByteBuffer.allocate(4096); int amt = ch.read(bb); while (amt != -1) { bb.flip();/*from w w w.jav a 2s. c o m*/ mem.write(bb); bb.clear(); amt = ch.read(bb); } mem.setPosition(0); return mem; } catch (IOException x) { } return null; }
From source file:at.ac.tuwien.infosys.util.ImageUtil.java
/**
 * Downloads the resource at {@code url} into {@code file}, creating the file if
 * needed and replacing any existing content.
 *
 * @param url source to download from
 * @param file destination path
 * @throws IOException if opening the stream, the file, or the transfer fails
 */
private void saveFile(URL url, Path file) throws IOException {
    // BUG FIX: the previous version never closed the source channel at all, and
    // leaked both channels if transferFrom threw. try-with-resources closes both
    // on every path.
    try (ReadableByteChannel rbc = Channels.newChannel(url.openStream());
            FileChannel channel = FileChannel.open(file, EnumSet.of(StandardOpenOption.CREATE,
                    StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE))) {
        // Long.MAX_VALUE: transfer until the source is exhausted.
        channel.transferFrom(rbc, 0, Long.MAX_VALUE);
    }
}
From source file:com.netflix.genie.web.services.impl.DiskJobFileServiceImpl.java
/** * {@inheritDoc}//from ww w.ja v a2 s. co m */ @Override // TODO: We should be careful about how large the byte[] is. Perhaps we should have precondition to protect memory // or we should wrap calls to this in something that chunks it off an input stream or just take this in as // input stream public void updateFile(final String jobId, final String relativePath, final long startByte, final byte[] data) throws IOException { log.debug("Attempting to write {} bytes from position {} into log file {} for job {}", data.length, startByte, relativePath, jobId); final Path jobFile = this.jobsDirRoot.resolve(jobId).resolve(relativePath); if (Files.notExists(jobFile)) { // Make sure all the directories exist on disk final Path logFileParent = jobFile.getParent(); if (logFileParent != null) { this.createOrCheckDirectory(logFileParent); } } else if (Files.isDirectory(jobFile)) { // TODO: Perhaps this should be different exception throw new IllegalArgumentException(relativePath + " is a directory not a file. Unable to update"); } try (FileChannel fileChannel = FileChannel.open(jobFile, EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.SPARSE))) { // Move the byteChannel to the start byte fileChannel.position(startByte); // The size and length are ignored in this implementation as we just assume we're writing everything atm // TODO: Would it be better to provide an input stream and buffer the output? final ByteBuffer byteBuffer = ByteBuffer.wrap(data); while (byteBuffer.hasRemaining()) { fileChannel.write(byteBuffer); } } }