List of usage examples for java.nio.file.StandardOpenOption.READ
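All of the examples below obtain read access by passing StandardOpenOption.READ to a java.nio.file.Files factory method (newInputStream, newByteChannel) or to FileChannel.open. As a minimal, self-contained sketch of both styles (the path /tmp/example.txt is only a placeholder):

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class ReadOptionExample {
    public static void main(String[] args) throws IOException {
        Path file = Paths.get("/tmp/example.txt"); // placeholder path

        // Stream-oriented read; READ is also the default when no option is given.
        try (InputStream in = Files.newInputStream(file, StandardOpenOption.READ)) {
            System.out.println("first byte: " + in.read());
        }

        // Channel-oriented read of the first 4 KiB.
        try (FileChannel channel = FileChannel.open(file, StandardOpenOption.READ)) {
            ByteBuffer buffer = ByteBuffer.allocate(4096);
            int read = channel.read(buffer);
            System.out.println("bytes read: " + read);
        }
    }
}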
From source file: com.arpnetworking.metrics.common.tailer.StatefulTailer.java

private void fileLoop() {
    SeekableByteChannel reader = null;
    InitialPosition nextInitialPosition = _initialPosition;
    try {
        while (isRunning()) {
            // Attempt to open the file
            try {
                reader = Files.newByteChannel(_file, StandardOpenOption.READ);
                LOGGER.trace().setMessage("Opened file").addData("file", _file).log();
            } catch (final NoSuchFileException e) {
                _listener.fileNotFound();
                _trigger.waitOnTrigger();
            }
            if (reader != null) {
                // Position the reader
                resume(reader, nextInitialPosition);
                _listener.fileOpened();
                // Any subsequent file opens we should start at the beginning
                nextInitialPosition = InitialPosition.START;
                // Read the file
                readLoop(reader);
                // Reset per file state
                IOUtils.closeQuietly(reader);
                reader = null;
                _hash = Optional.empty();
            }
        }
        // Clients may elect to kill the stateful tailer on an exception by calling stop, or they
        // may log the exception and continue. In the latter case it is strongly recommended that
        // clients pause before continuing; otherwise, if the error persists the stateful tailer
        // may create non-trivial load on the io subsystem.
        // NOTE: Any non-exception throwable will kill the stateful tailer.
    } catch (final InterruptedException e) {
        Thread.currentThread().interrupt();
        handleThrowable(e);
        // CHECKSTYLE.OFF: IllegalCatch - Allow clients to decide how to handle exceptions
    } catch (final Exception e) {
        // CHECKSTYLE.ON: IllegalCatch
        handleThrowable(e);
    } finally {
        IOUtils.closeQuietly(reader);
        reader = null;
        _hash = Optional.empty();
    }
}
From source file: edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator.java

public TabularSubsetGenerator(DataFile datafile, List<DataVariable> variables) throws IOException {
    if (!datafile.isTabularData()) {
        throw new IOException("DataFile is not tabular data.");
    }

    setVarCount(datafile.getDataTable().getVarQuantity().intValue());
    setCaseCount(datafile.getDataTable().getCaseQuantity().intValue());

    StorageIO<DataFile> dataAccess = datafile.getStorageIO();
    if (!dataAccess.isLocalFile()) {
        throw new IOException("Subsetting is supported on local files only!");
    }

    //File tabfile = datafile.getFileSystemLocation().toFile();
    File tabfile = dataAccess.getFileSystemPath().toFile();

    File rotatedImageFile = getRotatedImage(tabfile, getVarCount(), getCaseCount());
    long[] columnEndOffsets = extractColumnOffsets(rotatedImageFile, getVarCount(), getCaseCount());

    fileChannel = (FileChannel.open(Paths.get(rotatedImageFile.getAbsolutePath()), StandardOpenOption.READ));

    if (variables == null || variables.size() < 1 || variables.size() > getVarCount()) {
        throw new IOException("Illegal number of variables in the subset request");
    }

    subsetcount = variables.size();
    columnTotalOffsets = new long[subsetcount];
    columnTotalLengths = new long[subsetcount];
    columnByteBuffers = new ByteBuffer[subsetcount];

    if (subsetcount == 1) {
        if (!datafile.getDataTable().getId().equals(variables.get(0).getDataTable().getId())) {
            throw new IOException("Variable in the subset request does not belong to the datafile.");
        }
        dbgLog.fine("single variable subset; setting fileChannel position to "
                + extractColumnOffset(columnEndOffsets, variables.get(0).getFileOrder()));
        fileChannel.position(extractColumnOffset(columnEndOffsets, variables.get(0).getFileOrder()));
        columnTotalLengths[0] = extractColumnLength(columnEndOffsets, variables.get(0).getFileOrder());
        columnTotalOffsets[0] = 0;
    } else {
        columnEntries = new byte[subsetcount][];

        columnBufferSizes = new int[subsetcount];
        columnBufferOffsets = new int[subsetcount];
        columnStartOffsets = new long[subsetcount];

        int i = 0;
        for (DataVariable var : variables) {
            if (!datafile.getDataTable().getId().equals(var.getDataTable().getId())) {
                throw new IOException("Variable in the subset request does not belong to the datafile.");
            }
            columnByteBuffers[i] = ByteBuffer.allocate(MAX_COLUMN_BUFFER);
            columnTotalLengths[i] = extractColumnLength(columnEndOffsets, var.getFileOrder());
            columnStartOffsets[i] = extractColumnOffset(columnEndOffsets, var.getFileOrder());
            if (columnTotalLengths[i] < MAX_COLUMN_BUFFER) {
                columnByteBuffers[i].limit((int) columnTotalLengths[i]);
            }
            fileChannel.position(columnStartOffsets[i]);
            columnBufferSizes[i] = fileChannel.read(columnByteBuffers[i]);
            columnBufferOffsets[i] = 0;
            columnTotalOffsets[i] = columnBufferSizes[i];
            i++;
        }
    }
}
From source file: srebrinb.compress.sevenzip.SevenZFile.java

/**
 * Reads a file as 7z archive
 *
 * @param filename the file to read
 * @param password optional password if the archive is encrypted -
 *                 the byte array is supposed to be the UTF16-LE encoded
 *                 representation of the password.
 * @throws IOException if reading the archive fails
 */
public SevenZFile(final File filename, final byte[] password) throws IOException {
    this(Files.newByteChannel(filename.toPath(), EnumSet.of(StandardOpenOption.READ)),
            filename.getAbsolutePath(), password, true);
}
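As context for the constructor above, iterating the opened archive might look like the sketch below. It assumes the Commons Compress-style SevenZFile API (getNextEntry, read, close) that this class mirrors; archive.7z is only a placeholder path.

// Sketch only: assumes the Commons Compress-style SevenZFile API that this class mirrors.
File archive = new File("archive.7z"); // placeholder path
try (SevenZFile sevenZFile = new SevenZFile(archive)) {
    SevenZArchiveEntry entry;
    while ((entry = sevenZFile.getNextEntry()) != null) {
        if (entry.isDirectory()) {
            continue; // skip directory entries
        }
        byte[] content = new byte[(int) entry.getSize()];
        sevenZFile.read(content, 0, content.length);
        // process content for entry.getName() ...
    }
}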
From source file: at.tfr.securefs.xnio.MessageHandlerImpl.java

@Override
public void handleMessage(String json, MessageSender messageSender) throws IOException {
    log.debug("handleMessage: " + json);

    final Message message = objectMapper.readValue(json, Message.class);
    Path path = configuration.getBasePath().resolve(message.getPath());
    if (!path.relativize(configuration.getBasePath()).toString().equals("..")) {
        throw new SecurityException("invalid path spec: " + message.getPath());
    }

    try {
        final String uniqueKey = message.getUniqueKey();
        // find the Channel for this data stream:
        StreamInfo<ChannelPipe<StreamSourceChannel, StreamSinkChannel>> info = activeStreams.getStreams()
                .get(uniqueKey);

        if (message.getType() == MessageType.OPEN && info != null) {
            log.warn("illegal state on Open stream: " + message);
            IoUtils.safeClose(info.getStream().getRightSide());
            messageSender.send(new Message(MessageType.ERROR, message.getPath()).key(uniqueKey));
        }

        switch (message.getType()) {
        case ERROR:
            log.info("error from Client: " + json);
        case CLOSE: {
            if (info != null) {
                IoUtils.safeClose(info.getStream().getRightSide());
            }
        }
            break;

        case OPEN: {
            switch (message.getSubType()) {
            case READ: {
                final InputStream is = Files.newInputStream(path, StandardOpenOption.READ);
                final InputStream cis = new CipherInputStream(is, getCipher(message, Cipher.DECRYPT_MODE));
                final ChannelPipe<StreamSourceChannel, StreamSinkChannel> pipe = xnioWorker
                        .createHalfDuplexPipe();

                pipe.getLeftSide().getReadSetter().set(new SecureChannelWriterBase(message) {
                    @Override
                    protected void write(Message message) {
                        try {
                            messageSender.send(message);
                        } catch (Exception e) {
                            log.warn("cannot write message=" + message + " : " + e, e);
                        }
                    }
                });
                pipe.getLeftSide().getCloseSetter().set(new ChannelListener<StreamSourceChannel>() {
                    @Override
                    public void handleEvent(StreamSourceChannel channel) {
                        activeStreams.getStreams().remove(uniqueKey);
                        messageSender.send(new Message(MessageType.CLOSE, message.getPath()).key(uniqueKey));
                    }
                });
                pipe.getRightSide().getWriteSetter().set(new ChannelListener<StreamSinkChannel>() {
                    private byte[] bytes = new byte[Constants.BUFFER_SIZE];

                    @Override
                    public void handleEvent(StreamSinkChannel channel) {
                        try {
                            int count = 0;
                            while ((count = cis.read(bytes, 0, bytes.length)) > 0) {
                                if (count > 0) {
                                    Channels.writeBlocking(pipe.getRightSide(),
                                            ByteBuffer.wrap(bytes, 0, count));
                                }
                                if (count < 0) {
                                    pipe.getRightSide().close();
                                } else {
                                    channel.resumeWrites();
                                }
                            }
                        } catch (Exception e) {
                            log.warn("cannot read from cypher: " + e, e);
                            IoUtils.safeClose(channel);
                        }
                    }
                });

                activeStreams.getStreams().put(uniqueKey,
                        new StreamInfo<ChannelPipe<StreamSourceChannel, StreamSinkChannel>>(pipe,
                                message.getPath()));
                // start sending data:
                pipe.getLeftSide().resumeReads();
                pipe.getRightSide().resumeWrites();
            }
                break;

            case WRITE: {
                Files.createDirectories(path.getParent());
                OutputStream os = Files.newOutputStream(path, StandardOpenOption.CREATE,
                        StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE);
                OutputStream cos = new CipherOutputStream(os, getCipher(message, Cipher.ENCRYPT_MODE));
                ChannelPipe<StreamSourceChannel, StreamSinkChannel> pipe = xnioWorker.createHalfDuplexPipe();

                pipe.getLeftSide().getReadSetter().set(new SecureChannelReaderBase() {
                    @Override
                    public void handleEvent(StreamSourceChannel channel) {
                        readChannel(message, cos, pipe, channel);
                    }
                });
                pipe.getLeftSide().getCloseSetter().set(new SecureChannelReaderBase() {
                    @Override
                    public void handleEvent(StreamSourceChannel channel) {
                        try {
                            cos.close();
                            activeStreams.getStreams().remove(pipe.toString());
                            messageSender.send(new Message(MessageType.CLOSE, message.getPath()).key(uniqueKey));
                            log.info("closed channel: " + pipe.toString());
                        } catch (IOException e) {
                            log.warn("cannot close stream: message=" + message + " : " + e, e);
                        }
                    }
                });

                activeStreams.getStreams().put(uniqueKey,
                        new StreamInfo<ChannelPipe<StreamSourceChannel, StreamSinkChannel>>(pipe,
                                message.getPath()));
                // start receiving data:
                pipe.getLeftSide().resumeReads();
            }
                break;

            default:
                messageSender.send(new Message(MessageType.ERROR, message.getPath()).key(uniqueKey));
                break;
            }
        }
            break;

        case DATA: {
            if (info != null) {
                Channels.writeBlocking(info.getStream().getRightSide(), ByteBuffer.wrap(message.getBytes()));
            } else {
                messageSender.send(new Message(MessageType.ERROR, message.getPath()).key(uniqueKey));
            }
        }
            break;
        }
    } catch (IOException e) {
        log.warn("cannot handle message: " + message + " : " + e, e);
        throw e;
    } catch (Exception e) {
        log.warn("cannot handle message: " + message + " : " + e, e);
        throw new IOException("cannot handle message: " + message + " : " + e, e);
    }
}
From source file: org.carcv.core.model.file.FileCarImage.java

/**
 * Loads a part corresponding to the rectangular region from the image into memory. Calls
 * {@link #loadFragment(InputStream, Rectangle)} internally.
 *
 * @param rect specifies the rectangular region to load as the image
 * @throws IOException if an error during loading occurs
 */
public void loadFragment(Rectangle rect) throws IOException {
    InputStream inStream = Files.newInputStream(persistablePath.getPath(), StandardOpenOption.READ);
    loadFragment(inStream, rect);
}
From source file: org.apache.nifi.controller.service.ControllerServiceLoader.java

public List<ControllerServiceNode> loadControllerServices(final ControllerServiceProvider provider)
        throws IOException {
    final SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
    final DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
    InputStream fis = null;
    BufferedInputStream bis = null;
    documentBuilderFactory.setNamespaceAware(true);

    final List<ControllerServiceNode> services = new ArrayList<>();

    try {
        final URL configurationResource = this.getClass().getResource("/ControllerServiceConfiguration.xsd");
        if (configurationResource == null) {
            throw new NullPointerException("Unable to load XML Schema for ControllerServiceConfiguration");
        }
        final Schema schema = schemaFactory.newSchema(configurationResource);
        documentBuilderFactory.setSchema(schema);
        final DocumentBuilder builder = documentBuilderFactory.newDocumentBuilder();

        builder.setErrorHandler(new org.xml.sax.ErrorHandler() {
            @Override
            public void fatalError(final SAXParseException err) throws SAXException {
                logger.error("Config file line " + err.getLineNumber() + ", col " + err.getColumnNumber()
                        + ", uri " + err.getSystemId() + " :message: " + err.getMessage());
                if (logger.isDebugEnabled()) {
                    logger.error("Error Stack Dump", err);
                }
                throw err;
            }

            @Override
            public void error(final SAXParseException err) throws SAXParseException {
                logger.error("Config file line " + err.getLineNumber() + ", col " + err.getColumnNumber()
                        + ", uri " + err.getSystemId() + " :message: " + err.getMessage());
                if (logger.isDebugEnabled()) {
                    logger.error("Error Stack Dump", err);
                }
                throw err;
            }

            @Override
            public void warning(final SAXParseException err) throws SAXParseException {
                logger.warn(" Config file line " + err.getLineNumber() + ", uri " + err.getSystemId()
                        + " : message : " + err.getMessage());
                if (logger.isDebugEnabled()) {
                    logger.warn("Warning stack dump", err);
                }
                throw err;
            }
        });

        //if controllerService.xml does not exist, create an empty file...
        fis = Files.newInputStream(this.serviceConfigXmlPath, StandardOpenOption.READ);
        bis = new BufferedInputStream(fis);
        if (Files.size(this.serviceConfigXmlPath) > 0) {
            final Document document = builder.parse(bis);
            final NodeList servicesNodes = document.getElementsByTagName("services");
            final Element servicesElement = (Element) servicesNodes.item(0);

            final List<Element> serviceNodes = DomUtils.getChildElementsByTagName(servicesElement, "service");
            for (final Element serviceElement : serviceNodes) {
                //get properties for the specific controller task - id, name, class,
                //and schedulingPeriod must be set
                final String serviceId = DomUtils.getChild(serviceElement, "identifier").getTextContent()
                        .trim();
                final String serviceClass = DomUtils.getChild(serviceElement, "class").getTextContent().trim();

                //set the class to be used for the configured controller task
                final ControllerServiceNode serviceNode = provider.createControllerService(serviceClass,
                        serviceId, false);

                //optional task-specific properties
                for (final Element optionalProperty : DomUtils.getChildElementsByTagName(serviceElement,
                        "property")) {
                    final String name = optionalProperty.getAttribute("name").trim();
                    final String value = optionalProperty.getTextContent().trim();
                    serviceNode.setProperty(name, value);
                }

                services.add(serviceNode);
                provider.enableControllerService(serviceNode);
            }
        }
    } catch (SAXException | ParserConfigurationException sxe) {
        throw new IOException(sxe);
    } finally {
        FileUtils.closeQuietly(fis);
        FileUtils.closeQuietly(bis);
    }

    return services;
}
From source file: org.kalypso.grid.BinaryGeoGrid.java

/**
 * Creates a new grid file with the given size and scale.<br>
 * The grid is then opened in write mode, so its values can then be set.<br>
 * The grid must be disposed afterwards in order to flush the written information.
 *
 * @param fillGrid
 *            If set to <code>true</code>, the grid will be initially filled with no-data values. Else, the grid
 *            values are undetermined.
 */
public static BinaryGeoGrid createGrid(final File file, final int sizeX, final int sizeY, final int scale,
        final Coordinate origin, final Coordinate offsetX, final Coordinate offsetY, final String sourceCRS,
        final boolean fillGrid) throws GeoGridException {
    try {
        final FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ,
                StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE);
        return new BinaryGeoGrid(channel, sizeX, sizeY, scale, origin, offsetX, offsetY, sourceCRS, fillGrid);
    } catch (final IOException e) {
        throw new GeoGridException("Could not find binary grid file: " + file.getAbsolutePath(), e);
    }
}
From source file: divconq.util.IOUtil.java

public static Memory readEntireFileToMemory(Path file) {
    try (FileChannel ch = FileChannel.open(file, StandardOpenOption.READ)) {
        Memory mem = new Memory();

        // TODO improve mem to read right from channel...
        ByteBuffer bb = ByteBuffer.allocate(4096);

        int amt = ch.read(bb);

        while (amt != -1) {
            bb.flip();
            mem.write(bb);
            bb.clear();

            amt = ch.read(bb);
        }

        mem.setPosition(0);

        return mem;
    } catch (IOException x) {
    }

    return null;
}
From source file: org.wikidata.wdtk.util.DirectoryManagerImpl.java

@Override
public InputStream getInputStreamForFile(String fileName, CompressionType compressionType)
        throws IOException {
    Path filePath = this.directory.resolve(fileName);
    InputStream fileInputStream = Files.newInputStream(filePath, StandardOpenOption.READ);

    switch (compressionType) {
    case NONE:
        return fileInputStream;
    case GZIP:
        return new GZIPInputStream(fileInputStream);
    case BZ2:
        return new BZip2CompressorInputStream(new BufferedInputStream(fileInputStream));
    default:
        throw new IllegalArgumentException("Unsupported compression type: " + compressionType);
    }
}
From source file: it.greenvulcano.configuration.BaseConfigurationManager.java

@Override
public byte[] extract(String name, String entry) {
    Path configurationArchivePath = getConfigurationPath(name);

    try (ZipInputStream configurationArchive = new ZipInputStream(
            Files.newInputStream(configurationArchivePath, StandardOpenOption.READ))) {

        ZipEntry zipEntry = null;
        while ((zipEntry = configurationArchive.getNextEntry()) != null) {
            if (zipEntry.getName().equals(entry)) {
                byte[] entryData = IOUtils.toByteArray(configurationArchive);
                return entryData;
            }
        }
    } catch (Exception e) {
        LOG.error("Failed to extract entry " + entry + " from archive " + configurationArchivePath, e);
    }

    return new byte[] {};
}