List of usage examples for java.io.EOFException

public EOFException(String s)
Constructs an EOFException with the specified detail message.
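Before the examples from real projects, here is a minimal, self-contained sketch of how this constructor is typically used; the class name EofExample and the helper readExactly are illustrative only and not taken from any of the source files below. The reader detects a truncated stream and throws an EOFException whose detail message says what was missing.

import java.io.ByteArrayInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

public class EofExample {
    /** Reads exactly len bytes, throwing EOFException with a detail message on a short read. */
    static byte[] readExactly(InputStream in, int len) throws IOException {
        byte[] buf = new byte[len];
        int off = 0;
        while (off < len) {
            int n = in.read(buf, off, len - off);
            if (n == -1) {
                throw new EOFException("Expected " + len + " bytes but stream ended after " + off);
            }
            off += n;
        }
        return buf;
    }

    public static void main(String[] args) throws IOException {
        try {
            readExactly(new ByteArrayInputStream(new byte[4]), 8);
        } catch (EOFException e) {
            System.err.println(e.getMessage()); // Expected 8 bytes but stream ended after 4
        }
    }
}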
From source file: savant.data.sources.TabixDataSource.java
/**
 * Check our source file to see how many columns we have.  If possible, figure
 * out their names.
 *
 * This is only intended as a temporary hack until we get a more flexible DataFormatForm
 * which lets you set up column->field mappings.
 */
private void inferMapping() throws IOException {
    BlockCompressedInputStream input = new BlockCompressedInputStream(
            NetworkUtils.getSeekableStreamForURI(uri));
    String line = TabixReader.readLine(input);
    if (line == null) {
        throw new EOFException("End of file");
    }

    // If we're lucky, the file starts with a comment line with the field-names in it.
    // That's what UCSC puts there, as does Savant.  In some files (e.g. VCF), this
    // magical comment line may be preceded by a ton of metadata comment lines.
    String lastCommentLine = null;
    String commentChar = Character.toString(reader.getCommentChar());
    while (line.startsWith(commentChar)) {
        lastCommentLine = line;
        line = TabixReader.readLine(input);
    }
    input.close();

    int numCols = 1;
    for (int i = 0; i < line.length(); i++) {
        if (line.charAt(i) == '\t') {
            numCols++;
        }
    }

    // The chrom, start, and end fields are generally enough to uniquely determine
    // which of the well-known formats we have.
    if (matchesMapping(ColumnMapping.BED)) {
        // It's a Bed file, but we can't set the mapping, because it may have a
        // variable number of actual columns.
        columnNames = new String[] { "chrom", "start", "end", "name", "score", "strand", "thickStart",
                "thickEnd", "itemRgb", "blockCount", "blockStarts", "blockSizes" };
    } else if (matchesMapping(ColumnMapping.KNOWNGENE)) {
        columnNames = new String[] { "Name", "Reference", "Strand", "Transcription start",
                "Transcription end", "Coding start", "Coding end", null, null, null, "Unique ID",
                "Alternate name", null, null, null };
        mapping = ColumnMapping.KNOWNGENE;
    } else if (matchesMapping(ColumnMapping.REFSEQ) && numCols == 16) {
        columnNames = new String[] { null, "Transcript name", "Reference", "Strand", "Transcription start",
                "Transcription end", "Coding start", "Coding end", null, null, null, "Unique ID",
                "Gene name", null, null, null };
        mapping = ColumnMapping.REFSEQ;
    } else if (matchesMapping(ColumnMapping.GFF)) {
        // Based on chrom/start/end fields, it's impossible to distinguish between GFF and GTF files.
        // We have to look at column 8, which will have special values for GTF.
        String attributes = line.substring(line.lastIndexOf('\t') + 1);
        if (attributes.contains("gene_id") && attributes.contains("transcript_id")) {
            columnNames = new String[] { "Reference", "Source", "Feature", "Start", "End", "Score",
                    "Strand", "Frame", "Attributes" };
            mapping = ColumnMapping.GTF;
        } else {
            columnNames = new String[] { "Reference", "Source", "Feature", "Start", "End", "Score",
                    "Strand", "Frame", "Group" };
            mapping = ColumnMapping.GFF;
        }
    } else if (matchesMapping(ColumnMapping.PSL)) {
        columnNames = new String[] { "Matches", "Mismatches", "Matches that are part of repeats",
                "Number of 'N' bases", "Number of inserts in query", "Number of bases inserted in query",
                "Number of inserts in target", "Number of bases inserted in target", "Strand",
                "Query sequence name", "Query sequence size", "Alignment start in query",
                "Alignment end in query", "Target sequence name", "Target sequence size",
                "Alignment start in target", "Alignment end in target", null, null, null };
        mapping = ColumnMapping.PSL;
    } else if (matchesMapping(ColumnMapping.VCF)) {
        columnNames = new String[] { "Reference", "Position", "ID", "Reference base(s)",
                "Alternate non-reference alleles", "Quality", "Filter", "Additional information", "Format" };
        mapping = ColumnMapping.VCF;
    }

    if (lastCommentLine != null) {
        if (mapping == null) {
            columnNames = lastCommentLine.substring(1).split("\\t");
            // If user has screwed up the comment line in a bed file, make sure it doesn't lead us astray.
            columnNames[reader.getChromColumn()] = "chrom";
            columnNames[reader.getStartColumn()] = "start";
            if (reader.getEndColumn() >= 0) {
                columnNames[reader.getEndColumn()] = "end";
            }
            mapping = ColumnMapping.inferMapping(columnNames, false);
        } else if (mapping == ColumnMapping.VCF) {
            // For VCF files, save off the participant IDs stored in the extra columns.
            String[] allColumns = lastCommentLine.substring(1).split("\\t");
            if (allColumns.length > columnNames.length) {
                extraColumns = new String[allColumns.length - columnNames.length];
                for (int i = columnNames.length; i < allColumns.length; i++) {
                    extraColumns[i - columnNames.length] = allColumns[i];
                }
            } else {
                // A defective VCF file with no participants.
                extraColumns = new String[0];
            }
        }
    }
}
From source file: net.wimpi.modbus.io.ModbusTCPTransport.java
@Override
public ModbusRequest readRequest() throws ModbusIOException {
    // System.out.println("readRequest()");
    try {
        ModbusRequest req = null;
        synchronized (m_ByteIn) {
            // use same buffer
            byte[] buffer = m_ByteIn.getBuffer();

            // read to byte length of message
            if (m_Input.read(buffer, 0, 6) == -1) {
                throw new EOFException("Premature end of stream (Header truncated).");
            }

            // extract length of bytes following in message
            int bf = ModbusUtil.registerToShort(buffer, 4);

            // read rest
            if (m_Input.read(buffer, 6, bf) == -1) {
                throw new ModbusIOException("Premature end of stream (Message truncated).");
            }

            m_ByteIn.reset(buffer, (6 + bf));
            m_ByteIn.skip(7);

            int functionCode = m_ByteIn.readUnsignedByte();
            m_ByteIn.reset();

            req = ModbusRequest.createModbusRequest(functionCode);
            req.readFrom(m_ByteIn);
        }
        return req;
        /*
         * int transactionID = m_Input.readUnsignedShort();
         * int protocolID = m_Input.readUnsignedShort();
         * int dataLength = m_Input.readUnsignedShort();
         * if (protocolID != Modbus.DEFAULT_PROTOCOL_ID || dataLength > 256) {
         *     throw new ModbusIOException();
         * }
         * int unitID = m_Input.readUnsignedByte();
         * int functionCode = m_Input.readUnsignedByte();
         * ModbusRequest request =
         *     ModbusRequest.createModbusRequest(functionCode, m_Input, false);
         * if (request instanceof IllegalFunctionRequest) {
         *     // skip rest of bytes
         *     for (int i = 0; i < dataLength - 2; i++) {
         *         m_Input.readByte();
         *     }
         * }
         * // set read parameters
         * request.setTransactionID(transactionID);
         * request.setProtocolID(protocolID);
         * request.setUnitID(unitID);
         * return request;
         */
    } catch (EOFException eoex) {
        throw new ModbusIOException(true);
    } catch (SocketException sockex) {
        // connection reset by peer, also EOF
        throw new ModbusIOException(true);
    } catch (Exception ex) {
        ex.printStackTrace();
        throw new ModbusIOException("I/O exception - failed to read.");
    }
}
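The six bytes read before throwing the EOFException are the Modbus/TCP MBAP header, whose last two bytes hold the big-endian length of the PDU that follows; ModbusUtil.registerToShort(buffer, 4) decodes exactly that field. A small standalone sketch of the same decoding with no jamod dependency (the helper name pduLength is hypothetical, for illustration only):

// Extracts the PDU length from a Modbus/TCP MBAP header (bytes 4-5, big-endian).
static int pduLength(byte[] mbapHeader) throws java.io.EOFException {
    if (mbapHeader == null || mbapHeader.length < 6) {
        throw new java.io.EOFException("Premature end of stream (Header truncated).");
    }
    return ((mbapHeader[4] & 0xFF) << 8) | (mbapHeader[5] & 0xFF);
}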
From source file: org.apache.shindig.gadgets.http.BasicHttpFetcherTest.java
@Test
public void testToByteArraySafeThrowsException2() throws Exception {
    String exceptionMessage = "EOF Exception and Any Random Cause";
    EOFException e = new EOFException(exceptionMessage);
    EasyMock.expect(mockInputStream.read(EasyMock.isA(byte[].class))).andThrow(e).anyTimes();
    EasyMock.replay(mockEntity, mockInputStream);

    boolean exceptionCaught = false;
    try {
        fetcher.toByteArraySafe(mockEntity);
    } catch (EOFException eofe) {
        assertEquals(exceptionMessage, eofe.getMessage());
        exceptionCaught = true;
    }
    assertTrue(exceptionCaught);
    EasyMock.verify(mockEntity, mockInputStream);
}
From source file: org.eclipse.smarthome.binding.homematic.internal.communicator.message.BinRpcMessage.java
/**
 * Decodes a BIN-RPC message from the given byte array.
 */
public BinRpcMessage(byte[] message, boolean methodHeader, String encoding) throws IOException, ParseException {
    this.encoding = encoding;
    if (message.length < 8) {
        throw new EOFException("Only " + message.length + " bytes received");
    }
    validateBinXSignature(message);
    decodeMessage(message, methodHeader);
}
From source file: org.apache.hadoop.hive.serde2.teradata.TeradataBinaryDataInputStream.java
/**
 * Read TIMESTAMP(P).
 * The representation of timestamp in Teradata binary format is:
 * the byte number to read is based on the precision of timestamp,
 * each byte represents one char and the timestamp is using string representation,
 * eg: for TIMESTAMP(6), we need to read 26 bytes
 * 31 39 31 31 2d 31 31 2d 31 31 20 31 39 3a 32 30 3a 32 31 2e 34 33 33 32 30 30
 * will represent 1911-11-11 19:20:21.433200.
 * the null timestamp will use space to pad.
 *
 * @param byteNum the byte number that will be read from inputstream
 * @return the timestamp
 * @throws IOException the io exception
 */
public Timestamp readTimestamp(Integer byteNum) throws IOException {
    // yyyy-mm-dd hh:mm:ss
    byte[] timestampContent = new byte[byteNum];
    int numOfBytesRead = in.read(timestampContent);
    if (timestampContent.length != 0 && numOfBytesRead != byteNum) {
        throw new EOFException(
                format("Fail to read the timestamp. Expect %d bytes, get %d bytes", byteNum, numOfBytesRead));
    }
    String timestampStr = new String(timestampContent, "UTF8");
    if (timestampStr.trim().length() == 0) {
        return null;
    }
    return Timestamp.valueOf(timestampStr);
}
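The Javadoc above spells out the wire format: each byte is one ASCII character of the textual timestamp, so the 26 example bytes decode directly to "1911-11-11 19:20:21.433200". A minimal, self-contained sketch of that decoding, assuming only the format described in the comment (the class name TeradataTimestampDemo is illustrative and not part of the Hive serde):

import java.nio.charset.StandardCharsets;
import java.sql.Timestamp;

public class TeradataTimestampDemo {
    public static void main(String[] args) {
        // The 26 bytes from the Javadoc example for TIMESTAMP(6).
        byte[] raw = { 0x31, 0x39, 0x31, 0x31, 0x2d, 0x31, 0x31, 0x2d, 0x31, 0x31, 0x20,
                0x31, 0x39, 0x3a, 0x32, 0x30, 0x3a, 0x32, 0x31, 0x2e,
                0x34, 0x33, 0x33, 0x32, 0x30, 0x30 };
        String text = new String(raw, StandardCharsets.UTF_8); // "1911-11-11 19:20:21.433200"
        // A space-padded (all-blank) value would represent a null timestamp.
        Timestamp ts = text.trim().isEmpty() ? null : Timestamp.valueOf(text);
        System.out.println(ts); // 1911-11-11 19:20:21.4332
    }
}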
From source file: com.aliyun.odps.volume.VolumeFSInputStream.java
@Override
public synchronized void seek(long pos) throws IOException {
    checkClosed();
    if (pos < 0) {
        throw new EOFException(FSExceptionMessages.NEGATIVE_SEEK);
    }
    if (pos > fileLength) {
        throw new EOFException(FSExceptionMessages.CANNOT_SEEK_PAST_EOF);
    }
    this.pos = pos;
    this.isSeeked = true;
}
From source file: org.eredlab.g4.ccl.net.pop3.POP3.java
private void __getReply() throws IOException {
    String line;

    _replyLines.setSize(0);
    line = _reader.readLine();

    if (line == null)
        throw new EOFException("Connection closed without indication.");

    if (line.startsWith(_OK))
        _replyCode = POP3Reply.OK;
    else if (line.startsWith(_ERROR))
        _replyCode = POP3Reply.ERROR;
    else
        throw new MalformedServerReplyException("Received invalid POP3 protocol response from server.");

    _replyLines.addElement(line);
    _lastReplyLine = line;

    if (_commandSupport_.getListenerCount() > 0)
        _commandSupport_.fireReplyReceived(_replyCode, getReplyString());
}
From source file: org.apache.sysml.runtime.io.MatrixReader.java
protected static void checkValidInputFile(FileSystem fs, Path path) throws IOException {
    // check non-existing file
    if (!fs.exists(path))
        throw new IOException("File " + path.toString() + " does not exist on HDFS/LFS.");

    // check for empty file
    if (MapReduceTool.isFileEmpty(fs, path))
        throw new EOFException("Empty input file " + path.toString() + ".");
}
From source file: org.apache.hadoop.mapred.IFileInputStream.java
/**
 * Close the input stream. Note that we need to read to the end of the
 * stream to validate the checksum.
 */
@Override
public void close() throws IOException {
    if (curReadahead != null) {
        curReadahead.cancel();
    }
    if (currentOffset < dataLength) {
        byte[] t = new byte[Math.min((int) (Integer.MAX_VALUE & (dataLength - currentOffset)), 32 * 1024)];
        while (currentOffset < dataLength) {
            int n = read(t, 0, t.length);
            if (0 == n) {
                throw new EOFException("Could not validate checksum");
            }
        }
    }
    in.close();
}
From source file: org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader.java
private void initInternal(FSDataInputStream stream, boolean isFirst) throws IOException {
    close();
    long expectedPos = PB_WAL_MAGIC.length;
    if (stream == null) {
        stream = fs.open(path);
        stream.seek(expectedPos);
    }
    if (stream.getPos() != expectedPos) {
        throw new IOException("The stream is at invalid position: " + stream.getPos());
    }
    // Initialize metadata or, when we reset, just skip the header.
    WALProtos.WALHeader.Builder builder = WALProtos.WALHeader.newBuilder();
    boolean hasHeader = readHeader(builder, stream);
    if (!hasHeader) {
        throw new EOFException("Couldn't read WAL PB header");
    }
    if (isFirst) {
        WALProtos.WALHeader header = builder.build();
        this.hasCompression = header.hasHasCompression() && header.getHasCompression();
        this.hasTagCompression = header.hasHasTagCompression() && header.getHasTagCompression();
    }
    this.inputStream = stream;
    this.walEditsStopOffset = this.fileLength;
    long currentPosition = stream.getPos();
    trailerPresent = setTrailerIfPresent();
    this.seekOnFs(currentPosition);
    if (LOG.isTraceEnabled()) {
        LOG.trace("After reading the trailer: walEditsStopOffset: " + this.walEditsStopOffset
                + ", fileLength: " + this.fileLength + ", " + "trailerPresent: " + trailerPresent);
    }
}