Usage examples for java.io.DataInputStream.readFully
public final void readFully(byte b[]) throws IOException
From source file:org.apache.xmlgraphics.image.codec.png.PNGRed.java
private static PNGChunk readChunk(final DataInputStream distream) { try {//from w w w . j a v a2 s .c o m final int length = distream.readInt(); final int type = distream.readInt(); final byte[] data = new byte[length]; distream.readFully(data); final int crc = distream.readInt(); return new PNGChunk(length, type, data, crc); } catch (final Exception e) { log.error("Exception", e); return null; } }
From source file:it.infn.ct.jsaga.adaptor.jocci.job.jOCCIJobControlAdaptor.java
public String getPublicKey(String file) { FileInputStream fis = null;//w ww . j a v a 2 s . c o m String _publicKey = ""; try { File f = new File(file); fis = new FileInputStream(f); DataInputStream dis = new DataInputStream(fis); byte[] keyBytes = new byte[(int) f.length()]; dis.readFully(keyBytes); dis.close(); _publicKey = new String(keyBytes).trim(); } catch (IOException ex) { java.util.logging.Logger.getLogger(jOCCIJobControlAdaptor.class.getName()).log(Level.SEVERE, null, ex); } finally { try { fis.close(); } catch (IOException ex) { java.util.logging.Logger.getLogger(jOCCIJobControlAdaptor.class.getName()).log(Level.SEVERE, null, ex); } } return (_publicKey); }
From source file:org.apache.jackrabbit.core.persistence.mem.InMemBundlePersistenceManager.java
/** * Reads the content of the hash maps from the file system * * @throws Exception if an error occurs/*from w ww . java2s . com*/ */ public synchronized void loadContents() throws Exception { // read item states FileSystemResource fsRes = new FileSystemResource(wspFS, BUNDLE_FILE_PATH); if (!fsRes.exists()) { return; } BufferedInputStream bis = new BufferedInputStream(fsRes.getInputStream()); DataInputStream in = new DataInputStream(bis); try { int n = in.readInt(); // number of entries while (n-- > 0) { String s = in.readUTF(); // id NodeId id = NodeId.valueOf(s); int length = in.readInt(); // data length byte[] data = new byte[length]; in.readFully(data); // data // store in map bundleStore.put(id, data); } } finally { in.close(); } // read references fsRes = new FileSystemResource(wspFS, REFS_FILE_PATH); bis = new BufferedInputStream(fsRes.getInputStream()); in = new DataInputStream(bis); try { int n = in.readInt(); // number of entries while (n-- > 0) { String s = in.readUTF(); // target id NodeId id = NodeId.valueOf(s); int length = in.readInt(); // data length byte[] data = new byte[length]; in.readFully(data); // data // store in map refsStore.put(id, data); } } finally { in.close(); } if (!useFileBlobStore) { // read blobs fsRes = new FileSystemResource(wspFS, BLOBS_FILE_PATH); bis = new BufferedInputStream(fsRes.getInputStream()); in = new DataInputStream(bis); try { int n = in.readInt(); // number of entries while (n-- > 0) { String id = in.readUTF(); // id int length = in.readInt(); // data length byte[] data = new byte[length]; in.readFully(data); // data // store in map blobs.put(id, data); } } finally { in.close(); } } }
From source file:com.koushikdutta.superuser.MultitaskSuRequestActivity.java
// Spawns a background thread that connects to the su daemon's local socket and
// reads the request payload, then notifies the UI thread. The wire format is a
// sequence of (int nameLen, nameLen bytes, int dataLen, dataLen bytes) pairs,
// terminated by a field named "eof". On any error the socket is closed and the
// activity is finished.
void manageSocket() {
    new Thread() {
        @Override
        public void run() {
            try {
                mSocket = new LocalSocket();
                mSocket.connect(new LocalSocketAddress(mSocketPath, Namespace.FILESYSTEM));

                DataInputStream is = new DataInputStream(mSocket.getInputStream());

                ContentValues payload = new ContentValues();
                // Bounded loop: never read more than SU_PROTOCOL_PARAM_MAX fields.
                for (int i = 0; i < SU_PROTOCOL_PARAM_MAX; i++) {
                    int nameLen = is.readInt();
                    // Reject oversized lengths before allocating, to avoid huge allocations
                    // from a malformed/hostile peer.
                    if (nameLen > SU_PROTOCOL_NAME_MAX)
                        throw new IllegalArgumentException("name length too long: " + nameLen);
                    byte[] nameBytes = new byte[nameLen];
                    is.readFully(nameBytes);
                    // NOTE(review): uses the platform default charset — presumably the
                    // daemon sends ASCII/UTF-8; confirm.
                    String name = new String(nameBytes);

                    int dataLen = is.readInt();
                    // Per-field value size limit depends on the field name.
                    if (dataLen > getValueMax(name))
                        throw new IllegalArgumentException(name + " data length too long: " + dataLen);
                    byte[] dataBytes = new byte[dataLen];
                    is.readFully(dataBytes);
                    String data = new String(dataBytes);

                    payload.put(name, data);

                    // "eof" marks the end of the payload.
                    if ("eof".equals(name))
                        break;
                }

                int protocolVersion = payload.getAsInteger("version");
                mCallerUid = payload.getAsInteger("from.uid");
                mDesiredUid = payload.getAsByte("to.uid");
                mDesiredCmd = payload.getAsString("command");
                String calledBin = payload.getAsString("from.bin");
                mPid = payload.getAsInteger("pid");

                // Hand off to the UI thread now that the request data is populated.
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        mRequestReady = true;
                        requestReady();
                    }
                });

                // Only the canonical package warns about a stale su binary version.
                if ("com.koushikdutta.superuser".equals(getPackageName())) {
                    if (!SuHelper.CURRENT_VERSION.equals(payload.getAsString("binary.version")))
                        SuCheckerReceiver.doNotification(MultitaskSuRequestActivity.this);
                }
            } catch (Exception ex) {
                Log.i(LOGTAG, ex.getMessage(), ex);
                try {
                    mSocket.close();
                } catch (Exception e) {
                    // best-effort close; nothing more to do on failure
                }
                // Tear down the activity on the UI thread.
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        finish();
                    }
                });
            }
        }
    }.start();
}
From source file:com.limegroup.gnutella.metadata.ASFParser.java
/**
 * Parses the extended content encryption object, looking for encryption
 * schemes we know about. Currently, this is Weed.
 *
 * <p>On valid Weed data, copies the author/title/description/collection/
 * copyright fields into the corresponding tag fields.
 */
private void parseExtendedContentEncryption(DataInputStream ds) throws IOException {
    LOG.debug("Parsing extended content encryption");

    final int size = ByteUtils.leb2int(ds);
    if (size < 0)
        throw new IOException(
                "ASF file reports excessive length of encryption data:" + ByteUtils.uint2long(size));

    // The payload is UTF-16 XML describing the rights-management data.
    final byte[] raw = new byte[size];
    ds.readFully(raw);
    final String xml = new String(raw, "UTF-16").trim();

    final WRMXML wrmdata = new WRMXML(xml);
    if (!wrmdata.isValid()) {
        LOG.debug("WRM Data is invalid.");
        return;
    }
    _wrmdata = wrmdata;

    final WeedInfo weed = new WeedInfo(wrmdata);
    if (!weed.isValid())
        return;

    LOG.debug("Parsed weed data.");
    _weed = weed;
    _wrmdata = weed;
    // Copy over only the fields Weed actually supplied.
    if (_weed.getAuthor() != null)
        _artist = _weed.getAuthor();
    if (_weed.getTitle() != null)
        _title = _weed.getTitle();
    if (_weed.getDescription() != null)
        _comment = _weed.getDescription();
    if (_weed.getCollection() != null)
        _album = _weed.getCollection();
    if (_weed.getCopyright() != null)
        _copyright = _weed.getCopyright();
}
From source file:com.limegroup.gnutella.metadata.ASFParser.java
/** * Parses the content encryption object, to determine if the file is protected. * We parse through it all, even though we don't use all of it, to ensure * that the object is well-formed./*from w w w . j a va 2 s. c o m*/ */ private void parseContentEncryption(DataInputStream ds) throws IOException { LOG.debug("Parsing content encryption"); long skipSize = ByteUtils.uint2long(ByteUtils.leb2int(ds)); // data IOUtils.ensureSkip(ds, skipSize); int typeSize = ByteUtils.leb2int(ds); // type if (typeSize < 0) throw new IOException("ASF file is corrupt. Type size < 0: " + typeSize); byte[] b = new byte[typeSize]; ds.readFully(b); _drmType = new String(b).trim(); skipSize = ByteUtils.uint2long(ByteUtils.leb2int(ds)); // data IOUtils.ensureSkip(ds, skipSize); skipSize = ByteUtils.uint2long(ByteUtils.leb2int(ds)); // url IOUtils.ensureSkip(ds, skipSize); }
From source file:com.limegroup.gnutella.metadata.ASFParser.java
/**
 * Parses known information out of the Content Description object.
 * <p>
 * The data is stored as:
 * 10 bytes of sizes (2 bytes for each size),
 * followed by the data corresponding to each size, in order of:
 * Title, Author, Copyright, Description, Rating.
 */
private void parseContentDescription(DataInputStream ds) throws IOException {
    LOG.debug("Parsing Content Description");

    final int FIELD_COUNT = 5;

    // All five lengths come first on the wire...
    final int[] lengths = new int[FIELD_COUNT];
    for (int i = 0; i < FIELD_COUNT; i++)
        lengths[i] = ByteUtils.ushort2int(ByteUtils.leb2short(ds));

    // ...then the five payloads, in the same order.
    final byte[][] raw = new byte[FIELD_COUNT][];
    for (int i = 0; i < FIELD_COUNT; i++) {
        raw[i] = new byte[lengths[i]];
        ds.readFully(raw[i]);
    }

    _title = string(raw[0]);
    _artist = string(raw[1]);
    _copyright = string(raw[2]);
    _comment = string(raw[3]);
    _rating = string(raw[4]);

    if (LOG.isDebugEnabled())
        LOG.debug("Standard Tag Values. Title: " + _title + ", Author: " + _artist + ", Copyright: "
                + _copyright + ", Description: " + _comment + ", Rating: " + _rating);
}
From source file:com.limegroup.gnutella.metadata.ASFParser.java
/** * Reads the extended Content Description object. * The extended tag has an arbitrary number of fields. * The number of fields is stored first, as: * Field Count (2 bytes)/*from w ww .jav a 2 s . com*/ *<p> * Each field is stored as: * <pre> * Field Size (2 bytes) * Field (Field Size bytes) * Data Type (2 bytes) * Data Size (2 bytes) * Data (Data Size bytes) * </pre> */ private void parseExtendedContentDescription(DataInputStream ds) throws IOException { LOG.debug("Parsing extended content description"); int fieldCount = ByteUtils.ushort2int(ByteUtils.leb2short(ds)); if (LOG.isDebugEnabled()) LOG.debug("Extended fieldCount: " + fieldCount); for (int i = 0; i < fieldCount; i++) { int fieldSize = ByteUtils.ushort2int(ByteUtils.leb2short(ds)); byte[] field = new byte[fieldSize]; ds.readFully(field); String fieldName = string(field); int dataType = ByteUtils.ushort2int(ByteUtils.leb2short(ds)); int dataSize = ByteUtils.ushort2int(ByteUtils.leb2short(ds)); switch (dataType) { case TYPE_STRING: parseExtendedString(fieldName, dataSize, ds); break; case TYPE_BINARY: parseExtendedBinary(fieldName, dataSize, ds); break; case TYPE_BOOLEAN: parseExtendedBoolean(fieldName, dataSize, ds); break; case TYPE_INT: parseExtendedInt(fieldName, dataSize, ds); break; case TYPE_LONG: parseExtendedInt(fieldName, dataSize, ds); break; default: if (LOG.isDebugEnabled()) LOG.debug("Unknown dataType: " + dataType + " for field: " + fieldName); IOUtils.ensureSkip(ds, dataSize); } } }
From source file:org.apache.hadoop.mapred.TestShuffleHandler.java
/**
 * Validate the ownership of the map-output files being pulled in. The
 * local-file-system owner of the file should match the user component in the
 * request.
 *
 * @throws Exception exception
 */
@Test(timeout = 100000)
public void testMapFileAccess() throws IOException {
    // This will run only if NativeIO is enabled, as SecureIOUtils needs it.
    assumeTrue(NativeIO.isAvailable());
    Configuration conf = new Configuration();
    conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
    conf.setInt(ShuffleHandler.MAX_SHUFFLE_CONNECTIONS, 3);
    // Kerberos auth forces the secure-IO ownership check under test.
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    File absLogDir = new File("target", TestShuffleHandler.class.getSimpleName() + "LocDir").getAbsoluteFile();
    conf.set(YarnConfiguration.NM_LOCAL_DIRS, absLogDir.getAbsolutePath());
    ApplicationId appId = ApplicationId.newInstance(12345, 1);
    LOG.info(appId.toString());
    String appAttemptId = "attempt_12345_1_m_1_0";
    // "randomUser" deliberately differs from the file owner so the check fails.
    String user = "randomUser";
    String userFolder = "randomUserFolder";
    String reducerId = "0";
    List<File> fileMap = new ArrayList<File>();
    createShuffleHandlerFiles(absLogDir, userFolder, appId.toString(), appAttemptId, conf, fileMap);
    ShuffleHandler shuffleHandler = new ShuffleHandler() {
        @Override
        protected Shuffle getShuffle(Configuration conf) {
            // replace the shuffle handler with one stubbed for testing
            return new Shuffle(conf) {
                @Override
                protected void verifyRequest(String appid, ChannelHandlerContext ctx, HttpRequest request,
                        HttpResponse response, URL requestUri) throws IOException {
                    // Do nothing: skip request verification so the ownership
                    // check is the only gate exercised by this test.
                }
            };
        }
    };
    shuffleHandler.init(conf);
    try {
        shuffleHandler.start();
        // Register the application with a job token so the shuffle accepts it.
        DataOutputBuffer outputBuffer = new DataOutputBuffer();
        outputBuffer.reset();
        Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>("identifier".getBytes(),
                "password".getBytes(), new Text(user), new Text("shuffleService"));
        jt.write(outputBuffer);
        shuffleHandler.initializeApplication(new ApplicationInitializationContext(user, appId,
                ByteBuffer.wrap(outputBuffer.getData(), 0, outputBuffer.getLength()), userFolder));
        URL url = new URL(
                "http://127.0.0.1:" + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
                        + "/mapOutput?job=job_12345_0001&reduce=" + reducerId + "&map=attempt_12345_1_m_1_0");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME, ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
        conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION, ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
        conn.connect();
        byte[] byteArr = new byte[10000];
        try {
            DataInputStream is = new DataInputStream(conn.getInputStream());
            // Read the whole (error) response body into byteArr.
            is.readFully(byteArr);
        } catch (EOFException e) {
            // ignore: response may be shorter than the buffer
        }
        // Retrieve file owner name
        FileInputStream is = new FileInputStream(fileMap.get(0));
        String owner = NativeIO.POSIX.getFstat(is.getFD()).getOwner();
        is.close();
        // The response must contain the ownership-mismatch error message.
        String message = "Owner '" + owner + "' for path " + fileMap.get(0).getAbsolutePath()
                + " did not match expected owner '" + user + "'";
        Assert.assertTrue((new String(byteArr)).contains(message));
    } finally {
        shuffleHandler.stop();
        FileUtil.fullyDelete(absLogDir);
    }
}
From source file:com.tealeaf.NativeShim.java
public String loadSourceFile(String url) { TeaLeafOptions options = context.getOptions(); String sourceString = null;/*w w w. j a v a2 s . c om*/ if (options.isDevelop() && options.get("forceURL", false)) { // load native.js from the file system // read file in String path = resourceManager.getStorageDirectory(); String result = null; DataInputStream in = null; try { File f = new File(path + url); byte[] buffer = new byte[(int) f.length()]; in = new DataInputStream(new FileInputStream(f)); in.readFully(buffer); result = new String(buffer); } catch (FileNotFoundException e) { logger.log("Error loading", url, "from", path); logger.log("File not found!"); throw new RuntimeException("File not found in loadSourceFile"); } catch (IOException e) { throw new RuntimeException("IO problem in fileToString", e); } finally { try { if (in != null) { in.close(); } } catch (IOException e) { logger.log(e); } } sourceString = result; } else { sourceString = resourceManager.getFileContents(url); } return sourceString; }