List of usage examples for java.io DataInputStream readFully
public final void readFully(byte b[]) throws IOException
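readFully(byte b[]) blocks until exactly b.length bytes have been read, throwing EOFException if the stream ends first and IOException on other failures. That contract is what makes the pattern most of the examples below use safe: size a buffer from File.length(), then fill it in one call. A minimal sketch of that pattern, assuming a hypothetical input file data.bin:

import java.io.DataInputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

public class ReadFullyExample {
    public static void main(String[] args) throws IOException {
        File file = new File("data.bin"); // hypothetical input file
        byte[] data = new byte[(int) file.length()];
        // try-with-resources closes the stream even if readFully throws
        try (DataInputStream in = new DataInputStream(new FileInputStream(file))) {
            in.readFully(data); // blocks until data.length bytes are read
        } catch (EOFException e) {
            // thrown if the file shrinks between length() and the read
            throw new IOException("Unexpected end of file: " + file, e);
        }
        System.out.println("Read " + data.length + " bytes");
    }
}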
From source file:org.mwc.cmap.xyplot.views.XYPlotView.java
private void rtfToClipboard(final String fName, final Dimension dim) {
    // Issue #520 - Copy WMF embedded in RTF
    ByteArrayOutputStream os = null;
    DataInputStream dis = null;
    try {
        os = new ByteArrayOutputStream();
        RTFWriter writer = new RTFWriter(os);
        File file = new File(fName);
        byte[] data = new byte[(int) file.length()];
        dis = new DataInputStream(new FileInputStream(file));
        dis.readFully(data);
        writer.writeHeader();
        writer.writeEmfPicture(data, dim.getWidth(), dim.getHeight());
        writer.writeTail();
        RTFTransfer rtfTransfer = RTFTransfer.getInstance();
        Clipboard clipboard = new Clipboard(Display.getDefault());
        Object[] rtfData = new Object[] { os.toString() };
        clipboard.setContents(rtfData, new Transfer[] { rtfTransfer });
    } catch (final Exception e1) {
        IStatus status = new Status(IStatus.ERROR, PlotViewerPlugin.PLUGIN_ID, e1.getLocalizedMessage(), e1);
        XYPlotPlugin.getDefault().getLog().log(status);
    } finally {
        if (os != null) {
            try {
                os.close();
            } catch (IOException e1) {
                // ignore
            }
        }
        if (dis != null) {
            try {
                dis.close();
            } catch (IOException e1) {
                // ignore
            }
        }
    }
}
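The example above manages the stream and buffer by hand; on Java 7+ the same whole-file read can be written with java.nio.file.Files.readAllBytes, which sizes the buffer and closes the stream internally. A sketch of just the file-reading step, with an illustrative path:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

public class ReadAllBytesExample {
    public static void main(String[] args) throws IOException {
        // Equivalent of the readFully-based whole-file read above
        byte[] data = Files.readAllBytes(Paths.get("plot.emf")); // hypothetical path
        System.out.println("Read " + data.length + " bytes");
    }
}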
From source file:org.apache.hadoop.hbase.security.HBaseSaslRpcClient.java
/**
 * Do client side SASL authentication with server via the given InputStream
 * and OutputStream
 *
 * @param inS  InputStream to use
 * @param outS OutputStream to use
 * @return true if connection is set up, or false if needs to switch
 *         to simple Auth.
 * @throws IOException
 */
public boolean saslConnect(InputStream inS, OutputStream outS) throws IOException {
    DataInputStream inStream = new DataInputStream(new BufferedInputStream(inS));
    DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(outS));
    try {
        byte[] saslToken = new byte[0];
        if (saslClient.hasInitialResponse())
            saslToken = saslClient.evaluateChallenge(saslToken);
        if (saslToken != null) {
            outStream.writeInt(saslToken.length);
            outStream.write(saslToken, 0, saslToken.length);
            outStream.flush();
            if (LOG.isDebugEnabled())
                LOG.debug("Have sent token of size " + saslToken.length + " from initSASLContext.");
        }
        if (!saslClient.isComplete()) {
            readStatus(inStream);
            int len = inStream.readInt();
            if (len == SaslUtil.SWITCH_TO_SIMPLE_AUTH) {
                if (!fallbackAllowed) {
                    throw new IOException("Server asks us to fall back to SIMPLE auth, "
                            + "but this client is configured to only allow secure connections.");
                }
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Server asks us to fall back to simple auth.");
                }
                saslClient.dispose();
                return false;
            }
            saslToken = new byte[len];
            if (LOG.isDebugEnabled())
                LOG.debug("Will read input token of size " + saslToken.length
                        + " for processing by initSASLContext");
            inStream.readFully(saslToken);
        }
        while (!saslClient.isComplete()) {
            saslToken = saslClient.evaluateChallenge(saslToken);
            if (saslToken != null) {
                if (LOG.isDebugEnabled())
                    LOG.debug("Will send token of size " + saslToken.length + " from initSASLContext.");
                outStream.writeInt(saslToken.length);
                outStream.write(saslToken, 0, saslToken.length);
                outStream.flush();
            }
            if (!saslClient.isComplete()) {
                readStatus(inStream);
                saslToken = new byte[inStream.readInt()];
                if (LOG.isDebugEnabled())
                    LOG.debug("Will read input token of size " + saslToken.length
                            + " for processing by initSASLContext");
                inStream.readFully(saslToken);
            }
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("SASL client context established. Negotiated QoP: "
                    + saslClient.getNegotiatedProperty(Sasl.QOP));
        }
        return true;
    } catch (IOException e) {
        try {
            saslClient.dispose();
        } catch (SaslException ignored) {
            // ignore further exceptions during cleanup
        }
        throw e;
    }
}
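The token exchange above relies on a simple length-prefixed framing: writeInt(length) followed by the raw bytes on the sending side, readInt plus readFully on the receiving side. A stripped-down, SASL-free sketch of that framing (class and method names are ours):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class TokenFraming {
    // Write one length-prefixed token, mirroring the outStream side above.
    static void writeToken(DataOutputStream out, byte[] token) throws IOException {
        out.writeInt(token.length);
        out.write(token, 0, token.length);
        out.flush();
    }

    // Read one length-prefixed token, mirroring the inStream side above.
    static byte[] readToken(DataInputStream in) throws IOException {
        int len = in.readInt();      // 4-byte big-endian length
        byte[] token = new byte[len];
        in.readFully(token);         // blocks until the whole token arrives
        return token;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        writeToken(new DataOutputStream(buf), "hello".getBytes(StandardCharsets.UTF_8));
        byte[] round = readToken(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
        System.out.println(new String(round, StandardCharsets.UTF_8)); // prints "hello"
    }
}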
From source file:org.trianacode.taskgraph.util.FileUtils.java
/**
 * Copies the file from the first location to the second. If the second location exists then the user is prompted
 * to ask if he/she wants to overwrite the existing file. They are given dates of the files also. The full Bill
 * Gates effect! The mode can be either ASCII or BINARY.
 *
 * @return false if the file could not be copied, true if successful
 */
public static boolean copyFile(String loc1, String loc2, int mode, boolean confirmOnReplace) {
    BufferedReader br;
    DataInputStream ds;
    PrintWriter bw;
    DataOutputStream dout;
    long fmod1, fmod2;
    String text;
    byte[] bytes;
    File f1;
    File f2;

    if ((loc1 == null) || (loc2 == null)) {
        return false;
    }

    f1 = new File(loc1);
    f2 = new File(loc2);

    // if (!f1.exists()) {
    //     return false;
    // }

    if (f2.isDirectory()) { // add source file name if none chosen
        loc2 = loc2 + File.separator + loc1.substring(loc1.lastIndexOf(File.separator) + 1);
    }

    f2 = new File(loc2);

    if (confirmOnReplace) {
        if ((f1.exists()) && (f2.exists())) { // does the time check with user
            fmod1 = f1.lastModified();
            fmod2 = f2.lastModified();
            /* TODO
            QuestionWindow con = new QuestionWindow(null, "Do you want to replace :\n" + loc2
                    + " modified : " + Str.niceDateAndTime(new Date(fmod2)) + "\nwith\n" + loc1
                    + " modified : " + Str.niceDateAndTime(new Date(fmod1)) + " ?\n");
            if (con.reply != con.YES)
                return false; */
        }
    }

    try {
        if (mode == ASCII) {
            br = createReader(loc1);
            text = readFile(br);
            bw = FileUtils.createWriter(loc2);
            bw.print(text);
            closeWriter(bw);
            closeReader(br);
        } else {
            ds = new DataInputStream(new FileInputStream(f1));
            bytes = new byte[(int) f1.length()];
            ds.readFully(bytes);
            dout = createOutputStream(loc2);
            dout.write(bytes);
            dout.close();
            ds.close();
        }
    } catch (Exception ee) {
        logger.warn("Error copying from " + loc1 + " to " + loc2 + ": " + formatThrowable(ee));
        return false;
    }
    return true;
}
From source file:org.spout.engine.filesystem.WorldFiles.java
public static void readColumn(InputStream in, SpoutColumn column, AtomicInteger lowestY,
        BlockMaterial[][] topmostBlocks) {
    if (in == null) {
        // The input stream is null because no height map data exists
        for (int x = 0; x < SpoutColumn.BLOCKS.SIZE; x++) {
            for (int z = 0; z < SpoutColumn.BLOCKS.SIZE; z++) {
                column.getAtomicInteger(x, z).set(Integer.MIN_VALUE);
                topmostBlocks[x][z] = null;
                column.setDirty(x, z);
            }
        }
        lowestY.set(Integer.MAX_VALUE);
        return;
    }
    DataInputStream dataStream = new DataInputStream(in);
    try {
        for (int x = 0; x < SpoutColumn.BLOCKS.SIZE; x++) {
            for (int z = 0; z < SpoutColumn.BLOCKS.SIZE; z++) {
                column.getAtomicInteger(x, z).set(dataStream.readInt());
            }
        }
        @SuppressWarnings("unused")
        int version = dataStream.readInt();
        lowestY.set(dataStream.readInt());

        // Read the topmost-block map
        StringMap global = ((SpoutEngine) Spout.getEngine()).getEngineItemMap();
        StringMap itemMap = column.getWorld().getItemMap();
        boolean warning = false;
        for (int x = 0; x < SpoutColumn.BLOCKS.SIZE; x++) {
            for (int z = 0; z < SpoutColumn.BLOCKS.SIZE; z++) {
                if (!dataStream.readBoolean()) {
                    continue;
                }
                int blockState = dataStream.readInt();
                short blockId = BlockFullState.getId(blockState);
                short blockData = BlockFullState.getData(blockState);
                blockId = (short) itemMap.convertTo(global, blockId);
                blockState = BlockFullState.getPacked(blockId, blockData);
                BlockMaterial m;
                try {
                    m = (BlockMaterial) MaterialRegistry.get(blockState);
                } catch (ClassCastException e) {
                    m = null;
                    if (!warning) {
                        Spout.getLogger().severe(
                                "Error reading column topmost block information, block was not a valid BlockMaterial");
                        warning = true; // only log this once per column
                    }
                }
                if (m == null) {
                    column.setDirty(x, z);
                }
                topmostBlocks[x][z] = m;
            }
        }

        // Read biomes
        BiomeManager manager = null;
        try {
            // The biome manager is serialized as:
            //  - boolean, if a biome manager exists
            //  - String, the class name
            //  - int, the number of bytes of data to read
            //  - byte[], size of the above int in length
            boolean exists = dataStream.readBoolean();
            if (exists) {
                String biomeManagerClass = dataStream.readUTF();
                int biomeSize = dataStream.readInt();
                byte[] biomes = new byte[biomeSize];
                dataStream.readFully(biomes);
                // Attempt to create the biome manager class from the class name
                @SuppressWarnings("unchecked")
                Class<? extends BiomeManager> clazz = (Class<? extends BiomeManager>) Class
                        .forName(biomeManagerClass);
                Class<?>[] params = { int.class, int.class };
                manager = clazz.getConstructor(params).newInstance(column.getX(), column.getZ());
                manager.deserialize(biomes);
                column.setBiomeManager(manager);
            }
        } catch (Exception e) {
            Spout.getLogger().log(Level.SEVERE, "Failed to read biome data for column", e);
        }
    } catch (IOException e) {
        Spout.getLogger()
                .severe("Error reading column height-map for column " + column.getX() + ", " + column.getZ());
    }
}
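The biome section above documents its own layout in comments: a boolean presence flag, a UTF-encoded class name, an int byte count, then exactly that many bytes read with readFully. A round-trip sketch of just that layout, with a hypothetical class name:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class BiomeBlockReader {
    // Reads the layout described above: boolean flag, UTF class name,
    // int length, then exactly `length` bytes of serialized data.
    static byte[] readBiomeBytes(DataInputStream in) throws IOException {
        if (!in.readBoolean()) {
            return null;                 // no biome manager was serialized
        }
        String className = in.readUTF(); // class to instantiate reflectively
        int size = in.readInt();
        byte[] biomes = new byte[size];
        in.readFully(biomes);            // all `size` bytes or EOFException
        System.out.println("biome manager class: " + className);
        return biomes;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);
        out.writeBoolean(true);
        out.writeUTF("com.example.FakeBiomeManager"); // hypothetical class name
        byte[] payload = { 1, 2, 3 };
        out.writeInt(payload.length);
        out.write(payload);
        byte[] read = readBiomeBytes(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
        System.out.println("read " + read.length + " biome bytes");
    }
}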
From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java
private String[] getStrings(S3Object sobj) throws IOException {
    this.s3clientLock.readLock().lock();
    try {
        boolean encrypt = false;
        boolean compress = false;
        boolean lz4compress = false;
        int cl = (int) sobj.getObjectMetadata().getContentLength();
        byte[] data = new byte[cl];
        DataInputStream in = null;
        try {
            in = new DataInputStream(sobj.getObjectContent());
            in.readFully(data);
        } catch (Exception e) {
            throw new IOException(e);
        } finally {
            try {
                in.close();
            } catch (Exception e) {
            }
        }
        Map<String, String> mp = this.getUserMetaData(sobj.getObjectMetadata());
        if (mp.containsKey("md5sum")) {
            try {
                byte[] shash = BaseEncoding.base64().decode(mp.get("md5sum"));
                byte[] chash;
                chash = ServiceUtils.computeMD5Hash(data);
                if (!Arrays.equals(shash, chash))
                    throw new IOException("download corrupt at " + sobj.getKey());
            } catch (NoSuchAlgorithmException e) {
                throw new IOException(e);
            }
        }
        int size = Integer.parseInt((String) mp.get("size"));
        if (mp.containsKey("encrypt")) {
            encrypt = Boolean.parseBoolean((String) mp.get("encrypt"));
        }
        if (mp.containsKey("compress")) {
            compress = Boolean.parseBoolean((String) mp.get("compress"));
        } else if (mp.containsKey("lz4compress")) {
            lz4compress = Boolean.parseBoolean((String) mp.get("lz4compress"));
        }
        byte[] ivb = null;
        if (mp.containsKey("ivspec"))
            ivb = BaseEncoding.base64().decode(mp.get("ivspec"));
        if (encrypt) {
            if (ivb != null)
                data = EncryptUtils.decryptCBC(data, new IvParameterSpec(ivb));
            else
                data = EncryptUtils.decryptCBC(data);
        }
        if (compress)
            data = CompressionUtils.decompressZLIB(data);
        else if (lz4compress) {
            data = CompressionUtils.decompressLz4(data, size);
        }
        String hast = new String(data);
        SDFSLogger.getLog().debug("reading hashes " + (String) mp.get("hashes") + " from " + sobj.getKey());
        String[] st = hast.split(",");
        return st;
    } finally {
        this.s3clientLock.readLock().unlock();
    }
}
From source file:de.huxhorn.lilith.swing.ApplicationPreferences.java
private void initIfNecessary(File file, String resourcePath, String historyBasePath, boolean overwriteAlways) {
    boolean delete = false;
    if (overwriteAlways) {
        delete = true;
    } else if (file.isFile()) {
        byte[] available = null;
        try {
            FileInputStream availableFile = new FileInputStream(file);
            available = getMD5(availableFile);
        } catch (FileNotFoundException e) {
            // ignore
        }
        byte[] current = getMD5(ApplicationPreferences.class.getResourceAsStream(resourcePath));
        if (Arrays.equals(available, current)) {
            // we are done already. The current version is the latest version.
            if (logger.isDebugEnabled())
                logger.debug("The current version of {} is also the latest version.", file.getAbsolutePath());
            return;
        }
        if (available != null) {
            // check older versions if available
            URL historyUrl = getClass().getResource(historyBasePath + "history.txt");
            if (historyUrl != null) {
                List<String> historyList = readLines(historyUrl);
                for (String currentLine : historyList) {
                    InputStream is = getClass().getResourceAsStream(historyBasePath + currentLine + ".md5");
                    if (is != null) {
                        DataInputStream dis = new DataInputStream(is);
                        byte[] checksum = new byte[16];
                        try {
                            dis.readFully(checksum);
                            if (Arrays.equals(available, checksum)) {
                                if (logger.isInfoEnabled())
                                    logger.info("Found old version of {}: {}", file.getAbsolutePath(),
                                            currentLine);
                                delete = true;
                                break;
                            }
                        } catch (IOException e) {
                            if (logger.isWarnEnabled())
                                logger.warn("Exception while reading checksum of {}!", currentLine, e);
                        } finally {
                            try {
                                dis.close();
                            } catch (IOException e) {
                                // ignore
                            }
                        }
                    }
                }
            }
        } else {
            // we couldn't calculate the checksum. Try to delete it...
            delete = true;
        }
    }
    URL resourceUrl = ApplicationPreferences.class.getResource(resourcePath);
    if (resourceUrl == null) {
        if (logger.isErrorEnabled())
            logger.error("Couldn't find resource {}!", resourcePath);
        return;
    }
    copy(resourceUrl, file, delete);
}
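Here readFully pulls exactly 16 bytes, the fixed size of an MD5 digest, from each bundled .md5 resource before comparing. A small sketch of that compare-against-stored-digest idea (helper names are ours):

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;

public class ChecksumCheck {
    // Read a stored 16-byte MD5 digest; readFully fails fast with
    // EOFException if the resource is shorter than a full digest.
    static byte[] readStoredMd5(InputStream is) throws IOException {
        try (DataInputStream dis = new DataInputStream(is)) {
            byte[] checksum = new byte[16]; // MD5 digests are always 16 bytes
            dis.readFully(checksum);
            return checksum;
        }
    }

    static boolean matches(byte[] data, InputStream storedDigest)
            throws IOException, NoSuchAlgorithmException {
        byte[] actual = MessageDigest.getInstance("MD5").digest(data);
        return Arrays.equals(actual, readStoredMd5(storedDigest));
    }
}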
From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java
public StringResult getStringResult(String key) throws IOException, InterruptedException {
    this.s3clientLock.readLock().lock();
    S3Object sobj = null;
    try {
        ObjectMetadata md = null;
        try {
            sobj = s3Service.getObject(getName(), key);
            md = s3Service.getObjectMetadata(this.name, key);
        } catch (Exception e) {
            throw new IOException(e);
        }
        int cl = (int) md.getContentLength();
        byte[] data = new byte[cl];
        DataInputStream in = null;
        try {
            in = new DataInputStream(sobj.getObjectContent());
            in.readFully(data);
        } catch (Exception e) {
            throw new IOException(e);
        } finally {
            if (in != null)
                in.close();
        }
        boolean encrypt = false;
        boolean compress = false;
        boolean lz4compress = false;
        Map<String, String> mp = this.getUserMetaData(md);
        byte[] ivb = null;
        if (mp.containsKey("ivspec")) {
            ivb = BaseEncoding.base64().decode(mp.get("ivspec"));
        }
        if (mp.containsKey("md5sum")) {
            try {
                byte[] shash = BaseEncoding.base64().decode(mp.get("md5sum"));
                byte[] chash = ServiceUtils.computeMD5Hash(data);
                if (!Arrays.equals(shash, chash))
                    throw new IOException("download corrupt at " + sobj.getKey());
            } catch (NoSuchAlgorithmException e) {
                throw new IOException(e);
            }
        }
        int size = Integer.parseInt(mp.get("size"));
        encrypt = Boolean.parseBoolean(mp.get("encrypt"));
        lz4compress = Boolean.parseBoolean(mp.get("lz4compress"));
        boolean changed = false;
        Long hid = EncyptUtils.decHashArchiveName(sobj.getKey().substring(5), encrypt);
        if (this.clustered)
            mp = s3Service.getObjectMetadata(this.name, this.getClaimName(hid)).getUserMetadata();
        if (mp.containsKey("deleted")) {
            mp.remove("deleted");
            changed = true;
        }
        if (mp.containsKey("deleted-objects")) {
            mp.remove("deleted-objects");
            changed = true;
        }
        if (encrypt) {
            if (ivb != null) {
                data = EncryptUtils.decryptCBC(data, new IvParameterSpec(ivb));
            } else {
                data = EncryptUtils.decryptCBC(data);
            }
        }
        if (compress)
            data = CompressionUtils.decompressZLIB(data);
        else if (lz4compress) {
            data = CompressionUtils.decompressLz4(data, size);
        }
        String hast = new String(data);
        SDFSLogger.getLog().debug("reading hashes " + (String) mp.get("objects") + " from " + hid + " encn "
                + sobj.getKey().substring(5));
        StringTokenizer ht = new StringTokenizer(hast, ",");
        StringResult st = new StringResult();
        st.id = hid;
        st.st = ht;
        if (mp.containsKey("bsize")) {
            HashBlobArchive.currentLength.addAndGet(Integer.parseInt(mp.get("bsize")));
        }
        if (mp.containsKey("bcompressedsize")) {
            HashBlobArchive.compressedLength.addAndGet(Integer.parseInt(mp.get("bcompressedsize")));
        }
        if (changed) {
            try {
                md = sobj.getObjectMetadata();
                md.setUserMetadata(mp);
                String kn = null;
                if (this.clustered)
                    kn = this.getClaimName(hid);
                else
                    kn = sobj.getKey();
                this.updateObject(kn, md);
            } catch (Exception e) {
                throw new IOException(e);
            }
        }
        return st;
    } finally {
        if (sobj != null)
            sobj.close();
        this.s3clientLock.readLock().unlock();
    }
}
From source file:org.apache.jackrabbit.core.persistence.bundle.util.BundleBinding.java
/**
 * Deserializes a <code>PropertyState</code> from the data input stream.
 *
 * @param in the input stream
 * @param id the property id for the new property entry
 * @return the property entry
 * @throws IOException if an I/O error occurs.
 */
public NodePropBundle.PropertyEntry readPropertyEntry(DataInputStream in, PropertyId id) throws IOException {
    NodePropBundle.PropertyEntry entry = new NodePropBundle.PropertyEntry(id);
    // type and modcount
    int type = in.readInt();
    entry.setModCount((short) ((type >> 16) & 0x0ffff));
    type &= 0x0ffff;
    entry.setType(type);
    // multiValued
    entry.setMultiValued(in.readBoolean());
    // definitionId
    in.readUTF();
    // values
    int count = in.readInt(); // count
    InternalValue[] values = new InternalValue[count];
    String[] blobIds = new String[count];
    for (int i = 0; i < count; i++) {
        InternalValue val;
        switch (type) {
        case PropertyType.BINARY:
            int size = in.readInt();
            if (size == BINARY_IN_DATA_STORE) {
                val = InternalValue.create(dataStore, in.readUTF());
            } else if (size == BINARY_IN_BLOB_STORE) {
                blobIds[i] = in.readUTF();
                try {
                    if (blobStore instanceof ResourceBasedBLOBStore) {
                        val = InternalValue
                                .create(((ResourceBasedBLOBStore) blobStore).getResource(blobIds[i]));
                    } else {
                        val = InternalValue.create(blobStore.get(blobIds[i]));
                    }
                } catch (IOException e) {
                    if (errorHandling.ignoreMissingBlobs()) {
                        log.warn("Ignoring error while reading blob-resource: " + e);
                        val = InternalValue.create(new byte[0]);
                    } else {
                        throw e;
                    }
                } catch (Exception e) {
                    throw new IOException("Unable to create property value: " + e.toString());
                }
            } else {
                // short values into memory
                byte[] data = new byte[size];
                in.readFully(data);
                val = InternalValue.create(data);
            }
            break;
        case PropertyType.DOUBLE:
            val = InternalValue.create(in.readDouble());
            break;
        case PropertyType.LONG:
            val = InternalValue.create(in.readLong());
            break;
        case PropertyType.BOOLEAN:
            val = InternalValue.create(in.readBoolean());
            break;
        case PropertyType.NAME:
            val = InternalValue.create(readQName(in));
            break;
        case PropertyType.REFERENCE:
            val = InternalValue.create(readUUID(in));
            break;
        default:
            // because writeUTF(String) has a size limit of 64k,
            // Strings are serialized as <length><byte[]>
            int len = in.readInt();
            byte[] bytes = new byte[len];
            in.readFully(bytes);
            val = InternalValue.valueOf(new String(bytes, "UTF-8"), type);
        }
        values[i] = val;
    }
    entry.setValues(values);
    entry.setBlobIds(blobIds);
    return entry;
}
From source file:org.apache.jackrabbit.core.persistence.bundle.util.BundleBinding.java
/**
 * Deserializes a <code>PropertyState</code> from the data input stream.
 *
 * @param in the input stream
 * @param id the property id for the new property entry
 * @return the property entry
 * @throws IOException if an I/O error occurs.
 */
public NodePropBundle.PropertyEntry readPropertyEntry(DataInputStream in, PropertyId id) throws IOException {
    NodePropBundle.PropertyEntry entry = new NodePropBundle.PropertyEntry(id);
    // type and modcount
    int type = in.readInt();
    entry.setModCount((short) ((type >> 16) & 0x0ffff));
    type &= 0x0ffff;
    entry.setType(type);
    // multiValued
    entry.setMultiValued(in.readBoolean());
    // definitionId
    entry.setPropDefId(PropDefId.valueOf(in.readUTF()));
    // values
    int count = in.readInt(); // count
    InternalValue[] values = new InternalValue[count];
    String[] blobIds = new String[count];
    for (int i = 0; i < count; i++) {
        InternalValue val;
        switch (type) {
        case PropertyType.BINARY:
            int size = in.readInt();
            if (size == BINARY_IN_DATA_STORE) {
                val = InternalValue.create(dataStore, in.readUTF());
            } else if (size == BINARY_IN_BLOB_STORE) {
                blobIds[i] = in.readUTF();
                try {
                    if (blobStore instanceof ResourceBasedBLOBStore) {
                        val = InternalValue
                                .create(((ResourceBasedBLOBStore) blobStore).getResource(blobIds[i]));
                    } else {
                        val = InternalValue.create(blobStore.get(blobIds[i]));
                    }
                } catch (IOException e) {
                    if (errorHandling.ignoreMissingBlobs()) {
                        log.warn("Ignoring error while reading blob-resource: " + e);
                        val = InternalValue.create(new byte[0]);
                    } else {
                        throw e;
                    }
                } catch (Exception e) {
                    throw new IOException("Unable to create property value: " + e.toString());
                }
            } else {
                // short values into memory
                byte[] data = new byte[size];
                in.readFully(data);
                val = InternalValue.create(data);
            }
            break;
        case PropertyType.DOUBLE:
            val = InternalValue.create(in.readDouble());
            break;
        case PropertyType.DECIMAL:
            val = InternalValue.create(readDecimal(in));
            break;
        case PropertyType.LONG:
            val = InternalValue.create(in.readLong());
            break;
        case PropertyType.BOOLEAN:
            val = InternalValue.create(in.readBoolean());
            break;
        case PropertyType.NAME:
            val = InternalValue.create(readQName(in));
            break;
        case PropertyType.WEAKREFERENCE:
        case PropertyType.REFERENCE:
            val = InternalValue.create(readID(in));
            break;
        default:
            // because writeUTF(String) has a size limit of 64k,
            // Strings are serialized as <length><byte[]>
            int len = in.readInt();
            byte[] bytes = new byte[len];
            in.readFully(bytes);
            val = InternalValue.valueOf(new String(bytes, "UTF-8"), type);
        }
        values[i] = val;
    }
    entry.setValues(values);
    entry.setBlobIds(blobIds);
    return entry;
}
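Both BundleBinding variants above fall back to <length><byte[]> framing for strings because DataOutputStream.writeUTF is limited to 65535 encoded bytes. A round-trip sketch of that workaround in isolation (method names are ours):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class LongStringCodec {
    // writeUTF is capped at 64k encoded bytes, so long strings are written
    // as an int length followed by the raw UTF-8 bytes.
    static void writeLongString(DataOutputStream out, String s) throws IOException {
        byte[] bytes = s.getBytes(StandardCharsets.UTF_8);
        out.writeInt(bytes.length);
        out.write(bytes);
    }

    static String readLongString(DataInputStream in) throws IOException {
        int len = in.readInt();
        byte[] bytes = new byte[len];
        in.readFully(bytes);            // exactly len bytes or EOFException
        return new String(bytes, StandardCharsets.UTF_8);
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        writeLongString(new DataOutputStream(buf), "no 64k limit here");
        String round = readLongString(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
        System.out.println(round);
    }
}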