List of usage examples for the java.io DataInputStream constructor
public DataInputStream(InputStream in)
From source file:com.datatorrent.stram.FSRecoveryHandler.java
@Override public DataInputStream getLog() throws IOException { if (fs.exists(logBackupPath)) { // restore state prior to log replay throw new AssertionError("Restore state prior to reading log: " + logBackupPath); }/* w ww . j av a 2 s . com*/ if (fs.exists(logPath)) { LOG.debug("Opening existing log ({})", logPath); return fs.open(logPath); } else { LOG.debug("No existing log ({})", logPath); return new DataInputStream(new ByteArrayInputStream(new byte[] {})); } }
From source file:joshelser.LimitAndSumColumnsBase.java
protected long countSerialized(Iterable<Entry<Key, Value>> data) throws IOException { long observedSum = 0; long numResults = 0l; for (Entry<Key, Value> result : data) { numResults++;// w w w .j a va 2 s . c o m ByteArrayInputStream bais = new ByteArrayInputStream(result.getValue().get()); observedSum += WritableUtils.readVLong(new DataInputStream(bais)); } System.out.println("Number of results to sum: " + numResults); return observedSum; }
From source file:com.bonsai.wallet32.HDWallet.java
public static JSONObject deserialize(WalletApplication walletApp, KeyCrypter keyCrypter, KeyParameter aesKey) throws IOException, InvalidCipherTextException, JSONException { File file = walletApp.getHDWalletFile(null); String path = file.getPath(); try {/*from w w w . j a v a2 s . co m*/ mLogger.info("restoring HDWallet from " + path); int len = (int) file.length(); // Open persisted file. DataInputStream dis = new DataInputStream(new FileInputStream(file)); // Read IV from file. byte[] iv = new byte[KeyCrypterGroestl.BLOCK_LENGTH/*KeyCrypterScrypt.BLOCK_LENGTH*/]; dis.readFully(iv); // Read the ciphertext from the file. byte[] cipherBytes = new byte[len - iv.length]; dis.readFully(cipherBytes); dis.close(); // Decrypt the ciphertext. ParametersWithIV keyWithIv = new ParametersWithIV(new KeyParameter(aesKey.getKey()), iv); BufferedBlockCipher cipher = new PaddedBufferedBlockCipher(new CBCBlockCipher(new AESFastEngine())); cipher.init(false, keyWithIv); int minimumSize = cipher.getOutputSize(cipherBytes.length); byte[] outputBuffer = new byte[minimumSize]; int length1 = cipher.processBytes(cipherBytes, 0, cipherBytes.length, outputBuffer, 0); int length2 = cipher.doFinal(outputBuffer, length1); int actualLength = length1 + length2; byte[] decryptedBytes = new byte[actualLength]; System.arraycopy(outputBuffer, 0, decryptedBytes, 0, actualLength); // Parse the decryptedBytes. String jsonstr = new String(decryptedBytes); /* // THIS CONTAINS THE SEED! // Have to break the message into chunks for big messages ... 
String msg = jsonstr; while (msg.length() > 1024) { String chunk = msg.substring(0, 1024); mLogger.error(chunk); msg = msg.substring(1024); } mLogger.error(msg); */ JSONObject node = new JSONObject(jsonstr); return node; } catch (IOException ex) { mLogger.warn("trouble reading " + path + ": " + ex.toString()); throw ex; } catch (RuntimeException ex) { mLogger.warn("trouble restoring wallet: " + ex.toString()); throw ex; } catch (InvalidCipherTextException ex) { mLogger.warn("wallet decrypt failed: " + ex.toString()); throw ex; } }
From source file:J2MESortMixedRecordDataTypeExample.java
/**
 * RecordComparator callback: orders two serialized RMS records by an int
 * field that follows a leading UTF string in each record.
 *
 * Each record is expected to have been written as writeUTF(...) then
 * writeInt(...); the UTF value is read and discarded, and only the int
 * values are compared.
 *
 * @param record1 first serialized record
 * @param record2 second serialized record
 * @return RecordComparator.EQUIVALENT, PRECEDES, or FOLLOWS
 */
public int compare(byte[] record1, byte[] record2) {
    int record1int, record2int;
    try {
        // NOTE(review): this resize of comparatorInputData appears unused
        // below — both streams wrap record1/record2 directly. Possibly a
        // leftover from an earlier implementation; confirm before removing,
        // since the field may be shared with other methods of this class.
        int maxlen = Math.max(record1.length, record2.length);
        if (maxlen > comparatorInputData.length) {
            comparatorInputData = new byte[maxlen];
        }
        // Skip the leading UTF string of record1, then read its int key.
        comparatorInputStream = new ByteArrayInputStream(record1);
        comparatorInputDataType = new DataInputStream(comparatorInputStream);
        comparatorInputDataType.readUTF();
        record1int = comparatorInputDataType.readInt();
        // Same for record2.
        comparatorInputStream = new ByteArrayInputStream(record2);
        comparatorInputDataType = new DataInputStream(comparatorInputStream);
        comparatorInputDataType.readUTF();
        record2int = comparatorInputDataType.readInt();
        if (record1int == record2int) {
            return RecordComparator.EQUIVALENT;
        } else if (record1int < record2int) {
            return RecordComparator.PRECEDES;
        } else {
            return RecordComparator.FOLLOWS;
        }
    } catch (Exception error) {
        // Malformed records are deliberately treated as equal; compare()
        // cannot propagate exceptions to the record store.
        return RecordComparator.EQUIVALENT;
    }
}
From source file:com.orange.ocara.model.export.docx.DocxWriterTest.java
/** * Determine whether a file is a ZIP File. *///from w ww . ja v a 2 s .co m private static boolean isZipFile(File file) throws IOException { if (file.isDirectory()) { return false; } if (!file.canRead()) { throw new IOException("Cannot read file " + file.getAbsolutePath()); } if (file.length() < 4) { return false; } DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(file))); int test = in.readInt(); in.close(); return test == 0x504b0304; }
From source file:it.infn.ct.InstantiateVM.java
/**
 * Creates an OCCI resource described by {@code properties}: either a compute
 * VM (RESOURCE=compute) or a storage volume (RESOURCE=storage).
 *
 * For compute, applies OS and resource template mixins, optionally attaches an
 * SSH public key and base64-encoded user data read from local files, then
 * submits the resource and returns its location URI as a string.
 *
 * NOTE(review): egiInput is never used in this method; the four locals below
 * are also never used — confirm whether they are leftovers before removing.
 *
 * @param properties configuration (RESOURCE, OCCI_* keys, file paths)
 * @param eb         entity builder used to instantiate resources
 * @param model      OCCI model used to resolve mixins
 * @param client     client used to submit the create request
 * @param egiInput   unused (see note above)
 * @return the created compute resource's location, or "" for storage or no match
 */
public static String doCreate(Properties properties, EntityBuilder eb, Model model, Client client,
        JSONObject egiInput) {
    URI uri_location = null;
    String networkInterfaceLocation = "";
    String networkInterfaceLocation_stripped = "";
    Resource vm_resource = null;
    try {
        if (properties.getProperty("RESOURCE").equals("compute")) {
            // Template identifiers are the fragment after the last '#'.
            String segments[] = properties.getProperty("OCCI_OS_TPL").split("#");
            String OCCI_OS_TPL = segments[segments.length - 1];
            String segments2[] = properties.getProperty("OCCI_RESOURCE_TPL").split("#");
            String OCCI_RESOURCE_TPL = segments2[segments2.length - 1];
            System.out.println("[+] Creating a new compute Virtual Machine (VM)");
            // Creating a compute instance
            Resource compute = eb.getResource("compute");
            Mixin mixin = model.findMixin(OCCI_OS_TPL);
            compute.addMixin(mixin);
            // NOTE(review): the OS template mixin is added twice — once
            // resolved without a category (above) and once scoped to
            // "os_tpl" (below). Confirm whether both additions are needed.
            compute.addMixin(model.findMixin(OCCI_OS_TPL, "os_tpl"));
            compute.addMixin(model.findMixin(OCCI_RESOURCE_TPL, "resource_tpl"));
            // Checking the context
            if (properties.getProperty("PUBLIC_KEY_FILE") != null
                    && !properties.getProperty("PUBLIC_KEY_FILE").isEmpty()) {
                // Strip any "scheme:" prefix from the configured path.
                String _public_key_file = properties.getProperty("PUBLIC_KEY_FILE")
                        .substring(properties.getProperty("PUBLIC_KEY_FILE").lastIndexOf(":") + 1);
                File f = new File(_public_key_file);
                FileInputStream fis = new FileInputStream(f);
                DataInputStream dis = new DataInputStream(fis);
                byte[] keyBytes = new byte[(int) f.length()];
                dis.readFully(keyBytes);
                dis.close();
                String _publicKey = new String(keyBytes).trim();
                // Add SSH public key
                compute.addMixin(model
                        .findMixin(URI.create("http://schemas.openstack.org/instance/credentials#public_key")));
                compute.addAttribute("org.openstack.credentials.publickey.data", _publicKey);
                // Add the name for the public key
                // NOTE(review): the guard tests the OCCI_PUBLICKEY_NAME field
                // but the value comes from the property of the same name —
                // confirm the two are kept in sync.
                if (OCCI_PUBLICKEY_NAME != null && !OCCI_PUBLICKEY_NAME.isEmpty())
                    compute.addAttribute("org.openstack.credentials.publickey.name",
                            properties.getProperty("OCCI_PUBLICKEY_NAME"));
            }
            if (properties.getProperty("USER_DATA") != null && !properties.getProperty("USER_DATA").isEmpty()) {
                String _user_data = properties.getProperty("USER_DATA")
                        .substring(properties.getProperty("USER_DATA").lastIndexOf(":") + 1);
                File f = new File(_user_data);
                FileInputStream fis = new FileInputStream(f);
                DataInputStream dis = new DataInputStream(fis);
                byte[] keyBytes = new byte[(int) f.length()];
                dis.readFully(keyBytes);
                dis.close();
                // Cloud-init user data must be submitted base64-encoded.
                byte[] data = Base64.encodeBase64(keyBytes);
                String user_data = new String(data);
                compute.addMixin(
                        model.findMixin(URI.create("http://schemas.openstack.org/compute/instance#user_data")));
                compute.addAttribute("org.openstack.compute.user_data", user_data);
            }
            // Set VM title
            compute.setTitle(properties.getProperty("OCCI_CORE_TITLE"));
            URI location = client.create(compute);
            return location.toString();
        }
        if (properties.getProperty("RESOURCE").equals("storage")) {
            System.out.println("[+] Creating a volume storage");
            // Creating a storage instance
            Storage storage = eb.getStorage();
            storage.setTitle(properties.getProperty("OCCI_CORE_TITLE"));
            storage.setSize(properties.getProperty("OCCI_STORAGE_SIZE"));
            URI storageLocation = client.create(storage);
            List<URI> list = client.list("storage");
            // NOTE(review): storageURIs is built but never used, and the
            // storage branch falls through to return "" instead of the new
            // volume's location — confirm whether that is intentional.
            List<URI> storageURIs = new ArrayList<URI>();
            for (URI uri : list) {
                if (uri.toString().contains("storage"))
                    storageURIs.add(uri);
            }
            System.out.println("URI = " + storageLocation);
        }
    } catch (FileNotFoundException ex) {
        throw new RuntimeException(ex);
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    } catch (EntityBuildingException | AmbiguousIdentifierException | InvalidAttributeValueException
            | CommunicationException ex) {
        throw new RuntimeException(ex);
    }
    return "";
}
From source file:com.limegroup.gnutella.metadata.ASFParser.java
/**
 * Parses an ASF input stream's metadata.
 * This first checks that the marker (16 bytes) is correct, reads the data
 * offset & object count, and then iterates through the objects, reading them.
 * Each object is stored in the format:
 * <xmp>
 *    ObjectID (16 bytes)
 *    Object Size (8 bytes, little-endian — the code subtracts the 24-byte
 *                 ID+size header below, which implies an 8-byte size field)
 *    Object (Object Size bytes)
 * </xmp>
 *
 * @param is raw ASF stream positioned at the start of the header
 * @throws IOException if the stream is not ASF, is corrupt, or reading fails
 */
private void parse(InputStream is) throws IOException {
    // Count bytes consumed by readObject so unread remainders can be skipped.
    CountingInputStream counter = new CountingInputStream(is);
    DataInputStream ds = new DataInputStream(counter);
    byte[] marker = new byte[IDs.HEADER_ID.length];
    ds.readFully(marker);
    if (!Arrays.equals(marker, IDs.HEADER_ID))
        throw new IOException("not an ASF file");
    long dataOffset = ByteUtils.leb2long(ds);
    int objectCount = ByteUtils.leb2int(ds);
    IOUtils.ensureSkip(ds, 2); // skip two bytes that follow the object count
    if (LOG.isDebugEnabled())
        LOG.debug("Data Offset: " + dataOffset + ", objectCount: " + objectCount);
    // Sanity checks against corrupt (or hostile) input before looping.
    if (dataOffset < 0)
        throw new IOException("ASF file is corrupt. Data offset negative:" + dataOffset);
    if (objectCount < 0)
        throw new IOException(
                "ASF file is corrupt. Object count unreasonable:" + ByteUtils.uint2long(objectCount));
    if (objectCount > 100)
        throw new IOException("object count very high: " + objectCount);
    byte[] object = new byte[16]; // reused buffer for each object's 16-byte ID
    for (int i = 0; i < objectCount; i++) {
        if (LOG.isDebugEnabled())
            LOG.debug("Parsing object[" + i + "]");
        ds.readFully(object);
        // Stored size includes the 24-byte object header (16-byte ID + 8-byte size).
        long size = ByteUtils.leb2long(ds) - 24;
        if (size < 0)
            throw new IOException("ASF file is corrupt. Object size < 0 :" + size);
        counter.clearAmountRead();
        readObject(ds, object, size);
        int read = counter.getAmountRead();
        if (read > size)
            throw new IOException("read (" + read + ") more than size (" + size + ")");
        else if (read != size) {
            // readObject consumed less than the object body; skip the rest so
            // the stream stays aligned with the next object header.
            if (LOG.isDebugEnabled())
                LOG.debug("Skipping to next object. Read: " + read + ", size: " + size);
            IOUtils.ensureSkip(ds, size - read);
        }
    }
}
From source file:io.dacopancm.socketdcm.net.StreamSocket.java
public String getStreamFile() { String list = "false"; try {/*from ww w . j a v a 2 s .co m*/ if (ConectType.GET_STREAM.name().equalsIgnoreCase(method)) { logger.log(Level.INFO, "get stream socket call"); sock = new Socket(ip, port); DataOutputStream dOut = new DataOutputStream(sock.getOutputStream()); DataInputStream dIn = new DataInputStream(sock.getInputStream()); dOut.writeUTF(ConectType.STREAM.name());//send stream type dOut.writeUTF("uncompressed");//send comprimido o no dOut.writeUTF(ConectType.GET_STREAM.toString()); //send type dOut.writeUTF(streamid); dOut.flush(); // Send off the data String rtspId = dIn.readUTF(); dOut.close(); logger.log(Level.INFO, "resp get stream file: {0}", rtspId); list = rtspId; } } catch (IOException ex) { HelperUtil.showErrorB("No se pudo establecer conexin"); logger.log(Level.INFO, "get stream file error no connect:{0}", ex.getMessage()); try { if (sock != null) { sock.close(); } } catch (IOException e) { } } return list; }
From source file:fr.inria.soctrace.framesoc.tutorials.importer.TutorialParser.java
private boolean parseRawTrace(IProgressMonitor monitor) throws SoCTraceException { try {// w ww. j a v a 2s . c o m // Init boolean partialImport = false; numberOfEvents = 0; page = 0; eventList.clear(); // Open the file BufferedReader br = new BufferedReader( new InputStreamReader(new DataInputStream(new FileInputStream(traceFile)))); String[] line; String strLine; // Read a line while ((strLine = br.readLine()) != null) { // Remove extra white spaces strLine = strLine.trim(); if (strLine.isEmpty()) continue; // Split the line with the default separator line = strLine.split(CSV_SEPARATOR); switch (line[0]) { // Event Producer case EVENT_PRODUCER_TAG: createEventProducer(line); break; // Event case EVENT_TAG: createEvent(line); break; default: // Ignore continue; } if (eventList.size() == PAGE_SIZE) page++; if (eventList.size() >= PAGE_SIZE) { // Save the current batch of events saveEvents(eventList); // Increase the number of processed events numberOfEvents += eventList.size(); eventList.clear(); // Check the monitor status if (monitor.isCanceled()) { partialImport = true; break; } } } // Close the file reader if (br != null) br.close(); // Save the remaining events if (eventList.size() > 0) { saveEvents(eventList); numberOfEvents += eventList.size(); eventList.clear(); } return partialImport; } catch (Exception e) { throw new SoCTraceException(e); } }
From source file:com.fliaping.trip.warc.lemurproject.WarcFileRecordReader.java
/**
 * Closes the currently open input (if any) and advances to the next file in
 * filePathList, opening it and, when a compression codec is configured and
 * the file name ends in "gz", wrapping it in a decompressing stream.
 *
 * @return true if the next file was opened successfully; false when the list
 *         is exhausted or an I/O error occurred
 */
private boolean openNextFile() {
    try {
        // compressionInput wraps currentFile, so closing it also releases the
        // underlying stream; otherwise close currentFile directly.
        if (compressionInput != null) {
            compressionInput.close();
        } else if (currentFile != null) {
            currentFile.close();
        }
        currentFile = null;
        compressionInput = null;
        currentFilePath++;
        if (currentFilePath >= filePathList.length) {
            return false;
        }
        currentFile = this.fs.open(filePathList[currentFilePath]);
        // is the file gzipped?
        if ((compressionCodec != null) && (filePathList[currentFilePath].getName().endsWith("gz"))) {
            compressionInput = new DataInputStream(compressionCodec.createInputStream(currentFile));
            LOG.info("Compression enabled");
        }
    } catch (IOException ex) {
        // NOTE(review): if close() above threw, currentFilePath has not been
        // incremented yet, so the logged path names the previous file — verify
        // that is acceptable.
        LOG.info("IOError opening " + filePathList[currentFilePath].toString() + " - message: "
                + ex.getMessage());
        return false;
    }
    return true;
}