List of usage examples for java.io DataInputStream readInt
public final int readInt() throws IOException
Reads four input bytes and returns an int value assembled in big-endian order (high byte first); this is DataInputStream's implementation of the readInt method of the DataInput interface.
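Before the project examples below, a minimal self-contained sketch (not drawn from any of these projects; the file name counts.bin is purely illustrative) showing readInt reading back values written with DataOutputStream.writeInt:

import java.io.*;

public class ReadIntExample {
    public static void main(String[] args) throws IOException {
        // Write a couple of ints so there is something to read back
        // ("counts.bin" is just an illustrative file name).
        try (DataOutputStream out = new DataOutputStream(new FileOutputStream("counts.bin"))) {
            out.writeInt(42);
            out.writeInt(-7);
        }
        // readInt() consumes exactly four bytes and assembles them high byte first;
        // it throws EOFException if fewer than four bytes remain.
        try (DataInputStream in = new DataInputStream(
                new BufferedInputStream(new FileInputStream("counts.bin")))) {
            System.out.println(in.readInt()); // 42
            System.out.println(in.readInt()); // -7
        }
    }
}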
From source file:org.veronicadb.core.memorygraph.storage.SimpleLocalFileStorageSink.java
@SuppressWarnings("resource") @Override/*from w w w .java 2 s. co m*/ public VSubGraph readGraphBlock(long graphId) throws VStorageFailureException { logger.info("Read requsted for graphid:" + graphId); VSubGraph graph = null; File[] files = storageDirectory.listFiles((dir, name) -> name.startsWith(graphId + "")); if (files.length == 0) { throw new VStorageFailureException(SimpleLocalFileStorageSink.class, "Invalid graphId:" + graphId + " can't read block from disk"); } logger.info("Found:" + files.length + " versions of shard for graphid:" + graphId); File latestDataFile = Arrays.asList(files).stream() .sorted((o1, o2) -> Long.compare(o2.lastModified(), o1.lastModified())).findFirst().get(); logger.info("Latest shard for graphid:" + graphId + " is " + latestDataFile.getName()); String flushTime = latestDataFile.getName().split("\\.")[0].split("_")[1]; DataInputStream stream = null; InputStream baseStream = null; try { baseStream = new BufferedInputStream(new FileInputStream(latestDataFile), BUFFER_SIZE); if (compress) { baseStream = decompressor.getDeclaredConstructor(InputStream.class).newInstance(baseStream); } stream = new DataInputStream(baseStream); } catch (FileNotFoundException e) { throw new VStorageFailureException(SimpleLocalFileStorageSink.class, "Graph block file doesn't exist for:" + graphId + " file:" + latestDataFile.getPath(), e); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) { throw new VStorageFailureException(SimpleLocalFileStorageSink.class, "Failed to initialize pluggable de-compressor", e); } try { long readGraphId = readGraphId(stream); int vertexCount = stream.readInt(); byte[] bloomBytes = null; if (getGraph() != null) { graph = getGraph().getGraphShard(readGraphId); // skip bloom bytes skipBloom(stream); } else { graph = new VSubGraph(readGraphId, vertexCount); bloomBytes = readGraphBloom(stream); } List<VVertex> vertices = readVertices(graph, stream); if (getGraph() != null) { graph.loadVertices(vertices); } else { graph.reinit(bloomBytes, vertices); } graph.getLastFlush().set(Long.parseLong(flushTime)); } catch (IOException e) { throw new VStorageFailureException(SimpleLocalFileStorageSink.class, "Failure to read graphId:" + graphId + " file:" + latestDataFile.getPath() + " from disk", e); } finally { try { stream.close(); baseStream.close(); } catch (IOException e) { throw new VStorageFailureException(SimpleLocalFileStorageSink.class, "Failed to close shard file stream", e); } } return graph; }
From source file:org.motechproject.mobile.web.OXDFormDownloadServlet.java
/**
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
@RequestMapping(method = RequestMethod.POST)
public void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    // Get our raw input and output streams
    InputStream input = request.getInputStream();
    OutputStream output = response.getOutputStream();
    // Wrap the streams for compression
    ZOutputStream zOutput = new ZOutputStream(output, JZlib.Z_BEST_COMPRESSION);
    // Wrap the streams so we can use logical types
    DataInputStream dataInput = new DataInputStream(input);
    DataOutputStream dataOutput = new DataOutputStream(zOutput);
    try {
        // Read the common submission data from mobile phone
        String name = dataInput.readUTF();
        String password = dataInput.readUTF();
        String serializer = dataInput.readUTF();
        String locale = dataInput.readUTF();
        byte action = dataInput.readByte();
        // TODO: add authentication, possible M6 enhancement
        log.info("downloading: name=" + name + ", password=" + password + ", serializer=" + serializer
                + ", locale=" + locale + ", action=" + action);
        EpihandyXformSerializer serObj = new EpihandyXformSerializer();
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // Perform the action specified by the mobile phone
        try {
            if (action == ACTION_DOWNLOAD_STUDY_LIST) {
                serObj.serializeStudies(baos, studyService.getStudies());
            } else if (action == ACTION_DOWNLOAD_USERS_AND_FORMS) {
                serObj.serializeUsers(baos, userService.getUsers());
                int studyId = dataInput.readInt();
                String studyName = studyService.getStudyName(studyId);
                List<String> studyForms = formService.getStudyForms(studyId);
                serObj.serializeForms(baos, studyForms, studyId, studyName);
            }
        } catch (Exception e) {
            dataOutput.writeByte(RESPONSE_ERROR);
            throw new ServletException("failed to serialize data", e);
        }
        // Write out successful upload response
        dataOutput.writeByte(RESPONSE_SUCCESS);
        dataOutput.write(baos.toByteArray());
        response.setStatus(HttpServletResponse.SC_OK);
    } finally {
        // Should always do this
        dataOutput.flush();
        zOutput.finish();
        response.flushBuffer();
    }
}
From source file:de.tum.frm2.nicos_android.nicos.NicosClient.java
public TupleOfTwo<Byte, Object> _read() throws ProtocolError {
    // receive first byte + (possibly) length
    DataInputStream din = new DataInputStream(socketIn);
    byte start;
    try {
        start = din.readByte();
    } catch (IOException e) {
        throw new ProtocolError("connection broken");
    }
    if (start == daemon.ACK) {
        // ACK == executed ok, no more information follows
        return new TupleOfTwo<>(start, null);
    }
    if (start != daemon.NAK && start != daemon.STX) {
        // Server responded with neither NAK (error) nor STX (ok)
        throw new ProtocolError("invalid response " + String.valueOf(start));
    }
    // it has a length...
    int length;
    try {
        length = din.readInt();
    } catch (IOException e) {
        throw new ProtocolError("connection broken");
    }
    // Cannot concat these two try blocks: We need length before allocating msg.
    // And msg needs to be declared outside of try block to be accessible afterwards.
    try {
        byte[] msg = new byte[length];
        try {
            din.readFully(msg, 0, length);
        } catch (IOException e) {
            throw new ProtocolError("connection broken");
        }
        Unpickler unpickler = new Unpickler();
        Object result = null;
        try {
            result = unpickler.loads(msg);
        } catch (Exception e) {
            // result stays at null.
            handle_error(e);
        }
        return new TupleOfTwo<>(start, result);
    } catch (OutOfMemoryError e) {
        throw new ProtocolError("bad response");
    }
}
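The NICOS client above uses readInt for classic length-prefixed framing: read a 4-byte length, then readFully exactly that many payload bytes. A stripped-down sketch of that pattern, independent of the NICOS daemon protocol (the class and method names here are only illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class LengthPrefixedFrames {
    // Write one frame: a 4-byte big-endian length followed by the payload.
    static byte[] encodeFrame(byte[] payload) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);
        out.writeInt(payload.length);
        out.write(payload);
        return bos.toByteArray();
    }

    // Read one frame back: readInt gives the length, readFully blocks until
    // the whole payload has arrived (or throws EOFException).
    static byte[] decodeFrame(DataInputStream in) throws IOException {
        int length = in.readInt();
        byte[] payload = new byte[length];
        in.readFully(payload);
        return payload;
    }

    public static void main(String[] args) throws IOException {
        byte[] frame = encodeFrame("hello".getBytes("UTF-8"));
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(frame));
        System.out.println(new String(decodeFrame(in), "UTF-8")); // hello
    }
}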
From source file:ClassFile.java
public boolean read(DataInputStream di, ConstantPoolInfo pool[]) throws IOException {
    int len;
    name = pool[di.readShort()];
    len = di.readInt();
    data = new byte[len];
    len = di.read(data);
    if (len != data.length)
        return (false);
    return (true);
}
From source file:org.apache.giraph.graph.BspServiceMaster.java
/**
 * Read the finalized checkpoint file and associated metadata files for the
 * checkpoint. Modifies the {@link PartitionOwner} objects to get the
 * checkpoint prefixes. It is an optimization to prevent all workers from
 * searching all the files. Also read in the aggregator data from the
 * finalized checkpoint file and setting it.
 *
 * @param superstep Checkpoint set to examine.
 * @param partitionOwners Partition owners to modify with checkpoint prefixes
 * @throws IOException
 * @throws InterruptedException
 * @throws KeeperException
 */
private void prepareCheckpointRestart(long superstep, Collection<PartitionOwner> partitionOwners)
        throws IOException, KeeperException, InterruptedException {
    FileSystem fs = getFs();
    List<Path> validMetadataPathList = new ArrayList<Path>();
    String finalizedCheckpointPath = getCheckpointBasePath(superstep) + CHECKPOINT_FINALIZED_POSTFIX;
    DataInputStream finalizedStream = fs.open(new Path(finalizedCheckpointPath));
    int prefixFileCount = finalizedStream.readInt();
    for (int i = 0; i < prefixFileCount; ++i) {
        String metadataFilePath = finalizedStream.readUTF() + CHECKPOINT_METADATA_POSTFIX;
        validMetadataPathList.add(new Path(metadataFilePath));
    }
    // Set the merged aggregator data if it exists.
    int aggregatorDataSize = finalizedStream.readInt();
    if (aggregatorDataSize > 0) {
        byte[] aggregatorZkData = new byte[aggregatorDataSize];
        int actualDataRead = finalizedStream.read(aggregatorZkData, 0, aggregatorDataSize);
        if (actualDataRead != aggregatorDataSize) {
            throw new RuntimeException("prepareCheckpointRestart: Only read " + actualDataRead + " of "
                    + aggregatorDataSize + " aggregator bytes from " + finalizedCheckpointPath);
        }
        String mergedAggregatorPath = getMergedAggregatorPath(getApplicationAttempt(), superstep - 1);
        if (LOG.isInfoEnabled()) {
            LOG.info("prepareCheckpointRestart: Reloading merged " + "aggregator " + "data '"
                    + Arrays.toString(aggregatorZkData) + "' to previous checkpoint in path "
                    + mergedAggregatorPath);
        }
        if (getZkExt().exists(mergedAggregatorPath, false) == null) {
            getZkExt().createExt(mergedAggregatorPath, aggregatorZkData, Ids.OPEN_ACL_UNSAFE,
                    CreateMode.PERSISTENT, true);
        } else {
            getZkExt().setData(mergedAggregatorPath, aggregatorZkData, -1);
        }
    }
    masterCompute.readFields(finalizedStream);
    finalizedStream.close();
    Map<Integer, PartitionOwner> idOwnerMap = new HashMap<Integer, PartitionOwner>();
    for (PartitionOwner partitionOwner : partitionOwners) {
        if (idOwnerMap.put(partitionOwner.getPartitionId(), partitionOwner) != null) {
            throw new IllegalStateException("prepareCheckpointRestart: Duplicate partition " + partitionOwner);
        }
    }
    // Reading the metadata files. Simply assign each partition owner
    // the correct file prefix based on the partition id.
    for (Path metadataPath : validMetadataPathList) {
        String checkpointFilePrefix = metadataPath.toString();
        checkpointFilePrefix = checkpointFilePrefix.substring(0,
                checkpointFilePrefix.length() - CHECKPOINT_METADATA_POSTFIX.length());
        DataInputStream metadataStream = fs.open(metadataPath);
        long partitions = metadataStream.readInt();
        for (long i = 0; i < partitions; ++i) {
            long dataPos = metadataStream.readLong();
            int partitionId = metadataStream.readInt();
            PartitionOwner partitionOwner = idOwnerMap.get(partitionId);
            if (LOG.isInfoEnabled()) {
                LOG.info("prepareSuperstepRestart: File " + metadataPath + " with position " + dataPos
                        + ", partition id = " + partitionId + " assigned to " + partitionOwner);
            }
            partitionOwner.setCheckpointFilesPrefix(checkpointFilePrefix);
        }
        metadataStream.close();
    }
}
From source file:bobs.is.compress.sevenzip.SevenZFile.java
private StartHeader readStartHeader(final long startHeaderCrc) throws IOException {
    final StartHeader startHeader = new StartHeader();
    DataInputStream dataInputStream = null;
    try {
        dataInputStream = new DataInputStream(new CRC32VerifyingInputStream(
                new BoundedRandomAccessFileInputStream(file, 20), 20, startHeaderCrc));
        startHeader.nextHeaderOffset = Long.reverseBytes(dataInputStream.readLong());
        startHeader.nextHeaderSize = Long.reverseBytes(dataInputStream.readLong());
        startHeader.nextHeaderCrc = 0xffffFFFFL & Integer.reverseBytes(dataInputStream.readInt());
        return startHeader;
    } finally {
        if (dataInputStream != null) {
            dataInputStream.close();
        }
    }
}
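Note on the example above: readInt always interprets its four bytes as big-endian, while the 7z start header stores its fields little-endian, hence the Integer.reverseBytes call and the 0xffffFFFFL mask that keeps the CRC unsigned. A minimal sketch of the same conversion on an in-memory stream (not part of the SevenZFile code):

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

public class LittleEndianReadInt {
    public static void main(String[] args) throws IOException {
        // 0x01 0x02 0x03 0x04 read big-endian is 0x01020304;
        // reversing the bytes yields the little-endian value 0x04030201.
        byte[] raw = {0x01, 0x02, 0x03, 0x04};
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(raw))) {
            int bigEndian = in.readInt();
            int littleEndian = Integer.reverseBytes(bigEndian);
            System.out.printf("big-endian: 0x%08X, little-endian: 0x%08X%n", bigEndian, littleEndian);
            // An unsigned 32-bit value (e.g. a CRC) can be widened with a mask:
            long unsigned = 0xffffFFFFL & littleEndian;
            System.out.println("as unsigned long: " + unsigned);
        }
    }
}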
From source file:ClassFile.java
/**
 * Read a class from InputStream <i>in</i>.
 */
public boolean read(InputStream in) throws IOException {
    DataInputStream di = new DataInputStream(in);
    int count;
    magic = di.readInt();
    if (magic != (int) 0xCAFEBABE) {
        return (false);
    }
    majorVersion = di.readShort();
    minorVersion = di.readShort();
    count = di.readShort();
    constantPool = new ConstantPoolInfo[count];
    if (debug)
        System.out.println("read(): Read header...");
    constantPool[0] = new ConstantPoolInfo();
    for (int i = 1; i < constantPool.length; i++) {
        constantPool[i] = new ConstantPoolInfo();
        if (!constantPool[i].read(di)) {
            return (false);
        }
        // These two types take up "two" spots in the table
        if ((constantPool[i].type == ConstantPoolInfo.LONG)
                || (constantPool[i].type == ConstantPoolInfo.DOUBLE))
            i++;
    }
    /*
     * Update pointers in the constant table. This turns the
     * table into a real datastructure.
     *
     * TODO: Have it verify that the right arguments are present
     */
    for (int i = 1; i < constantPool.length; i++) {
        if (constantPool[i] == null)
            continue;
        if (constantPool[i].index1 > 0)
            constantPool[i].arg1 = constantPool[constantPool[i].index1];
        if (constantPool[i].index2 > 0)
            constantPool[i].arg2 = constantPool[constantPool[i].index2];
    }
    if (dumpConstants) {
        for (int i = 1; i < constantPool.length; i++) {
            System.out.println("C" + i + " - " + constantPool[i]);
        }
    }
    accessFlags = di.readShort();
    thisClass = constantPool[di.readShort()];
    superClass = constantPool[di.readShort()];
    if (debug)
        System.out.println("read(): Read class info...");
    /*
     * Identify all of the interfaces implemented by this class
     */
    count = di.readShort();
    if (count != 0) {
        if (debug)
            System.out.println("Class implements " + count + " interfaces.");
        interfaces = new ConstantPoolInfo[count];
        for (int i = 0; i < count; i++) {
            int iindex = di.readShort();
            if ((iindex < 1) || (iindex > constantPool.length - 1))
                return (false);
            interfaces[i] = constantPool[iindex];
            if (debug)
                System.out.println("I" + i + ": " + interfaces[i]);
        }
    }
    if (debug)
        System.out.println("read(): Read interface info...");
    /*
     * Identify all fields in this class.
     */
    count = di.readShort();
    if (debug)
        System.out.println("This class has " + count + " fields.");
    if (count != 0) {
        fields = new FieldInfo[count];
        for (int i = 0; i < count; i++) {
            fields[i] = new FieldInfo();
            if (!fields[i].read(di, constantPool)) {
                return (false);
            }
            if (debug)
                System.out.println("F" + i + ": " + fields[i].toString(constantPool));
        }
    }
    if (debug)
        System.out.println("read(): Read field info...");
    /*
     * Identify all the methods in this class.
     */
    count = di.readShort();
    if (count != 0) {
        methods = new MethodInfo[count];
        for (int i = 0; i < count; i++) {
            methods[i] = new MethodInfo();
            if (!methods[i].read(di, constantPool)) {
                return (false);
            }
            if (debug)
                System.out.println("M" + i + ": " + methods[i].toString());
        }
    }
    if (debug)
        System.out.println("read(): Read method info...");
    /*
     * Identify all of the attributes in this class
     */
    count = di.readShort();
    if (count != 0) {
        attributes = new AttributeInfo[count];
        for (int i = 0; i < count; i++) {
            attributes[i] = new AttributeInfo();
            if (!attributes[i].read(di, constantPool)) {
                return (false);
            }
        }
    }
    if (debug) {
        System.out.println("read(): Read attribute info...");
        System.out.println("done.");
    }
    isValidClass = true;
    return (true);
}
From source file:com.alphabetbloc.accessmrs.services.SyncManager.java
private void insertPatients(DataInputStream zdis) throws Exception {
    long start = System.currentTimeMillis();
    SQLiteDatabase db = DbProvider.getDb();
    SimpleDateFormat output = new SimpleDateFormat("MMM dd, yyyy");
    SimpleDateFormat input = new SimpleDateFormat("yyyy-MM-dd");
    InsertHelper ih = new InsertHelper(db, DataModel.PATIENTS_TABLE);
    int ptIdIndex = ih.getColumnIndex(DataModel.KEY_PATIENT_ID);
    int ptIdentifierIndex = ih.getColumnIndex(DataModel.KEY_IDENTIFIER);
    int ptGivenIndex = ih.getColumnIndex(DataModel.KEY_GIVEN_NAME);
    int ptFamilyIndex = ih.getColumnIndex(DataModel.KEY_FAMILY_NAME);
    int ptMiddleIndex = ih.getColumnIndex(DataModel.KEY_MIDDLE_NAME);
    int ptBirthIndex = ih.getColumnIndex(DataModel.KEY_BIRTH_DATE);
    int ptGenderIndex = ih.getColumnIndex(DataModel.KEY_GENDER);
    db.beginTransaction();
    sLoopProgress.set(0);
    try {
        sLoopCount.set(zdis.readInt());
        if (App.DEBUG)
            Log.v(TAG, "insertPatients icount: " + sLoopCount);
        for (int i = 1; i < sLoopCount.get() + 1; i++) {
            ih.prepareForInsert();
            ih.bind(ptIdIndex, zdis.readInt());
            ih.bind(ptFamilyIndex, zdis.readUTF());
            ih.bind(ptMiddleIndex, zdis.readUTF());
            ih.bind(ptGivenIndex, zdis.readUTF());
            ih.bind(ptGenderIndex, zdis.readUTF());
            ih.bind(ptBirthIndex, parseDate(input, output, zdis.readUTF()));
            ih.bind(ptIdentifierIndex, zdis.readUTF());
            ih.execute();
            sLoopProgress.getAndIncrement();
        }
        db.setTransactionSuccessful();
    } finally {
        ih.close();
        db.endTransaction();
    }
    long end = System.currentTimeMillis();
    if (App.DEBUG)
        Log.v("SYNC BENCHMARK",
                String.format("Total Patients: \n%d\nTotal Time: \n%d\nRecords per second: \n%.2f",
                        sLoopCount.get(), (int) (end - start),
                        1000 * (double) sLoopCount.get() / (double) (end - start)));
}
From source file:com.alphabetbloc.accessmrs.services.SyncManager.java
private void insertPatientForms(final DataInputStream zdis) throws Exception {
    long start = System.currentTimeMillis();
    SimpleDateFormat output = new SimpleDateFormat("MMM dd, yyyy");
    SimpleDateFormat input = new SimpleDateFormat("yyyy-MM-dd");
    SQLiteDatabase db = DbProvider.getDb();
    InsertHelper ih = new InsertHelper(db, DataModel.OBSERVATIONS_TABLE);
    int ptIdIndex = ih.getColumnIndex(DataModel.KEY_PATIENT_ID);
    int obsTextIndex = ih.getColumnIndex(DataModel.KEY_VALUE_TEXT);
    int obsNumIndex = ih.getColumnIndex(DataModel.KEY_VALUE_NUMERIC);
    int obsDateIndex = ih.getColumnIndex(DataModel.KEY_VALUE_DATE);
    int obsIntIndex = ih.getColumnIndex(DataModel.KEY_VALUE_INT);
    int obsFieldIndex = ih.getColumnIndex(DataModel.KEY_FIELD_NAME);
    int obsTypeIndex = ih.getColumnIndex(DataModel.KEY_DATA_TYPE);
    int obsEncDateIndex = ih.getColumnIndex(DataModel.KEY_ENCOUNTER_DATE);
    db.beginTransaction();
    sLoopProgress.set(0);
    try {
        sLoopCount.set(zdis.readInt());
        if (App.DEBUG)
            Log.v(TAG, "insertPatientForms icount: " + sLoopCount);
        for (int i = 1; i < sLoopCount.get() + 1; i++) {
            ih.prepareForInsert();
            ih.bind(ptIdIndex, zdis.readInt());
            ih.bind(obsFieldIndex, zdis.readUTF());
            byte dataType = zdis.readByte();
            if (dataType == DataModel.TYPE_STRING) {
                ih.bind(obsTextIndex, zdis.readUTF());
            } else if (dataType == DataModel.TYPE_INT) {
                ih.bind(obsIntIndex, zdis.readInt());
            } else if (dataType == DataModel.TYPE_DOUBLE) {
                ih.bind(obsNumIndex, zdis.readDouble());
            } else if (dataType == DataModel.TYPE_DATE) {
                ih.bind(obsDateIndex, parseDate(input, output, zdis.readUTF()));
            }
            ih.bind(obsTypeIndex, dataType);
            ih.bind(obsEncDateIndex, parseDate(input, output, zdis.readUTF()));
            ih.execute();
            sLoopProgress.getAndIncrement();
        }
        db.setTransactionSuccessful();
    } finally {
        ih.close();
        db.endTransaction();
    }
    long end = System.currentTimeMillis();
    if (App.DEBUG)
        Log.v("SYNC BENCHMARK",
                String.format("Total Patient-Forms: \n%d\nTotal Time: \n%d\nRecords per second: \n%.2f",
                        sLoopCount.get(), (int) (end - start),
                        1000 * (double) sLoopCount.get() / (double) (end - start)));
}
From source file:com.alphabetbloc.accessmrs.services.SyncManager.java
private void insertObservations(DataInputStream zdis) throws Exception {
    long start = System.currentTimeMillis();
    SimpleDateFormat output = new SimpleDateFormat("MMM dd, yyyy");
    SimpleDateFormat input = new SimpleDateFormat("yyyy-MM-dd");
    SQLiteDatabase db = DbProvider.getDb();
    InsertHelper ih = new InsertHelper(db, DataModel.OBSERVATIONS_TABLE);
    int ptIdIndex = ih.getColumnIndex(DataModel.KEY_PATIENT_ID);
    int obsTextIndex = ih.getColumnIndex(DataModel.KEY_VALUE_TEXT);
    int obsNumIndex = ih.getColumnIndex(DataModel.KEY_VALUE_NUMERIC);
    int obsDateIndex = ih.getColumnIndex(DataModel.KEY_VALUE_DATE);
    int obsIntIndex = ih.getColumnIndex(DataModel.KEY_VALUE_INT);
    int obsFieldIndex = ih.getColumnIndex(DataModel.KEY_FIELD_NAME);
    int obsTypeIndex = ih.getColumnIndex(DataModel.KEY_DATA_TYPE);
    int obsEncDateIndex = ih.getColumnIndex(DataModel.KEY_ENCOUNTER_DATE);
    db.beginTransaction();
    sLoopProgress.set(0);
    int count = 0;
    try {
        sLoopCount.set(zdis.readInt());
        if (App.DEBUG)
            Log.v(TAG, "insertObservations icount: " + sLoopCount);
        for (int i = 1; i < sLoopCount.get() + 1; i++) {
            ih.prepareForInsert();
            ih.bind(ptIdIndex, zdis.readInt());
            ih.bind(obsFieldIndex, zdis.readUTF());
            byte dataType = zdis.readByte();
            if (dataType == DataModel.TYPE_STRING) {
                ih.bind(obsTextIndex, zdis.readUTF());
            } else if (dataType == DataModel.TYPE_INT) {
                ih.bind(obsIntIndex, zdis.readInt());
            } else if (dataType == DataModel.TYPE_DOUBLE) {
                ih.bind(obsNumIndex, zdis.readDouble());
            } else if (dataType == DataModel.TYPE_DATE) {
                ih.bind(obsDateIndex, parseDate(input, output, zdis.readUTF()));
            }
            ih.bind(obsTypeIndex, dataType);
            ih.bind(obsEncDateIndex, parseDate(input, output, zdis.readUTF()));
            ih.execute();
            count++;
            sLoopProgress.set(count * 2);
        }
        db.setTransactionSuccessful();
    } finally {
        ih.close();
        db.endTransaction();
    }
    long end = System.currentTimeMillis();
    if (App.DEBUG)
        Log.v("SYNC BENCHMARK",
                String.format("Total Observations: \n%d\nTotal Time: \n%d\nRecords per second: \n%.2f",
                        sLoopCount.get(), (int) (end - start),
                        1000 * (double) sLoopCount.get() / (double) (end - start)));
}