List of usage examples for java.io DataOutputStream writeInt
public final void writeInt(int v) throws IOException

Writes an int to the underlying output stream as four bytes, high byte first.
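A minimal sketch (not taken from any of the sources below) that makes this byte order visible by writing one value to a ByteArrayOutputStream and hex-dumping the result:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteIntDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);
        dos.writeInt(0x12345678);
        dos.flush();
        for (byte b : baos.toByteArray()) {
            System.out.printf("%02x ", b); // prints: 12 34 56 78 -- high byte first
        }
    }
}

From source file:org.sakaiproject.nakamura.auth.trusted.TokenStore.java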
/**
 * Save all the secureKeys to file.
 */
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "RV_RETURN_VALUE_IGNORED_BAD_PRACTICE", justification = "Could be injected from another bundle")
private void saveLocalSecretKeys() {
    FileOutputStream fout = null;
    DataOutputStream keyOutputStream = null;
    try {
        File parent = tokenFile.getAbsoluteFile().getParentFile();
        LOG.debug("Saving Local Secret Keys to {} ", tokenFile.getAbsoluteFile());
        if (!parent.exists()) {
            parent.mkdirs();
        }
        fout = new FileOutputStream(tmpTokenFile);
        keyOutputStream = new DataOutputStream(fout);
        keyOutputStream.writeInt(secretKeyId);
        keyOutputStream.writeLong(nextUpdate);
        for (int i = 0; i < secretKeyRingBuffer.length; i++) {
            if (secretKeyRingBuffer[i] == null) {
                keyOutputStream.writeInt(0);
            } else {
                keyOutputStream.writeInt(1);
                keyOutputStream.writeLong(secretKeyRingBuffer[i].getExpires());
                keyOutputStream.writeUTF(secretKeyRingBuffer[i].getServerId());
                byte[] b = secretKeyRingBuffer[i].getSecretKey().getEncoded();
                keyOutputStream.writeInt(b.length);
                keyOutputStream.write(b);
            }
        }
        keyOutputStream.close();
        if (!tmpTokenFile.renameTo(tokenFile)) {
            LOG.error("Failed to save cookie keys, rename of tokenFile failed. "
                + "Reload of secure token keys will fail while this is happening. ");
        }
    } catch (IOException e) {
        LOG.error("Failed to save cookie keys " + e.getMessage());
    } finally {
        try {
            keyOutputStream.close();
        } catch (Exception e) {
        }
        try {
            fout.close();
        } catch (Exception e) {
        }
    }
}
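A hypothetical loader for this on-disk format (a sketch only; ringBufferSize and the surrounding fields are assumptions, not part of the original class) would mirror each write call with the matching read:

DataInputStream in = new DataInputStream(new FileInputStream(tokenFile));
try {
    int secretKeyId = in.readInt();
    long nextUpdate = in.readLong();
    for (int i = 0; i < ringBufferSize; i++) {
        if (in.readInt() == 1) {                 // 1 == slot occupied, 0 == empty
            long expires = in.readLong();
            String serverId = in.readUTF();
            byte[] key = new byte[in.readInt()]; // length prefix written by writeInt
            in.readFully(key);
        }
    }
} finally {
    in.close();
}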
From source file:org.apache.fontbox.ttf.TTFSubsetter.java
/**
 * @param out The data output stream.
 * @param nTables The number of tables.
 * @return The file offset of the first TTF table to write.
 * @throws IOException Upon errors.
 */
private long writeFileHeader(DataOutputStream out, int nTables) throws IOException {
    out.writeInt(0x00010000);
    out.writeShort(nTables);
    int mask = Integer.highestOneBit(nTables);
    int searchRange = mask * 16;
    out.writeShort(searchRange);
    int entrySelector = log2(mask);
    out.writeShort(entrySelector);
    // numTables * 16 - searchRange
    int last = 16 * nTables - searchRange;
    out.writeShort(last);
    return 0x00010000L + toUInt32(nTables, searchRange) + toUInt32(entrySelector, last);
}
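The searchRange / entrySelector / rangeShift arithmetic follows the TrueType offset-subtable layout. A worked example under the assumption nTables = 10 (Integer.numberOfTrailingZeros stands in for the class's private log2 helper):

int nTables = 10;
int mask = Integer.highestOneBit(nTables);               // 8, largest power of two <= 10
int searchRange = mask * 16;                             // 128
int entrySelector = Integer.numberOfTrailingZeros(mask); // 3, equivalent to log2(mask)
int rangeShift = 16 * nTables - searchRange;             // 32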
From source file:org.carbondata.processing.restructure.SchemaRestructurer.java
private void writeLevelCardinalityFile(String loadFolderLoc, String tableName, int[] dimCardinality)
        throws KettleException {
    String levelCardinalityFilePath = loadFolderLoc + File.separator
        + CarbonCommonConstants.LEVEL_METADATA_FILE + tableName + ".metadata";
    DataOutputStream outstream = null;
    try {
        int dimCardinalityArrLength = dimCardinality.length;
        outstream = FileFactory.getDataOutputStream(levelCardinalityFilePath,
            FileFactory.getFileType(levelCardinalityFilePath));
        outstream.writeInt(dimCardinalityArrLength);
        for (int i = 0; i < dimCardinalityArrLength; i++) {
            outstream.writeInt(dimCardinality[i]);
        }
        LOGGER.info("Level cardinality file written to : " + levelCardinalityFilePath);
    } catch (IOException e) {
        LOGGER.error("Error while writing level cardinality file : " + levelCardinalityFilePath
            + e.getMessage());
        throw new KettleException("Not able to write level cardinality file", e);
    } finally {
        CarbonUtil.closeStreams(outstream);
    }
}
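A hypothetical reader (a sketch, not part of CarbonData) shows why the count is written first: it lets the reader size the array before the loop:

DataInputStream in = new DataInputStream(new FileInputStream(levelCardinalityFilePath));
try {
    int[] dimCardinality = new int[in.readInt()]; // leading count written above
    for (int i = 0; i < dimCardinality.length; i++) {
        dimCardinality[i] = in.readInt();
    }
} finally {
    in.close();
}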
From source file:br.org.indt.ndg.servlets.PostResults.java
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    log.info("Trying to process result stream from " + request.getRemoteAddr());
    DataOutputStream dataOutputStream = new DataOutputStream(response.getOutputStream());
    servletError = false;
    String results = Decompress(request);
    if (servletError) {
        dataOutputStream.writeInt(FAILURE);
        log.error("Failed processing result stream from " + request.getRemoteAddr());
    } else {
        if (results != null) {
            StringReader stringReader = new StringReader(results);
            BufferedReader bufferedReader = new BufferedReader(stringReader);
            StringBuffer stringBuffer = new StringBuffer();
            String line = bufferedReader.readLine();
            while ((line != null) && (!servletError)) {
                stringBuffer.append(line + '\n');
                if (line.trim().equals("</result>")) {
                    log.info("============= Result received by GPRS ============");
                    log.info(stringBuffer);
                    try {
                        msmBD.postResult(stringBuffer, createTransactionLogVO(request));
                        stringBuffer = new StringBuffer();
                        servletError = false;
                    } catch (Exception e) {
                        servletError = true;
                    }
                }
                line = bufferedReader.readLine();
            }
            if (servletError) {
                dataOutputStream.writeInt(FAILURE);
                log.error("Failed processing result stream from " + request.getRemoteAddr());
            } else {
                dataOutputStream.writeInt(SUCCESS);
                log.info("Successfully processed result stream from " + request.getRemoteAddr());
            }
            bufferedReader.close();
        } else {
            dataOutputStream.writeInt(SUCCESS);
            log.error("Failed processing stream from " + request.getRemoteAddr());
        }
    }
    dataOutputStream.close();
}
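On the client side, a sketch of reading that 4-byte status word back (servletUrl, SUCCESS, and the connection setup are assumptions mirroring the servlet's constants, not code from this project):

HttpURLConnection conn = (HttpURLConnection) new URL(servletUrl).openConnection();
conn.setDoOutput(true);
// ... post the compressed result stream ...
DataInputStream in = new DataInputStream(conn.getInputStream());
int status = in.readInt(); // matches the servlet's writeInt(SUCCESS/FAILURE)
in.close();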
From source file:org.commoncrawl.service.crawlhistory.CrawlHistoryServer.java
private void serializeBloomFilter(Path checkpointPath) throws IOException {
    FileSystem fs = CrawlEnvironment.getDefaultFileSystem();
    // delete existing ...
    fs.delete(checkpointPath, false);
    FSDataOutputStream outputStream = fs.create(checkpointPath);
    try {
        DataOutputStream dataOut = new DataOutputStream(outputStream);
        dataOut.writeInt(0); // version
        dataOut.writeInt(_state.getCurrentCrawlNumber()); // crawl number ...
        // serialize bloom filter contents ...
        _bloomFilter.serialize(outputStream);
    } finally {
        if (outputStream != null) {
            outputStream.flush();
            outputStream.close();
        }
    }
}
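Note that dataOut wraps outputStream without adding any buffering of its own, so interleaving writeInt calls on the wrapper with _bloomFilter.serialize(outputStream) on the raw stream keeps the bytes in order. A hypothetical reader of this checkpoint layout (a sketch, not CommonCrawl code):

FSDataInputStream in = fs.open(checkpointPath);
try {
    DataInputStream dataIn = new DataInputStream(in);
    int version = dataIn.readInt();     // 0 in the format written above
    int crawlNumber = dataIn.readInt();
    // bloom filter contents follow, deserialized from the raw stream ...
} finally {
    in.close();
}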
From source file:com.zpci.firstsignhairclipdemo.MainActivity.java
public void savewavefile(byte[] ra) {
    // prepend 44 byte wave header to data
    int sampleRate = 8000;                // audio sample rate is 8000 SPS
    int numSecs = ra.length / sampleRate; // number of seconds of audio to record
    int samples = sampleRate * numSecs;   // number of samples in file
    short bitsPerSample = 8;              // one byte per sample
    int filesize = samples + 44;          // check this?
    int fmtChunkSize = 16;                // size of 'fmt' chunk
    short channels = 1;                   // mono
    int byteRate = sampleRate * channels * bitsPerSample / 8;  // will be 8K for us
    short format = 1;                     // 1 == uncompressed pcm
    short blockalign = (short) (channels * bitsPerSample / 8); // bytes per sample
    int audiolen = samples * channels * bitsPerSample / 8;     // length of audio in bytes
    try {
        //OutputStream os = openFileOutput("diagaudio.wav", Context.MODE_PRIVATE);
        String state = Environment.getExternalStorageState();
        Log.d(TAG, "External storage state: " + state);
        if (Environment.MEDIA_MOUNTED.equals(state)) {
            // create firstsign directory
            File rootPath = new File(Environment.getExternalStorageDirectory(), "firstsign");
            if (!rootPath.exists()) {
                rootPath.mkdirs();
                Log.d(TAG, "mkdirs");
            }
            File file = new File(rootPath, "hairclipaudio.wav");
            file.createNewFile();
            OutputStream os = new FileOutputStream(file);
            BufferedOutputStream bos = new BufferedOutputStream(os);
            DataOutputStream wf = new DataOutputStream(bos);
            wf.write("RIFF".getBytes());
            wf.writeInt(Integer.reverseBytes(filesize - 8));
            wf.write("WAVE".getBytes());
            wf.write("fmt ".getBytes());
            wf.writeInt(Integer.reverseBytes(fmtChunkSize));
            wf.writeShort(Short.reverseBytes(format));
            wf.writeShort(Short.reverseBytes(channels));
            wf.writeInt(Integer.reverseBytes(sampleRate));
            wf.writeInt(Integer.reverseBytes(byteRate));
            wf.writeShort(Short.reverseBytes(blockalign));
            wf.writeShort(Short.reverseBytes(bitsPerSample));
            wf.write("data".getBytes());
            wf.writeInt(Integer.reverseBytes(audiolen));
            wf.write(ra);
            wf.close();
            bos.close();
            os.close();
            Log.d(TAG, "wavefile write complete");
        } else {
            Toast.makeText(this, "SDCard not mounted", Toast.LENGTH_LONG).show();
        }
        //what do i do?
    } catch (Exception e) {
        Log.e(TAG, "exception in savewavefile");
        e.printStackTrace();
    }
}
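writeInt always emits big-endian bytes, while RIFF/WAV headers are little-endian; that is what the Integer.reverseBytes and Short.reverseBytes calls compensate for. An alternative sketch using java.nio (shown for the sampleRate field only, under the same variable names) targets little-endian directly:

ByteBuffer bb = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN);
bb.putInt(sampleRate);  // 8000 -> bytes 40 1F 00 00 on the wire
wf.write(bb.array());   // no reverseBytes needed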
From source file:au.org.ala.spatial.util.RecordsSmall.java
private void makeUniquePoints() throws Exception {
    // make unique points and index
    points = new RandomAccessFile(filename + "records.csv.small.points", "r");
    double[] allPoints = getPointsAll();
    Coord[] p = new Coord[allPoints.length / 2];
    for (int i = 0; i < allPoints.length; i += 2) {
        p[i / 2] = new Coord(allPoints[i], allPoints[i + 1], i / 2);
    }
    allPoints = null; // make available to GC
    Arrays.sort(p, new Comparator<Coord>() {
        public int compare(Coord o1, Coord o2) {
            return o1.longitude == o2.longitude
                ? (o1.latitude == o2.latitude ? 0 : (o1.latitude - o2.latitude > 0.0 ? 1 : -1))
                : (o1.longitude - o2.longitude > 0.0 ? 1 : -1);
        }
    });
    DataOutputStream outputUniquePoints = new DataOutputStream(
        new BufferedOutputStream(new FileOutputStream(filename + "records.csv.small.pointsUniquePoints")));
    DataOutputStream outputUniqueIdx = new DataOutputStream(
        new BufferedOutputStream(new FileOutputStream(filename + "records.csv.small.pointsUniqueIdx")));
    int pos = -1; // first point is set after pos++
    int[] newPos = new int[p.length];
    for (int i = 0; i < p.length; i++) {
        if (i == 0 || p[i].latitude != p[i - 1].latitude || p[i].longitude != p[i - 1].longitude) {
            outputUniquePoints.writeDouble(p[i].latitude);
            outputUniquePoints.writeDouble(p[i].longitude);
            pos++;
        }
        newPos[p[i].pos] = pos;
    }
    for (int i = 0; i < p.length; i++) {
        outputUniqueIdx.writeInt(newPos[i]);
    }
    outputUniqueIdx.flush();
    outputUniqueIdx.close();
    outputUniquePoints.flush();
    outputUniquePoints.close();
    points.close();
}
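A hypothetical reader of the index file this produces (a sketch only, reusing the filename convention above): entry i, read back with readInt, is the record's position in the unique-points file, where each unique point occupies two writeDouble values, i.e. 16 bytes:

DataInputStream idx = new DataInputStream(new BufferedInputStream(
    new FileInputStream(filename + "records.csv.small.pointsUniqueIdx")));
try {
    int uniquePos = idx.readInt();           // index for record 0
    long offset = uniquePos * 16L;           // byte offset of its lat/lon pair
} finally {
    idx.close();
}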
From source file:org.apache.sshd.server.sftp.SftpSubsystem.java
protected void send(Buffer buffer) throws IOException {
    DataOutputStream dos = new DataOutputStream(out);
    dos.writeInt(buffer.available());
    dos.write(buffer.array(), buffer.rpos(), buffer.available());
    dos.flush();
}
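This is classic length-prefixed framing. The receiving side (a sketch, not the actual SSHD client code) recovers the message boundary with the matching readInt and readFully:

DataInputStream dis = new DataInputStream(in);
int length = dis.readInt();     // the 4-byte length written above
byte[] payload = new byte[length];
dis.readFully(payload);         // blocks until the whole frame arrives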
From source file:com.linkedin.pinot.core.startree.OffHeapStarTreeBuilder.java
private void appendToBuffer(DataOutputStream dos, DimensionBuffer dimensions, MetricBuffer metricHolder)
        throws IOException {
    for (int i = 0; i < numDimensions; i++) {
        dos.writeInt(dimensions.getDimension(i));
    }
    dos.write(metricHolder.toBytes(metricSizeBytes));
}
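Because every record has the same width, numDimensions * 4 bytes of dimensions plus metricSizeBytes of metrics, the record index alone determines a file offset; a sketch of that arithmetic:

long recordSize = numDimensions * 4L + metricSizeBytes; // 4 bytes per writeInt
long offsetOfRecord = recordIndex * recordSize;         // usable for random access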
From source file:org.apache.hadoop.hbase.util.RegionMover.java
/**
 * Write the number of regions moved in the first line followed by regions moved in subsequent
 * lines.
 * @param filename
 * @param movedRegions
 * @throws IOException
 */
private void writeFile(String filename, List<HRegionInfo> movedRegions) throws IOException {
    FileOutputStream fos = null;
    DataOutputStream dos = null;
    try {
        fos = new FileOutputStream(filename);
        dos = new DataOutputStream(fos);
        dos.writeInt(movedRegions.size());
        for (HRegionInfo region : movedRegions) {
            Bytes.writeByteArray(dos, region.toByteArray());
        }
    } catch (IOException e) {
        LOG.error("ERROR: Was Not able to write regions moved to output file but moved "
            + movedRegions.size() + " regions", e);
        throw e;
    } finally {
        if (dos != null) {
            dos.close();
        }
        if (fos != null) {
            fos.close();
        }
    }
}
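A hypothetical reader of the file this produces (a sketch only; RegionMover has its own loading code): the leading readInt gives the region count, and HBase's Bytes.readByteArray consumes the length prefix that Bytes.writeByteArray wrote:

DataInputStream dis = new DataInputStream(new FileInputStream(filename));
try {
    int numRegions = dis.readInt();
    for (int i = 0; i < numRegions; i++) {
        byte[] regionBytes = Bytes.readByteArray(dis); // vInt length + payload
        // HRegionInfo.parseFrom(regionBytes) would rebuild each HRegionInfo
    }
} finally {
    dis.close();
}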