List of usage examples for java.io.DataInputStream.available()
public int available() throws IOException
From source file:org.kualigan.maven.plugins.api.DefaultPrototypeHelper.java
/** * Puts temporary pom in the system temp directory. prototype-pom.xml is extracted * from the plugin.// ww w .j a v a2 s . co m */ public void extractTempPom() throws MojoExecutionException { getCaller().getLog().info("Extracting the Temp Pom"); final InputStream pom_is = getClass().getClassLoader().getResourceAsStream("prototype-resources/pom.xml"); byte[] fileBytes = null; try { final DataInputStream dis = new DataInputStream(pom_is); fileBytes = new byte[dis.available()]; dis.readFully(fileBytes); dis.close(); } catch (Exception e) { throw new MojoExecutionException("Wasn't able to read in the prototype pom", e); } finally { try { pom_is.close(); } catch (Exception e) { // Ignore exceptions } } try { final FileOutputStream fos = new FileOutputStream( System.getProperty("java.io.tmpdir") + File.separator + "prototype-pom.xml"); try { fos.write(fileBytes); } finally { fos.close(); } } catch (Exception e) { throw new MojoExecutionException("Could not write temporary pom file", e); } }
From source file:SafeUTF.java
public String safeReadUTF(DataInputStream in) throws IOException { boolean isNull = in.readByte() == NULL; if (isNull) { return null; }//from ww w .j a v a 2s . co m short numChunks = in.readShort(); int bufferSize = chunkSize * numChunks; // special handling for single chunk if (numChunks == 1) { // The text size is likely to be much smaller than the chunkSize // so set bufferSize to the min of the input stream available // and the maximum buffer size. Since the input stream // available() can be <= 0 we check for that and default to // a small msg size of 256 bytes. int inSize = in.available(); if (inSize <= 0) { inSize = 256; } bufferSize = Math.min(inSize, bufferSize); lastReadBufferSize = bufferSize; } StringBuffer buff = new StringBuffer(bufferSize); for (int i = 0; i < numChunks; i++) { String s = in.readUTF(); buff.append(s); } return buff.toString(); }
From source file:net.sergetk.mobile.lcdui.BitmapFont.java
/**
 * Creates a new font from the resource. The capacity of the color cache
 * defines the maximum size of the color cache.
 *
 * @param fontPath
 *            the resource name of the binary font file
 * @param colorCacheCapacity
 *            the maximum color cache size
 */
public BitmapFont(String fontPath, int colorCacheCapacity) {
    this.style = Font.STYLE_PLAIN;
    this.currentColor = 0;
    this.colorCache = new CacheEntry[colorCacheCapacity];
    this.colorUsageCounts = new IntHashMap(colorCacheCapacity * 2);
    try {
        // NOTE(review): resource is resolved relative to java.lang.Object's
        // class, not this class — confirm this is intentional for the target
        // (J2ME) classloading environment.
        InputStream input = new Object().getClass().getResourceAsStream(fontPath);
        if (input == null) {
            throw new IOException();
        }
        DataInputStream data = new DataInputStream(input);
        // NOTE(review): available() is only an estimate of the bytes readable
        // without blocking; using it as the total stream length (for the
        // pngOffset computation below) assumes an in-memory resource stream —
        // confirm for jar-packed resources.
        int streamLen = data.available();
        this.fontFilePath = fontPath;
        // Fixed-size header: version, metrics, then the character map string.
        this.version = data.readByte();
        this.height = data.readByte();
        this.baseline = data.readByte();
        this.xIndent = data.readByte();
        this.yIndent = data.readByte();
        this.spaceWidth = data.readByte();
        characterMap = data.readUTF();
        int count = characterMap.length();
        // Read per-character widths (one signed byte each).
        this.widths = new int[count];
        this.x = new int[count];
        this.y = new int[count];
        for (int i = 0; i < count; i++) {
            widths[i] = data.readByte();
        }
        baseImage = null;
        // The original file format supported multiple images per font file.
        // That is no longer used, but to keep the encoding unchanged this
        // byte (formerly the number of PNGs in the file) is skipped.
        data.skipBytes(1);
        short pngLen = data.readShort();
        byte[] buffer = new byte[pngLen];
        // NOTE(review): read() may fill the buffer only partially; readFully()
        // would guarantee pngLen bytes — confirm whether partial reads can
        // occur on the target platform.
        data.read(buffer, 0, pngLen);
        this.pngOffset = (short) (streamLen - pngLen);
        baseImage = Image.createImage(buffer, 0, pngLen);
        currentImage = baseImage;
        // Calculate character coordinates within the font image.  A negative
        // width is an alias: it points (by absolute value) at the index of
        // another character whose glyph metrics are reused.
        int curX = 0, curY = 0;
        for (int i = 0; i < count; i++) {
            if (widths[i] < 0) {
                int sourceIndex = -widths[i];
                widths[i] = widths[sourceIndex];
                x[i] = x[sourceIndex];
                y[i] = y[sourceIndex];
            } else {
                x[i] = curX;
                y[i] = curY;
                curX += widths[i];
            }
        }
        // First successfully loaded font becomes the process-wide default.
        if (defaultFont == null)
            defaultFont = this;
    } catch (IOException e) {
        // Log.warn("IOException reading font: ", e);
        System.err.println("IOException reading font: " + e.getMessage());
        e.printStackTrace();
    }
}
From source file:org.mrgeo.vector.mrsvector.VectorTile.java
/**
 * Scans blob headers from the stream until the OSM header block is found
 * and returns its parsed contents.
 *
 * @param stream OSM PBF input; wrapped in a DataInputStream when necessary
 * @return the parsed header block, or null if the stream ends without one
 * @throws IOException on read failures
 */
protected static HeaderBlock readOSMHeader(final InputStream stream) throws IOException {
    final DataInputStream in = (stream instanceof DataInputStream)
            ? (DataInputStream) stream
            : new DataInputStream(stream);

    while (in.available() > 0) {
        final BlobHeader blockHeader = parseHeader(in);
        if (blockHeader.getType().equals(OSMHEADER)) {
            final CodedInputStream payload = CodedInputStream.newInstance(parseBlob(in, blockHeader));
            return HeaderBlock.parseFrom(payload);
        }
    }
    return null;
}
From source file:com.isecpartners.gizmo.HttpRequest.java
/**
 * Appends bytes from the stream to {@code contents} (one char per byte)
 * until the HTTP header terminator {@code "\r\n\r\n"} has been seen or no
 * more bytes are immediately available.
 *
 * @param buffered stream positioned at the start of the data to consume
 * @param contents accumulator the characters are appended to
 */
private void readerToStringBuffer(DataInputStream buffered, StringBuffer contents) {
    try {
        // Use read() (which returns -1 only at end-of-stream) instead of
        // readByte(): readByte() returns a signed byte, so a legitimate
        // 0xFF byte compares equal to -1 and would end the loop early, and
        // it throws EOFException at end-of-stream rather than returning a
        // sentinel value.
        int ch = buffered.read();
        while (ch != -1) {
            contents.append((char) ch);
            // Stop at the end of the HTTP headers, or when the stream has
            // nothing immediately available (avoids blocking on a socket).
            if ((contents.indexOf("\r\n\r\n") != -1) || (buffered.available() == 0)) {
                break;
            }
            ch = buffered.read();
        }
    } catch (Exception ex) {
        System.out.println(ex);
    }
}
From source file:org.siphon.d2js.EmbedSqlTranslator.java
/**
 * Locates the source line containing character offset {@code pos} in
 * {@code code} and formats it as {@code "LN <line>:<column> <line text>"}
 * (both line and column are 1-based).
 *
 * @param code the full source text being scanned
 * @param pos  character offset into {@code code} to locate
 * @return a human-readable position string for error reporting
 */
private String getLineNear(String code, int pos) {
    // NOTE(review): assumes IOUtils.toInputStream yields an in-memory stream
    // whose available() equals the exact number of unread bytes — that is
    // what makes the offset arithmetic below valid.  Also assumes the code
    // is single-byte-per-char in the default charset; multi-byte characters
    // would skew the offsets — confirm.
    DataInputStream inputStream = new DataInputStream(IOUtils.toInputStream(code));
    String line = null;
    int lineHead = 0;  // offset of the start of the most recently read line
    int lineIndex = 0; // 1-based number of the most recently read line
    try {
        // Keep consuming lines while the remaining bytes still cover pos,
        // i.e. until the line containing pos has been read.
        while (inputStream.available() >= code.length() - pos) {
            lineHead = code.length() - inputStream.available();
            // NOTE(review): DataInputStream.readLine() is deprecated (does
            // not convert bytes to chars properly); BufferedReader would be
            // the usual replacement.
            line = inputStream.readLine();
            lineIndex++;
        }
    } catch (IOException e) {
        // Intentionally ignored: an in-memory stream is not expected to
        // fail, and a best-effort position string is still returned.
    }
    String s = "LN " + lineIndex + ":" + (pos - lineHead + 1) + " " + line;
    return s;
}
From source file:org.kalypso.kalypsomodel1d2d.sim.ResultManager.java
/**
 * Post-processes a SWAN tabular result file by shifting its X/Y coordinate
 * columns (tokens 1 and 2 of each data row) by the offsets read from the
 * shift file.  The original file is moved aside to a {@code .bck} backup and
 * a rewritten copy is produced in its place; comment lines (starting with
 * {@code %}) are copied through unchanged.
 *
 * @param swanResOutTabFile the SWAN tabular output file to rewrite in place
 * @param swanResShiftFile  file holding the coordinate shift values
 */
private void processSWANTabFile(final FileObject swanResOutTabFile, final FileObject swanResShiftFile) {
    final GM_Position lShiftPosition = SWANDataConverterHelper.readCoordinateShiftValues(swanResShiftFile);
    // No shift information — nothing to do.
    if (lShiftPosition == null) {
        return;
    }
    try {
        if (swanResOutTabFile.isContentOpen()) {
            swanResOutTabFile.close();
        }
        // Move the original aside; the rewritten content goes to the
        // original path.
        final FileObject swanResOutTabFileBackUp = swanResOutTabFile.getParent()
                .resolveFile(swanResOutTabFile.getName().getBaseName() + ".bck"); //$NON-NLS-1$
        swanResOutTabFile.moveTo(swanResOutTabFileBackUp);
        final OutputStream lOutStream = swanResOutTabFile.getContent().getOutputStream();
        final DataInputStream lInDataStream = new DataInputStream(
                swanResOutTabFileBackUp.getContent().getInputStream());
        BufferedReader streamReader = new BufferedReader(new InputStreamReader(lInDataStream));
        // Locale.US keeps the decimal separator a '.' regardless of platform.
        final Formatter lFormatter = new Formatter(lOutStream, Charset.defaultCharset().name(), Locale.US);
        // NOTE(review): loop condition uses available() on the underlying
        // DataInputStream while reading through a BufferedReader that may
        // have buffered ahead — assumes a local-file stream where this still
        // terminates correctly; confirm for non-local VFS providers.
        while (lInDataStream.available() != 0) {
            final String lStrTmpLine = streamReader.readLine().trim();
            // Comment/header lines are copied through verbatim.
            if (lStrTmpLine.startsWith("%")) { //$NON-NLS-1$
                lFormatter.format("%s\n", lStrTmpLine); //$NON-NLS-1$
                continue;
            }
            final StringTokenizer lStrTokenizer = new StringTokenizer(lStrTmpLine, " "); //$NON-NLS-1$
            int lIntTokenCounter = 0;
            String lStrNewLine = ""; //$NON-NLS-1$
            while (lStrTokenizer.hasMoreTokens()) {
                final String lStrToken = lStrTokenizer.nextToken();
                if (lIntTokenCounter == 1) {
                    // Token 1 is the X coordinate: apply the X shift.
                    lStrNewLine += String.format(Locale.US, "%.5f\t", //$NON-NLS-1$
                            NumberUtils.parseQuietDouble(lStrToken) + lShiftPosition.getX());
                } else if (lIntTokenCounter == 2) {
                    // Token 2 is the Y coordinate: apply the Y shift.
                    lStrNewLine += String.format(Locale.US, "%.5f\t", //$NON-NLS-1$
                            NumberUtils.parseQuietDouble(lStrToken) + lShiftPosition.getY());
                } else {
                    // All other columns pass through unchanged.
                    lStrNewLine += lStrToken + "\t"; //$NON-NLS-1$
                }
                lIntTokenCounter++;
            }
            lFormatter.format("%s\n", lStrNewLine); //$NON-NLS-1$
        }
        lFormatter.close();
        lInDataStream.close();
        lOutStream.close();
    } catch (final Exception e) {
        // NOTE(review): all failures are silently swallowed, leaving the
        // backup file in place and possibly a partially written output —
        // consider at least logging the exception.
        return;
    }
    return;
}
From source file:org.pentaho.di.job.entries.folderscompare.JobEntryFoldersCompare.java
/** * Check whether 2 files have the same contents. * * @param file1//w w w.j ava 2s . co m * first file to compare * @param file2 * second file to compare * @return true if files are equal, false if they are not * * @throws IOException * upon IO problems */ protected boolean equalFileContents(FileObject file1, FileObject file2) throws KettleFileException { // Really read the contents and do comparisons DataInputStream in1 = null; DataInputStream in2 = null; try { // Really read the contents and do comparisons in1 = new DataInputStream( new BufferedInputStream(KettleVFS.getInputStream(KettleVFS.getFilename(file1), this))); in2 = new DataInputStream( new BufferedInputStream(KettleVFS.getInputStream(KettleVFS.getFilename(file2), this))); char ch1, ch2; while (in1.available() != 0 && in2.available() != 0) { ch1 = (char) in1.readByte(); ch2 = (char) in2.readByte(); if (ch1 != ch2) { return false; } } if (in1.available() != in2.available()) { return false; } else { return true; } } catch (IOException e) { throw new KettleFileException(e); } finally { if (in1 != null) { try { in1.close(); } catch (IOException ignored) { // Nothing to see here... } } if (in2 != null) { try { in2.close(); } catch (Exception ignored) { // We can't do anything else here... } } } }
From source file:org.mrgeo.vector.mrsvector.VectorTileCleaner.java
void read(final InputStream stream) throws IOException { DataInputStream dis; if (stream instanceof DataInputStream) { dis = (DataInputStream) stream; } else {/*from w ww . java 2 s .c o m*/ dis = new DataInputStream(stream); } while (dis.available() > 0) { final BlobHeader header = parseHeader(dis); // final InputStream blob =; final CodedInputStream blob = CodedInputStream.newInstance(parseBlob(dis, header)); // for speed, we make _large_ tiles, we don't want protobuf to complain... blob.setSizeLimit(MAX_STREAM_SIZE); if (header.getType().equals(OSMDATA)) { parseOSM(blob); } } organize(); }
From source file:org.commoncrawl.service.crawlhistory.CrawlHistoryServer.java
/**
 * RPC handler: folds a batch of URL fingerprints (pairs of VLong-encoded
 * domain hash + url hash) from the request buffer into the server's bloom
 * filter and bumps the processed-items counter.
 *
 * @param rpcContext carries the input fingerprint buffer and receives the
 *                   completion status
 * @throws RPCException per the RPC interface contract
 */
@Override
public void bulkUpdateHistory(AsyncContext<BulkUpdateData, NullMessage> rpcContext) throws RPCException {
    LOG.info("Received BulkUpdate Request");
    ImmutableBuffer inputBuffer = rpcContext.getInput().getFingerprintList();
    // NOTE(review): when the buffer is empty the method returns without
    // calling completeRequest() — confirm the RPC framework completes such
    // requests elsewhere.
    if (inputBuffer.getCount() != 0) {
        try {
            if (_bloomFilter == null) {
                throw new IOException("BloomFilter Not Initilized. Invalid Server State!");
            }
            DataInputStream inputStream = new DataInputStream(
                    new ByteArrayInputStream(inputBuffer.getReadOnlyBytes(), 0, inputBuffer.getCount()));
            // Single reusable fingerprint object: the bloom filter only hashes
            // its current field values, so per-item allocation is unnecessary.
            URLFPV2 fingerprint = new URLFPV2();
            int itemsAdded = 0;
            // available() is exact here (the backing stream is an in-memory
            // ByteArrayInputStream), so this loop runs until the buffer is
            // fully consumed.
            while (inputStream.available() != 0) {
                fingerprint.setDomainHash(WritableUtils.readVLong(inputStream));
                fingerprint.setUrlHash(WritableUtils.readVLong(inputStream));
                _bloomFilter.add(fingerprint);
                ++itemsAdded;
            }
            _urlsProcessedSinceCheckpoint.addAndGet(itemsAdded);
            LOG.info("Finished Processed BulkUpdate Request. " + itemsAdded + " items processed.");
        } catch (IOException e) {
            LOG.error(CCStringUtils.stringifyException(e));
            rpcContext.setStatus(Status.Error_RequestFailed);
            rpcContext.setErrorDesc(CCStringUtils.stringifyException(e));
        }
        rpcContext.completeRequest();
    }
}