List of usage examples for the java.io.DataInput.readByte() method
byte readByte() throws IOException;
From source file:org.apache.hama.graph.GraphJobMessage.java
/**
 * Deserializes this message from the given stream. The leading flag byte
 * selects which message variant follows; each is*Message() predicate below
 * presumably inspects that flag — TODO confirm against the flag constants.
 *
 * @param in source stream positioned at the start of a serialized message
 * @throws IOException if the underlying stream fails
 */
@Override
public void readFields(DataInput in) throws IOException {
    // The first byte tells us which payload layout to expect.
    flag = in.readByte();
    if (isVertexMessage()) {
        // Vertex message: a vertex id followed by a vertex value.
        vertexId = ReflectionUtils.newInstance(VERTEX_ID_CLASS, null);
        vertexId.readFields(in);
        vertexValue = ReflectionUtils.newInstance(VERTEX_VALUE_CLASS, null);
        vertexValue.readFields(in);
    } else if (isMapMessage()) {
        // Map message: a single MapWritable payload.
        map = new MapWritable();
        map.readFields(in);
    } else if (isPartitioningMessage()) {
        // Partitioning message: a full vertex — id, optional value
        // (boolean-prefixed), then an edge list (count-prefixed).
        Vertex<Writable, Writable, Writable> vertex = GraphJobRunner.newVertexInstance(VERTEX_CLASS, null);
        Writable vertexId = ReflectionUtils.newInstance(VERTEX_ID_CLASS, null);
        vertexId.readFields(in);
        vertex.setVertexID(vertexId);
        // A boolean marker precedes the (optional) vertex value.
        if (in.readBoolean()) {
            Writable vertexValue = ReflectionUtils.newInstance(VERTEX_VALUE_CLASS, null);
            vertexValue.readFields(in);
            vertex.setValue(vertexValue);
        }
        // Edge count, then each edge as: id + boolean-prefixed optional value.
        int size = in.readInt();
        vertex.setEdges(new ArrayList<Edge<Writable, Writable>>(size));
        for (int i = 0; i < size; i++) {
            Writable edgeVertexID = ReflectionUtils.newInstance(VERTEX_ID_CLASS, null);
            edgeVertexID.readFields(in);
            Writable edgeValue = null;
            if (in.readBoolean()) {
                edgeValue = ReflectionUtils.newInstance(EDGE_VALUE_CLASS, null);
                edgeValue.readFields(in);
            }
            vertex.getEdges().add(new Edge<Writable, Writable>(edgeVertexID, edgeValue));
        }
        this.vertex = vertex;
    } else if (isVerticesSizeMessage()) {
        // Vertices-size message: a single IntWritable count.
        vertices_size = new IntWritable();
        vertices_size.readFields(in);
    } else if (isBoundaryVertexSizeMessage()) {
        // Boundary-vertex-size message: a single IntWritable count.
        boundaryVertex_size = new IntWritable();
        boundaryVertex_size.readFields(in);
    } else {
        // Fallback: only a vertex id is present.
        vertexId = ReflectionUtils.newInstance(VERTEX_ID_CLASS, null);
        vertexId.readFields(in);
    }
}
From source file:org.apache.hama.ml.ann.NeuralNetwork.java
@SuppressWarnings({ "rawtypes", "unchecked" }) @Override// w w w . j a v a2 s.c o m public void readFields(DataInput input) throws IOException { // read model type this.modelType = WritableUtils.readString(input); // read learning rate this.learningRate = input.readDouble(); // read model path this.modelPath = WritableUtils.readString(input); if (this.modelPath.equals("null")) { this.modelPath = null; } // read feature transformer int bytesLen = input.readInt(); byte[] featureTransformerBytes = new byte[bytesLen]; for (int i = 0; i < featureTransformerBytes.length; ++i) { featureTransformerBytes[i] = input.readByte(); } Class<? extends FeatureTransformer> featureTransformerCls = (Class<? extends FeatureTransformer>) SerializationUtils .deserialize(featureTransformerBytes); Constructor[] constructors = featureTransformerCls.getDeclaredConstructors(); Constructor constructor = constructors[0]; try { this.featureTransformer = (FeatureTransformer) constructor.newInstance(new Object[] {}); } catch (InstantiationException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } catch (IllegalArgumentException e) { e.printStackTrace(); } catch (InvocationTargetException e) { e.printStackTrace(); } }
From source file:org.apache.hama.ml.perception.SmallMultiLayerPerceptron.java
@SuppressWarnings("rawtypes") @Override/*from w ww .j a v a2 s .co m*/ public void readFields(DataInput input) throws IOException { this.MLPType = WritableUtils.readString(input); this.learningRate = input.readDouble(); this.regularization = input.readDouble(); this.momentum = input.readDouble(); this.numberOfLayers = input.readInt(); this.squashingFunctionName = WritableUtils.readString(input); this.costFunctionName = WritableUtils.readString(input); this.squashingFunction = FunctionFactory.createDoubleFunction(this.squashingFunctionName); this.costFunction = FunctionFactory.createDoubleDoubleFunction(this.costFunctionName); // read the number of neurons for each layer this.layerSizeArray = new int[this.numberOfLayers]; for (int i = 0; i < numberOfLayers; ++i) { this.layerSizeArray[i] = input.readInt(); } this.weightMatrice = new DenseDoubleMatrix[this.numberOfLayers - 1]; for (int i = 0; i < numberOfLayers - 1; ++i) { this.weightMatrice[i] = (DenseDoubleMatrix) MatrixWritable.read(input); } // read feature transformer int bytesLen = input.readInt(); byte[] featureTransformerBytes = new byte[bytesLen]; for (int i = 0; i < featureTransformerBytes.length; ++i) { featureTransformerBytes[i] = input.readByte(); } Class featureTransformerCls = (Class) SerializationUtils.deserialize(featureTransformerBytes); Constructor constructor = featureTransformerCls.getConstructors()[0]; try { this.featureTransformer = (FeatureTransformer) constructor.newInstance(new Object[] {}); } catch (InstantiationException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } catch (IllegalArgumentException e) { e.printStackTrace(); } catch (InvocationTargetException e) { e.printStackTrace(); } }
From source file:org.apache.hawq.pxf.service.io.GPDBWritable.java
/**
 * Deserializes one GPDBWritable record. The wire format is:
 * 4-byte packet length, 2-byte version, optional 1-byte error flag (current
 * version only), 2-byte column count, one type byte per column, a null
 * bitmap, then each non-null column value with type-specific alignment
 * padding, and trailing padding up to an 8-byte boundary.
 * curOffset mirrors the number of bytes consumed so padding can be computed.
 *
 * @param in source stream positioned at the start of a record
 * @throws IOException on stream failure, version mismatch, or unknown types
 */
@Override
public void readFields(DataInput in) throws IOException {
    /*
     * extract pkt len.
     *
     * GPSQL-1107:
     * The DataInput might already be empty (EOF), but we can't check it beforehand.
     * If that's the case, pktlen is updated to -1, to mark that the object is still empty.
     * (can be checked with isEmpty()).
     */
    pktlen = readPktLen(in);
    if (isEmpty()) {
        return;
    }
    /* extract the version and col cnt */
    int version = in.readShort();
    int curOffset = 4 + 2; // 4-byte pktlen + 2-byte version already consumed
    int colCnt;
    /* !!! Check VERSION !!! */
    if (version != GPDBWritable.VERSION && version != GPDBWritable.PREV_VERSION) {
        throw new IOException("Current GPDBWritable version(" + GPDBWritable.VERSION
                + ") does not match input version(" + version + ")");
    }
    // the error flag byte exists only in the current version of the format
    if (version == GPDBWritable.VERSION) {
        errorFlag = in.readByte();
        curOffset += 1;
    }
    colCnt = in.readShort();
    curOffset += 2;
    /* Extract Column Type: one byte (a DBType ordinal) per column, mapped to
     * both the OID (colType) and the DBType (coldbtype, used for alignment). */
    colType = new int[colCnt];
    DBType[] coldbtype = new DBType[colCnt];
    for (int i = 0; i < colCnt; i++) {
        int enumType = (in.readByte());
        curOffset += 1;
        if (enumType == DBType.BIGINT.ordinal()) {
            colType[i] = BIGINT.getOID();
            coldbtype[i] = DBType.BIGINT;
        } else if (enumType == DBType.BOOLEAN.ordinal()) {
            colType[i] = BOOLEAN.getOID();
            coldbtype[i] = DBType.BOOLEAN;
        } else if (enumType == DBType.FLOAT8.ordinal()) {
            colType[i] = FLOAT8.getOID();
            coldbtype[i] = DBType.FLOAT8;
        } else if (enumType == DBType.INTEGER.ordinal()) {
            colType[i] = INTEGER.getOID();
            coldbtype[i] = DBType.INTEGER;
        } else if (enumType == DBType.REAL.ordinal()) {
            colType[i] = REAL.getOID();
            coldbtype[i] = DBType.REAL;
        } else if (enumType == DBType.SMALLINT.ordinal()) {
            colType[i] = SMALLINT.getOID();
            coldbtype[i] = DBType.SMALLINT;
        } else if (enumType == DBType.BYTEA.ordinal()) {
            colType[i] = BYTEA.getOID();
            coldbtype[i] = DBType.BYTEA;
        } else if (enumType == DBType.TEXT.ordinal()) {
            colType[i] = TEXT.getOID();
            coldbtype[i] = DBType.TEXT;
        } else {
            throw new IOException("Unknown GPDBWritable.DBType ordinal value");
        }
    }
    /* Extract null bit array: one bit per column, packed into bytes. */
    byte[] nullbytes = new byte[getNullByteArraySize(colCnt)];
    in.readFully(nullbytes);
    curOffset += nullbytes.length;
    boolean[] colIsNull = byteArrayToBooleanArray(nullbytes, colCnt);
    /* extract column value */
    colValue = new Object[colCnt];
    for (int i = 0; i < colCnt; i++) {
        if (!colIsNull[i]) {
            /* Skip the alignment padding */
            int skipbytes = roundUpAlignment(curOffset, coldbtype[i].getAlignment()) - curOffset;
            for (int j = 0; j < skipbytes; j++) {
                in.readByte();
            }
            curOffset += skipbytes;
            /* For fixed length type, increment the offset according to type type length here.
             * For var length type (BYTEA, TEXT), we'll read 4 byte length header and the
             * actual payload.
             */
            int varcollen = -1;
            if (coldbtype[i].isVarLength()) {
                varcollen = in.readInt();
                curOffset += 4 + varcollen;
            } else {
                curOffset += coldbtype[i].getTypeLength();
            }
            switch (DataType.get(colType[i])) {
            case BIGINT: {
                colValue[i] = in.readLong();
                break;
            }
            case BOOLEAN: {
                colValue[i] = in.readBoolean();
                break;
            }
            case FLOAT8: {
                colValue[i] = in.readDouble();
                break;
            }
            case INTEGER: {
                colValue[i] = in.readInt();
                break;
            }
            case REAL: {
                colValue[i] = in.readFloat();
                break;
            }
            case SMALLINT: {
                colValue[i] = in.readShort();
                break;
            }
            /* For BYTEA column, it has a 4 byte var length header. */
            case BYTEA: {
                colValue[i] = new byte[varcollen];
                in.readFully((byte[]) colValue[i]);
                break;
            }
            /* For text formatted column, it has a 4 byte var length header
             * and it's always null terminated string.
             * So, we can remove the last "\0" when constructing the string.
             */
            case TEXT: {
                byte[] data = new byte[varcollen];
                in.readFully(data, 0, varcollen);
                colValue[i] = new String(data, 0, varcollen - 1, CHARSET);
                break;
            }
            default:
                throw new IOException("Unknown GPDBWritable ColType");
            }
        }
    }
    /* Skip the ending alignment padding: records are padded to 8 bytes. */
    int skipbytes = roundUpAlignment(curOffset, 8) - curOffset;
    for (int j = 0; j < skipbytes; j++) {
        in.readByte();
    }
    curOffset += skipbytes;
    // a non-zero error flag means the sender reported a failure
    if (errorFlag != 0) {
        throw new IOException("Received error value " + errorFlag + " from format");
    }
}
From source file:org.apache.hawq.pxf.service.io.Text.java
public static long readVLong(DataInput stream) throws IOException { byte firstByte = stream.readByte(); int len = decodeVIntSize(firstByte); if (len == 1) { return firstByte; }//from www . j a v a 2s .c o m long i = 0; for (int idx = 0; idx < len - 1; idx++) { byte b = stream.readByte(); i = i << 8; i = i | (b & 0xFF); } return (isNegativeVInt(firstByte) ? (i ^ -1L) : i); }
From source file:org.apache.horn.core.AbstractNeuralNetwork.java
@SuppressWarnings({ "rawtypes", "unchecked" }) @Override//ww w. j av a 2 s .c o m public void readFields(DataInput input) throws IOException { // read model type this.modelType = WritableUtils.readString(input); // read learning rate this.learningRate = input.readFloat(); // read model path this.modelPath = WritableUtils.readString(input); if (this.modelPath.equals("null")) { this.modelPath = null; } // read feature transformer int bytesLen = input.readInt(); byte[] featureTransformerBytes = new byte[bytesLen]; for (int i = 0; i < featureTransformerBytes.length; ++i) { featureTransformerBytes[i] = input.readByte(); } Class<? extends FloatFeatureTransformer> featureTransformerCls = (Class<? extends FloatFeatureTransformer>) SerializationUtils .deserialize(featureTransformerBytes); Constructor[] constructors = featureTransformerCls.getDeclaredConstructors(); Constructor constructor = constructors[0]; try { this.featureTransformer = (FloatFeatureTransformer) constructor.newInstance(new Object[] {}); } catch (InstantiationException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } catch (IllegalArgumentException e) { e.printStackTrace(); } catch (InvocationTargetException e) { e.printStackTrace(); } }
From source file:org.apache.isis.objectstore.nosql.db.file.server.FileServer.java
private void syncConnection(final Socket connection, final int readTimeout) { try {/*from w ww .j a v a 2 s .c o m*/ final CRC32 crc32 = new CRC32(); final DataOutput output = new DataOutputStream(connection.getOutputStream()); final DataInput input = new DataInputStream(new CheckedInputStream(connection.getInputStream(), crc32)); if (input.readByte() != INIT) { return; } final LogRange logFileRange = Util.logFileRange(); final long lastId = logFileRange.noLogFile() ? -1 : logFileRange.getLast(); output.writeLong(lastId); do { if (input.readByte() != RECOVERY_LOG) { return; } crc32.reset(); final long logId = input.readLong(); final File file = Util.tmpLogFile(logId); LOG.info("syncing recovery file: " + file.getName()); final BufferedOutputStream fileOutput = new BufferedOutputStream(new FileOutputStream(file)); final byte[] buffer = new byte[8092]; int length; while ((length = input.readInt()) > 0) { input.readFully(buffer, 0, length); fileOutput.write(buffer, 0, length); } fileOutput.close(); final long calculatedChecksum = crc32.getValue(); final long sentChecksum = input.readLong(); if (calculatedChecksum != sentChecksum) { throw new NoSqlStoreException("Checksum didn't match during download of " + file.getName()); } recover(file); final File renameTo = Util.logFile(logId); file.renameTo(renameTo); } while (true); } catch (final NoSqlStoreException e) { LOG.error("file server failure", e); } catch (final IOException e) { LOG.error("networking failure", e); } catch (final RuntimeException e) { LOG.error("request failure", e); } finally { try { connection.close(); } catch (final IOException e) { LOG.warn("failure to close connection", e); } } // TODO restart }
From source file:org.apache.marmotta.kiwi.io.KiWiIO.java
/** * Read a KiWiNode serialized with writeNode and return it. The type indicator is used to determine which type * of resource to instantiate./* w w w . j a va 2 s .com*/ * * @param input DataInput source * @return an instance of a subclass of KiWiNode, depending on the type indicator read from the source * @throws IOException */ public static KiWiNode readNode(DataInput input) throws IOException { int type = input.readByte(); switch (type) { case 0: return null; case TYPE_URI: return readURI(input); case TYPE_BNODE: return readBNode(input); case TYPE_BOOLEAN: return readBooleanLiteral(input); case TYPE_DATE: return readDateLiteral(input); case TYPE_DOUBLE: return readDoubleLiteral(input); case TYPE_INT: return readIntLiteral(input); case TYPE_STRING: return readStringLiteral(input); default: throw new IllegalArgumentException("unknown KiWiNode type: " + type); } }
From source file:org.apache.marmotta.kiwi.io.KiWiIO.java
/** * Read a KiWiUriResource serialized with writeURI and return it. * * @param input DataInput source/* w ww. ja v a 2 s.c o m*/ * @return a KiWiUriResource * @throws IOException */ public static KiWiUriResource readURI(DataInput input) throws IOException { long id = input.readLong(); if (id == -1) { return null; } else { int prefixMode = input.readByte(); String uriPrefix = ""; String uriSuffix = DataIO.readString(input); switch (prefixMode) { case PREFIX_XSD: uriPrefix = XSD.NAMESPACE; break; case PREFIX_RDF: uriPrefix = RDF.NAMESPACE; break; case PREFIX_RDFS: uriPrefix = RDFS.NAMESPACE; break; case PREFIX_SKOS: uriPrefix = SKOS.NAMESPACE; break; case PREFIX_DC: uriPrefix = DC.NAMESPACE; break; case PREFIX_DCT: uriPrefix = DCTERMS.NAMESPACE; break; case PREFIX_OWL: uriPrefix = OWL.NAMESPACE; break; case PREFIX_SCHEMA: uriPrefix = SCHEMA.NAMESPACE; break; case PREFIX_REDLINK: uriPrefix = NS_REDLINK; break; case PREFIX_DBPEDIA: uriPrefix = NS_DBPEDIA; break; case PREFIX_FREEBASE: uriPrefix = NS_FREEBASE; break; case PREFIX_LOCAL: uriPrefix = HTTP_LOCALHOST; break; default: uriPrefix = ""; break; } Date created = new Date(input.readLong()); KiWiUriResource r = new KiWiUriResource(uriPrefix + uriSuffix, created); r.setId(id); return r; } }
From source file:org.apache.marmotta.kiwi.io.KiWiIO.java
/**
 * Reads a KiWiStringLiteral serialized with writeStringLiteral from a
 * DataInput source. Common language tags are stored as a single byte;
 * anything else is written out as an explicit string. The literal's datatype
 * URI and creation timestamp follow the language field.
 *
 * @param input the source
 * @return the de-serialized KiWiStringLiteral, or null if the id field is -1
 * @throws IOException if the underlying stream fails
 */
public static KiWiStringLiteral readStringLiteral(DataInput input) throws IOException {
    final long id = input.readLong();
    // -1 is the marker for a serialized null
    if (id == -1) {
        return null;
    }
    final String content = readContent(input);
    final byte languageCode = input.readByte();
    final String lang;
    switch (languageCode) {
    case LANG_EN:
        lang = "en";
        break;
    case LANG_DE:
        lang = "de";
        break;
    case LANG_FR:
        lang = "fr";
        break;
    case LANG_ES:
        lang = "es";
        break;
    case LANG_IT:
        lang = "it";
        break;
    case LANG_PT:
        lang = "pt";
        break;
    case LANG_NL:
        lang = "nl";
        break;
    case LANG_SV:
        lang = "sv";
        break;
    case LANG_NO:
        lang = "no";
        break;
    case LANG_FI:
        lang = "fi";
        break;
    case LANG_RU:
        lang = "ru";
        break;
    case LANG_DK:
        lang = "dk";
        break;
    case LANG_PL:
        lang = "pl";
        break;
    default:
        // uncommon tags are serialized as an explicit string (may be null)
        lang = DataIO.readString(input);
    }
    final KiWiUriResource datatype = readURI(input);
    final Date created = new Date(input.readLong());
    final KiWiStringLiteral literal = new KiWiStringLiteral(content,
            lang != null ? Locale.forLanguageTag(lang) : null, datatype, created);
    literal.setId(id);
    return literal;
}