List of usage examples for java.io DataOutputStream writeInt
public final void writeInt(int v) throws IOException
Writes an int to the underlying output stream as four bytes, high byte first.
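Before the per-project examples, here is a minimal, self-contained round-trip sketch (class name and values are made up for illustration) showing that writeInt always emits exactly four bytes, high byte first, and that DataInputStream.readInt reads them back:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteIntDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(bos);

        dos.writeInt(0x12345678);   // written as 0x12 0x34 0x56 0x78 (high byte first)
        dos.writeInt(-1);           // four 0xFF bytes
        dos.flush();

        byte[] bytes = bos.toByteArray();
        System.out.println(bytes.length);   // 8: every int takes exactly four bytes

        DataInputStream dis = new DataInputStream(new ByteArrayInputStream(bytes));
        System.out.println(Integer.toHexString(dis.readInt())); // 12345678
        System.out.println(dis.readInt());                      // -1
    }
}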
From source file:ml.shifu.shifu.core.dtrain.dt.BinaryDTSerializer.java
public static void save(ModelConfig modelConfig, List<ColumnConfig> columnConfigList,
        List<List<TreeNode>> baggingTrees, String loss, int inputCount, OutputStream output) throws IOException {
    DataOutputStream fos = null;
    try {
        fos = new DataOutputStream(new GZIPOutputStream(output));
        // version
        fos.writeInt(CommonConstants.TREE_FORMAT_VERSION);
        fos.writeUTF(modelConfig.getAlgorithm());
        fos.writeUTF(loss);
        fos.writeBoolean(modelConfig.isClassification());
        fos.writeBoolean(modelConfig.getTrain().isOneVsAll());
        fos.writeInt(inputCount);

        Map<Integer, String> columnIndexNameMapping = new HashMap<Integer, String>();
        Map<Integer, List<String>> columnIndexCategoricalListMapping = new HashMap<Integer, List<String>>();
        Map<Integer, Double> numericalMeanMapping = new HashMap<Integer, Double>();
        for (ColumnConfig columnConfig : columnConfigList) {
            if (columnConfig.isFinalSelect()) {
                columnIndexNameMapping.put(columnConfig.getColumnNum(), columnConfig.getColumnName());
            }
            if (columnConfig.isCategorical() && CollectionUtils.isNotEmpty(columnConfig.getBinCategory())) {
                columnIndexCategoricalListMapping.put(columnConfig.getColumnNum(), columnConfig.getBinCategory());
            }
            if (columnConfig.isNumerical() && columnConfig.getMean() != null) {
                numericalMeanMapping.put(columnConfig.getColumnNum(), columnConfig.getMean());
            }
        }

        if (columnIndexNameMapping.size() == 0) {
            boolean hasCandidates = CommonUtils.hasCandidateColumns(columnConfigList);
            for (ColumnConfig columnConfig : columnConfigList) {
                if (CommonUtils.isGoodCandidate(columnConfig, hasCandidates)) {
                    columnIndexNameMapping.put(columnConfig.getColumnNum(), columnConfig.getColumnName());
                }
            }
        }

        // serialize numericalMeanMapping
        fos.writeInt(numericalMeanMapping.size());
        for (Entry<Integer, Double> entry : numericalMeanMapping.entrySet()) {
            fos.writeInt(entry.getKey());
            // for some features the mean value is null (the feature is not selected); write 0d to avoid NPE
            fos.writeDouble(entry.getValue() == null ? 0d : entry.getValue());
        }
        // serialize columnIndexNameMapping
        fos.writeInt(columnIndexNameMapping.size());
        for (Entry<Integer, String> entry : columnIndexNameMapping.entrySet()) {
            fos.writeInt(entry.getKey());
            fos.writeUTF(entry.getValue());
        }
        // serialize columnIndexCategoricalListMapping
        fos.writeInt(columnIndexCategoricalListMapping.size());
        for (Entry<Integer, List<String>> entry : columnIndexCategoricalListMapping.entrySet()) {
            List<String> categories = entry.getValue();
            if (categories != null) {
                fos.writeInt(entry.getKey());
                fos.writeInt(categories.size());
                for (String category : categories) {
                    // There is a 16k limitation when using the writeUTF() function.
                    // If the category value is larger than 10k, write a marker -1 and write raw bytes
                    // instead of writeUTF; the read logic must also switch to reading bytes (not readUTF)
                    // according to the marker.
                    if (category.length() < Constants.MAX_CATEGORICAL_VAL_LEN) {
                        fos.writeUTF(category);
                    } else {
                        fos.writeShort(UTF_BYTES_MARKER); // marker here
                        byte[] bytes = category.getBytes("UTF-8");
                        fos.writeInt(bytes.length);
                        for (int i = 0; i < bytes.length; i++) {
                            fos.writeByte(bytes[i]);
                        }
                    }
                }
            }
        }

        Map<Integer, Integer> columnMapping = getColumnMapping(columnConfigList);
        fos.writeInt(columnMapping.size());
        for (Entry<Integer, Integer> entry : columnMapping.entrySet()) {
            fos.writeInt(entry.getKey());
            fos.writeInt(entry.getValue());
        }

        // after model version 4 (>= 4), IndependentTreeModel supports bagging; here write a default RF/GBT size 1
        fos.writeInt(baggingTrees.size());
        for (int i = 0; i < baggingTrees.size(); i++) {
            List<TreeNode> trees = baggingTrees.get(i);
            int treeLength = trees.size();
            fos.writeInt(treeLength);
            for (TreeNode treeNode : trees) {
                treeNode.write(fos);
            }
        }
    } catch (IOException e) {
        LOG.error("Error in writing output.", e);
    } finally {
        IOUtils.closeStream(fos);
    }
}
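The read side of the long-category workaround is not shown on this page. Below is a minimal sketch of how such a value could be read back; the method name readCategory and the utfBytesMarker parameter are assumptions that mirror the write logic above, not Shifu's actual API, and decoding the short path as standard UTF-8 is an approximation of writeUTF's modified UTF-8 that works for ordinary text.

import java.io.DataInputStream;
import java.io.IOException;

public class CategoryReaderSketch {
    /**
     * Reads one category value written by the code above: the leading short is either
     * writeUTF's own 2-byte length prefix, or the marker that announces a raw,
     * writeInt-length-prefixed byte[] for values too long for writeUTF.
     */
    static String readCategory(DataInputStream dis, short utfBytesMarker) throws IOException {
        short lengthOrMarker = dis.readShort();
        if (lengthOrMarker == utfBytesMarker) {
            int len = dis.readInt();            // length written with writeInt
            byte[] bytes = new byte[len];
            dis.readFully(bytes);
            return new String(bytes, "UTF-8");
        }
        // Short path: writeUTF wrote a 2-byte length followed by (modified) UTF-8 bytes.
        byte[] bytes = new byte[lengthOrMarker & 0xFFFF];
        dis.readFully(bytes);
        return new String(bytes, "UTF-8");
    }
}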
From source file:com.adaptris.security.StdOutput.java
private static void write(DataOutputStream out, byte[] bytes) throws IOException {
    if (bytes == null) {
        out.writeInt(0);
    } else {
        out.writeInt(bytes.length);
        out.write(bytes, 0, bytes.length);
    }
}
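The matching read method is not part of the snippet; a minimal sketch of one (class and method names are assumptions) reads the four-byte length prefix and then the payload. Note that a null and an empty array are written identically, so the reader cannot tell them apart.

import java.io.DataInputStream;
import java.io.IOException;

public class LengthPrefixedReadSketch {
    /** Reads a byte[] written by the write() method above (null and empty both come back empty). */
    static byte[] read(DataInputStream in) throws IOException {
        int length = in.readInt();   // four-byte, big-endian length prefix
        byte[] bytes = new byte[length];
        in.readFully(bytes);         // readFully loops until all bytes have arrived
        return bytes;
    }
}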
From source file:org.eclipse.swt.snippets.Snippet319.java
static byte[] convertToByteArray(MyType type) {
    DataOutputStream dataOutStream = null;
    try {
        ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
        dataOutStream = new DataOutputStream(byteOutStream);
        byte[] bytes = type.name.getBytes();
        dataOutStream.writeInt(bytes.length);
        dataOutStream.write(bytes);
        dataOutStream.writeLong(type.time);
        return byteOutStream.toByteArray();
    } catch (IOException e) {
        return null;
    } finally {
        if (dataOutStream != null) {
            try {
                dataOutStream.close();
            } catch (IOException e) {
            }
        }
    }
}
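A sketch of the reverse conversion, assuming MyType exposes the same name and time fields used above and a no-arg constructor (a hypothetical helper, not part of the SWT snippet):

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

public class ConvertBackSketch {
    static MyType convertFromByteArray(byte[] data) {
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(data))) {
            MyType type = new MyType();
            byte[] nameBytes = new byte[in.readInt()];  // length written by writeInt above
            in.readFully(nameBytes);
            type.name = new String(nameBytes);          // same default charset as getBytes()
            type.time = in.readLong();
            return type;
        } catch (IOException e) {
            return null;
        }
    }
}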
From source file:Main.java
protected static void serialiseLength(DataOutputStream os, int len, int max_length) throws IOException {
    if (len > max_length) {
        throw (new IOException("Invalid DHT data length: max=" + max_length + ",actual=" + len));
    }
    if (max_length < 256) {
        os.writeByte(len);
    } else if (max_length < 65536) {
        os.writeShort(len);
    } else {
        os.writeInt(len);
    }
}
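The matching read side (a sketch with an assumed name, not the project's actual helper) must pick the same field width from max_length, since the width is not encoded in the data itself:

import java.io.DataInputStream;
import java.io.IOException;

public class DeserialiseLengthSketch {
    /** Mirror of serialiseLength: the field width is chosen from max_length, not from the stream. */
    static int deserialiseLength(DataInputStream is, int max_length) throws IOException {
        if (max_length < 256) {
            return is.readUnsignedByte();    // written with writeByte
        } else if (max_length < 65536) {
            return is.readUnsignedShort();   // written with writeShort
        } else {
            return is.readInt();             // written with writeInt
        }
    }
}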
From source file:org.apache.cassandra.service.MigrationManager.java
/**
 * Serialize given row mutations into raw bytes
 *
 * @param schema The row mutations to serialize
 * @param version The version of the message service to use for serialization
 *
 * @return serialized mutations
 *
 * @throws IOException on failed serialization
 */
public static byte[] serializeSchema(Collection<RowMutation> schema, int version) throws IOException {
    FastByteArrayOutputStream bout = new FastByteArrayOutputStream();
    DataOutputStream dout = new DataOutputStream(bout);
    dout.writeInt(schema.size());
    for (RowMutation mutation : schema)
        RowMutation.serializer().serialize(mutation, dout, version);
    dout.close();
    return bout.toByteArray();
}
From source file:org.apache.jackrabbit.core.persistence.util.Serializer.java
/**
 * Serializes the specified <code>PropertyState</code> object to the given
 * binary <code>stream</code>. Binary values are stored in the specified
 * <code>BLOBStore</code>.
 *
 * @param state     <code>state</code> to serialize
 * @param stream    the stream where the <code>state</code> should be
 *                  serialized to
 * @param blobStore handler for BLOB data
 * @throws Exception if an error occurs during the serialization
 * @see #deserialize(PropertyState, InputStream, BLOBStore)
 */
public static void serialize(PropertyState state, OutputStream stream, BLOBStore blobStore) throws Exception {
    DataOutputStream out = new DataOutputStream(stream);
    // type
    out.writeInt(state.getType());
    // multiValued
    out.writeBoolean(state.isMultiValued());
    // definitionId
    out.writeUTF("");
    // modCount
    out.writeShort(state.getModCount());
    // values
    InternalValue[] values = state.getValues();
    out.writeInt(values.length); // count
    for (int i = 0; i < values.length; i++) {
        InternalValue val = values[i];
        if (state.getType() == PropertyType.BINARY) {
            // special handling required for binary value:
            // put binary value in BLOB store
            InputStream in = val.getStream();
            String blobId = blobStore.createId(state.getPropertyId(), i);
            try {
                blobStore.put(blobId, in, val.getLength());
            } finally {
                IOUtils.closeQuietly(in);
            }
            // store id of BLOB as property value
            out.writeUTF(blobId); // value
            // replace value instance with value backed by resource
            // in BLOB store and discard old value instance (e.g. temp file)
            if (blobStore instanceof ResourceBasedBLOBStore) {
                // optimization: if the BLOB store is resource-based
                // retrieve the resource directly rather than having
                // to read the BLOB from an input stream
                FileSystemResource fsRes = ((ResourceBasedBLOBStore) blobStore).getResource(blobId);
                values[i] = InternalValue.create(fsRes);
            } else {
                in = blobStore.get(blobId);
                try {
                    values[i] = InternalValue.create(in);
                } finally {
                    IOUtils.closeQuietly(in);
                }
            }
            val.discard();
        } else {
            /**
             * because writeUTF(String) has a size limit of 65k,
             * Strings are serialized as <length><byte[]>
             */
            //out.writeUTF(val.toString()); // value
            byte[] bytes = val.toString().getBytes(ENCODING);
            out.writeInt(bytes.length); // length of byte[]
            out.write(bytes);           // byte[]
        }
    }
}
From source file:org.apache.jackrabbit.core.persistence.util.Serializer.java
/**
 * Serializes the specified <code>NodeReferences</code> object to the given
 * binary <code>stream</code>.
 *
 * @param refs   object to serialize
 * @param stream the stream where the object should be serialized to
 * @throws Exception if an error occurs during the serialization
 * @see #deserialize(NodeReferences, InputStream)
 */
public static void serialize(NodeReferences refs, OutputStream stream) throws Exception {
    DataOutputStream out = new DataOutputStream(stream);
    // references
    Collection<PropertyId> c = refs.getReferences();
    out.writeInt(c.size()); // count
    for (Iterator<PropertyId> iter = c.iterator(); iter.hasNext();) {
        PropertyId propId = iter.next();
        out.writeUTF(propId.toString()); // propertyId
    }
}
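A sketch of the corresponding read (illustrative names, not Jackrabbit's actual deserializer) reads the writeInt count back and then one UTF string per reference:

import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class NodeReferencesReadSketch {
    /** Reads back the count written with writeInt, then one UTF-encoded property id per entry. */
    static List<String> readReferenceIds(DataInputStream in) throws IOException {
        int count = in.readInt();
        List<String> ids = new ArrayList<String>(count);
        for (int i = 0; i < count; i++) {
            ids.add(in.readUTF()); // Jackrabbit's real code turns each string back into a PropertyId
        }
        return ids;
    }
}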
From source file:com.dbay.apns4j.tools.ApnsTools.java
public final static byte[] generateData(List<FrameItem> list) {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream os = new DataOutputStream(bos);
    int frameLength = 0;
    for (FrameItem item : list) {
        // itemId length = 1, itemDataLength = 2
        frameLength += 1 + 2 + item.getItemLength();
    }
    try {
        os.writeByte(Command.SEND_V2);
        os.writeInt(frameLength);
        for (FrameItem item : list) {
            os.writeByte(item.getItemId());
            os.writeShort(item.getItemLength());
            os.write(item.getItemData());
        }
        return bos.toByteArray();
    } catch (IOException e) {
        e.printStackTrace();
    }
    throw new RuntimeException();
}
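For reference, the five header bytes produced by the writeByte/writeInt pair above can be inspected with a small, self-contained sketch; the command value 2 and the frame length are hard-coded here purely to illustrate the byte layout:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ApnsHeaderSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream os = new DataOutputStream(bos);
        int frameLength = 0x000102A3;   // example frame length
        os.writeByte(2);                // command byte (SEND_V2 in the code above)
        os.writeInt(frameLength);       // big-endian, exactly as the wire format expects
        for (byte b : bos.toByteArray()) {
            System.out.printf("%02X ", b & 0xFF); // prints: 02 00 01 02 A3
        }
    }
}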
From source file:net.datenwerke.transloader.primitive.WrapperConverter.java
private static void write(Object wrapper, DataOutputStream dataStream) throws IOException {
    char typeCode = getTypeCode(wrapper);
    switch (typeCode) {
    case 'B':
        dataStream.writeByte(intValue(wrapper));
        break;
    case 'C':
        dataStream.writeChar(charValue(wrapper));
        break;
    case 'S':
        dataStream.writeShort(intValue(wrapper));
        break;
    case 'I':
        dataStream.writeInt(intValue(wrapper));
        break;
    case 'J':
        dataStream.writeLong(number(wrapper).longValue());
        break;
    case 'F':
        dataStream.writeFloat(number(wrapper).floatValue());
        break;
    case 'D':
        dataStream.writeDouble(number(wrapper).doubleValue());
        break;
    case 'Z':
        dataStream.writeBoolean(booleanValue(wrapper));
    }
}
From source file:org.mrgeo.data.raster.RasterWritable.java
private static void writeHeader(int width, int height, int bands, int datatype, OutputStream out)
        throws IOException {
    final DataOutputStream dos = new DataOutputStream(out);
    dos.writeInt(HEADERSIZE);
    dos.writeInt(height);
    dos.writeInt(width);
    dos.writeInt(bands);
    dos.writeInt(datatype);
    dos.writeInt(SampleModelType.BANDED.ordinal());
}
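A hedged sketch of a matching readHeader (a hypothetical helper, not MrGeo's actual API) reads the six ints back in the same order:

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;

public class RasterHeaderReadSketch {
    /** Reads the six ints written by writeHeader above, in the same order. */
    static int[] readHeader(InputStream in) throws IOException {
        DataInputStream dis = new DataInputStream(in);
        int headerSize = dis.readInt();
        int height = dis.readInt();
        int width = dis.readInt();
        int bands = dis.readInt();
        int datatype = dis.readInt();
        int sampleModelType = dis.readInt(); // ordinal of SampleModelType (BANDED in the writer)
        return new int[] { headerSize, height, width, bands, datatype, sampleModelType };
    }
}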