Example usage for java.io DataOutputStream writeUTF

Introduction

On this page you can find usage examples for java.io.DataOutputStream.writeUTF.

Prototype

public final void writeUTF(String str) throws IOException 

Document

Writes a string to the underlying output stream using modified UTF-8 encoding in a machine-independent manner.
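
Before the usage examples below, here is a minimal, self-contained sketch (not taken from any of the listed source files) of the usual pattern: writeUTF on a DataOutputStream paired with readUTF on a DataInputStream. Note that writeUTF prefixes the encoded string with a two-byte length, so a single call is limited to 65535 encoded bytes; several of the examples below fall back to write(byte[]) for exactly that reason.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteUTFDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
            // Encodes the string as modified UTF-8, preceded by a two-byte length.
            out.writeUTF("héllo, writeUTF");
        }

        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
            // readUTF is the matching decoder for data written with writeUTF.
            System.out.println(in.readUTF());
        }
    }
}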

Usage

From source file:epn.edu.ec.bibliotecadigital.servidor.ServerRunnable.java

@Override
public void run() {
    try {
        DataInputStream dataIn = new DataInputStream(clientSocket.getInputStream());
        DataOutputStream dataOut = new DataOutputStream(clientSocket.getOutputStream());
        OutputStream out;
        String accion = dataIn.readUTF();
        Libro lbr;
        String nombreUsuario = dataIn.readUTF();
        System.out.println("nombreUsuario" + nombreUsuario);
        switch (accion) {
        case "bajar":

            String codArchivo = dataIn.readUTF();
            dataOut = new DataOutputStream(clientSocket.getOutputStream());

            lbr = new LibroJpaController(emf).findLibro(Integer.parseInt(codArchivo));
            if (lbr == null) {
                dataOut.writeBoolean(false);
                break;
            }
            dataOut.writeBoolean(true);

            //File file = new File("C:\\Computacion Distribuida\\" + lbr.getNombre());
            dataOut.writeUTF(lbr.getNombre());
            out = clientSocket.getOutputStream();
            try {
                byte[] bytes = new byte[64 * 1024];
                InputStream in = new ByteArrayInputStream(lbr.getArchivo());

                int count;
                while ((count = in.read(bytes)) > 0) {
                    out.write(bytes, 0, count);
                }
                Usuariolibros usrLbr = new Usuariolibros();
                usrLbr.setFecha(Calendar.getInstance().getTime());
                usrLbr.setAccion('B');
                usrLbr.setCodigolibro(lbr);
                usrLbr.setNombrecuenta(new Usuario(nombreUsuario));
                new UsuariolibrosJpaController(emf).create(usrLbr);
                in.close();
            } finally {
                IOUtils.closeQuietly(out);
            }
            break;
        case "subir":
            dataIn = new DataInputStream(clientSocket.getInputStream());
            String fileName = dataIn.readUTF();
            InputStream in = clientSocket.getInputStream();
            try {
                out = new FileOutputStream("C:\\Computacion Distribuida\\" + fileName);
                byte[] bytes = new byte[64 * 1024];

                int count;
                while ((count = in.read(bytes)) > 0) {
                    out.write(bytes, 0, count);
                }
                out.close();
                lbr = new Libro();
                lbr.setNombre(fileName);
                lbr.setArchivo(
                        IOUtils.toByteArray(new FileInputStream("C:\\Computacion Distribuida\\" + fileName)));

                new LibroJpaController(emf).create(lbr);
                Usuariolibros usrLbr = new Usuariolibros();
                usrLbr.setFecha(Calendar.getInstance().getTime());
                usrLbr.setAccion('S');
                usrLbr.setCodigolibro(lbr);
                usrLbr.setNombrecuenta(new Usuario(nombreUsuario));
                new UsuariolibrosJpaController(emf).create(usrLbr);
                actualizarLibrosEnServidores(fileName);
            } finally {
                IOUtils.closeQuietly(in);
            }
            break;
        case "obtenerLista":
            ObjectOutputStream outToServer = new ObjectOutputStream(clientSocket.getOutputStream());
            outToServer.writeObject(new LibroJpaController(emf).findLibroEntities());
            outToServer.close();
            break;
        case "verificarEstado":
            dataOut.writeUTF(String.valueOf(server.isDisponible()));
            break;
        case "actualizar":
            dataIn = new DataInputStream(clientSocket.getInputStream());
            String fileNameFromServer = dataIn.readUTF();
            in = clientSocket.getInputStream();
            try {
                out = new FileOutputStream("C:\\Computacion Distribuida\\" + fileNameFromServer);
                byte[] bytes = new byte[64 * 1024];

                int count;
                while ((count = in.read(bytes)) > 0) {
                    out.write(bytes, 0, count);
                }
                out.close();
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                IOUtils.closeQuietly(in);
            }

        }
        dataIn.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:org.apache.jackrabbit.core.persistence.bundle.util.BundleBinding.java

/**
 * Serializes a <code>PropertyState</code> to the data output stream
 *
 * @param out the output stream
 * @param state the property entry to store
 * @throws IOException if an I/O error occurs.
 */
public void writeState(DataOutputStream out, NodePropBundle.PropertyEntry state) throws IOException {
    // type & mod count
    out.writeInt(state.getType() | (state.getModCount() << 16));
    // multiValued
    out.writeBoolean(state.isMultiValued());
    // definitionId
    out.writeUTF(state.getPropDefId().toString());
    // values
    InternalValue[] values = state.getValues();
    out.writeInt(values.length); // count
    for (int i = 0; i < values.length; i++) {
        InternalValue val = values[i];
        switch (state.getType()) {
        case PropertyType.BINARY:
            try {
                long size = val.getLength();
                if (dataStore != null) {
                    int maxMemorySize = dataStore.getMinRecordLength() - 1;
                    if (size < maxMemorySize) {
                        writeSmallBinary(out, val, state, i);
                    } else {
                        out.writeInt(BINARY_IN_DATA_STORE);
                        val.store(dataStore);
                        out.writeUTF(val.toString());
                    }
                    break;
                }
                // special handling required for binary value:
                // spool binary value to file in blob store
                if (size < 0) {
                    log.warn("Blob has negative size. Potential loss of data. " + "id={} idx={}", state.getId(),
                            String.valueOf(i));
                    out.writeInt(0);
                    values[i] = InternalValue.create(new byte[0]);
                    val.discard();
                } else if (size > minBlobSize) {
                    out.writeInt(BINARY_IN_BLOB_STORE);
                    String blobId = state.getBlobId(i);
                    if (blobId == null) {
                        try {
                            InputStream in = val.getStream();
                            try {
                                blobId = blobStore.createId(state.getId(), i);
                                blobStore.put(blobId, in, size);
                                state.setBlobId(blobId, i);
                            } finally {
                                IOUtils.closeQuietly(in);
                            }
                        } catch (Exception e) {
                            String msg = "Error while storing blob. id=" + state.getId() + " idx=" + i
                                    + " size=" + size;
                            log.error(msg, e);
                            throw new IOException(msg);
                        }
                        try {
                            // replace value instance with value
                            // backed by resource in blob store and delete temp file
                            if (blobStore instanceof ResourceBasedBLOBStore) {
                                values[i] = InternalValue
                                        .create(((ResourceBasedBLOBStore) blobStore).getResource(blobId));
                            } else {
                                values[i] = InternalValue.create(blobStore.get(blobId));
                            }
                        } catch (Exception e) {
                            log.error("Error while reloading blob. truncating. id=" + state.getId() + " idx="
                                    + i + " size=" + size, e);
                            values[i] = InternalValue.create(new byte[0]);
                        }
                        val.discard();
                    }
                    // store id of blob as property value
                    out.writeUTF(blobId); // value
                } else {
                    // delete evt. blob
                    byte[] data = writeSmallBinary(out, val, state, i);
                    // replace value instance with value
                    // backed by resource in blob store and delete temp file
                    values[i] = InternalValue.create(data);
                    val.discard();
                }
            } catch (RepositoryException e) {
                String msg = "Error while storing blob. id=" + state.getId() + " idx=" + i + " value=" + val;
                log.error(msg, e);
                throw new IOException(msg);
            }
            break;
        case PropertyType.DOUBLE:
            try {
                out.writeDouble(val.getDouble());
            } catch (RepositoryException e) {
                // should never occur
                throw new IOException("Unexpected error while writing DOUBLE value.");
            }
            break;
        case PropertyType.DECIMAL:
            try {
                writeDecimal(out, val.getDecimal());
            } catch (RepositoryException e) {
                // should never occur
                throw new IOException("Unexpected error while writing DECIMAL value.");
            }
            break;
        case PropertyType.LONG:
            try {
                out.writeLong(val.getLong());
            } catch (RepositoryException e) {
                // should never occur
                throw new IOException("Unexpected error while writing LONG value.");
            }
            break;
        case PropertyType.BOOLEAN:
            try {
                out.writeBoolean(val.getBoolean());
            } catch (RepositoryException e) {
                // should never occur
                throw new IOException("Unexpected error while writing BOOLEAN value.");
            }
            break;
        case PropertyType.NAME:
            try {
                writeQName(out, val.getName());
            } catch (RepositoryException e) {
                // should never occur
                throw new IOException("Unexpected error while writing NAME value.");
            }
            break;
        case PropertyType.WEAKREFERENCE:
        case PropertyType.REFERENCE:
            writeID(out, val.getNodeId());
            break;
        default:
            // because writeUTF(String) has a size limit of 64k,
            // we're using write(byte[]) instead
            byte[] bytes = val.toString().getBytes("UTF-8");
            out.writeInt(bytes.length); // length of byte[]
            out.write(bytes); // byte[]
        }
    }
}

From source file:org.apache.jackrabbit.core.persistence.bundle.util.BundleBinding.java

/**
 * Serializes a <code>PropertyState</code> to the data output stream
 *
 * @param out the output stream
 * @param state the property entry to store
 * @throws IOException if an I/O error occurs.
 */
public void writeState(DataOutputStream out, NodePropBundle.PropertyEntry state) throws IOException {
    // type & mod count
    out.writeInt(state.getType() | (state.getModCount() << 16));
    // multiValued
    out.writeBoolean(state.isMultiValued());
    // definitionId
    out.writeUTF(state.getPropDefId().toString());
    // values
    InternalValue[] values = state.getValues();
    out.writeInt(values.length); // count
    for (int i = 0; i < values.length; i++) {
        InternalValue val = values[i];
        switch (state.getType()) {
        case PropertyType.BINARY:
            try {
                long size = val.getLength();
                if (dataStore != null) {
                    int maxMemorySize = dataStore.getMinRecordLength() - 1;
                    if (size < maxMemorySize) {
                        writeSmallBinary(out, val, state, i);
                    } else {
                        out.writeInt(BINARY_IN_DATA_STORE);
                        val.store(dataStore);
                        out.writeUTF(val.toString());
                    }
                    break;
                }
                // special handling required for binary value:
                // spool binary value to file in blob store
                if (size < 0) {
                    log.warn("Blob has negative size. Potential loss of data. " + "id={} idx={}", state.getId(),
                            String.valueOf(i));
                    out.writeInt(0);
                    values[i] = InternalValue.create(new byte[0]);
                    val.discard();
                } else if (size > minBlobSize) {
                    out.writeInt(BINARY_IN_BLOB_STORE);
                    String blobId = state.getBlobId(i);
                    if (blobId == null) {
                        try {
                            InputStream in = val.getStream();
                            try {
                                blobId = blobStore.createId(state.getId(), i);
                                blobStore.put(blobId, in, size);
                                state.setBlobId(blobId, i);
                            } finally {
                                IOUtils.closeQuietly(in);
                            }
                        } catch (Exception e) {
                            String msg = "Error while storing blob. id=" + state.getId() + " idx=" + i
                                    + " size=" + size;
                            log.error(msg, e);
                            throw new IOException(msg);
                        }
                        try {
                            // replace value instance with value
                            // backed by resource in blob store and delete temp file
                            if (blobStore instanceof ResourceBasedBLOBStore) {
                                values[i] = InternalValue
                                        .create(((ResourceBasedBLOBStore) blobStore).getResource(blobId));
                            } else {
                                values[i] = InternalValue.create(blobStore.get(blobId));
                            }
                        } catch (Exception e) {
                            log.error("Error while reloading blob. truncating. id=" + state.getId() + " idx="
                                    + i + " size=" + size, e);
                            values[i] = InternalValue.create(new byte[0]);
                        }
                        val.discard();
                    }
                    // store id of blob as property value
                    out.writeUTF(blobId); // value
                } else {
                    // delete evt. blob
                    byte[] data = writeSmallBinary(out, val, state, i);
                    // replace value instance with value
                    // backed by resource in blob store and delete temp file
                    values[i] = InternalValue.create(data);
                    val.discard();
                }
            } catch (RepositoryException e) {
                String msg = "Error while storing blob. id=" + state.getId() + " idx=" + i + " value=" + val;
                log.error(msg, e);
                throw new IOException(msg);
            }
            break;
        case PropertyType.DOUBLE:
            try {
                out.writeDouble(val.getDouble());
            } catch (RepositoryException e) {
                // should never occur
                throw new IOException("Unexpected error while writing DOUBLE value.");
            }
            break;
        case PropertyType.DECIMAL:
            try {
                writeDecimal(out, val.getDecimal());
            } catch (RepositoryException e) {
                // should never occur
                throw new IOException("Unexpected error while writing DECIMAL value.");
            }
            break;
        case PropertyType.LONG:
            try {
                out.writeLong(val.getLong());
            } catch (RepositoryException e) {
                // should never occur
                throw new IOException("Unexpected error while writing LONG value.");
            }
            break;
        case PropertyType.BOOLEAN:
            try {
                out.writeBoolean(val.getBoolean());
            } catch (RepositoryException e) {
                // should never occur
                throw new IOException("Unexpected error while writing BOOLEAN value.");
            }
            break;
        case PropertyType.NAME:
            try {
                writeQName(out, val.getName());
            } catch (RepositoryException e) {
                // should never occur
                throw new IOException("Unexpected error while writing NAME value.");
            }
            break;
        case PropertyType.WEAKREFERENCE:
        case PropertyType.REFERENCE:
            writeUUID(out, val.getUUID());
            break;
        default:
            // because writeUTF(String) has a size limit of 64k,
            // we're using write(byte[]) instead
            byte[] bytes = val.toString().getBytes("UTF-8");
            out.writeInt(bytes.length); // length of byte[]
            out.write(bytes); // byte[]
        }
    }
}

From source file:org.chromium.chrome.browser.tabmodel.TabPersistentStore.java

/**
 * Serializes data from a {@link TabModelSelector} into a byte array.
 * @param standardInfo      Info about the regular {@link TabModel}.
 * @param incognitoInfo     Info about the Incognito {@link TabModel}.
 * @param tabsBeingRestored Tabs that are in the process of being restored.
 * @return                  {@code byte[]} containing the serialized state of {@code selector}.
 */
public static byte[] serializeMetadata(TabModelMetadata standardInfo, TabModelMetadata incognitoInfo,
        @Nullable List<TabRestoreDetails> tabsBeingRestored) throws IOException {
    ThreadUtils.assertOnUiThread();

    int standardCount = standardInfo.ids.size();
    int incognitoCount = incognitoInfo.ids.size();

    // Determine how many Tabs there are, including those that have not yet been added to the TabLists.
    int numAlreadyLoaded = incognitoCount + standardCount;
    int numStillBeingLoaded = tabsBeingRestored == null ? 0 : tabsBeingRestored.size();
    int numTabsTotal = numStillBeingLoaded + numAlreadyLoaded;

    // Save the index file containing the list of tabs to restore.
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    DataOutputStream stream = new DataOutputStream(output);
    stream.writeInt(SAVED_STATE_VERSION);
    stream.writeInt(numTabsTotal);
    stream.writeInt(incognitoCount);
    stream.writeInt(incognitoInfo.index);
    stream.writeInt(standardInfo.index + incognitoCount);
    Log.d(TAG, "Serializing tab lists; counts: " + standardCount + ", " + incognitoCount + ", "
            + (tabsBeingRestored == null ? 0 : tabsBeingRestored.size()));

    // Save incognito state first, so when we load, if the incognito files are unreadable
    // we can fall back easily onto the standard selected tab.
    for (int i = 0; i < incognitoCount; i++) {
        stream.writeInt(incognitoInfo.ids.get(i));
        stream.writeUTF(incognitoInfo.urls.get(i));
    }
    for (int i = 0; i < standardCount; i++) {
        stream.writeInt(standardInfo.ids.get(i));
        stream.writeUTF(standardInfo.urls.get(i));
    }

    // Write out information about the tabs that haven't finished being loaded.
    // We shouldn't have to worry about Tab duplication because the tab details are processed
    // only on the UI Thread.
    if (tabsBeingRestored != null) {
        for (TabRestoreDetails details : tabsBeingRestored) {
            stream.writeInt(details.id);
            stream.writeUTF(details.url);
        }
    }

    stream.close();
    return output.toByteArray();
}

From source file:com.ebay.erl.mobius.core.collection.BigTupleList.java

/**
 * Flushes {@link Tuple}s in {@link #buffer_in_memory} to disk. A new local
 * file is created by {@link #newLocalFile()} and its {@link File} reference
 * is stored in {@link #buffer_on_disk} for future use.
 */
private void flushToDisk() {
    this.flushing = true;
    File localFile;

    if (this.buffer_in_memory.size() == 0) {
        // no tuple in memory
        return;
    }
    long start = System.currentTimeMillis();
    long availableMemory = this.availableMemory();

    String message = Thread.currentThread().toString() + " BID[" + this._ID + "] "
            + "writing in-memory tuples (" + getNumberFormat().format(this.buffer_in_memory.size())
            + " entries) into disk, " + "available memory:" + availableMemory / _MB + "MB.";

    LOGGER.info(message);
    if (this.reporter != null) {
        this.reporter.setStatus(message);
        this.reporter.progress();
    }

    try {
        // check that we still have enough local disk space, to
        // prevent a disk-full exception.
        long freeDiskSpace = this.workOutput.getFreeSpace() / _MB;
        if (freeDiskSpace < 300) {
            // less than 300MB free space left, throw
            // exceptions
            throw new IOException("Not enough space left (" + freeDiskSpace + "MB remaining) on "
                    + this.workOutput.getAbsolutePath() + ".");
        }

        localFile = this.newLocalFile();
        DataOutputStream out = new DataOutputStream(
                new GZIPOutputStream(new BufferedOutputStream(new FileOutputStream(localFile))));

        // write the tuple schema in the header
        String[] tupleSchema = this.buffer_in_memory.get(0).getSchema();
        out.writeInt(tupleSchema.length);
        if (tupleSchema.length == 0)
            throw new IllegalArgumentException("Tuple with empty schema!");
        for (String aColumn : tupleSchema) {
            out.writeUTF(aColumn);
        }

        // write number of tuple in this file
        out.writeLong(this.buffer_in_memory.size());

        if (this.comparator != null) {
            // sort the Tuple in memory first
            Collections.sort(this.buffer_in_memory, this.comparator);
        }

        // write all the tuple in memory buffer
        long counts = 0L;
        for (Tuple aTuple : this.buffer_in_memory) {
            aTuple.write(out);
            counts++;
            if (counts % 5000 == 0 && this.reporter != null)// report every 5000 IO
                this.reporter.progress();
        }
        out.flush();
        out.close();

        // clear memory buffer
        this.buffer_in_memory.clear();

        long end = System.currentTimeMillis();

        LOGGER.info(Thread.currentThread().toString() + " BID[" + this._ID + "] " + "Write has completed, cost "
                + ((end - start) / 1000) + " seconds, " + "available memory:" + this.availableMemory() / _MB
                + "MB, " + "wrote to:" + localFile.getAbsolutePath() + "(size:"
                + localFile.getTotalSpace() / _MB + "MB) , " + "in memory tuples numbers:"
                + this.buffer_in_memory.size());

        this.flushing = false;
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

From source file:org.apache.jackrabbit.core.persistence.bundle.util.BundleBinding.java

/**
 * Serializes a <code>NodePropBundle</code> to a data output stream
 *
 * @param out the output stream
 * @param bundle the bundle to serialize
 * @throws IOException if an I/O error occurs.
 */
public void writeBundle(DataOutputStream out, NodePropBundle bundle) throws IOException {
    long size = out.size();

    // primaryType and version
    out.writeInt((VERSION_CURRENT << 24) | nsIndex.stringToIndex(bundle.getNodeTypeName().getNamespaceURI()));
    out.writeInt(nameIndex.stringToIndex(bundle.getNodeTypeName().getLocalName()));

    // parentUUID
    writeID(out, bundle.getParentId());

    // definitionId
    out.writeUTF("");

    // mixin types
    Iterator iter = bundle.getMixinTypeNames().iterator();
    while (iter.hasNext()) {
        writeIndexedQName(out, (Name) iter.next());
    }
    writeIndexedQName(out, null);

    // properties
    iter = bundle.getPropertyNames().iterator();
    while (iter.hasNext()) {
        Name pName = (Name) iter.next();
        // skip redundant primaryType, mixinTypes and uuid properties
        if (pName.equals(NameConstants.JCR_PRIMARYTYPE) || pName.equals(NameConstants.JCR_MIXINTYPES)
                || pName.equals(NameConstants.JCR_UUID)) {
            continue;
        }
        NodePropBundle.PropertyEntry pState = bundle.getPropertyEntry(pName);
        if (pState == null) {
            log.error("PropertyState missing in bundle: " + pName);
        } else {
            writeIndexedQName(out, pName);
            writeState(out, pState);
        }
    }
    writeIndexedQName(out, null);

    // write uuid flag
    out.writeBoolean(bundle.isReferenceable());

    // child nodes (list of uuid/name pairs)
    iter = bundle.getChildNodeEntries().iterator();
    while (iter.hasNext()) {
        NodePropBundle.ChildNodeEntry entry = (NodePropBundle.ChildNodeEntry) iter.next();
        writeID(out, entry.getId()); // uuid
        writeQName(out, entry.getName()); // name
    }
    writeID(out, null);

    // write mod count
    writeModCount(out, bundle.getModCount());

    // write shared set
    iter = bundle.getSharedSet().iterator();
    while (iter.hasNext()) {
        writeID(out, (NodeId) iter.next());
    }
    writeID(out, null);

    // set size of bundle
    bundle.setSize(out.size() - size);
}

From source file:org.apache.jackrabbit.core.persistence.bundle.util.BundleBinding.java

/**
 * Serializes a <code>NodePropBundle</code> to a data output stream
 *
 * @param out the output stream
 * @param bundle the bundle to serialize
 * @throws IOException if an I/O error occurs.
 */
public void writeBundle(DataOutputStream out, NodePropBundle bundle) throws IOException {
    long size = out.size();

    // primaryType and version
    out.writeInt((VERSION_CURRENT << 24) | nsIndex.stringToIndex(bundle.getNodeTypeName().getNamespaceURI()));
    out.writeInt(nameIndex.stringToIndex(bundle.getNodeTypeName().getLocalName()));

    // parentUUID
    writeID(out, bundle.getParentId());

    // definitionId
    out.writeUTF(bundle.getNodeDefId().toString());

    // mixin types
    for (Name name : bundle.getMixinTypeNames()) {
        writeIndexedQName(out, name);
    }
    writeIndexedQName(out, null);

    // properties
    for (Name pName : bundle.getPropertyNames()) {
        // skip redundant primaryType, mixinTypes and uuid properties
        if (pName.equals(NameConstants.JCR_PRIMARYTYPE) || pName.equals(NameConstants.JCR_MIXINTYPES)
                || pName.equals(NameConstants.JCR_UUID)) {
            continue;
        }
        NodePropBundle.PropertyEntry pState = bundle.getPropertyEntry(pName);
        if (pState == null) {
            log.error("PropertyState missing in bundle: " + pName);
        } else {
            writeIndexedQName(out, pName);
            writeState(out, pState);
        }
    }
    writeIndexedQName(out, null);

    // write uuid flag
    out.writeBoolean(bundle.isReferenceable());

    // child nodes (list of uuid/name pairs)
    for (NodePropBundle.ChildNodeEntry entry : bundle.getChildNodeEntries()) {
        writeID(out, entry.getId()); // uuid
        writeQName(out, entry.getName()); // name
    }
    writeID(out, null);

    // write mod count
    writeModCount(out, bundle.getModCount());

    // write shared set
    for (NodeId nodeId : bundle.getSharedSet()) {
        writeID(out, nodeId);
    }
    writeID(out, null);

    // set size of bundle
    bundle.setSize(out.size() - size);
}

From source file:org.sakaiproject.util.serialize.Type1BaseResourcePropertiesSerializer.java

/**
 * @see org.sakaiproject.entity.api.serialize.DataStreamEntitySerializer#serialize(org.sakaiproject.entity.api.serialize.SerializableEntity,
 *      java.io.DataOutputStream)
 */
public void serialize(SerializableEntity se, DataOutputStream ds) throws EntityParseException {
    if (!(se instanceof SerializablePropertiesAccess)) {
        throw new EntityParseException("Cant serialize " + se + " as it is not a SerializableProperties ");
    }
    SerializablePropertiesAccess sp = (SerializablePropertiesAccess) se;
    Map<String, Object> properties = sp.getSerializableProperties();
    try {
        ds.writeInt(TYPE1);
        ds.writeInt(BLOCK1);
        int ps = properties.keySet().size();
        for (Iterator<String> i = properties.keySet().iterator(); i.hasNext();) {
            if (i.next() == null) {
                ps--;
            }
        }
        ds.writeInt(ps);
        for (Entry<String, Object> entry : properties.entrySet()) {
            String key = entry.getKey();
            Object value = entry.getValue();
            if (value != null) {
                if (value instanceof String) {
                    ds.writeInt(BLOCK2);
                    ds.writeUTF(key);
                    ds.writeUTF((String) value);
                } else if (value instanceof List) {
                    ds.writeInt(BLOCK3);
                    ds.writeUTF(key);
                    List<?> l = (List<?>) value;
                    int s = l.size();
                    for (Iterator<?> il = l.iterator(); il.hasNext();) {
                        if (il.next() == null) {
                            s--;
                        }
                    }
                    ds.writeInt(s);
                    for (Iterator<?> il = l.iterator(); il.hasNext();) {
                        Object v = il.next();
                        if (v != null) {
                            if (v instanceof String) {
                                ds.writeUTF((String) v);
                            } else {
                                log.warn("Non String found in property list " + v);
                            }
                        }
                    }
                } else {
                    log.warn("Non String found in property " + value);
                }
            }
        }
    } catch (Exception ex) {
        throw new EntityParseException("Failed to serialize properties ", ex);
    }

}

From source file:org.bdval.cache.TableCache.java

/**
 * Given the specified parameters, save the table to the cache.
 *
 * @param splitId     The split id
 * @param splitType   The Split type
 * @param datasetName The dataset name
 * @param table       the Table to save.
 */
public void saveTableToCache(final int splitId, final String splitType, final String datasetName,
        final Table table) {
    DataOutputStream dataOutput = null;
    try {
        if (!checkTableConfiguration(table)) {
            return;
        }

        final int numColumns = table.getColumnNumber();

        final File cachedTableFile = getCachedTableFile(splitId, splitType, datasetName);
        dataOutput = new DataOutputStream(new FastBufferedOutputStream(new FileOutputStream(cachedTableFile)));

        // Write the number of columns
        dataOutput.writeInt(numColumns);
        LOG.info("Writing " + numColumns + " columns");
        int numWritten = 0;
        for (int i = 0; i < numColumns; i++) {
            // For each column write if it is a "d"ouble or "s"tring column
            final String id = table.getIdentifier(i);
            if (table.getType(i) == String.class) {
                dataOutput.writeUTF("s");
                dataOutput.writeUTF(id);
                final String[] stringColumnData = table.getStrings(id);
                final int numStrings = stringColumnData.length;
                dataOutput.writeInt(numStrings);
                for (final String stringColumnItem : stringColumnData) {
                    // Each String
                    dataOutput.writeUTF(stringColumnItem);
                }
                numWritten++;
            } else if (table.getType(i) == double.class) {
                dataOutput.writeUTF("d");
                dataOutput.writeUTF(id);
                final double[] doubleColumnData = table.getDoubles(id);
                final int numDoubles = doubleColumnData.length;
                dataOutput.writeInt(numDoubles);
                for (final double doubleColumnItem : doubleColumnData) {
                    // Each double
                    dataOutput.writeDouble(doubleColumnItem);
                }
                numWritten++;
            }
        }

        dataOutput.flush();

        LOG.info("Wrote " + numWritten + " columns");
        LOG.info("++ SAVED TABLE TO CACHE for split-id=" + splitId + ", split-type=" + splitType
                + ", dataset-name=" + datasetName);
    } catch (IOException e) {
        LOG.error("Cannot cache table. ", e);
    } catch (InvalidColumnException e) {
        LOG.error("Invalid table data", e);
    } finally {
        IOUtils.closeQuietly(dataOutput);
    }
}

From source file:org.hyperic.hq.agent.server.AgentDListProvider.java

private synchronized void flush(boolean toShutdown) throws AgentStorageException {
    if (shutdown.get() && !toShutdown) {
        return;
    }
    final long start = System.currentTimeMillis();
    BufferedOutputStream bOs = null;
    FileOutputStream fOs = null;
    DataOutputStream dOs = null;
    if (!keyValDirty.get()) {
        return;
    }
    Entry<EncVal, EncVal> curr = null;
    try {
        fOs = new FileOutputStream(keyValFile);
        bOs = new BufferedOutputStream(fOs);
        dOs = new DataOutputStream(bOs);
        synchronized (keyVals) {
            dOs.writeLong(keyVals.size());
            for (Entry<EncVal, EncVal> entry : keyVals.entrySet()) {
                curr = entry;
                String encKey = entry.getKey().getEnc();
                String encVal = entry.getValue().getEnc();
                dOs.writeUTF(encKey);
                dOs.writeUTF(encVal);
            }
        }
    } catch (UTFDataFormatException e) {
        if (curr != null) {
            log.error("error writing key=" + curr.getKey().getVal() + ", value=" + curr.getValue().getVal(), e);
        } else {
            log.error(e, e);
        }
    } catch (IOException e) {
        log.error("Error flushing data", e);
        AgentStorageException toThrow = new AgentStorageException("Error flushing data: " + e);
        toThrow.initCause(e);
        throw toThrow;
    } finally {
        close(dOs);
        close(bOs);
        // After successful write, clear dirty flag.
        keyValDirty.set(false);
        close(fOs);
    }

    // After successful flush, update backup copy
    try {
        synchronized (keyVals) {
            FileUtil.copyFile(this.keyValFile, this.keyValFileBackup);
        }
    } catch (FileNotFoundException e) {
        log.warn(e);
        log.debug(e, e);
    } catch (IOException e) {
        log.error("Error backing up keyvals", e);
        AgentStorageException toThrow = new AgentStorageException("Error backing up keyvals: " + e);
        toThrow.initCause(e);
        throw toThrow;
    }
    agentStatsCollector.addStat(System.currentTimeMillis() - start,
            AgentStatsCollector.DISK_LIST_KEYVALS_FLUSH_TIME);
}