Example usage for java.io ObjectOutputStream flush

List of usage examples for java.io ObjectOutputStream flush

Introduction

On this page you can find example usage for java.io ObjectOutputStream flush.

Prototype

public void flush() throws IOException 

Document

Flushes the stream.
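
Before the project examples, here is a minimal self-contained sketch of the usual pattern (the class name FlushExample and the serialized String value are illustrative assumptions, not taken from the examples below): write a value with writeObject, call flush() so any buffered bytes reach the underlying ByteArrayOutputStream, then read the resulting byte array.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;

// Illustrative sketch only; class name and serialized value are examples,
// not taken from the source files listed below.
public class FlushExample {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(baos);
        oos.writeObject("hello");   // any Serializable value
        oos.flush();                // push buffered data into baos
        byte[] bytes = baos.toByteArray();
        oos.close();
        System.out.println(bytes.length + " bytes written");
    }
}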

Usage

From source file:org.activiti.webservice.WebServiceSendActivitiBehavior.java

public void execute(ActivityExecution execution) throws Exception {
    String endpointUrlValue = this.getStringFromField(this.endpointUrl, execution);
    String languageValue = this.getStringFromField(this.language, execution);
    String payloadExpressionValue = this.getStringFromField(this.payloadExpression, execution);
    String resultVariableValue = this.getStringFromField(this.resultVariable, execution);
    String usernameValue = this.getStringFromField(this.username, execution);
    String passwordValue = this.getStringFromField(this.password, execution);

    ScriptingEngines scriptingEngines = Context.getProcessEngineConfiguration().getScriptingEngines();
    Object payload = scriptingEngines.evaluate(payloadExpressionValue, languageValue, execution);

    if (endpointUrlValue.startsWith("vm:")) {
        LocalWebServiceClient client = this.getWebServiceContext().getClient();
        WebServiceMessage message = new DefaultWebServiceMessage(payload, this.getWebServiceContext());
        WebServiceMessage resultMessage = client.send(endpointUrlValue, message);
        Object result = resultMessage.getPayload();
        if (resultVariableValue != null) {
            execution.setVariable(resultVariableValue, result);
        }

    } else {

        HttpClientBuilder clientBuilder = HttpClientBuilder.create();

        if (usernameValue != null && passwordValue != null) {
            CredentialsProvider provider = new BasicCredentialsProvider();
            UsernamePasswordCredentials credentials = new UsernamePasswordCredentials(usernameValue,
                    passwordValue);
            provider.setCredentials(new AuthScope("localhost", -1, "webservice-realm"), credentials);
            clientBuilder.setDefaultCredentialsProvider(provider);
        }

        HttpClient client = clientBuilder.build();

        HttpPost request = new HttpPost(endpointUrlValue);

        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            ObjectOutputStream oos = new ObjectOutputStream(baos);
            oos.writeObject(payload);
            oos.flush();
            oos.close();

            request.setEntity(new ByteArrayEntity(baos.toByteArray()));

        } catch (Exception e) {
            throw new ActivitiException("Error setting message payload", e);
        }

        byte[] responseBytes = null;
        try {
            // execute the POST request
            HttpResponse response = client.execute(request);
            responseBytes = IOUtils.toByteArray(response.getEntity().getContent());

        } finally {
            // release any connection resources used by the method
            request.releaseConnection();
        }

        if (responseBytes != null) {
            try {
                ByteArrayInputStream in = new ByteArrayInputStream(responseBytes);
                ObjectInputStream is = new ObjectInputStream(in);
                Object result = is.readObject();
                if (resultVariableValue != null) {
                    execution.setVariable(resultVariableValue, result);
                }
            } catch (Exception e) {
                throw new ActivitiException("Failed to read response value", e);
            }
        }
    }

    this.leave(execution);
}

From source file:com.github.stephanarts.cas.ticket.registry.RegistryClient.java

/**
 * addTicket Method.
 *
 * @param ticket CAS Ticket object
 *
 * @throws JSONRPCException Throws JSONRPCException containing any error.
 */
public final void addTicket(final Ticket ticket) throws JSONRPCException {

    byte[] serializedTicket = {};
    JSONObject params = new JSONObject();

    /* Check if it's not null */
    if (ticket == null) {
        throw new JSONRPCException(-32501, "Could not encode Ticket");
    }

    try {
        ByteArrayOutputStream bo = new ByteArrayOutputStream();
        ObjectOutputStream so = new ObjectOutputStream(bo);
        so.writeObject(ticket);
        so.flush();
        serializedTicket = bo.toByteArray();
    } catch (final Exception e) {
        throw new JSONRPCException(-32501, "Could not encode Ticket");
    }

    params.put("ticket-id", ticket.getId());
    params.put("ticket", DatatypeConverter.printBase64Binary(serializedTicket));

    this.call("cas.addTicket", params);
}

From source file:org.atricore.idbus.idojos.serializedsessionstore.SerializedSessionStore.java

private void saveSerializedSessions() throws SSOSessionException {

    // If this is too slow, we may save session information only once a second or something like that ...
    try {

        FileOutputStream out = new FileOutputStream(getSerializedFile());
        ObjectOutputStream s = new ObjectOutputStream(out);

        synchronized (this) {
            s.writeObject(_sessions);
        }

        s.flush();
        s.close();
        out.close();
    } catch (IOException e) {
        throw new SSOSessionException(e.getMessage(), e);
    }
}

From source file:com.joliciel.talismane.machineLearning.linearsvm.LinearSVMModel.java

@Override
public void writeDataToStream(ZipOutputStream zos) {
    try {
        zos.putNextEntry(new ZipEntry("featureIndexMap.obj"));
        ObjectOutputStream out = new ObjectOutputStream(zos);

        try {
            out.writeObject(featureIndexMap);
        } finally {
            out.flush();
        }

        zos.flush();

        zos.putNextEntry(new ZipEntry("outcomes.obj"));
        out = new ObjectOutputStream(zos);
        try {
            out.writeObject(outcomes);
        } finally {
            out.flush();
        }

        zos.flush();
    } catch (IOException e) {
        LogUtils.logError(LOG, e);
        throw new RuntimeException(e);
    }

}

From source file:org.csc.phynixx.common.io.LogRecordWriter.java

/**
 * Write the object's class name so consistency can be checked if the restore fails.
 *
 * A null object is accepted.
 *
 * @param object serializable object; it has to fulfill the requirements of {@link java.io.ObjectOutputStream#writeObject(Object)}.
 * @return returns the fluent API
 * @throws IOException
 */
public LogRecordWriter writeObject(Object object) throws IOException {
    if (object == null) {
        return this.writeNullObject();
    }
    ObjectOutputStream out = null;
    try {
        ByteArrayOutputStream byteOutput = new ByteArrayOutputStream();
        out = new ObjectOutputStream(byteOutput);
        out.writeObject(object);
        out.flush();
        byte[] serBytes = byteOutput.toByteArray();
        io.writeInt(serBytes.length);
        io.write(serBytes);
    } finally {
        if (out != null) {
            IOUtils.closeQuietly(out);
        }
    }
    return this;
}

From source file:org.verdictdb.core.querying.QueryExecutionPlan.java

public QueryExecutionPlan deepcopy() {
    try {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ObjectOutputStream out = new ObjectOutputStream(bos);
        out.writeObject(this);
        out.flush();
        out.close();

        ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray()));
        return (QueryExecutionPlan) in.readObject();
    } catch (ClassNotFoundException | IOException e) {
        e.printStackTrace();
    }
    return null;
}

From source file:org.eclipse.thym.core.internal.util.BundleHttpCacheStorage.java

@Override
public void putEntry(String key, HttpCacheEntry entry) throws IOException {
    ByteArrayOutputStream byteArrayOS = null;
    ObjectOutputStream objectOut = null;

    try {
        File f = getCacheFile(key);
        byteArrayOS = new ByteArrayOutputStream();
        objectOut = new ObjectOutputStream(byteArrayOS);
        objectOut.writeObject(entry);
        objectOut.flush();
        FileUtils.writeByteArrayToFile(f, byteArrayOS.toByteArray());
    } finally {
        if (objectOut != null)
            objectOut.close();
        if (byteArrayOS != null)
            byteArrayOS.close();
    }
}

From source file:com.github.davidcarboni.encryptedfileupload.EncryptedFileItemSerializeTest.java

/**
 * Do serialization.
 */
private ByteArrayOutputStream serialize(Object target) throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ObjectOutputStream oos = new ObjectOutputStream(baos);
    oos.writeObject(target);
    oos.flush();
    oos.close();
    return baos;
}

From source file:org.sourceopen.hadoop.hbase.replication.consumer.FileChannelManager.java

/**
 * @throws IOException
 */
private void scanProducerFilesAndAddToZK() throws IOException {
    // s1. scanProducerFiles
    // <group,filename set>

    // ProtocolAdapter.listHead() / Head / ProtocolAdapter
    Map<String, ArrayList<String>> fstMap = new HashMap<String, ArrayList<String>>();
    Path targetPath = new Path(conf.get(HDFSFileAdapter.CONFKEY_HDFS_FS_ROOT)
            + conf.get(HDFSFileAdapter.CONFKEY_HDFS_FS_TARGETPATH));
    FileStatus[] fstList = fs.listStatus(targetPath);
    if (fstList == null || fstList.length < 1) {
        if (LOG.isWarnEnabled()) {
            LOG.warn("Can not find any target File at targetPath");
        }
        return;
    }
    for (FileStatus fst : fstList) {
        if (!fst.isDir()) {
            String fileName = fst.getPath().getName();
            ProtocolHead fileHead = HDFSFileAdapter.validataFileName(fileName);
            if (fileHead == null && LOG.isErrorEnabled()) {
                LOG.error("validataFileName fail. path: " + fst.getPath());
                continue;
            }
            String group = fileHead.getGroupName();
            ArrayList<String> ftsSet = fstMap.get(group);
            if (ftsSet == null) {
                ftsSet = new ArrayList<String>();
                fstMap.put(group, ftsSet);
            }
            ftsSet.add(fileName);
        }
    }
    // s2. update ZK
    if (MapUtils.isNotEmpty(fstMap)) {
        for (String group : fstMap.keySet()) {
            String groupRoot = root.getPath() + ConsumerConstants.FILE_SEPERATOR + group;
            String queue = groupRoot + ConsumerConstants.FILE_SEPERATOR + ConsumerConstants.ZK_QUEUE;
            int queueVer;
            try {
                Stat statZkRoot = zoo.exists(groupRoot, false);
                if (statZkRoot == null) {
                    zoo.create(groupRoot, null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
                }
                Stat statZkQueue = zoo.exists(queue, false);
                if (statZkQueue == null) {
                    zoo.create(queue, null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
                }
                statZkQueue = zoo.exists(queue, false);
                queueVer = statZkQueue.getVersion();
            } catch (Exception e) {
                LOG.error("Consumer create znode of group failed. Znode: " + groupRoot, e);
                continue;
            }
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            ObjectOutputStream oos = new ObjectOutputStream(baos);
            oos.writeObject(fstMap.get(group));
            oos.flush();
            try {
                zoo.setData(queue, baos.toByteArray(), queueVer);
            } catch (Exception e) {
                LOG.error("Consumer update znode of queue failed. Znode: " + queue, e);
            }
        }
    }
}

From source file:org.apache.storm.hive.security.AutoHiveNimbus.java

@SuppressWarnings("unchecked")
protected byte[] getHadoopCredentials(Map conf, final Configuration configuration,
        final String topologySubmitterUser) {
    try {
        if (UserGroupInformation.isSecurityEnabled()) {
            String hiveMetaStoreURI = getMetaStoreURI(configuration);
            String hiveMetaStorePrincipal = getMetaStorePrincipal(configuration);
            HiveConf hcatConf = createHiveConf(hiveMetaStoreURI, hiveMetaStorePrincipal);
            login(configuration);

            UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
            UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(topologySubmitterUser,
                    currentUser);
            try {
                Token<DelegationTokenIdentifier> delegationTokenId = getDelegationToken(hcatConf,
                        hiveMetaStorePrincipal, topologySubmitterUser);
                proxyUser.addToken(delegationTokenId);
                LOG.info("Obtained Hive tokens, adding to user credentials.");

                Credentials credential = proxyUser.getCredentials();
                ByteArrayOutputStream bao = new ByteArrayOutputStream();
                ObjectOutputStream out = new ObjectOutputStream(bao);
                credential.write(out);
                out.flush();
                out.close();
                return bao.toByteArray();
            } catch (Exception ex) {
                LOG.debug(" Exception" + ex.getMessage());
                throw ex;
            }
        } else {
            throw new RuntimeException("Security is not enabled for Hadoop");
        }
    } catch (Exception ex) {
        throw new RuntimeException("Failed to get delegation tokens.", ex);
    }
}