List of usage examples for java.io.ObjectOutputStream.flush()
public void flush() throws IOException
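Before the examples, here is a minimal sketch of the pattern they all share: write an object through an ObjectOutputStream and call flush() so any buffered serialized data reaches the underlying stream before its bytes are read or the stream is closed. The FlushExample class and toBytes helper are illustrative names only, not part of any project listed below.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;

public class FlushExample {

    /** Serializes any Serializable value and returns its bytes. */
    static byte[] toBytes(Serializable value) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(baos);
        oos.writeObject(value);
        oos.flush(); // push buffered serialized data into the underlying stream
        oos.close();
        return baos.toByteArray();
    }

    public static void main(String[] args) throws IOException {
        byte[] bytes = toBytes("hello");
        System.out.println("Serialized length: " + bytes.length);
    }
}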
From source file:org.quartz.impl.jdbcjobstore.StdJDBCDelegate.java
/**
 * <p>
 * Create a serialized <code>java.io.ByteArrayOutputStream</code>
 * version of an Object.
 * </p>
 *
 * @param obj
 *          the object to serialize
 * @return the serialized ByteArrayOutputStream
 * @throws IOException
 *           if serialization causes an error
 */
protected ByteArrayOutputStream serializeObject(Object obj) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    if (null != obj) {
        ObjectOutputStream out = new ObjectOutputStream(baos);
        out.writeObject(obj);
        out.flush();
    }
    return baos;
}
From source file:com.sentaroh.android.SMBSync2.ActivityMain.java
private void saveTaskData() {
    util.addDebugMsg(2, "I", "saveTaskData entered");
    if (!isTaskTermination) {
        if (!isTaskDataExisted() || mGp.msgListAdapter.resetDataChanged()) {
            ActivityDataHolder data = new ActivityDataHolder();
            data.ml = mGp.msgListAdapter.getMessageList();
            data.pl = mGp.syncTaskAdapter.getArrayList();
            try {
                FileOutputStream fos = openFileOutput(SMBSYNC_SERIALIZABLE_FILE_NAME, MODE_PRIVATE);
                BufferedOutputStream bos = new BufferedOutputStream(fos, 4096 * 256);
                ObjectOutputStream oos = new ObjectOutputStream(bos);
                oos.writeObject(data);
                oos.flush();
                oos.close();
                util.addDebugMsg(1, "I", "Task data was saved.");
            } catch (Exception e) {
                e.printStackTrace();
                util.addLogMsg("E", "saveTaskData error, " + e.toString());
                util.addLogMsg("E", "StackTrace element, " + printStackTraceElement(e.getStackTrace()));
            }
        }
    }
}
From source file:com.sentaroh.android.SMBSync.SMBSyncMain.java
private void saveTaskData() {
    util.addDebugLogMsg(2, "I", "saveRestartData entered");
    if (!isTaskTermination) {
        if (!isTaskDataExisted() || mGp.msgListAdapter.resetDataChanged()) {
            ActivityDataHolder data = new ActivityDataHolder();
            data.ml = mGp.msgListAdapter.getAllItem();
            data.pl = mGp.profileAdapter.getAllItem();
            try {
                FileOutputStream fos = openFileOutput(SMBSYNC_SERIALIZABLE_FILE_NAME, MODE_PRIVATE);
                BufferedOutputStream bos = new BufferedOutputStream(fos, 4096 * 256);
                ObjectOutputStream oos = new ObjectOutputStream(bos);
                oos.writeObject(data);
                oos.flush();
                oos.close();
                util.addDebugLogMsg(1, "I", "Restart data was saved.");
            } catch (Exception e) {
                e.printStackTrace();
                util.addDebugLogMsg(1, "E", "saveRestartData error, " + e.toString());
            }
        }
    }
}
From source file:com.hexidec.ekit.EkitCore.java
/**
 * Method for serializing the document out to a file
 */
public void serializeOut(HTMLDocument doc) throws IOException {
    File whatFile = getFileFromChooser(".", JFileChooser.SAVE_DIALOG, extsSer,
            Translatrix.getTranslationString("FiletypeSer"));
    if (whatFile != null) {
        ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(whatFile));
        oos.writeObject(doc);
        oos.flush();
        oos.close();
    }
    refreshOnUpdate();
}
From source file:com.izforge.izpack.compiler.packager.impl.Packager.java
/**
 * Write Packs to primary jar or each to a separate jar.
 */
protected void writePacks() throws Exception {
    final int num = packsList.size();
    sendMsg("Writing " + num + " Pack" + (num > 1 ? "s" : "") + " into installer");

    // Map to remember pack number and bytes offsets of back references
    Map<File, Object[]> storedFiles = new HashMap<File, Object[]>();

    // Pack200 files map
    Map<Integer, File> pack200Map = new HashMap<Integer, File>();
    int pack200Counter = 0;

    // Force UTF-8 encoding in order to have proper ZipEntry names.
    primaryJarStream.setEncoding("utf-8");

    // First write the serialized files and file metadata data for each pack
    // while counting bytes.

    int packNumber = 0;
    IXMLElement root = new XMLElementImpl("packs");

    for (PackInfo packInfo : packsList) {
        Pack pack = packInfo.getPack();
        pack.nbytes = 0;
        if ((pack.id == null) || (pack.id.length() == 0)) {
            pack.id = pack.name;
        }

        // create a pack specific jar if required
        // REFACTOR : Repare web installer
        // REFACTOR : Use a mergeManager for each packages that will be added to the main merger
        // if (packJarsSeparate) {
        // See installer.Unpacker#getPackAsStream for the counterpart
        // String name = baseFile.getName() + ".pack-" + pack.id + ".jar";
        // packStream = IoHelper.getJarOutputStream(name, baseFile.getParentFile());
        // }

        sendMsg("Writing Pack " + packNumber + ": " + pack.name, PackagerListener.MSG_VERBOSE);

        // Retrieve the correct output stream
        org.apache.tools.zip.ZipEntry entry = new org.apache.tools.zip.ZipEntry(
                RESOURCES_PATH + "packs/pack-" + pack.id);
        primaryJarStream.putNextEntry(entry);
        primaryJarStream.flush(); // flush before we start counting

        ByteCountingOutputStream dos = new ByteCountingOutputStream(outputStream);
        ObjectOutputStream objOut = new ObjectOutputStream(dos);

        // We write the actual pack files
        objOut.writeInt(packInfo.getPackFiles().size());

        for (PackFile packFile : packInfo.getPackFiles()) {
            boolean addFile = !pack.loose;
            boolean pack200 = false;
            File file = packInfo.getFile(packFile);

            if (file.getName().toLowerCase().endsWith(".jar") && info.isPack200Compression()
                    && isNotSignedJar(file)) {
                packFile.setPack200Jar(true);
                pack200 = true;
            }

            // use a back reference if file was in previous pack, and in
            // same jar
            Object[] info = storedFiles.get(file);
            if (info != null && !packJarsSeparate) {
                packFile.setPreviousPackFileRef((String) info[0], (Long) info[1]);
                addFile = false;
            }

            objOut.writeObject(packFile); // base info

            if (addFile && !packFile.isDirectory()) {
                long pos = dos.getByteCount(); // get the position

                if (pack200) {
                    /*
                     * Warning!
                     *
                     * Pack200 archives must be stored in separated streams, as the Pack200 unpacker
                     * reads the entire stream...
                     *
                     * See http://java.sun.com/javase/6/docs/api/java/util/jar/Pack200.Unpacker.html
                     */
                    pack200Map.put(pack200Counter, file);
                    objOut.writeInt(pack200Counter);
                    pack200Counter = pack200Counter + 1;
                } else {
                    FileInputStream inStream = new FileInputStream(file);
                    long bytesWritten = IoHelper.copyStream(inStream, objOut);
                    inStream.close();
                    if (bytesWritten != packFile.length()) {
                        throw new IOException("File size mismatch when reading " + file);
                    }
                }

                storedFiles.put(file, new Object[] { pack.id, pos });
            }

            // even if not written, it counts towards pack size
            pack.nbytes += packFile.size();
        }

        // Write out information about parsable files
        objOut.writeInt(packInfo.getParsables().size());
        for (ParsableFile parsableFile : packInfo.getParsables()) {
            objOut.writeObject(parsableFile);
        }

        // Write out information about executable files
        objOut.writeInt(packInfo.getExecutables().size());
        for (ExecutableFile executableFile : packInfo.getExecutables()) {
            objOut.writeObject(executableFile);
        }

        // Write out information about updatecheck files
        objOut.writeInt(packInfo.getUpdateChecks().size());
        for (UpdateCheck updateCheck : packInfo.getUpdateChecks()) {
            objOut.writeObject(updateCheck);
        }

        // Cleanup
        objOut.flush();
        if (!compressor.useStandardCompression()) {
            outputStream.close();
        }

        primaryJarStream.closeEntry();

        // close pack specific jar if required
        if (packJarsSeparate) {
            primaryJarStream.closeAlways();
        }

        IXMLElement child = new XMLElementImpl("pack", root);
        child.setAttribute("nbytes", Long.toString(pack.nbytes));
        child.setAttribute("name", pack.name);
        if (pack.id != null) {
            child.setAttribute("id", pack.id);
        }
        root.addChild(child);

        packNumber++;
    }

    // Now that we know sizes, write pack metadata to primary jar.
    primaryJarStream.putNextEntry(new org.apache.tools.zip.ZipEntry(RESOURCES_PATH + "packs.info"));
    ObjectOutputStream out = new ObjectOutputStream(primaryJarStream);
    out.writeInt(packsList.size());

    for (PackInfo packInfo : packsList) {
        out.writeObject(packInfo.getPack());
    }
    out.flush();
    primaryJarStream.closeEntry();

    // Pack200 files
    Pack200.Packer packer = createAgressivePack200Packer();
    for (Integer key : pack200Map.keySet()) {
        File file = pack200Map.get(key);
        primaryJarStream
                .putNextEntry(new org.apache.tools.zip.ZipEntry(RESOURCES_PATH + "packs/pack200-" + key));
        JarFile jar = new JarFile(file);
        packer.pack(jar, primaryJarStream);
        jar.close();
        primaryJarStream.closeEntry();
    }
}
From source file:bftsmart.tom.core.Synchronizer.java
private void startSynchronization(int nextReg) {

    boolean condition;
    ObjectOutputStream out = null;
    ByteArrayOutputStream bos = null;

    if (this.controller.getStaticConf().isBFT()) {
        condition = lcManager.getStopsSize(nextReg) > this.controller.getCurrentViewF();
    } else {
        condition = lcManager.getStopsSize(nextReg) > 0;
    }

    // Ask to start the synchronizations phase if enough messages have been received already
    if (condition && lcManager.getNextReg() == lcManager.getLastReg()) {

        Logger.println("(Synchronizer.startSynchronization) initialize synch phase");
        requestsTimer.Enabled(false);
        requestsTimer.stopTimer();

        lcManager.setNextReg(lcManager.getLastReg() + 1); // define next timestamp

        int regency = lcManager.getNextReg();

        // store information about message I am going to send
        lcManager.addStop(regency, this.controller.getStaticConf().getProcessId());

        //execManager.stop(); // stop execution of consensus

        //Get requests that timed out and the requests received in STOP messages
        //and add those STOPed requests to the client manager
        addSTOPedRequestsToClientManager();
        List<TOMMessage> messages = getRequestsToRelay();

        try { // serialize conent to send in the STOP message
            bos = new ByteArrayOutputStream();
            out = new ObjectOutputStream(bos);

            // Do I have messages to send in the STOP message?
            if (messages != null && messages.size() > 0) {

                //TODO: If this is null, there was no timeout nor STOP messages.
                //What shall be done then?
                out.writeBoolean(true);
                byte[] serialized = bb.makeBatch(messages, 0, 0, controller);
                out.writeObject(serialized);
            } else {
                out.writeBoolean(false);
                System.out.println(
                        "(Synchronizer.startSynchronization) Strange... did not include any request in my STOP message for regency "
                                + regency);
            }

            out.flush();
            bos.flush();

            byte[] payload = bos.toByteArray();
            out.close();
            bos.close();

            // send message STOP
            System.out.println("(Synchronizer.startSynchronization) sending STOP message to install regency "
                    + regency + " with " + (messages != null ? messages.size() : 0) + " request(s) to relay");

            LCMessage stop = new LCMessage(this.controller.getStaticConf().getProcessId(), TOMUtil.STOP,
                    regency, payload);
            requestsTimer.setSTOP(regency, stop); // make replica re-transmit the stop message until a new regency is installed
            communication.send(this.controller.getCurrentViewOtherAcceptors(), stop);

        } catch (IOException ex) {
            ex.printStackTrace();
            java.util.logging.Logger.getLogger(TOMLayer.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            try {
                out.close();
                bos.close();
            } catch (IOException ex) {
                ex.printStackTrace();
                java.util.logging.Logger.getLogger(TOMLayer.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }

    if (this.controller.getStaticConf().isBFT()) {
        condition = lcManager.getStopsSize(nextReg) > (2 * this.controller.getCurrentViewF());
    } else {
        condition = lcManager.getStopsSize(nextReg) > this.controller.getCurrentViewF();
    }

    // Did the synchronization phase really started?
    //if (lcManager.getStopsSize(nextReg) > this.reconfManager.getQuorum2F() && lcManager.getNextReg() > lcManager.getLastReg()) {
    if (condition && lcManager.getNextReg() > lcManager.getLastReg()) {

        if (!execManager.stopped())
            execManager.stop(); // stop consensus execution if more than f replicas sent a STOP message

        Logger.println("(Synchronizer.startSynchronization) installing regency " + lcManager.getNextReg());
        lcManager.setLastReg(lcManager.getNextReg()); // define last timestamp

        int regency = lcManager.getLastReg();

        // avoid memory leaks
        lcManager.removeStops(nextReg);
        lcManager.clearCurrentRequestTimedOut();
        lcManager.clearRequestsFromSTOP();

        requestsTimer.Enabled(true);
        requestsTimer.setShortTimeout(-1);
        requestsTimer.startTimer();

        //int leader = regency % this.reconfManager.getCurrentViewN(); // new leader
        int leader = lcManager.getNewLeader();
        int in = tom.getInExec(); // cid to execute
        int last = tom.getLastExec(); // last cid decided

        execManager.setNewLeader(leader);

        // If I am not the leader, I have to send a STOPDATA message to the elected leader
        if (leader != this.controller.getStaticConf().getProcessId()) {

            try { // serialize content of the STOPDATA message

                bos = new ByteArrayOutputStream();
                out = new ObjectOutputStream(bos);

                Consensus cons = null;

                // content of the last decided CID
                if (last > -1)
                    cons = execManager.getConsensus(last);

                //Do I have info on my last executed consensus?
                if (cons != null && cons.getDecisionEpoch() != null
                        && cons.getDecisionEpoch().propValue != null) {

                    out.writeBoolean(true);
                    out.writeInt(last);
                    //byte[] decision = exec.getLearner().getDecision();

                    byte[] decision = cons.getDecisionEpoch().propValue;
                    Set<ConsensusMessage> proof = cons.getDecisionEpoch().getProof();

                    out.writeObject(decision);
                    out.writeObject(proof);
                    // TODO: WILL BE NECESSARY TO ADD A PROOF!!!

                } else {
                    out.writeBoolean(false);

                    ////// THIS IS TO CATCH A BUG!!!!!
                    if (last > -1) {
                        System.out.println("[DEBUG INFO FOR LAST CID #1]");

                        if (cons == null) {
                            if (last > -1)
                                System.out.println("No consensus instance for cid " + last);

                        } else if (cons.getDecisionEpoch() == null) {
                            System.out.println("No decision epoch for cid " + last);
                        } else {
                            System.out.println(
                                    "epoch for cid: " + last + ": " + cons.getDecisionEpoch().toString());

                            if (cons.getDecisionEpoch().propValue == null) {
                                System.out.println("No propose for cid " + last);
                            } else {
                                System.out.println(
                                        "Propose hash for cid " + last + ": " + Base64.encodeBase64String(
                                                tom.computeHash(cons.getDecisionEpoch().propValue)));
                            }
                        }
                    }
                }

                if (in > -1) { // content of cid in execution

                    cons = execManager.getConsensus(in);

                    //cons.incEts(); // make the consensus advance to the next epoch
                    cons.setETS(regency); // make the consensus advance to the next epoch

                    //int ets = cons.getEts();
                    //cons.createEpoch(ets, controller);
                    cons.createEpoch(regency, controller);
                    //Logger.println("(Synchronizer.startSynchronization) incrementing ets of consensus " + cons.getId() + " to " + ets);
                    Logger.println("(Synchronizer.startSynchronization) incrementing ets of consensus "
                            + cons.getId() + " to " + regency);

                    TimestampValuePair quorumWrites;

                    if (cons.getQuorumWrites() != null) {
                        quorumWrites = cons.getQuorumWrites();
                    } else {
                        quorumWrites = new TimestampValuePair(0, new byte[0]);
                    }

                    HashSet<TimestampValuePair> writeSet = cons.getWriteSet();

                    //CollectData collect = new CollectData(this.controller.getStaticConf().getProcessId(), in, ets, quorumWrites, writeSet);
                    CollectData collect = new CollectData(this.controller.getStaticConf().getProcessId(), in,
                            regency, quorumWrites, writeSet);

                    SignedObject signedCollect = tom.sign(collect);

                    out.writeObject(signedCollect);

                } else {

                    cons = execManager.getConsensus(last + 1);

                    //cons.incEts(); // make the consensus advance to the next epoch
                    cons.setETS(regency); // make the consensus advance to the next epoch

                    //int ets = cons.getEts();
                    //cons.createEpoch(ets, controller);
                    cons.createEpoch(regency, controller);
                    //Logger.println("(Synchronizer.startSynchronization) incrementing ets of consensus " + cons.getId() + " to " + ets);
                    Logger.println("(Synchronizer.startSynchronization) incrementing ets of consensus "
                            + cons.getId() + " to " + regency);

                    //CollectData collect = new CollectData(this.controller.getStaticConf().getProcessId(), last + 1, ets, new TimestampValuePair(0, new byte[0]), new HashSet<TimestampValuePair>());
                    CollectData collect = new CollectData(this.controller.getStaticConf().getProcessId(),
                            last + 1, regency, new TimestampValuePair(0, new byte[0]),
                            new HashSet<TimestampValuePair>());

                    SignedObject signedCollect = tom.sign(collect);

                    out.writeObject(signedCollect);
                }

                out.flush();
                bos.flush();

                byte[] payload = bos.toByteArray();
                out.close();
                bos.close();

                int[] b = new int[1];
                b[0] = leader;

                System.out.println("(Synchronizer.startSynchronization) sending STOPDATA of regency " + regency);

                // send message SYNC to the new leader
                communication.send(b, new LCMessage(this.controller.getStaticConf().getProcessId(),
                        TOMUtil.STOPDATA, regency, payload));

                //TODO: Turn on timeout again?

            } catch (IOException ex) {
                ex.printStackTrace();
                java.util.logging.Logger.getLogger(TOMLayer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                try {
                    out.close();
                    bos.close();
                } catch (IOException ex) {
                    ex.printStackTrace();
                    java.util.logging.Logger.getLogger(TOMLayer.class.getName()).log(Level.SEVERE, null, ex);
                }
            }

            // the replica might have received a SYNC that was out of context at the time it was received, but now can be processed
            Set<LCMessage> sync = getOutOfContextLC(TOMUtil.SYNC, regency);

            Logger.println(
                    "(Synchronizer.startSynchronization) Checking if there are out of context SYNC for regency "
                            + regency);

            if (sync.size() > 0) {
                System.out.println(
                        "(Synchronizer.startSynchronization) Processing out of context SYNC for regency "
                                + regency);
            } else {
                Logger.println(
                        "(Synchronizer.startSynchronization) No out of context SYNC for regency " + regency);
            }

            for (LCMessage m : sync) {
                if (m.getSender() == execManager.getCurrentLeader()) {
                    processSYNC(m.getPayload(), regency);
                    return; // makes no sense to continue, since there is only one SYNC message
                }
            }

        } else { // If leader, I will store information that I would send in a SYNC message

            Logger.println("(Synchronizer.startSynchronization) I'm the leader for this new regency");
            CertifiedDecision lastDec = null;
            CollectData collect = null;

            Consensus cons = null;

            //Content of the last decided CID
            if (last > -1)
                cons = execManager.getConsensus(last);

            //Do I have info on my last executed consensus?
            if (cons != null && cons.getDecisionEpoch() != null
                    && cons.getDecisionEpoch().propValue != null) {

                //byte[] decision = exec.getLearner().getDecision();
                byte[] decision = cons.getDecisionEpoch().propValue;
                Set<ConsensusMessage> proof = cons.getDecisionEpoch().getProof();

                lastDec = new CertifiedDecision(this.controller.getStaticConf().getProcessId(), last, decision,
                        proof);
                // TODO: WILL BE NECESSARY TO ADD A PROOF!!!??

            } else {
                lastDec = new CertifiedDecision(this.controller.getStaticConf().getProcessId(), last, null,
                        null);

                ////// THIS IS TO CATCH A BUG!!!!!
                if (last > -1) {
                    System.out.println("[DEBUG INFO FOR LAST CID #2]");

                    if (cons == null) {
                        if (last > -1)
                            System.out.println("No consensus instance for cid " + last);

                    } else if (cons.getDecisionEpoch() == null) {
                        System.out.println("No decision epoch for cid " + last);
                    } else {
                        System.out.println("epoch for cid: " + last + ": " + cons.getDecisionEpoch().toString());
                    }
                    if (cons.getDecisionEpoch().propValue == null) {
                        System.out.println("No propose for cid " + last);
                    } else {
                        System.out.println("Propose hash for cid " + last + ": " + Base64
                                .encodeBase64String(tom.computeHash(cons.getDecisionEpoch().propValue)));
                    }
                }
            }

            lcManager.addLastCID(regency, lastDec);

            if (in > -1) { // content of cid being executed
                cons = execManager.getConsensus(in);

                //cons.incEts(); // make the consensus advance to the next epoch
                cons.setETS(regency); // make the consensus advance to the next epoch

                //int ets = cons.getEts();
                //cons.createEpoch(ets, controller);
                cons.createEpoch(regency, controller);
                //Logger.println("(Synchronizer.startSynchronization) incrementing ets of consensus " + cons.getId() + " to " + ets);
                Logger.println("(Synchronizer.startSynchronization) incrementing ets of consensus "
                        + cons.getId() + " to " + regency);

                TimestampValuePair quorumWrites;
                if (cons.getQuorumWrites() != null) {
                    quorumWrites = cons.getQuorumWrites();
                } else {
                    quorumWrites = new TimestampValuePair(0, new byte[0]);
                }

                HashSet<TimestampValuePair> writeSet = cons.getWriteSet();

                //collect = new CollectData(this.controller.getStaticConf().getProcessId(), in, ets, quorumWrites, writeSet);
                collect = new CollectData(this.controller.getStaticConf().getProcessId(), in, regency,
                        quorumWrites, writeSet);

            } else {

                cons = execManager.getConsensus(last + 1);

                //cons.incEts(); // make the consensus advance to the next epoch
                cons.setETS(regency); // make the consensus advance to the next epoch

                //int ets = cons.getEts();
                //cons.createEpoch(ets, controller);
                cons.createEpoch(regency, controller);
                //Logger.println("(Synchronizer.startSynchronization) incrementing ets of consensus " + cons.getId() + " to " + ets);
                Logger.println("(Synchronizer.startSynchronization) incrementing ets of consensus "
                        + cons.getId() + " to " + regency);

                //collect = new CollectData(this.controller.getStaticConf().getProcessId(), last + 1, ets, new TimestampValuePair(0, new byte[0]), new HashSet<TimestampValuePair>());
                collect = new CollectData(this.controller.getStaticConf().getProcessId(), last + 1, regency,
                        new TimestampValuePair(0, new byte[0]), new HashSet<TimestampValuePair>());
            }

            SignedObject signedCollect = tom.sign(collect);

            lcManager.addCollect(regency, signedCollect);

            // the replica might have received STOPDATAs that were out of context at the time they were received, but now can be processed
            Set<LCMessage> stopdatas = getOutOfContextLC(TOMUtil.STOPDATA, regency);

            Logger.println(
                    "(Synchronizer.startSynchronization) Checking if there are out of context STOPDATAs for regency "
                            + regency);

            if (stopdatas.size() > 0) {
                System.out.println("(Synchronizer.startSynchronization) Processing " + stopdatas.size()
                        + " out of context STOPDATAs for regency " + regency);
            } else {
                Logger.println("(Synchronizer.startSynchronization) No out of context STOPDATAs for regency "
                        + regency);
            }

            for (LCMessage m : stopdatas) {
                processSTOPDATA(m, regency);
            }
        }
    }
}
From source file:org.regenstrief.util.Util.java
/**
 * Makes a deep clone of an object using serialization. The object, including all of its
 * components, must be serializable.
 *
 * @param t the object to be cloned
 * @return a deep clone of the input object
 * @throws IOException if an I/O error occurs with the input stream or output stream
 * @throws ClassNotFoundException if the class of the input object cannot be found upon
 *             deserialization
 */
@SuppressWarnings("unchecked")
public static <T extends Serializable> T deepCopy(final T t) throws IOException, ClassNotFoundException {
    ObjectOutputStream objectOutputStream = null;
    ObjectInputStream objectInputStream = null;
    try {
        final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        objectOutputStream = new ObjectOutputStream(byteArrayOutputStream);
        objectOutputStream.writeObject(t);
        objectOutputStream.flush();
        objectInputStream = new ObjectInputStream(
                new ByteArrayInputStream(byteArrayOutputStream.toByteArray()));
        return (T) objectInputStream.readObject();
    } finally {
        IoUtil.close(objectOutputStream);
        IoUtil.close(objectInputStream);
    }
}
From source file:org.hyperledger.fabric.sdk.Channel.java
/**
 * Serialize channel to a byte array using Java serialization.
 * Deserialized channel will NOT be in an initialized state.
 *
 * @throws InvalidArgumentException
 * @throws IOException
 */
public byte[] serializeChannel() throws IOException, InvalidArgumentException {

    if (isShutdown()) {
        throw new InvalidArgumentException(format("Channel %s has been shutdown.", getName()));
    }

    ObjectOutputStream out = null;

    try {
        ByteArrayOutputStream bai = new ByteArrayOutputStream();
        out = new ObjectOutputStream(bai);
        out.writeObject(this);
        out.flush();
        return bai.toByteArray();

    } finally {
        if (null != out) {
            try {
                out.close();
            } catch (IOException e) {
                logger.error(e); // best effort.
            }
        }
    }
}
From source file:lasige.steeldb.jdbc.BFTRowSet.java
/**
 * Generates the bytes of the results of a query.
 * This method is used to create the hash of the results, to provide a way to
 * compare two queries results.
 *
 * @return the bytes of the vector with results from a query
 */
public byte[] getResultsBytes() {
    ByteArrayOutputStream bOut = new ByteArrayOutputStream();
    ObjectOutputStream obOut = null;
    try {
        obOut = new ObjectOutputStream(bOut);
        obOut.writeObject(rvh);
        obOut.flush();
        bOut.flush();
        obOut.close();
        bOut.close();
    } catch (IOException ex) {
        ex.printStackTrace();
        return null;
    }
    return bOut.toByteArray();
}