List of usage examples for java.io.ObjectOutputStream.close()
public void close() throws IOException
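Before the project examples, a minimal sketch of the modern idiom: on Java 7+, try-with-resources calls close() automatically, so the explicit finally blocks and null checks seen below are only needed on older code bases. The Config type and save method here are illustrative only, not taken from any of the projects below.

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;

public class SaveExample {

    // Hypothetical payload type, for illustration only.
    static class Config implements Serializable {
        private static final long serialVersionUID = 1L;
        String name = "default";
    }

    // try-with-resources invokes close() even if writeObject throws;
    // close() also flushes and closes the underlying FileOutputStream.
    static void save(Config config, String path) throws IOException {
        try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(path))) {
            oos.writeObject(config);
        }
    }
}

Since close() flushes the stream and closes the underlying stream, the explicit flush() calls and separate close() calls on the wrapped stream in several examples below are defensive rather than required.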
From source file: com.openteach.diamond.network.waverider.master.MasterState.java

public ByteBuffer toByteBuffer() {
    ByteArrayOutputStream bout = null;
    ObjectOutputStream oout = null;
    try {
        bout = new ByteArrayOutputStream();
        oout = new ObjectOutputStream(bout);
        oout.writeObject(this);
        oout.flush();
        return ByteBuffer.wrap(bout.toByteArray());
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        try {
            if (oout != null) {
                oout.close();
            }
            if (bout != null) {
                bout.close();
            }
        } catch (IOException e) {
            logger.error(e);
        }
    }
}
From source file: models.TopicModel.java

public void saveObjectGraph() throws Exception {
    Ebean.beginTransaction();
    Configuration config = Play.application().configuration();
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(baos);
        oos.writeObject(malletTopicModel);
        oos.close();
        model = baos.toByteArray();

        baos = new ByteArrayOutputStream();
        oos = new ObjectOutputStream(baos);
        oos.writeObject(malletTopicModel.getInferencer());
        oos.close();
        inferencer = baos.toByteArray();

        ArrayList<Topic> topicList = new ArrayList<Topic>();

        // create topics
        Object[][] topicWords = malletTopicModel.getTopWords(config.getInt("smarts.topicModel.numTopWords"));
        for (int topicNum = 0; topicNum < this.numTopics; topicNum++) {
            StringBuilder wordList = new StringBuilder();
            Object[] words = topicWords[topicNum];
            for (Object w : words) {
                wordList.append(w);
                wordList.append(" ");
            }
            if (wordList.length() > 0) {
                // remove trailing space
                wordList.deleteCharAt(wordList.length() - 1);
            }
            Topic topic = new Topic(topicNum, wordList.toString());
            topics.add(topic);
            topicList.add(topic);
        }

        // create documents
        PancakeTopicInferencer inferencer = malletTopicModel.getInferencer();
        InstanceList docVectors = getDocumentVectors();
        // Only record the n most significant topics
        List<List> orderedDistributions = inferencer.inferSortedDistributions(docVectors,
                config.getInt("smarts.inference.numIterations"), config.getInt("smarts.inference.thinning"),
                config.getInt("smarts.inference.burnInPeriod"),
                Double.parseDouble(config.getString("smarts.inference.threshold")),
                config.getInt("smarts.inference.numSignificantFeatures"));
        for (int docIndex = 0; docIndex < orderedDistributions.size(); docIndex++) {
            List docData = orderedDistributions.get(docIndex);
            String docName = (String) docData.get(0);
            double[] docTopWeights = generateTopTopicWeightVector(docIndex, orderedDistributions);
            Document doc = new Document(docName, docTopWeights);
            documents.add(doc);
            getDocuments().add(doc);
        }

        Ebean.save(this);
        Ebean.save(topics);
        // loop to save so the save hook is called
        for (Document doc : documents) {
            doc.save();
        }
        Ebean.commitTransaction();
    } finally {
        Ebean.endTransaction();
    }
}
From source file: de.julielab.jcore.ae.lingpipegazetteer.chunking.ChunkerProviderImpl.java

private void serializeDictionary(File serializedDictionaryFile) throws FileNotFoundException, IOException {
    LOGGER.info("Storing dictionary to: {}", serializedDictionaryFile.getAbsolutePath());
    LOGGER.info(
            "Warning: Loading a serialized dictionary seems to take longer than just reading the original text entries");
    ObjectOutputStream oos = new ObjectOutputStream(
            new GZIPOutputStream(new FileOutputStream(serializedDictionaryFile)));
    dict.compileTo(oos);
    oos.close();
    LOGGER.info("{} bytes written.", serializedDictionaryFile.length());
}
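For completeness, a sketch of the read-back side for a dictionary written as above. It assumes the compiled form deserializes via readObject(), the usual LingPipe convention for objects written with compileTo(); the method name is hypothetical.

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.zip.GZIPInputStream;

// Mirror of serializeDictionary above: ungzip, then read the compiled object.
static Object loadCompiledDictionary(File serializedDictionaryFile)
        throws IOException, ClassNotFoundException {
    try (ObjectInputStream ois = new ObjectInputStream(
            new GZIPInputStream(new FileInputStream(serializedDictionaryFile)))) {
        return ois.readObject();
    }
}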
From source file: com.sec.ose.osi.thread.job.identify.data.IdentifyQueue.java

private boolean updateIdentifyQueuFile() {
    log.debug("updateIdentifyQueFile-start - num of item:" + size());
    FileOutputStream fos = null;
    try {
        File file = new File(identifyQueueFilePath);
        fos = new FileOutputStream(file);
        if (identifyDataQueue.size() > 0) {
            ObjectOutputStream oosWriter = new ObjectOutputStream(fos);
            for (IdentifyData tmpIdentifiedData : identifyDataQueue) {
                oosWriter.writeObject(tmpIdentifiedData);
            }
            oosWriter.flush();
            oosWriter.close();
        }
    } catch (IOException e) {
        log.warn(e);
    } finally {
        if (fos != null) {
            try {
                fos.close();
            } catch (Exception e) {
                log.debug(e);
            }
        }
        log.debug("updateIdentifyQueFile-end");
    }
    return true;
}
From source file: it.geosolutions.geobatch.actions.ds2ds.geoserver.DSGeoServerAction.java

/**
 * Deserializes datastore params. This method is used prior to the datastore
 * configuration using the geoserver-manager library.
 *
 * @param datastore
 * @return
 * @throws IOException
 * @throws ClassNotFoundException
 */
private Map<String, Object> deserialize(Map<String, Serializable> datastore)
        throws IOException, ClassNotFoundException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    ObjectOutputStream objOut = new ObjectOutputStream(out);
    objOut.writeObject(datastore);
    objOut.close();
    ObjectInputStream objIn = new ObjectInputStream(new ByteArrayInputStream(out.toByteArray()));
    Map<String, Object> connect = (Map<String, Object>) objIn.readObject();
    return connect;
}
From source file: MSUmpire.LCMSPeakStructure.LCMSPeakBase.java

private void JavaSerializationPeakClusterWrite() {
    try {
        Logger.getRootLogger().info("Writing PeakCluster serialization to file:"
                + FilenameUtils.getBaseName(ScanCollectionName) + "_PeakCluster.ser...");
        FileOutputStream fout = new FileOutputStream(
                FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName)
                        + "_Peak/" + FilenameUtils.getBaseName(ScanCollectionName) + "_PeakCluster.ser",
                false);
        ObjectOutputStream oos = new ObjectOutputStream(fout);
        oos.writeObject(PeakClusters);
        oos.close();
        fout.close();
    } catch (Exception ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
    }
}
From source file: com.adito.boot.AbstractPropertyClass.java

public void store() throws IOException {
    if (store != null) {
        throw new IllegalStateException("Already storing property class. Either restore or reset first.");
    }
    store = new ByteArrayOutputStream();
    ObjectOutputStream oos = new ObjectOutputStream(store);
    try {
        oos.writeObject(definitions);
        oos.writeObject(categories);
        oos.writeObject(categoryMap);
    } finally {
        oos.close();
    }
}
From source file: com.floreantpos.model.MenuItem.java

public MenuItem clone(MenuItem source) throws Exception {
    MenuItem menuItem = null;
    try {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ObjectOutputStream out = new ObjectOutputStream(bos);
        out.writeObject(source);
        out.flush();
        out.close();
        ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray()));
        menuItem = (MenuItem) in.readObject();
        in.close();
    } catch (Exception ex) {
        throw ex;
    }
    return menuItem;
}
From source file: edu.stanford.muse.datacache.BlobStore.java

public synchronized void pack() throws IOException {
    // write to a tmp file first, then rename -- we don't want to trash the existing
    // file if there is a disk full or disk error or something....
    String f = this.dir + File.separatorChar + META_DATA_FILENAME;
    String tmp = f + ".tmp";
    ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(tmp));
    pack_to_stream(oos);
    oos.close();
    File F = new File(f);
    if (F.exists()) {
        boolean b = F.delete();
        if (!b)
            log.warn("Failed to delete blobs metadata file");
    }
    boolean success = new File(tmp).renameTo(new File(f));
    if (!success)
        log.warn("Metadata rename failed... packing may be incomplete!");
    log.info("packed datastore: " + this);
}
From source file: com.chinamobile.bcbsp.fault.tools.GetRecord.java

/**
 * Synchronize the DirRecord to the disk file.
 *
 * @param dr
 *            DirRecord
 * @param localDirPath
 *            local file path on disk
 * @param hdfsNameNodeHostName
 *            hdfs namenode hostname
 */
private void syschronizeDirRecordWithDisk(DirRecord dr, String localDirPath, String hdfsNameNodeHostName) {
    ObjectOutputStream oos = null;
    File recordfile = new File(localDirPath);
    try {
        if (!recordfile.getParentFile().exists()) {
            recordfile.getParentFile().mkdirs();
        }
        oos = new ObjectOutputStream(new FileOutputStream(localDirPath));
        oos.writeObject(dr); // write the dr into recordPath;
        oos.close();
    } catch (FileNotFoundException e) {
        //LOG.error("[syschronizeDirRecordWithDisk]", e);
        throw new RuntimeException("[syschronizeDirRecordWithDisk]", e);
    } catch (IOException e) {
        //LOG.error("[syschronizeDirRecordWithDisk]", e);
        throw new RuntimeException("[syschronizeDirRecordWithDisk]", e);
    }
    HdfsOperater.uploadHdfs(recordfile.getAbsolutePath(), hdfsNameNodeHostName + recordfile.getAbsolutePath());
}