Usage examples for java.io.ObjectOutputStream#close():
public void close() throws IOException
From source file:fi.aalto.seqpig.io.BamStorer.java
@Override public void checkSchema(ResourceSchema s) throws IOException { selectedBAMAttributes = new HashMap<String, Integer>(); allBAMFieldNames = new HashMap<String, Integer>(); String[] fieldNames = s.fieldNames(); for (int i = 0; i < fieldNames.length; i++) { System.out.println("field: " + fieldNames[i]); allBAMFieldNames.put(fieldNames[i], new Integer(i)); if (fieldNames[i].equalsIgnoreCase("RG") || fieldNames[i].equalsIgnoreCase("LB") || fieldNames[i].equalsIgnoreCase("PU") || fieldNames[i].equalsIgnoreCase("PG") || fieldNames[i].equalsIgnoreCase("AS") || fieldNames[i].equalsIgnoreCase("SQ") || fieldNames[i].equalsIgnoreCase("MQ") || fieldNames[i].equalsIgnoreCase("NM") || fieldNames[i].equalsIgnoreCase("H0") || fieldNames[i].equalsIgnoreCase("H1") || fieldNames[i].equalsIgnoreCase("H2") || fieldNames[i].equalsIgnoreCase("UQ") || fieldNames[i].equalsIgnoreCase("PQ") || fieldNames[i].equalsIgnoreCase("NH") || fieldNames[i].equalsIgnoreCase("IH") || fieldNames[i].equalsIgnoreCase("HI") || fieldNames[i].equalsIgnoreCase("MD") || fieldNames[i].equalsIgnoreCase("CS") || fieldNames[i].equalsIgnoreCase("CQ") || fieldNames[i].equalsIgnoreCase("CM") || fieldNames[i].equalsIgnoreCase("R2") || fieldNames[i].equalsIgnoreCase("Q2") || fieldNames[i].equalsIgnoreCase("S2") || fieldNames[i].equalsIgnoreCase("CC") || fieldNames[i].equalsIgnoreCase("CP") || fieldNames[i].equalsIgnoreCase("SM") || fieldNames[i].equalsIgnoreCase("AM") || fieldNames[i].equalsIgnoreCase("MF") || fieldNames[i].equalsIgnoreCase("E2") || fieldNames[i].equalsIgnoreCase("U2") || fieldNames[i].equalsIgnoreCase("OQ")) { System.out.println("selected attribute: " + fieldNames[i] + " i: " + i); selectedBAMAttributes.put(fieldNames[i], new Integer(i)); }//from ww w. 
j av a 2 s .co m } if (!(allBAMFieldNames.containsKey("name") && allBAMFieldNames.containsKey("start") && allBAMFieldNames.containsKey("end") && allBAMFieldNames.containsKey("read") && allBAMFieldNames.containsKey("cigar") && allBAMFieldNames.containsKey("basequal") && allBAMFieldNames.containsKey("flags") && allBAMFieldNames.containsKey("insertsize") && allBAMFieldNames.containsKey("mapqual") && allBAMFieldNames.containsKey("matestart") && allBAMFieldNames.containsKey("materefindex") && allBAMFieldNames.containsKey("refindex"))) throw new IOException("Error: Incorrect BAM tuple-field name or compulsory field missing"); Base64 codec = new Base64(); Properties p = UDFContext.getUDFContext().getUDFProperties(this.getClass()); String datastr; ByteArrayOutputStream bstream = new ByteArrayOutputStream(); ObjectOutputStream ostream = new ObjectOutputStream(bstream); ostream.writeObject(selectedBAMAttributes); ostream.close(); datastr = codec.encodeBase64String(bstream.toByteArray()); p.setProperty("selectedBAMAttributes", datastr); bstream = new ByteArrayOutputStream(); ostream = new ObjectOutputStream(bstream); ostream.writeObject(allBAMFieldNames); ostream.close(); datastr = codec.encodeBase64String(bstream.toByteArray()); p.setProperty("allBAMFieldNames", datastr); }
From source file:fi.aalto.seqpig.io.SamStorer.java
public SamStorer(String samfileheaderfilename) { String str = ""; this.samfileheader = ""; try {// w w w.ja va 2 s. c o m Configuration conf = UDFContext.getUDFContext().getJobConf(); // see https://issues.apache.org/jira/browse/PIG-2576 if (conf == null || conf.get("mapred.task.id") == null) { // we are running on the frontend decodeSAMFileHeader(); return; } URI uri = new URI(samfileheaderfilename); FileSystem fs = FileSystem.get(uri, conf); BufferedReader in = new BufferedReader(new InputStreamReader(fs.open(new Path(samfileheaderfilename)))); while (true) { str = in.readLine(); if (str == null) break; else this.samfileheader += str + "\n"; } in.close(); } catch (Exception e) { System.out.println("ERROR: could not read SAM header from file " + samfileheaderfilename); System.out.println("exception was: " + e.toString()); } try { Base64 codec = new Base64(); Properties p = UDFContext.getUDFContext().getUDFProperties(this.getClass()); ByteArrayOutputStream bstream = new ByteArrayOutputStream(); ObjectOutputStream ostream = new ObjectOutputStream(bstream); ostream.writeObject(this.samfileheader); ostream.close(); String datastr = codec.encodeBase64String(bstream.toByteArray()); p.setProperty("samfileheader", datastr); } catch (Exception e) { System.out.println("ERROR: Unable to store SAMFileHeader in BamStorer!"); } this.samfileheader_decoded = getSAMFileHeader(); }
From source file:fi.aalto.seqpig.io.SamStorer.java
@Override public void checkSchema(ResourceSchema s) throws IOException { selectedSAMAttributes = new HashMap<String, Integer>(); allSAMFieldNames = new HashMap<String, Integer>(); String[] fieldNames = s.fieldNames(); for (int i = 0; i < fieldNames.length; i++) { System.out.println("field: " + fieldNames[i]); allSAMFieldNames.put(fieldNames[i], new Integer(i)); if (fieldNames[i].equalsIgnoreCase("RG") || fieldNames[i].equalsIgnoreCase("LB") || fieldNames[i].equalsIgnoreCase("PU") || fieldNames[i].equalsIgnoreCase("PG") || fieldNames[i].equalsIgnoreCase("AS") || fieldNames[i].equalsIgnoreCase("SQ") || fieldNames[i].equalsIgnoreCase("MQ") || fieldNames[i].equalsIgnoreCase("NM") || fieldNames[i].equalsIgnoreCase("H0") || fieldNames[i].equalsIgnoreCase("H1") || fieldNames[i].equalsIgnoreCase("H2") || fieldNames[i].equalsIgnoreCase("UQ") || fieldNames[i].equalsIgnoreCase("PQ") || fieldNames[i].equalsIgnoreCase("NH") || fieldNames[i].equalsIgnoreCase("IH") || fieldNames[i].equalsIgnoreCase("HI") || fieldNames[i].equalsIgnoreCase("MD") || fieldNames[i].equalsIgnoreCase("CS") || fieldNames[i].equalsIgnoreCase("CQ") || fieldNames[i].equalsIgnoreCase("CM") || fieldNames[i].equalsIgnoreCase("R2") || fieldNames[i].equalsIgnoreCase("Q2") || fieldNames[i].equalsIgnoreCase("S2") || fieldNames[i].equalsIgnoreCase("CC") || fieldNames[i].equalsIgnoreCase("CP") || fieldNames[i].equalsIgnoreCase("SM") || fieldNames[i].equalsIgnoreCase("AM") || fieldNames[i].equalsIgnoreCase("MF") || fieldNames[i].equalsIgnoreCase("E2") || fieldNames[i].equalsIgnoreCase("U2") || fieldNames[i].equalsIgnoreCase("OQ")) { System.out.println("selected attribute: " + fieldNames[i] + " i: " + i); selectedSAMAttributes.put(fieldNames[i], new Integer(i)); }//from ww w.ja v a 2 s. 
c om } if (!(allSAMFieldNames.containsKey("name") && allSAMFieldNames.containsKey("start") && allSAMFieldNames.containsKey("end") && allSAMFieldNames.containsKey("read") && allSAMFieldNames.containsKey("cigar") && allSAMFieldNames.containsKey("basequal") && allSAMFieldNames.containsKey("flags") && allSAMFieldNames.containsKey("insertsize") && allSAMFieldNames.containsKey("mapqual") && allSAMFieldNames.containsKey("matestart") && allSAMFieldNames.containsKey("materefindex") && allSAMFieldNames.containsKey("refindex"))) throw new IOException("Error: Incorrect SAM tuple-field name or compulsory field missing"); Base64 codec = new Base64(); Properties p = UDFContext.getUDFContext().getUDFProperties(this.getClass()); String datastr; ByteArrayOutputStream bstream = new ByteArrayOutputStream(); ObjectOutputStream ostream = new ObjectOutputStream(bstream); ostream.writeObject(selectedSAMAttributes); ostream.close(); datastr = codec.encodeBase64String(bstream.toByteArray()); p.setProperty("selectedSAMAttributes", datastr); bstream = new ByteArrayOutputStream(); ostream = new ObjectOutputStream(bstream); ostream.writeObject(allSAMFieldNames); ostream.close(); datastr = codec.encodeBase64String(bstream.toByteArray()); p.setProperty("allSAMFieldNames", datastr); }
From source file:com.mongodb.hadoop.mapred.input.MongoInputSplit.java
/**
 * Serializes this split to the given output.
 *
 * NOTE(review): the ObjectOutputStream constructor immediately writes a Java
 * serialization stream header into {@code out}, even though {@code objOut} is
 * never used for any writeObject call — all payload writes below go through
 * {@code out} directly. The paired read side must therefore consume that
 * header first; confirm against readFields() before removing {@code objOut},
 * as dropping it changes the wire format.
 *
 * @param out destination the split fields are written to
 * @throws IOException if any write fails
 */
public void write(DataOutput out) throws IOException {
    final ObjectOutputStream objOut = new ObjectOutputStream((OutputStream) out);
    // TODO - Use object outputstream instead of going to <-> from string?
    out.writeUTF(_mongoURI.toString());
    out.writeUTF(JSON.serialize(_querySpec));
    out.writeUTF(JSON.serialize(_fieldSpec));
    out.writeUTF(JSON.serialize(_sortSpec));
    out.writeInt(_limit);
    out.writeInt(_skip);
    objOut.close(); // flushes the stream header written by the constructor
}
From source file:com.all.dht.util.DhtFileUtils.java
/**
 * Serializes {@code object} to {@code routeTableFile} through a buffered,
 * encrypted (SecureOutputStream) file stream.
 *
 * @param object         the object to persist
 * @param routeTableFile destination file path
 * @throws IOException if the file cannot be opened or the write fails
 */
public void persistObjectToFile(Serializable object, String routeTableFile) throws IOException {
    ObjectOutputStream out = new ObjectOutputStream(
            new BufferedOutputStream(new SecureOutputStream(new FileOutputStream(routeTableFile))));
    try {
        out.writeObject(object);
        out.flush();
    } finally {
        out.close(); // close even if writeObject throws, so the file handle is released
    }
}
From source file:deepschema.ExtractingTool.java
/**
 * Reads the classes/instances maps from, or writes them to, a local cache file.
 *
 * @param action "read" to load the cache, "write" to store it
 *               (any other value is silently ignored)
 */
@SuppressWarnings("unchecked")
void cache(String action) {
    final String cacheFile = ".cache";
    try {
        switch (action) {
        case "read": {
            // try-with-resources closes the stream even if readObject throws.
            try (ObjectInputStream objectInputStream = new ObjectInputStream(
                    new FileInputStream(cacheFile))) {
                classes = (Map<Integer, WikidataClassProperties>) objectInputStream.readObject();
                instances = (Map<Integer, WikidataInstanceProperties>) objectInputStream.readObject();
            }
            // BUG FIX: a missing break previously fell through into "write",
            // rewriting the cache file after every read.
            break;
        }
        case "write": {
            try (ObjectOutputStream objectOutputStream = new ObjectOutputStream(
                    new FileOutputStream(cacheFile))) {
                objectOutputStream.writeObject(classes);
                objectOutputStream.writeObject(instances);
                objectOutputStream.flush();
            }
            break;
        }
        }
    } catch (ClassNotFoundException | IOException e) {
        System.err.println("Problem while reading/writing from/to cache.");
        e.printStackTrace();
    }
}
From source file:com.vmware.identity.openidconnect.sample.RelyingPartyInstaller.java
/**
 * Serializes {@code object} to the named file.
 *
 * @param file   destination file path
 * @param object the object to serialize
 * @throws IOException if the file cannot be created or the write fails
 */
private void writeObject(String file, Object object) throws IOException {
    FileOutputStream fos = null;
    ObjectOutputStream oos = null;
    try {
        fos = new FileOutputStream(file); // file is already a String; toString() was redundant
        oos = new ObjectOutputStream(fos);
        oos.writeObject(object);
    } finally {
        if (oos != null) {
            oos.close(); // closes the underlying fos as well
        } else if (fos != null) {
            // BUG FIX: fos leaked when the ObjectOutputStream constructor threw.
            fos.close();
        }
    }
}
From source file:org.eclipse.thym.core.internal.util.BundleHttpCacheStorage.java
@Override public void putEntry(String key, HttpCacheEntry entry) throws IOException { ByteArrayOutputStream byteArrayOS = null; ObjectOutputStream objectOut = null; try {// www .j av a2s . c o m File f = getCacheFile(key); byteArrayOS = new ByteArrayOutputStream(); objectOut = new ObjectOutputStream(byteArrayOS); objectOut.writeObject(entry); objectOut.flush(); FileUtils.writeByteArrayToFile(f, byteArrayOS.toByteArray()); } finally { if (objectOut != null) objectOut.close(); if (byteArrayOS != null) byteArrayOS.close(); } }
From source file:com.offbynull.coroutines.instrumenter.InstrumenterTest.java
/**
 * Verifies that an instrumented coroutine can be suspended, serialized with
 * Java serialization, deserialized in a fresh class loader, and resumed —
 * continuing exactly where the original left off.
 */
@Test
public void mustProperlySuspendWithSerialization() throws Exception {
    try (URLClassLoader classLoader = loadClassesInZipResourceAndInstrument(
            SERIALIZABLE_INVOKE_TEST + ".zip")) {
        Class<Coroutine> cls = (Class<Coroutine>) classLoader.loadClass(SERIALIZABLE_INVOKE_TEST);
        Coroutine coroutine = ConstructorUtils.invokeConstructor(cls, new StringBuilder());

        // Create and run original for a few cycles
        CoroutineRunner originalRunner = new CoroutineRunner(coroutine);

        Assert.assertTrue(originalRunner.execute());
        Assert.assertTrue(originalRunner.execute());
        Assert.assertTrue(originalRunner.execute());
        Assert.assertTrue(originalRunner.execute());
        Assert.assertTrue(originalRunner.execute());
        Assert.assertTrue(originalRunner.execute());

        // Serialize the suspended runner to a byte array
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(baos);
        oos.writeObject(originalRunner);
        oos.close();
        baos.close();
        byte[] serializedCoroutine = baos.toByteArray();

        // Deserialize; resolveClass falls back to the instrumenting class loader
        // because the coroutine classes are not on the test's own classpath.
        ByteArrayInputStream bais = new ByteArrayInputStream(serializedCoroutine);
        ObjectInputStream ois = new ObjectInputStream(bais) {
            @Override
            protected Class<?> resolveClass(ObjectStreamClass desc)
                    throws IOException, ClassNotFoundException {
                try {
                    return super.resolveClass(desc);
                } catch (ClassNotFoundException cnfe) {
                    return classLoader.loadClass(desc.getName());
                }
            }
        };
        CoroutineRunner deserializedRunner = (CoroutineRunner) ois.readObject();

        // Continue running deserialized
        Assert.assertTrue(deserializedRunner.execute());
        Assert.assertTrue(deserializedRunner.execute());
        Assert.assertTrue(deserializedRunner.execute());
        Assert.assertTrue(deserializedRunner.execute());
        Assert.assertFalse(deserializedRunner.execute()); // coroutine finished executing here
        Assert.assertTrue(deserializedRunner.execute());
        Assert.assertTrue(deserializedRunner.execute());
        Assert.assertTrue(deserializedRunner.execute());

        // Assert everything continued fine with deserialized version:
        // the builder must contain the original run's output plus the restart.
        Object deserializedCoroutine = FieldUtils.readField(deserializedRunner, "coroutine", true);
        StringBuilder deserializedBuilder = (StringBuilder) FieldUtils.readField(deserializedCoroutine,
                "builder", true);

        Assert.assertEquals("started\n" + "0\n" + "1\n" + "2\n" + "3\n" + "4\n" + "5\n" + "6\n" + "7\n"
                + "8\n" + "9\n" + "started\n" + "0\n" + "1\n" + "2\n", deserializedBuilder.toString());
    }
}
From source file:models.TopicModel.java
/**
 * Creates and trains a new topic model over the supplied CSV data.
 *
 * Rejects duplicate model names, converts the input rows (name, label, text)
 * into mallet instance vectors, keeps a serialized copy of the instance list
 * in {@code featureSequence}, then configures and trains the underlying
 * parallel topic model from application configuration.
 *
 * @param name       unique model name
 * @param numTopics  number of topics to fit
 * @param alpha      Dirichlet alpha hyperparameter
 * @param beta       Dirichlet beta hyperparameter
 * @param dataReader reader over CSV rows of the form "name,label,text"
 * @throws Exception if a model with this name already exists or training fails
 */
public TopicModel(String name, int numTopics, double alpha, double beta, Reader dataReader)
        throws Exception {
    this.name = name;
    this.alpha = alpha;
    this.beta = beta;
    this.numTopics = numTopics;

    TopicModel named_model = TopicModel.find.where().eq("name", name).findUnique();
    if (named_model != null) {
        // TODO: a better exception. Also, handle concurrency
        throw new Exception("A model of that name already exists");
    }

    // convert input to vectors
    Pipe instancePipe = getStandardPipes();
    InstanceList instances = new InstanceList(instancePipe);
    instances.addThruPipe(
            new CsvIterator(dataReader, Pattern.compile("^(\\S*)[\\s,]*(\\S*)[\\s,]*(.*)$"), 3, 2, 1));

    // Keep a serialized copy of the instance list so it can be persisted.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ObjectOutputStream oos = new ObjectOutputStream(baos);
    try {
        oos.writeObject(instances);
    } finally {
        oos.close(); // release the stream even if writeObject throws
    }
    this.featureSequence = baos.toByteArray();

    Configuration config = Play.application().configuration();

    // train model
    malletTopicModel = new PersistentParallelTopicModel(this.numTopics, this.alpha, this.beta);
    malletTopicModel.addInstances(instances);
    malletTopicModel.setNumIterations(config.getInt("smarts.topicModel.numIterations"));
    malletTopicModel.setOptimizeInterval(config.getInt("smarts.topicModel.optimizeIntervals"));
    malletTopicModel.setBurninPeriod(config.getInt("smarts.topicModel.burnInPeriod"));
    malletTopicModel.setSymmetricAlpha(config.getBoolean("smarts.topicModel.symmetricAlpha"));
    malletTopicModel.setNumThreads(config.getInt("smarts.topicModel.numThreads"));
    malletTopicModel.estimate();
}