List of usage examples for java.io.ObjectOutputStream.flush()
public void flush() throws IOException
From source file:org.photovault.imginfo.XMPConverter.java
/** Create XMP metadata based on given photo and image file @param f The image file, used to initialize media management information @param p The photo used to initialize descriptive fields @return An XMP metadata object initialized based on the given information @throws com.adobe.xmp.XMPException If an error occurs while creating the metadata/*from ww w . j av a2s . c o m*/ */ public XMPMeta getXMPMetadata(ImageFile f, PhotoInfo p) throws XMPException { XMPMeta meta = XMPMetaFactory.create(); XMPSchemaRegistry reg = XMPMetaFactory.getSchemaRegistry(); // Check for Photovault schemas if (reg.getNamespacePrefix(NS_PV) == null) { try { reg.registerNamespace(NS_PV, "pv"); } catch (XMPException e) { log.error("CMPException: " + e.getMessage()); } } if (reg.getNamespacePrefix(NS_PHOTOSHOP) == null) { try { reg.registerNamespace(NS_PHOTOSHOP, "photoshop"); } catch (XMPException e) { log.error("CMPException: " + e.getMessage()); } } byte[] data = null; try { URI ifileURI = new URI("uuid", f.getId().toString(), null); meta.setProperty(NS_MM, "InstanceID", ifileURI.toString()); meta.setProperty(NS_MM, "Manager", "Photovault 0.5.0dev"); meta.setProperty(NS_MM, "ManageTo", ifileURI.toString()); } catch (URISyntaxException ex) { log.error(ex); } CopyImageDescriptor firstImage = (CopyImageDescriptor) f.getImage("image#0"); OriginalImageDescriptor orig = firstImage.getOriginal(); String rrNS = reg.getNamespaceURI("stRef"); try { URI origURI = new URI("uuid", orig.getFile().getId().toString(), orig.getLocator()); meta.setStructField(NS_MM, "DerivedFrom", rrNS, "InstanceID", origURI.toString()); } catch (URISyntaxException ex) { log.error(ex); } meta.setStructField(NS_MM, "DerivedFrom", NS_PV, "Rotation", Double.toString(firstImage.getRotation())); Rectangle2D cropArea = firstImage.getCropArea(); meta.setStructField(NS_MM, "DerivedFrom", NS_PV, "XMin", Double.toString(cropArea.getMinX())); meta.setStructField(NS_MM, "DerivedFrom", NS_PV, "XMax", 
Double.toString(cropArea.getMaxX())); meta.setStructField(NS_MM, "DerivedFrom", NS_PV, "YMin", Double.toString(cropArea.getMinY())); meta.setStructField(NS_MM, "DerivedFrom", NS_PV, "YMax", Double.toString(cropArea.getMaxY())); ChannelMapOperation cm = firstImage.getColorChannelMapping(); if (cm != null) { try { ByteArrayOutputStream cms = new ByteArrayOutputStream(); ObjectOutputStream cmos = new ObjectOutputStream(cms); cmos.writeObject(cm); String cmBase64 = Base64.encodeBytes(cms.toByteArray(), Base64.GZIP | Base64.DONT_BREAK_LINES); meta.setStructField(NS_MM, "DerivedFrom", NS_PV, "ChannelMap", cmBase64); } catch (IOException e) { log.error("Error serializing channel map", e); } } RawConversionSettings rs = firstImage.getRawSettings(); if (rs != null) { try { ByteArrayOutputStream rss = new ByteArrayOutputStream(); ObjectOutputStream rsos = new ObjectOutputStream(rss); rsos.writeObject(rs); String rsBase64 = Base64.encodeBytes(rss.toByteArray(), Base64.GZIP | Base64.DONT_BREAK_LINES); meta.setStructField(NS_MM, "DerivedFrom", NS_PV, "RawConversion", rsBase64); } catch (IOException e) { log.error("Error serializing raw settings", e); } } /* Set the image metadata based the photo we are creating this copy. There may be other photos associated with the origial image file, so we should store information about these in some proprietary part of metadata. 
*/ meta.appendArrayItem(NS_DC, "creator", new PropertyOptions().setArrayOrdered(true), p.getPhotographer(), null); meta.setProperty(NS_DC, "description", p.getDescription()); double fstop = p.getFStop(); if (fstop > 0.0) { String aperture = floatToRational(p.getFStop()); meta.setProperty(NS_EXIF, "ApertureValue", aperture); meta.setProperty(NS_EXIF, "FNumber", aperture); } // String film = photo.getFilm(); int isoSpeed = p.getFilmSpeed(); if (isoSpeed > 0) { meta.appendArrayItem(NS_EXIF, "ISOSpeedRatings", new PropertyOptions().setArrayOrdered(true), String.valueOf(isoSpeed), null); } double focalLength = p.getFocalLength(); if (focalLength > 0.0) { meta.setProperty(NS_EXIF, "FocalLength", floatToRational(focalLength)); } int quality = p.getQuality(); meta.setPropertyDouble(NS_XMP_BASIC, "Rating", (double) quality); /* XMP location needs to be formal hierachical place, so we will store * this as a keyword. */ PropertyOptions subjectOptions = new PropertyOptions(PropertyOptions.ARRAY); String shootingPlace = p.getShootingPlace(); if (shootingPlace != null) { meta.appendArrayItem(NS_DC, "subject", subjectOptions, shootingPlace, null); } for (Tag tag : p.getTags()) { meta.appendArrayItem(NS_DC, "subject", subjectOptions, tag.getName(), null); } double expTime = p.getShutterSpeed(); if (expTime > 0.0) { String shutterSpeed = expTimeAsRational(expTime); meta.setProperty(NS_EXIF, "ExposureTme", shutterSpeed); } // photo.getTechNotes(); Date shootDate = p.getShootTime(); if (shootDate != null) { DateFormat dfmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); String xmpShootDate = dfmt.format(shootDate); meta.setProperty(NS_XMP_BASIC, "CreateDate", xmpShootDate); meta.setProperty(NS_PHOTOSHOP, "DateCreated", xmpShootDate); } // Save technical data meta.setProperty(NS_TIFF, "Model", p.getCamera()); meta.setProperty(NS_EXIF_AUX, "Lens", p.getLens()); // TODO: add other photo attributes as well // Add names of the folders the photo belongs to as keywords for 
(PhotoFolder folder : p.getFolders()) { if (folder.getExternalDir() == null) { meta.appendArrayItem(NS_DC, "subject", subjectOptions, folder.getName(), null); } } // Save the history of the image ObjectHistory<PhotoInfo> h = p.getHistory(); ObjectHistoryDTO<PhotoInfo> hdto = new ObjectHistoryDTO<PhotoInfo>(h); ByteArrayOutputStream histStream = new ByteArrayOutputStream(); try { ObjectOutputStream histoStream = new ObjectOutputStream(histStream); histoStream.writeObject(hdto); histoStream.flush(); histStream.flush(); byte histData[] = histStream.toByteArray(); String histBase64 = Base64.encodeBytes(histData, Base64.GZIP | Base64.DONT_BREAK_LINES); meta.setProperty(NS_PV, "History", histBase64); } catch (IOException e) { log.warn("Error serializing history", e); } return meta; }
From source file:net.sf.infrared.collector.impl.persistence.ApplicationStatisticsDaoImpl.java
/**
 * Serializes an object into an in-memory byte stream.
 *
 * @param obj the object to serialize; if {@code null}, an empty stream is returned
 * @return a stream holding the Java-serialized form of {@code obj}
 *         (empty when {@code obj} is {@code null})
 * @throws IOException if the object cannot be serialized
 */
private ByteArrayOutputStream serializeObject(Object obj) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    if (null != obj) {
        // try-with-resources closes (and therefore flushes) the stream even if
        // writeObject throws; the original never closed it.
        try (ObjectOutputStream out = new ObjectOutputStream(baos)) {
            out.writeObject(obj);
        }
    }
    return baos;
}
From source file:calendarioSeries.vistas.MainViewController.java
/**
 * Persists the user's series list to disk: a JSON summary in
 * "seriesUsuario.json" mapping each series id to [title, last-watched
 * episode], plus the fully serialized {@code series} list in "data.db".
 */
private void rellenarArchivo() {
    try {
        // Build the JSON summary first, then write it.
        JSONObject array = new JSONObject();
        for (Serie serie : series) {
            JSONArray auxi = new JSONArray();
            auxi.put(0, serie.getTitulo());
            auxi.put(1, serie.getLastVisto());
            array.put(serie.getId(), auxi);
        }
        File file = new File("seriesUsuario.json");
        // try-with-resources: the PrintWriter was previously never closed (leak).
        try (PrintWriter pw = new PrintWriter(file)) {
            pw.println(array.toString());
            pw.flush();
        }
        // Serialize the full series list; closing oos also closes fos, and the
        // original leaked fos when the ObjectOutputStream constructor threw.
        File fileSeries = new File("data.db");
        try (FileOutputStream fos = new FileOutputStream(fileSeries);
                ObjectOutputStream oos = new ObjectOutputStream(fos)) {
            oos.writeObject(series);
            oos.flush();
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }
}
From source file:org.wso2.carbon.identity.sts.store.dao.DBStsDAO.java
/**
 * Serializes the given security token into its binary representation.
 *
 * @param token the token to serialize
 * @return the Java-serialized bytes of the token
 * @throws IOException if the token cannot be serialized
 */
private byte[] getTokenContent(Token token) throws IOException {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    // try-with-resources guarantees the object stream is closed on every path,
    // matching the original finally-block behavior.
    try (ObjectOutputStream objectStream = new ObjectOutputStream(buffer)) {
        objectStream.writeObject(token);
        objectStream.flush();
        return buffer.toByteArray();
    }
}
From source file:cai.flow.packets.V5_Packet.java
/**
 * Parses a raw NetFlow v5 export packet into a vector of {@link V5_Flow}s.
 *
 * @param RouterIP the exporting router's address, attached to every flow
 * @param buf the raw UDP packet bytes
 * @param len number of valid bytes in {@code buf}
 * @throws DoneException if the packet is truncated or its length does not
 *         match the flow count advertised in the header
 */
@SuppressWarnings("unchecked")
public V5_Packet(String RouterIP, byte[] buf, int len) throws DoneException {
    // Debug-only capture/replay of a raw packet to cache.tmp; permanently
    // disabled (the condition was Params.DEBUG before being hard-coded false).
    if (false) {//(Params.DEBUG) {
        //
        File tmpFile = new File(Params.path + File.separator + "cache.tmp");
        if (tmpFile.exists()) {
            // Replay: overwrite buf/len with a previously captured packet.
            try {
                ObjectInputStream fIn = new ObjectInputStream(new FileInputStream(tmpFile));
                try {
                    buf = (byte[]) fIn.readObject();
                    len = ((Integer) fIn.readObject()).intValue();
                } catch (ClassNotFoundException e) {
                    e.printStackTrace();
                }
                fIn.close();
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            }
        } else {
            // Capture: save this packet for later replay.
            try {
                ObjectOutputStream fOut;
                fOut = new ObjectOutputStream(new FileOutputStream(tmpFile));
                fOut.writeObject(buf);
                fOut.writeObject(new Integer(len));
                fOut.flush();
                fOut.close();
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            } catch (IOException e1) {
                e1.printStackTrace();
            }
        }
        // }
    }
    if (len < V5_Header_Size)
        throw new DoneException(" * incomplete header *");
    this.RouterIP = RouterIP;
    // Header field offsets follow the NetFlow v5 layout: count at byte 2,
    // uptime at 4, epoch seconds at 8, nanoseconds at 12, sequence at 16.
    count = Util.to_number(buf, 2, 2);
    // Packet length must be exactly header + count fixed-size flow records.
    if (count <= 0 || len != V5_Header_Size + count * V5_Flow_Size)
        throw new DoneException(" * corrupted packet " + len + "/" + count + "/"
                + (V5_Header_Size + count * V5_Flow_Size) + " *");
    SysUptime = Util.to_number(buf, 4, 4);
    unix_secs = Util.to_number(buf, 8, 4);
    unix_nsecs = Util.to_number(buf, 12, 4);
    flow_sequence = Util.to_number(buf, 16, 4);
    engine_type = buf[20];
    engine_id = buf[21];
    logger.debug(" uptime: " + Util.uptime(SysUptime / 1000) + ", date: " + unix_secs + "."
            + unix_nsecs);
    logger.debug(" sequence: " + flow_sequence + ", count: " + count + ", engine: "
            + engine_type + "/" + engine_id);
    flows = new Vector((int) count);
    // Walk the flow records; p is the byte offset of the current record.
    for (int i = 0, p = V5_Header_Size; i < count; i++, p += V5_Flow_Size) {
        V5_Flow f;
        try {
            f = new V5_Flow(RouterIP, buf, p);
            if (Params.DEBUG) {
                // Cross-check parsing against the template-based algorithm.
                if (!f.equals(
                        new V5_Flow(RouterIP, buf, p, TemplateManager.getTemplateManager().getV5Template()))) {
                    logger.error("ERROR: Data inconsistency with different algorithm");
                }
            }
            // Keep only flows with both endpoints resolved.
            if (f.srcaddr != null && f.dstaddr != null) {
                flows.add(f);
            } else {
                if (Params.DEBUG) {
                    logger.error(f.srcaddr + " " + f.dstaddr + " ");
                }
            }
        } catch (DoneException e) {
            // A bad record is skipped; the rest of the packet is still parsed.
            if (Params.DEBUG) {
                logger.debug("", e);
            }
            if (e.getMessage() != null && (!e.getMessage().equals(""))) {
                logger.debug("", e);
            }
        }
    }
}
From source file:au.org.ala.layers.dao.UserDataDAOImpl.java
private boolean set(String header_id, String ref, String data_type, Object o) { Long id = Long.parseLong(header_id.split(":")[0]); String facet_id = (header_id.contains(":")) ? " " + header_id.split(":")[1] : ""; try {// w ww . j ava 2s . c o m ByteArrayOutputStream bytes = new ByteArrayOutputStream(); ObjectOutputStream obj = new ObjectOutputStream(bytes); obj.writeObject(o); obj.flush(); try { String sql_delete = "DELETE FROM ud_data_x WHERE ud_header_id = ? AND ref = ? AND data_type = ?;"; int deleted = jdbcTemplate.update(sql_delete, new Object[] { id, ref + facet_id, data_type }); String sql_insert = "INSERT INTO ud_data_x (ud_header_id,ref,data_type, data) " + " VALUES ( ?, ?, ?, ?);"; int inserted = jdbcTemplate.update(sql_insert, new Object[] { id, ref + facet_id, data_type, bytes.toByteArray() }); return inserted > 0; } catch (Exception e) { logger.error("failed to set ud_data_x for " + header_id + ", " + ref, e); } } catch (Exception e) { logger.error("failed to write bytes for: " + header_id + ", " + ref, e); } return false; }
From source file:org.yes.cart.web.support.util.cookie.impl.CookieTuplizerImpl.java
/**
 * {@inheritDoc}
 *
 * Serializes the object through an encrypting, base64-encoding stream chain
 * (object -> DES cipher -> base64 -> byte buffer) and packs the result into
 * cookies. Synchronized on the cipher because Cipher instances are not
 * thread-safe.
 */
public Cookie[] toCookies(final Cookie[] oldCookies, final Serializable serializable)
        throws UnableToCookielizeObjectException {
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    synchronized (desCipher) {
        BASE64EncoderStream base64EncoderStream = new BASE64EncoderStream(byteArrayOutputStream,
                Integer.MAX_VALUE); // will be split manually into cookie-sized chunks
        CipherOutputStream cipherOutputStream = new CipherOutputStream(base64EncoderStream, desCipher);
        ObjectOutputStream objectOutputStream = null;
        try {
            objectOutputStream = new ObjectOutputStream(cipherOutputStream);
            objectOutputStream.writeObject(serializable);
            objectOutputStream.flush();
            // Close here so the cipher finalizes its last block before the
            // buffer is read below; the second close in finally is a no-op.
            objectOutputStream.close();
        } catch (Throwable ioe) {
            ShopCodeContext.getLog(this)
                    .error(MessageFormat.format("Unable to serialize object {0}", serializable), ioe);
            throw new UnableToCookielizeObjectException(ioe);
        } finally {
            // Close the whole chain; objectOutputStream may be null if its
            // constructor threw, so the downstream streams are closed explicitly.
            try {
                if (objectOutputStream != null) {
                    objectOutputStream.close();
                }
                cipherOutputStream.close();
                base64EncoderStream.close();
                byteArrayOutputStream.close();
            } catch (IOException e) {
                ShopCodeContext.getLog(this).error("Can not close stream", e);
            }
        }
    }
    return assembleCookiesForObject(oldCookies, split(byteArrayOutputStream.toString()), serializable);
}
From source file:com.joliciel.talismane.extensions.corpus.CorpusStatistics.java
/**
 * Called when the corpus has been fully parsed: writes all accumulated
 * statistics as CSV rows to {@code writer} (if set) and serializes this
 * statistics object into a zip file (if {@code serializationFile} is set).
 * Wraps any IOException in a RuntimeException after logging it.
 */
@Override
public void onCompleteParse() {
    try {
        if (writer != null) {
            // Fraction of corpus words absent from the reference lexicon;
            // defaults to 1 (100% unknown) when no reference is available.
            double unknownLexiconPercent = 1;
            if (referenceWords != null) {
                int unknownLexiconCount = 0;
                for (String word : words) {
                    if (!referenceWords.contains(word))
                        unknownLexiconCount++;
                }
                unknownLexiconPercent = (double) unknownLexiconCount / (double) words.size();
            }
            // Same measure computed on the lowercased lexicon.
            double unknownLowercaseLexiconPercent = 1;
            if (referenceLowercaseWords != null) {
                int unknownLowercaseLexiconCount = 0;
                for (String lowercase : lowerCaseWords) {
                    if (!referenceLowercaseWords.contains(lowercase))
                        unknownLowercaseLexiconCount++;
                }
                unknownLowercaseLexiconPercent = (double) unknownLowercaseLexiconCount
                        / (double) lowerCaseWords.size();
            }
            // One CSV row per statistic: name, value(s).
            writer.write(CSV.format("sentenceCount") + CSV.format(sentenceCount) + "\n");
            writer.write(CSV.format("sentenceLengthMean") + CSV.format(sentenceLengthStats.getMean()) + "\n");
            writer.write(CSV.format("sentenceLengthStdDev")
                    + CSV.format(sentenceLengthStats.getStandardDeviation()) + "\n");
            writer.write(CSV.format("tokenLexiconSize") + CSV.format(words.size()) + "\n");
            writer.write(CSV.format("tokenLexiconUnknown") + CSV.format(unknownLexiconPercent * 100.0) + "\n");
            writer.write(CSV.format("tokenCount") + CSV.format(tokenCount) + "\n");
            double unknownTokenPercent = ((double) unknownTokenCount / (double) tokenCount) * 100.0;
            writer.write(CSV.format("tokenUnknown") + CSV.format(unknownTokenPercent) + "\n");
            writer.write(CSV.format("lowercaseLexiconSize") + CSV.format(lowerCaseWords.size()) + "\n");
            writer.write(CSV.format("lowercaseLexiconUnknown")
                    + CSV.format(unknownLowercaseLexiconPercent * 100.0) + "\n");
            writer.write(CSV.format("alphanumericCount") + CSV.format(alphanumericCount) + "\n");
            double unknownAlphanumericPercent = ((double) unknownAlphanumericCount
                    / (double) alphanumericCount) * 100.0;
            writer.write(CSV.format("alphanumericUnknown") + CSV.format(unknownAlphanumericPercent) + "\n");
            writer.write(CSV.format("syntaxDepthMean") + CSV.format(syntaxDepthStats.getMean()) + "\n");
            writer.write(CSV.format("syntaxDepthStdDev")
                    + CSV.format(syntaxDepthStats.getStandardDeviation()) + "\n");
            writer.write(CSV.format("maxSyntaxDepthMean") + CSV.format(maxSyntaxDepthStats.getMean()) + "\n");
            writer.write(CSV.format("maxSyntaxDepthStdDev")
                    + CSV.format(maxSyntaxDepthStats.getStandardDeviation()) + "\n");
            writer.write(
                    CSV.format("sentAvgSyntaxDepthMean") + CSV.format(avgSyntaxDepthStats.getMean()) + "\n");
            writer.write(CSV.format("sentAvgSyntaxDepthStdDev")
                    + CSV.format(avgSyntaxDepthStats.getStandardDeviation()) + "\n");
            writer.write(CSV.format("syntaxDistanceMean") + CSV.format(syntaxDistanceStats.getMean()) + "\n");
            writer.write(CSV.format("syntaxDistanceStdDev")
                    + CSV.format(syntaxDistanceStats.getStandardDeviation()) + "\n");
            double nonProjectivePercent = ((double) nonProjectiveCount / (double) totalDepCount) * 100.0;
            writer.write(CSV.format("nonProjectiveCount") + CSV.format(nonProjectiveCount) + "\n");
            writer.write(CSV.format("nonProjectivePercent") + CSV.format(nonProjectivePercent) + "\n");
            // Per-POS-tag counts with their share of all tokens.
            writer.write(CSV.format("PosTagCounts") + "\n");
            for (String posTag : posTagCounts.keySet()) {
                int count = posTagCounts.get(posTag);
                writer.write(CSV.format(posTag) + CSV.format(count)
                        + CSV.format(((double) count / (double) tokenCount) * 100.0) + "\n");
            }
            // Per-dependency-label counts with their share of all dependencies.
            writer.write(CSV.format("DepLabelCounts") + "\n");
            for (String depLabel : depLabelCounts.keySet()) {
                int count = depLabelCounts.get(depLabel);
                writer.write(CSV.format(depLabel) + CSV.format(count)
                        + CSV.format(((double) count / (double) totalDepCount) * 100.0) + "\n");
            }
            writer.flush();
            writer.close();
        }
        if (this.serializationFile != null) {
            // Serialize this whole statistics object as a single zip entry.
            ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(serializationFile, false));
            zos.putNextEntry(new ZipEntry("Contents.obj"));
            ObjectOutputStream oos = new ObjectOutputStream(zos);
            try {
                oos.writeObject(this);
            } finally {
                // Flush rather than close: closing oos would close the zip
                // stream before its own flush/close below.
                oos.flush();
            }
            zos.flush();
            zos.close();
        }
    } catch (IOException e) {
        LogUtils.logError(LOG, e);
        throw new RuntimeException(e);
    }
}
From source file:org.tizzit.util.spring.httpinvoker.StreamSupportingHttpInvokerServiceExporter.java
protected void writeRemoteInvocationResult(final HttpServletRequest request, final HttpServletResponse response, final RemoteInvocationResult result, final OutputStream os) throws IOException { if (hasStreamResult(result)) { final OutputStream decoratedOut = decorateOutputStream(request, response, os); response.setHeader("Transfer-Encoding", "chunked"); try {/*from www .ja v a2 s . c om*/ // We want to be able to close the ObjectOutputStream in order to // properly flush and clear it out, but we don't want it closing // our underlying OutputStream. final ObjectOutputStream oos = new ObjectOutputStream( new CloseShieldedOutputStream(new BufferedOutputStream(decoratedOut, 4096))); try { doWriteRemoteInvocationResult(result, oos); oos.flush(); } finally { oos.close(); } doWriteReturnInputStream((StreamSupportingRemoteInvocationResult) result, decoratedOut); } finally { decoratedOut.close(); } } else { super.writeRemoteInvocationResult(request, response, result, os); } }
From source file:com.bt.aloha.dao.StateInfoDaoTest.java
/**
 * Creates a SimpleTestInfo, serializes it, and inserts it as a 'Collection'
 * row into the StateInfo table, committing on success and rolling back on
 * failure.
 *
 * @param id object id, used both for construction and as the row key
 * @param f1 first test field
 * @param f2 second test field
 * @return the SimpleTestInfo that was inserted
 * @throws RuntimeException if serialization or the database operation fails
 */
private SimpleTestInfo insertSimpleTestInfo(String id, String f1, String f2) {
    SimpleTestInfo sti = new SimpleTestInfo(id, f1, f2);
    byte[] stiBytes = null;
    try {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        // try-with-resources ensures the stream is closed (and flushed) even
        // if writeObject throws; the original leaked it on that path.
        try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
            oos.writeObject(sti);
        }
        stiBytes = bos.toByteArray();
    } catch (Exception e) {
        throw new RuntimeException("Unable to serialize SimpleTestInfo", e);
    }
    PreparedStatement s = null;
    try {
        s = connection.prepareStatement("insert into StateInfo"
                + "(object_id, object_type, object_version, last_use_time, is_dead, force_housekeep, object_value) values(?, 'Collection', ?, ?, ?, 0, ?)");
        s.setString(1, id);
        s.setString(2, "1");
        s.setLong(3, new java.util.Date().getTime());
        s.setInt(4, 0);
        s.setBytes(5, stiBytes);
        s.execute();
        connection.commit();
        // Log after execute()/commit(): the original logged "Inserted row"
        // before the insert had actually run.
        log.debug("Inserted row in Collection " + "for current SimpleTestInfo");
    } catch (SQLException e) {
        try {
            if (connection != null)
                connection.rollback();
        } catch (SQLException e1) {
            throw new RuntimeException("Unable to rollback operation on SimpleTestInfo", e);
        }
        throw new RuntimeException("Unable to execute db operation on SimpleTestInfo. op rolledback", e);
    } finally {
        if (s != null)
            try {
                s.close();
            } catch (SQLException e) {
                log.warn("Unable to close prepared statement", e);
            }
    }
    return sti;
}