Usage examples for java.io.ObjectOutputStream.flush()
public void flush() throws IOException
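Before the project examples below, here is a minimal sketch of the typical pattern: write an object, call flush() so any buffered serialization data reaches the underlying stream, then close. The Person class and the "person.ser" file name are hypothetical placeholders used only for this sketch.

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;

public class FlushExample {

    // Hypothetical serializable payload used only for this sketch.
    static class Person implements Serializable {
        private static final long serialVersionUID = 1L;
        String name = "example";
    }

    public static void main(String[] args) throws IOException {
        // try-with-resources closes the stream; flush() pushes buffered
        // serialization data down to the FileOutputStream first.
        try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream("person.ser"))) {
            oos.writeObject(new Person());
            oos.flush();
        }
    }
}

Note that close() also flushes, so the explicit flush() in the examples below is mostly about making the write-out point visible, or about pushing data through before the stream is handed back to a caller.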
From source file:ubic.gemma.analysis.report.ArrayDesignReportServiceImpl.java
@Override
public void generateAllArrayDesignReport() {
    log.info("Generating report summarizing all platforms ... ");

    // obtain time information (for timestamping)
    Date d = new Date(System.currentTimeMillis());
    String timestamp = DateFormatUtils.format(d, "yyyy.MM.dd HH:mm");

    long numCsBioSequences = arrayDesignService.numAllCompositeSequenceWithBioSequences();
    long numCsBlatResults = arrayDesignService.numAllCompositeSequenceWithBlatResults();
    long numCsGenes = arrayDesignService.numAllCompositeSequenceWithGenes();
    long numGenes = arrayDesignService.numAllGenes();

    // create a surrogate ArrayDesignValueObject to represent the total of all platforms
    ArrayDesignValueObject adVo = new ArrayDesignValueObject();
    adVo.setNumProbeSequences(Long.toString(numCsBioSequences));
    adVo.setNumProbeAlignments(Long.toString(numCsBlatResults));
    adVo.setNumProbesToGenes(Long.toString(numCsGenes));
    adVo.setNumGenes(Long.toString(numGenes));
    adVo.setDateCached(timestamp);

    try {
        // remove file first
        File f = new File(HOME_DIR + File.separatorChar + ARRAY_DESIGN_REPORT_DIR + File.separatorChar
                + ARRAY_DESIGN_SUMMARY);
        if (f.exists()) {
            if (!f.canWrite() || !f.delete()) {
                log.warn("Cannot write to file.");
                return;
            }
        }
        FileOutputStream fos = new FileOutputStream(HOME_DIR + File.separatorChar + ARRAY_DESIGN_REPORT_DIR
                + File.separatorChar + ARRAY_DESIGN_SUMMARY);
        ObjectOutputStream oos = new ObjectOutputStream(fos);
        oos.writeObject(adVo);
        oos.flush();
        oos.close();
    } catch (Throwable e) {
        // cannot write to file. Just fail gracefully.
        log.error("Cannot write to file.");
    }
    log.info("Done making reports");
}
From source file:com.icantrap.collections.dawg.Dawg.java
/**
 * Writes an instance of a dawg to an OutputStream. Once the data is written to the OutputStream,
 * it is flushed, but the stream is not closed.
 *
 * @param os the OutputStream to write the dawg to
 * @throws IOException if writing the dawg to the stream causes an IOException
 */
public void store(OutputStream os) throws IOException {
    BufferedOutputStream bos = new BufferedOutputStream(os, 8 * 1024);
    ObjectOutputStream oos = new ObjectOutputStream(bos);

    oos.writeObject(nodes);
    oos.flush();
}
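Because store() flushes but does not close, the caller owns the stream's lifecycle. A minimal sketch of how such a method might be called, assuming a Dawg instance is obtained elsewhere; "words.dawg" is a placeholder path:

import com.icantrap.collections.dawg.Dawg;
import java.io.FileOutputStream;
import java.io.IOException;

class DawgStoreCaller {
    // Sketch only: the Dawg instance is assumed to exist already.
    static void save(Dawg dawg) throws IOException {
        // store() flushes but leaves the stream open, so the caller closes it;
        // try-with-resources does that even if store() throws.
        try (FileOutputStream fos = new FileOutputStream("words.dawg")) {
            dawg.store(fos);
        }
    }
}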
From source file:com.joliciel.lefff.LefffMemoryLoader.java
public void serializeMemoryBase(LefffMemoryBase memoryBase, File memoryBaseFile) {
    LOG.debug("serializeMemoryBase");
    boolean isZip = false;
    if (memoryBaseFile.getName().endsWith(".zip"))
        isZip = true;

    FileOutputStream fos = null;
    ObjectOutputStream out = null;
    ZipOutputStream zos = null;
    try {
        fos = new FileOutputStream(memoryBaseFile);
        if (isZip) {
            zos = new ZipOutputStream(fos);
            zos.putNextEntry(new ZipEntry("lefff.obj"));
            out = new ObjectOutputStream(zos);
        } else {
            out = new ObjectOutputStream(fos);
        }

        try {
            out.writeObject(memoryBase);
        } finally {
            out.flush();
            out.close();
        }
    } catch (IOException ioe) {
        throw new RuntimeException(ioe);
    }
}
From source file:org.jax.haplotype.inference.CachingIdenticalByStateDataManager.java
/**
 * Find the IBS regions
 *
 * @param genomeName the name of the genome that we want to use
 * @param referenceStrain the reference strain
 * @param comparisonStrains the comparison strains
 * @param chromosomeNumber the chromosome number (starting with 1)
 * @param minimumExtentInSnps the minimum IBD extent in SNPs
 * @param minimumExtentInBasePairs the minimum IBD extent in base pairs
 * @return the SNP blocks
 */
public SnpIntervalListGroup findIdenticalByStateRegions(String genomeName, String referenceStrain,
        String[] comparisonStrains, int chromosomeNumber, int minimumExtentInSnps,
        long minimumExtentInBasePairs) {
    try {
        for (int i = 0; i < comparisonStrains.length; i++) {
            if (comparisonStrains[i] == null) {
                throw new NullPointerException();
            }
        }

        ScanningIdenticalByStateFinder scanningIdenticalByStateFinder = new ScanningIdenticalByStateFinder();
        LOG.fine("calling caching findIdenticalByStateRegions. " + "# comparison strains: "
                + comparisonStrains.length);
        Map<String, SnpIntervalList> snpBlockListsMap = new HashMap<String, SnpIntervalList>();

        synchronized (CachingIdenticalByStateDataManager.class) {
            Map<String, File> comparisonStrainsFileMap = new HashMap<String, File>();
            for (int i = 0; i < comparisonStrains.length; i++) {
                File uniqueFileCache = getCacheFile(genomeName, referenceStrain, comparisonStrains[i],
                        chromosomeNumber, minimumExtentInSnps, minimumExtentInBasePairs);

                // if the cache doesn't exist, create it
                if (uniqueFileCache.createNewFile()) {
                    uniqueFileCache.deleteOnExit();
                    comparisonStrainsFileMap.put(comparisonStrains[i], uniqueFileCache);
                } else {
                    LOG.fine("loading cache: " + uniqueFileCache.getAbsolutePath());
                    ObjectInputStream objectInput = new ObjectInputStream(
                            new BufferedInputStream(new FileInputStream(uniqueFileCache)));
                    snpBlockListsMap.put(comparisonStrains[i], (SnpIntervalList) objectInput.readObject());
                }
            }

            if (!comparisonStrainsFileMap.isEmpty()) {
                GenomeDataSource genomeDataSource = this.genomeDataManager.getGenomeDataMap().get(genomeName);
                ChromosomeDataSource chromosomeDataSource = genomeDataSource.getChromosomeDataSources()
                        .get(chromosomeNumber);
                String[] comparisonStrainsToCalculate = comparisonStrainsFileMap.keySet()
                        .toArray(new String[comparisonStrainsFileMap.size()]);
                for (int i = 0; i < comparisonStrainsToCalculate.length; i++) {
                    if (comparisonStrainsToCalculate[i] == null) {
                        throw new NullPointerException();
                    }
                }

                SdpInputStream sdpStream = chromosomeDataSource.getSdpInputStream(referenceStrain,
                        comparisonStrainsToCalculate);
                SnpPositionInputStream snpPositionStream = chromosomeDataSource.getSnpPositionInputStream();
                SnpIntervalListGroup newIbsRegions = scanningIdenticalByStateFinder.findIdenticalByStateRegions(
                        sdpStream, snpPositionStream, minimumExtentInSnps, minimumExtentInBasePairs);
                Map<String, List<BasePairInterval>> newIbsIntervalLists = newIbsRegions.getSnpBlocksMap();

                for (Entry<String, List<BasePairInterval>> intervalListEntry : newIbsIntervalLists.entrySet()) {
                    System.out.println("Strain Key: " + intervalListEntry.getKey());
                    SnpIntervalList snpIntervalList = new SnpIntervalList(intervalListEntry.getValue(),
                            newIbsRegions.getStartInBasePairs(), newIbsRegions.getExtentInBasePairs());
                    String strainName = intervalListEntry.getKey();
                    File uniqueFileCache = comparisonStrainsFileMap.get(strainName);
                    if (uniqueFileCache == null) {
                        throw new NullPointerException();
                    }

                    snpBlockListsMap.put(strainName, snpIntervalList);
                    ObjectOutputStream objectOutput = new ObjectOutputStream(
                            new BufferedOutputStream(new FileOutputStream(uniqueFileCache)));
                    objectOutput.writeObject(snpIntervalList);
                    objectOutput.flush();
                    objectOutput.close();
                }
            }
        }

        return new SnpIntervalListGroup(snpBlockListsMap);
    } catch (Exception ex) {
        LOG.log(Level.SEVERE, "Failed to cache IBS", ex);
        return null;
    }
}
From source file:org.alfresco.repo.admin.UnserializerValidatorBootstrap.java
/**
 * Check whether the Java deserialization remote code execution issue is already fixed in this
 * <b>commons collections</b> version.
 *
 * @return
 */
private boolean isCommonsCollectionsDeserializerFixed() {
    try {
        Class<?> invokerTransformerClass = Class.forName(
                "org.apache.commons.collections.functors.InvokerTransformer", true,
                this.getClass().getClassLoader());
        if (invokerTransformerClass != null) {
            Constructor<?> invokerTransformerConstructor = invokerTransformerClass.getConstructor(String.class,
                    Class[].class, Object[].class);
            Object invokerTransformerInstance = invokerTransformerConstructor.newInstance(null, null, null);

            ObjectOutputStream objectOut = null;
            ByteArrayOutputStream byteOut = null;
            try {
                // Write the object out to a byte array
                byteOut = new ByteArrayOutputStream();
                objectOut = new ObjectOutputStream(byteOut);
                objectOut.writeObject(invokerTransformerInstance);
                objectOut.flush();
            } catch (UnsupportedOperationException e) {
                // Expected: Serialization support is disabled for security reasons.
                return true;
            } catch (IOException e) {
                throw new AlfrescoRuntimeException(ERR_UNEXPECTED_ERROR, e);
            } finally {
                if (objectOut != null) {
                    try {
                        objectOut.close();
                    } catch (Throwable e) {
                    }
                }
                if (byteOut != null) {
                    try {
                        byteOut.close();
                    } catch (Throwable e) {
                    }
                }
            }
        }
    } catch (SecurityException e) {
        // This is an expected, acceptable exception that we can ignore.
    } catch (ClassNotFoundException e) {
        // This is an expected, acceptable exception that we can ignore.
    } catch (InstantiationException e) {
        // This is an expected, acceptable exception that we can ignore.
    } catch (IllegalAccessException e) {
        // This is an expected, acceptable exception that we can ignore.
    } catch (NoSuchMethodException e) {
        throw new AlfrescoRuntimeException(ERR_UNEXPECTED_ERROR, e);
    } catch (IllegalArgumentException e) {
        throw new AlfrescoRuntimeException(ERR_UNEXPECTED_ERROR, e);
    } catch (InvocationTargetException e) {
        // This is an expected, acceptable exception that we can ignore.
    }
    return false;
}
From source file:dynamite.zafroshops.app.fragment.AddZopFragment.java
public ArrayList<MobileCountry> getCountries() {
    ArrayList<MobileCountry> result = new ArrayList<>();
    final SharedPreferences preferences = getActivity().getPreferences(0);
    final SharedPreferences.Editor editor = preferences.edit();
    boolean fromFile = false;

    if (preferences.contains(StorageKeys.COUNTRIES_KEY)) {
        try {
            ObjectInputStream ois = new ObjectInputStream(
                    getActivity().getBaseContext().openFileInput(StorageKeys.COUNTRIES_KEY));
            zopCountries = (ArrayList<MobileCountry>) ois.readObject();
            zopCountryAdapter.clear();
            zopCountryAdapter.addAll(zopCountries);
            fromFile = true;
        } catch (IOException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    }

    if (!fromFile) {
        if (zopCountries == null || zopCountries.size() == 0) {
            ListenableFuture<JsonElement> query = MainActivity.MobileClient.invokeApi("mobileCountry", "GET",
                    new ArrayList<Pair<String, String>>());

            Futures.addCallback(query, new FutureCallback<JsonElement>() {
                @Override
                public void onSuccess(JsonElement result) {
                    JsonArray typesAsJson = result.getAsJsonArray();
                    if (typesAsJson != null) {
                        zopCountries = new Gson().fromJson(result,
                                new TypeToken<ArrayList<MobileCountry>>() {
                                }.getType());
                    }

                    zopCountryAdapter.clear();
                    zopCountryAdapter.addAll(zopCountries);
                    setLocation(null);

                    try {
                        ObjectOutputStream oos = new ObjectOutputStream(getActivity().getBaseContext()
                                .openFileOutput(StorageKeys.COUNTRIES_KEY, Context.MODE_PRIVATE));
                        oos.writeObject(zopCountries);
                        oos.flush();
                        oos.close();

                        editor.putString(StorageKeys.COUNTRIES_KEY, StorageKeys.COUNTRIES_KEY);
                        editor.commit();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }

                @Override
                public void onFailure(@NonNull Throwable t) {
                }
            });
        }
    }
    return result;
}
From source file:ubic.gemma.analysis.report.ArrayDesignReportServiceImpl.java
@Override
public void generateArrayDesignReport(ArrayDesignValueObject adVo) {

    ArrayDesign ad = arrayDesignService.load(adVo.getId());
    if (ad == null)
        return;

    // obtain time information (for timestamping)
    Date d = new Date(System.currentTimeMillis());
    String timestamp = DateFormatUtils.format(d, "yyyy.MM.dd HH:mm");

    long numProbes = arrayDesignService.getCompositeSequenceCount(ad);
    long numCsBioSequences = arrayDesignService.numCompositeSequenceWithBioSequences(ad);
    long numCsBlatResults = arrayDesignService.numCompositeSequenceWithBlatResults(ad);
    long numCsGenes = arrayDesignService.numCompositeSequenceWithGenes(ad);
    long numGenes = arrayDesignService.numGenes(ad);

    adVo.setDesignElementCount(numProbes);
    adVo.setNumProbeSequences(Long.toString(numCsBioSequences));
    adVo.setNumProbeAlignments(Long.toString(numCsBlatResults));
    adVo.setNumProbesToGenes(Long.toString(numCsGenes));
    adVo.setNumGenes(Long.toString(numGenes));
    adVo.setDateCached(timestamp);

    // check the directory exists.
    String reportDir = HOME_DIR + File.separatorChar + ARRAY_DESIGN_REPORT_DIR;
    File reportDirF = new File(reportDir);
    if (!reportDirF.exists()) {
        reportDirF.mkdirs();
    }

    String reportFileName = reportDir + File.separatorChar + ARRAY_DESIGN_REPORT_FILE_NAME_PREFIX + "."
            + adVo.getId();

    try {
        // remove old file first (possible todo: don't do this until after new file is okayed - maybe this
        // delete isn't needed, just clobber.)
        File f = new File(reportFileName);
        if (f.exists()) {
            if (!f.canWrite() || !f.delete()) {
                log.error("Report exists but cannot overwrite, leaving the old one in place: " + reportFileName);
                return;
            }
        }
        FileOutputStream fos = new FileOutputStream(reportFileName);
        ObjectOutputStream oos = new ObjectOutputStream(fos);
        oos.writeObject(adVo);
        oos.flush();
        oos.close();
    } catch (Throwable e) {
        log.error("Cannot write to file: " + reportFileName);
        return;
    }
    log.info("Generated report for " + ad);
}
From source file:com.orange.matosweb.MatosCampaign.java
/**
 * Backup the steps.
 */
private void backup() {
    try {
        FileOutputStream fos = new FileOutputStream(new File(privateFolder, BACKUP));
        try {
            ObjectOutputStream oos = new ObjectOutputStream(fos);
            try {
                oos.writeObject(steps);
                oos.flush();
            } finally {
                oos.close();
            }
        } finally {
            fos.close();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
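On Java 7+ the nested try/finally blocks above can be collapsed with try-with-resources; closing an ObjectOutputStream already flushes it, but an explicit flush() keeps the intent visible. A sketch of the same backup idea, where BackupSketch, the steps list, and the target file are placeholders rather than the original class:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

class BackupSketch {
    // Placeholder state standing in for the campaign's steps.
    private final List<Serializable> steps = new ArrayList<>();

    void backup(File target) {
        // try-with-resources closes both streams in reverse order of declaration;
        // flush() before the implicit close makes the write-out point explicit.
        try (FileOutputStream fos = new FileOutputStream(target);
             ObjectOutputStream oos = new ObjectOutputStream(fos)) {
            oos.writeObject(steps);
            oos.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}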
From source file:org.hyperic.hq.events.FileAlertConditionEvaluatorStateRepository.java
private void persistStates(Map<Integer, Serializable> states, File out) {
    ObjectOutputStream objectOutputStream = null;
    FileOutputStream fileOutputStream = null;
    try {
        if (out.isFile()) {
            log.warn(out.getAbsolutePath() + " already exists. It will be deleted.");
        }
        out.delete();
        fileOutputStream = new FileOutputStream(out);
        objectOutputStream = new ObjectOutputStream(new BufferedOutputStream(fileOutputStream));
        objectOutputStream.writeObject(states);
        objectOutputStream.flush();
        if (log.isInfoEnabled()) {
            log.info("Successfully saved alert condition evaluator states to " + out.getAbsolutePath());
        }
    } catch (Exception e) {
        log.warn("Unable to save alert condition evaluator states", e);
    } finally {
        closeStream(objectOutputStream);
        closeStream(fileOutputStream);
    }
}
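When the ObjectOutputStream is layered over a BufferedOutputStream, as above, flush() pushes the bytes through both buffers down to the FileOutputStream. A minimal sketch of that layering, with a hypothetical class name and placeholder path, assuming the map's concrete type (e.g. HashMap) is serializable:

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Map;

class BufferedPersistSketch {
    static void persist(Map<Integer, Serializable> states, String path) throws IOException {
        // flush() drains the ObjectOutputStream and the BufferedOutputStream to the file;
        // try-with-resources then closes the whole chain from the outside in.
        try (ObjectOutputStream oos = new ObjectOutputStream(
                new BufferedOutputStream(new FileOutputStream(path)))) {
            oos.writeObject(states);
            oos.flush();
        }
    }
}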
From source file:org.kepler.util.AuthNamespace.java
/**
 * Serialize the authority:namespace String to the AuthorizedNamespace file.
 */
private void writeAuthorizedNamespace() throws Exception {
    if (isDebugging)
        log.debug("writeAuthorizedNamespace()");

    File ianFile = new File(_anFileName);
    // delete any existing file before writing a fresh copy
    if (ianFile.exists()) {
        ianFile.delete();
    }

    String authNamespace = getAuthNamespace();
    if (authNamespace == null) {
        throw new Exception("authNamespace is null");
    }

    OutputStream os = new FileOutputStream(_anFileName);
    ObjectOutputStream oos = null;
    try {
        oos = new ObjectOutputStream(os);
        oos.writeObject(authNamespace);
        oos.flush();
    } finally {
        if (oos != null) {
            oos.close();
        }
    }

    try {
        // Every time we write a new AuthNamespace make sure there is
        // at least one row added to the LSID_GENERATOR table for the
        // new Authority and Namespace; this is how LSIDGenerator
        // can tell if the LSID_GENERATOR table has been deleted since
        // the last time it accessed it.
        LSIDGenerator.insertRow(getAuthority(), getNamespace(), 0, 1);
    } catch (Exception e) {
        e.printStackTrace();
    }
}