List of usage examples for java.io ObjectInputStream close
public void close() throws IOException
From source file:org.everit.jira.timetracker.plugin.JiraTimetrackerPluginImpl.java
/**
 * Serialization guard: instances of this class must never be deserialized.
 * The stream is closed before the rejection is raised so no resource is left open.
 *
 * @param stream the object stream handed in by the serialization machinery; closed unconditionally
 * @throws java.io.NotSerializableException always, carrying this class's name
 * @throws java.io.IOException if closing the stream fails
 */
private void readObject(final java.io.ObjectInputStream stream)
        throws java.io.IOException, ClassNotFoundException {
    stream.close();
    throw new java.io.NotSerializableException(getClass().getName());
}
From source file:org.motechproject.mobile.omp.manager.intellivr.ConvertSerializedIVRSessionsBean.java
/**
 * Loads previously serialized IVR sessions from {@code ivrSessionSerialResource}.
 * Failures (missing file, I/O error, unknown class) are logged and tolerated:
 * the method always returns a usable map, empty on any failure.
 *
 * @return the deserialized session map keyed by session id, or an empty map if
 *         the resource is unset or the cache could not be read
 */
@SuppressWarnings("unchecked")
protected Map<String, IVRSession> loadIvrSessions() {
    Map<String, IVRSession> loadedSessions = new HashMap<String, IVRSession>();
    if (ivrSessionSerialResource == null) {
        return loadedSessions;
    }
    // try-with-resources guarantees the stream is closed even when readObject
    // throws, replacing the original try/finally with its swallowed close().
    try (ObjectInputStream objIn =
            new ObjectInputStream(new FileInputStream(ivrSessionSerialResource.getFile()))) {
        loadedSessions = (Map<String, IVRSession>) objIn.readObject();
        for (IVRSession s : loadedSessions.values()) {
            System.out.println("Loaded existing session " + s.getSessionId());
        }
    } catch (IOException | ClassNotFoundException e) {
        // multi-catch: both failure modes were handled identically in the original
        System.out.println("Cached IVRSessions not loaded due to following error: " + e.getMessage());
    }
    return loadedSessions;
}
From source file:app.utils.ACache.java
public Object getAsObject(String key) { File file = mCache.get(key);// ww w . j av a2s .c om if (!file.exists()) return null; InputStream is = null; ObjectInputStream ois = null; try { is = new FileInputStream(file); ois = new ObjectInputStream(is); return ois.readObject(); } catch (Exception e) { e.printStackTrace(); return null; } finally { try { if (is != null) is.close(); } catch (IOException e) { e.printStackTrace(); } try { if (ois != null) ois.close(); } catch (IOException e) { e.printStackTrace(); } } }
From source file:net.nicholaswilliams.java.licensing.ObjectSerializer.java
/**
 * Deserializes an object of the specified type from the provided byte stream.
 *
 * @param expectedType The type that is expected to be retrieved from {@code byteStream}
 *                     (must implement {@link Serializable})
 * @param byteStream The byte stream to retrieve the object from (it must contain exactly
 *                   one object, of the exact type passed to {@code expectedType})
 * @return the requested unserialized object, presumably in the stream.
 * @throws ObjectTypeNotExpectedException If the object found in the stream does not match
 *         the type {@code expectedType} or if a {@link ClassNotFoundException} or
 *         {@link NoClassDefFoundError} occurs
 * @throws ObjectDeserializationException If an I/O exception occurs while deserializing
 *         the object from the stream
 */
public final <T extends Serializable> T readObject(Class<T> expectedType, byte[] byteStream)
        throws ObjectDeserializationException {
    // try-with-resources replaces the original finally block; closing a stream
    // backed by a ByteArrayInputStream cannot realistically fail.
    try (ObjectInputStream stream = new ObjectInputStream(new ByteArrayInputStream(byteStream))) {
        Object allegedObject = stream.readObject();
        if (!expectedType.isInstance(allegedObject)) {
            throw new ObjectTypeNotExpectedException(expectedType.getName(),
                    allegedObject.getClass().getName());
        }
        return expectedType.cast(allegedObject);
    } catch (IOException e) {
        throw new ObjectDeserializationException(
                "An I/O error occurred while reading the object from the byte array.", e);
    } catch (ClassNotFoundException | NoClassDefFoundError e) {
        // multi-catch: both cases were handled identically in the original
        throw new ObjectTypeNotExpectedException(expectedType.getName(), e.getMessage(), e);
    }
}
From source file:edu.caltechUcla.sselCassel.projects.jMarkets.frontdesk.web.data.SessionBean.java
/** * Connect to the JMarkets server and start the session defined by the given SessionDef object. * Return the session Id of the created session */// ww w. j a v a 2 s . c om private int executeSession(String path, String name, SessionDef session) { try { URL servlet = new URL(path + "/servlet/ServletReceiver"); Request req = new Request(Request.SERVER_INIT_REQUEST); req.addIntInfo("numClients", numSubjects); req.addIntInfo("updateProtocol", JMConstants.HTTP_UPDATE_PROTOCOL); req.addIntInfo("updateTime", 100); req.addStringInfo("name", name); req.addInfo("session", session); URLConnection servletConnection = servlet.openConnection(); servletConnection.setDoInput(true); servletConnection.setDoOutput(true); servletConnection.setUseCaches(false); servletConnection.setDefaultUseCaches(false); servletConnection.setRequestProperty("Content-Type", "application/octet-stream"); ObjectOutputStream outputToServlet = new ObjectOutputStream(servletConnection.getOutputStream()); outputToServlet.writeObject(req); outputToServlet.flush(); outputToServlet.close(); ObjectInputStream inputFromServlet = new ObjectInputStream(servletConnection.getInputStream()); Response res = (Response) inputFromServlet.readObject(); int sessionId = res.getIntInfo("sessionId"); inputFromServlet.close(); return sessionId; } catch (IOException e) { e.printStackTrace(); } catch (ClassNotFoundException e) { e.printStackTrace(); } return -1; }
From source file:junk.gui.HazardDataSetCalcCondorApp.java
/** * this connects to the servlet on web server to check if dataset name already exists * or computation have already been for these parameter settings. * @return/*from w w w .jav a 2s .com*/ */ private Object checkForHazardMapComputation() { try { if (D) System.out.println("starting to make connection with servlet"); URL hazardMapServlet = new URL(DATASET_CHECK_SERVLET_URL); URLConnection servletConnection = hazardMapServlet.openConnection(); if (D) System.out.println("connection established"); // inform the connection that we will send output and accept input servletConnection.setDoInput(true); servletConnection.setDoOutput(true); // Don't use a cached version of URL connection. servletConnection.setUseCaches(false); servletConnection.setDefaultUseCaches(false); // Specify the content type that we will send binary data servletConnection.setRequestProperty("Content-Type", "application/octet-stream"); ObjectOutputStream toServlet = new ObjectOutputStream(servletConnection.getOutputStream()); //sending the parameters info. to the servlet toServlet.writeObject(getParametersInfo()); //sending the dataset id to the servlet toServlet.writeObject(datasetIdText.getText()); toServlet.flush(); toServlet.close(); // Receive the datasetnumber from the servlet after it has received all the data ObjectInputStream fromServlet = new ObjectInputStream(servletConnection.getInputStream()); Object obj = fromServlet.readObject(); //if(D) System.out.println("Receiving the Input from the Servlet:"+success); fromServlet.close(); return obj; } catch (Exception e) { ExceptionWindow bugWindow = new ExceptionWindow(this, e, getParametersInfo()); bugWindow.setVisible(true); bugWindow.pack(); } return null; }
From source file:com.ibm.jaggr.core.impl.deps.DepTree.java
/** * Object constructor. Attempts to de-serialize the cached dependency lists * from disk and then validates the dependency lists based on last-modified * dates, looking for any new or removed files. If the cached dependency * list data cannot be de-serialized, new lists are constructed. Once the * dependency lists have been validated, the list data is serialized back * out to disk./*from w ww. j av a 2 s . c o m*/ * * @param paths * Collection of URIs which specify the target resources * to be scanned for javascript files. * @param aggregator * The servlet instance for this object * @param stamp * timestamp associated with external override/customization * resources that are check on every server restart * @param clean * If true, then the dependency lists are generated from scratch * rather than by de-serializing and then validating the cached * dependency lists. * @param validateDeps * If true, then validate existing cached dependencies using * file last-modified times. * @throws IOException */ public DepTree(Collection<URI> paths, IAggregator aggregator, long stamp, boolean clean, boolean validateDeps) throws IOException { final String sourceMethod = "<ctor>"; //$NON-NLS-1$ boolean isTraceLogging = log.isLoggable(Level.FINER); if (isTraceLogging) { log.entering(DepTree.class.getName(), sourceMethod, new Object[] { paths, aggregator, stamp, clean, validateDeps }); } this.stamp = stamp; IConfig config = aggregator.getConfig(); rawConfig = config.toString(); cacheBust = AggregatorUtil.getCacheBust(aggregator); File cacheDir = new File(aggregator.getWorkingDirectory(), DEPCACHE_DIRNAME); File cacheFile = new File(cacheDir, CACHE_FILE); /* * The de-serialized dependency map. If we have a cached dependency map, * then it will be validated against the last-modified dates of the * current files and only the files that have changed will need to be * re-parsed to update the dependency lists. 
*/ DepTree cached = null; if (!clean) { // If we're not starting clean, try to de-serialize the map from // cache try { ObjectInputStream is = new ObjectInputStream(new FileInputStream(cacheFile)); try { if (isTraceLogging) { log.finer("Attempting to read cached dependencies from " + cacheFile.toString()); //$NON-NLS-1$ } cached = (DepTree) is.readObject(); } finally { try { is.close(); } catch (Exception ignore) { } } } catch (FileNotFoundException e) { /* * Not an error. Just means that the cache file hasn't been * written yet or else it's been deleted. */ if (log.isLoggable(Level.INFO)) log.log(Level.INFO, Messages.DepTree_1); } catch (Exception e) { if (log.isLoggable(Level.SEVERE)) log.log(Level.SEVERE, e.getMessage(), e); } } // If the cacheBust config param has changed, then do a clean build // of the dependencies. if (cached != null) { if (stamp == 0) { // no init stamp provided. Preserve the cached one. stamp = cached.stamp; } if (stamp > cached.stamp) { // init stamp has been updated. Validate dependencies. validateDeps = true; } if (!StringUtils.equals(cacheBust, cached.cacheBust)) { if (isTraceLogging) { log.finer("Current cacheBust = " + cacheBust + ", cached cacheBust = " + cached.cacheBust); //$NON-NLS-1$//$NON-NLS-2$ } if (log.isLoggable(Level.INFO)) { log.info(Messages.DepTree_2); } cached = null; } if (cached != null && !StringUtils.equals(rawConfig, cached.rawConfig)) { if (isTraceLogging) { log.finer("Current config = " + rawConfig); //$NON-NLS-1$ log.finer("Cached config = " + cached.rawConfig); //$NON-NLS-1$ } validateDeps = true; } } /* * If we de-serialized a previously saved dependency map, then go with * that. 
*/ if (cached != null && !validateDeps && !clean) { depMap = cached.depMap; fromCache = true; return; } else if (isTraceLogging) { log.finer("Building/validating deps: cached = " + cached + ", validateDeps = " + validateDeps //$NON-NLS-1$//$NON-NLS-2$ + ", clean = " + clean); //$NON-NLS-1$ } // Initialize the dependency map depMap = new ConcurrentHashMap<URI, DepTreeNode>(); // This can take a while, so print something to the console String msg = MessageFormat.format(Messages.DepTree_3, new Object[] { aggregator.getName() }); ConsoleService cs = new ConsoleService(); cs.println(msg); if (log.isLoggable(Level.INFO)) { log.info(msg); } // Make sure that all the paths are unique and orthogonal paths = DepUtils.removeRedundantPaths(paths); /* * Create the thread pools, one for the tree builders and one for the * parsers. Since a tree builder thread will wait for all the outstanding * parser threads started by that builder to complete, we need to use two * independent thread pools to guard against the possibility of deadlock * caused by all the threads in the pool being consumed by tree builders * and leaving none available to service the parsers. 
*/ final ThreadGroup treeBuilderTG = new ThreadGroup(TREEBUILDER_TGNAME), parserTG = new ThreadGroup(JSPARSER_TGNAME); ExecutorService treeBuilderExc = Executors.newFixedThreadPool(10, new ThreadFactory() { public Thread newThread(Runnable r) { return new Thread(treeBuilderTG, r, MessageFormat.format(THREADNAME, new Object[] { treeBuilderTG.getName(), treeBuilderTG.activeCount() })); } }), parserExc = Executors.newFixedThreadPool(20, new ThreadFactory() { public Thread newThread(Runnable r) { return new Thread(parserTG, r, MessageFormat.format(THREADNAME, new Object[] { parserTG.getName(), parserTG.activeCount() })); } }); // Counter to keep track of number of tree builder threads started AtomicInteger treeBuilderCount = new AtomicInteger(0); // The completion services for the thread pools final CompletionService<URI> parserCs = new ExecutorCompletionService<URI>(parserExc); CompletionService<DepTreeBuilder.Result> treeBuilderCs = new ExecutorCompletionService<DepTreeBuilder.Result>( treeBuilderExc); Set<String> nonJSExtensions = Collections.unmodifiableSet(getNonJSExtensions(aggregator)); // Start the tree builder threads to process the paths for (final URI path : paths) { /* * Create or get from cache the root node for this path and * add it to the new map. 
*/ DepTreeNode root = new DepTreeNode("", path); //$NON-NLS-1$ DepTreeNode cachedNode = null; if (cached != null) { cachedNode = cached.depMap.get(path); if (log.isLoggable(Level.INFO)) { log.info(MessageFormat.format(Messages.DepTree_4, new Object[] { path })); } } else { if (log.isLoggable(Level.INFO)) { log.info(MessageFormat.format(Messages.DepTree_5, new Object[] { path })); } } depMap.put(path, root); treeBuilderCount.incrementAndGet(); treeBuilderCs.submit(new DepTreeBuilder(aggregator, parserCs, path, root, cachedNode, nonJSExtensions)); } // List of parser exceptions LinkedList<Exception> parserExceptions = new LinkedList<Exception>(); /* * Pull the completed tree builder tasks from the completion queue until * all the paths have been processed */ while (treeBuilderCount.decrementAndGet() >= 0) { try { DepTreeBuilder.Result result = treeBuilderCs.take().get(); if (log.isLoggable(Level.INFO)) { log.info(MessageFormat.format(Messages.DepTree_6, new Object[] { result.parseCount, result.dirName })); } } catch (Exception e) { if (log.isLoggable(Level.SEVERE)) log.log(Level.SEVERE, e.getMessage(), e); parserExceptions.add(e); } } // shutdown the thread pools now that we're done with them parserExc.shutdown(); treeBuilderExc.shutdown(); // If parser exceptions occurred, then rethrow the first one if (parserExceptions.size() > 0) { throw new RuntimeException(parserExceptions.get(0)); } // Prune dead nodes (folder nodes with no children) for (Map.Entry<URI, DepTreeNode> entry : depMap.entrySet()) { entry.getValue().prune(); } /* * Make sure the cache directory exists before we try to serialize the * dependency map. 
*/ if (!cacheDir.exists()) if (!cacheDir.mkdirs()) { throw new IOException( MessageFormat.format(Messages.DepTree_0, new Object[] { cacheDir.getAbsolutePath() })); } // Serialize the map to the cache directory ObjectOutputStream os; os = new ObjectOutputStream(new FileOutputStream(cacheFile)); try { if (isTraceLogging) { log.finer("Writing cached dependencies to " + cacheFile.toString()); //$NON-NLS-1$ } os.writeObject(this); } finally { try { os.close(); } catch (Exception ignore) { } } msg = MessageFormat.format(Messages.DepTree_7, new Object[] { aggregator.getName() }); // Output that we're done. cs.println(msg); if (log.isLoggable(Level.INFO)) { log.info(msg); } if (isTraceLogging) { log.exiting(DepTree.class.getName(), sourceMethod); } }
From source file:im.r_c.android.fusioncache.DiskCache.java
@Override public Serializable getSerializable(String key) { byte[] byteArray = getBytes(key); if (byteArray == null || byteArray.length == 0) { return null; }//from w w w . j a va 2 s .co m ByteArrayInputStream bais = null; ObjectInputStream ois = null; Object result = null; try { bais = new ByteArrayInputStream(byteArray); ois = new ObjectInputStream(bais); result = ois.readObject(); } catch (IOException | ClassNotFoundException e) { e.printStackTrace(); } finally { if (bais != null) { try { bais.close(); } catch (IOException ignored) { } } if (ois != null) { try { ois.close(); } catch (IOException ignored) { } } } if (result == null || !(result instanceof Serializable)) { return null; } else { return (Serializable) result; } }
From source file:hu.tbognar76.apking.ApKing.java
@SuppressWarnings("unchecked") private void readInCache() { try {//w ww . j a v a 2 s . com FileInputStream fileIn = new FileInputStream(this.init.serialCache); ObjectInputStream in = new ObjectInputStream(fileIn); this.serialInHash = (HashMap<String, ApkInfo>) in.readObject(); in.close(); fileIn.close(); // System.out.println("Cache loaded: " + this.serialCache + // " cache size: " + this.serialInHash.size()); } catch (IOException i) { System.out.println("No Cache!!"); return; } catch (ClassNotFoundException c) { c.printStackTrace(); return; } }
From source file:com.holycityaudio.SpinCAD.SpinCADFile.java
public SpinCADPatch fileReadPatch952(String fileName) throws IOException, ClassNotFoundException { // Object deserialization SpinCADPatch p = new SpinCADPatch(); p.patchFileName = fileName;/* w ww. ja v a 2s . c o m*/ FileInputStream fis = new FileInputStream(fileName); ObjectInputStream ois = new ObjectInputStream(fis); p.cb.line[0] = (String) ois.readObject(); p.cb.line[1] = (String) ois.readObject(); p.cb.line[2] = (String) ois.readObject(); p.cb.line[3] = (String) ois.readObject(); p.cb.line[4] = (String) ois.readObject(); p.patchModel = (SpinCADModel) ois.readObject(); ois.close(); return p; }