Usage examples for java.io.ObjectInputStream.readLong()
public long readLong() throws IOException
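readLong() reads the next eight bytes from the stream and returns them as a primitive long; values must be read back in exactly the order they were written. Before the real-world examples, here is a minimal round-trip sketch pairing the method with its ObjectOutputStream.writeLong() counterpart (the file name demo.bin and the ReadLongDemo class are illustrative, not taken from any of the projects below):

import java.io.*;

public class ReadLongDemo {
    public static void main(String[] args) throws IOException {
        File file = new File("demo.bin"); // illustrative file name

        // write a long with the matching ObjectOutputStream.writeLong()
        try (ObjectOutputStream out = new ObjectOutputStream(new FileOutputStream(file))) {
            out.writeLong(42L);
        }

        // read the eight-byte long back in the same order it was written
        try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(file))) {
            long value = in.readLong(); // 42
            System.out.println(value);
        }
    }
}

The examples that follow use the same pattern, typically to read version stamps, timestamps, sizes, and offsets from serialized headers.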
From source file:it.unimi.di.big.mg4j.document.TRECDocumentCollection.java
private void readObject(final ObjectInputStream s) throws IOException, ClassNotFoundException {
    s.defaultReadObject();
    final long size = s.readLong();
    final ObjectBigArrayBigList<TRECDocumentDescriptor> descriptors = new ObjectBigArrayBigList<TRECDocumentDescriptor>();
    descriptors.ensureCapacity(size);
    // size is a long, so use a long loop counter to avoid overflow on big collections
    for (long i = 0; i < size; i++)
        descriptors.add(new TRECDocumentDescriptor(s.readInt(), s.readLong(), s.readInt(), s.readInt()));
    this.descriptors = descriptors;
}
From source file:com.ecyrd.jspwiki.ReferenceManager.java
/**
 * Reads the serialized data from the disk back to memory.
 * Returns the date when the data was last written on disk.
 */
@SuppressWarnings("unchecked")
private synchronized long unserializeFromDisk() throws IOException, ClassNotFoundException {
    ObjectInputStream in = null;
    long saved = 0L;

    try {
        StopWatch sw = new StopWatch();
        sw.start();

        File f = new File(m_engine.getWorkDir(), SERIALIZATION_FILE);

        in = new ObjectInputStream(new BufferedInputStream(new FileInputStream(f)));

        long ver = in.readLong();

        if (ver != serialVersionUID) {
            throw new IOException("File format has changed; I need to recalculate references.");
        }

        saved = in.readLong();
        m_refersTo = (Map) in.readObject();
        m_referredBy = (Map) in.readObject();

        m_unmutableReferredBy = Collections.unmodifiableMap(m_referredBy);
        m_unmutableRefersTo = Collections.unmodifiableMap(m_refersTo);

        sw.stop();
        log.debug("Read serialized data successfully in " + sw);
    } finally {
        if (in != null)
            in.close();
    }

    return saved;
}
From source file:com.ecyrd.jspwiki.ReferenceManager.java
/**
 * Reads the serialized data from the disk back to memory.
 * Returns the date when the data was last written on disk.
 */
private synchronized long unserializeAttrsFromDisk(WikiPage p) throws IOException, ClassNotFoundException {
    ObjectInputStream in = null;
    long saved = 0L;

    try {
        StopWatch sw = new StopWatch();
        sw.start();

        //
        //  Find attribute cache, and check if it exists
        //
        File f = new File(m_engine.getWorkDir(), SERIALIZATION_DIR);
        f = new File(f, getHashFileName(p.getName()));

        if (!f.exists()) {
            return 0L;
        }

        log.debug("Deserializing attributes for " + p.getName());

        in = new ObjectInputStream(new BufferedInputStream(new FileInputStream(f)));

        long ver = in.readLong();

        if (ver != serialVersionUID) {
            log.debug("File format has changed; cannot deserialize.");
            return 0L;
        }

        saved = in.readLong();

        String name = in.readUTF();
        if (!name.equals(p.getName())) {
            log.debug("File name does not match (" + name + "), skipping...");
            return 0L; // Not here
        }

        long entries = in.readLong();

        // entries is a long, so use a long loop counter
        for (long i = 0; i < entries; i++) {
            String key = in.readUTF();
            Object value = in.readObject();

            p.setAttribute(key, value);

            log.debug(" attr: " + key + "=" + value);
        }

        sw.stop();
        log.debug("Read serialized data for " + name + " successfully in " + sw);
        p.setHasMetadata();
    } catch (NoSuchAlgorithmException e) {
        log.fatal("No MD5!?!");
    } finally {
        if (in != null)
            in.close();
    }

    return saved;
}
From source file:org.aksw.dice.eaglet.uri.impl.FileBasedCachingUriCheckerManager.java
public static ObjectLongOpenHashMap<String> readCacheFile(File cacheFile) {
    if (!cacheFile.exists() || cacheFile.isDirectory()) {
        return null;
    }
    FileInputStream fin = null;
    ObjectInputStream oin = null;
    try {
        fin = new FileInputStream(cacheFile);
        oin = new ObjectInputStream(fin);
        // first, read the number of URIs
        int count = oin.readInt();
        String uri;
        ObjectLongOpenHashMap<String> cache = new ObjectLongOpenHashMap<String>(2 * count);
        for (int i = 0; i < count; ++i) {
            uri = (String) oin.readObject();
            cache.put(uri, oin.readLong());
        }
        return cache;
    } catch (Exception e) {
        LOGGER.error("Exception while reading cache file.", e);
    } finally {
        IOUtils.closeQuietly(oin);
        IOUtils.closeQuietly(fin);
    }
    return null;
}
From source file:org.apache.flink.cep.nfa.SharedBuffer.java
private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
    DataInputViewStreamWrapper source = new DataInputViewStreamWrapper(ois);
    ArrayList<SharedBufferEntry<K, V>> entryList = new ArrayList<>();
    ois.defaultReadObject();

    this.pages = new HashMap<>();

    int numberPages = ois.readInt();

    for (int i = 0; i < numberPages; i++) {
        // key of the page
        @SuppressWarnings("unchecked")
        K key = (K) ois.readObject();

        SharedBufferPage<K, V> page = new SharedBufferPage<>(key);
        pages.put(key, page);

        int numberEntries = ois.readInt();

        for (int j = 0; j < numberEntries; j++) {
            // restore the SharedBufferEntries for the given page
            V value = valueSerializer.deserialize(source);
            long timestamp = ois.readLong();

            ValueTimeWrapper<V> valueTimeWrapper = new ValueTimeWrapper<>(value, timestamp);
            SharedBufferEntry<K, V> sharedBufferEntry = new SharedBufferEntry<K, V>(valueTimeWrapper, page);

            sharedBufferEntry.referenceCounter = ois.readInt();

            page.entries.put(valueTimeWrapper, sharedBufferEntry);

            entryList.add(sharedBufferEntry);
        }
    }

    // read the edges of the shared buffer entries
    int numberEdges = ois.readInt();

    for (int j = 0; j < numberEdges; j++) {
        int sourceIndex = ois.readInt();
        int targetIndex = ois.readInt();

        if (sourceIndex >= entryList.size() || sourceIndex < 0) {
            throw new RuntimeException("Could not find source entry with index " + sourceIndex
                    + ". This indicates a corrupted state.");
        } else {
            // We've already deserialized the shared buffer entry. Simply read its ID and
            // retrieve the buffer entry from the list of entries
            SharedBufferEntry<K, V> sourceEntry = entryList.get(sourceIndex);

            final DeweyNumber version = (DeweyNumber) ois.readObject();
            final SharedBufferEntry<K, V> target;

            if (targetIndex >= 0) {
                if (targetIndex >= entryList.size()) {
                    throw new RuntimeException("Could not find target entry with index " + targetIndex
                            + ". This indicates a corrupted state.");
                } else {
                    target = entryList.get(targetIndex);
                }
            } else {
                target = null;
            }

            sourceEntry.edges.add(new SharedBufferEdge<K, V>(target, version));
        }
    }
}
From source file:org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.java
@SuppressWarnings("unchecked") private void retrieveFromFile() throws IOException, BucketAllocatorException, ClassNotFoundException { File persistenceFile = new File(persistencePath); if (!persistenceFile.exists()) { return;/*from w w w. j a va2 s . co m*/ } assert !cacheEnabled; FileInputStream fis = null; ObjectInputStream ois = null; try { if (!ioEngine.isPersistent()) throw new IOException("Attempt to restore non-persistent cache mappings!"); fis = new FileInputStream(persistencePath); ois = new ObjectInputStream(fis); long capacitySize = ois.readLong(); if (capacitySize != cacheCapacity) throw new IOException("Mismatched cache capacity:" + StringUtils.byteDesc(capacitySize) + ", expected: " + StringUtils.byteDesc(cacheCapacity)); String ioclass = ois.readUTF(); String mapclass = ois.readUTF(); if (!ioEngine.getClass().getName().equals(ioclass)) throw new IOException("Class name for IO engine mismatch: " + ioclass + ", expected:" + ioEngine.getClass().getName()); if (!backingMap.getClass().getName().equals(mapclass)) throw new IOException("Class name for cache map mismatch: " + mapclass + ", expected:" + backingMap.getClass().getName()); UniqueIndexMap<Integer> deserMap = (UniqueIndexMap<Integer>) ois.readObject(); BucketAllocator allocator = new BucketAllocator(cacheCapacity, backingMap, this.realCacheSize); backingMap = (ConcurrentHashMap<BlockCacheKey, BucketEntry>) ois.readObject(); bucketAllocator = allocator; deserialiserMap = deserMap; } finally { if (ois != null) ois.close(); if (fis != null) fis.close(); if (!persistenceFile.delete()) { throw new IOException("Failed deleting persistence file " + persistenceFile.getAbsolutePath()); } } }
From source file:org.apache.hawq.pxf.plugins.hdfs.utilities.HdfsUtilities.java
/**
 * Parses fragment metadata and returns a matching {@link FileSplit}.
 *
 * @param inputData request input data
 * @return FileSplit with fragment metadata
 */
public static FileSplit parseFragmentMetadata(InputData inputData) {
    try {
        byte[] serializedLocation = inputData.getFragmentMetadata();
        if (serializedLocation == null) {
            throw new IllegalArgumentException("Missing fragment location information");
        }

        ByteArrayInputStream bytesStream = new ByteArrayInputStream(serializedLocation);
        ObjectInputStream objectStream = new ObjectInputStream(bytesStream);

        long start = objectStream.readLong();
        long end = objectStream.readLong();

        String[] hosts = (String[]) objectStream.readObject();

        FileSplit fileSplit = new FileSplit(new Path(inputData.getDataSource()), start, end, hosts);

        LOG.debug("parsed file split: path " + inputData.getDataSource() + ", start " + start + ", end " + end
                + ", hosts " + ArrayUtils.toString(hosts));

        return fileSplit;
    } catch (Exception e) {
        throw new RuntimeException("Exception while reading expected fragment metadata", e);
    }
}
From source file:org.codehaus.wadi.core.store.DiscStore.java
public void load(Putter putter) {
    long time = System.currentTimeMillis();
    String[] list = sessionStoreDir.list();
    int suffixLength = ".".length() + streamer.getSuffix().length();
    for (int i = 0; i < list.length; i++) {
        String name = list[i];
        String id = name.substring(0, name.length() - suffixLength);
        Motable motable = new BasicStoreMotable(this);
        File file = new File(sessionStoreDir, id + streamer.getSuffixWithDot());
        FileInputStream fis = null;
        ObjectInputStream ois = null;
        try {
            fis = new FileInputStream(file);
            ois = new ObjectInputStream(fis);
            long creationTime = ois.readLong();
            long lastAccessedTime = ois.readLong();
            int maxInactiveInterval = ois.readInt();
            name = (String) ois.readObject();
            motable.init(creationTime, lastAccessedTime, maxInactiveInterval, name);
            if (accessOnLoad) {
                motable.setLastAccessedTime(time);
            }
            if (!motable.getTimedOut(time)) {
                putter.put(id, motable);
            }
        } catch (Exception e) {
            log.warn("load (exclusive disc) failed [" + file + "]", e);
        } finally {
            try {
                if (null != ois) {
                    ois.close();
                }
            } catch (IOException e) {
                log.warn("load (exclusive disc) problem [" + file + "]", e);
            }
        }
    }
    log.info("loaded (exclusive disc): " + list.length);
}
From source file:org.codehaus.wadi.core.store.DiscStore.java
public byte[] loadBody(Motable motable) throws Exception {
    File file = new File(sessionStoreDir, motable.getId() + streamer.getSuffixWithDot());
    FileInputStream fis = null;
    try {
        fis = new FileInputStream(file);
        ObjectInputStream ois = new ObjectInputStream(fis);
        // skip the header fields: creation time, last-accessed time,
        // max inactive interval, and name
        ois.readLong();
        ois.readLong();
        ois.readInt();
        ois.readObject();
        int bodyLength = ois.readInt();
        byte[] body = new byte[bodyLength];
        // body bytes are read from the underlying file stream, not the
        // ObjectInputStream; note a single read() call is not guaranteed
        // to fill the buffer
        fis.read(body);
        if (log.isTraceEnabled()) {
            log.trace("loaded exclusive disc: " + file + ": " + bodyLength + " bytes");
        }
        return body;
    } catch (Exception e) {
        log.error("load exclusive disc failed: " + file, e);
        throw e;
    } finally {
        try {
            if (null != fis) {
                fis.close();
            }
        } catch (IOException e) {
            log.warn("load exclusive disc problem: " + file, e);
        }
    }
}
From source file:org.datanucleus.store.hbase.fieldmanager.FetchFieldManager.java
private long fetchLongInternal(AbstractMemberMetaData mmd, byte[] bytes) {
    long value;
    if (bytes == null) {
        // Handle missing field
        String dflt = HBaseUtils.getDefaultValueForMember(mmd);
        if (dflt != null) {
            return Long.valueOf(dflt).longValue();
        }
        return 0;
    }

    if (mmd.isSerialized()) {
        try {
            ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
            ObjectInputStream ois = new ObjectInputStream(bis);
            value = ois.readLong();
            ois.close();
            bis.close();
        } catch (IOException e) {
            throw new NucleusException(e.getMessage(), e);
        }
    } else {
        value = Bytes.toLong(bytes);
    }
    return value;
}