List of usage examples for java.io.ObjectInputStream.readBoolean()
public boolean readBoolean() throws IOException
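Before the project-specific examples below, here is a minimal, self-contained sketch of the usual pairing: a boolean written with ObjectOutputStream.writeBoolean is read back, in the same order, with readBoolean. The class name and values are illustrative only and do not come from any of the projects listed below.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

public class ReadBooleanDemo {
    public static void main(String[] args) throws IOException {
        // Write a boolean (and another primitive) into an in-memory object stream.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(baos);
        oos.writeBoolean(true);
        oos.writeInt(42);
        oos.close();

        // Read the values back in exactly the order they were written.
        ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()));
        boolean flag = ois.readBoolean(); // true
        int answer = ois.readInt();       // 42
        ois.close();

        System.out.println(flag + " " + answer);
    }
}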
From source file:no.sesat.search.datamodel.BeanDataObjectInvocationHandler.java
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
    invocationTargetCache = new ConcurrentHashMap<Method, InvocationTarget>(5, 0.75f, 2);
    supportMethodCache = new ConcurrentHashMap<Method, Method>(0, 0.75f, 2);
    implementOf = (Class<T>) stream.readObject();
    context = (BeanContext) stream.readObject();
    support = stream.readObject();
    immutable = stream.readBoolean();
    properties = (List<Property>) stream.readObject();
    toString = null;
}
From source file:org.apache.axis.message.RPCParam.java
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    if (in.readBoolean()) {
        setQName(new QName((String) in.readObject(), (String) in.readObject()));
    }
    in.defaultReadObject();
}
From source file:org.apache.lens.driver.hive.TestRemoteHiveDriver.java
/**
 * Read context.
 *
 * @param bytes  the bytes
 * @param driver the driver
 * @return the query context
 * @throws IOException            Signals that an I/O exception has occurred.
 * @throws ClassNotFoundException the class not found exception
 */
private QueryContext readContext(byte[] bytes, LensDriver driver) throws IOException, ClassNotFoundException {
    ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
    ObjectInputStream in = new ObjectInputStream(bais);
    QueryContext ctx;
    try {
        ctx = (QueryContext) in.readObject();
        ctx.setConf(queryConf);
        boolean driverAvailable = in.readBoolean();
        if (driverAvailable) {
            String driverQualifiedName = in.readUTF();
            ctx.setSelectedDriver(driver);
        }
    } finally {
        in.close();
        bais.close();
    }
    return ctx;
}
From source file:org.bigtextml.topics.ParallelTopicModel.java
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    int version = in.readInt();
    // This part needs to be re-written
    data = (TopicAssignmentBigMap) in.readObject();
    alphabet = (BigAlphabet) in.readObject();
    topicAlphabet = (BigLabelAlphabet) in.readObject();
    numTopics = in.readInt();
    topicMask = in.readInt();
    topicBits = in.readInt();
    numTypes = in.readInt();
    alpha = (double[]) in.readObject();
    alphaSum = in.readDouble();
    beta = in.readDouble();
    betaSum = in.readDouble();
    typeTopicCounts = (int[][]) in.readObject();
    tokensPerTopic = (int[]) in.readObject();
    docLengthCounts = (int[]) in.readObject();
    topicDocCounts = (int[][]) in.readObject();
    numIterations = in.readInt();
    burninPeriod = in.readInt();
    saveSampleInterval = in.readInt();
    optimizeInterval = in.readInt();
    showTopicsInterval = in.readInt();
    wordsPerTopic = in.readInt();
    saveStateInterval = in.readInt();
    stateFilename = (String) in.readObject();
    saveModelInterval = in.readInt();
    modelFilename = (String) in.readObject();
    randomSeed = in.readInt();
    formatter = (NumberFormat) in.readObject();
    printLogLikelihood = in.readBoolean();
    numThreads = in.readInt();
}
From source file:org.datanucleus.store.hbase.fieldmanager.FetchFieldManager.java
private boolean fetchBooleanInternal(AbstractMemberMetaData mmd, byte[] bytes) {
    boolean value;
    if (bytes == null) {
        // Handle missing field
        String dflt = HBaseUtils.getDefaultValueForMember(mmd);
        if (dflt != null) {
            return Boolean.valueOf(dflt).booleanValue();
        }
        return false;
    }
    if (mmd.isSerialized()) {
        try {
            ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
            ObjectInputStream ois = new ObjectInputStream(bis);
            value = ois.readBoolean();
            ois.close();
            bis.close();
        } catch (IOException e) {
            throw new NucleusException(e.getMessage(), e);
        }
    } else {
        value = Bytes.toBoolean(bytes);
    }
    return value;
}
From source file:org.hibernate.engine.internal.StatefulPersistenceContext.java
public static StatefulPersistenceContext deserialize(ObjectInputStream ois, SessionImplementor session)
        throws IOException, ClassNotFoundException {
    LOG.trace("Serializing persistent-context");
    StatefulPersistenceContext rtn = new StatefulPersistenceContext(session);

    // during deserialization, we need to reconnect all proxies and
    // collections to this session, as well as the EntityEntry and
    // CollectionEntry instances; these associations are transient
    // because serialization is used for different things.
    try {
        rtn.defaultReadOnly = ois.readBoolean();
        // todo : we can actually just determine this from the incoming EntityEntry-s
        rtn.hasNonReadOnlyEntities = ois.readBoolean();

        int count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] entitiesByKey entries");
        rtn.entitiesByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.entitiesByKey.put(EntityKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] entitiesByUniqueKey entries");
        rtn.entitiesByUniqueKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.entitiesByUniqueKey.put(EntityUniqueKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] proxiesByKey entries");
        rtn.proxiesByKey = new ReferenceMap(AbstractReferenceMap.HARD, AbstractReferenceMap.WEAK,
                count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count, .75f);
        for (int i = 0; i < count; i++) {
            EntityKey ek = EntityKey.deserialize(ois, session);
            Object proxy = ois.readObject();
            if (proxy instanceof HibernateProxy) {
                ((HibernateProxy) proxy).getHibernateLazyInitializer().setSession(session);
                rtn.proxiesByKey.put(ek, proxy);
            } else
                LOG.trace("Encountered prunded proxy");
            // otherwise, the proxy was pruned during the serialization process
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] entitySnapshotsByKey entries");
        rtn.entitySnapshotsByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.entitySnapshotsByKey.put(EntityKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] entityEntries entries");
        rtn.entityEntries = IdentityMap.instantiateSequenced(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            Object entity = ois.readObject();
            EntityEntry entry = EntityEntry.deserialize(ois, session);
            rtn.entityEntries.put(entity, entry);
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] collectionsByKey entries");
        rtn.collectionsByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.collectionsByKey.put(CollectionKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] collectionEntries entries");
        rtn.collectionEntries = IdentityMap.instantiateSequenced(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            final PersistentCollection pc = (PersistentCollection) ois.readObject();
            final CollectionEntry ce = CollectionEntry.deserialize(ois, session);
            pc.setCurrentSession(session);
            rtn.collectionEntries.put(pc, ce);
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] arrayHolders entries");
        rtn.arrayHolders = IdentityMap.instantiate(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.arrayHolders.put(ois.readObject(), ois.readObject());
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] nullifiableEntityKey entries");
        rtn.nullifiableEntityKeys = new HashSet();
        for (int i = 0; i < count; i++) {
            rtn.nullifiableEntityKeys.add(EntityKey.deserialize(ois, session));
        }
    } catch (HibernateException he) {
        throw new InvalidObjectException(he.getMessage());
    }
    return rtn;
}
From source file:org.hibernate.engine.StatefulPersistenceContext.java
public static StatefulPersistenceContext deserialize(ObjectInputStream ois, SessionImplementor session)
        throws IOException, ClassNotFoundException {
    log.trace("deserializing persistent-context");
    StatefulPersistenceContext rtn = new StatefulPersistenceContext(session);

    // during deserialization, we need to reconnect all proxies and
    // collections to this session, as well as the EntityEntry and
    // CollectionEntry instances; these associations are transient
    // because serialization is used for different things.
    try {
        rtn.defaultReadOnly = ois.readBoolean();
        // todo : we can actually just determine this from the incoming EntityEntry-s
        rtn.hasNonReadOnlyEntities = ois.readBoolean();

        int count = ois.readInt();
        log.trace("staring deserialization of [" + count + "] entitiesByKey entries");
        rtn.entitiesByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.entitiesByKey.put(EntityKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        log.trace("staring deserialization of [" + count + "] entitiesByUniqueKey entries");
        rtn.entitiesByUniqueKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.entitiesByUniqueKey.put(EntityUniqueKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        log.trace("staring deserialization of [" + count + "] proxiesByKey entries");
        rtn.proxiesByKey = new ReferenceMap(ReferenceMap.HARD, ReferenceMap.WEAK,
                count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count, .75f);
        for (int i = 0; i < count; i++) {
            EntityKey ek = EntityKey.deserialize(ois, session);
            Object proxy = ois.readObject();
            if (proxy instanceof HibernateProxy) {
                ((HibernateProxy) proxy).getHibernateLazyInitializer().setSession(session);
                rtn.proxiesByKey.put(ek, proxy);
            } else {
                log.trace("encountered prunded proxy");
            }
            // otherwise, the proxy was pruned during the serialization process
        }

        count = ois.readInt();
        log.trace("staring deserialization of [" + count + "] entitySnapshotsByKey entries");
        rtn.entitySnapshotsByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.entitySnapshotsByKey.put(EntityKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        log.trace("staring deserialization of [" + count + "] entityEntries entries");
        rtn.entityEntries = IdentityMap.instantiateSequenced(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            Object entity = ois.readObject();
            EntityEntry entry = EntityEntry.deserialize(ois, session);
            rtn.entityEntries.put(entity, entry);
        }

        count = ois.readInt();
        log.trace("staring deserialization of [" + count + "] collectionsByKey entries");
        rtn.collectionsByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.collectionsByKey.put(CollectionKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        log.trace("staring deserialization of [" + count + "] collectionEntries entries");
        rtn.collectionEntries = IdentityMap.instantiateSequenced(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            final PersistentCollection pc = (PersistentCollection) ois.readObject();
            final CollectionEntry ce = CollectionEntry.deserialize(ois, session);
            pc.setCurrentSession(session);
            rtn.collectionEntries.put(pc, ce);
        }

        count = ois.readInt();
        log.trace("staring deserialization of [" + count + "] arrayHolders entries");
        rtn.arrayHolders = IdentityMap.instantiate(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.arrayHolders.put(ois.readObject(), ois.readObject());
        }

        count = ois.readInt();
        log.trace("staring deserialization of [" + count + "] nullifiableEntityKeys entries");
        rtn.nullifiableEntityKeys = new HashSet();
        for (int i = 0; i < count; i++) {
            rtn.nullifiableEntityKeys.add(EntityKey.deserialize(ois, session));
        }
    } catch (HibernateException he) {
        throw new InvalidObjectException(he.getMessage());
    }
    return rtn;
}
From source file:org.jactr.tools.async.message.ast.BaseASTMessage.java
private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    boolean astIsNotNull = in.readBoolean();
    if (astIsNotNull) {
        boolean wasCompressed = in.readBoolean();
        if (wasCompressed) {
            /*
             * pull through GZIPInputStream. this can be done more efficiently..
             */
            int len = in.readInt();
            byte[] bytes = _localInput.get();
            if (bytes == null || bytes.length < len) {
                bytes = new byte[len];
                _localInput.set(bytes);
            }
            if (LOGGER.isDebugEnabled())
                LOGGER.debug("Reading " + len + " bytes to decompress");
            in.read(bytes, 0, len);
            // in.readFully(bytes);
            ByteArrayInputStream bais = new ByteArrayInputStream(bytes, 0, len);
            DataInputStream zip = new DataInputStream(new GZIPInputStream(bais));
            _ast = Serializer.read(zip);
        } else
            _ast = Serializer.read(in);
    }
}
From source file:org.mule.api.MessagingException.java
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    boolean failingMessageProcessorWasSerialized = in.readBoolean();
    if (failingMessageProcessorWasSerialized) {
        this.failingMessageProcessor = (MessageProcessor) in.readObject();
    }
}
From source file:org.mule.DefaultMuleMessage.java
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    boolean payloadWasSerialized = in.readBoolean();
    if (payloadWasSerialized) {
        payload = in.readObject();
    } else {
        int payloadSize = in.readInt();
        byte[] serializedPayload = new byte[payloadSize];
        in.read(serializedPayload);
        payload = serializedPayload;
    }
    inboundAttachments = deserializeAttachments((Map<String, SerializedDataHandler>) in.readObject());
    outboundAttachments = deserializeAttachments((Map<String, SerializedDataHandler>) in.readObject());
}