List of usage examples for java.io.InvalidObjectException

java.io.InvalidObjectException indicates that one or more deserialized objects failed validation tests. It is typically thrown from a readObject method or from ObjectInputValidation.validateObject. Its constructor takes the reason for the failure:

public InvalidObjectException(String reason)
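Before the project examples below, here is a minimal, hypothetical sketch of the usual pattern: readObject validates the freshly deserialized state and throws InvalidObjectException with a reason when validation fails. The Temperature class and its kelvin field are invented for illustration.

import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.Serializable;

public class Temperature implements Serializable {
    private static final long serialVersionUID = 1L;

    private final double kelvin;

    public Temperature(double kelvin) {
        this.kelvin = kelvin;
    }

    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        // Reject streams that decode to a physically impossible state.
        if (kelvin < 0) {
            throw new InvalidObjectException("kelvin must be non-negative: " + kelvin);
        }
    }
}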
From source file: ddf.catalog.data.impl.MetacardImpl.java

/**
 * Deserializes this {@link MetacardImpl}'s instance.
 *
 * @param stream the {@link ObjectInputStream} that contains the bytes of the object
 * @throws IOException
 * @throws ClassNotFoundException
 */
private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
    /*
     * defaultReadObject() is invoked for greater flexibility and compatibility. See the
     * *Serialization Note* in MetacardImpl's class Javadoc.
     */
    stream.defaultReadObject();

    map = new HashMap<String, Attribute>();
    wrappedMetacard = null;
    type = (MetacardType) stream.readObject();

    if (type == null) {
        throw new InvalidObjectException(MetacardType.class.getName() + " instance cannot be null.");
    }

    int numElements = stream.readInt();
    for (int i = 0; i < numElements; i++) {
        Attribute attribute = (Attribute) stream.readObject();
        if (attribute != null) {
            AttributeDescriptor attributeDescriptor = getMetacardType()
                    .getAttributeDescriptor(attribute.getName());
            if (attributeDescriptor != null && attribute.getValue() != null) {
                attributeDescriptor.getType().getAttributeFormat();
                attributeDescriptor.getType().getClass();
            }
        }
        setAttribute(attribute);
    }
}
From source file: eu.eidas.auth.commons.attribute.ImmutableAttributeMap.java

/**
 * Effective Java, 2nd Ed. : Item 78: Serialization Proxy pattern.
 */
private void readObject(@Nonnull ObjectInputStream objectInputStream) throws InvalidObjectException {
    throw new InvalidObjectException("Serialization Proxy required");
}
From source file: eu.eidas.auth.commons.attribute.ImmutableAttributeMap.java

/**
 * Effective Java, 2nd Ed. : Item 78: Serialization Proxy pattern.
 */
private void writeObject(@Nonnull ObjectOutputStream out) throws InvalidObjectException {
    throw new InvalidObjectException("Serialization Proxy required");
}
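The two ImmutableAttributeMap methods above implement the defensive half of the Serialization Proxy pattern (Effective Java, 2nd Ed., Item 78): the real class refuses direct deserialization, and a nested proxy carries the state instead. A minimal sketch of the whole pattern, using a hypothetical Period class adapted from that item, could look like this:

import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.Serializable;

public final class Period implements Serializable {
    private static final long serialVersionUID = 1L;

    private final long start;
    private final long end;

    public Period(long start, long end) {
        if (start > end) {
            throw new IllegalArgumentException("start must not be after end");
        }
        this.start = start;
        this.end = end;
    }

    // Serialize a proxy instead of instances of this class.
    private Object writeReplace() {
        return new SerializationProxy(this);
    }

    // A stream that contains a Period directly was not produced by
    // writeReplace() above, so it is rejected outright.
    private void readObject(ObjectInputStream in) throws InvalidObjectException {
        throw new InvalidObjectException("Serialization proxy required");
    }

    private static final class SerializationProxy implements Serializable {
        private static final long serialVersionUID = 1L;

        private final long start;
        private final long end;

        SerializationProxy(Period p) {
            this.start = p.start;
            this.end = p.end;
        }

        // Rebuild the real object through its public constructor so every
        // invariant check runs again during deserialization.
        private Object readResolve() {
            return new Period(start, end);
        }
    }
}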
From source file: net.di2e.ecdr.commons.CDRMetacard.java

/**
 * Deserializes this instance.
 *
 * @param stream
 *            the {@link ObjectInputStream} that contains the bytes of the object
 * @throws IOException
 * @throws ClassNotFoundException
 */
private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
    /*
     * defaultReadObject() is invoked for greater flexibility and compatibility. See the *Serialization Note* in
     * class Javadoc.
     */
    stream.defaultReadObject();

    map = new HashMap<String, Attribute>();
    wrappedMetacard = null;
    type = (MetacardType) stream.readObject();

    if (type == null) {
        throw new InvalidObjectException(MetacardType.class.getName() + " instance cannot be null.");
    }

    int numElements = stream.readInt();
    for (int i = 0; i < numElements; i++) {
        Attribute attribute = (Attribute) stream.readObject();
        if (attribute != null) {
            AttributeDescriptor attributeDescriptor = getMetacardType()
                    .getAttributeDescriptor(attribute.getName());
            if (attributeDescriptor != null && attribute.getValue() != null) {
                attributeDescriptor.getType().getAttributeFormat();
                attributeDescriptor.getType().getClass();
            }
        }
        setAttribute(attribute);
    }
}
From source file: com.odoo.core.orm.OModel.java

public void storeManyToManyRecord(String column_name, int row_id, List<Integer> relationIds, Command command)
        throws InvalidObjectException {
    OColumn column = getColumn(column_name);
    if (column != null) {
        OModel rel_model = createInstance(column.getType());
        String table = getTableName() + "_" + rel_model.getTableName() + "_rel";
        String base_column = getTableName() + "_id";
        String rel_column = rel_model.getTableName() + "_id";
        SQLiteDatabase db = getWritableDatabase();
        try {
            switch (command) {
            case Add:
                if (relationIds.size() > 0) {
                    for (int id : relationIds) {
                        ContentValues values = new ContentValues();
                        values.put(base_column, row_id);
                        values.put(rel_column, id);
                        values.put("_write_date", ODateUtils.getDate());
                        db.insert(table, null, values);
                    }
                }
                break;
            case Update:
                break;
            case Delete:
                // Deleting records from the relation table
                if (relationIds.size() > 0) {
                    for (int id : relationIds) {
                        db.delete(table, base_column + " = ? AND " + rel_column + " = ?",
                                new String[] { row_id + "", id + "" });
                    }
                }
                break;
            case Replace:
                // Removing old entries
                db.delete(table, base_column + " = ?", new String[] { row_id + "" });
                // Creating new entries
                storeManyToManyRecord(column_name, row_id, relationIds, Command.Add);
                break;
            }
        } finally {
            db.close();
            rel_model.close();
        }
    } else {
        throw new InvalidObjectException(
                "Column [" + column_name + "] not found in " + getModelName() + " model.");
    }
}
From source file: com.facebook.Session.java

private void readObject(ObjectInputStream stream) throws InvalidObjectException {
    throw new InvalidObjectException("Cannot readObject, serialization proxy required");
}
From source file: com.TagFu.facebook.Session.java

private void readObject(final ObjectInputStream stream) throws InvalidObjectException {
    throw new InvalidObjectException("Cannot readObject, serialization proxy required");
}
From source file: edu.harvard.iq.dvn.ingest.statdataio.impl.plugins.dta.DTAFileReader.java

private void decodeData(BufferedInputStream stream) throws IOException {
    dbgLog.fine("\n***** decodeData(): start *****");

    if (stream == null) {
        throw new IllegalArgumentException("stream == null!");
    }

    int nvar = (Integer) smd.getFileInformation().get("varQnty");
    int nobs = (Integer) smd.getFileInformation().get("caseQnty");

    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("data dimension[rxc]=(" + nobs + "," + nvar + ")");
    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("bytes per row=" + bytes_per_row + " bytes");
    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("variableTypelList=" + Arrays.deepToString(variableTypelList));
    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("StringVariableTable=" + StringVariableTable);

    FileOutputStream fileOutTab = null;
    PrintWriter pwout = null;

    // create a File object to save the tab-delimited data file
    File tabDelimitedDataFile = File.createTempFile("tempTabfile.", ".tab");
    String tabDelimitedDataFileName = tabDelimitedDataFile.getAbsolutePath();

    // save the temp file name in the metadata object
    smd.getFileInformation().put("tabDelimitedDataFileLocation", tabDelimitedDataFileName);

    fileOutTab = new FileOutputStream(tabDelimitedDataFile);
    pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true);

    // data storage
    // Object[][] dataTable = new Object[nobs][nvar];
    // for later variable-wise calculations of statistics
    // dataTable2 stores cut-out data column-wise
    Object[][] dataTable2 = new Object[nvar][nobs];
    String[][] dateFormat = new String[nvar][nobs];

    for (int i = 0; i < nobs; i++) {
        byte[] dataRowBytes = new byte[bytes_per_row];
        Object[] dataRow = new Object[nvar];

        int nbytes = stream.read(dataRowBytes, 0, bytes_per_row);
        if (nbytes == 0) {
            String errorMessage = "reading data: no data were read at (" + i + "th row)";
            throw new IOException(errorMessage);
        }

        // decoding each row
        int byte_offset = 0;
        for (int columnCounter = 0; columnCounter < variableTypelList.length; columnCounter++) {

            Integer varType = variableTypeMap.get(variableTypelList[columnCounter]);
            String variableFormat = variableFormats[columnCounter];
            boolean isDateTimeDatum = isDateTimeDatumList[columnCounter];

            switch (varType != null ? varType : 256) {
            case -5:
                // Byte case
                // note: 1 byte signed
                byte byte_datum = dataRowBytes[byte_offset];

                if (dbgLog.isLoggable(Level.FINER))
                    dbgLog.finer(i + "-th row " + columnCounter + "=th column byte =" + byte_datum);
                if (byte_datum >= BYTE_MISSING_VALUE) {
                    if (dbgLog.isLoggable(Level.FINER))
                        dbgLog.finer(i + "-th row " + columnCounter + "=th column byte MV=" + byte_datum);
                    dataRow[columnCounter] = MissingValueForTextDataFileNumeric;
                    // use null reference to indicate missing value in data that is passed to UNF
                    dataTable2[columnCounter][i] = null;
                } else {
                    dataRow[columnCounter] = byte_datum;
                    dataTable2[columnCounter][i] = byte_datum;
                }
                byte_offset++;
                break;
            case -4:
                // Stata-int (= java's short: 2 bytes) case
                // note: 2-byte signed int, not java's int
                ByteBuffer int_buffer = ByteBuffer.wrap(dataRowBytes, byte_offset, 2);
                if (isLittleEndian) {
                    int_buffer.order(ByteOrder.LITTLE_ENDIAN);
                }
                short short_datum = int_buffer.getShort();

                if (dbgLog.isLoggable(Level.FINER))
                    dbgLog.finer(i + "-th row " + columnCounter + "=th column stata int =" + short_datum);
                if (short_datum >= INT_MISSIG_VALUE) {
                    if (dbgLog.isLoggable(Level.FINER))
                        dbgLog.finer(i + "-th row " + columnCounter + "=th column stata long missing value="
                                + short_datum);
                    // use null reference to indicate missing value in data that is passed to UNF
                    dataTable2[columnCounter][i] = null;
                    if (isDateTimeDatum) {
                        dataRow[columnCounter] = MissingValueForTextDataFileString;
                    } else {
                        dataRow[columnCounter] = MissingValueForTextDataFileNumeric;
                    }
                } else {
                    if (isDateTimeDatum) {
                        DecodedDateTime ddt = decodeDateTimeData("short", variableFormat,
                                Short.toString(short_datum));
                        if (dbgLog.isLoggable(Level.FINER))
                            dbgLog.finer(i + "-th row , decodedDateTime " + ddt.decodedDateTime + ", format="
                                    + ddt.format);
                        dataRow[columnCounter] = ddt.decodedDateTime;
                        dateFormat[columnCounter][i] = ddt.format;
                        dataTable2[columnCounter][i] = dataRow[columnCounter];
                    } else {
                        dataTable2[columnCounter][i] = short_datum;
                        dataRow[columnCounter] = short_datum;
                    }
                }
                byte_offset += 2;
                break;
            case -3:
                // stata-Long (= java's int: 4 bytes) case
                // note: 4-byte signed, not java's long
                dbgLog.fine("DATreader: stata long");

                ByteBuffer long_buffer = ByteBuffer.wrap(dataRowBytes, byte_offset, 4);
                if (isLittleEndian) {
                    long_buffer.order(ByteOrder.LITTLE_ENDIAN);
                }
                int int_datum = long_buffer.getInt();

                if (dbgLog.isLoggable(Level.FINE))
                    dbgLog.fine(i + "-th row " + columnCounter + "=th column stata long =" + int_datum);
                if (int_datum >= LONG_MISSING_VALUE) {
                    if (dbgLog.isLoggable(Level.FINE))
                        dbgLog.fine(i + "-th row " + columnCounter + "=th column stata long missing value="
                                + int_datum);
                    // use null reference to indicate missing value in data that is passed to UNF
                    dataTable2[columnCounter][i] = null;
                    if (isDateTimeDatum) {
                        dataRow[columnCounter] = MissingValueForTextDataFileString;
                    } else {
                        dataRow[columnCounter] = MissingValueForTextDataFileNumeric;
                    }
                } else {
                    if (isDateTimeDatum) {
                        DecodedDateTime ddt = decodeDateTimeData("int", variableFormat,
                                Integer.toString(int_datum));
                        if (dbgLog.isLoggable(Level.FINER))
                            dbgLog.finer(i + "-th row , decodedDateTime " + ddt.decodedDateTime + ", format="
                                    + ddt.format);
                        dataRow[columnCounter] = ddt.decodedDateTime;
                        dateFormat[columnCounter][i] = ddt.format;
                        dataTable2[columnCounter][i] = dataRow[columnCounter];
                    } else {
                        dataTable2[columnCounter][i] = int_datum;
                        dataRow[columnCounter] = int_datum;
                    }
                }
                byte_offset += 4;
                break;
            case -2:
                // float case
                // note: 4 bytes
                ByteBuffer float_buffer = ByteBuffer.wrap(dataRowBytes, byte_offset, 4);
                if (isLittleEndian) {
                    float_buffer.order(ByteOrder.LITTLE_ENDIAN);
                }
                float float_datum = float_buffer.getFloat();

                if (dbgLog.isLoggable(Level.FINER))
                    dbgLog.finer(i + "-th row " + columnCounter + "=th column float =" + float_datum);
                if (FLOAT_MISSING_VALUE_SET.contains(float_datum)) {
                    if (dbgLog.isLoggable(Level.FINER))
                        dbgLog.finer(i + "-th row " + columnCounter + "=th column float missing value="
                                + float_datum);
                    // use null reference to indicate missing value in data that is passed to UNF
                    dataTable2[columnCounter][i] = null;
                    if (isDateTimeDatum) {
                        dataRow[columnCounter] = MissingValueForTextDataFileString;
                    } else {
                        dataRow[columnCounter] = MissingValueForTextDataFileNumeric;
                    }
                } else {
                    if (isDateTimeDatum) {
                        DecodedDateTime ddt = decodeDateTimeData("float", variableFormat,
                                doubleNumberFormatter.format(float_datum));
                        if (dbgLog.isLoggable(Level.FINER))
                            dbgLog.finer(i + "-th row , decodedDateTime " + ddt.decodedDateTime + ", format="
                                    + ddt.format);
                        dataRow[columnCounter] = ddt.decodedDateTime;
                        dateFormat[columnCounter][i] = ddt.format;
                        dataTable2[columnCounter][i] = dataRow[columnCounter];
                    } else {
                        dataTable2[columnCounter][i] = float_datum;
                        dataRow[columnCounter] = float_datum;
                    }
                }
                byte_offset += 4;
                break;
            case -1:
                // double case
                // note: 8 bytes
                ByteBuffer double_buffer = ByteBuffer.wrap(dataRowBytes, byte_offset, 8);
                if (isLittleEndian) {
                    double_buffer.order(ByteOrder.LITTLE_ENDIAN);
                }
                double double_datum = double_buffer.getDouble();

                if (DOUBLE_MISSING_VALUE_SET.contains(double_datum)) {
                    // use null reference to indicate missing value in data that is passed to UNF
                    dataTable2[columnCounter][i] = null;
                    if (dbgLog.isLoggable(Level.FINER))
                        dbgLog.finer(i + "-th row " + columnCounter + "=th column double missing value="
                                + double_datum);
                    if (isDateTimeDatum) {
                        dataRow[columnCounter] = MissingValueForTextDataFileString;
                    } else {
                        dataRow[columnCounter] = MissingValueForTextDataFileNumeric;
                    }
                } else {
                    if (isDateTimeDatum) {
                        DecodedDateTime ddt = decodeDateTimeData("double", variableFormat,
                                doubleNumberFormatter.format(double_datum));
                        if (dbgLog.isLoggable(Level.FINER))
                            dbgLog.finer(i + "-th row , decodedDateTime " + ddt.decodedDateTime + ", format="
                                    + ddt.format);
                        dataRow[columnCounter] = ddt.decodedDateTime;
                        dateFormat[columnCounter][i] = ddt.format;
                        dataTable2[columnCounter][i] = dataRow[columnCounter];
                    } else {
                        dataTable2[columnCounter][i] = double_datum;
                        dataRow[columnCounter] = doubleNumberFormatter.format(double_datum);
                    }
                }
                byte_offset += 8;
                break;
            case 0:
                // String case
                int strVarLength = StringVariableTable.get(columnCounter);
                String raw_datum = new String(
                        Arrays.copyOfRange(dataRowBytes, byte_offset, (byte_offset + strVarLength)),
                        "ISO-8859-1");
                String string_datum = getNullStrippedString(raw_datum);

                if (dbgLog.isLoggable(Level.FINER))
                    dbgLog.finer(i + "-th row " + columnCounter + "=th column string =" + string_datum);
                if (string_datum.equals("")) {
                    if (dbgLog.isLoggable(Level.FINER))
                        dbgLog.finer(i + "-th row " + columnCounter + "=th column string missing value="
                                + string_datum);
                    dataRow[columnCounter] = MissingValueForTextDataFileString;
                    // use null reference to indicate missing value in data that is passed to UNF
                    dataTable2[columnCounter][i] = null;
                } else {
                    String escapedString = string_datum.replaceAll("\"", Matcher.quoteReplacement("\\\""));
                    /*
                     * Fixing the bug we've had in the Stata reader for the longest time: new lines
                     * and tabs need to be escaped too - otherwise it breaks our TAB file structure!
                     * -- L.A.
                     */
                    escapedString = escapedString.replaceAll("\t", Matcher.quoteReplacement("\\t"));
                    escapedString = escapedString.replaceAll("\n", Matcher.quoteReplacement("\\n"));
                    escapedString = escapedString.replaceAll("\r", Matcher.quoteReplacement("\\r"));
                    // the escaped version of the string will be stored in the tab file:
                    dataRow[columnCounter] = "\"" + escapedString + "\"";
                    // but note that the "raw" version of it is used for the UNF:
                    dataTable2[columnCounter][i] = string_datum;
                }
                byte_offset += strVarLength;
                break;
            default:
                dbgLog.fine("unknown variable type found");
                String errorMessage = "unknown variable type found at data section";
                throw new InvalidObjectException(errorMessage);
            } // switch
        } // for-columnCounter

        // dump the row of data to the external file
        pwout.println(StringUtils.join(dataRow, "\t"));

        if (dbgLog.isLoggable(Level.FINE))
            dbgLog.fine(i + "-th row's data={" + StringUtils.join(dataRow, ",") + "};");
    } // for-i (row)

    pwout.close();

    if (dbgLog.isLoggable(Level.FINER)) {
        dbgLog.finer("\ndataTable2(variable-wise):\n");
        dbgLog.finer(Arrays.deepToString(dataTable2));
        dbgLog.finer("\ndateFormat(variable-wise):\n");
        dbgLog.finer(Arrays.deepToString(dateFormat));
    }
    if (dbgLog.isLoggable(Level.FINE)) {
        dbgLog.fine("variableTypelList:\n" + Arrays.deepToString(variableTypelList));
        dbgLog.fine("variableTypelListFinal:\n" + Arrays.deepToString(variableTypelListFinal));
    }

    String[] unfValues = new String[nvar];
    for (int j = 0; j < nvar; j++) {
        String variableType_j = variableTypelListFinal[j];
        unfValues[j] = getUNF(dataTable2[j], dateFormat[j], variableType_j, unfVersionNumber, j);
        if (dbgLog.isLoggable(Level.FINE))
            dbgLog.fine(j + "th unf value" + unfValues[j]);
    }

    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("unf set:\n" + Arrays.deepToString(unfValues));

    fileUnfValue = UNF5Util.calculateUNF(unfValues);

    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("file-unf=" + fileUnfValue);

    stataDataSection.setUnf(unfValues);
    stataDataSection.setFileUnf(fileUnfValue);
    smd.setVariableUNF(unfValues);
    smd.getFileInformation().put("fileUNF", fileUnfValue);

    if (dbgLog.isLoggable(Level.FINE))
        dbgLog.fine("unf values:\n" + unfValues);

    stataDataSection.setData(dataTable2);

    // close the stream
    dbgLog.fine("***** decodeData(): end *****\n\n");
}
From source file: org.hibernate.engine.StatefulPersistenceContext.java

public static StatefulPersistenceContext deserialize(ObjectInputStream ois, SessionImplementor session)
        throws IOException, ClassNotFoundException {
    log.trace("deserializing persistent-context");
    StatefulPersistenceContext rtn = new StatefulPersistenceContext(session);

    // during deserialization, we need to reconnect all proxies and
    // collections to this session, as well as the EntityEntry and
    // CollectionEntry instances; these associations are transient
    // because serialization is used for different things.

    try {
        rtn.defaultReadOnly = ois.readBoolean();
        // todo : we can actually just determine this from the incoming EntityEntry-s
        rtn.hasNonReadOnlyEntities = ois.readBoolean();

        int count = ois.readInt();
        log.trace("starting deserialization of [" + count + "] entitiesByKey entries");
        rtn.entitiesByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.entitiesByKey.put(EntityKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        log.trace("starting deserialization of [" + count + "] entitiesByUniqueKey entries");
        rtn.entitiesByUniqueKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.entitiesByUniqueKey.put(EntityUniqueKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        log.trace("starting deserialization of [" + count + "] proxiesByKey entries");
        rtn.proxiesByKey = new ReferenceMap(ReferenceMap.HARD, ReferenceMap.WEAK,
                count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count, .75f);
        for (int i = 0; i < count; i++) {
            EntityKey ek = EntityKey.deserialize(ois, session);
            Object proxy = ois.readObject();
            if (proxy instanceof HibernateProxy) {
                ((HibernateProxy) proxy).getHibernateLazyInitializer().setSession(session);
                rtn.proxiesByKey.put(ek, proxy);
            } else {
                // otherwise, the proxy was pruned during the serialization process
                log.trace("encountered pruned proxy");
            }
        }

        count = ois.readInt();
        log.trace("starting deserialization of [" + count + "] entitySnapshotsByKey entries");
        rtn.entitySnapshotsByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.entitySnapshotsByKey.put(EntityKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        log.trace("starting deserialization of [" + count + "] entityEntries entries");
        rtn.entityEntries = IdentityMap.instantiateSequenced(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            Object entity = ois.readObject();
            EntityEntry entry = EntityEntry.deserialize(ois, session);
            rtn.entityEntries.put(entity, entry);
        }

        count = ois.readInt();
        log.trace("starting deserialization of [" + count + "] collectionsByKey entries");
        rtn.collectionsByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.collectionsByKey.put(CollectionKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        log.trace("starting deserialization of [" + count + "] collectionEntries entries");
        rtn.collectionEntries = IdentityMap
                .instantiateSequenced(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            final PersistentCollection pc = (PersistentCollection) ois.readObject();
            final CollectionEntry ce = CollectionEntry.deserialize(ois, session);
            pc.setCurrentSession(session);
            rtn.collectionEntries.put(pc, ce);
        }

        count = ois.readInt();
        log.trace("starting deserialization of [" + count + "] arrayHolders entries");
        rtn.arrayHolders = IdentityMap.instantiate(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.arrayHolders.put(ois.readObject(), ois.readObject());
        }

        count = ois.readInt();
        log.trace("starting deserialization of [" + count + "] nullifiableEntityKeys entries");
        rtn.nullifiableEntityKeys = new HashSet();
        for (int i = 0; i < count; i++) {
            rtn.nullifiableEntityKeys.add(EntityKey.deserialize(ois, session));
        }
    } catch (HibernateException he) {
        throw new InvalidObjectException(he.getMessage());
    }

    return rtn;
}
From source file: org.hibernate.engine.internal.StatefulPersistenceContext.java

public static StatefulPersistenceContext deserialize(ObjectInputStream ois, SessionImplementor session)
        throws IOException, ClassNotFoundException {
    LOG.trace("Deserializing persistent-context");
    StatefulPersistenceContext rtn = new StatefulPersistenceContext(session);

    // during deserialization, we need to reconnect all proxies and
    // collections to this session, as well as the EntityEntry and
    // CollectionEntry instances; these associations are transient
    // because serialization is used for different things.

    try {
        rtn.defaultReadOnly = ois.readBoolean();
        // todo : we can actually just determine this from the incoming EntityEntry-s
        rtn.hasNonReadOnlyEntities = ois.readBoolean();

        int count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] entitiesByKey entries");
        rtn.entitiesByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.entitiesByKey.put(EntityKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] entitiesByUniqueKey entries");
        rtn.entitiesByUniqueKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.entitiesByUniqueKey.put(EntityUniqueKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] proxiesByKey entries");
        rtn.proxiesByKey = new ReferenceMap(AbstractReferenceMap.HARD, AbstractReferenceMap.WEAK,
                count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count, .75f);
        for (int i = 0; i < count; i++) {
            EntityKey ek = EntityKey.deserialize(ois, session);
            Object proxy = ois.readObject();
            if (proxy instanceof HibernateProxy) {
                ((HibernateProxy) proxy).getHibernateLazyInitializer().setSession(session);
                rtn.proxiesByKey.put(ek, proxy);
            } else {
                // otherwise, the proxy was pruned during the serialization process
                LOG.trace("Encountered pruned proxy");
            }
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] entitySnapshotsByKey entries");
        rtn.entitySnapshotsByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.entitySnapshotsByKey.put(EntityKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] entityEntries entries");
        rtn.entityEntries = IdentityMap.instantiateSequenced(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            Object entity = ois.readObject();
            EntityEntry entry = EntityEntry.deserialize(ois, session);
            rtn.entityEntries.put(entity, entry);
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] collectionsByKey entries");
        rtn.collectionsByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.collectionsByKey.put(CollectionKey.deserialize(ois, session), ois.readObject());
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] collectionEntries entries");
        rtn.collectionEntries = IdentityMap
                .instantiateSequenced(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            final PersistentCollection pc = (PersistentCollection) ois.readObject();
            final CollectionEntry ce = CollectionEntry.deserialize(ois, session);
            pc.setCurrentSession(session);
            rtn.collectionEntries.put(pc, ce);
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] arrayHolders entries");
        rtn.arrayHolders = IdentityMap.instantiate(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
        for (int i = 0; i < count; i++) {
            rtn.arrayHolders.put(ois.readObject(), ois.readObject());
        }

        count = ois.readInt();
        LOG.trace("Starting deserialization of [" + count + "] nullifiableEntityKey entries");
        rtn.nullifiableEntityKeys = new HashSet();
        for (int i = 0; i < count; i++) {
            rtn.nullifiableEntityKeys.add(EntityKey.deserialize(ois, session));
        }
    } catch (HibernateException he) {
        throw new InvalidObjectException(he.getMessage());
    }

    return rtn;
}