Example usage for java.util Queue poll

List of usage examples for java.util Queue poll

Introduction

On this page you can find usage examples for java.util.Queue.poll().

Prototype

E poll();

Document

Retrieves and removes the head of this queue, or returns null if this queue is empty.

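A minimal, self-contained sketch of that contract (using ArrayDeque, one of the standard Queue implementations; the class name PollDemo is only illustrative):

import java.util.ArrayDeque;
import java.util.Queue;

public class PollDemo {
    public static void main(String[] args) {
        Queue<String> queue = new ArrayDeque<>();
        queue.add("first");
        queue.add("second");

        // poll() retrieves and removes the head of the queue (FIFO order for ArrayDeque)
        System.out.println(queue.poll()); // "first"
        System.out.println(queue.poll()); // "second"

        // Unlike remove(), poll() returns null instead of throwing when the queue is empty
        System.out.println(queue.poll()); // "null"
    }
}
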
Usage

From source file:com.ricemap.spateDB.core.RTree.java

/**
 * Builds the RTree given a serialized list of elements. It uses the given
 * stockObject to deserialize these elements and build the tree. Also writes
 * the created tree to the disk directly.
 * @param elements
 *            - serialization of elements to be written
 * @param offset
 *            - index of the first element to use in the elements array
 * @param len
 *            - number of bytes to use from the elements array
 * @param bytesAvailable
 *            - size available (in bytes) to store the tree structures
 * @param dataOut
 *            - an output to use for writing the tree to
 * @param fast_sort
 *            - setting this to <code>true</code> allows the method to run
 *            faster by materializing the offset of each element in the list
 *            which speeds up the comparison. However, this requires an
 *            additional 16 bytes per element. So, for each 1M elements, the
 *            method will require an additional 16 M bytes (approximately).
 */
public void bulkLoadWrite(final byte[] element_bytes, final int offset, final int len, final int degree,
        DataOutput dataOut, final boolean fast_sort, final boolean columnarStorage) {
    try {
        columnar = columnarStorage;
        //TODO: the order of fields should be stable under Oracle JVM, but not guaranteed
        Field[] fields = stockObject.getClass().getDeclaredFields();

        // Count number of elements in the given text
        int i_start = offset;
        final Text line = new Text();
        while (i_start < offset + len) {
            int i_end = skipToEOL(element_bytes, i_start);
            // Extract the line without end of line character
            line.set(element_bytes, i_start, i_end - i_start - 1);
            stockObject.fromText(line);

            elementCount++;
            i_start = i_end;
        }
        LOG.info("Bulk loading an RTree with " + elementCount + " elements");

        // It turns out that findBestDegree returns the best degree when the
        // whole tree is loaded into memory for processing. However, as the
        // current algorithms process the tree while it is on disk, a higher
        // degree should be selected such that a node fits in one file block
        // (assumed to be 4K).
        // final int degree = findBestDegree(bytesAvailable, elementCount);
        LOG.info("Writing an RTree with degree " + degree);

        int height = Math.max(1, (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
        int leafNodeCount = (int) Math.pow(degree, height - 1);
        if (elementCount < 2 * leafNodeCount && height > 1) {
            height--;
            leafNodeCount = (int) Math.pow(degree, height - 1);
        }
        int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
        int nonLeafNodeCount = nodeCount - leafNodeCount;

        // Keep track of the offset of each element in the text
        final int[] offsets = new int[elementCount];
        final int[] ids = new int[elementCount];
        final double[] ts = fast_sort ? new double[elementCount] : null;
        final double[] xs = fast_sort ? new double[elementCount] : null;
        final double[] ys = fast_sort ? new double[elementCount] : null;

        //initialize columnar data output
        ByteArrayOutputStream index_bos = new ByteArrayOutputStream();
        DataOutputStream index_dos = new DataOutputStream(index_bos);
        ByteArrayOutputStream[] bos = new ByteArrayOutputStream[fields.length];
        DataOutputStream[] dos = new DataOutputStream[fields.length];
        for (int i = 0; i < bos.length; i++) {
            bos[i] = new ByteArrayOutputStream();
            dos[i] = new DataOutputStream(bos[i]);
        }

        i_start = offset;
        line.clear();
        for (int i = 0; i < elementCount; i++) {
            offsets[i] = i_start;
            ids[i] = i;
            int i_end = skipToEOL(element_bytes, i_start);
            if (xs != null) {
                // Extract the line without the end of line character
                line.set(element_bytes, i_start, i_end - i_start - 1);
                stockObject.fromText(line);
                // Sample center of the shape
                ts[i] = (stockObject.getMBR().t1 + stockObject.getMBR().t2) / 2;
                xs[i] = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                ys[i] = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;

                //build columnar storage
                if (stockObject instanceof Point3d) {
                    index_dos.writeDouble(ts[i]);
                    index_dos.writeDouble(xs[i]);
                    index_dos.writeDouble(ys[i]);
                } else {
                    throw new RuntimeException("Indexing non-point shape with RTREE is not supported yet");
                }

                for (int j = 0; j < fields.length; j++) {
                    if (fields[j].getType().equals(Integer.TYPE)) {
                        dos[j].writeInt(fields[j].getInt(stockObject));
                    } else if (fields[j].getType().equals(Double.TYPE)) {
                        dos[j].writeDouble(fields[j].getDouble(stockObject));
                    } else if (fields[j].getType().equals(Long.TYPE)) {
                        dos[j].writeLong(fields[j].getLong(stockObject));
                    } else {
                        continue;
                        //throw new RuntimeException("Field type is not supported yet");
                    }
                }
            }
            i_start = i_end;
        }
        index_dos.close();
        for (int i = 0; i < dos.length; i++) {
            dos[i].close();
        }

        /** A struct to store information about a split */
        class SplitStruct extends Prism {
            /** Start and end index for this split */
            int index1, index2;
            /** Direction of this split */
            byte direction;
            /** Index of first element on disk */
            int offsetOfFirstElement;

            static final byte DIRECTION_T = 0;
            static final byte DIRECTION_X = 1;
            static final byte DIRECTION_Y = 2;

            SplitStruct(int index1, int index2, byte direction) {
                this.index1 = index1;
                this.index2 = index2;
                this.direction = direction;
            }

            @Override
            public void write(DataOutput out) throws IOException {
                //
                if (columnarStorage)
                    out.writeInt(index1);
                else
                    out.writeInt(offsetOfFirstElement);
                super.write(out);
            }

            void partition(Queue<SplitStruct> toBePartitioned) {
                IndexedSortable sortableT;
                IndexedSortable sortableX;
                IndexedSortable sortableY;

                if (fast_sort) {
                    // Use materialized xs[] and ys[] to do the comparisons
                    sortableT = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ts
                            double tempt = ts[i];
                            ts[i] = ts[j];
                            ts[j] = tempt;
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (ts[i] < ts[j])
                                return -1;
                            if (ts[i] > ts[j])
                                return 1;
                            return 0;
                        }
                    };
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ts
                            double tempt = ts[i];
                            ts[i] = ts[j];
                            ts[j] = tempt;
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (xs[i] < xs[j])
                                return -1;
                            if (xs[i] > xs[j])
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ts
                            double tempt = ts[i];
                            ts[i] = ts[j];
                            ts[j] = tempt;
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (ys[i] < ys[j])
                                return -1;
                            if (ys[i] > ys[j])
                                return 1;
                            return 0;
                        }
                    };
                } else {
                    // No materialized xs and ys. Always deserialize objects
                    // to compare
                    sortableT = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            // Get end of line
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double ti = (stockObject.getMBR().t1 + stockObject.getMBR().t2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double tj = (stockObject.getMBR().t1 + stockObject.getMBR().t2) / 2;
                            if (ti < tj)
                                return -1;
                            if (ti > tj)
                                return 1;
                            return 0;
                        }
                    };
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            // Get end of line
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double xi = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double xj = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                            if (xi < xj)
                                return -1;
                            if (xi > xj)
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double yi = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double yj = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
                            if (yi < yj)
                                return -1;
                            if (yi > yj)
                                return 1;
                            return 0;
                        }
                    };
                }

                final IndexedSorter sorter = new QuickSort();

                final IndexedSortable[] sortables = new IndexedSortable[3];
                sortables[SplitStruct.DIRECTION_T] = sortableT;
                sortables[SplitStruct.DIRECTION_X] = sortableX;
                sortables[SplitStruct.DIRECTION_Y] = sortableY;

                sorter.sort(sortables[direction], index1, index2);

                // Partition into maxEntries partitions (equally) and
                // create a SplitStruct for each partition
                int i1 = index1;
                for (int iSplit = 0; iSplit < degree; iSplit++) {
                    int i2 = index1 + (index2 - index1) * (iSplit + 1) / degree;
                    SplitStruct newSplit;
                    if (direction == SplitStruct.DIRECTION_T) {
                        newSplit = new SplitStruct(i1, i2, SplitStruct.DIRECTION_X);
                    } else if (direction == SplitStruct.DIRECTION_X) {
                        newSplit = new SplitStruct(i1, i2, SplitStruct.DIRECTION_Y);
                    } else {
                        newSplit = new SplitStruct(i1, i2, SplitStruct.DIRECTION_T);
                    }
                    toBePartitioned.add(newSplit);
                    i1 = i2;
                }
            }
        }

        // All nodes stored in level-order traversal
        Vector<SplitStruct> nodes = new Vector<SplitStruct>();
        final Queue<SplitStruct> toBePartitioned = new LinkedList<SplitStruct>();
        toBePartitioned.add(new SplitStruct(0, elementCount, SplitStruct.DIRECTION_X));

        while (!toBePartitioned.isEmpty()) {
            SplitStruct split = toBePartitioned.poll();
            if (nodes.size() < nonLeafNodeCount) {
                // This is a non-leaf
                split.partition(toBePartitioned);
            }
            nodes.add(split);
        }

        if (nodes.size() != nodeCount) {
            throw new RuntimeException(
                    "Expected node count: " + nodeCount + ". Real node count: " + nodes.size());
        }

        // Now we have our data sorted in the required order. Start building
        // the tree.
        // Store the offset of each leaf node in the tree
        FSDataOutputStream fakeOut = new FSDataOutputStream(new java.io.OutputStream() {
            // Null output stream
            @Override
            public void write(int b) throws IOException {
                // Do nothing
            }

            @Override
            public void write(byte[] b, int off, int len) throws IOException {
                // Do nothing
            }

            @Override
            public void write(byte[] b) throws IOException {
                // Do nothing
            }
        }, null, TreeHeaderSize + nodes.size() * NodeSize);
        for (int i_leaf = nonLeafNodeCount, i = 0; i_leaf < nodes.size(); i_leaf++) {
            nodes.elementAt(i_leaf).offsetOfFirstElement = (int) fakeOut.getPos();
            if (i != nodes.elementAt(i_leaf).index1)
                throw new RuntimeException();
            double t1, x1, y1, t2, x2, y2;

            // Initialize MBR to first object
            int eol = skipToEOL(element_bytes, offsets[i]);
            fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
            stockObject.fromText(line);
            Prism mbr = stockObject.getMBR();
            t1 = mbr.t1;
            x1 = mbr.x1;
            y1 = mbr.y1;
            t2 = mbr.t2;
            x2 = mbr.x2;
            y2 = mbr.y2;
            i++;

            while (i < nodes.elementAt(i_leaf).index2) {
                eol = skipToEOL(element_bytes, offsets[i]);
                fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
                line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                stockObject.fromText(line);
                mbr = stockObject.getMBR();
                if (mbr.t1 < t1)
                    t1 = mbr.t1;
                if (mbr.x1 < x1)
                    x1 = mbr.x1;
                if (mbr.y1 < y1)
                    y1 = mbr.y1;
                if (mbr.t2 > t2)
                    t2 = mbr.t2;
                if (mbr.x2 > x2)
                    x2 = mbr.x2;
                if (mbr.y2 > y2)
                    y2 = mbr.y2;
                i++;
            }
            nodes.elementAt(i_leaf).set(t1, x1, y1, t2, x2, y2);
        }
        fakeOut.close();
        fakeOut = null;

        // Calculate MBR and offsetOfFirstElement for non-leaves
        for (int i_node = nonLeafNodeCount - 1; i_node >= 0; i_node--) {
            int i_first_child = i_node * degree + 1;
            nodes.elementAt(i_node).offsetOfFirstElement = nodes.elementAt(i_first_child).offsetOfFirstElement;
            int i_child = 0;
            Prism mbr;
            mbr = nodes.elementAt(i_first_child + i_child);
            double t1 = mbr.t1;
            double x1 = mbr.x1;
            double y1 = mbr.y1;
            double t2 = mbr.t2;
            double x2 = mbr.x2;
            double y2 = mbr.y2;
            i_child++;

            while (i_child < degree) {
                mbr = nodes.elementAt(i_first_child + i_child);
                if (mbr.t1 < t1)
                    t1 = mbr.t1;
                if (mbr.x1 < x1)
                    x1 = mbr.x1;
                if (mbr.y1 < y1)
                    y1 = mbr.y1;
                if (mbr.t2 > t2)
                    t2 = mbr.t2;
                if (mbr.x2 > x2)
                    x2 = mbr.x2;
                if (mbr.y2 > y2)
                    y2 = mbr.y2;
                i_child++;
            }
            nodes.elementAt(i_node).set(t1, x1, y1, t2, x2, y2);
        }

        // Start writing the tree
        // write tree header (including size)
        // Total tree size. (== Total bytes written - 8 bytes for the size
        // itself)
        dataOut.writeInt(TreeHeaderSize + NodeSize * nodeCount + len);
        // Tree height
        dataOut.writeInt(height);
        // Degree
        dataOut.writeInt(degree);
        dataOut.writeInt(elementCount);

        //isColumnar
        dataOut.writeInt(columnarStorage ? 1 : 0);

        // write nodes
        for (SplitStruct node : nodes) {
            node.write(dataOut);
        }
        // write elements
        if (columnarStorage) {
            byte[] index_bs = index_bos.toByteArray();
            byte[][] bss = new byte[bos.length][];
            for (int i = 0; i < bss.length; i++) {
                bss[i] = bos[i].toByteArray();
            }
            for (int element_i = 0; element_i < elementCount; element_i++) {
                //int eol = skipToEOL(element_bytes, offsets[element_i]);
                //dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
                dataOut.write(index_bs, ids[element_i] * IndexUnitSize, IndexUnitSize);
            }

            for (int i = 0; i < fields.length; i++) {
                int fieldSize = 0;
                if (fields[i].getType().equals(Integer.TYPE)) {
                    fieldSize = 4;
                } else if (fields[i].getType().equals(Long.TYPE)) {
                    fieldSize = 8;
                } else if (fields[i].getType().equals(Double.TYPE)) {
                    fieldSize = 8;
                } else {
                    //throw new RuntimeException("Unsupported field type: " + fields[i].getType().getName());
                    continue;
                }
                for (int element_i = 0; element_i < elementCount; element_i++) {
                    //int eol = skipToEOL(element_bytes, offsets[element_i]);
                    //dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
                    dataOut.write(bss[i], ids[element_i] * fieldSize, fieldSize);
                }
            }
        } else {
            for (int element_i = 0; element_i < elementCount; element_i++) {
                int eol = skipToEOL(element_bytes, offsets[element_i]);
                dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
            }
        }

    } catch (IOException e) {
        e.printStackTrace();
    } catch (IllegalArgumentException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IllegalAccessException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}
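
The poll() usage in bulkLoadWrite drives a breadth-first work queue: the root split is enqueued, each dequeued non-leaf split enqueues its children, and the loop drains the queue so that nodes come out in level order. A simplified sketch of that pattern, with a hypothetical Node range type standing in for SplitStruct and the partitioning reduced to index arithmetic:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;

public class LevelOrderPartition {

    /** Hypothetical stand-in for SplitStruct: a half-open range [from, to) of element indexes. */
    static class Node {
        final int from, to;
        Node(int from, int to) { this.from = from; this.to = to; }
    }

    /** Builds the node list in level order, splitting each non-leaf into `degree` equal ranges. */
    static List<Node> buildLevelOrder(int elementCount, int degree, int nonLeafNodeCount) {
        List<Node> nodes = new ArrayList<>();
        Queue<Node> toBePartitioned = new ArrayDeque<>();
        toBePartitioned.add(new Node(0, elementCount));

        while (!toBePartitioned.isEmpty()) {
            // Dequeue the next node; never null here because isEmpty() was checked first
            Node node = toBePartitioned.poll();
            if (nodes.size() < nonLeafNodeCount) {
                // Non-leaf node: enqueue `degree` children covering equal slices of its range
                int start = node.from;
                for (int i = 0; i < degree; i++) {
                    int end = node.from + (node.to - node.from) * (i + 1) / degree;
                    toBePartitioned.add(new Node(start, end));
                    start = end;
                }
            }
            nodes.add(node);
        }
        return nodes;
    }

    public static void main(String[] args) {
        // 9 elements, degree 3, 1 non-leaf node -> the root followed by 3 leaves of 3 elements each
        for (Node n : buildLevelOrder(9, 3, 1)) {
            System.out.println("[" + n.from + ", " + n.to + ")");
        }
    }
}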

From source file:com.xpn.xwiki.store.migration.hibernate.R40000XWIKI6990DataMigration.java

@Override
public void hibernateMigrate() throws DataMigrationException, XWikiException {
    final Map<Long, Long> docs = new HashMap<Long, Long>();
    final List<String> customMappedClasses = new ArrayList<String>();
    final Map<Long, Long> objs = new HashMap<Long, Long>();
    final Queue<Map<Long, Long>> stats = new LinkedList<Map<Long, Long>>();

    // Get ids conversion list
    getStore().executeRead(getXWikiContext(), new HibernateCallback<Object>() {
        private void fillDocumentIdConversion(Session session, Map<Long, Long> map) {
            String database = getXWikiContext().getDatabase();
            @SuppressWarnings("unchecked")
            List<Object[]> results = session
                    .createQuery("select doc.id, doc.space, doc.name, doc.defaultLanguage, doc.language from "
                            + XWikiDocument.class.getName() + " as doc")
                    .list();

            for (Object[] result : results) {
                long oldId = (Long) result[0];
                String space = (String) result[1];
                String name = (String) result[2];
                String defaultLanguage = (String) result[3];
                String language = (String) result[4];

                // Use a real document, since we need the language to be appended.
                // TODO: Change this when the locale is integrated
                XWikiDocument doc = new XWikiDocument(new DocumentReference(database, space, name));
                doc.setDefaultLanguage(defaultLanguage);
                doc.setLanguage(language);
                long newId = doc.getId();

                if (oldId != newId) {
                    map.put(oldId, newId);
                }
            }

            logProgress("Retrieved %d document IDs to be converted.", map.size());
        }

        private void fillObjectIdConversion(Session session, Map<Long, Long> map) {
            @SuppressWarnings("unchecked")
            List<Object[]> results = session
                    .createQuery("select obj.id, obj.name, obj.className, obj.number from "
                            + BaseObject.class.getName() + " as obj")
                    .list();
            for (Object[] result : results) {
                long oldId = (Long) result[0];
                String docName = (String) result[1];
                String className = (String) result[2];
                Integer number = (Integer) result[3];

                BaseObjectReference objRef = new BaseObjectReference(
                        R40000XWIKI6990DataMigration.this.resolver.resolve(className), number,
                        R40000XWIKI6990DataMigration.this.resolver.resolve(docName));
                long newId = Util.getHash(R40000XWIKI6990DataMigration.this.serializer.serialize(objRef));

                if (oldId != newId) {
                    map.put(oldId, newId);
                }
            }

            logProgress("Retrieved %d object IDs to be converted.", map.size());
        }

        private void fillCustomMappingMap(XWikiHibernateStore store, XWikiContext context)
                throws XWikiException, DataMigrationException {
            processCustomMappings(store, new CustomMappingCallback() {
                @Override
                public void processCustomMapping(XWikiHibernateStore store, String name, String mapping,
                        boolean hasDynamicMapping) throws XWikiException {
                    if (INTERNAL.equals(mapping) || hasDynamicMapping) {
                        customMappedClasses.add(name);
                    }
                }
            }, context);

            logProgress("Retrieved %d custom mapped classes to be processed.", customMappedClasses.size());
        }

        private void fillStatsConversionMap(Session session, Class<?> klass, Map<Long, Long> map) {
            @SuppressWarnings("unchecked")
            List<Object[]> results = session
                    .createQuery(
                            "select stats.id, stats.name, stats.number from " + klass.getName() + " as stats")
                    .list();
            for (Object[] result : results) {
                long oldId = (Long) result[0];
                String statsName = (String) result[1];
                Integer number = (Integer) result[2];

                // Do not try to convert broken records which would cause duplicated ids
                if (!statsName.startsWith(".") && !statsName.endsWith(".")) {
                    long newId = R40000XWIKI6990DataMigration.this.statsIdComputer.getId(statsName, number);

                    if (oldId != newId) {
                        map.put(oldId, newId);
                    }
                } else {
                    logger.debug("Skipping invalid statistical entry [{}] with name [{}]", oldId, statsName);
                }
            }

            String klassName = klass.getName().substring(klass.getName().lastIndexOf('.') + 1);
            logProgress("Retrieved %d %s statistics IDs to be converted.", map.size(),
                    klassName.substring(0, klassName.length() - 5).toLowerCase());
        }

        @Override
        public Object doInHibernate(Session session) throws XWikiException {
            try {
                fillDocumentIdConversion(session, docs);

                fillObjectIdConversion(session, objs);

                // Retrieve custom mapped classes
                if (getStore() instanceof XWikiHibernateStore) {
                    fillCustomMappingMap((XWikiHibernateStore) getStore(), getXWikiContext());
                }

                // Retrieve statistics ID conversion
                for (Class<?> statsClass : STATS_CLASSES) {
                    Map<Long, Long> map = new HashMap<Long, Long>();
                    fillStatsConversionMap(session, statsClass, map);
                    stats.add(map);
                }

                session.clear();
            } catch (Exception e) {
                throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                        XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
            }
            return null;
        }
    });

    // Cache the configuration and the dialect
    configuration = getStore().getConfiguration();
    dialect = configuration.buildSettings().getDialect();

    // Check configuration for safe mode
    XWikiConfig config = getXWikiContext().getWiki().getConfig();
    /* True if migration should use safe but slower non-bulk native updates. */
    boolean useSafeUpdates = "1"
            .equals(config.getProperty("xwiki.store.migration." + this.getName() + ".safemode", "0"));

    // Use safe mode if the database has no temporary table supported by hibernate
    useSafeUpdates = useSafeUpdates || !configuration.buildSettings().getDialect().supportsTemporaryTables();

    // Proceed to document id conversion
    if (!docs.isEmpty()) {
        if (!useSafeUpdates) {
            // Pair table,key for table that need manual updates
            final List<String[]> tableToProcess = new ArrayList<String[]>();

            for (Class<?> docClass : DOC_CLASSES) {
                tableToProcess.addAll(getAllTableToProcess(docClass.getName()));
            }
            for (Class<?> docClass : DOCLINK_CLASSES) {
                tableToProcess.addAll(getAllTableToProcess(docClass.getName(), "docId"));
            }

            logProgress("Converting %d document IDs in %d tables...", docs.size(), tableToProcess.size());

            final long[] times = new long[tableToProcess.size() + 1];
            try {
                getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
                    @Override
                    public void doBulkIdUpdate() {
                        times[timer++] += insertIdUpdates(docs);

                        for (String[] table : tableToProcess) {
                            times[timer++] += executeSqlIdUpdate(table[0], table[1]);
                        }
                    }
                });
            } catch (Exception e) {
                throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                        XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
            }
            if (logger.isDebugEnabled()) {
                int timer = 0;
                logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);

                for (String[] table : tableToProcess) {
                    logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
                }
            }
        } else {
            final List<String[]> docsColl = new ArrayList<String[]>();
            for (Class<?> docClass : DOC_CLASSES) {
                docsColl.addAll(getCollectionProperties(getClassMapping(docClass.getName())));
            }
            for (Class<?> docClass : DOCLINK_CLASSES) {
                docsColl.addAll(getCollectionProperties(getClassMapping(docClass.getName())));
            }

            logProgress("Converting %d document IDs in %d tables and %d collection tables...", docs.size(),
                    DOC_CLASSES.length + DOCLINK_CLASSES.length, docsColl.size());

            final long[] times = new long[DOC_CLASSES.length + DOCLINK_CLASSES.length + docsColl.size()];
            convertDbId(docs, new AbstractIdConversionHibernateCallback() {
                @Override
                public void doSingleUpdate() {
                    for (String[] coll : docsColl) {
                        times[timer++] += executeSqlIdUpdate(coll[0], coll[1]);
                    }

                    for (Class<?> doclinkClass : DOCLINK_CLASSES) {
                        times[timer++] += executeIdUpdate(doclinkClass, DOCID);
                    }
                    times[timer++] += executeIdUpdate(XWikiLink.class, DOCID);
                    times[timer++] += executeIdUpdate(XWikiRCSNodeInfo.class, ID + '.' + DOCID);
                    times[timer++] += executeIdUpdate(XWikiDocument.class, ID);
                }
            });
            if (logger.isDebugEnabled()) {
                int timer = 0;
                for (String[] coll : docsColl) {
                    logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
                }
                for (Class<?> doclinkClass : DOCLINK_CLASSES) {
                    logger.debug("Time elapsed for {} class: {} ms", doclinkClass.getName(),
                            times[timer++] / 1000000);
                }
                logger.debug("Time elapsed for {} class: {} ms", XWikiRCSNodeInfo.class.getName(),
                        times[timer++] / 1000000);
                logger.debug("Time elapsed for {} class: {} ms", XWikiDocument.class.getName(),
                        times[timer++] / 1000000);
            }
        }
        logProgress("All document IDs has been converted successfully.");
    } else {
        logProgress("No document IDs to convert, skipping.");
    }

    // Proceed to object id conversion
    if (!objs.isEmpty()) {
        if (!useSafeUpdates) {
            // Pair table,key for table that need manual updates
            final List<String[]> tableToProcess = new ArrayList<String[]>();

            PersistentClass objklass = getClassMapping(BaseObject.class.getName());
            tableToProcess.addAll(getCollectionProperties(objklass));

            for (Class<?> propertyClass : PROPERTY_CLASS) {
                tableToProcess.addAll(getAllTableToProcess(propertyClass.getName()));
            }
            for (String customClass : customMappedClasses) {
                tableToProcess.addAll(getAllTableToProcess(customClass));
            }
            tableToProcess.add(new String[] { objklass.getTable().getName(), getKeyColumnName(objklass) });

            logProgress("Converting %d object IDs in %d tables...", objs.size(), tableToProcess.size());

            final long[] times = new long[tableToProcess.size() + 1];
            try {
                getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
                    @Override
                    public void doBulkIdUpdate() {
                        times[timer++] += insertIdUpdates(objs);

                        for (String[] table : tableToProcess) {
                            times[timer++] += executeSqlIdUpdate(table[0], table[1]);
                        }
                    }
                });
            } catch (Exception e) {
                throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                        XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
            }
            if (logger.isDebugEnabled()) {
                int timer = 0;
                logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);

                for (String[] table : tableToProcess) {
                    logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
                }
            }
        } else {
            // Name of classes that need manual updates
            final List<String> classToProcess = new ArrayList<String>();
            // Name of custom classes that need manual updates
            final List<String> customClassToProcess = new ArrayList<String>();
            // Pair table,key for collection table that need manual updates
            final List<String[]> objsColl = new ArrayList<String[]>();

            objsColl.addAll(getCollectionProperties(getClassMapping(BaseObject.class.getName())));
            for (Class<?> propertyClass : PROPERTY_CLASS) {
                String className = propertyClass.getName();
                PersistentClass klass = getClassMapping(className);

                // Add collection table that will not be updated by cascaded updates
                objsColl.addAll(getCollectionProperties(klass));

                // Skip classes that will be updated by cascaded updates
                if (!this.fkTables.contains(klass.getTable())) {
                    classToProcess.add(className);
                }
            }
            for (String customClass : customMappedClasses) {
                PersistentClass klass = getClassMapping(customClass);

                // Add collection table that will not be updated by cascaded updates
                objsColl.addAll(getCollectionProperties(klass));

                // Skip classes that will be updated by cascaded updates
                if (!this.fkTables.contains(klass.getTable())) {
                    customClassToProcess.add(customClass);
                }
            }

            logProgress(
                    "Converting %d object IDs in %d tables, %d custom mapped tables and %d collection tables...",
                    objs.size(), classToProcess.size() + 1, customClassToProcess.size(), objsColl.size());

            final long[] times = new long[classToProcess.size() + 1 + customClassToProcess.size()
                    + objsColl.size()];
            convertDbId(objs, new AbstractIdConversionHibernateCallback() {
                @Override
                public void doSingleUpdate() {
                    for (String[] coll : objsColl) {
                        times[timer++] += executeSqlIdUpdate(coll[0], coll[1]);
                    }

                    for (String customMappedClass : customClassToProcess) {
                        times[timer++] += executeIdUpdate(customMappedClass, ID);
                    }

                    for (String propertyClass : classToProcess) {
                        times[timer++] += executeIdUpdate(propertyClass, IDID);
                    }

                    times[timer++] += executeIdUpdate(BaseObject.class, ID);
                }
            });
            if (logger.isDebugEnabled()) {
                int timer = 0;
                for (String[] coll : objsColl) {
                    logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
                }
                for (String customMappedClass : customClassToProcess) {
                    logger.debug("Time elapsed for {} custom table: {} ms", customMappedClass,
                            times[timer++] / 1000000);
                }
                for (String propertyClass : classToProcess) {
                    logger.debug("Time elapsed for {} property table: {} ms", propertyClass,
                            times[timer++] / 1000000);
                }
                logger.debug("Time elapsed for {} class: {} ms", BaseObject.class.getName(),
                        times[timer++] / 1000000);
            }
        }
        logProgress("All object IDs has been converted successfully.");
    } else {
        logProgress("No object IDs to convert, skipping.");
    }

    // Proceed to statistics id conversions
    for (final Class<?> statsClass : STATS_CLASSES) {

        Map<Long, Long> map = stats.poll();
        String klassName = statsClass.getName().substring(statsClass.getName().lastIndexOf('.') + 1);
        klassName = klassName.substring(0, klassName.length() - 5).toLowerCase();

        if (!map.isEmpty()) {
            if (!useSafeUpdates) {
                final List<String[]> tableToProcess = new ArrayList<String[]>();
                final Map<Long, Long> statids = map;

                PersistentClass statklass = getClassMapping(statsClass.getName());
                tableToProcess.addAll(getCollectionProperties(statklass));
                tableToProcess
                        .add(new String[] { statklass.getTable().getName(), getKeyColumnName(statklass) });

                logProgress("Converting %d %s statistics IDs in %d tables...", map.size(), klassName,
                        tableToProcess.size());

                final long[] times = new long[tableToProcess.size() + 1];
                try {
                    getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
                        @Override
                        public void doBulkIdUpdate() {
                            times[timer++] += insertIdUpdates(statids);

                            for (String[] table : tableToProcess) {
                                times[timer++] += executeSqlIdUpdate(table[0], table[1]);
                            }
                        }
                    });
                } catch (Exception e) {
                    throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                            XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
                }
                if (logger.isDebugEnabled()) {
                    int timer = 0;
                    logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);

                    for (String[] table : tableToProcess) {
                        logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
                    }
                }
            } else {
                final List<String[]> statsColl = new ArrayList<String[]>();
                statsColl.addAll(getCollectionProperties(getClassMapping(statsClass.getName())));

                logProgress("Converting %d %s statistics IDs in 1 tables and %d collection tables...",
                        map.size(), klassName, statsColl.size());

                final long[] times = new long[statsColl.size() + 1];
                convertDbId(map, new AbstractIdConversionHibernateCallback() {
                    @Override
                    public void doSingleUpdate() {
                        for (String[] coll : statsColl) {
                            times[timer++] += executeSqlIdUpdate(coll[0], coll[1]);
                        }
                        times[timer++] += executeIdUpdate(statsClass, ID);
                    }
                });
                if (logger.isDebugEnabled()) {
                    int timer = 0;
                    for (String[] coll : statsColl) {
                        logger.debug("Time elapsed for {} collection: {} ms", coll[0],
                                times[timer++] / 1000000);
                    }
                    logger.debug("Time elapsed for {} class: {} ms", statsClass.getName(),
                            times[timer++] / 1000000);
                }
            }
            logProgress("All %s statistics IDs has been converted successfully.", klassName);
        } else {
            logProgress("No %s statistics IDs to convert, skipping.", klassName);
        }
    }
}
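
Here the queue is a plain FIFO buffer: inside the Hibernate callback one ID-conversion map is enqueued per statistics class, and the later loop over STATS_CLASSES uses poll() to hand the maps back in exactly the same order. A stand-alone sketch of that produce-then-consume pairing (the class names and map contents below are illustrative only, not the real XWiki statistics classes):

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;

public class FifoPairingDemo {
    public static void main(String[] args) {
        // Illustrative stand-ins for STATS_CLASSES
        List<String> statsClasses = List.of("DocumentStats", "VisitStats", "RefererStats");
        Queue<Map<Long, Long>> stats = new LinkedList<>();

        // Phase 1: produce one conversion map per class, enqueued in class order
        for (String klass : statsClasses) {
            Map<Long, Long> idMap = new HashMap<>();
            idMap.put((long) klass.hashCode(), 42L); // placeholder old-id -> new-id entry
            stats.add(idMap);
        }

        // Phase 2: poll() returns the maps in the same order the classes were processed
        for (String klass : statsClasses) {
            Map<Long, Long> idMap = stats.poll();
            System.out.println(klass + " -> " + idMap);
        }
    }
}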