Example usage for java.util Queue isEmpty

List of usage examples for java.util Queue isEmpty

Introduction

This page lists example usages of java.util.Queue.isEmpty().

Prototype

boolean isEmpty();

Document

Returns true if this collection contains no elements.
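
Before the examples taken from real projects below, here is a minimal, self-contained sketch of the typical pattern (assuming only the JDK; the class name and queue contents are illustrative): isEmpty() usually serves as the loop guard while draining a queue.

import java.util.ArrayDeque;
import java.util.Queue;

public class QueueIsEmptyExample {
    public static void main(String[] args) {
        // Any Queue implementation works; ArrayDeque is just an example choice
        Queue<String> queue = new ArrayDeque<>();
        queue.offer("first");
        queue.offer("second");

        // Drain the queue until isEmpty() reports that no elements remain
        while (!queue.isEmpty()) {
            System.out.println(queue.poll());
        }

        // Prints "true" because all elements have been polled
        System.out.println(queue.isEmpty());
    }
}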

Usage

From source file:ch.entwine.weblounge.maven.S3DeployMojo.java

/**
 * {@inheritDoc}
 * 
 * @see org.apache.maven.plugin.Mojo#execute()
 */
public void execute() throws MojoExecutionException, MojoFailureException {

    // Setup AWS S3 client
    AWSCredentials credentials = new BasicAWSCredentials(awsAccessKey, awsSecretKey);
    AmazonS3Client uploadClient = new AmazonS3Client(credentials);
    TransferManager transfers = new TransferManager(credentials);

    // Make sure key prefix does not start with a slash but has one at the
    // end
    if (keyPrefix.startsWith("/"))
        keyPrefix = keyPrefix.substring(1);
    if (!keyPrefix.endsWith("/"))
        keyPrefix = keyPrefix + "/";

    // Keep track of how much data has been transferred
    long totalBytesTransferred = 0L;
    int items = 0;
    Queue<Upload> uploads = new LinkedBlockingQueue<Upload>();

    try {
        // Check if S3 bucket exists
        getLog().debug("Checking whether bucket " + bucket + " exists");
        if (!uploadClient.doesBucketExist(bucket)) {
            getLog().error("Desired bucket '" + bucket + "' does not exist!");
            return;
        }

        getLog().debug("Collecting files to transfer from " + resources.getDirectory());
        List<File> res = getResources();
        for (File file : res) {
            // Make path of resource relative to resources directory
            String filename = file.getName();
            String extension = FilenameUtils.getExtension(filename);
            String path = file.getPath().substring(resources.getDirectory().length());
            String key = concat("/", keyPrefix, path).substring(1);

            // Delete old file version in bucket
            getLog().debug("Removing existing object at " + key);
            uploadClient.deleteObject(bucket, key);

            // Setup meta data
            ObjectMetadata meta = new ObjectMetadata();
            meta.setCacheControl("public, max-age=" + String.valueOf(valid * 3600));

            FileInputStream fis = null;
            GZIPOutputStream gzipos = null;
            final File fileToUpload;

            if (gzip && ("js".equals(extension) || "css".equals(extension))) {
                try {
                    fis = new FileInputStream(file);
                    File gzFile = File.createTempFile(file.getName(), null);
                    gzipos = new GZIPOutputStream(new FileOutputStream(gzFile));
                    IOUtils.copy(fis, gzipos);
                    fileToUpload = gzFile;
                    meta.setContentEncoding("gzip");
                    if ("js".equals(extension))
                        meta.setContentType("text/javascript");
                    if ("css".equals(extension))
                        meta.setContentType("text/css");
                } catch (FileNotFoundException e) {
                    getLog().error(e);
                    continue;
                } catch (IOException e) {
                    getLog().error(e);
                    continue;
                } finally {
                    IOUtils.closeQuietly(fis);
                    IOUtils.closeQuietly(gzipos);
                }
            } else {
                fileToUpload = file;
            }

            // Check for errors from already finished uploads before starting the next one
            if (erroneousUpload != null)
                break;

            // Create put object request
            long bytesToTransfer = fileToUpload.length();
            totalBytesTransferred += bytesToTransfer;
            PutObjectRequest request = new PutObjectRequest(bucket, key, fileToUpload);
            request.setProgressListener(new UploadListener(credentials, bucket, key, bytesToTransfer));
            request.setMetadata(meta);

            // Schedule put object request
            getLog().info(
                    "Uploading " + key + " (" + FileUtils.byteCountToDisplaySize((int) bytesToTransfer) + ")");
            Upload upload = transfers.upload(request);
            uploads.add(upload);
            items++;
        }
    } catch (AmazonServiceException e) {
        getLog().error("Uploading resources failed: " + e.getMessage());
    } catch (AmazonClientException e) {
        getLog().error("Uploading resources failed: " + e.getMessage());
    }

    // Wait for uploads to be finished
    String currentUpload = null;
    try {
        Thread.sleep(1000);
        getLog().info("Waiting for " + uploads.size() + " uploads to finish...");
        while (!uploads.isEmpty()) {
            Upload upload = uploads.poll();
            currentUpload = upload.getDescription().substring("Uploading to ".length());
            if (TransferState.InProgress.equals(upload.getState()))
                getLog().debug("Waiting for upload " + currentUpload + " to finish");
            upload.waitForUploadResult();
        }
    } catch (AmazonServiceException e) {
        throw new MojoExecutionException("Error while uploading " + currentUpload);
    } catch (AmazonClientException e) {
        throw new MojoExecutionException("Error while uploading " + currentUpload);
    } catch (InterruptedException e) {
        getLog().debug("Interrupted while waiting for upload to finish");
    }

    // Check for errors that happened outside of the actual uploading
    if (erroneousUpload != null) {
        throw new MojoExecutionException("Error while uploading " + erroneousUpload);
    }

    getLog().info("Deployed " + items + " files ("
            + FileUtils.byteCountToDisplaySize((int) totalBytesTransferred) + ") to s3://" + bucket);
}

From source file:org.neo4j.io.pagecache.PageCacheTest.java

@Test(timeout = SEMI_LONG_TIMEOUT_MILLIS)
public void backgroundThreadsMustGracefullyShutDown() throws Exception {
    assumeTrue("For some reason, this test is very flaky on Windows", !SystemUtils.IS_OS_WINDOWS);

    int iterations = 1000;
    List<WeakReference<PageCache>> refs = new LinkedList<>();
    final Queue<Throwable> caughtExceptions = new ConcurrentLinkedQueue<>();
    final Thread.UncaughtExceptionHandler exceptionHandler = (t, e) -> {
        e.printStackTrace();
        caughtExceptions.offer(e);
    };
    Thread.UncaughtExceptionHandler defaultUncaughtExceptionHandler = Thread
            .getDefaultUncaughtExceptionHandler();
    Thread.setDefaultUncaughtExceptionHandler(exceptionHandler);

    try {
        generateFileWithRecords(file("a"), recordCount, recordSize);
        int filePagesInTotal = recordCount / recordsPerFilePage;

        for (int i = 0; i < iterations; i++) {
            PageCache cache = createPageCache(fs, maxPages, pageCachePageSize, PageCacheTracer.NULL);

            // Touch all the pages
            PagedFile pagedFile = cache.map(file("a"), filePageSize);
            try (PageCursor cursor = pagedFile.io(0, PF_SHARED_READ_LOCK)) {
                for (int j = 0; j < filePagesInTotal; j++) {
                    assertTrue(cursor.next());
                }
            }

            // We're now likely racing with the eviction thread
            pagedFile.close();
            cache.close();
            refs.add(new WeakReference<>(cache));

            assertTrue(caughtExceptions.isEmpty());
        }
    } finally {
        Thread.setDefaultUncaughtExceptionHandler(defaultUncaughtExceptionHandler);
    }

    // Once the page caches have been closed and all references presumably set to null, the only thing that
    // could possibly strongly reference the cache is any lingering background thread. If we do a couple of
    // GCs, then we should observe that the WeakReference has been cleared by the garbage collector. If it
    // hasn't, then something must be keeping it alive, even though it has been closed.
    int maxChecks = 100;
    boolean passed;
    do {
        System.gc();
        Thread.sleep(100);
        passed = true;

        for (WeakReference<PageCache> ref : refs) {
            if (ref.get() != null) {
                passed = false;
            }
        }
    } while (!passed && maxChecks-- > 0);

    if (!passed) {
        List<PageCache> nonNullPageCaches = new LinkedList<>();
        for (WeakReference<PageCache> ref : refs) {
            PageCache pageCache = ref.get();
            if (pageCache != null) {
                nonNullPageCaches.add(pageCache);
            }
        }

        if (!nonNullPageCaches.isEmpty()) {
            fail("PageCaches should not be held live after close: " + nonNullPageCaches);
        }
    }
}

From source file:org.apache.bookkeeper.mledger.impl.ManagedLedgerImpl.java

@Override
public void asyncOffloadPrefix(Position pos, OffloadCallback callback, Object ctx) {
    PositionImpl requestOffloadTo = (PositionImpl) pos;
    if (!isValidPosition(requestOffloadTo)) {
        callback.offloadFailed(new InvalidCursorPositionException("Invalid position for offload"), ctx);
        return;
    }

    PositionImpl firstUnoffloaded;

    Queue<LedgerInfo> ledgersToOffload = new ConcurrentLinkedQueue<>();
    synchronized (this) {
        log.info("[{}] Start ledgersOffload. ledgers={} totalSize={}", name, ledgers.keySet(),
                TOTAL_SIZE_UPDATER.get(this));

        if (STATE_UPDATER.get(this) == State.Closed) {
            log.info("[{}] Ignoring offload request since the managed ledger was already closed", name);
            callback.offloadFailed(new ManagedLedgerAlreadyClosedException(
                    "Can't offload closed managed ledger (" + name + ")"), ctx);
            return;
        }

        if (ledgers.isEmpty()) {
            log.info("[{}] Tried to offload a managed ledger with no ledgers, giving up", name);
            callback.offloadFailed(new ManagedLedgerAlreadyClosedException(
                    "Can't offload managed ledger (" + name + ") with no ledgers"), ctx);
            return;
        }

        long current = ledgers.lastKey();

        // the first ledger which will not be offloaded. Defaults to current,
        // in the case that the whole headmap is offloaded. Otherwise it will
        // be set as we iterate through the headmap values
        long firstLedgerRetained = current;
        for (LedgerInfo ls : ledgers.headMap(current).values()) {
            if (requestOffloadTo.getLedgerId() > ls.getLedgerId()) {
                // don't offload if ledger has already been offloaded, or is empty
                if (!ls.getOffloadContext().getComplete() && ls.getSize() > 0) {
                    ledgersToOffload.add(ls);
                }
            } else {
                firstLedgerRetained = ls.getLedgerId();
                break;
            }
        }
        firstUnoffloaded = PositionImpl.get(firstLedgerRetained, 0);
    }

    if (ledgersToOffload.isEmpty()) {
        log.info("[{}] No ledgers to offload", name);
        callback.offloadComplete(firstUnoffloaded, ctx);
        return;
    }

    if (offloadMutex.tryLock()) {
        log.info("[{}] Going to offload ledgers {}", name,
                ledgersToOffload.stream().map(l -> l.getLedgerId()).collect(Collectors.toList()));

        CompletableFuture<PositionImpl> promise = new CompletableFuture<>();
        promise.whenComplete((result, exception) -> {
            offloadMutex.unlock();
            if (exception != null) {
                callback.offloadFailed(new ManagedLedgerException(exception), ctx);
            } else {
                callback.offloadComplete(result, ctx);
            }
        });
        offloadLoop(promise, ledgersToOffload, firstUnoffloaded, Optional.empty());
    } else {
        callback.offloadFailed(
                new ManagedLedgerException.OffloadInProgressException("Offload operation already running"),
                ctx);
    }
}

From source file:de.uni_koblenz.jgralab.utilities.rsa.Rsa2Tg.java

/**
 * Breadth-first search over SpecializesIncidenceClass edges for the closest
 * superclass with the given role name.
 * 
 * @param inc
 *            the incidence class at which the search starts
 * @param rolename
 *            the role name to look for
 * @return the closest superclass carrying the given role name, or
 *         <code>null</code> if no such superclass exists
 */
private IncidenceClass findClosestSuperclassWithRolename(IncidenceClass inc, String rolename) {
    IncidenceClass sup = null;
    Queue<IncidenceClass> q = new LinkedList<IncidenceClass>();
    LocalBooleanGraphMarker m = new LocalBooleanGraphMarker(sg);
    m.mark(inc);
    q.offer(inc);
    while (!q.isEmpty()) {
        IncidenceClass curr = q.poll();
        m.mark(curr);
        if ((curr != inc) && rolename.equals(curr.get_roleName())) {
            sup = curr;
            break;
        }
        for (SpecializesIncidenceClass sic : curr.getIncidentEdges(SpecializesIncidenceClass.class,
                de.uni_koblenz.jgralab.Direction.VERTEX_TO_EDGE)) {
            IncidenceClass i = (IncidenceClass) sic.getOmega();
            if (!m.isMarked(i)) {
                m.mark(i);
                q.offer(i);
            }
        }

    }
    return sup;
}

From source file:de.uni_koblenz.jgralab.utilities.rsa.Rsa2Tg.java

/**
 * Creates the MayBeNestedIn relations between the GraphElementClasses.
 */
private void createMayBeNestedIn() {
    System.out.println("Create MayBeNestedIn relations ...");
    updateNestedElements();

    // stores the GraphElementClass which have nested elements but are not
    // nested in another GraphElementClass
    Queue<GraphElementClass> workingList = new LinkedList<GraphElementClass>();
    Queue<GraphElementClass> topLevelNestingElements = new LinkedList<GraphElementClass>();

    // all edges have to be treated
    for (EdgeClass ec : sg.getEdgeClassVertices()) {
        workingList.add(ec);
        topLevelNestingElements.add(ec);
    }

    // create the explicitly modeled MayBeNestedIn edges
    for (GraphElement<?, ?, ?, ?> ge : nestedElements.getMarkedElements()) {
        GraphElementClass containingGEC = (GraphElementClass) ge;
        assert nestedElements.getMark(containingGEC) != null;
        assert !nestedElements.getMark(containingGEC).isEmpty();

        for (GraphElementClass containedGEC : nestedElements.getMark(containingGEC)) {
            sg.createMayBeNestedIn(containedGEC, containingGEC);
            insertContainingGECIntoWorkingList(containingGEC, containedGEC, topLevelNestingElements);
        }
    }

    checkAcyclicityOfMayBeNestedIn(topLevelNestingElements);

    // check correctness of explicit modeled MayBeNestedIn edges and create
    // implicit MayBeNestedIn edges during a breadth first search over the
    // GraphElementClasses participating in the MayBeNestedIn tree
    LocalBooleanGraphMarker isImplicitlyNested = new LocalBooleanGraphMarker(sg);
    while (!workingList.isEmpty()) {
        GraphElementClass current = workingList.poll();
        assert current != null;

        if (EdgeClass.class.isInstance(current)) {
            EdgeClass containedEC = (EdgeClass) current;

            // check constraints for explicitly nested EdgeClasses
            for (MayBeNestedIn_nestedElement i : containedEC.getIncidences(MayBeNestedIn_nestedElement.class)) {
                if (!isImplicitlyNested.isMarked(i.getEdge())) {
                    GraphElementClass containingGEC = (GraphElementClass) i.getThat();
                    checkNestingConstraints(containedEC, containingGEC);
                }
            }

            // create implicit MayBeNestedIn edges
            for (GraphElementClass containingGEC : getAllNestingElements(containedEC)) {
                isImplicitlyNested.mark(sg.createMayBeNestedIn(containedEC, containingGEC));
                if (topLevelNestingElements.contains(containedEC)) {
                    topLevelNestingElements.remove(containedEC);
                }
            }
        }

        // insert all nested GraphElementClasses into workingList
        for (MayBeNestedIn_nestingElement i : current.getIncidences(MayBeNestedIn_nestingElement.class)) {
            if (!workingList.contains(i.getThat()) && !isImplicitlyNested.isMarked(i.getEdge())) {
                workingList.add((GraphElementClass) i.getThat());
            }
        }
    }

    deleteDuplicateMayBeNestedIn();

    checkAcyclicityOfMayBeNestedIn(topLevelNestingElements);
}

From source file:edu.umn.cs.spatialHadoop.indexing.RTree.java

/**
 * Builds the RTree given a serialized list of elements. It uses the given
 * stockObject to deserialize these elements using
 * {@link TextSerializable#fromText(Text)} and build the tree. Also writes the
 * created tree to the disk directly.
 * 
 * @param element_bytes
 *          - serialization of all elements separated by new lines
 * @param offset
 *          - offset of the first byte to use in elements_bytes
 * @param len
 *          - number of bytes to use in elements_bytes
 * @param degree
 *          - Degree of the R-tree to build in terms of number of children per
 *          node
 * @param dataOut
 *          - output stream to write the result to.
 * @param fast_sort
 *          - setting this to <code>true</code> allows the method to run
 *          faster by materializing the offset of each element in the list
 *          which speeds up the comparison. However, this requires an
 *          additional 16 bytes per element. So, for each 1M elements, the
 *          method will require an additional 16 M bytes (approximately).
 */
public static void bulkLoadWrite(final byte[] element_bytes, final int offset, final int len, final int degree,
        DataOutput dataOut, final Shape stockObject, final boolean fast_sort) {
    try {

        int elementCount = 0;
        // Count number of elements in the given text
        int i_start = offset;
        final Text line = new Text();
        while (i_start < offset + len) {
            int i_end = skipToEOL(element_bytes, i_start);
            // Extract the line without end of line character
            line.set(element_bytes, i_start, i_end - i_start - 1);
            stockObject.fromText(line);
            elementCount++;
            i_start = i_end;
        }
        LOG.info("Bulk loading an RTree with " + elementCount + " elements");

        // It turns out the findBestDegree returns the best degree when the whole
        // tree is loaded to memory when processed. However, as current algorithms
        // process the tree while it's on disk, a higher degree should be selected
        // such that a node fits one file block (assumed to be 4K).
        //final int degree = findBestDegree(bytesAvailable, elementCount);

        int height = Math.max(1, (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
        int leafNodeCount = (int) Math.pow(degree, height - 1);
        if (elementCount < 2 * leafNodeCount && height > 1) {
            height--;
            leafNodeCount = (int) Math.pow(degree, height - 1);
        }
        int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
        int nonLeafNodeCount = nodeCount - leafNodeCount;

        // Keep track of the offset of each element in the text
        final int[] offsets = new int[elementCount];
        final double[] xs = fast_sort ? new double[elementCount] : null;
        final double[] ys = fast_sort ? new double[elementCount] : null;

        i_start = offset;
        line.clear();
        for (int i = 0; i < elementCount; i++) {
            offsets[i] = i_start;
            int i_end = skipToEOL(element_bytes, i_start);
            if (xs != null) {
                // Extract the line with end of line character
                line.set(element_bytes, i_start, i_end - i_start - 1);
                stockObject.fromText(line);
                // Sample center of the shape
                xs[i] = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                ys[i] = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
            }
            i_start = i_end;
        }

        /**A struct to store information about a split*/
        class SplitStruct extends Rectangle {
            /**Start and end index for this split*/
            int index1, index2;
            /**Direction of this split*/
            byte direction;
            /**Index of first element on disk*/
            int offsetOfFirstElement;

            static final byte DIRECTION_X = 0;
            static final byte DIRECTION_Y = 1;

            SplitStruct(int index1, int index2, byte direction) {
                this.index1 = index1;
                this.index2 = index2;
                this.direction = direction;
            }

            @Override
            public void write(DataOutput out) throws IOException {
                out.writeInt(offsetOfFirstElement);
                super.write(out);
            }

            void partition(Queue<SplitStruct> toBePartitioned) {
                IndexedSortable sortableX;
                IndexedSortable sortableY;

                if (fast_sort) {
                    // Use materialized xs[] and ys[] to do the comparisons
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (xs[i] < xs[j])
                                return -1;
                            if (xs[i] > xs[j])
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (ys[i] < ys[j])
                                return -1;
                            if (ys[i] > ys[j])
                                return 1;
                            return 0;
                        }
                    };
                } else {
                    // No materialized xs and ys. Always deserialize objects to compare
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            // Get end of line
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double xi = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double xj = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                            if (xi < xj)
                                return -1;
                            if (xi > xj)
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double yi = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double yj = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
                            if (yi < yj)
                                return -1;
                            if (yi > yj)
                                return 1;
                            return 0;
                        }
                    };
                }

                final IndexedSorter sorter = new QuickSort();

                final IndexedSortable[] sortables = new IndexedSortable[2];
                sortables[SplitStruct.DIRECTION_X] = sortableX;
                sortables[SplitStruct.DIRECTION_Y] = sortableY;

                sorter.sort(sortables[direction], index1, index2);

                // Partition into maxEntries partitions (equally) and
                // create a SplitStruct for each partition
                int i1 = index1;
                for (int iSplit = 0; iSplit < degree; iSplit++) {
                    int i2 = index1 + (index2 - index1) * (iSplit + 1) / degree;
                    SplitStruct newSplit = new SplitStruct(i1, i2, (byte) (1 - direction));
                    toBePartitioned.add(newSplit);
                    i1 = i2;
                }
            }
        }

        // All nodes stored in level-order traversal
        Vector<SplitStruct> nodes = new Vector<SplitStruct>();
        final Queue<SplitStruct> toBePartitioned = new LinkedList<SplitStruct>();
        toBePartitioned.add(new SplitStruct(0, elementCount, SplitStruct.DIRECTION_X));

        while (!toBePartitioned.isEmpty()) {
            SplitStruct split = toBePartitioned.poll();
            if (nodes.size() < nonLeafNodeCount) {
                // This is a non-leaf
                split.partition(toBePartitioned);
            }
            nodes.add(split);
        }

        if (nodes.size() != nodeCount) {
            throw new RuntimeException(
                    "Expected node count: " + nodeCount + ". Real node count: " + nodes.size());
        }

        // Now we have our data sorted in the required order. Start building
        // the tree.
        // Store the offset of each leaf node in the tree
        FSDataOutputStream fakeOut = null;
        try {
            fakeOut = new FSDataOutputStream(new java.io.OutputStream() {
                // Null output stream
                @Override
                public void write(int b) throws IOException {
                    // Do nothing
                }

                @Override
                public void write(byte[] b, int off, int len) throws IOException {
                    // Do nothing
                }

                @Override
                public void write(byte[] b) throws IOException {
                    // Do nothing
                }
            }, null, TreeHeaderSize + nodes.size() * NodeSize);
            for (int i_leaf = nonLeafNodeCount, i = 0; i_leaf < nodes.size(); i_leaf++) {
                nodes.elementAt(i_leaf).offsetOfFirstElement = (int) fakeOut.getPos();
                if (i != nodes.elementAt(i_leaf).index1)
                    throw new RuntimeException();
                double x1, y1, x2, y2;

                // Initialize MBR to first object
                int eol = skipToEOL(element_bytes, offsets[i]);
                fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
                line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                stockObject.fromText(line);
                Rectangle mbr = stockObject.getMBR();
                x1 = mbr.x1;
                y1 = mbr.y1;
                x2 = mbr.x2;
                y2 = mbr.y2;
                i++;

                while (i < nodes.elementAt(i_leaf).index2) {
                    eol = skipToEOL(element_bytes, offsets[i]);
                    fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
                    line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                    stockObject.fromText(line);
                    mbr = stockObject.getMBR();
                    if (mbr.x1 < x1)
                        x1 = mbr.x1;
                    if (mbr.y1 < y1)
                        y1 = mbr.y1;
                    if (mbr.x2 > x2)
                        x2 = mbr.x2;
                    if (mbr.y2 > y2)
                        y2 = mbr.y2;
                    i++;
                }
                nodes.elementAt(i_leaf).set(x1, y1, x2, y2);
            }

        } finally {
            if (fakeOut != null)
                fakeOut.close();
        }

        // Calculate MBR and offsetOfFirstElement for non-leaves
        for (int i_node = nonLeafNodeCount - 1; i_node >= 0; i_node--) {
            int i_first_child = i_node * degree + 1;
            nodes.elementAt(i_node).offsetOfFirstElement = nodes.elementAt(i_first_child).offsetOfFirstElement;
            int i_child = 0;
            Rectangle mbr;
            mbr = nodes.elementAt(i_first_child + i_child);
            double x1 = mbr.x1;
            double y1 = mbr.y1;
            double x2 = mbr.x2;
            double y2 = mbr.y2;
            i_child++;

            while (i_child < degree) {
                mbr = nodes.elementAt(i_first_child + i_child);
                if (mbr.x1 < x1)
                    x1 = mbr.x1;
                if (mbr.y1 < y1)
                    y1 = mbr.y1;
                if (mbr.x2 > x2)
                    x2 = mbr.x2;
                if (mbr.y2 > y2)
                    y2 = mbr.y2;
                i_child++;
            }
            nodes.elementAt(i_node).set(x1, y1, x2, y2);
        }

        // Start writing the tree
        // write tree header (including size)
        // Total tree size. (== Total bytes written - 8 bytes for the size itself)
        dataOut.writeInt(TreeHeaderSize + NodeSize * nodeCount + len);
        // Tree height
        dataOut.writeInt(height);
        // Degree
        dataOut.writeInt(degree);
        dataOut.writeInt(elementCount);

        // write nodes
        for (SplitStruct node : nodes) {
            node.write(dataOut);
        }
        // write elements
        for (int element_i = 0; element_i < elementCount; element_i++) {
            int eol = skipToEOL(element_bytes, offsets[element_i]);
            dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
        }

    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:edu.umn.cs.spatialHadoop.core.RTree.java

/**
 * Builds the RTree given a serialized list of elements. It uses the given
 * stockObject to deserialize these elements using
 * {@link TextSerializable#fromText(Text)} and build the tree. Also writes the
 * created tree to the disk directly.
 * 
 * @param element_bytes
 *          - serialization of all elements separated by new lines
 * @param offset
 *          - offset of the first byte to use in elements_bytes
 * @param len
 *          - number of bytes to use in elements_bytes
 * @param degree
 *          - Degree of the R-tree to build in terms of number of children per
 *          node
 * @param dataOut
 *          - output stream to write the result to.
 * @param fast_sort
 *          - setting this to <code>true</code> allows the method to run
 *          faster by materializing the offset of each element in the list
 *          which speeds up the comparison. However, this requires an
 *          additional 16 bytes per element. So, for each 1M elements, the
 *          method will require an additional 16 M bytes (approximately).
 */
public void bulkLoadWrite(final byte[] element_bytes, final int offset, final int len, final int degree,
        DataOutput dataOut, final boolean fast_sort) {
    try {

        // Count number of elements in the given text
        int i_start = offset;
        final Text line = new Text();
        while (i_start < offset + len) {
            int i_end = skipToEOL(element_bytes, i_start);
            // Extract the line without end of line character
            line.set(element_bytes, i_start, i_end - i_start - 1);
            stockObject.fromText(line);
            elementCount++;
            i_start = i_end;
        }
        LOG.info("Bulk loading an RTree with " + elementCount + " elements");

        // It turns out the findBestDegree returns the best degree when the whole
        // tree is loaded to memory when processed. However, as current algorithms
        // process the tree while it's on disk, a higher degree should be selected
        // such that a node fits one file block (assumed to be 4K).
        //final int degree = findBestDegree(bytesAvailable, elementCount);
        LOG.info("Writing an RTree with degree " + degree);

        int height = Math.max(1, (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
        int leafNodeCount = (int) Math.pow(degree, height - 1);
        if (elementCount < 2 * leafNodeCount && height > 1) {
            height--;
            leafNodeCount = (int) Math.pow(degree, height - 1);
        }
        int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
        int nonLeafNodeCount = nodeCount - leafNodeCount;

        // Keep track of the offset of each element in the text
        final int[] offsets = new int[elementCount];
        final double[] xs = fast_sort ? new double[elementCount] : null;
        final double[] ys = fast_sort ? new double[elementCount] : null;

        i_start = offset;
        line.clear();
        for (int i = 0; i < elementCount; i++) {
            offsets[i] = i_start;
            int i_end = skipToEOL(element_bytes, i_start);
            if (xs != null) {
                // Extract the line with end of line character
                line.set(element_bytes, i_start, i_end - i_start - 1);
                stockObject.fromText(line);
                // Sample center of the shape
                xs[i] = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                ys[i] = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
            }
            i_start = i_end;
        }

        /**A struct to store information about a split*/
        class SplitStruct extends Rectangle {
            /**Start and end index for this split*/
            int index1, index2;
            /**Direction of this split*/
            byte direction;
            /**Index of first element on disk*/
            int offsetOfFirstElement;

            static final byte DIRECTION_X = 0;
            static final byte DIRECTION_Y = 1;

            SplitStruct(int index1, int index2, byte direction) {
                this.index1 = index1;
                this.index2 = index2;
                this.direction = direction;
            }

            @Override
            public void write(DataOutput out) throws IOException {
                out.writeInt(offsetOfFirstElement);
                super.write(out);
            }

            void partition(Queue<SplitStruct> toBePartitioned) {
                IndexedSortable sortableX;
                IndexedSortable sortableY;

                if (fast_sort) {
                    // Use materialized xs[] and ys[] to do the comparisons
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (xs[i] < xs[j])
                                return -1;
                            if (xs[i] > xs[j])
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (ys[i] < ys[j])
                                return -1;
                            if (ys[i] > ys[j])
                                return 1;
                            return 0;
                        }
                    };
                } else {
                    // No materialized xs and ys. Always deserialize objects to compare
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            // Get end of line
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double xi = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double xj = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                            if (xi < xj)
                                return -1;
                            if (xi > xj)
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double yi = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double yj = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
                            if (yi < yj)
                                return -1;
                            if (yi > yj)
                                return 1;
                            return 0;
                        }
                    };
                }

                final IndexedSorter sorter = new QuickSort();

                final IndexedSortable[] sortables = new IndexedSortable[2];
                sortables[SplitStruct.DIRECTION_X] = sortableX;
                sortables[SplitStruct.DIRECTION_Y] = sortableY;

                sorter.sort(sortables[direction], index1, index2);

                // Partition into maxEntries partitions (equally) and
                // create a SplitStruct for each partition
                int i1 = index1;
                for (int iSplit = 0; iSplit < degree; iSplit++) {
                    int i2 = index1 + (index2 - index1) * (iSplit + 1) / degree;
                    SplitStruct newSplit = new SplitStruct(i1, i2, (byte) (1 - direction));
                    toBePartitioned.add(newSplit);
                    i1 = i2;
                }
            }
        }

        // All nodes stored in level-order traversal
        Vector<SplitStruct> nodes = new Vector<SplitStruct>();
        final Queue<SplitStruct> toBePartitioned = new LinkedList<SplitStruct>();
        toBePartitioned.add(new SplitStruct(0, elementCount, SplitStruct.DIRECTION_X));

        while (!toBePartitioned.isEmpty()) {
            SplitStruct split = toBePartitioned.poll();
            if (nodes.size() < nonLeafNodeCount) {
                // This is a non-leaf
                split.partition(toBePartitioned);
            }
            nodes.add(split);
        }

        if (nodes.size() != nodeCount) {
            throw new RuntimeException(
                    "Expected node count: " + nodeCount + ". Real node count: " + nodes.size());
        }

        // Now we have our data sorted in the required order. Start building
        // the tree.
        // Store the offset of each leaf node in the tree
        FSDataOutputStream fakeOut = null;
        try {
            fakeOut = new FSDataOutputStream(new java.io.OutputStream() {
                // Null output stream
                @Override
                public void write(int b) throws IOException {
                    // Do nothing
                }

                @Override
                public void write(byte[] b, int off, int len) throws IOException {
                    // Do nothing
                }

                @Override
                public void write(byte[] b) throws IOException {
                    // Do nothing
                }
            }, null, TreeHeaderSize + nodes.size() * NodeSize);
            for (int i_leaf = nonLeafNodeCount, i = 0; i_leaf < nodes.size(); i_leaf++) {
                nodes.elementAt(i_leaf).offsetOfFirstElement = (int) fakeOut.getPos();
                if (i != nodes.elementAt(i_leaf).index1)
                    throw new RuntimeException();
                double x1, y1, x2, y2;

                // Initialize MBR to first object
                int eol = skipToEOL(element_bytes, offsets[i]);
                fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
                line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                stockObject.fromText(line);
                Rectangle mbr = stockObject.getMBR();
                x1 = mbr.x1;
                y1 = mbr.y1;
                x2 = mbr.x2;
                y2 = mbr.y2;
                i++;

                while (i < nodes.elementAt(i_leaf).index2) {
                    eol = skipToEOL(element_bytes, offsets[i]);
                    fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
                    line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                    stockObject.fromText(line);
                    mbr = stockObject.getMBR();
                    if (mbr.x1 < x1)
                        x1 = mbr.x1;
                    if (mbr.y1 < y1)
                        y1 = mbr.y1;
                    if (mbr.x2 > x2)
                        x2 = mbr.x2;
                    if (mbr.y2 > y2)
                        y2 = mbr.y2;
                    i++;
                }
                nodes.elementAt(i_leaf).set(x1, y1, x2, y2);
            }

        } finally {
            if (fakeOut != null)
                fakeOut.close();
        }

        // Calculate MBR and offsetOfFirstElement for non-leaves
        for (int i_node = nonLeafNodeCount - 1; i_node >= 0; i_node--) {
            int i_first_child = i_node * degree + 1;
            nodes.elementAt(i_node).offsetOfFirstElement = nodes.elementAt(i_first_child).offsetOfFirstElement;
            int i_child = 0;
            Rectangle mbr;
            mbr = nodes.elementAt(i_first_child + i_child);
            double x1 = mbr.x1;
            double y1 = mbr.y1;
            double x2 = mbr.x2;
            double y2 = mbr.y2;
            i_child++;

            while (i_child < degree) {
                mbr = nodes.elementAt(i_first_child + i_child);
                if (mbr.x1 < x1)
                    x1 = mbr.x1;
                if (mbr.y1 < y1)
                    y1 = mbr.y1;
                if (mbr.x2 > x2)
                    x2 = mbr.x2;
                if (mbr.y2 > y2)
                    y2 = mbr.y2;
                i_child++;
            }
            nodes.elementAt(i_node).set(x1, y1, x2, y2);
        }

        // Start writing the tree
        // write tree header (including size)
        // Total tree size. (== Total bytes written - 8 bytes for the size itself)
        dataOut.writeInt(TreeHeaderSize + NodeSize * nodeCount + len);
        // Tree height
        dataOut.writeInt(height);
        // Degree
        dataOut.writeInt(degree);
        dataOut.writeInt(elementCount);

        // write nodes
        for (SplitStruct node : nodes) {
            node.write(dataOut);
        }
        // write elements
        for (int element_i = 0; element_i < elementCount; element_i++) {
            int eol = skipToEOL(element_bytes, offsets[element_i]);
            dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
        }

    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.java

private void walkASTMarkTABREF(ASTNode ast, Set<String> cteAlias) throws SemanticException {
    Queue<Node> queue = new LinkedList<>();
    queue.add(ast);
    Map<HivePrivilegeObject, MaskAndFilterInfo> basicInfos = new LinkedHashMap<>();
    while (!queue.isEmpty()) {
        ASTNode astNode = (ASTNode) queue.poll();
        if (astNode.getToken().getType() == HiveParser.TOK_TABREF) {
            int aliasIndex = 0;
            StringBuilder additionalTabInfo = new StringBuilder();
            for (int index = 1; index < astNode.getChildCount(); index++) {
                ASTNode ct = (ASTNode) astNode.getChild(index);
                if (ct.getToken().getType() == HiveParser.TOK_TABLEBUCKETSAMPLE
                        || ct.getToken().getType() == HiveParser.TOK_TABLESPLITSAMPLE
                        || ct.getToken().getType() == HiveParser.TOK_TABLEPROPERTIES) {
                    additionalTabInfo.append(ctx.getTokenRewriteStream().toString(ct.getTokenStartIndex(),
                            ct.getTokenStopIndex()));
                } else {
                    aliasIndex = index;
                }
            }

            ASTNode tableTree = (ASTNode) (astNode.getChild(0));

            String tabIdName = getUnescapedName(tableTree);

            String alias;
            if (aliasIndex != 0) {
                alias = unescapeIdentifier(astNode.getChild(aliasIndex).getText());
            } else {
                alias = getUnescapedUnqualifiedTableName(tableTree);
            }

            // We need to know if it is CTE or not.
            // A CTE may have the same name as a table.
            // For example,
            // with select TAB1 [masking] as TAB2
            // select * from TAB2 [no masking]
            if (cteAlias.contains(tabIdName)) {
                continue;
            }

            String replacementText = null;
            Table table = null;
            try {
                table = getTableObjectByName(tabIdName);
            } catch (HiveException e) {
                // Table may not be found when materialization of CTE is on.
                LOG.info("Table " + tabIdName + " is not found in walkASTMarkTABREF.");
                continue;
            }

            List<String> colNames = new ArrayList<>();
            List<String> colTypes = new ArrayList<>();
            for (FieldSchema col : table.getAllCols()) {
                colNames.add(col.getName());
                colTypes.add(col.getType());
            }

            basicInfos.put(new HivePrivilegeObject(table.getDbName(), table.getTableName(), colNames),
                    new MaskAndFilterInfo(colTypes, additionalTabInfo.toString(), alias, astNode,
                            table.isView()));
        }
        if (astNode.getChildCount() > 0 && !ignoredTokens.contains(astNode.getToken().getType())) {
            for (Node child : astNode.getChildren()) {
                queue.offer(child);
            }
        }
    }
    List<HivePrivilegeObject> basicPrivObjs = new ArrayList<>();
    basicPrivObjs.addAll(basicInfos.keySet());
    List<HivePrivilegeObject> needRewritePrivObjs = tableMask.applyRowFilterAndColumnMasking(basicPrivObjs);
    if (needRewritePrivObjs != null && !needRewritePrivObjs.isEmpty()) {
        for (HivePrivilegeObject privObj : needRewritePrivObjs) {
            MaskAndFilterInfo info = basicInfos.get(privObj);
            String replacementText = tableMask.create(privObj, info);
            if (replacementText != null) {
                // We don't support masking/filtering against ACID query at the moment
                if (ctx.getIsUpdateDeleteMerge()) {
                    throw new SemanticException(ErrorMsg.MASKING_FILTERING_ON_ACID_NOT_SUPPORTED,
                            privObj.getDbname(), privObj.getObjectName());
                }
                tableMask.setNeedsRewrite(true);
                tableMask.addTranslation(info.astNode, replacementText);
            }
        }
    }
}

From source file:com.ricemap.spateDB.core.RTree.java

/**
 * Builds the RTree given a serialized list of elements. It uses the given
 * stockObject to deserialize these elements and build the tree. Also writes
 * the created tree to the disk directly.
 * 
 * @param elements
 *            - serialization of elements to be written
 * @param offset
 *            - index of the first element to use in the elements array
 * @param len
 *            - number of bytes to user from the elements array
 * @param bytesAvailable
 *            - size available (in bytes) to store the tree structures
 * @param dataOut
 *            - an output to use for writing the tree to
 * @param fast_sort
 *            - setting this to <code>true</code> allows the method to run
 *            faster by materializing the offset of each element in the list
 *            which speeds up the comparison. However, this requires an
 *            additional 16 bytes per element. So, for each 1M elements, the
 *            method will require an additional 16 M bytes (approximately).
 */
public void bulkLoadWrite(final byte[] element_bytes, final int offset, final int len, final int degree,
        DataOutput dataOut, final boolean fast_sort, final boolean columnarStorage) {
    try {
        columnar = columnarStorage;
        //TODO: the order of fields should be stable under Oracle JVM, but not guaranteed
        Field[] fields = stockObject.getClass().getDeclaredFields();

        // Count number of elements in the given text
        int i_start = offset;
        final Text line = new Text();
        while (i_start < offset + len) {
            int i_end = skipToEOL(element_bytes, i_start);
            // Extract the line without end of line character
            line.set(element_bytes, i_start, i_end - i_start - 1);
            stockObject.fromText(line);

            elementCount++;
            i_start = i_end;
        }
        LOG.info("Bulk loading an RTree with " + elementCount + " elements");

        // It turns out the findBestDegree returns the best degree when the
        // whole
        // tree is loaded to memory when processed. However, as current
        // algorithms
        // process the tree while it's on disk, a higher degree should be
        // selected
        // such that a node fits one file block (assumed to be 4K).
        // final int degree = findBestDegree(bytesAvailable, elementCount);
        LOG.info("Writing an RTree with degree " + degree);

        int height = Math.max(1, (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
        int leafNodeCount = (int) Math.pow(degree, height - 1);
        if (elementCount < 2 * leafNodeCount && height > 1) {
            height--;
            leafNodeCount = (int) Math.pow(degree, height - 1);
        }
        int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
        int nonLeafNodeCount = nodeCount - leafNodeCount;

        // Keep track of the offset of each element in the text
        final int[] offsets = new int[elementCount];
        final int[] ids = new int[elementCount];
        final double[] ts = fast_sort ? new double[elementCount] : null;
        final double[] xs = fast_sort ? new double[elementCount] : null;
        final double[] ys = fast_sort ? new double[elementCount] : null;

        //initialize columnar data output
        ByteArrayOutputStream index_bos = new ByteArrayOutputStream();
        DataOutputStream index_dos = new DataOutputStream(index_bos);
        ByteArrayOutputStream[] bos = new ByteArrayOutputStream[fields.length];
        DataOutputStream[] dos = new DataOutputStream[fields.length];
        for (int i = 0; i < bos.length; i++) {
            bos[i] = new ByteArrayOutputStream();
            dos[i] = new DataOutputStream(bos[i]);
        }

        i_start = offset;
        line.clear();
        for (int i = 0; i < elementCount; i++) {
            offsets[i] = i_start;
            ids[i] = i;
            int i_end = skipToEOL(element_bytes, i_start);
            if (xs != null) {
                // Extract the line with end of line character
                line.set(element_bytes, i_start, i_end - i_start - 1);
                stockObject.fromText(line);
                // Sample center of the shape
                ts[i] = (stockObject.getMBR().t1 + stockObject.getMBR().t2) / 2;
                xs[i] = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                ys[i] = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;

                //build columnar storage
                if (stockObject instanceof Point3d) {
                    index_dos.writeDouble(ts[i]);
                    index_dos.writeDouble(xs[i]);
                    index_dos.writeDouble(ys[i]);
                } else {
                    throw new RuntimeException("Indexing non-point shape with RTREE is not supported yet");
                }

                for (int j = 0; j < fields.length; j++) {
                    if (fields[j].getType().equals(Integer.TYPE)) {
                        dos[j].writeInt(fields[j].getInt(stockObject));
                    } else if (fields[j].getType().equals(Double.TYPE)) {
                        dos[j].writeDouble(fields[j].getDouble(stockObject));
                    } else if (fields[j].getType().equals(Long.TYPE)) {
                        dos[j].writeLong(fields[j].getLong(stockObject));
                    } else {
                        continue;
                        //throw new RuntimeException("Field type is not supported yet");
                    }
                }
            }
            i_start = i_end;
        }
        index_dos.close();
        for (int i = 0; i < dos.length; i++) {
            dos[i].close();
        }

        /** A struct to store information about a split */
        class SplitStruct extends Prism {
            /** Start and end index for this split */
            int index1, index2;
            /** Direction of this split */
            byte direction;
            /** Index of first element on disk */
            int offsetOfFirstElement;

            static final byte DIRECTION_T = 0;
            static final byte DIRECTION_X = 1;
            static final byte DIRECTION_Y = 2;

            SplitStruct(int index1, int index2, byte direction) {
                this.index1 = index1;
                this.index2 = index2;
                this.direction = direction;
            }

            @Override
            public void write(DataOutput out) throws IOException {
                // Columnar storage records the index of the node's first
                // element in sorted order; row storage records that element's
                // byte offset within the serialized tree.
                if (columnarStorage)
                    out.writeInt(index1);
                else
                    out.writeInt(offsetOfFirstElement);
                super.write(out);
            }

            void partition(Queue<SplitStruct> toBePartitioned) {
                IndexedSortable sortableT;
                IndexedSortable sortableX;
                IndexedSortable sortableY;

                if (fast_sort) {
                    // Use the materialized ts[], xs[] and ys[] to do the comparisons
                    sortableT = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ts
                            double tempt = ts[i];
                            ts[i] = ts[j];
                            ts[j] = tempt;
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (ts[i] < ts[j])
                                return -1;
                            if (ts[i] > ts[j])
                                return 1;
                            return 0;
                        }
                    };
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ts
                            double tempt = ts[i];
                            ts[i] = ts[j];
                            ts[j] = tempt;
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (xs[i] < xs[j])
                                return -1;
                            if (xs[i] > xs[j])
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap ts
                            double tempt = ts[i];
                            ts[i] = ts[j];
                            ts[j] = tempt;
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (ys[i] < ys[j])
                                return -1;
                            if (ys[i] > ys[j])
                                return 1;
                            return 0;
                        }
                    };
                } else {
                    // No materialized ts, xs or ys arrays. Always deserialize
                    // objects to compare.
                    sortableT = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            // Get end of line
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double ti = (stockObject.getMBR().t1 + stockObject.getMBR().t2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double tj = (stockObject.getMBR().t1 + stockObject.getMBR().t2) / 2;
                            if (ti < tj)
                                return -1;
                            if (ti > tj)
                                return 1;
                            return 0;
                        }
                    };
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            // Get end of line
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double xi = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double xj = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                            if (xi < xj)
                                return -1;
                            if (xi > xj)
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;

                            tempid = ids[i];
                            ids[i] = ids[j];
                            ids[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double yi = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double yj = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
                            if (yi < yj)
                                return -1;
                            if (yi > yj)
                                return 1;
                            return 0;
                        }
                    };
                }

                final IndexedSorter sorter = new QuickSort();

                final IndexedSortable[] sortables = new IndexedSortable[3];
                sortables[SplitStruct.DIRECTION_T] = sortableT;
                sortables[SplitStruct.DIRECTION_X] = sortableX;
                sortables[SplitStruct.DIRECTION_Y] = sortableY;

                sorter.sort(sortables[direction], index1, index2);

                // Partition the range [index1, index2) into 'degree' equal
                // partitions and create a SplitStruct for each partition
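                // Each child cycles to the next split direction (t -> x -> y
                // -> t), so consecutive tree levels are sorted along
                // alternating axes, in the spirit of STR bulk loading.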
                int i1 = index1;
                for (int iSplit = 0; iSplit < degree; iSplit++) {
                    int i2 = index1 + (index2 - index1) * (iSplit + 1) / degree;
                    SplitStruct newSplit;
                    if (direction == DIRECTION_T) {
                        newSplit = new SplitStruct(i1, i2, DIRECTION_X);
                    } else if (direction == DIRECTION_X) {
                        newSplit = new SplitStruct(i1, i2, DIRECTION_Y);
                    } else {
                        newSplit = new SplitStruct(i1, i2, DIRECTION_T);
                    }
                    toBePartitioned.add(newSplit);
                    i1 = i2;
                }
            }
        }

        // All nodes stored in level-order traversal
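        // Breadth-first construction: pop a split off the queue and, while the
        // tree still needs non-leaf nodes, partition it so that its 'degree'
        // children are appended to the queue. Because children are enqueued in
        // order, 'nodes' ends up in level order, which is also the order in
        // which the nodes are written to disk.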
        Vector<SplitStruct> nodes = new Vector<SplitStruct>();
        final Queue<SplitStruct> toBePartitioned = new LinkedList<SplitStruct>();
        toBePartitioned.add(new SplitStruct(0, elementCount, SplitStruct.DIRECTION_X));

        while (!toBePartitioned.isEmpty()) {
            SplitStruct split = toBePartitioned.poll();
            if (nodes.size() < nonLeafNodeCount) {
                // This is a non-leaf
                split.partition(toBePartitioned);
            }
            nodes.add(split);
        }

        if (nodes.size() != nodeCount) {
            throw new RuntimeException(
                    "Expected node count: " + nodeCount + ". Real node count: " + nodes.size());
        }

        // Now we have our data sorted in the required order. Start building
        // the tree.
        // Store the offset of each leaf node in the tree
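        // The stream below discards everything it is given; it only advances
        // its position counter, which starts right after the header and the
        // node array. Replaying the leaf contents through it yields each
        // leaf's offsetOfFirstElement without buffering any real output.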
        FSDataOutputStream fakeOut = new FSDataOutputStream(new java.io.OutputStream() {
            // Null output stream
            @Override
            public void write(int b) throws IOException {
                // Do nothing
            }

            @Override
            public void write(byte[] b, int off, int len) throws IOException {
                // Do nothing
            }

            @Override
            public void write(byte[] b) throws IOException {
                // Do nothing
            }
        }, null, TreeHeaderSize + nodes.size() * NodeSize);
        for (int i_leaf = nonLeafNodeCount, i = 0; i_leaf < nodes.size(); i_leaf++) {
            nodes.elementAt(i_leaf).offsetOfFirstElement = (int) fakeOut.getPos();
            if (i != nodes.elementAt(i_leaf).index1)
                throw new RuntimeException();
            double t1, x1, y1, t2, x2, y2;

            // Initialize MBR to first object
            int eol = skipToEOL(element_bytes, offsets[i]);
            fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
            stockObject.fromText(line);
            Prism mbr = stockObject.getMBR();
            t1 = mbr.t1;
            x1 = mbr.x1;
            y1 = mbr.y1;
            t2 = mbr.t2;
            x2 = mbr.x2;
            y2 = mbr.y2;
            i++;

            while (i < nodes.elementAt(i_leaf).index2) {
                eol = skipToEOL(element_bytes, offsets[i]);
                fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
                line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                stockObject.fromText(line);
                mbr = stockObject.getMBR();
                if (mbr.t1 < t1)
                    t1 = mbr.t1;
                if (mbr.x1 < x1)
                    x1 = mbr.x1;
                if (mbr.y1 < y1)
                    y1 = mbr.y1;
                if (mbr.t2 > t2)
                    t2 = mbr.t2;
                if (mbr.x2 > x2)
                    x2 = mbr.x2;
                if (mbr.y2 > y2)
                    y2 = mbr.y2;
                i++;
            }
            nodes.elementAt(i_leaf).set(t1, x1, y1, t2, x2, y2);
        }
        fakeOut.close();
        fakeOut = null;

        // Calculate MBR and offsetOfFirstElement for non-leaves
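        // In level order the children of node i occupy positions
        // i * degree + 1 ... i * degree + degree, so walking from the last
        // internal node back to the root lets every parent take the union of
        // its children's MBRs and inherit its first child's element offset.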
        for (int i_node = nonLeafNodeCount - 1; i_node >= 0; i_node--) {
            int i_first_child = i_node * degree + 1;
            nodes.elementAt(i_node).offsetOfFirstElement = nodes.elementAt(i_first_child).offsetOfFirstElement;
            int i_child = 0;
            Prism mbr;
            mbr = nodes.elementAt(i_first_child + i_child);
            double t1 = mbr.t1;
            double x1 = mbr.x1;
            double y1 = mbr.y1;
            double t2 = mbr.t2;
            double x2 = mbr.x2;
            double y2 = mbr.y2;
            i_child++;

            while (i_child < degree) {
                mbr = nodes.elementAt(i_first_child + i_child);
                if (mbr.t1 < t1)
                    t1 = mbr.t1;
                if (mbr.x1 < x1)
                    x1 = mbr.x1;
                if (mbr.y1 < y1)
                    y1 = mbr.y1;
                if (mbr.t2 > t2)
                    t2 = mbr.t2;
                if (mbr.x2 > x2)
                    x2 = mbr.x2;
                if (mbr.y2 > y2)
                    y2 = mbr.y2;
                i_child++;
            }
            nodes.elementAt(i_node).set(t1, x1, y1, t2, x2, y2);
        }

        // Start writing the tree
        // Write tree header (including size)
        // Total tree size (== total bytes written - 8 bytes for the size itself)
        dataOut.writeInt(TreeHeaderSize + NodeSize * nodeCount + len);
        // Tree height
        dataOut.writeInt(height);
        // Degree
        dataOut.writeInt(degree);
        dataOut.writeInt(elementCount);

        //isColumnar
        dataOut.writeInt(columnarStorage ? 1 : 0);

        // write nodes
        for (SplitStruct node : nodes) {
            node.write(dataOut);
        }
        // write elements
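        // Columnar layout: first the fixed-size (t, x, y) index records, then
        // one column per primitive field, each emitted in the tree's sorted
        // element order. ids[] maps a sorted position back to the position the
        // element had when its columns were buffered, so each write below can
        // seek directly into the per-column byte arrays.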
        if (columnarStorage) {
            byte[] index_bs = index_bos.toByteArray();
            byte[][] bss = new byte[bos.length][];
            for (int i = 0; i < bss.length; i++) {
                bss[i] = bos[i].toByteArray();
            }
            for (int element_i = 0; element_i < elementCount; element_i++) {
                //int eol = skipToEOL(element_bytes, offsets[element_i]);
                //dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
                dataOut.write(index_bs, ids[element_i] * IndexUnitSize, IndexUnitSize);
            }

            for (int i = 0; i < fields.length; i++) {
                int fieldSize = 0;
                if (fields[i].getType().equals(Integer.TYPE)) {
                    fieldSize = 4;
                } else if (fields[i].getType().equals(Long.TYPE)) {
                    fieldSize = 8;
                } else if (fields[i].getType().equals(Double.TYPE)) {
                    fieldSize = 8;
                } else {
                    //throw new RuntimeException("Unsupported field type: " + fields[i].getType().getName());
                    continue;
                }
                for (int element_i = 0; element_i < elementCount; element_i++) {
                    //int eol = skipToEOL(element_bytes, offsets[element_i]);
                    //dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
                    dataOut.write(bss[i], ids[element_i] * fieldSize, fieldSize);
                }
            }
        } else {
            for (int element_i = 0; element_i < elementCount; element_i++) {
                int eol = skipToEOL(element_bytes, offsets[element_i]);
                dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
            }
        }

    } catch (IOException e) {
        e.printStackTrace();
    } catch (IllegalArgumentException e) {
        // Thrown by the reflective access to stockObject's fields
        e.printStackTrace();
    } catch (IllegalAccessException e) {
        // Thrown by the reflective access to stockObject's fields
        e.printStackTrace();
    }
}
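
/*
 * A minimal usage sketch (an assumption based on this listing, not a verified
 * API): a caller would typically configure the stock shape, read the
 * newline-separated records into a byte array, and stream the packed tree to a
 * DataOutput. Names such as RTree, Point3d, setStockObject and the chosen
 * degree are illustrative only.
 *
 *   RTree<Point3d> rtree = new RTree<Point3d>();
 *   rtree.setStockObject(new Point3d());
 *   byte[] records = ...; // text records, one per line
 *   FSDataOutputStream out = fs.create(treePath);
 *   rtree.bulkLoadWrite(records, 0, records.length, 25, out, true, false);
 *   out.close();
 */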