Example usage for java.util.Queue.add

List of usage examples for java.util.Queue.add

Introduction

On this page you can find example usage for java.util.Queue.add.

Prototype

boolean add(E e);

Document

Inserts the specified element into this queue if it is possible to do so immediately without violating capacity restrictions, returning true upon success and throwing an IllegalStateException if no space is currently available.
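
For a quick illustration of this contract, here is a minimal, self-contained sketch (not taken from the sources below). It uses the capacity-bounded java.util.concurrent.ArrayBlockingQueue; unbounded implementations such as LinkedList never hit the capacity restriction, and offer is the non-throwing alternative that returns false instead.

import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class QueueAddDemo {
    public static void main(String[] args) {
        Queue<String> queue = new ArrayBlockingQueue<>(2); // bounded: capacity 2

        queue.add("a"); // returns true
        queue.add("b"); // returns true; the queue is now full

        System.out.println(queue.offer("c")); // prints false: offer rejects quietly

        try {
            queue.add("c"); // no space is currently available
        } catch (IllegalStateException ex) {
            System.out.println("add failed: " + ex); // e.g. java.lang.IllegalStateException: Queue full
        }
    }
}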

Usage

From source file: com.searchcode.app.jobs.IndexGitRepoJob.java

/**
 * Indexes all the documents in the path provided. Will also remove anything from the index if not on disk.
 * Generally this is a slow update used only for the initial clone of a repository.
 * NB: this can be used for updates, but it will be much slower as it needs to walk the contents of the disk.
 */
public void indexDocsByPath(Path path, String repoName, String repoLocations, String repoRemoteLocation,
        boolean existingRepo) {
    SearchcodeLib scl = Singleton.getSearchCodeLib(); // Should have data object by this point
    List<String> fileLocations = new ArrayList<>();
    Queue<CodeIndexDocument> codeIndexDocumentQueue = Singleton.getCodeIndexQueue();

    // Convert once outside the main loop
    String fileRepoLocations = FilenameUtils.separatorsToUnix(repoLocations);
    boolean lowMemory = this.LOWMEMORY;
    boolean useSystemGit = this.USESYSTEMGIT;

    try {
        Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {

                while (CodeIndexer.shouldPauseAdding()) {
                    Singleton.getLogger().info("Pausing parser.");
                    try {
                        Thread.sleep(SLEEPTIME);
                    } catch (InterruptedException ex) {
                        Thread.currentThread().interrupt(); // restore the interrupt flag rather than swallowing it
                    }
                }

                // Convert the Path to unix style so that everything is easier to reason about
                String fileParent = FilenameUtils.separatorsToUnix(file.getParent().toString());
                String fileToString = FilenameUtils.separatorsToUnix(file.toString());
                String fileName = file.getFileName().toString();
                String md5Hash = Values.EMPTYSTRING;

                if (fileParent.endsWith("/.git") || fileParent.contains("/.git/")) {
                    return FileVisitResult.CONTINUE;
                }

                List<String> codeLines;
                try {
                    codeLines = Helpers.readFileLines(fileToString, MAXFILELINEDEPTH);
                } catch (IOException ex) {
                    return FileVisitResult.CONTINUE;
                }

                try (FileInputStream fis = new FileInputStream(new File(fileToString))) {
                    md5Hash = org.apache.commons.codec.digest.DigestUtils.md5Hex(fis);
                } catch (IOException ex) {
                    Singleton.getLogger().warning("Unable to generate MD5 for " + fileToString);
                }

                // is the file minified?
                if (scl.isMinified(codeLines)) {
                    Singleton.getLogger().info("Appears to be minified will not index  " + fileToString);
                    return FileVisitResult.CONTINUE;
                }

                String languageName = scl.languageGuesser(fileName, codeLines);
                String fileLocation = fileToString.replace(fileRepoLocations, Values.EMPTYSTRING)
                        .replace(fileName, Values.EMPTYSTRING);
                String fileLocationFilename = fileToString.replace(fileRepoLocations, Values.EMPTYSTRING);
                String repoLocationRepoNameLocationFilename = fileToString;

                String newString = getBlameFilePath(fileLocationFilename);
                List<CodeOwner> owners;
                if (useSystemGit) {
                    owners = getBlameInfoExternal(codeLines.size(), repoName, fileRepoLocations, newString);
                } else {
                    owners = getBlameInfo(codeLines.size(), repoName, fileRepoLocations, newString);
                }

                String codeOwner = scl.codeOwner(owners);

                // If low memory, don't add to the queue; just index it directly
                if (lowMemory) {
                    CodeIndexer.indexDocument(new CodeIndexDocument(repoLocationRepoNameLocationFilename,
                            repoName, fileName, fileLocation, fileLocationFilename, md5Hash, languageName,
                            codeLines.size(), StringUtils.join(codeLines, " "), repoRemoteLocation, codeOwner));
                } else {
                    Singleton.incrementCodeIndexLinesCount(codeLines.size());
                    codeIndexDocumentQueue.add(new CodeIndexDocument(repoLocationRepoNameLocationFilename,
                            repoName, fileName, fileLocation, fileLocationFilename, md5Hash, languageName,
                            codeLines.size(), StringUtils.join(codeLines, " "), repoRemoteLocation, codeOwner));
                }

                fileLocations.add(fileLocationFilename);
                return FileVisitResult.CONTINUE;
            }
        });
    } catch (IOException ex) {
        Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                + "\n with message: " + ex.getMessage());
    }

    if (existingRepo) {
        CodeSearcher cs = new CodeSearcher();
        List<String> indexLocations = cs.getRepoDocuments(repoName);

        for (String file : indexLocations) {
            if (!fileLocations.contains(file)) {
                Singleton.getLogger().info("Missing from disk, removing from index " + file);
                try {
                    CodeIndexer.deleteByFileLocationFilename(file);
                } catch (IOException ex) {
                    Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                            + "\n with message: " + ex.getMessage());
                }
            }
        }
    }
}

From source file: replicatorg.app.gcode.GCodeParser.java

private void buildGCodes(GCodeCommand gcode, Queue<DriverCommand> commands) throws GCodeException {
    if (!gcode.hasCode('G')) {
        throw new GCodeException("Not a G code!");
    }

    // start us off at our current position...
    Point5d pos = driver.getCurrentPosition(false);

    // initialize our points, etc.
    double iVal = convertToMM(gcode.getCodeValue('I'), units); // X offset for arcs
    double jVal = convertToMM(gcode.getCodeValue('J'), units); // Y offset for arcs
    @SuppressWarnings("unused")
    double kVal = convertToMM(gcode.getCodeValue('K'), units); // Z offset for arcs
    @SuppressWarnings("unused")
    double qVal = convertToMM(gcode.getCodeValue('Q'), units); // feed increment for G83
    double rVal = convertToMM(gcode.getCodeValue('R'), units); // arc radius
    double xVal = convertToMM(gcode.getCodeValue('X'), units); // X units
    double yVal = convertToMM(gcode.getCodeValue('Y'), units); // Y units
    double zVal = convertToMM(gcode.getCodeValue('Z'), units); // Z units
    double aVal = convertToMM(gcode.getCodeValue('A'), units); // A units
    double bVal = convertToMM(gcode.getCodeValue('B'), units); // B units
    // Note: The E axis is treated internally as the A or B axis
    double eVal = convertToMM(gcode.getCodeValue('E'), units); // E units

    // adjust for our offsets
    xVal += currentOffset.x;
    yVal += currentOffset.y;
    zVal += currentOffset.z;

    // absolute just specifies the new position
    if (absoluteMode) {
        if (gcode.hasCode('X'))
            pos.setX(xVal);
        if (gcode.hasCode('Y'))
            pos.setY(yVal);
        if (gcode.hasCode('Z'))
            pos.setZ(zVal);
        if (gcode.hasCode('A'))
            pos.setA(aVal);
        if (gcode.hasCode('E')) {
            // can't assume tool 0 == a, it's configurable in machine.xml!
            if ("B".equals(driver.getMachine().getTool(tool).getMotorStepperAxis().name())) {
                // Base.logger.warning("Mapping axis E to axis: " + driver.getMachine().getTool(tool).getMotorStepperAxis().name());
                pos.setB(eVal);
            } else {
                // Base.logger.warning("Mapping axis E to axis: " + driver.getMachine().getTool(tool).getMotorStepperAxis().name());
                pos.setA(eVal);
            }
        }
        if (gcode.hasCode('B'))
            pos.setB(bVal);
    }
    // relative specifies a delta
    else {
        if (gcode.hasCode('X'))
            pos.setX(pos.x() + xVal);
        if (gcode.hasCode('Y'))
            pos.setY(pos.y() + yVal);
        if (gcode.hasCode('Z'))
            pos.setZ(pos.z() + zVal);
        if (gcode.hasCode('A'))
            pos.setA(pos.a() + aVal);
        if (gcode.hasCode('E')) {
            // can't assume tool 0 == a, it's configurable in machine.xml!
            if ("B".equals(driver.getMachine().getTool(tool).getMotorStepperAxis().name())) {
                // Base.logger.warning("Mapping axis E to axis: " + driver.getMachine().getTool(tool).getMotorStepperAxis().name());
                pos.setB(pos.b() + eVal);
            } else {
                // Base.logger.warning("Mapping axis E to axis: " + driver.getMachine().getTool(tool).getMotorStepperAxis().name());
                pos.setA(pos.a() + eVal);
            }
        }
        if (gcode.hasCode('B'))
            pos.setB(pos.b() + bVal);
    }

    // Get feedrate if supplied
    if (gcode.hasCode('F')) {
        // Read feedrate in mm/min.
        feedrate = gcode.getCodeValue('F');

        // TODO: Why do we do this here, and not in individual commands?
        commands.add(new replicatorg.drivers.commands.SetFeedrate(feedrate));
    }

    GCodeEnumeration codeEnum = GCodeEnumeration.getGCode("G", (int) gcode.getCodeValue('G'));

    // handle unrecognised GCode
    if (codeEnum == null) {
        String message = "Unrecognized GCode! G" + (int) gcode.getCodeValue('G');
        Base.logger.log(Level.SEVERE, message);
        throw new GCodeException(message);
    }

    switch (codeEnum) {
    // these are basically the same thing, but G0 is supposed to do it as quickly as possible.
    // Rapid Positioning
    case G0:
        if (gcode.hasCode('F')) {
            // Allow user to explicitly override G0 feedrate if they so desire.
            commands.add(new replicatorg.drivers.commands.SetFeedrate(feedrate));
        } else {
            // Compute the most rapid possible rate for this move.
            Point5d diff = driver.getCurrentPosition(false);
            diff.sub(pos);
            diff.absolute();
            double length = diff.length();
            double selectedFR = Double.MAX_VALUE;
            Point5d maxFR = driver.getMaximumFeedrates();
            // Compute the feedrate assuming maximum feed along each axis, and
            // select the slowest option.
            for (int idx = 0; idx < 3; idx++) {
                double axisMove = diff.get(idx);
                if (axisMove == 0) {
                    continue;
                }
                double candidate = maxFR.get(idx) * length / axisMove;
                if (candidate < selectedFR) {
                    selectedFR = candidate;
                }
            }
            // Add a sane default for the null move, just in case.
            if (selectedFR == Double.MAX_VALUE) {
                selectedFR = maxFR.get(0);
            }
            commands.add(new replicatorg.drivers.commands.SetFeedrate(selectedFR));
        }
        commands.add(new replicatorg.drivers.commands.QueuePoint(pos));
        break;
    // Linear Interpolation
    case G1:
        // set our target.
        commands.add(new replicatorg.drivers.commands.SetFeedrate(feedrate));
        commands.add(new replicatorg.drivers.commands.QueuePoint(pos));
        break;
    // Clockwise arc
    case G2:
        // Counterclockwise arc
    case G3: {
        // call our arc drawing function.
        // Note: We don't support 5D
        if (gcode.hasCode('I') || gcode.hasCode('J')) {
            // our centerpoint
            Point5d center = new Point5d();
            Point5d current = driver.getCurrentPosition(false);
            center.setX(current.x() + iVal);
            center.setY(current.y() + jVal);

            // Get the points for the arc
            if (codeEnum == GCodeEnumeration.G2)
                commands.addAll(drawArc(center, pos, true));
            else
                commands.addAll(drawArc(center, pos, false));
        }
        // or we want a radius based one
        else if (gcode.hasCode('R')) {
            throw new GCodeException("G02/G03 arcs with (R)adius parameter are not supported yet.");
        }
    }
        break;
    // dwell
    case G4:
        commands.add(new replicatorg.drivers.commands.Delay((long) gcode.getCodeValue('P')));
        break;
    case G10:
        if (gcode.hasCode('P')) {
            int offsetSystemNum = ((int) gcode.getCodeValue('P'));
            if (offsetSystemNum >= 1 && offsetSystemNum <= 6) {
                if (gcode.hasCode('X'))
                    commands.add(new replicatorg.drivers.commands.SetAxisOffset(AxisId.X, offsetSystemNum,
                            gcode.getCodeValue('X')));
                if (gcode.hasCode('Y'))
                    commands.add(new replicatorg.drivers.commands.SetAxisOffset(AxisId.Y, offsetSystemNum,
                            gcode.getCodeValue('Y')));
                if (gcode.hasCode('Z'))
                    commands.add(new replicatorg.drivers.commands.SetAxisOffset(AxisId.Z, offsetSystemNum,
                            gcode.getCodeValue('Z')));
            }
        } else
            Base.logger.warning("No coordinate system indicated use G10 Pn, where n is 0-6.");
        break;
    // Inches for Units
    case G20:
    case G70:
        units = UNITS_INCHES;
        curveSection = curveSectionInches;
        break;
    // mm for Units
    case G21:
    case G71:
        units = UNITS_MM;
        curveSection = curveSectionMM;
        break;
    // This should be "return to home".  We need to introduce new GCodes for homing.
    //replaced by G161, G162
    case G28: {
        // home all axes?
        EnumSet<AxisId> axes = getAxes(gcode);

        if (gcode.hasCode('F')) {
            commands.add(new replicatorg.drivers.commands.HomeAxes(axes, LinearDirection.POSITIVE, feedrate));
        } else {
            commands.add(new replicatorg.drivers.commands.HomeAxes(axes, LinearDirection.POSITIVE));
        }
    }
        break;
    // home negative.
    case G161: {
        // home all axes?
        EnumSet<AxisId> axes = getAxes(gcode);

        if (gcode.hasCode('F')) {
            commands.add(new replicatorg.drivers.commands.HomeAxes(axes, LinearDirection.NEGATIVE, feedrate));
        } else {
            commands.add(new replicatorg.drivers.commands.HomeAxes(axes, LinearDirection.NEGATIVE));
        }
    }
        break;
    // home positive.
    case G162: {
        // home all axes?
        EnumSet<AxisId> axes = getAxes(gcode);
        if (gcode.hasCode('F')) {
            commands.add(new replicatorg.drivers.commands.HomeAxes(axes, LinearDirection.POSITIVE, feedrate));
        } else {
            commands.add(new replicatorg.drivers.commands.HomeAxes(axes, LinearDirection.POSITIVE));
        }
    }
        break;
    // master offset
    case G53:
        currentOffset = driver.getOffset(0);
        break;
    // fixture offset 1
    case G54:
        currentOffset = driver.getOffset(1);
        break;
    // fixture offset 2
    case G55:
        currentOffset = driver.getOffset(2);
        break;
    // fixture offset 3
    case G56:
        currentOffset = driver.getOffset(3);
        break;
    // fixture offset 4
    case G57:
        currentOffset = driver.getOffset(4);
        break;
    // fixture offset 5
    case G58:
        currentOffset = driver.getOffset(5);
        break;
    // fixture offset 6
    case G59:
        currentOffset = driver.getOffset(6);
        break;
    // Absolute Positioning
    case G90:
        absoluteMode = true;
        break;
    // Incremental Positioning
    case G91:
        absoluteMode = false;
        break;
    // Set position
    case G92:
        Point5d current = driver.getCurrentPosition(false);

        if (gcode.hasCode('X'))
            current.setX(xVal);
        if (gcode.hasCode('Y'))
            current.setY(yVal);
        if (gcode.hasCode('Z'))
            current.setZ(zVal);
        if (gcode.hasCode('A'))
            current.setA(aVal);
        if (gcode.hasCode('E')) {
            // can't assume tool 0 == a, it's configurable in machine.xml!
            if ("B".equals(driver.getMachine().getTool(tool).getMotorStepperAxis().name())) {
                // Base.logger.warning("Resetting position of axis E to axis: " + driver.getMachine().getTool(tool).getMotorStepperAxis().name());
                current.setB(eVal);
            } else {
                // Base.logger.warning("Resetting position of axis E to axis: " + driver.getMachine().getTool(tool).getMotorStepperAxis().name());
                current.setA(eVal);
            }
        }
        if (gcode.hasCode('B'))
            current.setB(bVal);

        commands.add(new replicatorg.drivers.commands.SetCurrentPosition(current));
        break;
    // feed rate mode
    // case G93: // inverse time feed rate
    // case G94: // IPM feed rate (our default)
    // case G95: // IPR feed rate
    //     TODO: make this work.
    //     break;
    // spindle speed rate
    case G97:
        commands.add(new replicatorg.drivers.commands.SetSpindleRPM(gcode.getCodeValue('S')));
        break;
    case G130:
        // TODO: axis IDs should not be hard-coded
        if (gcode.hasCode('X'))
            commands.add(new replicatorg.drivers.commands.SetStepperVoltage(0, (int) gcode.getCodeValue('X')));
        if (gcode.hasCode('Y'))
            commands.add(new replicatorg.drivers.commands.SetStepperVoltage(1, (int) gcode.getCodeValue('Y')));
        if (gcode.hasCode('Z'))
            commands.add(new replicatorg.drivers.commands.SetStepperVoltage(2, (int) gcode.getCodeValue('Z')));
        if (gcode.hasCode('A'))
            commands.add(new replicatorg.drivers.commands.SetStepperVoltage(3, (int) gcode.getCodeValue('A')));
        if (gcode.hasCode('B'))
            commands.add(new replicatorg.drivers.commands.SetStepperVoltage(4, (int) gcode.getCodeValue('B')));
        break;
    // error, error!
    default:
        throw new GCodeException("Unknown G code: G" + (int) gcode.getCodeValue('G'));
    }
}

From source file: de.uni_koblenz.jgralab.utilities.rsa.Rsa2Tg.java

/**
 * @param containingGEC
 *            {@link GraphElementClass} which nests containedGEC
 * @param containedGEC
 *            {@link GraphElementClass} which is nested in containingGEC
 * @param workingList
 *            {@link Queue} which contains all elements which are nesting
 *            some element but are not nested in any element
 */
private void insertContainingGECIntoWorkingList(GraphElementClass containingGEC, GraphElementClass containedGEC,
        Queue<GraphElementClass> workingList) {
    if (workingList.contains(containedGEC)) {
        workingList.remove(containedGEC);
    }
    if (!workingList.contains(containingGEC)) {
        workingList.add(containingGEC);
    }
}

From source file: de.uni_koblenz.jgralab.utilities.rsa.Rsa2Tg.java

/**
 * Creates the MayBeNestedIn relations between the GraphElementClasses.
 */
private void createMayBeNestedIn() {
    System.out.println("Create MayBeNestedIn relations ...");
    updateNestedElements();

    // stores the GraphElementClasses which have nested elements but are not
    // nested in another GraphElementClass
    Queue<GraphElementClass> workingList = new LinkedList<GraphElementClass>();
    Queue<GraphElementClass> topLevelNestingElements = new LinkedList<GraphElementClass>();

    // all edges have to be treated
    for (EdgeClass ec : sg.getEdgeClassVertices()) {
        workingList.add(ec);
        topLevelNestingElements.add(ec);
    }

    // create the explicitly modeled MayBeNestedIn edges
    for (GraphElement<?, ?, ?, ?> ge : nestedElements.getMarkedElements()) {
        GraphElementClass containingGEC = (GraphElementClass) ge;
        assert nestedElements.getMark(containingGEC) != null;
        assert !nestedElements.getMark(containingGEC).isEmpty();

        for (GraphElementClass containedGEC : nestedElements.getMark(containingGEC)) {
            sg.createMayBeNestedIn(containedGEC, containingGEC);
            insertContainingGECIntoWorkingList(containingGEC, containedGEC, topLevelNestingElements);
        }
    }

    checkAcyclicityOfMayBeNestedIn(topLevelNestingElements);

    // check correctness of explicitly modeled MayBeNestedIn edges and create
    // implicit MayBeNestedIn edges during a breadth first search over the
    // GraphElementClasses participating in the MayBeNestedIn tree
    LocalBooleanGraphMarker isImplicitlyNested = new LocalBooleanGraphMarker(sg);
    while (!workingList.isEmpty()) {
        GraphElementClass current = workingList.poll();
        assert current != null;

        if (EdgeClass.class.isInstance(current)) {
            EdgeClass containedEC = (EdgeClass) current;

            // check constraints for explicitly nested EdgeClasses
            for (MayBeNestedIn_nestedElement i : containedEC.getIncidences(MayBeNestedIn_nestedElement.class)) {
                if (!isImplicitlyNested.isMarked(i.getEdge())) {
                    GraphElementClass containingGEC = (GraphElementClass) i.getThat();
                    checkNestingConstraints(containedEC, containingGEC);
                }
            }

            // create implicit MayBeNestedIn edges
            for (GraphElementClass containingGEC : getAllNestingElements(containedEC)) {
                isImplicitlyNested.mark(sg.createMayBeNestedIn(containedEC, containingGEC));
                if (topLevelNestingElements.contains(containedEC)) {
                    topLevelNestingElements.remove(containedEC);
                }
            }
        }

        // insert all nested GraphElementClasses into workingList
        for (MayBeNestedIn_nestingElement i : current.getIncidences(MayBeNestedIn_nestingElement.class)) {
            if (!workingList.contains(i.getThat()) && !isImplicitlyNested.isMarked(i.getEdge())) {
                workingList.add((GraphElementClass) i.getThat());
            }
        }
    }

    deleteDuplicateMayBeNestedIn();

    checkAcyclicityOfMayBeNestedIn(topLevelNestingElements);
}
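
The loop above is the classic Queue-driven breadth-first worklist: poll the head, process it, and add newly discovered elements to the tail. A generic, self-contained sketch of the same idiom follows (graph, seen, and the node names are illustrative, not part of Rsa2Tg):

import java.util.ArrayDeque;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;

public class WorklistBfs {
    // Visits every node reachable from start, level by level.
    static void bfs(Map<String, List<String>> graph, String start) {
        Queue<String> workingList = new ArrayDeque<>();
        Set<String> seen = new HashSet<>();
        workingList.add(start);
        seen.add(start);
        while (!workingList.isEmpty()) {
            String current = workingList.poll();
            System.out.println(current);
            for (String next : graph.getOrDefault(current, List.of())) {
                if (seen.add(next)) {      // Set.add returns true only for unseen nodes
                    workingList.add(next); // Queue.add enqueues it for a later level
                }
            }
        }
    }

    public static void main(String[] args) {
        bfs(Map.of("a", List.of("b", "c"), "b", List.of("d")), "a");
    }
}

Tracking visited nodes in a Set avoids the linear-time workingList.contains scans the original performs on a LinkedList.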

From source file: it.geosolutions.geobatch.geotiff.retile.GeotiffRetiler.java

public Queue<FileSystemEvent> execute(Queue<FileSystemEvent> events) throws ActionException {
    try {

        if (configuration == null) {
            final String message = "GeotiffRetiler::execute(): flow configuration is null.";
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            throw new ActionException(this, message);
        }
        if (events.size() == 0) {
            throw new ActionException(this,
                    "GeotiffRetiler::execute(): Unable to process an empty events queue.");
        }

        if (LOGGER.isInfoEnabled())
            LOGGER.info("GeotiffRetiler::execute(): Starting with processing...");

        listenerForwarder.started();

        // The return
        final Queue<FileSystemEvent> ret = new LinkedList<FileSystemEvent>();

        while (events.size() > 0) {

            FileSystemEvent event = events.remove();

            File eventFile = event.getSource();
            FileSystemEventType eventType = event.getEventType();

            if (eventFile.exists() && eventFile.canRead() && eventFile.canWrite()) {
                /*
                 * If here: we can start retiler actions on the incoming file event
                 */

                if (eventFile.isDirectory()) {

                    File[] fileList = eventFile.listFiles();
                    int size = fileList.length;
                    for (int progress = 0; progress < size; progress++) {

                        File inFile = fileList[progress];

                        final String absolutePath = inFile.getAbsolutePath();
                        final String inputFileName = FilenameUtils.getName(absolutePath);

                        if (LOGGER.isInfoEnabled())
                            LOGGER.info("is going to retile: " + inputFileName);

                        try {

                            listenerForwarder.setTask("GeotiffRetiler");

                            File tiledTiffFile = File.createTempFile(inFile.getName(), "_tiled.tif",
                                    getTempDir());
                            if (tiledTiffFile.exists()) {
                                // file already exists
                                // check write permission
                                if (!tiledTiffFile.canWrite()) {
                                    final String message = "Unable to over-write the temporary file called: "
                                            + tiledTiffFile.getAbsolutePath() + "\nCheck permissions.";
                                    if (LOGGER.isErrorEnabled()) {
                                        LOGGER.error(message);
                                    }
                                    throw new IllegalArgumentException(message);
                                }
                            } else if (!tiledTiffFile.createNewFile()) {
                                final String message = "Unable to create temporary file called: "
                                        + tiledTiffFile.getAbsolutePath();
                                if (LOGGER.isErrorEnabled()) {
                                    LOGGER.error(message);
                                }
                                throw new IllegalArgumentException(message);
                            }
                            final double compressionRatio = getConfiguration().getCompressionRatio();
                            final String compressionType = getConfiguration().getCompressionScheme();

                            reTile(inFile, tiledTiffFile, compressionRatio, compressionType,
                                    getConfiguration().getTileW(), getConfiguration().getTileH(),
                                    getConfiguration().isForceToBigTiff());

                            String extension = FilenameUtils.getExtension(inputFileName);
                            if (!extension.contains("tif")) {
                                extension = "tif";
                            }
                            final String outputFileName = FilenameUtils.getFullPath(absolutePath)
                                    + FilenameUtils.getBaseName(inputFileName) + "." + extension;
                            final File outputFile = new File(outputFileName);
                            // do we need to remove the input?
                            FileUtils.copyFile(tiledTiffFile, outputFile);
                            FileUtils.deleteQuietly(tiledTiffFile);

                            // set the output
                            /*
                             * COMMENTED OUT 21 Feb 2011: simone: If the event represents a Dir
                             * we have to return a Dir. Do not matter failing files.
                             * 
                             * carlo: we may also want to check if a file is already tiled!
                             * 
                             * File outputFile=reTile(inFile); if (outputFile!=null){ //TODO:
                             * here we use the same event for each file in the ret.add(new
                             * FileSystemEvent(outputFile, eventType)); }
                             */

                        } catch (UnsupportedOperationException uoe) {
                            listenerForwarder.failed(uoe);
                            if (LOGGER.isWarnEnabled())
                                LOGGER.warn(uoe.getLocalizedMessage(), uoe);
                            continue;
                        } catch (IOException ioe) {
                            listenerForwarder.failed(ioe);
                            if (LOGGER.isWarnEnabled())
                                LOGGER.warn(ioe.getLocalizedMessage(), ioe);
                            continue;
                        } catch (IllegalArgumentException iae) {
                            listenerForwarder.failed(iae);
                            if (LOGGER.isWarnEnabled())
                                LOGGER.warn(iae.getLocalizedMessage(), iae);
                            continue;
                        } finally {
                            listenerForwarder.setProgress((progress * 100) / ((size != 0) ? size : 1));
                            listenerForwarder.progressing();
                        }
                    }

                    if (LOGGER.isInfoEnabled())
                        LOGGER.info("SUCCESSFULLY completed work on: " + event.getSource());

                    // add the directory to the return
                    ret.add(event);
                } else {
                    // file is not a directory
                    try {
                        listenerForwarder.setTask("GeotiffRetiler");

                        File tiledTiffFile = File.createTempFile(eventFile.getName(), "_tiled.tif",
                                eventFile.getParentFile());
                        if (tiledTiffFile.exists()) {
                            // file already exists
                            // check write permission
                            if (!tiledTiffFile.canWrite()) {
                                final String message = "Unable to over-write the temporary file called: "
                                        + tiledTiffFile.getAbsolutePath() + "\nCheck permissions.";
                                if (LOGGER.isErrorEnabled()) {
                                    LOGGER.error(message);
                                }
                                throw new IllegalArgumentException(message);
                            }
                        } else if (!tiledTiffFile.createNewFile()) {
                            final String message = "Unable to create temporary file called: "
                                    + tiledTiffFile.getAbsolutePath();
                            if (LOGGER.isErrorEnabled()) {
                                LOGGER.error(message);
                            }
                            throw new IllegalArgumentException(message);
                        }
                        final double compressionRatio = getConfiguration().getCompressionRatio();
                        final String compressionType = getConfiguration().getCompressionScheme();

                        reTile(eventFile, tiledTiffFile, compressionRatio, compressionType,
                                getConfiguration().getTileW(), getConfiguration().getTileH(),
                                getConfiguration().isForceToBigTiff());

                        String extension = FilenameUtils.getExtension(eventFile.getName());
                        if (!extension.contains("tif")) {
                            extension = "tif";
                        }
                        final String outputFileName = FilenameUtils.getFullPath(eventFile.getAbsolutePath())
                                + FilenameUtils.getBaseName(eventFile.getName()) + "." + extension;
                        final File outputFile = new File(outputFileName);
                        // do we need to remove the input?
                        FileUtils.copyFile(tiledTiffFile, outputFile);
                        FileUtils.deleteQuietly(tiledTiffFile);

                        if (LOGGER.isInfoEnabled())
                            LOGGER.info("SUCCESSFULLY completed work on: " + event.getSource());
                        listenerForwarder.setProgress(100);
                        ret.add(new FileSystemEvent(outputFile, eventType));

                    } catch (UnsupportedOperationException uoe) {
                        listenerForwarder.failed(uoe);
                        if (LOGGER.isWarnEnabled())
                            LOGGER.warn(uoe.getLocalizedMessage(), uoe);
                        continue;
                    } catch (IOException ioe) {
                        listenerForwarder.failed(ioe);
                        if (LOGGER.isWarnEnabled())
                            LOGGER.warn(ioe.getLocalizedMessage(), ioe);
                        continue;
                    } catch (IllegalArgumentException iae) {
                        listenerForwarder.failed(iae);
                        if (LOGGER.isWarnEnabled())
                            LOGGER.warn(iae.getLocalizedMessage(), iae);
                        continue;
                    } finally {

                        listenerForwarder.setProgress((100) / ((events.size() != 0) ? events.size() : 1));
                        listenerForwarder.progressing();
                    }
                }
            } else {
                final String message = "The passed file event refers to a not existent "
                        + "or not readable/writeable file! File: " + eventFile.getAbsolutePath();
                if (LOGGER.isWarnEnabled())
                    LOGGER.warn(message);
                final IllegalArgumentException iae = new IllegalArgumentException(message);
                listenerForwarder.failed(iae);
            }
        } // end while
        listenerForwarder.completed();

        // return
        if (ret.size() > 0) {
            events.clear();
            return ret;
        } else {
            /*
             * If here: we got an error, no files are set to be returned, so the
             * input queue is returned
             */
            return events;
        }
    } catch (Exception t) {
        if (LOGGER.isErrorEnabled())
            LOGGER.error(t.getLocalizedMessage(), t);
        final ActionException exc = new ActionException(this, t.getLocalizedMessage(), t);
        listenerForwarder.failed(exc);
        throw exc;
    }
}
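
Stripped of the retiling work, the queue handling above follows a drain-and-collect pattern: remove events from the input queue and add each successful result to a fresh output queue, skipping failures. A distilled sketch of that pattern (the String payload and transform stand in for the real event type and per-event work):

import java.util.LinkedList;
import java.util.Queue;

public class DrainAndCollect {
    // Drains the input queue; failed elements are skipped, successes are collected.
    static Queue<String> process(Queue<String> events) {
        Queue<String> ret = new LinkedList<>();
        while (!events.isEmpty()) {
            String event = events.remove(); // safe: emptiness is checked by the loop condition
            try {
                ret.add(transform(event)); // stand-in for the real per-event work
            } catch (RuntimeException ex) {
                // mirror the original: record the failure and move on
                System.err.println("skipping " + event + ": " + ex.getMessage());
            }
        }
        return ret;
    }

    static String transform(String event) {
        return event.toUpperCase();
    }

    public static void main(String[] args) {
        Queue<String> in = new LinkedList<>();
        in.add("a.tif");
        in.add("b.tif");
        System.out.println(process(in)); // [A.TIF, B.TIF]
    }
}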

From source file: edu.umn.cs.spatialHadoop.indexing.RTree.java

/**
 * Builds the RTree given a serialized list of elements. It uses the given
 * stockObject to deserialize these elements using
 * {@link TextSerializable#fromText(Text)} and build the tree. Also writes the
 * created tree to the disk directly.
 * 
 * @param element_bytes
 *          - serialization of all elements separated by new lines
 * @param offset
 *          - offset of the first byte to use in element_bytes
 * @param len
 *          - number of bytes to use in element_bytes
 * @param degree
 *          - Degree of the R-tree to build in terms of number of children per
 *          node
 * @param dataOut
 *          - output stream to write the result to.
 * @param fast_sort
 *          - setting this to <code>true</code> allows the method to run
 *          faster by materializing the offset of each element in the list
 *          which speeds up the comparison. However, this requires an
 *          additional 16 bytes per element. So, for each 1M elements, the
 *          method will require an additional 16 M bytes (approximately).
 */
public static void bulkLoadWrite(final byte[] element_bytes, final int offset, final int len, final int degree,
        DataOutput dataOut, final Shape stockObject, final boolean fast_sort) {
    try {

        int elementCount = 0;
        // Count number of elements in the given text
        int i_start = offset;
        final Text line = new Text();
        while (i_start < offset + len) {
            int i_end = skipToEOL(element_bytes, i_start);
            // Extract the line without end of line character
            line.set(element_bytes, i_start, i_end - i_start - 1);
            stockObject.fromText(line);
            elementCount++;
            i_start = i_end;
        }
        LOG.info("Bulk loading an RTree with " + elementCount + " elements");

        // It turns out that findBestDegree returns the best degree when the whole
        // tree is loaded into memory for processing. However, as current algorithms
        // process the tree while it's on disk, a higher degree should be selected
        // such that a node fits in one file block (assumed to be 4K).
        //final int degree = findBestDegree(bytesAvailable, elementCount);

        int height = Math.max(1, (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
        int leafNodeCount = (int) Math.pow(degree, height - 1);
        if (elementCount < 2 * leafNodeCount && height > 1) {
            height--;
            leafNodeCount = (int) Math.pow(degree, height - 1);
        }
        int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
        int nonLeafNodeCount = nodeCount - leafNodeCount;

        // Keep track of the offset of each element in the text
        final int[] offsets = new int[elementCount];
        final double[] xs = fast_sort ? new double[elementCount] : null;
        final double[] ys = fast_sort ? new double[elementCount] : null;

        i_start = offset;
        line.clear();
        for (int i = 0; i < elementCount; i++) {
            offsets[i] = i_start;
            int i_end = skipToEOL(element_bytes, i_start);
            if (xs != null) {
                // Extract the line without the end of line character
                line.set(element_bytes, i_start, i_end - i_start - 1);
                stockObject.fromText(line);
                // Sample center of the shape
                xs[i] = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                ys[i] = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
            }
            i_start = i_end;
        }

        /**A struct to store information about a split*/
        class SplitStruct extends Rectangle {
            /**Start and end index for this split*/
            int index1, index2;
            /**Direction of this split*/
            byte direction;
            /**Index of first element on disk*/
            int offsetOfFirstElement;

            static final byte DIRECTION_X = 0;
            static final byte DIRECTION_Y = 1;

            SplitStruct(int index1, int index2, byte direction) {
                this.index1 = index1;
                this.index2 = index2;
                this.direction = direction;
            }

            @Override
            public void write(DataOutput out) throws IOException {
                out.writeInt(offsetOfFirstElement);
                super.write(out);
            }

            void partition(Queue<SplitStruct> toBePartitioned) {
                IndexedSortable sortableX;
                IndexedSortable sortableY;

                if (fast_sort) {
                    // Use materialized xs[] and ys[] to do the comparisons
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (xs[i] < xs[j])
                                return -1;
                            if (xs[i] > xs[j])
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (ys[i] < ys[j])
                                return -1;
                            if (ys[i] > ys[j])
                                return 1;
                            return 0;
                        }
                    };
                } else {
                    // No materialized xs and ys. Always deserialize objects to compare
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            // Get end of line
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double xi = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double xj = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                            if (xi < xj)
                                return -1;
                            if (xi > xj)
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double yi = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double yj = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
                            if (yi < yj)
                                return -1;
                            if (yi > yj)
                                return 1;
                            return 0;
                        }
                    };
                }

                final IndexedSorter sorter = new QuickSort();

                final IndexedSortable[] sortables = new IndexedSortable[2];
                sortables[SplitStruct.DIRECTION_X] = sortableX;
                sortables[SplitStruct.DIRECTION_Y] = sortableY;

                sorter.sort(sortables[direction], index1, index2);

                // Partition into degree partitions (equally) and
                // create a SplitStruct for each partition
                int i1 = index1;
                for (int iSplit = 0; iSplit < degree; iSplit++) {
                    int i2 = index1 + (index2 - index1) * (iSplit + 1) / degree;
                    SplitStruct newSplit = new SplitStruct(i1, i2, (byte) (1 - direction));
                    toBePartitioned.add(newSplit);
                    i1 = i2;
                }
            }
        }

        // All nodes stored in level-order traversal
        Vector<SplitStruct> nodes = new Vector<SplitStruct>();
        final Queue<SplitStruct> toBePartitioned = new LinkedList<SplitStruct>();
        toBePartitioned.add(new SplitStruct(0, elementCount, SplitStruct.DIRECTION_X));

        while (!toBePartitioned.isEmpty()) {
            SplitStruct split = toBePartitioned.poll();
            if (nodes.size() < nonLeafNodeCount) {
                // This is a non-leaf
                split.partition(toBePartitioned);
            }
            nodes.add(split);
        }

        if (nodes.size() != nodeCount) {
            throw new RuntimeException(
                    "Expected node count: " + nodeCount + ". Real node count: " + nodes.size());
        }

        // Now we have our data sorted in the required order. Start building
        // the tree.
        // Store the offset of each leaf node in the tree
        FSDataOutputStream fakeOut = null;
        try {
            fakeOut = new FSDataOutputStream(new java.io.OutputStream() {
                // Null output stream
                @Override
                public void write(int b) throws IOException {
                    // Do nothing
                }

                @Override
                public void write(byte[] b, int off, int len) throws IOException {
                    // Do nothing
                }

                @Override
                public void write(byte[] b) throws IOException {
                    // Do nothing
                }
            }, null, TreeHeaderSize + nodes.size() * NodeSize);
            for (int i_leaf = nonLeafNodeCount, i = 0; i_leaf < nodes.size(); i_leaf++) {
                nodes.elementAt(i_leaf).offsetOfFirstElement = (int) fakeOut.getPos();
                if (i != nodes.elementAt(i_leaf).index1)
                    throw new RuntimeException();
                double x1, y1, x2, y2;

                // Initialize MBR to first object
                int eol = skipToEOL(element_bytes, offsets[i]);
                fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
                line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                stockObject.fromText(line);
                Rectangle mbr = stockObject.getMBR();
                x1 = mbr.x1;
                y1 = mbr.y1;
                x2 = mbr.x2;
                y2 = mbr.y2;
                i++;

                while (i < nodes.elementAt(i_leaf).index2) {
                    eol = skipToEOL(element_bytes, offsets[i]);
                    fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
                    line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                    stockObject.fromText(line);
                    mbr = stockObject.getMBR();
                    if (mbr.x1 < x1)
                        x1 = mbr.x1;
                    if (mbr.y1 < y1)
                        y1 = mbr.y1;
                    if (mbr.x2 > x2)
                        x2 = mbr.x2;
                    if (mbr.y2 > y2)
                        y2 = mbr.y2;
                    i++;
                }
                nodes.elementAt(i_leaf).set(x1, y1, x2, y2);
            }

        } finally {
            if (fakeOut != null)
                fakeOut.close();
        }

        // Calculate MBR and offsetOfFirstElement for non-leaves
        for (int i_node = nonLeafNodeCount - 1; i_node >= 0; i_node--) {
            int i_first_child = i_node * degree + 1;
            nodes.elementAt(i_node).offsetOfFirstElement = nodes.elementAt(i_first_child).offsetOfFirstElement;
            int i_child = 0;
            Rectangle mbr;
            mbr = nodes.elementAt(i_first_child + i_child);
            double x1 = mbr.x1;
            double y1 = mbr.y1;
            double x2 = mbr.x2;
            double y2 = mbr.y2;
            i_child++;

            while (i_child < degree) {
                mbr = nodes.elementAt(i_first_child + i_child);
                if (mbr.x1 < x1)
                    x1 = mbr.x1;
                if (mbr.y1 < y1)
                    y1 = mbr.y1;
                if (mbr.x2 > x2)
                    x2 = mbr.x2;
                if (mbr.y2 > y2)
                    y2 = mbr.y2;
                i_child++;
            }
            nodes.elementAt(i_node).set(x1, y1, x2, y2);
        }

        // Start writing the tree
        // write tree header (including size)
        // Total tree size. (== Total bytes written - 8 bytes for the size itself)
        dataOut.writeInt(TreeHeaderSize + NodeSize * nodeCount + len);
        // Tree height
        dataOut.writeInt(height);
        // Degree
        dataOut.writeInt(degree);
        dataOut.writeInt(elementCount);

        // write nodes
        for (SplitStruct node : nodes) {
            node.write(dataOut);
        }
        // write elements
        for (int element_i = 0; element_i < elementCount; element_i++) {
            int eol = skipToEOL(element_bytes, offsets[element_i]);
            dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
        }

    } catch (IOException e) {
        e.printStackTrace();
    }
}
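
The writer above emits a fixed header of four ints (total tree size, height, degree, element count), followed by the level-order node records and the raw elements. As a rough companion sketch, the header can be read back with a plain DataInputStream, assuming the stream is positioned at the start of the tree record:

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class RTreeHeaderReader {
    public static void main(String[] args) throws IOException {
        try (DataInputStream in = new DataInputStream(new FileInputStream(args[0]))) {
            int treeSize = in.readInt();     // total tree size in bytes
            int height = in.readInt();       // tree height
            int degree = in.readInt();       // children per node
            int elementCount = in.readInt(); // number of indexed elements
            System.out.printf("size=%d height=%d degree=%d elements=%d%n",
                    treeSize, height, degree, elementCount);
        }
    }
}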

From source file: com.xpn.xwiki.store.migration.hibernate.R40000XWIKI6990DataMigration.java

@Override
public void hibernateMigrate() throws DataMigrationException, XWikiException {
    final Map<Long, Long> docs = new HashMap<Long, Long>();
    final List<String> customMappedClasses = new ArrayList<String>();
    final Map<Long, Long> objs = new HashMap<Long, Long>();
    final Queue<Map<Long, Long>> stats = new LinkedList<Map<Long, Long>>();

    // Get ids conversion list
    getStore().executeRead(getXWikiContext(), new HibernateCallback<Object>() {
        private void fillDocumentIdConversion(Session session, Map<Long, Long> map) {
            String database = getXWikiContext().getDatabase();
            @SuppressWarnings("unchecked")
            List<Object[]> results = session
                    .createQuery("select doc.id, doc.space, doc.name, doc.defaultLanguage, doc.language from "
                            + XWikiDocument.class.getName() + " as doc")
                    .list();

            for (Object[] result : results) {
                long oldId = (Long) result[0];
                String space = (String) result[1];
                String name = (String) result[2];
                String defaultLanguage = (String) result[3];
                String language = (String) result[4];

                // Use a real document, since we need the language to be appended.
                // TODO: Change this when the locale is integrated
                XWikiDocument doc = new XWikiDocument(new DocumentReference(database, space, name));
                doc.setDefaultLanguage(defaultLanguage);
                doc.setLanguage(language);
                long newId = doc.getId();

                if (oldId != newId) {
                    map.put(oldId, newId);
                }
            }

            logProgress("Retrieved %d document IDs to be converted.", map.size());
        }

        private void fillObjectIdConversion(Session session, Map<Long, Long> map) {
            @SuppressWarnings("unchecked")
            List<Object[]> results = session
                    .createQuery("select obj.id, obj.name, obj.className, obj.number from "
                            + BaseObject.class.getName() + " as obj")
                    .list();
            for (Object[] result : results) {
                long oldId = (Long) result[0];
                String docName = (String) result[1];
                String className = (String) result[2];
                Integer number = (Integer) result[3];

                BaseObjectReference objRef = new BaseObjectReference(
                        R40000XWIKI6990DataMigration.this.resolver.resolve(className), number,
                        R40000XWIKI6990DataMigration.this.resolver.resolve(docName));
                long newId = Util.getHash(R40000XWIKI6990DataMigration.this.serializer.serialize(objRef));

                if (oldId != newId) {
                    map.put(oldId, newId);
                }
            }

            logProgress("Retrieved %d object IDs to be converted.", map.size());
        }

        private void fillCustomMappingMap(XWikiHibernateStore store, XWikiContext context)
                throws XWikiException, DataMigrationException {
            processCustomMappings(store, new CustomMappingCallback() {
                @Override
                public void processCustomMapping(XWikiHibernateStore store, String name, String mapping,
                        boolean hasDynamicMapping) throws XWikiException {
                    if (INTERNAL.equals(mapping) || hasDynamicMapping) {
                        customMappedClasses.add(name);
                    }
                }
            }, context);

            logProgress("Retrieved %d custom mapped classes to be processed.", customMappedClasses.size());
        }

        private void fillStatsConversionMap(Session session, Class<?> klass, Map<Long, Long> map) {
            @SuppressWarnings("unchecked")
            List<Object[]> results = session
                    .createQuery(
                            "select stats.id, stats.name, stats.number from " + klass.getName() + " as stats")
                    .list();
            for (Object[] result : results) {
                long oldId = (Long) result[0];
                String statsName = (String) result[1];
                Integer number = (Integer) result[2];

                // Do not try to convert broken records which would cause duplicated ids
                if (!statsName.startsWith(".") && !statsName.endsWith(".")) {
                    long newId = R40000XWIKI6990DataMigration.this.statsIdComputer.getId(statsName, number);

                    if (oldId != newId) {
                        map.put(oldId, newId);
                    }
                } else {
                    logger.debug("Skipping invalid statistical entry [{}] with name [{}]", oldId, statsName);
                }
            }

            String klassName = klass.getName().substring(klass.getName().lastIndexOf('.') + 1);
            logProgress("Retrieved %d %s statistics IDs to be converted.", map.size(),
                    klassName.substring(0, klassName.length() - 5).toLowerCase());
        }

        @Override
        public Object doInHibernate(Session session) throws XWikiException {
            try {
                fillDocumentIdConversion(session, docs);

                fillObjectIdConversion(session, objs);

                // Retrieve custom mapped classes
                if (getStore() instanceof XWikiHibernateStore) {
                    fillCustomMappingMap((XWikiHibernateStore) getStore(), getXWikiContext());
                }

                // Retrieve statistics ID conversion
                for (Class<?> statsClass : STATS_CLASSES) {
                    Map<Long, Long> map = new HashMap<Long, Long>();
                    fillStatsConversionMap(session, statsClass, map);
                    stats.add(map);
                }

                session.clear();
            } catch (Exception e) {
                throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                        XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
            }
            return null;
        }
    });

    // Cache the configuration and the dialect
    configuration = getStore().getConfiguration();
    dialect = configuration.buildSettings().getDialect();

    // Check configuration for safe mode
    XWikiConfig config = getXWikiContext().getWiki().getConfig();
    /* True if migration should use safe but slower non-bulk native updates. */
    boolean useSafeUpdates = "1"
            .equals(config.getProperty("xwiki.store.migration." + this.getName() + ".safemode", "0"));

    // Use safe mode if the database has no Hibernate-supported temporary tables
    useSafeUpdates = useSafeUpdates || !configuration.buildSettings().getDialect().supportsTemporaryTables();

    // Proceed to document ID conversion
    if (!docs.isEmpty()) {
        if (!useSafeUpdates) {
            // Pairs of (table, key) for tables that need manual updates
            final List<String[]> tableToProcess = new ArrayList<String[]>();

            for (Class<?> docClass : DOC_CLASSES) {
                tableToProcess.addAll(getAllTableToProcess(docClass.getName()));
            }
            for (Class<?> docClass : DOCLINK_CLASSES) {
                tableToProcess.addAll(getAllTableToProcess(docClass.getName(), "docId"));
            }

            logProgress("Converting %d document IDs in %d tables...", docs.size(), tableToProcess.size());

            final long[] times = new long[tableToProcess.size() + 1];
            try {
                getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
                    @Override
                    public void doBulkIdUpdate() {
                        times[timer++] += insertIdUpdates(docs);

                        for (String[] table : tableToProcess) {
                            times[timer++] += executeSqlIdUpdate(table[0], table[1]);
                        }
                    }
                });
            } catch (Exception e) {
                throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                        XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
            }
            if (logger.isDebugEnabled()) {
                int timer = 0;
                logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);

                for (String[] table : tableToProcess) {
                    logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
                }
            }
        } else {
            final List<String[]> docsColl = new ArrayList<String[]>();
            for (Class<?> docClass : DOC_CLASSES) {
                docsColl.addAll(getCollectionProperties(getClassMapping(docClass.getName())));
            }
            for (Class<?> docClass : DOCLINK_CLASSES) {
                docsColl.addAll(getCollectionProperties(getClassMapping(docClass.getName())));
            }

            logProgress("Converting %d document IDs in %d tables and %d collection tables...", docs.size(),
                    DOC_CLASSES.length + DOCLINK_CLASSES.length, docsColl.size());

            final long[] times = new long[DOC_CLASSES.length + DOCLINK_CLASSES.length + docsColl.size()];
            convertDbId(docs, new AbstractIdConversionHibernateCallback() {
                @Override
                public void doSingleUpdate() {
                    for (String[] coll : docsColl) {
                        times[timer++] += executeSqlIdUpdate(coll[0], coll[1]);
                    }

                    for (Class<?> doclinkClass : DOCLINK_CLASSES) {
                        times[timer++] += executeIdUpdate(doclinkClass, DOCID);
                    }
                    times[timer++] += executeIdUpdate(XWikiLink.class, DOCID);
                    times[timer++] += executeIdUpdate(XWikiRCSNodeInfo.class, ID + '.' + DOCID);
                    times[timer++] += executeIdUpdate(XWikiDocument.class, ID);
                }
            });
            if (logger.isDebugEnabled()) {
                int timer = 0;
                for (String[] coll : docsColl) {
                    logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
                }
                for (Class<?> doclinkClass : DOCLINK_CLASSES) {
                    logger.debug("Time elapsed for {} class: {} ms", doclinkClass.getName(),
                            times[timer++] / 1000000);
                }
                logger.debug("Time elapsed for {} class: {} ms", XWikiRCSNodeInfo.class.getName(),
                        times[timer++] / 1000000);
                logger.debug("Time elapsed for {} class: {} ms", XWikiDocument.class.getName(),
                        times[timer++] / 1000000);
            }
        }
        logProgress("All document IDs has been converted successfully.");
    } else {
        logProgress("No document IDs to convert, skipping.");
    }

    // Proceed to object ID conversion
    if (!objs.isEmpty()) {
        if (!useSafeUpdates) {
            // Pairs of (table, key) for tables that need manual updates
            final List<String[]> tableToProcess = new ArrayList<String[]>();

            PersistentClass objklass = getClassMapping(BaseObject.class.getName());
            tableToProcess.addAll(getCollectionProperties(objklass));

            for (Class<?> propertyClass : PROPERTY_CLASS) {
                tableToProcess.addAll(getAllTableToProcess(propertyClass.getName()));
            }
            for (String customClass : customMappedClasses) {
                tableToProcess.addAll(getAllTableToProcess(customClass));
            }
            tableToProcess.add(new String[] { objklass.getTable().getName(), getKeyColumnName(objklass) });

            logProgress("Converting %d object IDs in %d tables...", objs.size(), tableToProcess.size());

            final long[] times = new long[tableToProcess.size() + 1];
            try {
                getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
                    @Override
                    public void doBulkIdUpdate() {
                        times[timer++] += insertIdUpdates(objs);

                        for (String[] table : tableToProcess) {
                            times[timer++] += executeSqlIdUpdate(table[0], table[1]);
                        }
                    }
                });
            } catch (Exception e) {
                throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                        XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
            }
            if (logger.isDebugEnabled()) {
                int timer = 0;
                logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);

                for (String[] table : tableToProcess) {
                    logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
                }
            }
        } else {
            // Names of classes that need manual updates
            final List<String> classToProcess = new ArrayList<String>();
            // Names of custom classes that need manual updates
            final List<String> customClassToProcess = new ArrayList<String>();
            // Pairs of (table, key) for collection tables that need manual updates
            final List<String[]> objsColl = new ArrayList<String[]>();

            objsColl.addAll(getCollectionProperties(getClassMapping(BaseObject.class.getName())));
            for (Class<?> propertyClass : PROPERTY_CLASS) {
                String className = propertyClass.getName();
                PersistentClass klass = getClassMapping(className);

                // Add collection tables that will not be updated by cascaded updates
                objsColl.addAll(getCollectionProperties(klass));

                // Skip classes that will be updated by cascaded updates
                if (!this.fkTables.contains(klass.getTable())) {
                    classToProcess.add(className);
                }
            }
            for (String customClass : customMappedClasses) {
                PersistentClass klass = getClassMapping(customClass);

                // Add collection tables that will not be updated by cascaded updates
                objsColl.addAll(getCollectionProperties(klass));

                // Skip classes that will be updated by cascaded updates
                if (!this.fkTables.contains(klass.getTable())) {
                    customClassToProcess.add(customClass);
                }
            }

            logProgress(
                    "Converting %d object IDs in %d tables, %d custom mapped tables and %d collection tables...",
                    objs.size(), classToProcess.size() + 1, customClassToProcess.size(), objsColl.size());

            final long[] times = new long[classToProcess.size() + 1 + customClassToProcess.size()
                    + objsColl.size()];
            convertDbId(objs, new AbstractIdConversionHibernateCallback() {
                @Override
                public void doSingleUpdate() {
                    for (String[] coll : objsColl) {
                        times[timer++] += executeSqlIdUpdate(coll[0], coll[1]);
                    }

                    for (String customMappedClass : customClassToProcess) {
                        times[timer++] += executeIdUpdate(customMappedClass, ID);
                    }

                    for (String propertyClass : classToProcess) {
                        times[timer++] += executeIdUpdate(propertyClass, IDID);
                    }

                    times[timer++] += executeIdUpdate(BaseObject.class, ID);
                }
            });
            if (logger.isDebugEnabled()) {
                int timer = 0;
                for (String[] coll : objsColl) {
                    logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
                }
                for (String customMappedClass : customClassToProcess) {
                    logger.debug("Time elapsed for {} custom table: {} ms", customMappedClass,
                            times[timer++] / 1000000);
                }
                for (String propertyClass : classToProcess) {
                    logger.debug("Time elapsed for {} property table: {} ms", propertyClass,
                            times[timer++] / 1000000);
                }
                logger.debug("Time elapsed for {} class: {} ms", BaseObject.class.getName(),
                        times[timer++] / 1000000);
            }
        }
        logProgress("All object IDs has been converted successfully.");
    } else {
        logProgress("No object IDs to convert, skipping.");
    }

    // Proceed to statistics ID conversion
    for (final Class<?> statsClass : STATS_CLASSES) {

        Map<Long, Long> map = stats.poll();
        String klassName = statsClass.getName().substring(statsClass.getName().lastIndexOf('.') + 1);
        klassName = klassName.substring(0, klassName.length() - 5).toLowerCase();

        if (!map.isEmpty()) {
            if (!useSafeUpdates) {
                final List<String[]> tableToProcess = new ArrayList<String[]>();
                final Map<Long, Long> statids = map;

                PersistentClass statklass = getClassMapping(statsClass.getName());
                tableToProcess.addAll(getCollectionProperties(statklass));
                tableToProcess
                        .add(new String[] { statklass.getTable().getName(), getKeyColumnName(statklass) });

                logProgress("Converting %d %s statistics IDs in %d tables...", map.size(), klassName,
                        tableToProcess.size());

                final long[] times = new long[tableToProcess.size() + 1];
                try {
                    getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
                        @Override
                        public void doBulkIdUpdate() {
                            times[timer++] += insertIdUpdates(statids);

                            for (String[] table : tableToProcess) {
                                times[timer++] += executeSqlIdUpdate(table[0], table[1]);
                            }
                        }
                    });
                } catch (Exception e) {
                    throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                            XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
                }
                if (logger.isDebugEnabled()) {
                    int timer = 0;
                    logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);

                    for (String[] table : tableToProcess) {
                        logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
                    }
                }
            } else {
                final List<String[]> statsColl = new ArrayList<String[]>();
                statsColl.addAll(getCollectionProperties(getClassMapping(statsClass.getName())));

                logProgress("Converting %d %s statistics IDs in 1 tables and %d collection tables...",
                        map.size(), klassName, statsColl.size());

                final long[] times = new long[statsColl.size() + 1];
                convertDbId(map, new AbstractIdConversionHibernateCallback() {
                    @Override
                    public void doSingleUpdate() {
                        for (String[] coll : statsColl) {
                            times[timer++] += executeSqlIdUpdate(coll[0], coll[1]);
                        }
                        times[timer++] += executeIdUpdate(statsClass, ID);
                    }
                });
                if (logger.isDebugEnabled()) {
                    int timer = 0;
                    for (String[] coll : statsColl) {
                        logger.debug("Time elapsed for {} collection: {} ms", coll[0],
                                times[timer++] / 1000000);
                    }
                    logger.debug("Time elapsed for {} class: {} ms", statsClass.getName(),
                            times[timer++] / 1000000);
                }
            }
            logProgress("All %s statistics IDs has been converted successfully.", klassName);
        } else {
            logProgress("No %s statistics IDs to convert, skipping.", klassName);
        }
    }
}
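
The migration above also shows the fill/drain side of Queue: one conversion map per statistics class is appended with add() while the IDs are collected, and the maps are later taken back in the same STATS_CLASSES order with poll(). A minimal, self-contained sketch of that FIFO pattern follows; the class name and ID values are hypothetical, not taken from the XWiki source.

import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;

public class FillDrainSketch {
    public static void main(String[] args) {
        // Unbounded FIFO queue: add() appends at the tail and always succeeds.
        Queue<Map<Long, Long>> stats = new LinkedList<Map<Long, Long>>();

        // Fill phase: one oldId -> newId map per (hypothetical) statistics class.
        for (long classIndex = 0; classIndex < 3; classIndex++) {
            Map<Long, Long> map = new HashMap<Long, Long>();
            map.put(classIndex, classIndex + 100);
            stats.add(map);
        }

        // Drain phase: poll() returns the maps in insertion order.
        Map<Long, Long> map;
        while ((map = stats.poll()) != null) {
            System.out.println(map);
        }
    }
}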

From source file:org.apache.hadoop.tools.rumen.Folder.java

public int run() throws IOException {
    class JobEntryComparator implements Comparator<Pair<LoggedJob, JobTraceReader>> {
        public int compare(Pair<LoggedJob, JobTraceReader> p1, Pair<LoggedJob, JobTraceReader> p2) {
            LoggedJob j1 = p1.first();
            LoggedJob j2 = p2.first();

            return (j1.getSubmitTime() < j2.getSubmitTime()) ? -1
                    : (j1.getSubmitTime() == j2.getSubmitTime()) ? 0 : 1;
        }
    }

    // We initialize an empty heap so that if an error occurs before the real
    // heap is established, the finally block still runs cleanly.
    Queue<Pair<LoggedJob, JobTraceReader>> heap = new PriorityQueue<Pair<LoggedJob, JobTraceReader>>();

    try {
        LoggedJob job = reader.nextJob();

        if (job == null) {
            LOG.error("The job trace is empty");

            return EMPTY_JOB_TRACE;
        }

        // If a starts-after time is specified, skip jobs until we reach the
        // starting time limit.
        if (startsAfter > 0) {
            LOG.info("starts-after time is specified. Initial job submit time : " + job.getSubmitTime());

            long approximateTime = job.getSubmitTime() + startsAfter;
            job = reader.nextJob();
            long skippedCount = 0;
            while (job != null && job.getSubmitTime() < approximateTime) {
                job = reader.nextJob();
                skippedCount++;
            }

            LOG.debug("Considering jobs with submit time greater than " + startsAfter + " ms. Skipped "
                    + skippedCount + " jobs.");

            if (job == null) {
                LOG.error("No more jobs to process in the trace with 'starts-after'" + " set to " + startsAfter
                        + "ms.");
                return EMPTY_JOB_TRACE;
            }
            LOG.info("The first job has a submit time of " + job.getSubmitTime());
        }

        firstJobSubmitTime = job.getSubmitTime();
        long lastJobSubmitTime = firstJobSubmitTime;

        int numberJobs = 0;

        long currentIntervalEnd = Long.MIN_VALUE;

        Path nextSegment = null;
        Outputter<LoggedJob> tempGen = null;

        if (debug) {
            LOG.debug("The first job has a submit time of " + firstJobSubmitTime);
        }

        final Configuration conf = getConf();

        try {
            // At the top of this loop, skewBuffer has at most
            // skewBufferLength entries.
            while (job != null) {
                final Random tempNameGenerator = new Random();

                lastJobSubmitTime = job.getSubmitTime();

                ++numberJobs;

                if (job.getSubmitTime() >= currentIntervalEnd) {
                    if (tempGen != null) {
                        tempGen.close();
                    }

                    nextSegment = null;
                    for (int i = 0; i < 3 && nextSegment == null; ++i) {
                        try {
                            nextSegment = new Path(tempDir,
                                    "segment-" + tempNameGenerator.nextLong() + ".json.gz");

                            if (debug) {
                                LOG.debug("The next segment name is " + nextSegment);
                            }

                            FileSystem fs = nextSegment.getFileSystem(conf);

                            try {
                                if (!fs.exists(nextSegment)) {
                                    break;
                                }

                                continue;
                            } catch (IOException e) {
                                // no code -- file did not already exist
                            }
                        } catch (IOException e) {
                            // no code -- the file exists now or the directory
                            // is bad; we retry up to three times.
                        }
                    }

                    if (nextSegment == null) {
                        throw new RuntimeException("Failed to create a new file!");
                    }

                    if (debug) {
                        LOG.debug("Creating " + nextSegment + " for a job with a submit time of "
                                + job.getSubmitTime());
                    }

                    deletees.add(nextSegment);

                    tempPaths.add(nextSegment);

                    tempGen = new DefaultOutputter<LoggedJob>();
                    tempGen.init(nextSegment, conf);

                    long currentIntervalNumber = (job.getSubmitTime() - firstJobSubmitTime) / inputCycle;

                    currentIntervalEnd = firstJobSubmitTime + ((currentIntervalNumber + 1) * inputCycle);
                }

                // the temp files contain unadjusted times, but each temp file's
                // content is in the same input cycle interval.
                if (tempGen != null) {
                    tempGen.output(job);
                }

                job = reader.nextJob();
            }
        } catch (DeskewedJobTraceReader.OutOfOrderException e) {
            return OUT_OF_ORDER_JOBS;
        } finally {
            if (tempGen != null) {
                tempGen.close();
            }
        }

        if (lastJobSubmitTime <= firstJobSubmitTime) {
            LOG.error("All of your job[s] have the same submit time." + "  Please just use your input file.");

            return ALL_JOBS_SIMULTANEOUS;
        }

        double submitTimeSpan = lastJobSubmitTime - firstJobSubmitTime;

        LOG.warn("Your input trace spans " + (lastJobSubmitTime - firstJobSubmitTime) + " ticks.");

        double foldingRatio = submitTimeSpan * (numberJobs + 1) / numberJobs / inputCycle;

        if (debug) {
            LOG.warn("run: submitTimeSpan = " + submitTimeSpan + ", numberJobs = " + numberJobs
                    + ", inputCycle = " + inputCycle);
        }

        if (reader.neededSkewBufferSize() > 0) {
            LOG.warn("You needed a -skew-buffer-length of " + reader.neededSkewBufferSize()
                    + " but no more, for this input.");
        }

        double tProbability = timeDilation * concentration / foldingRatio;

        if (debug) {
            LOG.warn("run: timeDilation = " + timeDilation + ", concentration = " + concentration
                    + ", foldingRatio = " + foldingRatio);
            LOG.warn("The transcription probability is " + tProbability);
        }

        transcriptionRateInteger = (int) Math.floor(tProbability);
        transcriptionRateFraction = tProbability - Math.floor(tProbability);

        // Now read all the inputs in parallel
        heap = new PriorityQueue<Pair<LoggedJob, JobTraceReader>>(tempPaths.size(), new JobEntryComparator());

        for (Path tempPath : tempPaths) {
            JobTraceReader thisReader = new JobTraceReader(tempPath, conf);

            closees.add(thisReader);

            LoggedJob streamFirstJob = thisReader.getNext();

            long thisIndex = (streamFirstJob.getSubmitTime() - firstJobSubmitTime) / inputCycle;

            if (debug) {
                LOG.debug("A job with submit time of " + streamFirstJob.getSubmitTime() + " is in interval # "
                        + thisIndex);
            }

            adjustJobTimes(streamFirstJob);

            if (debug) {
                LOG.debug("That job's submit time is adjusted to " + streamFirstJob.getSubmitTime());
            }

            heap.add(new Pair<LoggedJob, JobTraceReader>(streamFirstJob, thisReader));
        }

        Pair<LoggedJob, JobTraceReader> next = heap.poll();

        while (next != null) {
            maybeOutput(next.first());

            if (debug) {
                LOG.debug("The most recent job has an adjusted submit time of " + next.first().getSubmitTime());
                LOG.debug(" Its replacement in the heap will come from input engine " + next.second());
            }

            LoggedJob replacement = next.second().getNext();

            if (replacement == null) {
                next.second().close();

                if (debug) {
                    LOG.debug("That input engine is depleted.");
                }
            } else {
                adjustJobTimes(replacement);

                if (debug) {
                    LOG.debug("The replacement has an adjusted submit time of " + replacement.getSubmitTime());
                }

                heap.add(new Pair<LoggedJob, JobTraceReader>(replacement, next.second()));
            }

            next = heap.poll();
        }
    } finally {
        IOUtils.cleanup(null, reader);
        if (outGen != null) {
            outGen.close();
        }
        for (Pair<LoggedJob, JobTraceReader> heapEntry : heap) {
            heapEntry.second().close();
        }
        for (Closeable closee : closees) {
            closee.close();
        }
        if (!debug) {
            Configuration conf = getConf();

            for (Path deletee : deletees) {
                FileSystem fs = deletee.getFileSystem(conf);

                try {
                    fs.delete(deletee, false);
                } catch (IOException e) {
                    // no code
                }
            }
        }
    }

    return 0;
}
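
run() above is a k-way merge: the priority queue is seeded with the first job of every segment via heap.add(), and each heap.poll() is followed by an add() of the next job from the same reader until that stream is depleted. Below is a minimal sketch of the same seed/poll/refill pattern over sorted integer streams; all names are hypothetical, and int[] pairs stand in for Pair<LoggedJob, JobTraceReader>.

import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.PriorityQueue;
import java.util.Queue;

public class KWayMergeSketch {
    public static void main(String[] args) {
        List<Iterator<Integer>> streams = Arrays.asList(
                Arrays.asList(1, 4, 7).iterator(),
                Arrays.asList(2, 5, 8).iterator(),
                Arrays.asList(3, 6, 9).iterator());

        // Heap entries are [value, streamIndex], ordered by value, just as
        // JobEntryComparator orders (job, reader) pairs by submit time.
        Queue<int[]> heap = new PriorityQueue<>(Comparator.comparingInt(e -> e[0]));

        // Seed the heap with the head of each sorted stream.
        for (int i = 0; i < streams.size(); i++) {
            if (streams.get(i).hasNext()) {
                heap.add(new int[] { streams.get(i).next(), i });
            }
        }

        // Poll the global minimum, then refill from the stream it came from.
        while (!heap.isEmpty()) {
            int[] next = heap.poll();
            System.out.print(next[0] + " "); // prints 1 2 3 4 5 6 7 8 9
            Iterator<Integer> source = streams.get(next[1]);
            if (source.hasNext()) {
                heap.add(new int[] { source.next(), next[1] });
            }
        }
    }
}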

From source file:edu.umn.cs.spatialHadoop.core.RTree.java

/**
 * Builds the RTree given a serialized list of elements. It uses the given
 * stockObject to deserialize these elements using
 * {@link TextSerializable#fromText(Text)} and build the tree. Also writes the
 * created tree to the disk directly.
 * 
 * @param element_bytes
 *          - serialization of all elements separated by new lines
 * @param offset
 *          - offset of the first byte to use in element_bytes
 * @param len
 *          - number of bytes to use in element_bytes
 * @param degree
 *          - Degree of the R-tree to build in terms of number of children per
 *          node
 * @param dataOut
 *          - output stream to write the result to.
 * @param fast_sort
 *          - setting this to <code>true</code> allows the method to run
 *          faster by materializing the offset of each element in the list
 *          which speeds up the comparison. However, this requires an
 *          additional 16 bytes per element. So, for each 1M elements, the
 *          method will require an additional 16 MB (approximately).
 */
public void bulkLoadWrite(final byte[] element_bytes, final int offset, final int len, final int degree,
        DataOutput dataOut, final boolean fast_sort) {
    try {

        // Count number of elements in the given text
        int i_start = offset;
        final Text line = new Text();
        while (i_start < offset + len) {
            int i_end = skipToEOL(element_bytes, i_start);
            // Extract the line without end of line character
            line.set(element_bytes, i_start, i_end - i_start - 1);
            stockObject.fromText(line);
            elementCount++;
            i_start = i_end;
        }
        LOG.info("Bulk loading an RTree with " + elementCount + " elements");

        // It turns out that findBestDegree returns the best degree when the
        // whole tree is loaded into memory during processing. However, since
        // current algorithms process the tree while it is on disk, a higher
        // degree should be selected so that a node fits in one file block
        // (assumed to be 4K).
        //final int degree = findBestDegree(bytesAvailable, elementCount);
        LOG.info("Writing an RTree with degree " + degree);

        int height = Math.max(1, (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
        int leafNodeCount = (int) Math.pow(degree, height - 1);
        if (elementCount < 2 * leafNodeCount && height > 1) {
            height--;
            leafNodeCount = (int) Math.pow(degree, height - 1);
        }
        int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
        int nonLeafNodeCount = nodeCount - leafNodeCount;

        // Keep track of the offset of each element in the text
        final int[] offsets = new int[elementCount];
        final double[] xs = fast_sort ? new double[elementCount] : null;
        final double[] ys = fast_sort ? new double[elementCount] : null;

        i_start = offset;
        line.clear();
        for (int i = 0; i < elementCount; i++) {
            offsets[i] = i_start;
            int i_end = skipToEOL(element_bytes, i_start);
            if (xs != null) {
                // Extract the line with end of line character
                line.set(element_bytes, i_start, i_end - i_start - 1);
                stockObject.fromText(line);
                // Sample center of the shape
                xs[i] = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                ys[i] = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
            }
            i_start = i_end;
        }

        /**A struct to store information about a split*/
        class SplitStruct extends Rectangle {
            /**Start and end index for this split*/
            int index1, index2;
            /**Direction of this split*/
            byte direction;
            /**Index of first element on disk*/
            int offsetOfFirstElement;

            static final byte DIRECTION_X = 0;
            static final byte DIRECTION_Y = 1;

            SplitStruct(int index1, int index2, byte direction) {
                this.index1 = index1;
                this.index2 = index2;
                this.direction = direction;
            }

            @Override
            public void write(DataOutput out) throws IOException {
                out.writeInt(offsetOfFirstElement);
                super.write(out);
            }

            void partition(Queue<SplitStruct> toBePartitioned) {
                IndexedSortable sortableX;
                IndexedSortable sortableY;

                if (fast_sort) {
                    // Use materialized xs[] and ys[] to do the comparisons
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (xs[i] < xs[j])
                                return -1;
                            if (xs[i] > xs[j])
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap xs
                            double tempx = xs[i];
                            xs[i] = xs[j];
                            xs[j] = tempx;
                            // Swap ys
                            double tempY = ys[i];
                            ys[i] = ys[j];
                            ys[j] = tempY;
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            if (ys[i] < ys[j])
                                return -1;
                            if (ys[i] > ys[j])
                                return 1;
                            return 0;
                        }
                    };
                } else {
                    // No materialized xs and ys. Always deserialize objects to compare
                    sortableX = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            // Get end of line
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double xi = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double xj = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
                            if (xi < xj)
                                return -1;
                            if (xi > xj)
                                return 1;
                            return 0;
                        }
                    };

                    sortableY = new IndexedSortable() {
                        @Override
                        public void swap(int i, int j) {
                            // Swap id
                            int tempid = offsets[i];
                            offsets[i] = offsets[j];
                            offsets[j] = tempid;
                        }

                        @Override
                        public int compare(int i, int j) {
                            int eol = skipToEOL(element_bytes, offsets[i]);
                            line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                            stockObject.fromText(line);
                            double yi = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;

                            eol = skipToEOL(element_bytes, offsets[j]);
                            line.set(element_bytes, offsets[j], eol - offsets[j] - 1);
                            stockObject.fromText(line);
                            double yj = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
                            if (yi < yj)
                                return -1;
                            if (yi > yj)
                                return 1;
                            return 0;
                        }
                    };
                }

                final IndexedSorter sorter = new QuickSort();

                final IndexedSortable[] sortables = new IndexedSortable[2];
                sortables[SplitStruct.DIRECTION_X] = sortableX;
                sortables[SplitStruct.DIRECTION_Y] = sortableY;

                sorter.sort(sortables[direction], index1, index2);

                // Partition (equally) into 'degree' partitions and create a
                // SplitStruct for each partition
                int i1 = index1;
                for (int iSplit = 0; iSplit < degree; iSplit++) {
                    int i2 = index1 + (index2 - index1) * (iSplit + 1) / degree;
                    SplitStruct newSplit = new SplitStruct(i1, i2, (byte) (1 - direction));
                    toBePartitioned.add(newSplit);
                    i1 = i2;
                }
            }
        }

        // All nodes stored in level-order traversal
        Vector<SplitStruct> nodes = new Vector<SplitStruct>();
        final Queue<SplitStruct> toBePartitioned = new LinkedList<SplitStruct>();
        toBePartitioned.add(new SplitStruct(0, elementCount, SplitStruct.DIRECTION_X));

        while (!toBePartitioned.isEmpty()) {
            SplitStruct split = toBePartitioned.poll();
            if (nodes.size() < nonLeafNodeCount) {
                // This is a non-leaf
                split.partition(toBePartitioned);
            }
            nodes.add(split);
        }

        if (nodes.size() != nodeCount) {
            throw new RuntimeException(
                    "Expected node count: " + nodeCount + ". Real node count: " + nodes.size());
        }

        // Now we have our data sorted in the required order. Start building
        // the tree.
        // Store the offset of each leaf node in the tree
        FSDataOutputStream fakeOut = null;
        try {
            fakeOut = new FSDataOutputStream(new java.io.OutputStream() {
                // Null output stream
                @Override
                public void write(int b) throws IOException {
                    // Do nothing
                }

                @Override
                public void write(byte[] b, int off, int len) throws IOException {
                    // Do nothing
                }

                @Override
                public void write(byte[] b) throws IOException {
                    // Do nothing
                }
            }, null, TreeHeaderSize + nodes.size() * NodeSize);
            for (int i_leaf = nonLeafNodeCount, i = 0; i_leaf < nodes.size(); i_leaf++) {
                nodes.elementAt(i_leaf).offsetOfFirstElement = (int) fakeOut.getPos();
                if (i != nodes.elementAt(i_leaf).index1)
                    throw new RuntimeException();
                double x1, y1, x2, y2;

                // Initialize MBR to first object
                int eol = skipToEOL(element_bytes, offsets[i]);
                fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
                line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                stockObject.fromText(line);
                Rectangle mbr = stockObject.getMBR();
                x1 = mbr.x1;
                y1 = mbr.y1;
                x2 = mbr.x2;
                y2 = mbr.y2;
                i++;

                while (i < nodes.elementAt(i_leaf).index2) {
                    eol = skipToEOL(element_bytes, offsets[i]);
                    fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
                    line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
                    stockObject.fromText(line);
                    mbr = stockObject.getMBR();
                    if (mbr.x1 < x1)
                        x1 = mbr.x1;
                    if (mbr.y1 < y1)
                        y1 = mbr.y1;
                    if (mbr.x2 > x2)
                        x2 = mbr.x2;
                    if (mbr.y2 > y2)
                        y2 = mbr.y2;
                    i++;
                }
                nodes.elementAt(i_leaf).set(x1, y1, x2, y2);
            }

        } finally {
            if (fakeOut != null)
                fakeOut.close();
        }

        // Calculate MBR and offsetOfFirstElement for non-leaves
        for (int i_node = nonLeafNodeCount - 1; i_node >= 0; i_node--) {
            int i_first_child = i_node * degree + 1;
            nodes.elementAt(i_node).offsetOfFirstElement = nodes.elementAt(i_first_child).offsetOfFirstElement;
            int i_child = 0;
            Rectangle mbr;
            mbr = nodes.elementAt(i_first_child + i_child);
            double x1 = mbr.x1;
            double y1 = mbr.y1;
            double x2 = mbr.x2;
            double y2 = mbr.y2;
            i_child++;

            while (i_child < degree) {
                mbr = nodes.elementAt(i_first_child + i_child);
                if (mbr.x1 < x1)
                    x1 = mbr.x1;
                if (mbr.y1 < y1)
                    y1 = mbr.y1;
                if (mbr.x2 > x2)
                    x2 = mbr.x2;
                if (mbr.y2 > y2)
                    y2 = mbr.y2;
                i_child++;
            }
            nodes.elementAt(i_node).set(x1, y1, x2, y2);
        }

        // Start writing the tree
        // write tree header (including size)
        // Total tree size. (== Total bytes written - 8 bytes for the size itself)
        dataOut.writeInt(TreeHeaderSize + NodeSize * nodeCount + len);
        // Tree height
        dataOut.writeInt(height);
        // Degree
        dataOut.writeInt(degree);
        dataOut.writeInt(elementCount);

        // write nodes
        for (SplitStruct node : nodes) {
            node.write(dataOut);
        }
        // write elements
        for (int element_i = 0; element_i < elementCount; element_i++) {
            int eol = skipToEOL(element_bytes, offsets[element_i]);
            dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
        }

    } catch (IOException e) {
        e.printStackTrace();
    }
}
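
SplitStruct.partition() above drives a breadth-first build: the root range is seeded into the queue with add(), and every poll() of a non-leaf node adds its 'degree' child ranges back, so nodes come out in level order. Below is a minimal sketch of that work-queue pattern over [start, end) index ranges; the leaf test here (stop once a range holds at most 'degree' elements) is a simplification of the RTree's nonLeafNodeCount rule.

import java.util.LinkedList;
import java.util.Queue;

public class WorkQueueSketch {
    public static void main(String[] args) {
        // Each entry is a [start, end) range still to be split.
        Queue<int[]> toBePartitioned = new LinkedList<int[]>();
        toBePartitioned.add(new int[] { 0, 8 }); // root range covers all elements

        final int degree = 2;
        while (!toBePartitioned.isEmpty()) {
            int[] range = toBePartitioned.poll();
            System.out.println("node [" + range[0] + ", " + range[1] + ")");

            // Split non-leaf ranges into 'degree' equal child ranges.
            if (range[1] - range[0] > degree) {
                int start = range[0];
                for (int i = 0; i < degree; i++) {
                    int end = range[0] + (range[1] - range[0]) * (i + 1) / degree;
                    toBePartitioned.add(new int[] { start, end });
                    start = end;
                }
            }
        }
    }
}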

From source file:it.geosolutions.geobatch.task.TaskExecutor.java

public Queue<FileSystemEvent> execute(Queue<FileSystemEvent> events) throws ActionException {

    listenerForwarder.started();

    if (configuration == null) {
        final ActionException e = new ActionException(this, "DataFlowConfig is null.");
        listenerForwarder.failed(e);
        throw e;
    }

    if (events == null || events.size() == 0) {
        final ActionException e = new ActionException(this, "Empty or null incoming events list");
        listenerForwarder.failed(e);
        throw e;
    }

    Queue<FileSystemEvent> outEvents = new LinkedList<FileSystemEvent>();

    while (events.size() > 0) {
        // get the first event
        final FileSystemEvent event = events.remove();
        final File inputFile = event.getSource();
        if (inputFile == null) {
            final ActionException e = new ActionException(this, "Input File is null");
            listenerForwarder.failed(e);
            throw e;
        }
        if (!inputFile.exists()) {
            final ActionException e = new ActionException(this, "Input File doesn't exist");
            listenerForwarder.failed(e);
            throw e;
        }
        final String inputFilePath = inputFile.getAbsolutePath();

        final String inputFileExt = FilenameUtils.getExtension(inputFilePath);

        // Getting XSL file definition
        final String xslPath = configuration.getXsl();
        final boolean useDefaultScript;

        String defaultScriptPath = configuration.getDefaultScript();
        if (inputFileExt.equalsIgnoreCase("xml")) {
            if (LOGGER.isInfoEnabled())
                LOGGER.info("Using input file as script: " + inputFilePath);
            defaultScriptPath = inputFilePath;
            useDefaultScript = false;
        } else {
            if (LOGGER.isInfoEnabled())
                LOGGER.info("Using default script: " + configuration.getDefaultScript());
            useDefaultScript = true;
        }

        final String outputName = configuration.getOutputName();

        File xslFile = null;
        InputStream is = null;

        try {

            if (xslPath != null && xslPath.trim().length() > 0) {
                final String path = Path.findLocation(xslPath, getConfigDir().getAbsolutePath());
                if (path == null) {
                    final ActionException e = new ActionException(this, "XSL file not found: " + path);
                    listenerForwarder.failed(e);
                    throw e;
                }
                xslFile = new File(path);
            }
            if (xslFile == null || !xslFile.exists()) {
                final ActionException e = new ActionException(this, "XSL file not found: " + xslPath);
                listenerForwarder.failed(e);
                throw e;
            }

            File xmlFile = null;
            String outputFile = null;
            if (useDefaultScript) {
                if (defaultScriptPath != null && defaultScriptPath.trim().length() > 0) {
                    final String path = Path.findLocation(defaultScriptPath,
                            getConfigDir().getAbsolutePath());
                    if (path == null) {
                        final ActionException e = new ActionException(this,
                                "Default script not found: " + defaultScriptPath);
                        listenerForwarder.failed(e);
                        throw e;
                    }
                    xmlFile = new File(path);

                    final File outXmlFile = File.createTempFile("script", ".xml", getTempDir());
                    //                  outXmlFile.deleteOnExit();
                    outputFile = setScriptArguments(xmlFile.getAbsolutePath(), inputFilePath, outputName,
                            outXmlFile);
                    xmlFile = outXmlFile;
                }

            } else {
                xmlFile = inputFile;
            }
            if (!xmlFile.exists()) {
                final ActionException e = new ActionException(this, "XML file not found: " + xmlFile);
                listenerForwarder.failed(e);
                throw e;
            }

            // Setup an XML source from the input XML file
            final Source xmlSource = new StreamSource(xmlFile);

            is = new FileInputStream(xslFile);

            // XML parsing to setup a command line
            final String argument = buildArgument(xmlSource, is);
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Arguments: " + argument);
            }

            final Project project = new Project();
            project.init();

            final ExecTask execTask = new ExecTask();
            execTask.setProject(project);

            // Setting environment variables coming from the configuration,
            // for instance: PATH, LD_LIBRARY_PATH and similar
            Map<String, String> variables = configuration.getVariables();
            if (variables != null && !variables.isEmpty()) {
                for (String key : variables.keySet()) {
                    Variable var = new Variable();
                    var.setKey(key);
                    final String value = variables.get(key);
                    if (value != null) {
                        var.setValue(variables.get(key));
                        execTask.addEnv(var);
                    }
                }
            }

            // Setting executable
            execTask.setExecutable(configuration.getExecutable());

            // Setting error logging
            final String errorFileName = configuration.getErrorFile();
            if (errorFileName != null) {
                File errorFile = new File(errorFileName);
                if (!errorFile.exists()) {
                    errorFile = Path.findLocation(errorFileName, getTempDir());
                    if (errorFile != null && !errorFile.exists()) {
                        try {
                            errorFile.createNewFile();
                        } catch (Throwable t) {
                            final ActionException e = new ActionException(this, t.getLocalizedMessage(), t);
                            listenerForwarder.failed(e);
                            throw e;
                        }
                    }
                }
                if (errorFile.exists()) {
                    if (LOGGER.isDebugEnabled())
                        LOGGER.debug("Using error file: " + errorFile);
                    execTask.setLogError(true);
                    execTask.setAppend(true);
                    execTask.setError(errorFile);
                    execTask.setFailonerror(true);
                }
            }

            // Setting the timeout
            Long timeOut = configuration.getTimeOut();
            if (timeOut != null) {
                execTask.setTimeout(timeOut);
            }

            // Setting command line argument
            execTask.createArg().setLine(argument);

            File output = null;
            if (configuration.getOutput() != null) {
                output = new File(configuration.getOutput());
                if (output.exists() && output.isDirectory()) {
                    final File outXmlFile = File.createTempFile("script", ".xml", getTempDir()); // TODO CHECKME: is this var used?
                    //                  outXmlFile.deleteOnExit();
                    String destFile = getScriptArguments(xmlFile.getAbsolutePath(), "srcfile");
                    if (output.isAbsolute()) {
                        //                            String basename = 
                        output = new File(output, FilenameUtils.getBaseName(destFile) + configuration
                                .getOutputName().substring(configuration.getOutputName().indexOf(".")));
                    } else {
                        output = Path.findLocation(configuration.getOutput(), inputFile.getParentFile());
                        output = new File(output, FilenameUtils.getBaseName(inputFile.getName()) + configuration
                                .getOutputName().substring(configuration.getOutputName().indexOf(".")));
                    }
                }
                execTask.setOutput(output);
            }

            // Executing
            execTask.execute();

            File outFile = (outputFile != null ? new File(outputFile) : null);

            if (configuration.getOutput() != null) {
                if (new File(configuration.getOutput()).isAbsolute()) {
                    if (output.exists() && output.isFile()) {
                        // outFile = output;
                        final File outXmlFile = File.createTempFile("script", ".xml", getTempDir());
                        //                     outXmlFile.deleteOnExit();
                        outputFile = setScriptArguments(xmlFile.getAbsolutePath(), output.getAbsolutePath(),
                                outputName, outXmlFile);
                        outFile = new File(configuration.getOutput(),
                                FilenameUtils.getBaseName(outputFile) + ".xml");
                        FileUtils.copyFile(outXmlFile, outFile);
                    }
                } else {
                    if (outFile == null)
                        outFile = inputFile;
                }
            } else if (outFile == null) {
                outFile = inputFile;
            }

            outEvents.add(new FileSystemEvent(outFile, FileSystemEventType.FILE_ADDED));
        } catch (Exception e) {
            listenerForwarder.failed(e);
            throw new ActionException(this, e.getMessage(), e);
        } finally {
            if (is != null)
                org.apache.commons.io.IOUtils.closeQuietly(is);
        }
    }

    listenerForwarder.completed();
    return outEvents;
}