Example usage for java.util Queue remove

Introduction

On this page you can find example usages of java.util.Queue.remove().

Prototype

E remove();

Document

Retrieves and removes the head of this queue. This method differs from poll() only in that it throws an exception if this queue is empty.
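
The following minimal sketch (class and variable names are arbitrary, chosen only for illustration) contrasts remove() with poll() on an empty queue:

import java.util.LinkedList;
import java.util.NoSuchElementException;
import java.util.Queue;

public class QueueRemoveDemo {
    public static void main(String[] args) {
        Queue<String> queue = new LinkedList<String>();
        queue.offer("first");
        queue.offer("second");

        // remove() retrieves and removes the head of the queue
        System.out.println(queue.remove()); // prints "first"
        System.out.println(queue.remove()); // prints "second"

        // poll() returns null when the queue is empty...
        System.out.println(queue.poll());   // prints "null"

        // ...while remove() throws NoSuchElementException
        try {
            queue.remove();
        } catch (NoSuchElementException e) {
            System.out.println("remove() on an empty queue throws " + e.getClass().getSimpleName());
        }
    }
}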

Usage
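
Most of the examples below follow the same drain pattern: items are offered to a LinkedList-backed Queue and consumed with remove() until the queue is empty. A minimal sketch of that pattern (the element type and the processing step are placeholders):

import java.util.LinkedList;
import java.util.Queue;

public class QueueDrainDemo {
    public static void main(String[] args) {
        Queue<String> work = new LinkedList<String>();
        work.offer("a");
        work.offer("b");

        while (!work.isEmpty()) {
            // the emptiness check guards the call, so remove() cannot throw here
            String item = work.remove();
            System.out.println("processing " + item);
        }
    }
}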

From source file:org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.java

/**
 * Create Hive splits based on CombineFileSplit.
 */
private InputSplit[] getCombineSplits(JobConf job, int numSplits,
        Map<String, PartitionDesc> pathToPartitionInfo) throws IOException {
    init(job);
    Map<String, ArrayList<String>> pathToAliases = mrwork.getPathToAliases();
    Map<String, Operator<? extends OperatorDesc>> aliasToWork = mrwork.getAliasToWork();
    CombineFileInputFormatShim combine = ShimLoader.getHadoopShims().getCombineFileInputFormat();

    InputSplit[] splits = null;
    if (combine == null) {
        splits = super.getSplits(job, numSplits);
        return splits;
    }

    if (combine.getInputPathsShim(job).length == 0) {
        throw new IOException("No input paths specified in job");
    }
    ArrayList<InputSplit> result = new ArrayList<InputSplit>();

    // combine splits only from same tables and same partitions. Do not combine splits from multiple
    // tables or multiple partitions.
    Path[] paths = combine.getInputPathsShim(job);

    List<Path> inpDirs = new ArrayList<Path>();
    List<Path> inpFiles = new ArrayList<Path>();
    Map<CombinePathInputFormat, CombineFilter> poolMap = new HashMap<CombinePathInputFormat, CombineFilter>();
    Set<Path> poolSet = new HashSet<Path>();

    for (Path path : paths) {
        PartitionDesc part = HiveFileFormatUtils.getPartitionDescFromPathRecursively(pathToPartitionInfo, path,
                IOPrepareCache.get().allocatePartitionDescMap());
        TableDesc tableDesc = part.getTableDesc();
        if ((tableDesc != null) && tableDesc.isNonNative()) {
            return super.getSplits(job, numSplits);
        }

        // Use HiveInputFormat if any of the paths is not splittable
        Class inputFormatClass = part.getInputFileFormatClass();
        String inputFormatClassName = inputFormatClass.getName();
        InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
        String deserializerClassName = null;
        try {
            deserializerClassName = part.getDeserializer(job).getClass().getName();
        } catch (Exception e) {
            // ignore
        }
        FileSystem inpFs = path.getFileSystem(job);

        // Since there is no easy way of knowing whether MAPREDUCE-1597 is present in the tree or not,
        // we use a configuration variable for the same
        if (this.mrwork != null && !this.mrwork.getHadoopSupportsSplittable()) {
            // The following code should be removed, once
            // https://issues.apache.org/jira/browse/MAPREDUCE-1597 is fixed.
            // Hadoop does not handle non-splittable files correctly for CombineFileInputFormat,
            // so don't use CombineFileInputFormat for non-splittable files

            // i.e., don't combine if the input format is a TextInputFormat and has compression turned on

            if (inputFormat instanceof TextInputFormat) {
                Queue<Path> dirs = new LinkedList<Path>();
                FileStatus fStats = inpFs.getFileStatus(path);

                // If path is a directory
                if (fStats.isDir()) {
                    dirs.offer(path);
                } else if ((new CompressionCodecFactory(job)).getCodec(path) != null) {
                    //if compression codec is set, use HiveInputFormat.getSplits (don't combine)
                    splits = super.getSplits(job, numSplits);
                    return splits;
                }

                while (dirs.peek() != null) {
                    Path tstPath = dirs.remove();
                    FileStatus[] fStatus = inpFs.listStatus(tstPath, FileUtils.HIDDEN_FILES_PATH_FILTER);
                    for (int idx = 0; idx < fStatus.length; idx++) {
                        if (fStatus[idx].isDir()) {
                            dirs.offer(fStatus[idx].getPath());
                        } else if ((new CompressionCodecFactory(job))
                                .getCodec(fStatus[idx].getPath()) != null) {
                            //if compression codec is set, use HiveInputFormat.getSplits (don't combine)
                            splits = super.getSplits(job, numSplits);
                            return splits;
                        }
                    }
                }
            }
        }
        //don't combine if inputformat is a SymlinkTextInputFormat
        if (inputFormat instanceof SymlinkTextInputFormat) {
            splits = super.getSplits(job, numSplits);
            return splits;
        }

        Path filterPath = path;

        // Does a pool exist for this path already
        CombineFilter f = null;
        List<Operator<? extends OperatorDesc>> opList = null;

        if (!mrwork.isMapperCannotSpanPartns()) {
            // if the mapper can span partitions, make sure a split does not contain multiple
            // opList + inputFormatClassName + deserializerClassName combinations.
            // This is done using the Map of CombinePathInputFormat to PathFilter

            opList = HiveFileFormatUtils.doGetWorksFromPath(pathToAliases, aliasToWork, filterPath);
            CombinePathInputFormat combinePathInputFormat = new CombinePathInputFormat(opList,
                    inputFormatClassName, deserializerClassName);
            f = poolMap.get(combinePathInputFormat);
            if (f == null) {
                f = new CombineFilter(filterPath);
                LOG.info("CombineHiveInputSplit creating pool for " + path + "; using filter path "
                        + filterPath);
                combine.createPool(job, f);
                poolMap.put(combinePathInputFormat, f);
            } else {
                LOG.info("CombineHiveInputSplit: pool is already created for " + path + "; using filter path "
                        + filterPath);
                f.addPath(filterPath);
            }
        } else {
            // In the case of tablesample, the input paths are pointing to files rather than directories.
            // We need to get the parent directory as the filtering path so that all files in the same
            // parent directory will be grouped into one pool but not files from different parent
            // directories. This guarantees that a split will combine all files in the same partition
            // but won't cross multiple partitions if the user has asked so.
            if (!path.getFileSystem(job).getFileStatus(path).isDir()) { // path is not directory
                filterPath = path.getParent();
                inpFiles.add(path);
                poolSet.add(filterPath);
            } else {
                inpDirs.add(path);
            }
        }
    }

    // Processing directories
    List<CombineFileSplit> iss = new ArrayList<CombineFileSplit>();
    if (!mrwork.isMapperCannotSpanPartns()) {
        //mapper can span partitions
        //combine into as few as one split, subject to the PathFilters set
        // using combine.createPool.
        iss = Arrays.asList(combine.getSplits(job, 1));
    } else {
        for (Path path : inpDirs) {
            processPaths(job, combine, iss, path);
        }

        if (inpFiles.size() > 0) {
            // Processing files
            for (Path filterPath : poolSet) {
                combine.createPool(job, new CombineFilter(filterPath));
            }
            processPaths(job, combine, iss, inpFiles.toArray(new Path[0]));
        }
    }

    if (mrwork.getNameToSplitSample() != null && !mrwork.getNameToSplitSample().isEmpty()) {
        iss = sampleSplits(iss);
    }

    for (CombineFileSplit is : iss) {
        CombineHiveInputSplit csplit = new CombineHiveInputSplit(job, is, pathToPartitionInfo);
        result.add(csplit);
    }

    LOG.info("number of splits " + result.size());
    return result.toArray(new CombineHiveInputSplit[result.size()]);
}

From source file:it.geosolutions.geobatch.geotiff.retile.GeotiffRetiler.java

public Queue<FileSystemEvent> execute(Queue<FileSystemEvent> events) throws ActionException {
    try {

        if (configuration == null) {
            final String message = "GeotiffRetiler::execute(): flow configuration is null.";
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            throw new ActionException(this, message);
        }
        if (events.size() == 0) {
            throw new ActionException(this,
                    "GeotiffRetiler::execute(): Unable to process an empty events queue.");
        }

        if (LOGGER.isInfoEnabled())
            LOGGER.info("GeotiffRetiler::execute(): Starting with processing...");

        listenerForwarder.started();

        // The return
        final Queue<FileSystemEvent> ret = new LinkedList<FileSystemEvent>();

        while (events.size() > 0) {

            FileSystemEvent event = events.remove();

            File eventFile = event.getSource();
            FileSystemEventType eventType = event.getEventType();

            if (eventFile.exists() && eventFile.canRead() && eventFile.canWrite()) {
                /*
                 * If here: we can start retiler actions on the incoming file event
                 */

                if (eventFile.isDirectory()) {

                    File[] fileList = eventFile.listFiles();
                    int size = fileList.length;
                    for (int progress = 0; progress < size; progress++) {

                        File inFile = fileList[progress];

                        final String absolutePath = inFile.getAbsolutePath();
                        final String inputFileName = FilenameUtils.getName(absolutePath);

                        if (LOGGER.isInfoEnabled())
                            LOGGER.info("is going to retile: " + inputFileName);

                        try {

                            listenerForwarder.setTask("GeotiffRetiler");

                            File tiledTiffFile = File.createTempFile(inFile.getName(), "_tiled.tif",
                                    getTempDir());
                            if (tiledTiffFile.exists()) {
                                // file already exists
                                // check write permission
                                if (!tiledTiffFile.canWrite()) {
                                    final String message = "Unable to over-write the temporary file called: "
                                            + tiledTiffFile.getAbsolutePath() + "\nCheck permissions.";
                                    if (LOGGER.isErrorEnabled()) {
                                        LOGGER.error(message);
                                    }
                                    throw new IllegalArgumentException(message);
                                }
                            } else if (!tiledTiffFile.createNewFile()) {
                                final String message = "Unable to create temporary file called: "
                                        + tiledTiffFile.getAbsolutePath();
                                if (LOGGER.isErrorEnabled()) {
                                    LOGGER.error(message);
                                }
                                throw new IllegalArgumentException(message);
                            }
                            final double compressionRatio = getConfiguration().getCompressionRatio();
                            final String compressionType = getConfiguration().getCompressionScheme();

                            reTile(inFile, tiledTiffFile, compressionRatio, compressionType,
                                    getConfiguration().getTileW(), getConfiguration().getTileH(),
                                    getConfiguration().isForceToBigTiff());

                            String extension = FilenameUtils.getExtension(inputFileName);
                            if (!extension.contains("tif")) {
                                extension = "tif";
                            }
                            final String outputFileName = FilenameUtils.getFullPath(absolutePath)
                                    + FilenameUtils.getBaseName(inputFileName) + "." + extension;
                            final File outputFile = new File(outputFileName);
                            // do we need to remove the input?
                            FileUtils.copyFile(tiledTiffFile, outputFile);
                            FileUtils.deleteQuietly(tiledTiffFile);

                            // set the output
                            /*
                             * COMMENTED OUT 21 Feb 2011: simone: If the event represents a Dir
                             * we have to return a Dir. Do not matter failing files.
                             * 
                             * carlo: we may also want to check if a file is already tiled!
                             * 
                             * File outputFile=reTile(inFile); if (outputFile!=null){ //TODO:
                             * here we use the same event for each file in the ret.add(new
                             * FileSystemEvent(outputFile, eventType)); }
                             */

                        } catch (UnsupportedOperationException uoe) {
                            listenerForwarder.failed(uoe);
                            if (LOGGER.isWarnEnabled())
                                LOGGER.warn(uoe.getLocalizedMessage(), uoe);
                            continue;
                        } catch (IOException ioe) {
                            listenerForwarder.failed(ioe);
                            if (LOGGER.isWarnEnabled())
                                LOGGER.warn(ioe.getLocalizedMessage(), ioe);
                            continue;
                        } catch (IllegalArgumentException iae) {
                            listenerForwarder.failed(iae);
                            if (LOGGER.isWarnEnabled())
                                LOGGER.warn(iae.getLocalizedMessage(), iae);
                            continue;
                        } finally {
                            listenerForwarder.setProgress((progress * 100) / ((size != 0) ? size : 1));
                            listenerForwarder.progressing();
                        }
                    }

                    if (LOGGER.isInfoEnabled())
                        LOGGER.info("SUCCESSFULLY completed work on: " + event.getSource());

                    // add the directory to the return
                    ret.add(event);
                } else {
                    // file is not a directory
                    try {
                        listenerForwarder.setTask("GeotiffRetiler");

                        File tiledTiffFile = File.createTempFile(eventFile.getName(), "_tiled.tif",
                                eventFile.getParentFile());
                        if (tiledTiffFile.exists()) {
                            // file already exists
                            // check write permission
                            if (!tiledTiffFile.canWrite()) {
                                final String message = "Unable to over-write the temporary file called: "
                                        + tiledTiffFile.getAbsolutePath() + "\nCheck permissions.";
                                if (LOGGER.isErrorEnabled()) {
                                    LOGGER.error(message);
                                }
                                throw new IllegalArgumentException(message);
                            }
                        } else if (!tiledTiffFile.createNewFile()) {
                            final String message = "Unable to create temporary file called: "
                                    + tiledTiffFile.getAbsolutePath();
                            if (LOGGER.isErrorEnabled()) {
                                LOGGER.error(message);
                            }
                            throw new IllegalArgumentException(message);
                        }
                        final double compressionRatio = getConfiguration().getCompressionRatio();
                        final String compressionType = getConfiguration().getCompressionScheme();

                        reTile(eventFile, tiledTiffFile, compressionRatio, compressionType,
                                getConfiguration().getTileW(), getConfiguration().getTileH(),
                                getConfiguration().isForceToBigTiff());

                        String extension = FilenameUtils.getExtension(eventFile.getName());
                        if (!extension.contains("tif")) {
                            extension = "tif";
                        }
                        final String outputFileName = FilenameUtils.getFullPath(eventFile.getAbsolutePath())
                                + FilenameUtils.getBaseName(eventFile.getName()) + "." + extension;
                        final File outputFile = new File(outputFileName);
                        // do we need to remove the input?
                        FileUtils.copyFile(tiledTiffFile, outputFile);
                        FileUtils.deleteQuietly(tiledTiffFile);

                        if (LOGGER.isInfoEnabled())
                            LOGGER.info("SUCCESSFULLY completed work on: " + event.getSource());
                        listenerForwarder.setProgress(100);
                        ret.add(new FileSystemEvent(outputFile, eventType));

                    } catch (UnsupportedOperationException uoe) {
                        listenerForwarder.failed(uoe);
                        if (LOGGER.isWarnEnabled())
                            LOGGER.warn(uoe.getLocalizedMessage(), uoe);
                        continue;
                    } catch (IOException ioe) {
                        listenerForwarder.failed(ioe);
                        if (LOGGER.isWarnEnabled())
                            LOGGER.warn(ioe.getLocalizedMessage(), ioe);
                        continue;
                    } catch (IllegalArgumentException iae) {
                        listenerForwarder.failed(iae);
                        if (LOGGER.isWarnEnabled())
                            LOGGER.warn(iae.getLocalizedMessage(), iae);
                        continue;
                    } finally {

                        listenerForwarder.setProgress((100) / ((events.size() != 0) ? events.size() : 1));
                        listenerForwarder.progressing();
                    }
                }
            } else {
                final String message = "The passed file event refers to a not existent "
                        + "or not readable/writeable file! File: " + eventFile.getAbsolutePath();
                if (LOGGER.isWarnEnabled())
                    LOGGER.warn(message);
                final IllegalArgumentException iae = new IllegalArgumentException(message);
                listenerForwarder.failed(iae);
            }
        } // end while
        listenerForwarder.completed();

        // return
        if (ret.size() > 0) {
            events.clear();
            return ret;
        } else {
            /*
             * If here: we got an error, no files are set to be returned, so the
             * input queue is returned
             */
            return events;
        }
    } catch (Exception t) {
        if (LOGGER.isErrorEnabled())
            LOGGER.error(t.getLocalizedMessage(), t);
        final ActionException exc = new ActionException(this, t.getLocalizedMessage(), t);
        listenerForwarder.failed(exc);
        throw exc;
    }
}

From source file:candr.yoclip.Parser.java

/**
 * Creates a collection of parsed option parameters from the parameters passed in. Internally the parameters are placed in a queue and
 * removed from it as they are parsed.
 *
 * @param parameters The command parameters that will be parsed.
 * @return a collection of parsed option parameters; the collection is empty if no parameters were passed in.
 */
protected List<ParsedOption<T>> getParsedParameters(final String[] parameters) {

    final LinkedList<ParsedOption<T>> parsedOptionParameters = new LinkedList<ParsedOption<T>>();

    final Queue<String> parametersQueue = new LinkedList<String>(Arrays.asList(parameters));
    while (!parametersQueue.isEmpty()) {

        ParsedOption<T> parsedOptionParameter;
        if (!isOption(parametersQueue.peek())) {

            parsedOptionParameter = getParsedArgument(parametersQueue);

        } else {

            // check for an option property match first
            parsedOptionParameter = getParsedOptionProperty(parametersQueue);

            // an option next
            if (null == parsedOptionParameter) {
                parsedOptionParameter = getParsedOption(parametersQueue);
            }
        }

        // not an option
        if (null == parsedOptionParameter) {
            final String parameter = parametersQueue.remove();
            parsedOptionParameter = new ParsedOption<T>("'" + parameter + "': Unsupported option.");
        }

        parsedOptionParameters.add(parsedOptionParameter);

    }

    return parsedOptionParameters;
}

From source file:org.amanzi.neo.services.impl.statistics.PropertyStatisticsService.java

protected void updatePropertyVault(final Node propertyVault, final PropertyVault vault)
        throws ServiceException {
    nodeService.updateProperty(propertyVault, statisticsNodeProperties.getClassProperty(),
            vault.getClassName());

    int size = nodeService.getNodeProperty(propertyVault, getGeneralNodeProperties().getSizeProperty(),
            NumberUtils.INTEGER_ZERO, false);

    Map<Object, Integer> values = new HashMap<Object, Integer>(vault.getValuesMap());

    Queue<Integer> removedIndexes = new LinkedList<Integer>();
    Stack<Integer> processedIndex = new Stack<Integer>();

    if (size > 0) {
        for (int i = 0; i < size; i++) {
            Object property = nodeService.getNodeProperty(propertyVault,
                    statisticsNodeProperties.getValuePrefix() + i, null, true);

            Integer newCount = values.remove(property);
            if (newCount != null) {
                nodeService.updateProperty(propertyVault, statisticsNodeProperties.getCountPrefix() + i,
                        newCount);
            } else {
                removedIndexes.add(i);
            }
            processedIndex.add(i);
        }
    }

    // remove old values
    for (Integer index : removedIndexes) {
        nodeService.removeNodeProperty(propertyVault, statisticsNodeProperties.getValuePrefix() + index, false);
        nodeService.removeNodeProperty(propertyVault, statisticsNodeProperties.getCountPrefix() + index, false);
    }

    int counter = size;
    for (Entry<Object, Integer> statEntry : values.entrySet()) {
        counter = removedIndexes.isEmpty() ? counter : removedIndexes.remove();

        nodeService.updateProperty(propertyVault, statisticsNodeProperties.getValuePrefix() + counter,
                statEntry.getKey());
        nodeService.updateProperty(propertyVault, statisticsNodeProperties.getCountPrefix() + counter,
                statEntry.getValue());

        counter++;
    }

    for (Integer newIndex : removedIndexes) {
        int oldIndex = processedIndex.pop();
        nodeService.renameNodeProperty(propertyVault, statisticsNodeProperties.getValuePrefix() + oldIndex,
                statisticsNodeProperties.getValuePrefix() + newIndex, false);
        nodeService.renameNodeProperty(propertyVault, statisticsNodeProperties.getCountPrefix() + oldIndex,
                statisticsNodeProperties.getCountPrefix() + newIndex, false);
    }

    nodeService.updateProperty(propertyVault, getGeneralNodeProperties().getSizeProperty(), values.size());
    nodeService.updateProperty(propertyVault, statisticsNodeProperties.getDefaultValueProperty(),
            vault.getDefaultValue());
}

From source file:it.geosolutions.geobatch.imagemosaic.ImageMosaicAction.java

/**
 * Publish or update an ImageMosaic layer on the specified GeoServer
 */
public Queue<EventObject> execute(Queue<EventObject> events) throws ActionException {

    if (LOGGER.isInfoEnabled())
        LOGGER.info("Start processing...");

    listenerForwarder.started();

    try {
        // looking for file
        if (events == null)
            throw new IllegalArgumentException("Unable to execute action with incoming null parameter");
        if (events.size() == 0)
            throw new IllegalArgumentException("Wrong number of elements for this action: " + events.size());

        /*
         * If here: we can execute the action
         */
        Queue<EventObject> ret = new LinkedList<EventObject>();

        /**
         * For each event in the queue
         */
        while (events.size() > 0) {
            final Object evObj = events.remove();

            /**
             * If the input file exists, check whether it is:
             * - a directory
             * - an XML file -> a serialized ImageMosaicCommand
             *
             * and build the ImageMosaicCommand accordingly.
             */
            final ImageMosaicCommand cmd;

            if (evObj == null) {
                ActionExceptionHandler.handleError(getConfiguration(), this, "Input null object.");
                continue;
            }

            if (evObj instanceof FileSystemEvent) {
                /*
                 * Checking input files.
                 */
                final File input = ((FileSystemEvent) evObj).getSource();
                if (!input.exists()) {
                    // no file is found for this event try with the next one
                    ActionExceptionHandler.handleError(getConfiguration(), this,
                            "The input file does not exists at url: " + input.getAbsolutePath());
                    continue;
                }

                /**
                 * the file event points to an XML file...
                 * 
                 * @see ImageMosaicCommand
                 */
                if (input.isFile() && FilenameUtils.getExtension(input.getName()).equalsIgnoreCase("xml")) {
                    if (LOGGER.isInfoEnabled()) {
                        LOGGER.info("Working on an XML command file: " + input.getAbsolutePath());
                    }

                    // try to deserialize
                    cmd = ImageMosaicCommand.deserialize(input.getAbsoluteFile());
                    if (cmd == null) {
                        ActionExceptionHandler.handleError(getConfiguration(), this,
                                "Unable to deserialize the passed file: " + input.getAbsolutePath());
                        continue;
                    }

                } else if (input.isDirectory()) {
                    if (LOGGER.isInfoEnabled()) {
                        LOGGER.info("Input file event points to a directory: " + input.getAbsolutePath());
                    }
                    String format = ((ImageMosaicConfiguration) super.getConfiguration()).getGranuleFormat();
                    if (format == null || format.isEmpty()) {
                        LOGGER.warn(
                                "No granule format specified in flow configuration... try force it to .tif");
                        format = "tif";
                    }
                    StringBuilder builder = new StringBuilder();
                    builder.append("*.");
                    builder.append(format);
                    final Collector coll = new Collector(
                            new WildcardFileFilter(builder.toString(), IOCase.INSENSITIVE));
                    // build the command from the files collected in the input directory
                    cmd = new ImageMosaicCommand(input, coll.collect(input), null);
                } else {
                    // the file event does not point to a directory nor to an xml file
                    ActionExceptionHandler.handleError(getConfiguration(), this,
                            "The file event does not point to a directory nor to an xml file: "
                                    + input.getAbsolutePath());
                    continue;
                }
            } else if (evObj instanceof EventObject) {
                Object innerObject = ((EventObject) evObj).getSource();
                if (innerObject instanceof ImageMosaicCommand) {
                    cmd = (ImageMosaicCommand) innerObject;
                } else {
                    // the event source is not an ImageMosaicCommand
                    ActionExceptionHandler.handleError(getConfiguration(), this,
                            "The file event does not point to a valid object: " + evObj);
                    continue;
                }
            } else {
                // the event is neither a FileSystemEvent nor wraps an ImageMosaicCommand
                ActionExceptionHandler.handleError(getConfiguration(), this,
                        "The file event does not point to a valid object: " + evObj);
                continue;
            }

            /**
             * the file pointing to the directory which the layer will refer
             * to.
             */
            final File baseDir = cmd.getBaseDir();
            /**
             * a descriptor for the mosaic to handle
             */
            final ImageMosaicGranulesDescriptor mosaicDescriptor = ImageMosaicGranulesDescriptor
                    .buildDescriptor(baseDir, getConfiguration());

            if (mosaicDescriptor == null) {
                ActionExceptionHandler.handleError(getConfiguration(), this,
                        "Unable to build the imageMosaic descriptor" + cmd.getBaseDir());
                continue;
            }

            // Perform tests on the base dir file
            if (!baseDir.exists() || !baseDir.isDirectory()) {
                // no base dir exists try to build a new one using
                // addList()
                if (cmd.getAddFiles() != null) {
                    if (cmd.getAddFiles().size() > 0) {
                        // try build the baseDir
                        if (!baseDir.mkdirs()) {
                            ActionExceptionHandler.handleError(getConfiguration(), this,
                                    "Unable to create the base directory named \'" + baseDir.getAbsolutePath()
                                            + "\'.");
                            continue;
                        }
                    } else {
                        final StringBuilder msg = new StringBuilder();
                        msg.append("Unexpected not existent baseDir for this layer '")
                                .append(baseDir.getAbsolutePath())
                                .append("'.\n If you want to build a new layer try using an ")
                                .append("existent or writeable baseDir and append a list of file to use to the addFile list.");
                        ActionExceptionHandler.handleError(getConfiguration(), this, msg.toString());
                        continue;
                    }
                } else {
                    final StringBuilder msg = new StringBuilder();
                    msg.append("Unexpected not existent baseDir for this layer '")
                            .append(baseDir.getAbsolutePath())
                            .append("'.\n If you want to build a new layer try using an ")
                            .append("existent or writeable baseDir and append a list of file to use to the addFile list.");
                    ActionExceptionHandler.handleError(getConfiguration(), this, msg.toString());
                    continue;
                }
            }

            // override local cmd null params with the getConfiguration()
            cmd.copyConfigurationIntoCommand(getConfiguration());

            // prepare configuration for layername and storename
            final String layerName;
            if (cmd.getLayerName() == null) {
                layerName = baseDir.getName();
                cmd.setLayerName(layerName);
            } else {
                layerName = cmd.getLayerName();
            }
            final String storeName;
            if (cmd.getStoreName() == null) {
                storeName = layerName;
                cmd.setStoreName(storeName);
            } else {
                storeName = cmd.getStoreName();
            }

            /**
             * HERE WE HAVE A 'cmd' COMMAND FILE WHICH MAY HAVE GETADDFILE
             * OR GETDELFILE !=NULL USING THOSE LIST WE MAY:<br>
             * DEL ->DELETE FROM THE DATASTORE AN IMAGE USING THE ABSOLUTE
             * PATH.<br>
             * ADD ->INSERT INTO THE DATASTORE AN IMAGE USING THE ABSOLUTE
             * PATH.<br>
             */
            // REST library read
            GeoServerRESTReader gsReader = new GeoServerRESTReader(cmd.getGeoserverURL(), cmd.getGeoserverUID(),
                    cmd.getGeoserverPWD());
            // REST library write
            final GeoServerRESTPublisher gsPublisher = new GeoServerRESTPublisher(cmd.getGeoserverURL(),
                    cmd.getGeoserverUID(), cmd.getGeoserverPWD());

            final String workspace = cmd.getDefaultNamespace() != null ? cmd.getDefaultNamespace() : "";

            /*
             * Check if ImageMosaic layer already exists...
             */
            final boolean layerExists;

            if (cmd.getIgnoreGeoServer()) {
                if (LOGGER.isInfoEnabled()) {
                    LOGGER.info(
                            "GeoServer will be ignored by configuration. Assuming that an updated is required. ");
                }
                layerExists = true;
            } else {
                final RESTLayer layer = gsReader.getLayer(layerName);
                layerExists = layer != null;
            }

            if (layerExists) {
                if (!updateMosaicLayer(cmd, baseDir, layerName, mosaicDescriptor, gsPublisher)) {
                    ActionExceptionHandler.handleError(getConfiguration(), this, "Mosaic not Updated...");
                    continue;
                }

            } else {
                if (!createMosaicLayer(cmd, baseDir, workspace, mosaicDescriptor, layerName, gsPublisher,
                        storeName)) {
                    ActionExceptionHandler.handleError(getConfiguration(), this, "Mosaic not Created...");
                    continue;
                }
            }

            /**
             * The returned file: - one for each event - .layer file - will
             * be added to the output queue
             */
            final File layerDescriptor;

            // generate a RETURN file and append it to the return queue
            // TODO get info about store and workspace name...
            layerDescriptor = ImageMosaicOutput.writeReturn(baseDir, baseDir, cmd);
            if (layerDescriptor != null) {
                LOGGER.info("Created layer descriptor file " + layerDescriptor);
                ret.add(new FileSystemEvent(layerDescriptor, FileSystemEventType.FILE_ADDED));
            }

        } // while

        listenerForwarder.completed();

        // ... setting up the appropriate event for the next action
        return ret;

    } catch (Exception t) {
        if (LOGGER.isErrorEnabled())
            LOGGER.error(t.getLocalizedMessage(), t);
        listenerForwarder.failed(t);
        throw new ActionException(this, t.getMessage(), t);
    }
}

From source file:candr.yoclip.Parser.java

/**
 * Creates a {@code ParsedOptionParameter} for the option parameter at the head of the queue. The parsed option will not contain an error if an
 * option value is missing. The parsed option will contain an error if the option appears to have an associated value and does not take a value.
 *
 * @param parameters The current queue of command parameters.
 * @return a parsed option parameter or {@code null} in the following cases.
 * <ul>
 * <li>The parameters queue is empty.</li>
 * <li>The head of the parameters queue is not an option.</li>
 * </ul>
 */
protected ParsedOption<T> getParsedOption(final Queue<String> parameters) {

    ParsedOption<T> parsedOptionParameter = null;

    final String parameter = parameters.peek();
    if (!StringUtils.isEmpty(parameter) && isOption(parameter)) {

        final String prefix = getParserOptions().getPrefix();
        final String separator = getParserOptions().getSeparator();
        final boolean isSeparatorWhitespace = StringUtils.isWhitespace(separator);

        final int separatorIndex = isSeparatorWhitespace ? -1 : parameter.indexOf(separator);
        final String optionParameterKey = parameter.substring(prefix.length(),
                separatorIndex < 0 ? parameter.length() : separatorIndex);
        final ParserOption<T> optionParameter = getParserOptions().get(optionParameterKey);
        if (null != optionParameter) {

            parameters.remove();

            // get the value if the option takes one
            if (optionParameter.hasValue()) {

                String value = null;
                if (isSeparatorWhitespace) {

                    if (parameters.size() > 0 && !isOption(parameters.peek())) {

                        // remove the value from the queue
                        value = parameters.remove();
                    }

                } else if (separatorIndex != -1) {

                    final int valueIndex = separatorIndex + 1;
                    if (valueIndex < parameter.length()) {
                        value = parameter.substring(valueIndex);
                    }
                }

                // The value can be null here, without it being an error condition, to facilitate actions later on
                // such as using a default.
                parsedOptionParameter = new ParsedOption<T>(optionParameter, value);

            } else if (separatorIndex > 1) {

                // if the separator is not white space and a value was present with the option parameter
                parsedOptionParameter = new ParsedOption<T>(optionParameter, null);
                parsedOptionParameter.setError("Does not take a value.");

            } else {

                // If the option does not take a value it must be a boolean so force it true
                parsedOptionParameter = new ParsedOption<T>(optionParameter, Boolean.TRUE.toString());
            }
        }
    }

    return parsedOptionParameter;
}

From source file:plaid.compilerjava.CompilerCore.java

public PackageRep buildPlaidPath(List<CompilationUnit> cus) throws Exception {
    //Build up a representation of plaidpath
    PackageRep plaidpath = new PackageRep("$TOPLEVEL$");
    Stack<File> directoryWorklist = new Stack<File>();
    for (String base : cc.getPlaidpath())
        handlePlaidPathEntry(base, plaidpath, directoryWorklist);

    //we want to remove the stuff we're trying to compile so that we don't make assumptions based on
    //the previous form of the source files
    //but also want a complete picture for resolving imports and thence QIs
    for (CompilationUnit c : cus) {
        String cPackage = c.getPackageString();
        for (Decl d : c.getDecls()) {
            String memberName = d.getName();
            if (plaidpath.memberExists(cPackage, memberName)) { //indicate that this is outdated and will be updated soon
                plaidpath.lookupMember(cPackage, memberName).startRecompilation();
            } else { //add shell for use in import resolution
                MemberRep newMem = null;
                if (d instanceof FieldDecl)
                    newMem = new FieldRep(memberName);
                else if (d instanceof MethodDecl)
                    newMem = new MethodRep(memberName);
                else if (d instanceof StateDecl)
                    newMem = new StateRep(memberName);
                else
                    throw new RuntimeException("New type of MemberRep not accounted for");

                //will be replaced later
                newMem.startRecompilation();
                plaidpath.addMember(cPackage, newMem);
            }

        }
    }

    Queue<StateRep> dependants = new LinkedList<StateRep>();
    for (CompilationUnit c : cus) {
        String cPackage = c.getPackageString();

        //expand imports
        List<String> declaredMembers = new ArrayList<String>(); //right now declared members are just those in the file, not the whole package
        for (Decl d : c.getDecls())
            declaredMembers.add(d.getName());
        c.getImports().checkAndExpandImports(plaidpath, declaredMembers, cPackage);

        //fill out plaidpath with declared members (shell info only)
        for (Decl d : c.getDecls()) {
            MemberRep rep = d.generateHeader(plaidpath, c.getImports(), cPackage);
            if (rep instanceof StateRep && ((StateRep) rep).hasNeeds()) {
                dependants.add((StateRep) rep); //keep track of ones we need to return to
            }
            plaidpath.addMember(cPackage, rep);
        }
    }

    while (!dependants.isEmpty()) {
        StateRep s = dependants.remove();
        List<String> newNeeds = new ArrayList<String>();
        for (String path : s.getNeeds()) {
            if (plaidpath.memberExists(path)) {
                MemberRep r = plaidpath.lookupMember(path);
                if (r instanceof StateRep) {
                    StateRep depState = (StateRep) r;
                    s.addMembers(depState.getMembers()); //TODO : make sure this still works after changing to list of MemberReps
                    newNeeds.addAll(depState.getNeeds());
                } else
                    throw new RuntimeException("Something went wrong with dependencies.");
            } else {
                throw new RuntimeException("Required Dependency " + path + " not found.");
            }
        }
        s.setNeeds(newNeeds); //replace old needs with the new needs
        if (s.hasNeeds())
            dependants.add(s);
    }

    return plaidpath;
}

From source file:org.apache.synapse.transport.nhttp.HttpCoreNIOListener.java

/**
 * Start specific endpoints given by the InetSocketAddress list
 *
 * @param endpointsClosed InetSocketAddresses of endpoints to be started
 * @throws AxisFault
 */
private void startSpecificEndpoints(List<InetSocketAddress> endpointsClosed) throws AxisFault {
    Queue<ListenerEndpoint> endpoints = new LinkedList<ListenerEndpoint>();

    // Ensure simple but stable order
    List<InetSocketAddress> addressList = endpointsClosed;
    Collections.sort(addressList, new Comparator<InetSocketAddress>() {

        public int compare(InetSocketAddress a1, InetSocketAddress a2) {
            String s1 = a1.toString();
            String s2 = a2.toString();
            return s1.compareTo(s2);
        }

    });

    for (InetSocketAddress address : addressList) {
        endpoints.add(ioReactor.listen(address));
    }

    // Wait for the endpoint to become ready, i.e. for the listener to start accepting
    // requests.
    while (!endpoints.isEmpty()) {
        ListenerEndpoint endpoint = endpoints.remove();
        try {
            endpoint.waitFor();
            if (log.isInfoEnabled()) {
                InetSocketAddress address = (InetSocketAddress) endpoint.getAddress();
                if (!address.isUnresolved()) {
                    log.info(name + " started on " + address.getHostName() + ":" + address.getPort());
                } else {
                    log.info(name + " started on " + address);
                }
            }
        } catch (InterruptedException e) {
            log.warn("Listener startup was interrupted");
            break;
        }
    }
}

From source file:it.geosolutions.geobatch.actions.commons.MoveAction.java

/**
 * Removes events from the input queue, moves the referenced files to the configured destination and puts the resulting events into the output queue
 */
public Queue<EventObject> execute(Queue<EventObject> events) throws ActionException {

    listenerForwarder.started();
    listenerForwarder.setTask("build the output absolute file name");

    // return
    final Queue<EventObject> ret = new LinkedList<EventObject>();

    listenerForwarder.setTask("Building/getting the root data structure");

    boolean moveMultipleFile;
    final int size = events.size();
    if (size == 0) {
        throw new ActionException(this, "Empty file list");
    } else if (size > 1) {
        moveMultipleFile = true;
    } else {
        moveMultipleFile = false;
    }
    if (conf.getDestination() == null) {
        throw new IllegalArgumentException("Unable to work with a null dest dir");
    }
    if (!conf.getDestination().isAbsolute()) {
        conf.setDestination(new File(this.getConfigDir(), conf.getDestination().getPath()));
        if (LOGGER.isWarnEnabled()) {
            LOGGER.warn("Destination is not an absolute path. Absolutizing destination using temp dir: "
                    + conf.getDestination());
        }
    }

    boolean moveToDir;
    if (!conf.getDestination().isDirectory()) {
        // TODO LOG
        moveToDir = false;
        if (moveMultipleFile) {
            throw new ActionException(this,
                    "Unable to run on multiple file with an output file, use directory instead");
        }
    } else {
        moveToDir = true;
    }

    while (!events.isEmpty()) {
        listenerForwarder.setTask("Generating the output");

        final EventObject event = events.remove();
        if (event == null) {
            // TODO LOG
            continue;
        }
        if (event instanceof FileSystemEvent) {
            File source = ((FileSystemEvent) event).getSource();
            File dest;
            listenerForwarder.setTask("moving to destination");
            if (moveToDir) {
                dest = conf.getDestination();
                try {
                    FileUtils.moveFileToDirectory(source, dest, true);
                } catch (IOException e) {
                    throw new ActionException(this, e.getLocalizedMessage());
                }
            } else if (moveMultipleFile) {
                dest = new File(conf.getDestination(), source.getPath());
                try {
                    FileUtils.moveFile(source, dest);
                } catch (IOException e) {
                    throw new ActionException(this, e.getLocalizedMessage());
                }
            } else {
                // LOG continue
                continue;
            }

            // add the file to the return
            ret.add(new FileSystemEvent(dest, FileSystemEventType.FILE_ADDED));
        }
    }

    listenerForwarder.completed();
    return ret;
}

From source file:com.todoroo.astrid.actfm.sync.ActFmSyncService.java

private void initializeRetryRunnable() {
    pushRetryRunnable = new Runnable() {
        public void run() {
            while (true) {
                AndroidUtilities.sleepDeep(TIME_BETWEEN_TRIES);
                if (failedPushes.isEmpty()) {
                    synchronized (ActFmSyncService.this) {
                        pushRetryThread = null;
                        return;
                    }
                }
                if (failedPushes.size() > 0) {
                    // Copy into a second queue so we don't end up infinitely retrying in the same loop
                    Queue<FailedPush> toTry = new LinkedList<FailedPush>();
                    while (failedPushes.size() > 0) {
                        toTry.add(failedPushes.remove(0));
                    }
                    while (!toTry.isEmpty() && !actFmPreferenceService.isOngoing()) {
                        FailedPush pushOp = toTry.remove();
                        switch (pushOp.pushType) {
                        case PUSH_TYPE_TASK:
                            pushTask(pushOp.itemId);
                            break;
                        case PUSH_TYPE_TAG:
                            pushTag(pushOp.itemId);
                            break;
                        case PUSH_TYPE_UPDATE:
                            pushUpdate(pushOp.itemId);
                            break;
                        }
                    }
                }
            }
        }
    };
}