Example usage for java.util Queue isEmpty

List of usage examples for java.util Queue isEmpty

Introduction

On this page you can find example usage for java.util Queue isEmpty.

Prototype

boolean isEmpty();

Document

Returns true if this collection contains no elements.
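
Before the real-world examples below, here is a minimal self-contained sketch (not taken from any of the projects listed) showing the common drain pattern: poll elements until isEmpty() returns true.

import java.util.LinkedList;
import java.util.Queue;

public class QueueIsEmptyExample {
    public static void main(String[] args) {
        // Assumed sample data; any Queue implementation behaves the same way here.
        Queue<String> queue = new LinkedList<>();
        queue.add("first");
        queue.add("second");

        // Drain the queue: isEmpty() returns true once no elements remain.
        while (!queue.isEmpty()) {
            System.out.println(queue.poll());
        }

        System.out.println("Queue is now empty: " + queue.isEmpty());
    }
}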

Usage

From source file:it.geosolutions.geobatch.opensdi.ndvi.NDVIIngestAction.java

public Queue<EventObject> execute(Queue<EventObject> events) throws ActionException {

    listenerForwarder.progressing(1f, "Check config");

    listenerForwarder.started();

    NDVIIngestConfiguration configuration = getConfiguration();
    if (configuration == null) {
        throw new IllegalStateException("ActionConfig is null.");
    }

    //        List<File> ndviFiles = new ArrayList<File>();
    Map<File, Calendar[]> inputFiles = new TreeMap<File, Calendar[]>();

    while (!events.isEmpty()) {
        EventObject event = events.poll();
        if (event instanceof FileSystemEvent) {
            FileSystemEvent fse = (FileSystemEvent) event;
            File source = fse.getSource();
            if (!source.exists()) {
                LOGGER.error("File does not exist: " + source);
                continue;
            }
            Calendar interval[];
            try {
                interval = parseDekDate(source.getName());
            } catch (ActionException e) {
                LOGGER.error("Error parsing source name: " + e.getMessage());
                continue;
            }

            inputFiles.put(source, interval);
        } else {
            throw new ActionException(this, "EventObject not handled " + event);
        }
    }
    listenerForwarder.progressing(10f, "Process file");
    ImageMosaicCommand imc = processFiles(inputFiles);

    LinkedList<EventObject> ret = new LinkedList<EventObject>();
    ret.add(new EventObject(imc));
    return ret;
}

From source file:it.unibo.alchemist.model.implementations.environments.AbstractEnvironment.java

/**
 * After a node movement, recomputes the neighborhood, also notifying the
 * running simulation about the modifications. This allows movement actions
 * to be defined as LOCAL (they would normally be considered GLOBAL).
 *
 * @param node
 *            the node that has been moved
 */
protected final void updateNeighborhood(final Node<T> node) {
    /*
     * The following optimization allows the context of reactions that
     * actually include a move to be defined as local, even though they
     * would normally be considered global. This is because for each node
     * that is detached, all the dependencies are updated, ensuring
     * soundness.
     */
    if (Objects.requireNonNull(rule).isLocallyConsistent()) {
        final Neighborhood<T> newNeighborhood = rule.computeNeighborhood(Objects.requireNonNull(node), this);
        final Neighborhood<T> oldNeighborhood = neighCache.put(node.getId(), newNeighborhood);
        if (oldNeighborhood != null) {
            final Iterator<Node<T>> iter = oldNeighborhood.iterator();
            while (iter.hasNext()) {
                final Node<T> neighbor = iter.next();
                if (!newNeighborhood.contains(neighbor)) {
                    /*
                     * Neighbor lost
                     */
                    iter.remove();
                    final Neighborhood<T> neighborsNeighborhood = neighCache.get(neighbor.getId());
                    neighborsNeighborhood.removeNeighbor(node);
                    ifEngineAvailable(s -> s.neighborRemoved(node, neighbor));
                }
            }
        }
        for (final Node<T> n : newNeighborhood) {
            if (oldNeighborhood == null || !oldNeighborhood.contains(n)) {
                /*
                 * If it's a new neighbor
                 */
                neighCache.get(n.getId()).addNeighbor(node);
                ifEngineAvailable(s -> s.neighborAdded(node, n));
            }
        }
    } else {
        final Queue<Operation> operations = recursiveOperation(node);
        final TIntSet processed = new TIntHashSet(getNodesNumber());
        processed.add(node.getId());
        while (!operations.isEmpty()) {
            final Operation next = operations.poll();
            final Node<T> dest = next.destination;
            final int destId = dest.getId();
            if (!processed.contains(destId)) {
                operations.addAll(recursiveOperation(next.origin, next.destination, next.isAdd));
                processed.add(destId);
            }
        }
    }
}

From source file:org.apache.gobblin.example.wikipedia.WikipediaExtractor.java

public WikipediaExtractor(WorkUnitState workUnitState) throws IOException {

    this.workUnitState = workUnitState;
    this.rootUrl = readProp(WIKIPEDIA_API_ROOTURL, workUnitState);
    this.schema = readProp(WIKIPEDIA_AVRO_SCHEMA, workUnitState);

    this.batchSize = 5;
    this.requestedTitle = workUnitState.getProp(ConfigurationKeys.DATASET_URN_KEY);

    this.baseQuery = ImmutableMap.<String, String>builder().put("format", "json").put("action", "query")
            .put("prop", "revisions").build();

    HttpClientConfiguratorLoader httpClientConfiguratorLoader = new HttpClientConfiguratorLoader(workUnitState);
    this.httpClientConfigurator = httpClientConfiguratorLoader.getConfigurator();
    this.httpClientConfigurator.setStatePropertiesPrefix(HTTP_CLIENT_CONFIG_PREFIX).configure(workUnitState);

    try {
        Queue<JsonElement> lastRevision = retrievePageRevisions(
                ImmutableMap.<String, String>builder().putAll(this.baseQuery).put("rvprop", "ids")
                        .put("titles", this.requestedTitle).put("rvlimit", "1").build());
        this.lastRevisionId = lastRevision.isEmpty() ? -1 : parseRevision(lastRevision.poll());
    } catch (URISyntaxException use) {
        throw new IOException(use);
    }

    long baseRevision = workUnitState.getWorkunit().getLowWatermark(LongWatermark.class, new Gson()).getValue();
    if (baseRevision < 0) {
        try {
            baseRevision = createLowWatermarkForBootstrap(workUnitState);
        } catch (IOException ioe) {
            baseRevision = this.lastRevisionId;
        }
    }
    this.reader = new WikiResponseReader(baseRevision);

    workUnitState.setActualHighWatermark(new LongWatermark(this.lastRevisionId));
    this.currentBatch = new LinkedList<>();

    LOG.info(String.format("Will pull revisions %s to %s for page %s.", this.reader.lastPulledRevision,
            this.lastRevisionId, this.requestedTitle));

    this.maxRevisionsPulled = workUnitState.getPropAsInt(MAX_REVISION_PER_PAGE, DEFAULT_MAX_REVISIONS_PER_PAGE);
}

From source file:com.drunkendev.io.recurse.tests.RecursionTest.java

/**
 * Answer provided by benroth.
 *
 * Uses a {@link Queue} to hold directory references while traversing until
 * the queue becomes empty. Uses the {@link java.io.File} API.
 *
 * @see     <a href="http://stackoverflow.com/a/10814316/140037">Stack-Overflow answer by benroth</a>
 */
//    @Test
public void testQueue() {
    System.out.println("\nTEST: listFiles - Queue");
    time(() -> {
        Queue<File> dirsq = new LinkedList<>();
        dirsq.add(startPath.toFile());
        int files = 0;
        int dirs = 0;
        try {
            dirs++; // to count the initial dir.
            while (!dirsq.isEmpty()) {
                for (File f : dirsq.poll().listFiles()) {
                    if (isPlainDir(f)) {
                        dirsq.add(f);
                        dirs++;
                    } else if (f.isFile()) {
                        files++;
                    }
                }
            }
            System.out.format("Files: %d, dirs: %d. ", files, dirs);
        } catch (IOException ex) {
            fail(ex.getMessage());
        }
    });
}

From source file:org.wso2.carbon.registry.eventing.RegistryEventDispatcher.java

private void addToDigestEntryQueue(Map<String, List<DigestEntry>> digestEntries, String digestType) {
    Queue<DigestEntry> digestQueue = getDigestQueue(digestType);
    while (!digestQueue.isEmpty()) {
        DigestEntry entry = digestQueue.poll();
        String endpoint = entry.getEndpoint();
        List<DigestEntry> digestEntriesForEndpoint;
        if (!digestEntries.containsKey(endpoint)) {
            digestEntriesForEndpoint = new LinkedList<DigestEntry>();
            digestEntries.put(endpoint, digestEntriesForEndpoint);
        } else {
            digestEntriesForEndpoint = digestEntries.get(endpoint);
        }
        digestEntriesForEndpoint.add(entry);
    }
}

From source file:eu.stratosphere.nephele.taskmanager.runtime.RuntimeInputChannelContext.java

@Override
public void destroy() {
    final Queue<Buffer> buffersToRecycle = new ArrayDeque<Buffer>();

    synchronized (this.queuedEnvelopes) {
        this.destroyCalled = true;

        while (!this.queuedEnvelopes.isEmpty()) {
            final TransferEnvelope envelope = this.queuedEnvelopes.poll();
            if (envelope.getBuffer() != null) {
                buffersToRecycle.add(envelope.getBuffer());
            }
        }
    }

    while (!buffersToRecycle.isEmpty()) {
        buffersToRecycle.poll().recycleBuffer();
    }
}

From source file:org.openengsb.labs.paxexam.karaf.container.internal.KarafTestContainer.java

/**
 * Since we might get quite deep, use a simple breadth-first search algorithm.
 */
private File searchKarafBase(File targetFolder) {
    Queue<File> searchNext = new LinkedList<File>();
    searchNext.add(targetFolder);
    while (!searchNext.isEmpty()) {
        File head = searchNext.poll();
        if (!head.isDirectory()) {
            continue;
        }
        boolean system = false;
        boolean etc = false;
        for (File file : head.listFiles()) {
            if (file.isDirectory() && file.getName().equals("system")) {
                system = true;
            }
            if (file.isDirectory() && file.getName().equals("etc")) {
                etc = true;
            }
        }
        if (system && etc) {
            return head;
        }
        searchNext.addAll(Arrays.asList(head.listFiles()));
    }
    throw new IllegalStateException("No karaf base dir found in extracted distribution.");
}

From source file:com.microsoft.office.plugin.MetadataMojo.java

/**
 * Generates a single entity set.
 * 
 * @param schema Schema which an entity set will be related to.
 * @param base Base directory for output classes.
 * @param pkg Package.
 * @param objs Objects to be put into velocity context.
 * @param generatedEntitySets Holds information about whether an entity set for a given type has already been generated, to prevent generating it more than once.
 * @param entitySetNames Maps each entity type to its set name.
 * @param type Fully qualified entity type (contains schema namespace and class name).
 * @param paths Maps each entity type to the path of its set relative to the service root.
 * @param additionalSets List of all sets that were generated.
 * @throws MojoExecutionException
 */
private void generateEntitySet(Schema schema, final File base, final String pkg, final Map<String, Object> objs,
        Map<String, Boolean> generatedEntitySets, Map<String, String> entitySetNames, String type,
        Map<String, String> paths, List<EntitySet> additionalSets) throws MojoExecutionException {
    Queue<String> typesQueue = new LinkedList<String>();

    for (NavigationProperty np : schema.getEntityType(utility.getNameFromNS(type)).getNavigationProperties()) {
        addTypeToQueue(entitySetNames, type, paths, typesQueue, np);
    }

    while (!typesQueue.isEmpty()) {
        String currentType = typesQueue.poll();
        if (generatedEntitySets.get(currentType)) {
            continue;
        }

        EntitySet generatedSet = new EntitySet();
        generatedSet.setEntityType(utility.getNameInNamespace(currentType));
        String name = entitySetNames.get(currentType);
        generatedSet.setName(name);
        additionalSets.add(generatedSet);

        objs.clear();
        objs.put("entitySet", generatedSet);

        EntityType currentEntityType = schema.getEntityType(currentType);
        while (true) {
            for (NavigationProperty np : currentEntityType.getNavigationProperties()) {
                addTypeToQueue(entitySetNames, currentType, paths, typesQueue, np);
            }
            if (currentEntityType.getBaseType() != null) {
                currentEntityType = schema
                        .getEntityType(utility.getNameFromNS(currentEntityType.getBaseType()));
            } else {
                break;
            }
        }

        /******************************* EXCHANGE-SPECIFIC ******************************************/
        // As we know from the spec, we cannot directly create a message inside /Me/Messages;
        // we must create it inside /Me/path/to/some/folder/Messages.
        // the path may be one of:
        // 1. Predefined folder name - as described in metadata in navigation properties of User entity
        // example: Inbox -> /Me/Inbox/Messages
        // 2. Folder with given id
        // example: Folders('abc') -> /Me/Folders('abc')/Messages
        // 3. A child folder (may be recursively)
        // example: Folders('abc')/ChildFolders('xyz') -> /Me/Folders('abc')/ChildFolders('xyz')/Messages

        if (name.equals("Messages")) {
            objs.put("pathToSet", "Me/");
            objs.put("createPath", "Me/%s/Messages");
            objs.put("overridePath", true);
        } else if (name.equals("Events")) {
            objs.put("pathToSet", "Me/");
            objs.put("createPath", "Me/Calendars('%s')/Events");
            objs.put("overridePath", true);
        }

        if (!paths.get(currentType).equals("")) {
            objs.put("pathToSet", paths.get(currentType));
        }
        if (utility.capitalize(name).equals("Folders")) {
            objs.put("userType", schema.getEntityType("User"));
        }

        /*************************** END OF EXCHANGE-SPECIFIC BLOCK ********************************/
        if (generateInterfaces) {
            parseObj(base, pkg, "entitySet", "I" + utility.capitalize(name) + ".java", objs);
        }
        if (generateClasses) {
            parseObj(base, pkg, "entitySetImpl", utility.capitalize(name) + ".java", objs);
        }
        generatedEntitySets.put(currentType, true);
    }
}

From source file:org.apache.karaf.tooling.exam.container.internal.KarafTestContainer.java

/**
 * Since we might get quite deep, use a simple breadth-first search algorithm.
 */
private File searchKarafBase(File _targetFolder) {
    Queue<File> searchNext = new LinkedList<File>();
    searchNext.add(_targetFolder);
    while (!searchNext.isEmpty()) {
        File head = searchNext.poll();
        if (!head.isDirectory()) {
            continue;
        }
        boolean isSystem = false;
        boolean etc = false;
        for (File file : head.listFiles()) {
            if (file.isDirectory() && file.getName().equals("system")) {
                isSystem = true;
            }
            if (file.isDirectory() && file.getName().equals("etc")) {
                etc = true;
            }
        }
        if (isSystem && etc) {
            return head;
        }
        searchNext.addAll(Arrays.asList(head.listFiles()));
    }
    throw new IllegalStateException("No karaf base dir found in extracted distribution.");
}