Example usage for java.util Queue poll

Introduction

This page collects example usages of java.util.Queue.poll() from open-source projects.

Prototype

E poll();

Document

Retrieves and removes the head of this queue, or returns null if this queue is empty.
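
For reference, here is a minimal standalone sketch (not taken from any of the projects below) of the null-on-empty contract that the usage examples rely on:

import java.util.LinkedList;
import java.util.Queue;

public class QueuePollDemo {
    public static void main(String[] args) {
        Queue<String> queue = new LinkedList<String>();
        queue.add("first");
        queue.add("second");

        // poll() retrieves and removes the head, or returns null when the queue is empty.
        System.out.println(queue.poll()); // first
        System.out.println(queue.poll()); // second
        System.out.println(queue.poll()); // null (no exception on an empty queue)

        // By contrast, remove() throws NoSuchElementException when the queue is empty.
    }
}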

Usage

From source file:eu.stratosphere.nephele.jobmanager.splitassigner.file.FileInputSplitList.java

/**
 * Returns the next file input split to be consumed by the given instance. The returned input split is selected in a
 * way that the distance between the split's storage location and the requesting {@link AbstractInstance} is as
 * short as possible.
 * 
 * @param instance
 *        the instance requesting the next file input split
 * @return the next input split to be consumed by the given instance or <code>null</code> if all input splits have
 *         already been consumed.
 */
synchronized FileInputSplit getNextInputSplit(final AbstractInstance instance) {

    final Queue<QueueElem> instanceSplitList = getInstanceSplitList(instance);

    while (true) {

        final QueueElem candidate = instanceSplitList.poll();
        if (candidate == null) {
            return null;
        }

        if (this.masterSet.remove(candidate.getInputSplit())) {
            if (LOG.isInfoEnabled()) {
                if (candidate.distance == 0) {
                    LOG.info(instance + " receives local file input split");
                } else {
                    LOG.info(instance + " receives remote file input split (distance " + candidate.distance
                            + ")");
                }
            }
            return candidate.getInputSplit();
        }

        if (this.masterSet.isEmpty()) {
            return null;
        }
    }
}

From source file:ubic.gemma.job.progress.ProgressStatusServiceImpl.java

@Override
public synchronized List<ProgressData> getProgressStatus(String taskId) {
    if (taskId == null)
        throw new IllegalArgumentException("task id cannot be null");
    SubmittedTask task = taskRunningService.getSubmittedTask(taskId);

    List<ProgressData> statusObjects = new Vector<ProgressData>();

    if (task == null) {
        log.warn("It looks like job " + taskId + " has gone missing; assuming it is dead or finished already");

        // We should assume it is dead.
        ProgressData data = new ProgressData();
        data.setTaskId(taskId);
        data.setDone(true);
        data.setDescription("The job has gone missing; it has already finished or failed.");
        statusObjects.add(data);

        return statusObjects;
    }

    assert task.getTaskId() != null;
    assert task.getTaskId().equals(taskId);

    Queue<String> updates = task.getProgressUpdates();
    String progressMessage = "";
    while (!updates.isEmpty()) {
        String update = updates.poll();
        progressMessage += update + "\n";
    }

    if (task.isDone()) {
        ProgressData data;
        if (task.getStatus() == SubmittedTask.Status.COMPLETED) {
            log.debug("Job " + taskId + " is done");
            data = new ProgressData(taskId, 1, progressMessage + "Done!", true);
        } else if (task.getStatus() == SubmittedTask.Status.FAILED) {
            data = new ProgressData(taskId, 1, progressMessage + "Failed!", true);
            data.setFailed(true);
        } else {
            data = new ProgressData(taskId, 1, progressMessage + "Possibly canceled.", true);
        }
        statusObjects.add(data);
    } else {
        statusObjects.add(new ProgressData(taskId, 1, progressMessage, false));
    }

    return statusObjects;
}
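
The isEmpty()/poll() pair above can also be written as a single poll-driven loop. A minimal sketch of that variant (not part of the Gemma source), which avoids the separate emptiness check on a possibly concurrent queue:

Queue<String> updates = task.getProgressUpdates();
String progressMessage = "";
String update;
// poll() returns null once the queue is drained, so it can terminate the loop directly.
while ((update = updates.poll()) != null) {
    progressMessage += update + "\n";
}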

From source file:eu.stratosphere.nephele.jobmanager.splitassigner.LocatableInputSplitList.java

/**
 * Returns the next locatable input split to be consumed by the given instance. The returned input split is selected
 * in a
 * way that the distance between the split's storage location and the requesting {@link AbstractInstance} is as
 * short as possible.
 * 
 * @param instance
 *        the instance requesting the next file input split
 * @return the next input split to be consumed by the given instance or <code>null</code> if all input splits have
 *         already been consumed.
 */
synchronized LocatableInputSplit getNextInputSplit(final AbstractInstance instance) {

    final Queue<QueueElem> instanceSplitList = getInstanceSplitList(instance);

    while (true) {

        final QueueElem candidate = instanceSplitList.poll();
        if (candidate == null) {
            return null;
        }

        if (this.masterSet.remove(candidate.getInputSplit())) {
            if (LOG.isInfoEnabled()) {
                if (candidate.distance == 0) {
                    LOG.info(instance + " receives local file input split");
                } else {
                    LOG.info(instance + " receives remote file input split (distance " + candidate.distance
                            + ")");
                }
            }
            return candidate.getInputSplit();
        }

        if (this.masterSet.isEmpty()) {
            return null;
        }
    }
}

From source file:org.phenotips.solr.HPOScriptService.java

/**
 * Get the HPO IDs of the specified phenotype and all its ancestors.
 *
 * @param id the HPO identifier to search for, in the {@code HP:1234567} format
 * @return the full set of ancestors-or-self IDs, or an empty set if the requested ID was not found in the index
 */
@SuppressWarnings("unchecked")
public Set<String> getAllAncestorsAndSelfIDs(final String id) {
    Set<String> results = new HashSet<String>();
    Queue<SolrDocument> nodes = new LinkedList<SolrDocument>();
    SolrDocument crt = this.get(id);
    if (crt == null) {
        return results;
    }
    nodes.add(crt);
    while (!nodes.isEmpty()) {
        crt = nodes.poll();
        results.add(String.valueOf(crt.get(ID_FIELD_NAME)));
        Object rawParents = crt.get("is_a");
        if (rawParents == null) {
            continue;
        }
        List<String> parents;
        if (rawParents instanceof String) {
            parents = Collections.singletonList(String.valueOf(rawParents));
        } else {
            parents = (List<String>) rawParents;
        }
        for (String pid : parents) {
            nodes.add(this.get(StringUtils.substringBefore(pid, " ")));
        }
    }
    return results;
}

From source file:org.apache.flink.runtime.jobmanager.splitassigner.file.FileInputSplitList.java

/**
 * Returns the next file input split to be consumed by the given instance. The returned input split is selected in a
 * way that the distance between the split's storage location and the requesting {@link org.apache.flink.runtime.instance.Instance} is as
 * short as possible.
 * 
 * @param instance
 *        the instance requesting the next file input split
 * @return the next input split to be consumed by the given instance or <code>null</code> if all input splits have
 *         already been consumed.
 */
synchronized FileInputSplit getNextInputSplit(final Instance instance) {

    final Queue<QueueElem> instanceSplitList = getInstanceSplitList(instance);

    while (true) {

        final QueueElem candidate = instanceSplitList.poll();
        if (candidate == null) {
            return null;
        }

        if (this.masterSet.remove(candidate.getInputSplit())) {
            if (LOG.isInfoEnabled()) {
                if (candidate.distance == 0) {
                    LOG.info(instance + " receives local file input split");
                } else {
                    LOG.info(instance + " receives remote file input split (distance " + candidate.distance
                            + ")");
                }
            }
            return candidate.getInputSplit();
        }

        if (this.masterSet.isEmpty()) {
            return null;
        }
    }
}

From source file:com.cognifide.aet.executor.SuiteExecutor.java

/**
 * Returns the status of test suite processing.
 *
 * @param correlationId
 * @return status of the test suite run identified by provided correlation ID
 */
public SuiteStatusResult getExecutionStatus(String correlationId) {
    SuiteStatusResult result = null;

    Queue<SuiteStatusResult> statusQueue = suiteStatusCache.getIfPresent(correlationId);
    if (statusQueue != null) {
        result = statusQueue.poll();
        if (result == null) {
            result = new SuiteStatusResult(ProcessingStatus.UNKNOWN);
        }
    }

    return result;
}

From source file:org.apache.flink.runtime.jobmanager.splitassigner.LocatableInputSplitList.java

/**
 * Returns the next locatable input split to be consumed by the given instance. The returned input split is selected
 * in a
 * way that the distance between the split's storage location and the requesting {@link org.apache.flink.runtime.instance.Instance} is as
 * short as possible.
 * 
 * @param instance
 *        the instance requesting the next file input split
 * @return the next input split to be consumed by the given instance or <code>null</code> if all input splits have
 *         already been consumed.
 */
synchronized LocatableInputSplit getNextInputSplit(final Instance instance) {

    final Queue<QueueElem> instanceSplitList = getInstanceSplitList(instance);

    while (true) {

        final QueueElem candidate = instanceSplitList.poll();
        if (candidate == null) {
            return null;
        }

        if (this.masterSet.remove(candidate.getInputSplit())) {
            if (LOG.isInfoEnabled()) {
                if (candidate.distance == 0) {
                    LOG.info(instance + " receives local file input split");
                } else {
                    LOG.info(instance + " receives remote file input split (distance " + candidate.distance
                            + ")");
                }
            }
            return candidate.getInputSplit();
        }

        if (this.masterSet.isEmpty()) {
            return null;
        }
    }
}

From source file:org.wso2.carbon.dataservices.core.DBUtils.java

/**
 * This method is used to embed syntaxes associated with UDT attribute notations to
 * a queue of string tokens extracted from a UDT parameter.
 *
 * @param tokens      Queue of string tokens
 * @param syntaxQueue Syntax embedded tokens
 * @param isIndex     Flag to determine whether a particular string token is an index
 *                    or a column name
 */
public static void getSyntaxEmbeddedQueue(Queue<String> tokens, Queue<String> syntaxQueue, boolean isIndex) {
    if (!tokens.isEmpty()) {
        if ("[".equals(tokens.peek())) {
            isIndex = true;
            tokens.poll();
            syntaxQueue.add("INEDX_START");
            syntaxQueue.add(tokens.poll());
        } else if ("]".equals(tokens.peek())) {
            isIndex = false;
            tokens.poll();
            syntaxQueue.add("INDEX_END");
        } else if (".".equals(tokens.peek())) {
            tokens.poll();
            syntaxQueue.add("DOT");
            syntaxQueue.add("COLUMN");
            syntaxQueue.add(tokens.poll());
        } else {
            if (isIndex) {
                syntaxQueue.add("INDEX");
                syntaxQueue.add(tokens.poll());
            } else {
                syntaxQueue.add("COLUMN");
                syntaxQueue.add(tokens.poll());
            }
        }
        getSyntaxEmbeddedQueue(tokens, syntaxQueue, isIndex);
    }
}
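
A hypothetical invocation sketch follows (the token values are illustrative assumptions, not taken from the Data Services source, and it assumes the WSO2 data-services module is on the classpath). It shows poll() draining the token queue while the syntax markers from the method above are emitted:

import java.util.Arrays;
import java.util.LinkedList;
import java.util.Queue;
import org.wso2.carbon.dataservices.core.DBUtils;

public class UdtTokenDemo {
    public static void main(String[] args) {
        // Tokens as they might be split from a UDT parameter reference such as address[0].city
        Queue<String> tokens = new LinkedList<String>(Arrays.asList("address", "[", "0", "]", ".", "city"));
        Queue<String> syntaxQueue = new LinkedList<String>();
        DBUtils.getSyntaxEmbeddedQueue(tokens, syntaxQueue, false);
        // Expected contents: COLUMN, address, INEDX_START (spelled as in the source), 0, INDEX_END, DOT, COLUMN, city
        System.out.println(syntaxQueue);
    }
}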

From source file:com.github.rvesse.airline.model.MetadataLoader.java

public static <C> GlobalMetadata<C> loadGlobal(Class<?> cliClass) {
    Annotation annotation = cliClass.getAnnotation(com.github.rvesse.airline.annotations.Cli.class);
    if (annotation == null)
        throw new IllegalArgumentException(
                String.format("Class %s does not have the @Cli annotation", cliClass));

    com.github.rvesse.airline.annotations.Cli cliConfig = (com.github.rvesse.airline.annotations.Cli) annotation;

    // Prepare commands
    CommandMetadata defaultCommand = null;
    if (!cliConfig.defaultCommand().equals(com.github.rvesse.airline.annotations.Cli.NO_DEFAULT.class)) {
        defaultCommand = loadCommand(cliConfig.defaultCommand());
    }
    List<CommandMetadata> defaultGroupCommands = new ArrayList<CommandMetadata>();
    for (Class<?> cls : cliConfig.commands()) {
        defaultGroupCommands.add(loadCommand(cls));
    }

    // Prepare parser configuration
    ParserMetadata<C> parserConfig = cliConfig.parserConfiguration() != null
            ? MetadataLoader.<C>loadParser(cliConfig.parserConfiguration())
            : MetadataLoader.<C>loadParser(cliClass);

    // Prepare restrictions
    // We find restrictions in the following order:
    // 1 - Those declared via annotations
    // 2 - Those declared via the restrictions field of the @Cli annotation
    // 3 - Standard restrictions if the includeDefaultRestrictions field of
    // the @Cli annotation is true
    List<GlobalRestriction> restrictions = new ArrayList<GlobalRestriction>();
    for (Class<? extends Annotation> annotationClass : RestrictionRegistry
            .getGlobalRestrictionAnnotationClasses()) {
        annotation = cliClass.getAnnotation(annotationClass);
        if (annotation == null)
            continue;
        GlobalRestriction restriction = RestrictionRegistry.getGlobalRestriction(annotationClass, annotation);
        if (restriction != null)
            restrictions.add(restriction);
    }
    for (Class<? extends GlobalRestriction> cls : cliConfig.restrictions()) {
        restrictions.add(ParserUtil.createInstance(cls));
    }
    if (cliConfig.includeDefaultRestrictions()) {
        restrictions.addAll(AirlineUtils.arrayToList(GlobalRestriction.DEFAULTS));
    }

    // Prepare groups
    // We sort sub-groups by name length then lexically
    // This means that when we build the groups hierarchy we'll ensure we
    // build the parent groups first wherever possible
    Map<String, CommandGroupMetadata> subGroups = new TreeMap<String, CommandGroupMetadata>(
            new StringHierarchyComparator());
    List<CommandGroupMetadata> groups = new ArrayList<CommandGroupMetadata>();
    for (Group groupAnno : cliConfig.groups()) {
        String groupName = groupAnno.name();
        String subGroupPath = null;
        if (StringUtils.containsWhitespace(groupName)) {
            // Normalize the path
            subGroupPath = StringUtils.join(StringUtils.split(groupAnno.name()), ' ');
        }

        // Maybe a top level group we've already seen
        CommandGroupMetadata group = CollectionUtils.find(groups, new GroupFinder(groupName));
        if (group == null) {
            // Maybe a sub-group we've already seen
            group = subGroups.get(subGroupPath);
        }

        List<CommandMetadata> groupCommands = new ArrayList<CommandMetadata>();
        for (Class<?> cls : groupAnno.commands()) {
            groupCommands.add(loadCommand(cls));
        }

        if (group == null) {
            // Newly discovered group
            //@formatter:off
            group = loadCommandGroup(subGroupPath != null ? subGroupPath : groupName, groupAnno.description(),
                    groupAnno.hidden(), Collections.<CommandGroupMetadata>emptyList(),
                    !groupAnno.defaultCommand().equals(Group.NO_DEFAULT.class)
                            ? loadCommand(groupAnno.defaultCommand())
                            : null,
                    groupCommands);
            //@formatter:on
            if (subGroupPath == null) {
                groups.add(group);
            } else {
                // Remember sub-groups for later
                subGroups.put(subGroupPath, group);
            }
        } else {
            for (CommandMetadata cmd : groupCommands) {
                group.addCommand(cmd);
            }
        }
    }
    // Build sub-group hierarchy
    buildGroupsHierarchy(groups, subGroups);

    // Find all commands
    List<CommandMetadata> allCommands = new ArrayList<CommandMetadata>();
    allCommands.addAll(defaultGroupCommands);
    if (defaultCommand != null && !defaultGroupCommands.contains(defaultCommand)) {
        allCommands.add(defaultCommand);
    }
    for (CommandGroupMetadata group : groups) {
        allCommands.addAll(group.getCommands());
        if (group.getDefaultCommand() != null) {
            allCommands.add(group.getDefaultCommand());
        }

        Queue<CommandGroupMetadata> subGroupsQueue = new LinkedList<CommandGroupMetadata>();
        subGroupsQueue.addAll(group.getSubGroups());
        while (subGroupsQueue.size() > 0) {
            CommandGroupMetadata subGroup = subGroupsQueue.poll();
            allCommands.addAll(subGroup.getCommands());
            if (subGroup.getDefaultCommand() != null)
                allCommands.add(subGroup.getDefaultCommand());
            subGroupsQueue.addAll(subGroup.getSubGroups());
        }
    }

    // Post-process to find possible further group assignments
    loadCommandsIntoGroupsByAnnotation(allCommands, groups, defaultGroupCommands);

    return loadGlobal(cliConfig.name(), cliConfig.description(), defaultCommand, defaultGroupCommands, groups,
            restrictions, parserConfig);
}
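
The sub-group traversal inside the loop above can equivalently be driven by poll() itself. A minimal sketch of that variant (not part of the Airline source), using only the accessors already shown:

Queue<CommandGroupMetadata> subGroupsQueue = new LinkedList<CommandGroupMetadata>(group.getSubGroups());
CommandGroupMetadata subGroup;
// poll() yields null once every nested sub-group has been visited.
while ((subGroup = subGroupsQueue.poll()) != null) {
    allCommands.addAll(subGroup.getCommands());
    if (subGroup.getDefaultCommand() != null) {
        allCommands.add(subGroup.getDefaultCommand());
    }
    subGroupsQueue.addAll(subGroup.getSubGroups());
}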

From source file:geotag.example.sbickt.SbicktAPITest.java

@Test
public void testGetGeoTags() {
    Queue<GeoTag> listOfGeoTags = new LinkedList<GeoTag>();

    try {
        listOfGeoTags = SbicktAPI.getGeoTags(new Point3D(2.548, 2.548, 0));

        assertNotNull(listOfGeoTags);

        while (!listOfGeoTags.isEmpty()) {
            listOfGeoTags.poll().prettyPrint();
        }
    } catch (Exception e) {
        fail(e.toString());
    }
}