Example usage for java.util Queue add

Introduction

On this page you can find usage examples for the add method of java.util.Queue, drawn from open-source projects.

Prototype

boolean add(E e);

Document

Inserts the specified element into this queue if it is possible to do so immediately without violating capacity restrictions, returning true upon success and throwing an IllegalStateException if no space is currently available.
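
This throwing behaviour is what distinguishes add from offer, which returns false instead of throwing when a bounded queue has no room. A minimal self-contained sketch (not drawn from the examples below) showing both calls against a capacity-restricted ArrayBlockingQueue:

import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class QueueAddDemo {
    public static void main(String[] args) {
        // A bounded queue with room for exactly two elements.
        Queue<String> queue = new ArrayBlockingQueue<>(2);

        queue.add("first");  // returns true
        queue.add("second"); // returns true

        try {
            queue.add("third"); // no space available: throws
        } catch (IllegalStateException e) {
            System.out.println("add() threw: " + e.getMessage());
        }

        // offer() signals the same condition with a return value instead.
        System.out.println("offer() accepted: " + queue.offer("third")); // false
    }
}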

Usage

From source file: org.eobjects.analyzer.result.AnalyzerResultFutureTest.java

public void testMultiThreadedListenerScenario() throws Exception {
    final int threadCount = 10;

    final Thread[] threads = new Thread[threadCount];
    @SuppressWarnings({ "unchecked" })
    final Listener<NumberResult>[] listeners = new Listener[threadCount];
    final Queue<Object> resultQueue = new ArrayBlockingQueue<>(threadCount);

    for (int i = 0; i < listeners.length; i++) {
        listeners[i] = new Listener<NumberResult>() {
            @Override
            public void onSuccess(NumberResult result) {
                resultQueue.add(result);
            }

            @Override
            public void onError(RuntimeException error) {
                resultQueue.add(error);
            }
        };
    }

    final Ref<NumberResult> resultRef = new LazyRef<NumberResult>() {
        @Override
        protected NumberResult fetch() throws Throwable {
            long randomSleepTime = (long) (1000 * Math.random());
            Thread.sleep(randomSleepTime);
            return new NumberResult(43);
        }
    };

    final AnalyzerResultFuture<NumberResult> future = new AnalyzerResultFuture<>("foo", resultRef);

    for (int i = 0; i < threads.length; i++) {
        final Listener<NumberResult> listener = listeners[i];
        threads[i] = new Thread() {
            @Override
            public void run() {
                future.addListener(listener);
            }
        };
    }

    final int halfOfTheThreads = threads.length / 2;
    for (int i = 0; i < halfOfTheThreads; i++) {
        threads[i].start();
    }
    for (int i = 0; i < halfOfTheThreads; i++) {
        threads[i].join();
    }

    future.get();

    assertEquals("[43, 43, 43, 43, 43]", resultQueue.toString());
    assertEquals(halfOfTheThreads, resultQueue.size());

    for (int i = halfOfTheThreads; i < threads.length; i++) {
        threads[i].start();
    }
    for (int i = halfOfTheThreads; i < threads.length; i++) {
        threads[i].join();
    }

    assertEquals("[43, 43, 43, 43, 43, 43, 43, 43, 43, 43]", resultQueue.toString());
    assertEquals(threads.length, resultQueue.size());
}

From source file: org.phenotips.data.permissions.internal.DefaultPatientAccessHelper.java

@Override
public AccessLevel getAccessLevel(Patient patient, EntityReference user) {
    AccessLevel result = this.manager.resolveAccessLevel("none");
    if (patient == null || user == null) {
        return result;
    }
    try {
        EntityReference owner = getOwner(patient).getUser();
        Collection<Collaborator> collaborators = getCollaborators(patient);
        Set<DocumentReference> processedEntities = new HashSet<DocumentReference>();
        Queue<DocumentReference> entitiesToCheck = new LinkedList<DocumentReference>();
        entitiesToCheck.add((DocumentReference) user);
        AccessLevel currentItemAccess = null;
        DocumentReference currentItem;
        XWikiContext context = getXWikiContext();
        XWikiGroupService groupService = context.getWiki().getGroupService(context);
        while (!entitiesToCheck.isEmpty()) {
            currentItem = entitiesToCheck.poll();
            currentItemAccess = getAccessLevel(currentItem, owner, collaborators);
            if (currentItemAccess.compareTo(result) > 0) {
                result = currentItemAccess;
            }
            processedEntities.add(currentItem);
            Collection<DocumentReference> groups = groupService.getAllGroupsReferencesForMember(currentItem, 0,
                    0, context);
            groups.removeAll(processedEntities);
            entitiesToCheck.addAll(groups);
        }
    } catch (XWikiException ex) {
        this.logger.warn("Failed to compute access level for [{}] on [{}]: {}", user, patient.getId(),
                ex.getMessage());
    }
    return result;
}
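
The example above is the standard breadth-first traversal pattern: seed a Queue with a starting element, poll one entry at a time, and add only the neighbours that have not been processed yet. A stripped-down sketch of the same pattern over a hypothetical adjacency map (the map and method are illustrative, not part of the PhenoTips code):

// Breadth-first walk over an adjacency map; returns every reachable node.
private Set<String> reachable(Map<String, List<String>> graph, String start) {
    Set<String> processed = new HashSet<>();
    Queue<String> toCheck = new LinkedList<>();
    toCheck.add(start);
    while (!toCheck.isEmpty()) {
        String current = toCheck.poll();
        if (!processed.add(current)) {
            continue; // already visited through another path
        }
        for (String neighbour : graph.getOrDefault(current, Collections.emptyList())) {
            if (!processed.contains(neighbour)) {
                toCheck.add(neighbour);
            }
        }
    }
    return processed;
}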

From source file: gobblin.ingestion.google.webmaster.GoogleWebmasterDataFetcherImpl.java

private void expandJobs(Queue<Pair<String, FilterOperator>> jobs, String prefix) {
    for (String expanded : getUrlPartitions(prefix)) {
        jobs.add(Pair.of(expanded, FilterOperator.CONTAINS));
    }
}

From source file: org.apache.zeppelin.socket.ConnectionManager.java

public void addUserConnection(String user, NotebookSocket conn) {
    LOGGER.debug("Add user connection {} for user: {}", conn, user);
    conn.setUser(user);
    if (userSocketMap.containsKey(user)) {
        userSocketMap.get(user).add(conn);
    } else {
        Queue<NotebookSocket> socketQueue = new ConcurrentLinkedQueue<>();
        socketQueue.add(conn);
        userSocketMap.put(user, socketQueue);
    }
}
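
Note that the containsKey/put sequence above is a check-then-act step: if two threads call addUserConnection for the same user at once, each may create its own queue and one connection can be lost, unless callers synchronize externally. Assuming userSocketMap is a ConcurrentHashMap, the get-or-create step can be made atomic with computeIfAbsent; a sketch of that variant (an alternative, not the Zeppelin code):

public void addUserConnection(String user, NotebookSocket conn) {
    LOGGER.debug("Add user connection {} for user: {}", conn, user);
    conn.setUser(user);
    // computeIfAbsent creates and stores the queue at most once per user,
    // atomically on a ConcurrentHashMap.
    userSocketMap.computeIfAbsent(user, k -> new ConcurrentLinkedQueue<>()).add(conn);
}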

From source file: org.opencb.commons.datastore.solr.FacetQueryParser.java

public String parseJson(String facetJson) throws IOException {
    Queue<Map<String, Object>> myQueue = new LinkedList<>();
    Map jsonMap = new ObjectMapper().readValue(facetJson, Map.class);
    myQueue.add(jsonMap);

    while (!myQueue.isEmpty()) {
        Map<String, Object> map = myQueue.remove();
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            if (entry.getValue() instanceof Map) {
                Map<String, Object> innerMap = (Map<String, Object>) entry.getValue();

                // Analyse map to fill in content
                if (innerMap.containsKey("start")) {
                    // Ranges
                    innerMap.put("type", "range");
                    innerMap.put("gap", innerMap.get("step"));
                    innerMap.remove("step");
                } else if (innerMap.containsKey("q")) {
                    // Query
                    innerMap.put("type", "query");
                } else if (innerMap.containsKey("field")) {
                    // Categorical
                    innerMap.put("type", "terms");
                }

                // Check if there is a 'facet' field and insert all the items in the queue
                Object facet = innerMap.get("facet");
                if (facet != null) {
                    myQueue.add((Map<String, Object>) facet);
                }
            }
        }
    }

    return new ObjectMapper().writeValueAsString(jsonMap);
}

From source file: com.ibm.amc.feedback.FeedbackHandler.java

private void queueActionStatus(final ActionStatus status) {
    if (logger.isEntryEnabled())
        logger.entry("queueActionStatus", status);
    if (logger.isDebugEnabled())
        logger.debug("queueActionStatus", "", status.getUserId() + " " + status.getState());
    synchronized (statuses) {
        Queue<ActionStatusResponse> userStatuses = statuses.get(status.getUserId());
        if (userStatuses == null) {
            userStatuses = new LinkedList<ActionStatusResponse>();
            statuses.put(status.getUserId(), userStatuses);
        }
        userStatuses.add(new ActionStatusResponse(status));
    }
    if (logger.isEntryEnabled())
        logger.exit("queueActionStatus");
}

From source file: org.mozilla.gecko.sync.jpake.JPakeClient.java

/**
 * Set up Receiver sequence of stages for J-PAKE. (receiver of credentials)
 *
 */
private void prepareReceiverStages() {
    Queue<JPakeStage> jStages = new LinkedList<JPakeStage>();
    jStages.add(new GetChannelStage());
    jStages.add(new ComputeStepOneStage());
    jStages.add(new PutRequestStage());
    jStages.add(new GetRequestStage());
    jStages.add(new JPakeStage() {
        @Override
        public void execute(JPakeClient jpakeClient) {

            // Notify controller that pairing has started.
            jpakeClient.onPairingStart();

            // Switch back to smaller time-out.
            jpakeClient.jpakeMaxTries = JPakeClient.MAX_TRIES;
            jpakeClient.runNextStage();
        }
    });
    jStages.add(new ComputeStepTwoStage());
    jStages.add(new PutRequestStage());
    jStages.add(new GetRequestStage());
    jStages.add(new ComputeFinalStage());
    jStages.add(new ComputeKeyVerificationStage());
    jStages.add(new PutRequestStage());
    jStages.add(new JPakeStage() {

        @Override
        public void execute(JPakeClient jpakeClient) {
            jpakeMaxTries = MAX_TRIES_LAST_MSG;
            jpakeClient.runNextStage();
        }

    });
    jStages.add(new GetRequestStage());
    jStages.add(new DecryptDataStage());
    jStages.add(new CompleteStage());

    stages = jStages;
}

From source file: com.github.wolfdogs.kemono.util.event.RootEventManager.java

private HandlerEntry addHandlerEntry(HandlerEntry entry) {
    Class<? extends Event> type = entry.getType();
    Object relatedObject = entry.getRelatedObject();

    Map<Object, Queue<HandlerEntry>> objectEntriesMap = handlerEntryContainersMap.get(type);
    if (objectEntriesMap == null) {
        objectEntriesMap = new ConcurrentHashMap<Object, Queue<HandlerEntry>>();
        handlerEntryContainersMap.put(type, objectEntriesMap);
    }

    Queue<HandlerEntry> entries = objectEntriesMap.get(relatedObject);
    if (entries == null) {
        entries = new ConcurrentLinkedQueue<HandlerEntry>();
        objectEntriesMap.put(relatedObject, entries);
    }

    entries.add(entry);
    return entry;
}

From source file: org.finra.datagenerator.engine.negscxml.NegSCXMLFrontier.java

private void dfs(Queue<Map<String, String>> queue, AtomicBoolean flag, NegPossibleState state) {
    if (flag.get()) {
        return;
    }

    //reached end of chart, valid assignment found only if a negative value is set
    if (state.nextState.getId().equalsIgnoreCase("end")) {
        if (state.negVariable.size() == negative) {
            queue.add(state.variables);

            if (queue.size() > 10000) {
                log.info("Queue size " + queue.size() + ", waiting");
                try {
                    Thread.sleep(500);
                } catch (InterruptedException ex) {
                    log.info("Interrupted ", ex);
                }
            }

            return;
        }
    }

    List<NegPossibleState> expand = new LinkedList<>();

    expandPositive(state, expand);
    if (state.negVariable.size() < negative) {
        expandNegative(state, expand, negative - state.negVariable.size());
    }

    for (NegPossibleState e : expand) {
        dfs(queue, flag, e);
    }
}
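
The size check and Thread.sleep above are hand-rolled backpressure: the producer pauses whenever the consumer falls behind by more than 10000 entries. If the caller could supply a bounded BlockingQueue instead of a plain Queue, put() would block on its own when the queue is full; a sketch of that substitution (hypothetical, not the project's code):

// Hypothetical variant: with a bounded queue, e.g. new LinkedBlockingQueue<>(10000),
// put() blocks while the queue is at capacity, replacing the size check and sleep.
private void enqueue(BlockingQueue<Map<String, String>> queue, NegPossibleState state)
        throws InterruptedException {
    queue.put(state.variables);
}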

From source file: de.csw.lucene.ConceptFilter.java

/**
 * advances to the next token in the stream.
 * Takes into account that terms from the ontology might be constructed
 * out of several consecutive tokens.
 * @return false at EOS
 */
@Override
public boolean incrementToken() throws IOException {

    boolean hasMoreToken = innerNextToken();
    if (!hasMoreToken) {
        return false;
    }

    Queue<AttributeSource.State> lookAhead = new LinkedList<AttributeSource.State>();
    List<String> terms = new ArrayList<String>();
    terms.add(String.copyValueOf(charTermAttribute.buffer(), 0, charTermAttribute.length()));

    while (index.isPrefix(terms) && hasMoreToken) {
        lookAhead.add(captureState());
        hasMoreToken = innerNextToken();
        terms.add(String.copyValueOf(charTermAttribute.buffer(), 0, charTermAttribute.length()));
    }

    // if we have a match ...
    if (index.hasExactMatches(StringUtils.join(terms.toArray(), OntologyIndex.PREFIX_SEPARATOR))) {

        // ..then we consume all elements in the look ahead, if present
        if (!lookAhead.isEmpty()) {
            int maxEndOffset = offsetAttribute.endOffset();
            restoreState(lookAhead.poll());
            terms.remove(0); // already present in current token
            for (String term : terms) {
                charTermAttribute.append(OntologyIndex.PREFIX_SEPARATOR);
                charTermAttribute.append(term);
            }

            offsetAttribute.setOffset(offsetAttribute.startOffset(), maxEndOffset);
        }
        typeAttribute.setType(CONCEPT_TYPE);
        if (log.isTraceEnabled()) {
            log.trace("Concept token recognized: "
                    + String.copyValueOf(charTermAttribute.buffer(), 0, charTermAttribute.length()));
        }

    } else {

        // .. else we push back in the queue the tokens already read
        if (!lookAhead.isEmpty()) {
            lookAhead.add(captureState());
            restoreState(lookAhead.poll());
            for (AttributeSource.State laterToken : lookAhead) {
                queue.add(laterToken);
            }
        }
    }

    return hasMoreToken;
}