Example usage for java.util Queue add

List of usage examples for java.util Queue add

Introduction

On this page you can find example usages of java.util.Queue#add.

Prototype

boolean add(E e);

Source Link

Document

Inserts the specified element into this queue if it is possible to do so immediately without violating capacity restrictions, returning true upon success and throwing an IllegalStateException if no space is currently available.

Usage

From source file:com.android.pwdhashandroid.pwdhash.PasswordHasher.java

/**
 * Copies the given characters into a FIFO queue, preserving their order.
 *
 * @param chars the characters to enqueue
 * @return a queue yielding the characters in their original order
 */
private Queue<Character> CreateQueue(char[] chars) {
    Queue<Character> result = new LinkedList<Character>();
    for (char c : chars) {
        result.add(c);
    }
    return result;
}

From source file:org.esigate.http.OutgoingRequestContext.java

/**
 * Set attribute and save previous attribute value
 * //from w ww  . j  a  v a 2s . co  m
 * @param id
 *            attribute name
 * @param obj
 *            value
 * @param save
 *            save previous attribute value to restore later
 */
/**
 * Sets an attribute, optionally saving the previous value so it can be
 * restored later.
 *
 * @param id
 *            attribute name
 * @param obj
 *            value
 * @param save
 *            save previous attribute value to restore later
 */
public void setAttribute(String id, Object obj, boolean save) {
    if (save) {
        String historyAttribute = id + "history";
        // Safe cast: the history attribute is only ever written by this method,
        // always as a Queue<Object>.
        @SuppressWarnings("unchecked")
        Queue<Object> history = (Queue<Object>) getAttribute(historyAttribute);
        if (history == null) {
            history = new LinkedList<Object>();
            setAttribute(historyAttribute, history);
        }
        // Look the current value up once instead of calling getAttribute twice.
        Object previous = getAttribute(id);
        if (previous != null) {
            history.add(previous);
        }
    }
    setAttribute(id, obj);
}

From source file:org.apache.hadoop.yarn.server.resourcemanager.recovery.TestFSRMStateStore.java

/**
 * Walks the HDFS tree rooted at {@code root} breadth-first and asserts that
 * every regular file is unreadable by the current (super) user: each open must
 * fail with an AccessControlException carrying the expected message.
 *
 * @param cluster the mini DFS cluster whose file system is checked
 * @param root    the directory subtree to verify
 * @throws Exception on file system access failures unrelated to the check
 */
private void verifyFilesUnreadablebyHDFS(MiniDFSCluster cluster, Path root) throws Exception {
    DistributedFileSystem fs = cluster.getFileSystem();
    Queue<Path> paths = new LinkedList<>();
    paths.add(root);
    while (!paths.isEmpty()) {
        Path p = paths.poll();
        FileStatus stat = fs.getFileStatus(p);
        if (stat.isDirectory()) {
            // Breadth-first traversal: enqueue every child of the directory.
            FileStatus[] ls = fs.listStatus(p);
            for (FileStatus f : ls) {
                paths.add(f.getPath());
            }
        } else {
            LOG.warn("\n\n ##Testing path [" + p + "]\n\n");
            // try-with-resources closes the stream if open() unexpectedly
            // succeeds; the original leaked it before failing the assertion.
            try (java.io.InputStream in = fs.open(p)) {
                Assert.fail("Super user should not be able to read [" + UserGroupInformation.getCurrentUser()
                        + "] [" + p.getName() + "]");
            } catch (AccessControlException e) {
                Assert.assertTrue(
                        e.getMessage().contains("superuser is not allowed to perform this operation"));
            } catch (Exception e) {
                Assert.fail("Should get an AccessControlException here");
            }
        }
    }
}

From source file:org.apache.hadoop.mapred.nativetask.buffer.DirectBufferPool.java

/**
 * Returns a direct buffer to the pool so it can be reused by later requests
 * of the same capacity.
 *
 * @param buffer the direct buffer being returned
 * @throws IOException if the buffer is null or not a direct buffer
 */
public void returnBuffer(ByteBuffer buffer) throws IOException {
    if (buffer == null || !buffer.isDirect()) {
        throw new IOException("the buffer is null or the buffer returned is not direct buffer");
    }

    buffer.clear();
    final int capacity = buffer.capacity();
    Queue<WeakReference<ByteBuffer>> pool = bufferMap.get(capacity);
    if (pool == null) {
        // Lost-update-safe lazy init: if another thread registered a queue for
        // this capacity first, adopt that one instead of ours.
        Queue<WeakReference<ByteBuffer>> fresh = new ConcurrentLinkedQueue<WeakReference<ByteBuffer>>();
        Queue<WeakReference<ByteBuffer>> raced = bufferMap.putIfAbsent(capacity, fresh);
        pool = (raced == null) ? fresh : raced;
    }
    // Weak reference lets the GC reclaim pooled buffers under memory pressure.
    pool.add(new WeakReference<ByteBuffer>(buffer));
}

From source file:asia.stampy.server.listener.subscription.AbstractAcknowledgementListenerAndInterceptor.java

/**
 * Records the ack header of an outgoing MESSAGE frame, keyed by client
 * connection, so a later acknowledgement can be matched against it, then
 * starts the acknowledgement timer for that ack.
 *
 * @param message the intercepted message; cast to MessageMessage by this listener
 * @param hostPort the client connection the message is being sent to
 * @throws InterceptException propagated from interception processing
 */
@Override
public void interceptMessage(StampyMessage<?> message, HostPort hostPort) throws InterceptException {
    MessageMessage msg = (MessageMessage) message;

    String ack = msg.getHeader().getAck();

    Queue<String> queue = messages.get(hostPort);
    if (queue == null) {
        queue = new ConcurrentLinkedQueue<String>();
        // NOTE(review): this get/put sequence is not atomic. If 'messages' is
        // shared across threads, two racing calls could each create a queue and
        // one ack could be silently dropped — confirm the locking model and
        // consider putIfAbsent if 'messages' is a ConcurrentMap.
        messages.put(hostPort, queue);
    }

    queue.add(ack);
    startTimerTask(hostPort, ack);
}

From source file:com.marklogic.contentpump.DelimitedJSONReader.java

/**
 * Breadth-first search of a parsed JSON document for the first node containing
 * the configured URI key.
 *
 * @param line one line of JSON text
 * @return the URI value rendered as a string when the key maps to a String or
 *         Number; null when the key maps to any other type or is absent
 * @throws JsonParseException if the line is not valid JSON
 * @throws IOException on read failure
 * @throws UnsupportedOperationException if the root is neither object nor array
 */
@SuppressWarnings("unchecked")
protected String findUriInJSON(String line) throws JsonParseException, IOException {
    /* Breadth-First-Search */
    Queue<Object> q = new LinkedList<Object>();
    // Decode explicitly as UTF-8: the no-arg getBytes() uses the platform
    // default charset and can corrupt non-ASCII JSON on some systems.
    Object root = mapper.readValue(line.getBytes(java.nio.charset.StandardCharsets.UTF_8), Object.class);
    if (root instanceof Map || root instanceof ArrayList) {
        q.add(root);
    } else {
        throw new UnsupportedOperationException("invalid JSON");
    }
    while (!q.isEmpty()) {
        Object current = q.remove();
        if (current instanceof ArrayList) {
            // Arrays: enqueue any nested containers for later inspection.
            for (Object element : (ArrayList<Object>) current) {
                if (element instanceof Map || element instanceof ArrayList) {
                    q.add(element);
                }
            }
        } else { // instanceof Map
            // First match wins.
            Map<String, ?> map = (Map<String, ?>) current;
            if (map.containsKey(uriName)) {
                Object uriValue = map.get(uriName);
                if (uriValue instanceof Number || uriValue instanceof String) {
                    return uriValue.toString();
                }
                return null;
            }
            // Only values can hold nested containers — no need to walk entries.
            for (Object pairValue : map.values()) {
                if (pairValue instanceof Map || pairValue instanceof ArrayList) {
                    q.add(pairValue);
                }
            }
        }
    }
    return null;
}

From source file:edu.uci.ics.hyracks.api.rewriter.ActivityClusterGraphRewriter.java

/**
 * Create a new super activity/*from   w ww .j a v a  2s. co m*/
 * 
 * @param acg
 *            the activity cluster
 * @param superActivities
 *            the map from activity id to current super activities
 * @param toBeExpendedMap
 *            the map from an existing super activity to its BFS expansion queue of the original activities
 * @param invertedActivitySuperActivityMap
 *            the map from the original activities to their hosted super activities
 * @param activityId
 *            the activity id for the new super activity, which is the first added acitivty's id in the super activity
 * @param activity
 *            the first activity added to the new super activity
 */
private void createNewSuperActivity(ActivityCluster acg, Map<ActivityId, SuperActivity> superActivities,
        Map<ActivityId, Queue<IActivity>> toBeExpendedMap,
        Map<IActivity, SuperActivity> invertedActivitySuperActivityMap, ActivityId activityId,
        IActivity activity) {
    SuperActivity superActivity = new SuperActivity(acg.getActivityClusterGraph(), acg.getId(), activityId);
    superActivities.put(activityId, superActivity);
    superActivity.addActivity(activity);
    Queue<IActivity> toBeExpended = new LinkedList<IActivity>();
    toBeExpended.add(activity);
    toBeExpendedMap.put(activityId, toBeExpended);
    invertedActivitySuperActivityMap.put(activity, superActivity);
}

From source file:org.apache.drill.exec.store.mpjdbc.MPJdbcGroupScan.java

/**
 * Assigns the single scan chunk to the incoming drillbit endpoints, building
 * the slot-to-scan-spec mapping used by later fragment creation.
 *
 * @param incomingEndpoints the endpoints to assign; must not exceed the number
 *                          of chunks (which is fixed at 1 here)
 */
@Override
public void applyAssignments(List<DrillbitEndpoint> incomingEndpoints) {
    final int numSlots = incomingEndpoints.size();
    final int totalAssignmentsTobeDone = 1;
    Preconditions.checkArgument(numSlots <= totalAssignmentsTobeDone, String.format(
            "Incoming endpoints %d is greater than number of chunks %d", numSlots, totalAssignmentsTobeDone));
    // NOTE(review): the original also computed an unused 'minPerEndpointSlot'
    // (Math.floor of the same ratio); it has been removed as dead code.
    final int maxPerEndpointSlot = (int) Math.ceil((double) totalAssignmentsTobeDone / numSlots);
    /* Map for (index,endpoint)'s */
    endpointFragmentMapping = Maps.newHashMapWithExpectedSize(numSlots);
    /* Reverse mapping for above indexes */
    // NOTE(review): endpointHostIndexListMap is populated but never read in
    // this method — confirm whether it is still needed or can be removed.
    Map<String, Queue<Integer>> endpointHostIndexListMap = Maps.newHashMap();
    // Initialize both maps: every slot receives the single scan spec, and each
    // host accumulates the slot indexes that landed on it.
    for (int i = 0; i < numSlots; ++i) {
        List<MPJdbcScanSpec> val = new ArrayList<MPJdbcScanSpec>(maxPerEndpointSlot);
        val.add(this.mPJdbcScanSpec);
        endpointFragmentMapping.put(i, val);
        String hostname = incomingEndpoints.get(i).getAddress();
        Queue<Integer> hostIndexQueue = endpointHostIndexListMap.get(hostname);
        if (hostIndexQueue == null) {
            hostIndexQueue = Lists.newLinkedList();
            endpointHostIndexListMap.put(hostname, hostIndexQueue);
        }
        hostIndexQueue.add(i);
    }
}

From source file:de.thomaskrille.dropwizard.environment_configuration.EnvironmentConfigurationFactory.java

/**
 * Walks the configuration tree breadth-first and substitutes environment
 * variables in every object and array node encountered.
 *
 * @param root the root of the JSON configuration tree to process
 */
private void replaceEnvironmentVariables(final JsonNode root) {
    Queue<JsonNode> pending = Queues.newArrayDeque();
    pending.add(root);

    while (!pending.isEmpty()) {
        JsonNode node = pending.poll();

        if (!node.isContainerNode()) {
            // Scalar leaf — nothing to substitute.
            continue;
        }

        // Container handlers enqueue the node's children onto 'pending'.
        if (node.isObject()) {
            replaceEnvironmentVariablesForObject(pending, (ObjectNode) node);
        } else if (node.isArray()) {
            replaceEnvironmentVariablesForArray(pending, (ArrayNode) node);
        }
    }
}

From source file:functionaltests.job.log.TestJobServerLogs.java

/**
 * Dumps diagnostic information for every file under {@code logsLocation}:
 * basic file attributes, the first LIMIT lines, and the last LIMIT lines
 * (with a skip marker in between). Intended to be called just before a test
 * failure to aid debugging.
 */
private void printDiagnosticMessage() {
    final int LIMIT = 5;
    System.out.println("This test is going to fail, but before we print diagnostic message."
            + simpleDateFormat.format(new Date()));
    // iterate over all files in the 'logsLocation'
    for (File file : FileUtils.listFiles(new File(logsLocation), TrueFileFilter.INSTANCE,
            TrueFileFilter.INSTANCE)) {
        try {
            BasicFileAttributes attr = Files.readAttributes(file.toPath(), BasicFileAttributes.class);
            System.out.println(String.format("Name: %s, Size: %d, Created: %s, Modified: %s",
                    file.getAbsolutePath(), attr.size(), attr.creationTime(), attr.lastModifiedTime()));
            // try-with-resources: the original never closed this reader,
            // leaking a file handle per file visited.
            try (BufferedReader br = new BufferedReader(new FileReader(file))) {
                String line;
                int i;
                // print up to LIMIT first lines
                for (i = 0; i < LIMIT && (line = br.readLine()) != null; ++i) {
                    System.out.println(line);
                }

                // Bounded FIFO retains only the last LIMIT lines of the remainder.
                Queue<String> queue = new CircularFifoQueue<>(LIMIT);
                for (; (line = br.readLine()) != null; ++i) {
                    queue.add(line);
                }

                if (i >= LIMIT * 2) { // some lines were neither printed nor retained
                    System.out.println(".......");
                    System.out.println("....... (skipped content)");
                    System.out.println(".......");
                }
                for (String l : queue) { // print the tail of the file
                    System.out.println(l);
                }
            }

            System.out.println("------------------------------------");
            System.out.println();
        } catch (IOException e) {
            System.out.println("Exception ocurred during accessing file attributes " + e);
        }
    }
}