Example usage for java.util.Queue.add

List of usage examples for java.util.Queue.add

Introduction

On this page you can find example usage for java.util.Queue.add.

Prototype

boolean add(E e);

Document

Inserts the specified element into this queue if it is possible to do so immediately without violating capacity restrictions, returning true upon success and throwing an IllegalStateException if no space is currently available.
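
As a quick illustration of that contract, here is a minimal, hypothetical sketch (not taken from the sources below) using a capacity-one ArrayBlockingQueue: add throws IllegalStateException when the queue is full, while the related offer method reports failure through its return value.

import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class QueueAddDemo {
    public static void main(String[] args) {
        // ArrayBlockingQueue is bounded, so the capacity restriction is easy to trigger
        Queue<String> queue = new ArrayBlockingQueue<String>(1);
        System.out.println(queue.add("first")); // true: space was available
        try {
            queue.add("second"); // queue is full: add signals failure with an exception
        } catch (IllegalStateException e) {
            System.out.println("add failed: " + e.getMessage()); // "Queue full"
        }
        System.out.println(queue.offer("second")); // false: offer fails by return value instead
    }
}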

Usage

From source file:plaid.compilerjava.CompilerCore.java

public PackageRep buildPlaidPath(List<CompilationUnit> cus) throws Exception {
    //Build up a representation of plaidpath
    PackageRep plaidpath = new PackageRep("$TOPLEVEL$");
    Stack<File> directoryWorklist = new Stack<File>();
    for (String base : cc.getPlaidpath())
        handlePlaidPathEntry(base, plaidpath, directoryWorklist);

    //we want to remove the stuff we're trying to compile so that we don't make assumptions based on
    //the previous form of the source files
    //but also want a complete picture for resolving imports and thence QIs
    for (CompilationUnit c : cus) {
        String cPackage = c.getPackageString();
        for (Decl d : c.getDecls()) {
            String memberName = d.getName();
            if (plaidpath.memberExists(cPackage, memberName)) { //indicate that this is outdated and will be updated soon
                plaidpath.lookupMember(cPackage, memberName).startRecompilation();
            } else { //add shell for use in import resolution
                MemberRep newMem = null;
                if (d instanceof FieldDecl)
                    newMem = new FieldRep(memberName);
                else if (d instanceof MethodDecl)
                    newMem = new MethodRep(memberName);
                else if (d instanceof StateDecl)
                    newMem = new StateRep(memberName);
                else
                    throw new RuntimeException("New type of MemberRep not accounted for");

                //will be replaced later
                newMem.startRecompilation();
                plaidpath.addMember(cPackage, newMem);
            }

        }
    }

    Queue<StateRep> dependants = new LinkedList<StateRep>();
    for (CompilationUnit c : cus) {
        String cPackage = c.getPackageString();

        //expand imports
        List<String> declaredMembers = new ArrayList<String>(); //right now declared members are just those in the file, not the whole package
        for (Decl d : c.getDecls())
            declaredMembers.add(d.getName());
        c.getImports().checkAndExpandImports(plaidpath, declaredMembers, cPackage);

        //fill out plaidpath with declared members (shell info only)
        for (Decl d : c.getDecls()) {
            MemberRep rep = d.generateHeader(plaidpath, c.getImports(), cPackage);
            if (rep instanceof StateRep && ((StateRep) rep).hasNeeds()) {
                dependants.add((StateRep) rep); //keep track of ones we need to return to
            }
            plaidpath.addMember(cPackage, rep);
        }
    }

    while (!dependants.isEmpty()) {
        StateRep s = dependants.remove();
        List<String> newNeeds = new ArrayList<String>();
        for (String path : s.getNeeds()) {
            if (plaidpath.memberExists(path)) {
                MemberRep r = plaidpath.lookupMember(path);
                if (r instanceof StateRep) {
                    StateRep depState = (StateRep) r;
                    s.addMembers(depState.getMembers()); //TODO : make sure this still works after changing to list of MemberReps
                    newNeeds.addAll(depState.getNeeds());
                } else
                    throw new RuntimeException("Something went wrong with dependencies.");
            } else {
                throw new RuntimeException("Required Dependency " + path + " not found.");
            }
        }
        s.setNeeds(newNeeds); //replace old needs with the new needs
        if (s.hasNeeds())
            dependants.add(s);
    }

    return plaidpath;
}

From source file:org.grouplens.grapht.solver.DependencySolver.java

/**
 * Resolve a desire and its dependencies, inserting them into the graph.
 *
 * @param desire The desire to resolve.
 * @param context The context of {@code parent}.
 * @param deferQueue The queue of node deferrals.
 * @throws ResolutionException if there is an error resolving the nodes.
 */
private Pair<DAGNode<Component, Dependency>, Dependency> resolveFully(Desire desire, InjectionContext context,
        Queue<Deferral> deferQueue) throws ResolutionException {
    // check context depth against max to detect likely dependency cycles
    if (context.size() > maxDepth) {
        throw new CyclicDependencyException(desire, "Maximum context depth of " + maxDepth + " was reached");
    }

    // resolve the current node
    Resolution result = resolve(desire, context);

    InjectionContext newContext = context.extend(result.satisfaction, desire.getInjectionPoint());

    DAGNode<Component, Dependency> node;
    if (result.deferDependencies) {
        // extend node onto deferred queue and skip its dependencies for now
        logger.debug("Deferring dependencies of {}", result.satisfaction);
        node = DAGNode.singleton(result.makeSatisfaction());
        // FIXME Deferred and skippable bindings do not interact well
        deferQueue.add(new Deferral(node, newContext));
        return Pair.of(node, result.makeDependency());
    } else {
        return resolveDepsAndMakeNode(deferQueue, result, newContext);
    }
}

From source file:org.apache.pdfbox.pdfparser.NonSequentialPDFParser.java

/** Adds newObject to toBeParsedList if it is not a COSObject,
 *  or if this COSObject was not already added (checked via addedObjects). */
private final void addNewToList(final Queue<COSBase> toBeParsedList, final COSBase newObject,
        final Set<Long> addedObjects) {
    if (newObject instanceof COSObject) {
        final long objId = getObjectId((COSObject) newObject);
        if (!addedObjects.add(objId)) {
            return;
        }
    }
    toBeParsedList.add(newObject);
}
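
Note the idiom here: Set.add returns false when the element is already present, so a single call both tests and records membership before the object is enqueued with Queue.add.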

From source file:com.jiangge.apns4j.impl.ApnsConnectionImpl.java

private void startErrorWorker() {
    Thread thread = new Thread(new Runnable() {

        @Override
        public void run() {
            Socket curSocket = socket;
            try {
                if (!isSocketAlive(curSocket)) {
                    return;
                }
                InputStream socketIs = curSocket.getInputStream();
                byte[] res = new byte[ERROR_RESPONSE_BYTES_LENGTH];
                int size = 0;

                while (true) {
                    try {
                        size = socketIs.read(res);
                        if (size > 0 || size == -1) {
                            // break when something was read or there is no more data
                            break;
                        }
                    } catch (SocketTimeoutException e) {
                        // There is no data. Keep reading.
                    }
                }

                int command = res[0];
                /* Error response: close the socket and resend the cached notifications. */
                if (size == res.length && command == Command.ERROR) {
                    int status = res[1];
                    int errorId = ApnsTools.parse4ByteInt(res[2], res[3], res[4], res[5]);

                    if (logger.isInfoEnabled()) {
                        logger.info(
                                String.format("%s Received error response. status: %s, id: %s, error-desc: %s",
                                        connName, status, errorId, ErrorResponse.desc(status)));
                    }

                    Queue<PushNotification> resentQueue = new LinkedList<PushNotification>();

                    synchronized (lock) {
                        boolean found = false;
                        errorHappendedLastConn = true;
                        while (!notificationCachedQueue.isEmpty()) {
                            PushNotification pn = notificationCachedQueue.poll();
                            if (pn.getId() == errorId) {
                                found = true;
                            } else {
                                /*
                                 * https://developer.apple.com/library/ios/documentation/NetworkingInternet/Conceptual/RemoteNotificationsPG/Chapters/CommunicatingWIthAPS.html
                                 * As the document says, add the notifications that need to be resent
                                 * to the queue, ignoring the erroneous one.
                                 */
                                if (found) {
                                    resentQueue.add(pn);
                                }
                            }
                        }
                        if (!found) {
                            logger.warn(connName
                                    + " Didn't find error-notification in the queue. Maybe it's time to adjust cache length. id: "
                                    + errorId);
                        }
                    }
                    // resend notifications
                    if (!resentQueue.isEmpty()) {
                        ApnsResender.getInstance().resend(name, resentQueue);
                    }
                } else {
                    // ignore and continue reading
                    logger.error(
                            connName + " Unexpected command or size. command: " + command + ", size: " + size);
                }
            } catch (Exception e) {
                //               logger.error(connName + " " + e.getMessage(), e);
                logger.error(connName + " " + e.getMessage());
            } finally {
                /* Close the old socket, although it may already have been closed once before. */
                closeSocket(curSocket);
            }
        }
    });

    thread.start();
}

From source file:org.apache.hadoop.hive.ql.QueryPlan.java

/**
 * Extract all the counters from tasks and operators.
 */
private void extractCounters() throws IOException {
    Queue<Task<? extends Serializable>> tasksToVisit = new LinkedList<Task<? extends Serializable>>();
    Set<Task<? extends Serializable>> tasksVisited = new HashSet<Task<? extends Serializable>>();
    tasksToVisit.addAll(rootTasks);
    while (tasksToVisit.peek() != null) {
        Task<? extends Serializable> task = tasksToVisit.remove();
        tasksVisited.add(task);
        // add children to tasksToVisit
        if (task.getChildTasks() != null) {
            for (Task<? extends Serializable> childTask : task.getChildTasks()) {
                if (!tasksVisited.contains(childTask)) {
                    tasksToVisit.add(childTask);
                }
            }
        }
        if (task.getId() == null) {
            continue;
        }
        if (started.contains(task.getId()) && done.contains(task.getId())) {
            continue;
        }

        // get the counters for the task
        counters.put(task.getId(), task.getCounters());

        // check if task is started
        if (task.started()) {
            started.add(task.getId());
        }
        if (task.done()) {
            done.add(task.getId());
        }
        if (task instanceof ExecDriver) {
            ExecDriver mrTask = (ExecDriver) task;
            if (mrTask.mapStarted()) {
                started.add(task.getId() + "_MAP");
            }
            if (mrTask.mapDone()) {
                done.add(task.getId() + "_MAP");
            }
            if (mrTask.hasReduce()) {
                if (mrTask.reduceStarted()) {
                    started.add(task.getId() + "_REDUCE");
                }
                if (mrTask.reduceDone()) {
                    done.add(task.getId() + "_REDUCE");
                }
            }
        } else if (task instanceof ConditionalTask) {
            ConditionalTask cTask = (ConditionalTask) task;
            for (Task<? extends Serializable> listTask : cTask.getListTasks()) {
                if (!tasksVisited.contains(listTask)) {
                    tasksToVisit.add(listTask);
                }
            }
        }
    }
}
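
The traversal above follows a common pattern: a Queue as a work list plus a Set of visited tasks, so each node is processed once even when the graph has shared children. Here is a minimal, self-contained sketch of the same idea, using a hypothetical Node type rather than Hive's Task classes:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Set;

class Node {
    final String id;
    final List<Node> children = new ArrayList<Node>();

    Node(String id) {
        this.id = id;
    }
}

class BreadthFirstVisit {
    /** Returns the ids of all nodes reachable from root, each exactly once. */
    static List<String> visit(Node root) {
        Queue<Node> toVisit = new LinkedList<Node>();
        Set<Node> visited = new HashSet<Node>();
        List<String> order = new ArrayList<String>();
        toVisit.add(root);
        while (toVisit.peek() != null) {
            Node node = toVisit.remove();
            if (!visited.add(node)) {
                continue; // already processed via another parent
            }
            order.add(node.id);
            for (Node child : node.children) {
                if (!visited.contains(child)) {
                    toVisit.add(child); // enqueue unvisited children
                }
            }
        }
        return order;
    }
}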

From source file:it.geosolutions.geobatch.actions.commons.CollectorAction.java

/**
 * Removes the events from the input queue and, for each file collected from an
 * event's source, puts a corresponding FILE_ADDED event on the returned queue.
 */
public Queue<EventObject> execute(Queue<EventObject> events) throws ActionException {

    listenerForwarder.started();
    listenerForwarder.setTask("build the output absolute file name");

    // return
    final Queue<EventObject> ret = new LinkedList<EventObject>();

    listenerForwarder.setTask("Building/getting the root data structure");

    if (conf.getWildcard() == null) {
        LOGGER.warn("Null wildcard: using default '*'");
        conf.setWildcard("*");
    }

    it.geosolutions.tools.io.file.Collector collector = new it.geosolutions.tools.io.file.Collector(
            new WildcardFileFilter(conf.getWildcard(), IOCase.INSENSITIVE), conf.getDeep());
    while (!events.isEmpty()) {

        final EventObject event = events.remove();
        if (event == null) {
            // TODO LOG
            continue;
        }
        File source = null;
        if (event.getSource() instanceof File) {
            source = ((File) event.getSource());
        }

        if (source == null || !source.exists()) {
            // LOG
            continue;
        }
        listenerForwarder.setTask("Collecting from" + source);

        List<File> files = collector.collect(source);
        if (files == null) {
            return ret;
        }
        for (File file : files) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Collected file: " + file);
            }
            ret.add(new FileSystemEvent(file, FileSystemEventType.FILE_ADDED));
        }

    }
    listenerForwarder.completed();
    return ret;
}

From source file:cn.edu.bjtu.cit.recommender.Recommender.java

@SuppressWarnings("unchecked")
public int run(String[] args) throws Exception {
    if (args.length < 2) {
        System.err.println();
        System.err.println("Usage: " + this.getClass().getName()
                + " [generic options] input output [profiling] [estimation] [clustersize]");
        System.err.println();
        printUsage();
        GenericOptionsParser.printGenericCommandUsage(System.err);

        return 1;
    }
    OptionParser parser = new OptionParser(args);

    Pipeline pipeline = new MRPipeline(Recommender.class, getConf());

    if (parser.hasOption(CLUSTER_SIZE)) {
        pipeline.getConfiguration().setInt(ClusterOracle.CLUSTER_SIZE,
                Integer.parseInt(parser.getOption(CLUSTER_SIZE).getValue()));
    }

    if (parser.hasOption(PROFILING)) {
        pipeline.getConfiguration().setBoolean(Profiler.IS_PROFILE, true);
        this.profileFilePath = parser.getOption(PROFILING).getValue();

    }

    if (parser.hasOption(ESTIMATION)) {
        estFile = parser.getOption(ESTIMATION).getValue();
        est = new Estimator(estFile, clusterSize);
    }

    if (parser.hasOption(OPT_REDUCE)) {
        pipeline.getConfiguration().setBoolean(OPT_REDUCE, true);
    }

    if (parser.hasOption(OPT_MSCR)) {
        pipeline.getConfiguration().setBoolean(OPT_MSCR, true);
    }

    if (parser.hasOption(ACTIVE_THRESHOLD)) {
        threshold = Integer.parseInt(parser.getOption("at").getValue());
    }

    if (parser.hasOption(TOP)) {
        top = Integer.parseInt(parser.getOption("top").getValue());
    }

    profiler = new Profiler(pipeline);
    /*
     * input node
     */
    PCollection<String> lines = pipeline.readTextFile(args[0]);

    if (profiler.isProfiling() && lines.getSize() > 10 * 1024 * 1024) {
        lines = lines.sample(0.1);
    }

    /*
     * S0 + GBK
     */
    PGroupedTable<Long, Long> userWithPrefs = lines.parallelDo(new MapFn<String, Pair<Long, Long>>() {

        @Override
        public Pair<Long, Long> map(String input) {
            String[] split = input.split(Estimator.DELM);
            long userID = Long.parseLong(split[0]);
            long itemID = Long.parseLong(split[1]);
            return Pair.of(userID, itemID);
        }

        @Override
        public float scaleFactor() {
            return est.getScaleFactor("S0").sizeFactor;
        }

        @Override
        public float scaleFactorByRecord() {
            return est.getScaleFactor("S0").recsFactor;
        }
    }, Writables.tableOf(Writables.longs(), Writables.longs())).groupByKey(est.getClusterSize());

    /*
     * S1
     */
    PTable<Long, Vector> userVector = userWithPrefs
            .parallelDo(new MapFn<Pair<Long, Iterable<Long>>, Pair<Long, Vector>>() {
                @Override
                public Pair<Long, Vector> map(Pair<Long, Iterable<Long>> input) {
                    Vector userVector = new RandomAccessSparseVector(Integer.MAX_VALUE, 100);
                    for (long itemPref : input.second()) {
                        userVector.set((int) itemPref, 1.0f);
                    }
                    return Pair.of(input.first(), userVector);
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S1").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S1").recsFactor;
                }
            }, Writables.tableOf(Writables.longs(), Writables.vectors()));

    userVector = profiler.profile("S0-S1", pipeline, userVector, ProfileConverter.long_vector(),
            Writables.tableOf(Writables.longs(), Writables.vectors()));

    /*
     * S2
     */
    PTable<Long, Vector> filteredUserVector = userVector
            .parallelDo(new DoFn<Pair<Long, Vector>, Pair<Long, Vector>>() {

                @Override
                public void process(Pair<Long, Vector> input, Emitter<Pair<Long, Vector>> emitter) {
                    if (input.second().getNumNondefaultElements() > threshold) {
                        emitter.emit(input);
                    }
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S2").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S2").recsFactor;
                }

            }, Writables.tableOf(Writables.longs(), Writables.vectors()));

    filteredUserVector = profiler.profile("S2", pipeline, filteredUserVector, ProfileConverter.long_vector(),
            Writables.tableOf(Writables.longs(), Writables.vectors()));

    /*
     * S3 + GBK
     */
    PGroupedTable<Integer, Integer> coOccurencePairs = filteredUserVector
            .parallelDo(new DoFn<Pair<Long, Vector>, Pair<Integer, Integer>>() {
                @Override
                public void process(Pair<Long, Vector> input, Emitter<Pair<Integer, Integer>> emitter) {
                    Iterator<Vector.Element> it = input.second().iterateNonZero();
                    while (it.hasNext()) {
                        int index1 = it.next().index();
                        Iterator<Vector.Element> it2 = input.second().iterateNonZero();
                        while (it2.hasNext()) {
                            int index2 = it2.next().index();
                            emitter.emit(Pair.of(index1, index2));
                        }
                    }
                }

                @Override
                public float scaleFactor() {
                    float size = est.getScaleFactor("S3").sizeFactor;
                    return size;
                }

                @Override
                public float scaleFactorByRecord() {
                    float recs = est.getScaleFactor("S3").recsFactor;
                    return recs;
                }
            }, Writables.tableOf(Writables.ints(), Writables.ints())).groupByKey(est.getClusterSize());

    /*
     * S4
     */
    PTable<Integer, Vector> coOccurenceVector = coOccurencePairs
            .parallelDo(new MapFn<Pair<Integer, Iterable<Integer>>, Pair<Integer, Vector>>() {
                @Override
                public Pair<Integer, Vector> map(Pair<Integer, Iterable<Integer>> input) {
                    Vector cooccurrenceRow = new RandomAccessSparseVector(Integer.MAX_VALUE, 100);
                    for (int itemIndex2 : input.second()) {
                        cooccurrenceRow.set(itemIndex2, cooccurrenceRow.get(itemIndex2) + 1.0);
                    }
                    return Pair.of(input.first(), cooccurrenceRow);
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S4").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S4").recsFactor;
                }
            }, Writables.tableOf(Writables.ints(), Writables.vectors()));

    coOccurenceVector = profiler.profile("S3-S4", pipeline, coOccurenceVector, ProfileConverter.int_vector(),
            Writables.tableOf(Writables.ints(), Writables.vectors()));

    /*
     * S5 Wrapping co-occurrence columns
     */
    PTable<Integer, VectorOrPref> wrappedCooccurrence = coOccurenceVector
            .parallelDo(new MapFn<Pair<Integer, Vector>, Pair<Integer, VectorOrPref>>() {

                @Override
                public Pair<Integer, VectorOrPref> map(Pair<Integer, Vector> input) {
                    return Pair.of(input.first(), new VectorOrPref(input.second()));
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S5").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S5").recsFactor;
                }

            }, Writables.tableOf(Writables.ints(), VectorOrPref.vectorOrPrefs()));

    wrappedCooccurrence = profiler.profile("S5", pipeline, wrappedCooccurrence, ProfileConverter.int_vopv(),
            Writables.tableOf(Writables.ints(), VectorOrPref.vectorOrPrefs()));

    /*
     * S6 Splitting user vectors
     */
    PTable<Integer, VectorOrPref> userVectorSplit = filteredUserVector
            .parallelDo(new DoFn<Pair<Long, Vector>, Pair<Integer, VectorOrPref>>() {

                @Override
                public void process(Pair<Long, Vector> input, Emitter<Pair<Integer, VectorOrPref>> emitter) {
                    long userID = input.first();
                    Vector userVector = input.second();
                    Iterator<Vector.Element> it = userVector.iterateNonZero();
                    while (it.hasNext()) {
                        Vector.Element e = it.next();
                        int itemIndex = e.index();
                        float preferenceValue = (float) e.get();
                        emitter.emit(Pair.of(itemIndex, new VectorOrPref(userID, preferenceValue)));
                    }
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S6").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S6").recsFactor;
                }
            }, Writables.tableOf(Writables.ints(), VectorOrPref.vectorOrPrefs()));

    userVectorSplit = profiler.profile("S6", pipeline, userVectorSplit, ProfileConverter.int_vopp(),
            Writables.tableOf(Writables.ints(), VectorOrPref.vectorOrPrefs()));

    /*
     * S7 Combine VectorOrPrefs
     */
    PTable<Integer, VectorAndPrefs> combinedVectorOrPref = wrappedCooccurrence.union(userVectorSplit)
            .groupByKey(est.getClusterSize())
            .parallelDo(new DoFn<Pair<Integer, Iterable<VectorOrPref>>, Pair<Integer, VectorAndPrefs>>() {

                @Override
                public void process(Pair<Integer, Iterable<VectorOrPref>> input,
                        Emitter<Pair<Integer, VectorAndPrefs>> emitter) {
                    Vector vector = null;
                    List<Long> userIDs = Lists.newArrayList();
                    List<Float> values = Lists.newArrayList();
                    for (VectorOrPref vop : input.second()) {
                        if (vector == null) {
                            vector = vop.getVector();
                        }
                        long userID = vop.getUserID();
                        if (userID != Long.MIN_VALUE) {
                            userIDs.add(vop.getUserID());
                        }
                        float value = vop.getValue();
                        if (!Float.isNaN(value)) {
                            values.add(vop.getValue());
                        }
                    }
                    emitter.emit(Pair.of(input.first(), new VectorAndPrefs(vector, userIDs, values)));
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S7").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S7").recsFactor;
                }
            }, Writables.tableOf(Writables.ints(), VectorAndPrefs.vectorAndPrefs()));

    combinedVectorOrPref = profiler.profile("S5+S6-S7", pipeline, combinedVectorOrPref,
            ProfileConverter.int_vap(), Writables.tableOf(Writables.ints(), VectorAndPrefs.vectorAndPrefs()));
    /*
     * S8 Computing partial recommendation vectors
     */
    PTable<Long, Vector> partialMultiply = combinedVectorOrPref
            .parallelDo(new DoFn<Pair<Integer, VectorAndPrefs>, Pair<Long, Vector>>() {
                @Override
                public void process(Pair<Integer, VectorAndPrefs> input, Emitter<Pair<Long, Vector>> emitter) {
                    Vector cooccurrenceColumn = input.second().getVector();
                    List<Long> userIDs = input.second().getUserIDs();
                    List<Float> prefValues = input.second().getValues();
                    for (int i = 0; i < userIDs.size(); i++) {
                        long userID = userIDs.get(i);
                        if (userID != Long.MIN_VALUE) {
                            float prefValue = prefValues.get(i);
                            Vector partialProduct = cooccurrenceColumn.times(prefValue);
                            emitter.emit(Pair.of(userID, partialProduct));
                        }
                    }
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S8").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S8").recsFactor;
                }

            }, Writables.tableOf(Writables.longs(), Writables.vectors())).groupByKey(est.getClusterSize())
            .combineValues(new CombineFn<Long, Vector>() {

                @Override
                public void process(Pair<Long, Iterable<Vector>> input, Emitter<Pair<Long, Vector>> emitter) {
                    Vector partial = null;
                    for (Vector vector : input.second()) {
                        partial = partial == null ? vector : partial.plus(vector);
                    }
                    emitter.emit(Pair.of(input.first(), partial));
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("combine").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("combine").recsFactor;
                }
            });

    partialMultiply = profiler.profile("S8-combine", pipeline, partialMultiply, ProfileConverter.long_vector(),
            Writables.tableOf(Writables.longs(), Writables.vectors()));

    /*
     * S9 Producing recommendations from vectors
     */
    PTable<Long, RecommendedItems> recommendedItems = partialMultiply
            .parallelDo(new DoFn<Pair<Long, Vector>, Pair<Long, RecommendedItems>>() {

                @Override
                public void process(Pair<Long, Vector> input, Emitter<Pair<Long, RecommendedItems>> emitter) {
                    Queue<RecommendedItem> topItems = new PriorityQueue<RecommendedItem>(11,
                            Collections.reverseOrder(BY_PREFERENCE_VALUE));
                    Iterator<Vector.Element> recommendationVectorIterator = input.second().iterateNonZero();
                    while (recommendationVectorIterator.hasNext()) {
                        Vector.Element element = recommendationVectorIterator.next();
                        int index = element.index();
                        float value = (float) element.get();
                        if (topItems.size() < top) {
                            topItems.add(new GenericRecommendedItem(index, value));
                        } else if (value > topItems.peek().getValue()) {
                            topItems.add(new GenericRecommendedItem(index, value));
                            topItems.poll();
                        }
                    }
                    List<RecommendedItem> recommendations = new ArrayList<RecommendedItem>(topItems.size());
                    recommendations.addAll(topItems);
                    Collections.sort(recommendations, BY_PREFERENCE_VALUE);
                    emitter.emit(Pair.of(input.first(), new RecommendedItems(recommendations)));
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S9").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S9").recsFactor;
                }

            }, Writables.tableOf(Writables.longs(), RecommendedItems.recommendedItems()));

    recommendedItems = profiler.profile("S9", pipeline, recommendedItems, ProfileConverter.long_ri(),
            Writables.tableOf(Writables.longs(), RecommendedItems.recommendedItems()));

    /*
     * Profiling
     */
    if (profiler.isProfiling()) {
        profiler.writeResultToFile(profileFilePath);
        profiler.cleanup(pipeline.getConfiguration());
        return 0;
    }
    /*
     * asText
     */
    pipeline.writeTextFile(recommendedItems, args[1]);
    PipelineResult result = pipeline.done();
    return result.succeeded() ? 0 : 1;
}

From source file:org.apache.synapse.transport.nhttp.HttpCoreNIOListener.java

/**
 * Start specific endpoints given by an InetSocketAddress list.
 *
 * @param endpointsClosed InetSocketAddresses of the endpoints to be started
 * @throws AxisFault
 */
private void startSpecificEndpoints(List<InetSocketAddress> endpointsClosed) throws AxisFault {
    Queue<ListenerEndpoint> endpoints = new LinkedList<ListenerEndpoint>();

    // Ensure simple but stable order
    List<InetSocketAddress> addressList = endpointsClosed;
    Collections.sort(addressList, new Comparator<InetSocketAddress>() {

        public int compare(InetSocketAddress a1, InetSocketAddress a2) {
            String s1 = a1.toString();
            String s2 = a2.toString();
            return s1.compareTo(s2);
        }

    });

    for (InetSocketAddress address : addressList) {
        endpoints.add(ioReactor.listen(address));
    }

    // Wait for the endpoint to become ready, i.e. for the listener to start accepting
    // requests.
    while (!endpoints.isEmpty()) {
        ListenerEndpoint endpoint = endpoints.remove();
        try {
            endpoint.waitFor();
            if (log.isInfoEnabled()) {
                InetSocketAddress address = (InetSocketAddress) endpoint.getAddress();
                if (!address.isUnresolved()) {
                    log.info(name + " started on " + address.getHostName() + ":" + address.getPort());
                } else {
                    log.info(name + " started on " + address);
                }
            }
        } catch (InterruptedException e) {
            log.warn("Listener startup was interrupted");
            break;
        }
    }
}

From source file:edu.northwestern.jcr.adapter.fedora.persistence.FedoraConnector.java

/**
 * Gets a list of all descendants of a given object in the Fedora
 * repository through the resource index, applying the filter
 * if available.
 * The result is in CSV format, as if it were generated directly
 * from the resource index.
 *
 * @param pid pid of the object
 * @param filter filter condition to apply, or null if there is no filter
 * @return list of pids of the descendants that satisfy the filter condition
 */
public String[] listDescendantsRI(String pid, String filter) throws Exception {
    String[] members;
    Map<String, String> pathMap;
    Queue<String> queue;
    List<String> resultList;
    String nextPID;
    String parentPath;

    pathMap = new HashMap<String, String>();
    queue = new LinkedList<String>();
    resultList = new ArrayList<String>();

    if (pid == null) {
        try {
            members = listObjectsRI(null);
        } catch (Exception e) {
            throw e;
        }
    } else {
        // to be implemented
        members = listMembers(pid, null);
    }

    for (String member : members) {
        queue.add(member);
        pathMap.put(member, member);
    }

    if (filter != null) {
        if (pid == null) {
            try {
                members = listObjectsRI(filter);
            } catch (Exception e) {
                throw e;
            }
        } else {
            // to be implemented
            members = listMembers(pid, filter);
        }
    }

    // add only those satisfying the filter to the result list
    for (String member : members) {
        resultList.add(member);
    }

    while (!queue.isEmpty()) {
        nextPID = queue.remove();
        parentPath = pathMap.get(nextPID);

        members = listMembers(nextPID, null);

        for (String member : members) {
            queue.add(member);
            pathMap.put(member, parentPath + "," + member);
        }

        if (filter != null) {
            members = listMembers(nextPID, filter);
        }

        // add only those satisfying the filter to the result list         
        for (String member : members) {
            resultList.add(parentPath + "," + member);
        }
    }

    return (String[]) resultList.toArray(new String[0]);
}

From source file:org.apache.pdfbox.pdfparser.NonSequentialPDFParser.java

/** Adds all objects from newObjects to toBeParsedList, skipping any COSObject
 *  that was already added (checked via addedObjects). */
private final void addNewToList(final Queue<COSBase> toBeParsedList, final Collection<COSBase> newObjects,
        final Set<Long> addedObjects) {
    for (COSBase newObject : newObjects) {
        if (newObject instanceof COSObject) {
            final long objId = getObjectId((COSObject) newObject);
            if (!addedObjects.add(objId)) {
                continue;
            }
        }
        toBeParsedList.add(newObject);
    }
}