Example usage for java.util.Queue.poll()

List of usage examples for java.util.Queue.poll()

Introduction

On this page you can find example usages of java.util.Queue.poll().

Prototype

E poll();

Document

Retrieves and removes the head of this queue, or returns null if this queue is empty.
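
A minimal, self-contained sketch of this contract first (class and variable names are ours): unlike remove(), poll() reports an empty queue with null instead of throwing NoSuchElementException.

import java.util.ArrayDeque;
import java.util.Queue;

public class PollDemo {
    public static void main(String[] args) {
        Queue<String> queue = new ArrayDeque<>();
        queue.add("first");
        queue.add("second");

        System.out.println(queue.poll()); // "first" (head is retrieved and removed)
        System.out.println(queue.poll()); // "second"
        System.out.println(queue.poll()); // null: the queue is empty, no exception
        // queue.remove() would throw NoSuchElementException at this point
    }
}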

Usage

From source file:org.joox.test.JOOXTest.java

@Test
public void testEachCallback() {
    final Queue<Integer> queue = new LinkedList<Integer>();

    queue.addAll(Arrays.asList(0));
    $.each(new Each() {
        @Override
        public void each(Context context) {
            assertEquals(context.element(), context.match());
            assertEquals(context.elementIndex(), context.matchIndex());
            assertEquals(context.elementSize(), context.matchSize());

            assertEquals((int) queue.poll(), context.matchIndex());
            assertEquals(1, context.matchSize());
            assertEquals("document", context.element().getTagName());
        }
    });

    assertTrue(queue.isEmpty());
    queue.addAll(Arrays.asList(0, 1, 2));

    $.children().each(new Each() {
        @Override
        public void each(Context context) {
            assertEquals(context.element(), context.match());
            assertEquals(context.elementIndex(), context.matchIndex());
            assertEquals(context.elementSize(), context.matchSize());

            assertEquals((int) queue.poll(), context.matchIndex());
            assertEquals(3, context.matchSize());
            assertEquals("library", context.element().getTagName());
        }
    });

    assertTrue(queue.isEmpty());
}
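
The test above uses a common testing idiom: expected values go into a queue up front, and the callback consumes them with poll(), so both the order and the number of invocations are verified at once. A reduced sketch of the same idiom (all names here are hypothetical):

import java.util.Arrays;
import java.util.LinkedList;
import java.util.Queue;

public class CallbackOrderCheck {
    public static void main(String[] args) {
        Queue<Integer> expected = new LinkedList<>(Arrays.asList(0, 1, 2));

        // simulate three callbacks arriving in index order
        for (int index = 0; index < 3; index++) {
            int want = expected.poll(); // head of the queue is the next expected index
            if (want != index) throw new AssertionError(want + " != " + index);
        }

        // an empty queue proves every expected callback fired exactly once
        if (!expected.isEmpty()) throw new AssertionError("missed callbacks");
    }
}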

From source file:org.unitime.timetable.backup.SessionBackup.java

@Override
public void backup(OutputStream out, Progress progress, Long sessionId) throws IOException {
    iOut = CodedOutputStream.newInstance(out);
    iProgress = progress;
    iSessionId = sessionId;
    iHibSession = new _RootDAO().createNewSession();
    iHibSession.setCacheMode(CacheMode.IGNORE);
    iHibSessionFactory = iHibSession.getSessionFactory();
    try {
        iProgress.setStatus("Exporting Session");
        iProgress.setPhase("Loading Model", 3);
        TreeSet<ClassMetadata> allMeta = new TreeSet<ClassMetadata>(new Comparator<ClassMetadata>() {
            @Override
            public int compare(ClassMetadata m1, ClassMetadata m2) {
                return m1.getEntityName().compareTo(m2.getEntityName());
            }
        });
        allMeta.addAll(iHibSessionFactory.getAllClassMetadata().values());
        iProgress.incProgress();

        Queue<QueueItem> queue = new LinkedList<QueueItem>();

        queue.add(new QueueItem(iHibSessionFactory.getClassMetadata(Session.class), null, "uniqueId",
                Relation.None));

        Set<String> avoid = new HashSet<String>();
        // avoid following relations
        avoid.add(TimetableManager.class.getName() + ".departments");
        avoid.add(TimetableManager.class.getName() + ".solverGroups");
        avoid.add(DistributionType.class.getName() + ".departments");
        avoid.add(LastLikeCourseDemand.class.getName() + ".student");
        avoid.add(Student.class.getName() + ".lastLikeCourseDemands");

        Set<String> disallowedNotNullRelations = new HashSet<String>();
        disallowedNotNullRelations.add(Assignment.class.getName() + ".datePattern");
        disallowedNotNullRelations.add(Assignment.class.getName() + ".timePattern");
        disallowedNotNullRelations.add(LastLikeCourseDemand.class.getName() + ".student");
        disallowedNotNullRelations.add(OnlineSectioningLog.class.getName() + ".session");

        Map<String, List<QueueItem>> data = new HashMap<String, List<QueueItem>>();
        List<QueueItem> sessions = new ArrayList<QueueItem>();
        sessions.add(queue.peek());
        data.put(queue.peek().name(), sessions);

        QueueItem item = null;
        while ((item = queue.poll()) != null) {
            if (item.size() == 0)
                continue;
            for (ClassMetadata meta : allMeta) {
                if (meta.hasSubclasses())
                    continue;
                for (int i = 0; i < meta.getPropertyNames().length; i++) {
                    String property = meta.getPropertyNames()[i];
                    if (disallowedNotNullRelations.contains(meta.getEntityName() + "." + property)
                            || meta.getPropertyNullability()[i])
                        continue;
                    Type type = meta.getPropertyTypes()[i];
                    if (type instanceof EntityType && type.getReturnedClass().equals(item.clazz())) {
                        QueueItem qi = new QueueItem(meta, item, property, Relation.Parent);
                        if (!data.containsKey(qi.name())) {
                            List<QueueItem> items = new ArrayList<QueueItem>();
                            data.put(qi.name(), items);
                            queue.add(qi);
                            items.add(qi);
                            if (qi.size() > 0)
                                iProgress.info("Parent: " + qi);
                        }
                    }
                }
            }
        }
        iProgress.incProgress();

        for (List<QueueItem> list : data.values())
            queue.addAll(list);

        // The following part is needed to ensure that instructor distribution preferences are saved, including their distribution types
        List<QueueItem> distributions = new ArrayList<QueueItem>();
        for (QueueItem instructor : data.get(DepartmentalInstructor.class.getName())) {
            QueueItem qi = new QueueItem(iHibSessionFactory.getClassMetadata(DistributionPref.class),
                    instructor, "owner", Relation.Parent);
            distributions.add(qi);
            queue.add(qi);
            if (qi.size() > 0)
                iProgress.info("Extra: " + qi);
        }
        data.put(DistributionPref.class.getName(), distributions);

        while ((item = queue.poll()) != null) {
            if (item.size() == 0)
                continue;
            for (int i = 0; i < item.meta().getPropertyNames().length; i++) {
                String property = item.meta().getPropertyNames()[i];
                Type type = item.meta().getPropertyTypes()[i];
                if (type instanceof EntityType) {
                    if (avoid.contains(item.name() + "." + property))
                        continue;

                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(type.getReturnedClass());
                    if (item.contains(meta.getEntityName()))
                        continue;

                    QueueItem qi = new QueueItem(meta, item, property, Relation.One);
                    List<QueueItem> items = data.get(qi.name());
                    if (items == null) {
                        items = new ArrayList<QueueItem>();
                        data.put(qi.name(), items);
                    }
                    queue.add(qi);
                    items.add(qi);

                    if (qi.size() > 0)
                        iProgress.info("One: " + qi);
                }
                if (type instanceof CollectionType) {
                    if (avoid.contains(item.name() + "." + property))
                        continue;

                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(((CollectionType) type)
                            .getElementType((SessionFactoryImplementor) iHibSessionFactory).getReturnedClass());
                    if (meta == null || item.contains(meta.getEntityName()))
                        continue;

                    QueueItem qi = new QueueItem(meta, item, property, Relation.Many);
                    List<QueueItem> items = data.get(qi.name());
                    if (items == null) {
                        items = new ArrayList<QueueItem>();
                        data.put(qi.name(), items);
                    }
                    queue.add(qi);
                    items.add(qi);

                    if (qi.size() > 0)
                        iProgress.info("Many: " + qi);
                }
            }
        }
        iProgress.incProgress();

        Map<String, Set<Serializable>> allExportedIds = new HashMap<String, Set<Serializable>>();
        for (String name : new TreeSet<String>(data.keySet())) {
            List<QueueItem> list = data.get(name);
            Map<String, TableData.Table.Builder> tables = new HashMap<String, TableData.Table.Builder>();
            for (QueueItem current : list) {
                if (current.size() == 0)
                    continue;
                iProgress.info("Loading " + current);
                List<Object> objects = current.list();
                if (objects == null || objects.isEmpty())
                    continue;
                iProgress.setPhase(current.abbv() + " [" + objects.size() + "]", objects.size());
                objects: for (Object object : objects) {
                    iProgress.incProgress();

                    // Get meta data (check for sub-classes)
                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(object.getClass());
                    if (meta == null)
                        meta = current.meta();
                    if (meta.hasSubclasses()) {
                        for (Iterator i = iHibSessionFactory.getAllClassMetadata().entrySet().iterator(); i
                                .hasNext();) {
                            Map.Entry entry = (Map.Entry) i.next();
                            ClassMetadata classMetadata = (ClassMetadata) entry.getValue();
                            if (classMetadata.getMappedClass().isInstance(object)
                                    && !classMetadata.hasSubclasses()) {
                                meta = classMetadata;
                                break;
                            }
                        }
                    }

                    // Get unique identifier
                    Serializable id = meta.getIdentifier(object, (SessionImplementor) iHibSession);

                    // Check if already exported
                    Set<Serializable> exportedIds = allExportedIds.get(meta.getEntityName());
                    if (exportedIds == null) {
                        exportedIds = new HashSet<Serializable>();
                        allExportedIds.put(meta.getEntityName(), exportedIds);
                    }
                    if (!exportedIds.add(id))
                        continue;

                    // Check relation to an academic session (if exists)
                    for (String property : meta.getPropertyNames()) {
                        Type type = meta.getPropertyType(property);
                        if (type instanceof EntityType && type.getReturnedClass().equals(Session.class)) {
                            Session s = (Session) meta.getPropertyValue(object, property);
                            if (s != null && !s.getUniqueId().equals(iSessionId)) {
                                iProgress.warn(meta.getEntityName()
                                        .substring(meta.getEntityName().lastIndexOf('.') + 1) + "@" + id
                                        + " belongs to a different academic session (" + s + ")");
                                continue objects; // wrong session
                            }
                        }
                    }

                    // Get appropriate table
                    TableData.Table.Builder table = tables.get(meta.getEntityName());
                    if (table == null) {
                        table = TableData.Table.newBuilder();
                        tables.put(meta.getEntityName(), table);
                        table.setName(meta.getEntityName());
                    }

                    // Export object
                    TableData.Record.Builder record = TableData.Record.newBuilder();
                    record.setId(id.toString());
                    for (String property : meta.getPropertyNames()) {
                        Type type = meta.getPropertyType(property);
                        Object value = meta.getPropertyValue(object, property);
                        if (value == null)
                            continue;
                        TableData.Element.Builder element = TableData.Element.newBuilder();
                        element.setName(property);
                        if (type instanceof PrimitiveType) {
                            element.addValue(((PrimitiveType) type).toString(value));
                        } else if (type instanceof StringType) {
                            element.addValue(((StringType) type).toString((String) value));
                        } else if (type instanceof BinaryType) {
                            element.addValueBytes(ByteString.copyFrom((byte[]) value));
                        } else if (type instanceof TimestampType) {
                            element.addValue(((TimestampType) type).toString((Date) value));
                        } else if (type instanceof DateType) {
                            element.addValue(((DateType) type).toString((Date) value));
                        } else if (type instanceof EntityType) {
                            List<Object> ids = current.relation(property, id, false);
                            if (ids != null)
                                for (Object i : ids)
                                    element.addValue(i.toString());
                            iHibSession.evict(value);
                        } else if (type instanceof CustomType && value instanceof Document) {
                            if (object instanceof CurriculumClassification && property.equals("students"))
                                continue;
                            StringWriter w = new StringWriter();
                            XMLWriter x = new XMLWriter(w, OutputFormat.createCompactFormat());
                            x.write((Document) value);
                            x.flush();
                            x.close();
                            element.addValue(w.toString());
                        } else if (type instanceof CollectionType) {
                            List<Object> ids = current.relation(property, id, false);
                            if (ids != null)
                                for (Object i : ids)
                                    element.addValue(i.toString());
                        } else if (type instanceof EmbeddedComponentType
                                && property.equalsIgnoreCase("uniqueCourseNbr")) {
                            continue;
                        } else {
                            iProgress.warn("Unknown data type: " + type + " (property " + meta.getEntityName()
                                    + "." + property + ", class " + value.getClass() + ")");
                            continue;
                        }
                        record.addElement(element.build());

                    }
                    table.addRecord(record.build());
                    iHibSession.evict(object);
                }
                current.clearCache();
            }

            for (TableData.Table.Builder table : tables.values()) {
                add(table.build());
            }
        }

        /*
        // Skip ConstraintInfo
        if (!iData.containsKey(ConstraintInfo.class.getName()))
            iData.put(ConstraintInfo.class.getName(), new QueueItem(iHibSessionFactory.getClassMetadata(ConstraintInfo.class), null, null, Relation.Empty));

        for (String name: items)
            export(iData.get(name));

        while (true) {
            List<Object> objects = new ArrayList<Object>();
            ClassMetadata meta = null;
            for (Entity e: iObjects) {
                if (e.exported()) continue;
                if (objects.isEmpty() || meta.getEntityName().equals(e.name())) {
                    meta = e.meta();
                    objects.add(e.object());
                    e.notifyExported();
                }
            }
            if (objects.isEmpty()) break;
            export(meta, objects, null);
        }
        */
        iProgress.setStatus("All done.");
    } finally {
        iHibSession.close();
    }
}
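
The backup above walks entity relations breadth-first with the standard drain idiom, polling until null while enqueuing newly discovered items inside the loop. A reduced sketch of that traversal shape (the graph and all names are invented):

import java.util.ArrayDeque;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;

public class RelationWalk {
    public static void main(String[] args) {
        Map<String, List<String>> relations = Map.of(
                "Session", List.of("Department", "Student"),
                "Department", List.of("Course"),
                "Student", List.of(),
                "Course", List.of());

        Queue<String> queue = new ArrayDeque<>();
        Set<String> seen = new HashSet<>();
        queue.add("Session");
        seen.add("Session");

        String item;
        while ((item = queue.poll()) != null) { // null signals the queue is drained
            System.out.println("export " + item);
            for (String child : relations.get(item)) {
                if (seen.add(child)) {
                    queue.add(child); // discovered mid-loop, processed on a later iteration
                }
            }
        }
    }
}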

From source file:org.aliuge.crawler.extractor.selector.IFConditions.java

/**
 * Tests whether the condition expression holds for the extracted content.
 *
 * @param selectContent the extracted values the condition refers to
 * @return true if the condition is satisfied
 * @throws ExtractException if a variable referenced by the condition is missing
 */
public boolean test(Map<String, Object> selectContent) throws ExtractException {
    TreeMap<Integer, String> conIndex = Maps.newTreeMap();
    Queue<SimpleExpression> expressionQueue = Queues.newArrayDeque();
    Queue<String> logicQueue = Queues.newArrayDeque();
    // a=b and c=d or c=e or x=y
    int index = 0;
    for (String co : cond) {
        index = 0;
        while ((index = conditions.indexOf(co, index + 1)) > -1) {
            int i = index;
            conIndex.put(i, co);
        }
    }
    index = 0;
    for (Entry<Integer, String> entry : conIndex.entrySet()) {
        String subExp = conditions.substring(index, entry.getKey());
        for (String op : operations) {
            int i = subExp.indexOf(op);
            if (i > -1) {
                String[] ss = subExp.split(op);
                if (null == selectContent.get(ss[0].trim())) {
                    throw new ExtractException("Condition variable not found ["
                            + this.conditions + "] " + ss[0]);
                }
                expressionQueue
                        .add(new SimpleExpression(StringUtils.trim((String) selectContent.get(ss[0].trim())),
                                StringUtils.trim(ss[1]), op));
                logicQueue.add(StringUtils.trim(entry.getValue()));
            }
        }
        index = entry.getKey() + entry.getValue().length();
    }
    // handle the trailing sub-expression after the last logical operator
    String subExp = conditions.substring(index);
    for (String op : operations) {
        int i = subExp.indexOf(op);
        if (i > -1) {
            String[] ss = subExp.split(op);
            if (null == selectContent.get(ss[0].trim())) {
                throw new ExtractException("Condition variable not found [" + this.conditions
                        + "] " + ss[0]);
            }
            expressionQueue.add(new SimpleExpression(StringUtils.trim((String) selectContent.get(ss[0].trim())),
                    StringUtils.trim(ss[1]), op));
        }
    }
    boolean b;
    try {
        b = expressionQueue.poll().test();
        while (!expressionQueue.isEmpty()) {
            b = cacl(b, logicQueue.poll(), expressionQueue.poll());
        }
        return b;
    } catch (Exception e) {
        e.printStackTrace();
    }

    return false;
}
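
The method above collects the comparisons and the logical connectives into two queues and then folds them left to right. A self-contained sketch of that fold, with all names hypothetical:

import java.util.ArrayDeque;
import java.util.List;
import java.util.Queue;

public class LeftToRightEval {
    public static void main(String[] args) {
        // a=b and c=c or d=d  ->  expressions [false, true, true], logic [and, or]
        Queue<Boolean> expressions = new ArrayDeque<>(List.of(false, true, true));
        Queue<String> logic = new ArrayDeque<>(List.of("and", "or"));

        boolean result = expressions.poll();
        while (!expressions.isEmpty()) {
            String op = logic.poll();          // connective between the two operands
            boolean next = expressions.poll(); // next comparison result
            result = "and".equals(op) ? (result && next) : (result || next);
        }
        System.out.println(result); // true: (false and true) or true
    }
}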

From source file:org.apache.pdfbox.pdfparser.NonSequentialPDFParser.java

/**
 * Will parse every object necessary to load a single page from the pdf document.
 * We try our best to order objects according to offset in file before reading
 * to minimize seek operations.
 * 
 * @param dict the COSObject from the parent pages.
 * @param excludeObjects dictionary object reference entries with these names will not be parsed
 * 
 * @throws IOException
 */
private void parseDictObjects(COSDictionary dict, COSName... excludeObjects) throws IOException {
    // ---- create queue for objects waiting for further parsing
    final Queue<COSBase> toBeParsedList = new LinkedList<COSBase>();
    // offset ordered object map
    final TreeMap<Long, List<COSObject>> objToBeParsed = new TreeMap<Long, List<COSObject>>();
    // in case of compressed objects offset points to stmObj
    final Set<Long> parsedObjects = new HashSet<Long>();
    final Set<Long> addedObjects = new HashSet<Long>();

    // ---- add objects not to be parsed to list of already parsed objects
    if (excludeObjects != null) {
        for (COSName objName : excludeObjects) {
            COSBase baseObj = dict.getItem(objName);
            if (baseObj instanceof COSObject) {
                parsedObjects.add(getObjectId((COSObject) baseObj));
            }
        }
    }

    addNewToList(toBeParsedList, dict.getValues(), addedObjects);

    // ---- go through objects to be parsed
    while (!(toBeParsedList.isEmpty() && objToBeParsed.isEmpty())) {
        // -- first get all COSObject from other kind of objects and
        //    put them in objToBeParsed; afterwards toBeParsedList is empty
        COSBase baseObj;
        while ((baseObj = toBeParsedList.poll()) != null) {
            if (baseObj instanceof COSStream) {
                addNewToList(toBeParsedList, ((COSStream) baseObj).getValues(), addedObjects);
            } else if (baseObj instanceof COSDictionary) {
                addNewToList(toBeParsedList, ((COSDictionary) baseObj).getValues(), addedObjects);
            } else if (baseObj instanceof COSArray) {
                final Iterator<COSBase> arrIter = ((COSArray) baseObj).iterator();
                while (arrIter.hasNext()) {
                    addNewToList(toBeParsedList, arrIter.next(), addedObjects);
                }
            } else if (baseObj instanceof COSObject) {
                COSObject obj = (COSObject) baseObj;
                long objId = getObjectId(obj);
                COSObjectKey objKey = new COSObjectKey(obj.getObjectNumber().intValue(),
                        obj.getGenerationNumber().intValue());

                if (!(parsedObjects.contains(objId) /*|| document.hasObjectInPool( objKey ) */ )) {
                    Long fileOffset = xrefTrailerResolver.getXrefTable().get(objKey);
                    //  it is allowed that object references point to null, thus we have to test
                    if (fileOffset != null) {
                        if (fileOffset > 0) {
                            objToBeParsed.put(fileOffset, Collections.singletonList(obj));
                        } else {
                            // negative offset means we have a compressed object within object stream;
                            // get offset of object stream
                            fileOffset = xrefTrailerResolver.getXrefTable()
                                    .get(new COSObjectKey(-fileOffset, 0));
                            if ((fileOffset == null) || (fileOffset <= 0)) {
                                throw new IOException(
                                        "Invalid object stream xref object reference: " + fileOffset);
                            }

                            List<COSObject> stmObjects = objToBeParsed.get(fileOffset);
                            if (stmObjects == null) {
                                objToBeParsed.put(fileOffset, stmObjects = new ArrayList<COSObject>());
                            }
                            stmObjects.add(obj);
                        }
                    } else {
                        // NULL object
                        COSObject pdfObject = document.getObjectFromPool(objKey);
                        pdfObject.setObject(COSNull.NULL);
                    }
                }
            }
        }

        // ---- read first COSObject with smallest offset;
        //      resulting object will be added to toBeParsedList
        if (objToBeParsed.isEmpty()) {
            break;
        }

        for (COSObject obj : objToBeParsed.remove(objToBeParsed.firstKey())) {
            COSBase parsedObj = parseObjectDynamically(obj, false);

            obj.setObject(parsedObj);
            addNewToList(toBeParsedList, parsedObj, addedObjects);

            parsedObjects.add(getObjectId(obj));
        }
    }
}
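
The parser above alternates between draining the discovery queue and reading the object with the smallest file offset from the TreeMap, which keeps disk access close to sequential. A reduced sketch of that two-phase loop (offsets and names invented):

import java.util.ArrayDeque;
import java.util.List;
import java.util.Queue;
import java.util.TreeMap;

public class OffsetOrderedReads {
    public static void main(String[] args) {
        Queue<Long> discovered = new ArrayDeque<>(List.of(900L, 120L, 530L));
        TreeMap<Long, String> byOffset = new TreeMap<>();

        while (!(discovered.isEmpty() && byOffset.isEmpty())) {
            Long offset;
            while ((offset = discovered.poll()) != null) { // empty the queue first
                byOffset.put(offset, "object@" + offset);
            }
            if (byOffset.isEmpty()) {
                break;
            }
            // read the object with the smallest offset; in the real parser this
            // may discover further references and refill the queue
            System.out.println("read " + byOffset.remove(byOffset.firstKey()));
        }
    }
}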

From source file:de.tudarmstadt.ukp.dkpro.lexsemresource.graph.EntityGraphJGraphT.java

/**
 * Creates the hyponym map, that maps from nodes to their (recursive) number of hyponyms for
 * each node. "recursive" means that the hyponyms of hyponyms are also taken into account.
 *
 * @throws UnsupportedOperationException
 * @throws LexicalSemanticResourceException
 */
private Map<String, Integer> getHyponymCountMap() throws LexicalSemanticResourceException {
    // do only create hyponymMap, if it was not already computed
    if (hyponymCountMap != null) {
        return hyponymCountMap;
    }

    // work on the lcc, otherwise this is not going to work
    // EntityGraphJGraphT lcc = this;
    EntityGraphJGraphT lcc = this.getLargestConnectedComponent();
    lcc.removeCycles();
    int nrOfNodes = lcc.getNumberOfNodes();

    File hyponymCountMapSerializedFile = new File(
            getGraphId() + "_" + hyponymCountMapFilename + (lexSemRes.getIsCaseSensitive() ? "-cs" : "-cis"));
    hyponymCountMap = new HashMap<String, Integer>();

    if (hyponymCountMapSerializedFile.exists()) {
        logger.info("Loading saved hyponymyCountMap ...");
        hyponymCountMap = EntityGraphUtils.deserializeMap(hyponymCountMapSerializedFile);
        if (hyponymCountMap.size() != nrOfNodes) {
            throw new LexicalSemanticResourceException(
                    "HyponymCountMap does not contain an entry for each node in the graph."
                            + hyponymCountMap.size() + "/" + nrOfNodes);
        }
        logger.info("Done loading saved hyponymyCountMap");
        return hyponymCountMap;
    }

    hyponymCountMap = new HashMap<String, Integer>();

    // a queue holding the nodes to process
    Queue<String> queue = new LinkedList<String>();

    // In the entity graph a node may have more than one father.
    // Thus, we check whether a node was already visited.
    // Then, it is not expanded again.
    Set<String> visited = new HashSet<String>();

    // initialize the queue with all leaf nodes
    Set<String> leafNodes = new HashSet<String>();
    for (Entity leaf : lcc.getLeaves()) {
        leafNodes.add(leaf.getId());
    }
    queue.addAll(leafNodes);

    logger.info(leafNodes.size() + " leaf nodes.");

    ProgressMeter progress = new ProgressMeter(getNumberOfNodes());
    // while the queue is not empty
    while (!queue.isEmpty()) {
        // remove first element from queue
        String currNodeId = queue.poll();
        Entity currNode = lexSemRes.getEntityById(currNodeId);

        // in some rare cases, getEntityById might fail - so better check for nulls and fail
        // gracefully
        if (currNode == null) {
            visited.add(currNodeId);
            hyponymCountMap.put(currNodeId, 0);
        }

        logger.debug(queue.size());

        if (visited.contains(currNodeId)) {
            continue;
        }

        progress.next();

        if (logger.isDebugEnabled()) {
            logger.debug(progress + " - " + queue.size() + " left in queue");
        } else if (logger.isInfoEnabled() && (progress.getCount() % 100 == 0)) {
            logger.info(progress + " - " + queue.size() + " left in queue");
        }

        Set<Entity> children = lcc.getChildren(currNode);
        Set<String> invalidChildIds = new HashSet<String>();
        int validChildren = 0;
        int sumChildHyponyms = 0;
        boolean invalid = false;
        for (Entity child : children) {
            if (lcc.containsVertex(child)) {
                if (hyponymCountMap.containsKey(child.getId())) {
                    sumChildHyponyms += hyponymCountMap.get(child.getId());
                    validChildren++;
                } else {
                    invalid = true;
                    invalidChildIds.add(child.getId());
                }
            }
        }

        // we cannot use continue directly if invalid, as this would continue the inner
        // loop, not the outer loop
        if (invalid) {
            // One of the children is not in the hyponymCountMap yet.
            // Re-enter the node into the queue and continue with the next node.
            // Also enqueue all children that are not in the queue yet.
            queue.add(currNodeId);
            for (String childId : invalidChildIds) {
                if (!visited.contains(childId) && !queue.contains(childId)) {
                    queue.add(childId);
                }
            }
            continue;
        }

        // mark as visited
        visited.add(currNodeId);

        // the number of hyponyms of the current node is the number of its valid children
        // plus the sum of the hyponym counts of those children.
        int currNodeHyponomyCount = validChildren + sumChildHyponyms;
        hyponymCountMap.put(currNodeId, currNodeHyponomyCount);

        // add parents of current node to queue
        for (Entity parent : lcc.getParents(currNode)) {
            if (lcc.containsVertex(parent)) {
                queue.add(parent.getId());
            }
        }
    } // while queue not empty

    logger.info(visited.size() + " nodes visited");
    if (visited.size() != nrOfNodes) {
        List<Entity> missed = new ArrayList<Entity>();
        for (Entity e : lcc.getNodes()) {
            if (!visited.contains(e.getId())) {
                missed.add(e);
                System.out.println("Missed: [" + e + "]");
            }
        }

        throw new LexicalSemanticResourceException(
                "Visited only " + visited.size() + " out of " + nrOfNodes + " nodes.");
    }
    if (hyponymCountMap.size() != nrOfNodes) {
        throw new LexicalSemanticResourceException(
                "HyponymCountMap does not contain an entry for each node in the graph." + hyponymCountMap.size()
                        + "/" + nrOfNodes);
    }

    /*
     * As an EntityGraph is a graph rather than a tree, the hyponymCount for top nodes can be
     * greater than the number of nodes in the graph. This is due to the multiple counting of nodes
     * having more than one parent. Thus, we have to scale hyponym counts to fall in
     * [0,NumberOfNodes].
     */
    for (String key : hyponymCountMap.keySet()) {
        if (hyponymCountMap.get(key) > hyponymCountMap.size()) {
            // TODO scaling function is not optimal (to say the least :)
            hyponymCountMap.put(key, (hyponymCountMap.size() - 1));
        }
    }

    logger.info("Computed hyponymCountMap");
    EntityGraphUtils.serializeMap(hyponymCountMap, hyponymCountMapSerializedFile);
    logger.info("Serialized hyponymCountMap");

    return hyponymCountMap;
}
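
The counts above propagate from the leaves to the roots: a node whose children are not all counted yet is simply re-enqueued for a later pass, which is why a Queue drives the loop instead of recursion. A compact sketch of that re-enqueue trick (the tree and all names are made up):

import java.util.ArrayDeque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Queue;

public class BottomUpCounts {
    public static void main(String[] args) {
        Map<String, List<String>> children = Map.of(
                "root", List.of("a", "b"),
                "a", List.of("leaf"),
                "b", List.of(),
                "leaf", List.of());
        Map<String, Integer> counts = new HashMap<>();
        Queue<String> queue = new ArrayDeque<>(List.of("root", "a", "b", "leaf"));

        String node;
        while ((node = queue.poll()) != null) {
            List<String> kids = children.get(node);
            if (!counts.keySet().containsAll(kids)) {
                queue.add(node); // children not counted yet; retry this node later
                continue;
            }
            int sum = kids.size();
            for (String kid : kids) {
                sum += counts.get(kid);
            }
            counts.put(node, sum);
        }
        System.out.println(counts); // leaf=0, b=0, a=1, root=3
    }
}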

From source file:org.voltdb.iv2.LeaderAppointer.java

private boolean isClusterKSafe(Set<Integer> hostsOnRing) {
    boolean retval = true;
    List<String> partitionDirs = null;

    ImmutableSortedSet.Builder<KSafetyStats.StatsPoint> lackingReplication = ImmutableSortedSet.naturalOrder();

    try {
        partitionDirs = m_zk.getChildren(VoltZK.leaders_initiators, null);
    } catch (Exception e) {
        VoltDB.crashLocalVoltDB("Unable to read partitions from ZK", true, e);
    }

    // Don't fetch the values serially; do it asynchronously
    Queue<ZKUtil.ByteArrayCallback> dataCallbacks = new ArrayDeque<ZKUtil.ByteArrayCallback>();
    Queue<ZKUtil.ChildrenCallback> childrenCallbacks = new ArrayDeque<ZKUtil.ChildrenCallback>();
    for (String partitionDir : partitionDirs) {
        String dir = ZKUtil.joinZKPath(VoltZK.leaders_initiators, partitionDir);
        try {
            ZKUtil.ByteArrayCallback callback = new ZKUtil.ByteArrayCallback();
            m_zk.getData(dir, false, callback, null);
            dataCallbacks.offer(callback);
            ZKUtil.ChildrenCallback childrenCallback = new ZKUtil.ChildrenCallback();
            m_zk.getChildren(dir, false, childrenCallback, null);
            childrenCallbacks.offer(childrenCallback);
        } catch (Exception e) {
            VoltDB.crashLocalVoltDB("Unable to read replicas in ZK dir: " + dir, true, e);
        }
    }
    final long statTs = System.currentTimeMillis();
    for (String partitionDir : partitionDirs) {
        int pid = LeaderElector.getPartitionFromElectionDir(partitionDir);

        String dir = ZKUtil.joinZKPath(VoltZK.leaders_initiators, partitionDir);
        try {
            // The data of the partition dir indicates whether the partition has finished
            // initializing or not. If not, the replicas may still be in the process of
            // adding themselves to the dir. So don't check for k-safety if that's the case.
            byte[] partitionState = dataCallbacks.poll().getData();
            boolean isInitializing = false;
            if (partitionState != null && partitionState.length == 1) {
                isInitializing = partitionState[0] == LeaderElector.INITIALIZING;
            }

            List<String> replicas = childrenCallbacks.poll().getChildren();
            if (pid == MpInitiator.MP_INIT_PID)
                continue;
            final boolean partitionNotOnHashRing = partitionNotOnHashRing(pid);
            if (!isInitializing && replicas.isEmpty()) {
                //These partitions can fail, just cleanup and remove the partition from the system
                if (partitionNotOnHashRing) {
                    removeAndCleanupPartition(pid);
                    continue;
                }
                tmLog.fatal("K-Safety violation: No replicas found for partition: " + pid);
                retval = false;
            } else if (!partitionNotOnHashRing) {
                //Record host ids for all partitions that are on the ring
                //so they are considered for partition detection
                for (String replica : replicas) {
                    final String split[] = replica.split("/");
                    final long hsId = Long.valueOf(split[split.length - 1].split("_")[0]);
                    final int hostId = CoreUtils.getHostIdFromHSId(hsId);
                    hostsOnRing.add(hostId);
                }
            }
            if (!isInitializing && !partitionNotOnHashRing) {
                lackingReplication
                        .add(new KSafetyStats.StatsPoint(statTs, pid, m_kfactor + 1 - replicas.size()));
            }
        } catch (Exception e) {
            VoltDB.crashLocalVoltDB("Unable to read replicas in ZK dir: " + dir, true, e);
        }
    }
    m_stats.setSafetySet(lackingReplication.build());

    return retval;
}
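
The k-safety check above issues all ZooKeeper reads first and then polls the callbacks back in the same order, so a plain FIFO queue keeps each result paired with its partition directory. A minimal sketch of that pairing idea, using CompletableFuture in place of the ZKUtil callbacks:

import java.util.ArrayDeque;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.CompletableFuture;

public class PairedAsyncReads {
    public static void main(String[] args) throws Exception {
        List<String> partitions = List.of("p0", "p1", "p2");
        Queue<CompletableFuture<String>> callbacks = new ArrayDeque<>();

        // phase 1: fire every request without waiting
        for (String p : partitions) {
            callbacks.offer(CompletableFuture.supplyAsync(() -> "data-for-" + p));
        }

        // phase 2: poll the callbacks in issue order, so each result
        // lines up with the partition that requested it
        for (String p : partitions) {
            System.out.println(p + " -> " + callbacks.poll().get());
        }
    }
}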

From source file:org.structr.web.servlet.HtmlServlet.java

@Override
protected void doGet(final HttpServletRequest request, final HttpServletResponse response) {

    final Authenticator auth = getConfig().getAuthenticator();
    List<Page> pages = null;
    boolean requestUriContainsUuids = false;

    SecurityContext securityContext;
    final App app;

    try {
        final String path = request.getPathInfo();

        // check for registration (has its own tx because of write access)
        if (checkRegistration(auth, request, response, path)) {

            return;
        }

        // check for password reset (has its own tx because of write access)
        if (checkResetPassword(auth, request, response, path)) {

            return;
        }

        // isolate request authentication in a transaction
        try (final Tx tx = StructrApp.getInstance().tx()) {
            securityContext = auth.initializeAndExamineRequest(request, response);
            tx.success();
        }

        app = StructrApp.getInstance(securityContext);

        try (final Tx tx = app.tx()) {

            // Ensure access mode is frontend
            securityContext.setAccessMode(AccessMode.Frontend);

            request.setCharacterEncoding("UTF-8");

            // Important: Set character encoding before calling response.getWriter() !!, see Servlet Spec 5.4
            response.setCharacterEncoding("UTF-8");

            boolean dontCache = false;

            logger.log(Level.FINE, "Path info {0}", path);

            // don't continue on redirects
            if (response.getStatus() == 302) {

                tx.success();
                return;
            }

            final Principal user = securityContext.getUser(false);
            if (user != null) {

                // Don't cache if a user is logged in
                dontCache = true;

            }

            final RenderContext renderContext = RenderContext.getInstance(securityContext, request, response);

            renderContext.setResourceProvider(config.getResourceProvider());

            final EditMode edit = renderContext.getEditMode(user);

            DOMNode rootElement = null;
            AbstractNode dataNode = null;

            final String[] uriParts = PathHelper.getParts(path);
            if ((uriParts == null) || (uriParts.length == 0)) {

                // find a visible page
                rootElement = findIndexPage(securityContext, pages, edit);

                logger.log(Level.FINE, "No path supplied, trying to find index page");

            } else {

                if (rootElement == null) {

                    rootElement = findPage(securityContext, pages, path, edit);

                } else {
                    dontCache = true;
                }
            }

            if (rootElement == null) { // No page found

                // Look for a file
                final File file = findFile(securityContext, request, path);
                if (file != null) {

                    streamFile(securityContext, file, request, response, edit);
                    tx.success();
                    return;

                }

                // store remaining path parts in request
                final Matcher matcher = threadLocalUUIDMatcher.get();

                for (int i = 0; i < uriParts.length; i++) {

                    request.setAttribute(uriParts[i], i);
                    matcher.reset(uriParts[i]);

                    // set to "true" if part matches UUID pattern
                    requestUriContainsUuids |= matcher.matches();

                }

                if (!requestUriContainsUuids) {

                    // Try to find a data node by name
                    dataNode = findFirstNodeByName(securityContext, request, path);

                } else {

                    dataNode = findNodeByUuid(securityContext, PathHelper.getName(path));

                }

                //if (dataNode != null && !(dataNode instanceof Linkable)) {
                if (dataNode != null) {

                    // Last path part matches a data node
                    // Remove last path part and try again searching for a page
                    // clear possible entry points
                    request.removeAttribute(POSSIBLE_ENTRY_POINTS_KEY);

                    rootElement = findPage(securityContext, pages,
                            StringUtils.substringBeforeLast(path, PathHelper.PATH_SEP), edit);

                    renderContext.setDetailsDataObject(dataNode);

                    // Start rendering on data node
                    if (rootElement == null && dataNode instanceof DOMNode) {

                        rootElement = ((DOMNode) dataNode);
                    }
                }
            }

            // look for pages with HTTP Basic Authentication (must be done as superuser)
            if (rootElement == null) {

                final HttpBasicAuthResult authResult = checkHttpBasicAuth(request, response, path);

                switch (authResult.authState()) {

                // Element with Basic Auth found and authentication succeeded
                case Authenticated:
                    final Linkable result = authResult.getRootElement();
                    if (result instanceof Page) {

                        rootElement = (DOMNode) result;
                        securityContext = authResult.getSecurityContext();
                        renderContext.pushSecurityContext(securityContext);

                    } else if (result instanceof File) {

                        streamFile(authResult.getSecurityContext(), (File) result, request, response,
                                EditMode.NONE);
                        tx.success();
                        return;

                    }
                    break;

                // Page with Basic Auth found but not yet authenticated
                case MustAuthenticate:
                    tx.success();
                    return;

                // no Basic Auth for given path, go on
                case NoBasicAuth:
                    break;
                }

            }

            // Still nothing found, do error handling
            if (rootElement == null) {
                rootElement = notFound(response, securityContext);
            }

            if (rootElement == null) {
                tx.success();
                return;
            }

            // check dont cache flag on page (if root element is a page)
            // but don't modify true to false
            dontCache |= rootElement.getProperty(Page.dontCache);

            if (EditMode.WIDGET.equals(edit) || dontCache) {

                setNoCacheHeaders(response);

            }

            if (!securityContext.isVisible(rootElement)) {

                rootElement = notFound(response, securityContext);
                if (rootElement == null) {

                    tx.success();
                    return;
                }

            } else {

                if (!EditMode.WIDGET.equals(edit) && !dontCache
                        && notModifiedSince(request, response, rootElement, dontCache)) {

                    ServletOutputStream out = response.getOutputStream();
                    out.flush();
                    //response.flushBuffer();
                    out.close();

                } else {

                    // prepare response
                    response.setCharacterEncoding("UTF-8");

                    String contentType = rootElement.getProperty(Page.contentType);

                    if (contentType == null) {

                        // Default
                        contentType = "text/html;charset=UTF-8";
                    }

                    if (contentType.equals("text/html")) {
                        contentType = contentType.concat(";charset=UTF-8");
                    }

                    response.setContentType(contentType);

                    setCustomResponseHeaders(response);

                    final boolean createsRawData = rootElement.getProperty(Page.pageCreatesRawData);

                    // async or not?
                    if (isAsync && !createsRawData) {

                        final AsyncContext async = request.startAsync();
                        final ServletOutputStream out = async.getResponse().getOutputStream();
                        final AtomicBoolean finished = new AtomicBoolean(false);
                        final DOMNode rootNode = rootElement;

                        threadPool.submit(new Runnable() {

                            @Override
                            public void run() {

                                try (final Tx tx = app.tx()) {

                                    //final long start = System.currentTimeMillis();
                                    // render
                                    rootNode.render(renderContext, 0);
                                    finished.set(true);

                                    //final long end = System.currentTimeMillis();
                                    //System.out.println("Done in " + (end-start) + " ms");
                                    tx.success();

                                } catch (Throwable t) {
                                    t.printStackTrace();
                                    final String errorMsg = t.getMessage();
                                    try {
                                        //response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                                        response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                                                errorMsg);
                                        finished.set(true);
                                    } catch (IOException ex) {
                                        ex.printStackTrace();
                                    }
                                }
                            }

                        });

                        // start output write listener
                        out.setWriteListener(new WriteListener() {

                            @Override
                            public void onWritePossible() throws IOException {

                                try {

                                    final Queue<String> queue = renderContext.getBuffer().getQueue();
                                    while (out.isReady()) {

                                        String buffer = null;

                                        synchronized (queue) {
                                            buffer = queue.poll();
                                        }

                                        if (buffer != null) {

                                            out.print(buffer);

                                        } else {

                                            if (finished.get()) {

                                                async.complete();
                                                response.setStatus(HttpServletResponse.SC_OK);

                                                // prevent this block from being called again
                                                break;
                                            }

                                            Thread.sleep(1);
                                        }
                                    }

                                } catch (Throwable t) {
                                    t.printStackTrace();
                                }
                            }

                            @Override
                            public void onError(Throwable t) {
                                t.printStackTrace();
                            }
                        });

                    } else {

                        final StringRenderBuffer buffer = new StringRenderBuffer();
                        renderContext.setBuffer(buffer);

                        // render
                        rootElement.render(renderContext, 0);

                        try {

                            response.getOutputStream().write(buffer.getBuffer().toString().getBytes("utf-8"));
                            response.getOutputStream().flush();
                            response.getOutputStream().close();

                        } catch (IOException ioex) {
                            ioex.printStackTrace();
                        }
                    }
                }
            }

            tx.success();

        } catch (FrameworkException fex) {
            fex.printStackTrace();
            logger.log(Level.SEVERE, "Exception while processing request", fex);
        }

    } catch (IOException | FrameworkException t) {

        t.printStackTrace();
        logger.log(Level.SEVERE, "Exception while processing request", t);
        UiAuthenticator.writeInternalServerError(response);
    }
}
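
In the async branch above, the renderer thread fills a buffer queue while the write listener polls it; null from poll() means "nothing buffered yet", not "done", so a separate finished flag decides when to stop. A stripped-down sketch of that handshake (all names are ours):

import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.atomic.AtomicBoolean;

public class RenderHandshake {
    public static void main(String[] args) throws InterruptedException {
        Queue<String> queue = new ArrayDeque<>();
        AtomicBoolean finished = new AtomicBoolean(false);

        Thread producer = new Thread(() -> {
            for (int i = 0; i < 3; i++) {
                synchronized (queue) {
                    queue.add("chunk-" + i);
                }
            }
            finished.set(true);
        });
        producer.start();

        while (true) {
            String chunk;
            synchronized (queue) {
                chunk = queue.poll();
            }
            if (chunk != null) {
                System.out.print(chunk + " ");
            } else if (finished.get()) {
                break; // producer is done AND the queue is drained
            } else {
                Thread.sleep(1); // nothing buffered yet; try again
            }
        }
        System.out.println("\ndone");
    }
}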

From source file:net.cellcloud.talk.TalkService.java

/** Notifies the remote peer identified by the target tag to resume a suspended
 * talk, re-sending the queued primitives recorded at or after the start time.
 */
protected void noticeResume(Cellet cellet, String targetTag, Queue<Long> timestampQueue,
        Queue<Primitive> primitiveQueue, long startTime) {
    TalkSessionContext context = this.tagContexts.get(targetTag);
    if (null == context) {
        if (Logger.isDebugLevel()) {
            Logger.d(TalkService.class, "Not find session by remote tag");
        }
        return;
    }

    Message message = null;

    synchronized (context) {
        // look up the talk tracker for this session context
        TalkTracker tracker = context.getTracker();
        // make sure the tracker maps this cellet's identifier to the same cellet instance
        if (tracker.getCellet(cellet.getFeature().getIdentifier()) == cellet) {
            Session session = context.getLastSession();

            // drain both queues in lockstep, re-sending primitives recorded after startTime
            for (int i = 0, size = timestampQueue.size(); i < size; ++i) {
                Long timestamp = timestampQueue.poll();
                Primitive primitive = primitiveQueue.poll();
                if (timestamp.longValue() >= startTime) {
                    message = this.packetResume(targetTag, timestamp, primitive);
                    if (null != message) {
                        session.write(message);
                    }
                }
            }
        }
    }
}
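
Here two queues travel in lockstep: each poll() on the timestamp queue is paired with a poll() on the primitive queue, and the size is captured once before polling begins. A tiny sketch of that parallel drain with a cutoff like startTime (types simplified, names ours):

import java.util.ArrayDeque;
import java.util.List;
import java.util.Queue;

public class LockstepDrain {
    public static void main(String[] args) {
        Queue<Long> timestamps = new ArrayDeque<>(List.of(10L, 20L, 30L));
        Queue<String> payloads = new ArrayDeque<>(List.of("a", "b", "c"));
        long startTime = 15L;

        // capture size once: both queues shrink together as we poll
        for (int i = 0, size = timestamps.size(); i < size; i++) {
            Long ts = timestamps.poll();
            String payload = payloads.poll(); // always poll both to stay aligned
            if (ts >= startTime) {
                System.out.println(ts + " -> " + payload); // 20 -> b, 30 -> c
            }
        }
    }
}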

From source file:org.codice.ddf.spatial.ogc.csw.catalog.endpoint.CswEndpointTest.java

private List<QueryResponse> getQueryResponseBatch(int batchSize, int total) {
    Queue<Result> results = new ArrayDeque<>();
    for (int i = 1; i <= total; i++) {
        MetacardImpl metacard = new MetacardImpl();
        metacard.setId(i + "");
        results.add(new ResultImpl(metacard));
    }

    List<QueryResponse> queryResponses = new ArrayList<>();
    while (!results.isEmpty()) {
        List<Result> batchList = new ArrayList<>();
        for (int i = 0; i < batchSize; i++) {
            Result result = results.poll();
            if (result == null) {
                break;
            }
            batchList.add(result);
        }
        queryResponses.add(new QueryResponseImpl(null, batchList, total));
    }

    // Add one empty response list to the end
    queryResponses.add(new QueryResponseImpl(null, Collections.emptyList(), 0));
    return queryResponses;
}
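
poll() returning null doubles as the "ran out mid-batch" signal when chunking a queue into fixed-size lists. A generic version of the batching loop above (class and method names are ours):

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;

public class Batcher {
    static <T> List<List<T>> batches(Queue<T> source, int batchSize) {
        List<List<T>> out = new ArrayList<>();
        while (!source.isEmpty()) {
            List<T> batch = new ArrayList<>();
            for (int i = 0; i < batchSize; i++) {
                T item = source.poll();
                if (item == null) {
                    break; // queue exhausted in the middle of a batch
                }
                batch.add(item);
            }
            out.add(batch);
        }
        return out;
    }

    public static void main(String[] args) {
        Queue<Integer> queue = new ArrayDeque<>(List.of(1, 2, 3, 4, 5));
        System.out.println(batches(queue, 2)); // [[1, 2], [3, 4], [5]]
    }
}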

From source file:org.apache.giraph.worker.BspServiceSource.java

/**
 * Load saved partitions in multiple threads.
 * @param superstep superstep to load
 * @param partitions list of partitions to load
 */
private void loadCheckpointVertices(final long superstep, List<Integer> partitions) {
    int numThreads = Math.min(GiraphConstants.NUM_CHECKPOINT_IO_THREADS.get(getConfiguration()),
            partitions.size());

    final Queue<Integer> partitionIdQueue = new ConcurrentLinkedQueue<>(partitions);

    final CompressionCodec codec = new CompressionCodecFactory(getConfiguration())
            .getCodec(new Path(GiraphConstants.CHECKPOINT_COMPRESSION_CODEC.get(getConfiguration())));

    long t0 = System.currentTimeMillis();

    CallableFactory<Void> callableFactory = new CallableFactory<Void>() {
        @Override
        public Callable<Void> newCallable(int callableId) {
            return new Callable<Void>() {

                @Override
                public Void call() throws Exception {
                    while (!partitionIdQueue.isEmpty()) {
                        Integer partitionId = partitionIdQueue.poll();
                        if (partitionId == null) {
                            break;
                        }
                        Path path = getSavedCheckpoint(superstep,
                                "_" + partitionId + CheckpointingUtils.CHECKPOINT_VERTICES_POSTFIX);

                        FSDataInputStream compressedStream = getFs().open(path);

                        DataInputStream stream = codec == null ? compressedStream
                                : new DataInputStream(codec.createInputStream(compressedStream));

                        Partition<I, V, E> partition = getConfiguration().createPartition(partitionId,
                                getContext());

                        partition.readFields(stream);

                        getPartitionStore().addPartition(partition);

                        stream.close();
                    }
                    return null;
                }

            };
        }
    };

    ProgressableUtils.getResultsWithNCallables(callableFactory, numThreads, "load-vertices-%d", getContext());

    LOG.info("Loaded checkpoint in " + (System.currentTimeMillis() - t0) + " ms, using " + numThreads
            + " threads");
}