Example usage for java.util LinkedList pop

List of usage examples for java.util LinkedList pop

Introduction

On this page you can find example usages of java.util.LinkedList.pop(), drawn from open-source projects.

Prototype

public E pop() 

Document

Pops an element from the stack represented by this list.
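
In other words, pop() removes and returns the first element of this list (it behaves like removeFirst()) and throws NoSuchElementException if the list is empty. A minimal standalone sketch, independent of the projects below:

import java.util.LinkedList;

public class LinkedListPopDemo {
    public static void main(String[] args) {
        LinkedList<String> stack = new LinkedList<>();
        stack.push("first");              // push() inserts at the head
        stack.push("second");
        System.out.println(stack.pop());  // prints "second" (last in, first out)
        System.out.println(stack.pop());  // prints "first"
        // stack.pop();                   // would now throw NoSuchElementException
    }
}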

Usage

From source file:br.com.ingenieux.mojo.jbake.SeedMojo.java

private String stripLeadingPath(String name) {
    LinkedList<String> elements = new LinkedList<>(asList(name.split("/")));

    elements.pop();

    return join(elements.iterator(), '/');
}
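
The Mojo above relies on commons-lang's join; a sketch of the same trick using only JDK classes (not the project's code) could look like this:

import java.util.Arrays;
import java.util.LinkedList;

private static String stripLeadingPath(String name) {
    LinkedList<String> elements = new LinkedList<>(Arrays.asList(name.split("/")));
    elements.pop();                  // pop() removes the head, i.e. the leading segment
    return String.join("/", elements);
}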

From source file:org.knowrob.vis.model.util.algorithm.ACCUM.java

/**
 * Diffuses a vector field around one vertex, weighted by a Gaussian of width
 * {@code 1/sqrt(invsigma2)}. Ported from trimesh2 (2.12) (Szymon Rusinkiewicz, Princeton University).
 *
 * @see <a href="https://github.com/fcole/qrtsc/tree/master/trimesh2">trimesh2</a>
 * 
 * @param m
 *          CAD model analyzed
 * @param curvatures
 *          HashMap with model vertices as keys and their curvatures as values
 * @param flags
 *          map of vertex to long value (default initialized with 0 and of the same length as the list of vertices of the model)
 * @param flag_curr
 *          atomic long value
 * @param accum
 *          accumulator
 * @param v
 *          index in the model's vertex list of the vertex to diffuse around
 * @param invsigma2
 *          spread of the Gaussian used in weighting
 * @param flt
 *          3D vector field diffused based on the curvature
 */
@SuppressWarnings("javadoc")
private static void diffuse_vert_field(final Model m, HashMap<Vertex, Curvature> curvatures,
        Map<Vertex, Long> flags, AtomicLong flag_curr, final ACCUM accum, int v, float invsigma2, Vertex flt) {
    Vertex vert = m.getVertices().get(v);
    if (vert.getNeighbors().size() == 0) {
        // flt.set(0, 0, 0);
        accum.a(m, curvatures, vert, flt, .5f, vert);
        return;
    }

    // flt.set(0, 0, 0);
    accum.a(m, curvatures, vert, flt, vert.getPointarea(), vert);
    float sum_w = vert.getPointarea();
    final Vector3f nv = vert.getNormalVector();

    long flag_curr_val = flag_curr.incrementAndGet();
    flags.put(vert, flag_curr_val);
    LinkedList<Vertex> boundary = new LinkedList<Vertex>();
    boundary.addAll(vert.getNeighbors());
    while (boundary.size() > 0) {
        Vertex n = boundary.pop();
        if (flags.get(n) != null && flags.get(n) == flag_curr_val)
            continue;
        flags.put(n, flag_curr_val);
        if (nv.dot(n.getNormalVector()) <= 0.0f)
            continue;
        // Gaussian weight
        float w = wt(n, vert, invsigma2);
        if (w == 0.0f)
            continue;
        // Downweight things pointing in different directions
        w *= nv.dot(n.getNormalVector());
        // Surface area "belonging" to each point
        w *= n.getPointarea();
        // Accumulate weight times field at neighbor
        accum.a(m, curvatures, vert, flt, w, n);
        sum_w += w;
        for (Vertex nn : n.getNeighbors()) {
            if (flags.get(nn) != null && flags.get(nn) == flag_curr_val)
                continue;
            boundary.push(nn);
        }
    }
    flt.scale(1 / sum_w);
}
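
Stripped of the mesh-specific weighting, the loop above is a standard flood fill: pop a vertex off the frontier, skip it if it was already flagged in this pass, accumulate its contribution, then push its unvisited neighbors. A generic sketch of that skeleton (the floodFill name and the Function-based neighbor lookup are illustrative, not from the project):

import java.util.*;
import java.util.function.Function;

// pop() and push() both operate on the head of the LinkedList, so the
// frontier behaves as a stack and the region grows depth-first.
static <N> Set<N> floodFill(N start, Function<N, ? extends Collection<N>> neighbors) {
    Set<N> visited = new HashSet<>();
    visited.add(start);
    LinkedList<N> boundary = new LinkedList<>(neighbors.apply(start));
    while (!boundary.isEmpty()) {
        N n = boundary.pop();
        if (!visited.add(n))
            continue;                          // already processed in this pass
        // ... accumulate n's weighted contribution here ...
        for (N nn : neighbors.apply(n))
            if (!visited.contains(nn))
                boundary.push(nn);
    }
    return visited;
}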

From source file:com.zh.snmp.snmpweb.service.SnmpWebService.java

@WebMethod(operationName = "getDinamicValues")
public List<DinamicValue> getDinamicValues(@WebParam(name = "deviceId") String deviceId,
        @WebParam(name = "configPath") String configPath) {
    init();
    Device device = deviceService.findDeviceByDeviceId(deviceId);
    List<String> path = Arrays.asList(configPath.split(PATH_DELIM));
    if (path.size() > 1) {
        LinkedList<String> pathl = new LinkedList<String>(path);
        pathl.pop(); //config code
        DeviceNode dn = device.getConfigMap().findChainChild(pathl);
        if (dn != null) {
            return dn.getDinamics();
        }
    }
    return null;
}
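
Here pop() simply discards the leading path element (the config code, per the inline comment) so that findChainChild receives only the relative path below it.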

From source file:org.apache.hadoop.hbase.util.RegionSplitter.java

static void rollingSplit(String tableName, SplitAlgorithm splitAlgo, Configuration conf)
        throws IOException, InterruptedException {
    final int minOS = conf.getInt("split.outstanding", 2);

    HTable table = new HTable(conf, tableName);

    // max outstanding splits. default == 50% of servers
    final int MAX_OUTSTANDING = Math.max(table.getConnection().getCurrentNrHRS() / 2, minOS);

    Path hbDir = FSUtils.getRootDir(conf);
    Path tableDir = FSUtils.getTableDir(hbDir, table.getName());
    Path splitFile = new Path(tableDir, "_balancedSplit");
    FileSystem fs = FileSystem.get(conf);

    // get a list of daughter regions to create
    LinkedList<Pair<byte[], byte[]>> tmpRegionSet = getSplits(table, splitAlgo);
    LinkedList<Pair<byte[], byte[]>> outstanding = Lists.newLinkedList();
    int splitCount = 0;
    final int origCount = tmpRegionSet.size();

    // all splits must compact & we have 1 compact thread, so 2 split
    // requests to the same RS can stall the outstanding split queue.
    // To fix, group the regions into an RS pool and round-robin through it
    LOG.debug("Bucketing regions by regionserver...");
    TreeMap<String, LinkedList<Pair<byte[], byte[]>>> daughterRegions = Maps.newTreeMap();
    for (Pair<byte[], byte[]> dr : tmpRegionSet) {
        String rsLocation = table.getRegionLocation(dr.getSecond()).getHostnamePort();
        if (!daughterRegions.containsKey(rsLocation)) {
            LinkedList<Pair<byte[], byte[]>> entry = Lists.newLinkedList();
            daughterRegions.put(rsLocation, entry);
        }
        daughterRegions.get(rsLocation).add(dr);
    }
    LOG.debug("Done with bucketing.  Split time!");
    long startTime = System.currentTimeMillis();

    // open the split file and modify it as splits finish
    FSDataInputStream tmpIn = fs.open(splitFile);
    byte[] rawData = new byte[tmpIn.available()];
    tmpIn.readFully(rawData);
    tmpIn.close();
    FSDataOutputStream splitOut = fs.create(splitFile);
    splitOut.write(rawData);

    try {
        // *** split code ***
        while (!daughterRegions.isEmpty()) {
            LOG.debug(daughterRegions.size() + " RS have regions to split.");

            // Get hostname:port -> region count mapping, keyed the same way as
            // daughterRegions so the comparator below can look servers up by name
            final TreeMap<String, Integer> rsSizes = Maps.newTreeMap();
            Map<HRegionInfo, ServerName> regionsInfo = table.getRegionLocations();
            for (ServerName rs : regionsInfo.values()) {
                String rsLocation = rs.getHostname() + ":" + rs.getPort();
                if (rsSizes.containsKey(rsLocation)) {
                    rsSizes.put(rsLocation, rsSizes.get(rsLocation) + 1);
                } else {
                    rsSizes.put(rsLocation, 1);
                }
            }

            // sort the RS by the number of regions they have
            List<String> serversLeft = Lists.newArrayList(daughterRegions.keySet());
            Collections.sort(serversLeft, new Comparator<String>() {
                public int compare(String o1, String o2) {
                    return rsSizes.get(o1).compareTo(rsSizes.get(o2));
                }
            });

            // round-robin through the RS list. Choose the lightest-loaded servers
            // first to keep the master from load-balancing regions as we split.
            for (String rsLoc : serversLeft) {
                Pair<byte[], byte[]> dr = null;

                // find a region in the RS list that hasn't been moved
                LOG.debug("Finding a region on " + rsLoc);
                LinkedList<Pair<byte[], byte[]>> regionList = daughterRegions.get(rsLoc);
                while (!regionList.isEmpty()) {
                    dr = regionList.pop();

                    // get current region info
                    byte[] split = dr.getSecond();
                    HRegionLocation regionLoc = table.getRegionLocation(split);

                    // if this region moved locations
                    String newRs = regionLoc.getHostnamePort();
                    if (newRs.compareTo(rsLoc) != 0) {
                        LOG.debug("Region with " + splitAlgo.rowToStr(split) + " moved to " + newRs
                                + ". Relocating...");
                        // relocate it, don't use it right now
                        if (!daughterRegions.containsKey(newRs)) {
                            LinkedList<Pair<byte[], byte[]>> entry = Lists.newLinkedList();
                            daughterRegions.put(newRs, entry);
                        }
                        daughterRegions.get(newRs).add(dr);
                        dr = null;
                        continue;
                    }

                    // make sure this region wasn't already split
                    byte[] sk = regionLoc.getRegionInfo().getStartKey();
                    if (sk.length != 0) {
                        if (Bytes.equals(split, sk)) {
                            LOG.debug("Region already split on " + splitAlgo.rowToStr(split)
                                    + ".  Skipping this region...");
                            ++splitCount;
                            dr = null;
                            continue;
                        }
                        byte[] start = dr.getFirst();
                        Preconditions.checkArgument(Bytes.equals(start, sk),
                                splitAlgo.rowToStr(start) + " != " + splitAlgo.rowToStr(sk));
                    }

                    // passed all checks! found a good region
                    break;
                }
                if (regionList.isEmpty()) {
                    daughterRegions.remove(rsLoc);
                }
                if (dr == null)
                    continue;

                // we have a good region, time to split!
                byte[] split = dr.getSecond();
                LOG.debug("Splitting at " + splitAlgo.rowToStr(split));
                HBaseAdmin admin = new HBaseAdmin(table.getConfiguration());
                admin.split(table.getTableName(), split);

                LinkedList<Pair<byte[], byte[]>> finished = Lists.newLinkedList();
                if (conf.getBoolean("split.verify", true)) {
                    // we need to verify and rate-limit our splits
                    outstanding.addLast(dr);
                    // with too many outstanding splits, wait for some to finish
                    while (outstanding.size() >= MAX_OUTSTANDING) {
                        finished = splitScan(outstanding, table, splitAlgo);
                        if (finished.isEmpty()) {
                            Thread.sleep(30 * 1000);
                        } else {
                            outstanding.removeAll(finished);
                        }
                    }
                } else {
                    finished.add(dr);
                }

                // mark each finished region as successfully split.
                for (Pair<byte[], byte[]> region : finished) {
                    splitOut.writeChars("- " + splitAlgo.rowToStr(region.getFirst()) + " "
                            + splitAlgo.rowToStr(region.getSecond()) + "\n");
                    splitCount++;
                    if (splitCount % 10 == 0) {
                        long tDiff = (System.currentTimeMillis() - startTime) / splitCount;
                        LOG.debug("STATUS UPDATE: " + splitCount + " / " + origCount + ". Avg Time / Split = "
                                + org.apache.hadoop.util.StringUtils.formatTime(tDiff));
                    }
                }
            }
        }
        if (conf.getBoolean("split.verify", true)) {
            while (!outstanding.isEmpty()) {
                LinkedList<Pair<byte[], byte[]>> finished = splitScan(outstanding, table, splitAlgo);
                if (finished.isEmpty()) {
                    Thread.sleep(30 * 1000);
                } else {
                    outstanding.removeAll(finished);
                    for (Pair<byte[], byte[]> region : finished) {
                        splitOut.writeChars("- " + splitAlgo.rowToStr(region.getFirst()) + " "
                                + splitAlgo.rowToStr(region.getSecond()) + "\n");
                    }
                }
            }
        }
        LOG.debug("All regions have been successfully split!");
    } finally {
        long tDiff = System.currentTimeMillis() - startTime;
        LOG.debug("TOTAL TIME = " + org.apache.hadoop.util.StringUtils.formatTime(tDiff));
        LOG.debug("Splits = " + splitCount);
        LOG.debug("Avg Time / Split = " + org.apache.hadoop.util.StringUtils.formatTime(tDiff / splitCount));

        splitOut.close();
        if (table != null) {
            table.close();
        }
    }
    fs.delete(splitFile, false);
}
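
The pop() here drains per-regionserver work queues: daughter regions are bucketed by server, and each round-robin pass pops candidates off the head of one server's list until it finds one that still lives there. Reduced to its skeleton, the pattern looks roughly like this (drainRoundRobin and the Predicate callback are illustrative names, not HBase API):

import java.util.*;
import java.util.function.Predicate;

static <W> void drainRoundRobin(TreeMap<String, LinkedList<W>> buckets, Predicate<W> accept) {
    while (!buckets.isEmpty()) {
        for (String key : new ArrayList<>(buckets.keySet())) {
            LinkedList<W> queue = buckets.get(key);
            W item = null;
            while (!queue.isEmpty()) {
                item = queue.pop();        // head of this server's queue
                if (accept.test(item))
                    break;                 // usable item found for this pass
                item = null;               // rejected; keep draining
            }
            if (queue.isEmpty())
                buckets.remove(key);       // nothing left for this server
            if (item != null) {
                // ... act on item here (e.g. issue the split) ...
            }
        }
    }
}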

From source file:io.fabric8.spring.cloud.kubernetes.reload.ConfigurationChangeDetector.java

/**
 * Finds all registered property sources of the given type.
 */
protected <S extends PropertySource<?>> List<S> findPropertySources(Class<S> sourceClass) {
    List<S> managedSources = new LinkedList<>();

    LinkedList<PropertySource<?>> sources = toLinkedList(environment.getPropertySources());
    while (!sources.isEmpty()) {
        PropertySource<?> source = sources.pop();
        if (source instanceof CompositePropertySource) {
            CompositePropertySource comp = (CompositePropertySource) source;
            sources.addAll(comp.getPropertySources());
        } else if (sourceClass.isInstance(source)) {
            managedSources.add(sourceClass.cast(source));
        }
    }

    return managedSources;
}
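
The toLinkedList helper is not part of the excerpt; presumably it just copies the environment's property sources into a fresh LinkedList, along these lines (an assumption, not the project's code):

// Assumed shape of the toLinkedList helper used above.
private static LinkedList<PropertySource<?>> toLinkedList(Iterable<PropertySource<?>> sources) {
    LinkedList<PropertySource<?>> list = new LinkedList<>();
    for (PropertySource<?> source : sources) {
        list.add(source);
    }
    return list;
}

The copy matters: the loop both drains the list with pop() and grows it with addAll() when it meets a CompositePropertySource, which would be awkward to do safely while iterating the environment's own collection.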

From source file:com.github.helenusdriver.commons.lang3.reflect.ReflectionUtils.java

/**
 * Gets all declared members of a given type (up the super class hierarchy).
 *
 * @author paouelle
 *
 * @param <T> the type of members to retrieve (either {@link Field},
 *            {@link Method}, or {@link Constructor})
 *
 * @param  type the type of members to retrieve
 * @param  clazz the class from which to find all declared members
 * @param  up <code>true</code> to look up the class hierarchy;
 *         <code>false</code> to only look at the specified class level
 * @return a list in the provided order for all declared members
 * @throws NullPointerException if <code>type</code> or
 *         <code>clazz</code> is <code>null</code>
 * @throws IllegalArgumentException if <code>type</code> is not
 *         {@link Field}, {@link Method}, or {@link Constructor}
 */
public static <T extends Member> List<T> getAllDeclaredMembers(Class<T> type, Class<?> clazz, boolean up) {
    org.apache.commons.lang3.Validate.notNull(type, "invalid null member type");
    org.apache.commons.lang3.Validate.notNull(clazz, "invalid null class");
    final LinkedList<Class<?>> classes = new LinkedList<>();

    if (up) {
        while (clazz != null) {
            classes.push(clazz);
            clazz = clazz.getSuperclass();
        }
    } else {
        classes.push(clazz);
    }
    final List<T> members = new ArrayList<>(12);

    while (!classes.isEmpty()) {
        clazz = classes.pop();
        for (final T m : ReflectionUtils.getDeclaredMembers(type, clazz)) {
            members.add(m);
        }
    }
    return members;
}
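
Note how push() and pop() cooperate here: classes are pushed while walking up the hierarchy, so popping them back reverses the walk and members come out superclass-first. A standalone check:

LinkedList<Class<?>> classes = new LinkedList<>();
for (Class<?> c = java.util.ArrayList.class; c != null; c = c.getSuperclass())
    classes.push(c);
while (!classes.isEmpty())
    System.out.println(classes.pop().getSimpleName());
// prints Object, AbstractCollection, AbstractList, ArrayList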

From source file:com.github.helenusdriver.commons.lang3.reflect.ReflectionUtils.java

/**
 * Gets all members of a given type (up the super class hierarchy) annotated
 * with the specified annotation.
 *
 * @author paouelle
 *
 * @param <T> the type of members to retrieve (either {@link Field},
 *            {@link Method}, or {@link Constructor})
 *
 * @param  type the type of members to retrieve
 * @param  clazz the class from which to find all annotated members
 * @param  annotation the annotation for which to find all members
 * @param  up <code>true</code> to look up the class hierarchy;
 *         <code>false</code> to only look at the specified class level
 * @return a list in the provided order for all annotated members
 * @throws NullPointerException if <code>type</code>,
 *         <code>clazz</code> or <code>annotation</code> is <code>null</code>
 * @throws IllegalArgumentException if <code>type</code> is not
 *         {@link Field}, {@link Method}, or {@link Constructor}
 */
public static <T extends Member> List<T> getAllMembersAnnotatedWith(Class<T> type, Class<?> clazz,
        Class<? extends Annotation> annotation, boolean up) {
    org.apache.commons.lang3.Validate.notNull(type, "invalid null member type");
    org.apache.commons.lang3.Validate.notNull(clazz, "invalid null class");
    org.apache.commons.lang3.Validate.notNull(annotation, "invalid null annotation class");
    final LinkedList<Class<?>> classes = new LinkedList<>();

    if (up) {
        while (clazz != null) {
            classes.push(clazz);
            clazz = clazz.getSuperclass();
        }
    } else {
        classes.push(clazz);
    }
    final List<T> members = new ArrayList<>(12);

    while (!classes.isEmpty()) {
        clazz = classes.pop();
        for (final T m : ReflectionUtils.getDeclaredMembers(type, clazz)) {
            if ((m instanceof AnnotatedElement)
                    && ((AnnotatedElement) m).getAnnotationsByType(annotation).length > 0) {
                members.add(m);
            }
        }
    }
    return members;
}
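
This is the same push-up-then-pop-down idiom as getAllDeclaredMembers above, with an AnnotatedElement filter applied to the members of each class popped off the stack.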

From source file:org.bimserver.charting.SupportFunctions.java

public static ArrayList<LinkedHashMap<String, Object>> getDataWithTreeStructure(String structureKeyword,
        IfcModelInterface model, Chart chart) {
    ArrayList<LinkedHashMap<String, Object>> rawData = new ArrayList<>();
    // Get units.
    String units = "units";
    SIPrefix prefix = SupportFunctions.getLengthUnitPrefix(model);
    if (prefix != null)
        units = prefix.getLiteral();
    // Prepare for static iteration.
    int maxDepth = 0;
    LinkedList<IfcObjectWithTrace> leaves = new LinkedList<>();
    LinkedList<IfcObjectWithTrace> parts = new LinkedList<>();
    // Iterate, but start with projects.
    for (IfcProject ifcProject : model.getAll(IfcProject.class))
        parts.add(new IfcObjectWithTrace(ifcProject));
    // Iterate the IFC going 1 level at a time (ex: Projects -> Sites, then Sites -> Buildings, then Buildings -> IfcProducts, then IfcProducts -> IfcProducts).
    while (parts.size() > 0) {
        IfcObjectWithTrace entry = parts.pop();
        StackTrace traceAtThisPoint = entry.Key;
        IfcObject parentObject = entry.Value;
        // Get name to be added to stack.
        int parentId = parentObject.getExpressId();
        String ifcParentName = (parentId >= 0)
                ? String.format("%s (%d)", parentObject.getName(), parentObject.getExpressId())
                : parentObject.getName();
        // Make the stack trace.
        StackTrace traceAtChildren = new StackTrace(traceAtThisPoint);
        traceAtChildren.add(ifcParentName);
        // Track the children that are getting put into the raw data at this point.
        LinkedList<IfcObjectWithTrace> childrenInThisPass = new LinkedList<>();
        // Walk the relationship from the parent to its child objects.
        for (IfcRelDecomposes ifcRelDecomposes : parentObject.getIsDecomposedBy()) {
            // Iterate what the object decomposes into.
            for (IfcObjectDefinition definition : ifcRelDecomposes.getRelatedObjects())
                childrenInThisPass.add(new IfcObjectWithTrace(traceAtChildren, (IfcObject) definition));
        }
        // If IfcObject happens to be something like an IfcBuildingStorey, go looking through its structure.
        if (parentObject instanceof IfcSpatialStructureElement) {
            IfcSpatialStructureElement ifcSpatialStructureElement = (IfcSpatialStructureElement) parentObject;
            for (IfcRelContainedInSpatialStructure ifcRelContainedInSpatialStructure : ifcSpatialStructureElement
                    .getContainsElements())
                for (IfcProduct ifcProduct : ifcRelContainedInSpatialStructure.getRelatedElements()) {
                    Double area = getRoughAreaEstimateFromIfcProduct(ifcProduct);
                    childrenInThisPass.add(new IfcObjectWithTrace(traceAtChildren, ifcProduct, area));
                }
        }
        // Test if this node is a leaf. If it is, keep it.
        if (childrenInThisPass.size() == 0) {
            leaves.add(entry);
            // Update depth.
            int depthAtThisPoint = traceAtThisPoint.size() + 1;
            if (depthAtThisPoint > maxDepth)
                maxDepth = depthAtThisPoint;
        } else
            parts.addAll(childrenInThisPass);
    }
    // Derive the column names.
    ArrayList<String> hierarchyColumnNames = new ArrayList<>();
    for (int i = 0; i < maxDepth; i++)
        hierarchyColumnNames.add(String.format("%s%d", structureKeyword, i + 1));
    // Update the chart configuration.
    chart.setDimensionLookupKeys(structureKeyword, hierarchyColumnNames);
    chart.setDimensionLookupKey("size", "size");
    chart.setDimensionLookupKey("label", "label");
    chart.setDimensionLookupKey("color", hierarchyColumnNames.get(Math.max(0, maxDepth - 2)));
    // Iterate the leaf nodes.
    for (IfcObjectWithTrace leaf : leaves) {
        StackTrace traceAtThisPoint = leaf.Key;
        IfcObject leafObject = leaf.Value;
        // Prepare to store this raw data entry.
        LinkedHashMap<String, Object> leafDataEntry = new LinkedHashMap<>();
        // Prepare to iterate backwards along column names (ex. hierarchy10, ..., hierarchy1).
        int leafDepthIndex = maxDepth - 1;
        int sizeOfStack = traceAtThisPoint.size();
        int stackUpperBound = leafDepthIndex - 1;
        int stackLowerRange = stackUpperBound - sizeOfStack;
        // Iterate backwards along column names.
        for (int i = leafDepthIndex; i >= 0; i--) {
            String column = hierarchyColumnNames.get(i);
            String value;
            if (i == leafDepthIndex) {
                value = String.format("%s (%d)", leafObject.getName(), leafObject.getOid());
                if (units != null && leaf.Size != null) {
                    if (leaf.Size > 0)
                        value += String.format(" ~%s %s\u00B2", leaf.Size.intValue(), units);
                    else
                        value += String.format(" %s %s\u00B2", leaf.Size, units);
                }
                leafDataEntry.put("label", leafObject.getName());
                leafDataEntry.put("size", leaf.Size);
            } else if (stackLowerRange < i && i <= stackUpperBound) {
                int index = sizeOfStack - (stackUpperBound - i) - 1;
                value = traceAtThisPoint.get(index);
            } else
                value = null;
            // Add column.
            leafDataEntry.put(column, value);
        }
        // Add the data.
        rawData.add(leafDataEntry);
    }
    // Send it all back.
    return rawData;
}
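
Although parts.pop() reads like a stack operation, children are appended with addAll() at the tail while pop() removes from the head, so the LinkedList acts as a FIFO queue here and the decomposition tree is walked one level at a time, exactly as the comment above the loop says.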

From source file:com.github.helenusdriver.commons.lang3.reflect.ReflectionUtils.java

/**
 * Gets all members of a given type (up the super class hierarchy) annotated
 * with the specified annotation.
 *
 * @author paouelle
 *
 * @param <T> the type of members to retrieve (either {@link Field},
 *            {@link Method}, or {@link Constructor})
 * @param <A> the type of annotation to search for
 *
 * @param  type the type of members to retrieve
 * @param  clazz the class from which to find all annotated members
 * @param  annotation the annotation for which to find all members
 * @param  up <code>true</code> to look up the class hierarchy;
 *         <code>false</code> to only look at the specified class level
 * @return a non-<code>null</code> map in the provided order for all annotated
 *         members with their found annotations
 * @throws NullPointerException if <code>type</code>,
 *         <code>clazz</code> or <code>annotation</code> is <code>null</code>
 * @throws IllegalArgumentException if <code>type</code> is not
 *         {@link Field}, {@link Method}, or {@link Constructor}
 */
public static <T extends Member, A extends Annotation> Map<T, A[]> getAllAnnotationsForMembersAnnotatedWith(
        Class<T> type, Class<?> clazz, Class<A> annotation, boolean up) {
    org.apache.commons.lang3.Validate.notNull(type, "invalid null member type");
    org.apache.commons.lang3.Validate.notNull(clazz, "invalid null class");
    org.apache.commons.lang3.Validate.notNull(annotation, "invalid null annotation class");
    final LinkedList<Class<?>> classes = new LinkedList<>();

    if (up) {
        while (clazz != null) {
            classes.push(clazz);
            clazz = clazz.getSuperclass();
        }
    } else {
        classes.push(clazz);
    }
    final Map<T, A[]> members = new LinkedHashMap<>(12);

    while (!classes.isEmpty()) {
        clazz = classes.pop();
        for (final T m : ReflectionUtils.getDeclaredMembers(type, clazz)) {
            if (m instanceof AnnotatedElement) {
                final A[] as = ((AnnotatedElement) m).getAnnotationsByType(annotation);

                if (as.length > 0) {
                    members.put(m, as);
                }
            }
        }
    }
    return members;
}
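
Again the same superclass-first stack idiom; this variant keeps the annotations themselves, mapping each matching member of the popped classes to its annotation array.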

From source file:com.adobe.acs.commons.mcp.impl.processes.asset.HierarchicalElement.java

@SuppressWarnings("squid:S00112")
default void visitAllFolders(CheckedConsumer<HierarchicalElement> visitor,
        CheckedFunction<HierarchicalElement, Stream<HierarchicalElement>> childFunction) throws Exception {
    LinkedList<HierarchicalElement> nodes = new LinkedList<>();
    nodes.add(this);
    while (!nodes.isEmpty()) {
        HierarchicalElement node = nodes.pop();
        childFunction.apply(node).forEach(nodes::add);
        visitor.accept(node);
    }
}
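
As in the BIMserver example above, add() appends at the tail while pop() removes from the head, so despite the stack-flavored name this visits the folder hierarchy breadth-first.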