Example usage for java.util.LinkedList isEmpty()

List of usage examples for java.util.LinkedList.isEmpty()

Introduction

On this page you can find example usage of java.util.LinkedList.isEmpty().

Prototype

boolean isEmpty();

Document

Returns true if this list contains no elements.
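
As a quick illustration before the real-world usages below, here is a minimal, self-contained sketch (class and variable names are only for this example) showing that isEmpty() reports true for a freshly created list and false once an element has been added:

import java.util.LinkedList;

public class IsEmptyDemo {
    public static void main(String[] args) {
        LinkedList<String> list = new LinkedList<>();

        // A newly created LinkedList contains no elements.
        System.out.println(list.isEmpty()); // true

        list.add("first");
        // After adding an element, isEmpty() returns false.
        System.out.println(list.isEmpty()); // false

        list.clear();
        // clear() removes all elements, so the list is empty again.
        System.out.println(list.isEmpty()); // true
    }
}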

Usage

From source file:com.asakusafw.runtime.directio.hadoop.HadoopDataSourceUtil.java

/**
 * Searches file/directories by pattern.
 * @param fs target file system
 * @param base base path
 * @param pattern search pattern
 * @return found files, or an empty list if not found
 * @throws IOException if the search failed due to an I/O error
 * @throws IllegalArgumentException if some parameters were {@code null}
 */
public static List<FileStatus> search(FileSystem fs, Path base, FilePattern pattern) throws IOException {
    if (fs == null) {
        throw new IllegalArgumentException("fs must not be null"); //$NON-NLS-1$
    }
    if (base == null) {
        throw new IllegalArgumentException("base must not be null"); //$NON-NLS-1$
    }
    if (pattern == null) {
        throw new IllegalArgumentException("pattern must not be null"); //$NON-NLS-1$
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format("Start searching for files (path={0}, resourcePattern={1})", //$NON-NLS-1$
                base, pattern));
    }
    List<FileStatus> current = new ArrayList<>(1);
    try {
        FileStatus stat = fs.getFileStatus(base);
        current.add(stat);
    } catch (FileNotFoundException e) {
        return Collections.emptyList();
    }
    int steps = 0;
    LinkedList<Segment> segments = new LinkedList<>(pattern.getSegments());
    while (segments.isEmpty() == false) {
        if (segments.getFirst().isTraverse()) {
            segments.removeFirst();
            current = recursiveStep(fs, current);
        } else {
            List<Path> step = consumeStep(segments);
            current = globStep(fs, current, step);
        }
        steps++;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format(
                "Finish searching for files (path={0}, resourcePattern={1}, results={2}, steps={3})", //$NON-NLS-1$
                base, pattern, current.size(), steps));
    }
    return current;
}

From source file:org.eclipse.wb.internal.core.model.description.helpers.ComponentDescriptionHelper.java

/**
 * @param editor
 *          the {@link AstEditor} in context of which we work now.
 * @param key
 *          the {@link ComponentDescriptionKey} of requested {@link ComponentDescription}.
 * @param additionalDescriptionInfos
 *          additional {@link ClassResourceInfo}'s to parse after {@link ClassResourceInfo}'s
 *          collected for component {@link Class}. May be empty, but not <code>null</code>.
 * 
 * @return the {@link ComponentDescription} of component with given {@link Class}.
 * @throws Exception
 *           if no {@link ComponentDescription} can be found.
 */
private static ComponentDescription getDescription0(AstEditor editor, ComponentDescriptionKey key,
        List<ClassResourceInfo> additionalDescriptionInfos) throws Exception {
    EditorState state = EditorState.get(editor);
    ILoadingContext context = EditorStateLoadingContext.get(state);
    Class<?> componentClass = key.getComponentClass();
    //
    try {
        // prepare result description
        ComponentDescription componentDescription = new ComponentDescription(key);
        addConstructors(editor.getJavaProject(), componentDescription);
        componentDescription.setBeanInfo(ReflectionUtils.getBeanInfo(componentClass));
        componentDescription.setBeanDescriptor(new IntrospectionHelper(componentClass).getBeanDescriptor());
        // prepare list of description resources, from generic to specific
        LinkedList<ClassResourceInfo> descriptionInfos;
        {
            descriptionInfos = Lists.newLinkedList();
            DescriptionHelper.addDescriptionResources(descriptionInfos, context, componentClass);
            Assert.isTrueException(!descriptionInfos.isEmpty(),
                    ICoreExceptionConstants.DESCRIPTION_NO_DESCRIPTIONS, componentClass.getName());
            // at last append additional description resource
            descriptionInfos.addAll(additionalDescriptionInfos);
        }
        // prepare Digester
        Digester digester;
        {
            digester = new Digester();
            digester.setLogger(new NoOpLog());
            addRules(digester, editor, componentClass);
        }
        // read descriptions from generic to specific
        for (ClassResourceInfo descriptionInfo : descriptionInfos) {
            ResourceInfo resourceInfo = descriptionInfo.resource;
            // read next description
            {
                componentDescription.setCurrentClass(descriptionInfo.clazz);
                digester.push(componentDescription);
                // do parse
                InputStream is = resourceInfo.getURL().openStream();
                try {
                    digester.parse(is);
                } finally {
                    IOUtils.closeQuietly(is);
                }
            }
            // clear parts that can not be inherited
            if (descriptionInfo.clazz == componentClass) {
                setDescriptionWithInnerTags(componentDescription, resourceInfo);
            } else {
                componentDescription.clearCreations();
                componentDescription.setDescription(null);
            }
        }
        // set toolkit
        if (componentDescription.getToolkit() == null) {
            for (int i = descriptionInfos.size() - 1; i >= 0; i--) {
                ClassResourceInfo descriptionInfo = descriptionInfos.get(i);
                ToolkitDescription toolkit = descriptionInfo.resource.getToolkit();
                if (toolkit != null) {
                    componentDescription.setToolkit(toolkit);
                    break;
                }
            }
            Assert.isTrueException(componentDescription.getToolkit() != null,
                    ICoreExceptionConstants.DESCRIPTION_NO_TOOLKIT, componentClass.getName());
        }
        // icon, default creation
        setIcon(context, componentDescription, componentClass);
        configureDefaultCreation(componentDescription);
        // final operations
        {
            Assert.isNotNull(componentDescription.getModelClass());
            componentDescription.joinProperties();
        }
        // add to caches
        if (key.isPureComponent() && !"true".equals(componentDescription.getParameter("dontCacheDescription"))
                && shouldCacheDescriptions_inPackage(descriptionInfos.getLast(), componentClass)) {
            componentDescription.setCached(true);
        }
        // mark for caching presentation
        if (shouldCachePresentation(descriptionInfos.getLast(), componentClass)) {
            componentDescription.setPresentationCached(true);
        }
        // use processors
        for (IDescriptionProcessor processor : getDescriptionProcessors()) {
            processor.process(editor, componentDescription);
        }
        // well, we have result
        return componentDescription;
    } catch (SAXParseException e) {
        throw new DesignerException(ICoreExceptionConstants.DESCRIPTION_LOAD_ERROR, e.getException(),
                componentClass.getName());
    }
}

From source file:edu.uci.ics.jung.graph.predicates.ConnectedGraphPredicate.java

/**
 * Returns <code>true</code> if there exists a path from each 
 * vertex to all other vertices (ignoring edge direction).
 *
 * <p>Returns <code>true</code> for an empty graph.</p>
 * 
 * @see org.apache.commons.collections.Predicate#evaluate(java.lang.Object)
 */
public boolean evaluateGraph(ArchetypeGraph graph) {
    Graph g = (Graph) graph;
    if (g.numVertices() == 0)
        return true;

    Vertex start = (Vertex) g.getVertices().iterator().next(); // pick any vertex
    Set visited = new HashSet();
    LinkedList stack = new LinkedList();
    stack.add(start);
    // traverse through graph in depth-first order
    while (!stack.isEmpty()) {
        Vertex v = (Vertex) stack.removeFirst();
        visited.add(v);
        Set neighbors = v.getNeighbors();
        for (Iterator n_it = neighbors.iterator(); n_it.hasNext();) {
            Vertex w = (Vertex) n_it.next();
            if (!visited.contains(w))
                stack.addFirst(w);
        }
    }
    return (visited.size() == g.numVertices());
}

From source file:com.asakusafw.directio.tools.DirectIoDelete.java

@Override
public int run(String[] args) throws Exception {
    LinkedList<String> argList = new LinkedList<>();
    Collections.addAll(argList, args);
    boolean recursive = false;
    while (argList.isEmpty() == false) {
        String arg = argList.removeFirst();
        if (arg.equals("-r") || arg.equals("-recursive")) { //$NON-NLS-1$ //$NON-NLS-2$
            recursive = true;
        } else if (arg.equals("--")) { //$NON-NLS-1$
            break;
        } else {
            argList.addFirst(arg);
            break;
        }
    }
    if (argList.size() < 2) {
        LOG.error(MessageFormat.format("Invalid arguments: {0}", Arrays.toString(args)));
        System.err.println(MessageFormat.format("Usage: hadoop {0} -conf <datasource-conf.xml> [-r] "
                + "base-path resource-pattern [resource-pattern [...]]", getClass().getName()));
        return 1;
    }
    String path = argList.removeFirst();
    List<FilePattern> patterns = new ArrayList<>();
    for (String arg : argList) {
        patterns.add(FilePattern.compile(arg));
    }
    if (repository == null) {
        repository = HadoopDataSourceUtil.loadRepository(getConf());
    }
    String basePath = repository.getComponentPath(path);
    DirectDataSource source = repository.getRelatedDataSource(path);
    for (FilePattern pattern : patterns) {
        source.delete(basePath, pattern, recursive, new Counter());
    }
    return 0;
}

From source file:com.unboundid.scim2.common.utils.Parser.java

/**
 * Read a filter from the reader.
 *
 * @param reader The reader to read the filter from.
 * @param isValueFilter Whether to read the filter as a value filter.
 * @return The parsed filter.
 * @throws BadRequestException If the filter string could not be parsed.
 */
private static Filter readFilter(final StringReader reader, final boolean isValueFilter)
        throws BadRequestException {
    final Stack<Filter> outputStack = new Stack<Filter>();
    final Stack<String> precedenceStack = new Stack<String>();

    String token;
    String previousToken = null;

    while ((token = readFilterToken(reader, isValueFilter)) != null) {
        if (token.equals("(") && expectsNewFilter(previousToken)) {
            precedenceStack.push(token);
        } else if (token.equalsIgnoreCase(FilterType.NOT.getStringValue()) && expectsNewFilter(previousToken)) {
            // "not" should be followed by an (
            String nextToken = readFilterToken(reader, isValueFilter);
            if (nextToken == null) {
                throw BadRequestException.invalidFilter("Unexpected end of filter string");
            }
            if (!nextToken.equals("(")) {
                final String msg = String.format("Expected '(' at position %d", reader.mark);
                throw BadRequestException.invalidFilter(msg);
            }
            precedenceStack.push(token);
        } else if (token.equals(")") && !expectsNewFilter(previousToken)) {
            String operator = closeGrouping(precedenceStack, outputStack, false);
            if (operator == null) {
                final String msg = String.format(
                        "No opening parenthesis matching closing " + "parenthesis at position %d", reader.mark);
                throw BadRequestException.invalidFilter(msg);
            }
            if (operator.equalsIgnoreCase(FilterType.NOT.getStringValue())) {
                // Treat "not" the same as "(" except wrap everything in a not filter.
                outputStack.push(Filter.not(outputStack.pop()));
            }
        } else if (token.equalsIgnoreCase(FilterType.AND.getStringValue())
                && !expectsNewFilter(previousToken)) {
            // and has higher precedence than or.
            precedenceStack.push(token);
        } else if (token.equalsIgnoreCase(FilterType.OR.getStringValue()) && !expectsNewFilter(previousToken)) {
            // pop all the pending ands first before pushing or.
            LinkedList<Filter> andComponents = new LinkedList<Filter>();
            while (!precedenceStack.isEmpty()) {
                if (precedenceStack.peek().equalsIgnoreCase(FilterType.AND.getStringValue())) {
                    precedenceStack.pop();
                    andComponents.addFirst(outputStack.pop());
                } else {
                    break;
                }
            }
            if (!andComponents.isEmpty()) {
                andComponents.addFirst(outputStack.pop());
                outputStack.push(Filter.and(andComponents));
            }

            precedenceStack.push(token);
        } else if (token.endsWith("[") && expectsNewFilter(previousToken)) {
            // This is a complex value filter.
            final Path filterAttribute;
            try {
                filterAttribute = parsePath(token.substring(0, token.length() - 1));
            } catch (final BadRequestException e) {
                Debug.debugException(e);
                final String msg = String.format("Invalid attribute path at position %d: %s", reader.mark,
                        e.getMessage());
                throw BadRequestException.invalidFilter(msg);
            }

            if (filterAttribute.isRoot()) {
                final String msg = String.format("Attribute path expected at position %d", reader.mark);
                throw BadRequestException.invalidFilter(msg);
            }

            outputStack.push(Filter.hasComplexValue(filterAttribute, readFilter(reader, true)));
        } else if (isValueFilter && token.equals("]") && !expectsNewFilter(previousToken)) {
            break;
        } else if (expectsNewFilter(previousToken)) {
            // This must be an attribute path followed by operator and maybe value.
            final Path filterAttribute;
            try {
                filterAttribute = parsePath(token);
            } catch (final BadRequestException e) {
                Debug.debugException(e);
                final String msg = String.format("Invalid attribute path at position %d: %s", reader.mark,
                        e.getMessage());
                throw BadRequestException.invalidFilter(msg);
            }

            if (filterAttribute.isRoot()) {
                final String msg = String.format("Attribute path expected at position %d", reader.mark);
                throw BadRequestException.invalidFilter(msg);
            }

            String op = readFilterToken(reader, isValueFilter);

            if (op == null) {
                throw BadRequestException.invalidFilter("Unexpected end of filter string");
            }

            if (op.equalsIgnoreCase(FilterType.PRESENT.getStringValue())) {
                outputStack.push(Filter.pr(filterAttribute));
            } else {
                ValueNode valueNode;
                try {
                    // Mark the beginning of the JSON value so we can later reset back
                    // to this position and skip the actual chars that were consumed
                    // by Jackson. The Jackson parser is buffered and reads everything
                    // until the end of string.
                    reader.mark(0);
                    ScimJsonFactory scimJsonFactory = (ScimJsonFactory) JsonUtils.getObjectReader()
                            .getFactory();
                    JsonParser parser = scimJsonFactory.createScimFilterParser(reader);
                    // The object mapper will return a Java null for JSON null.
                    // Have to distinguish between reading a JSON null and encountering
                    // the end of string.
                    if (parser.getCurrentToken() == null && parser.nextToken() == null) {
                        // End of string.
                        valueNode = null;
                    } else {
                        valueNode = parser.readValueAsTree();

                        // This is actually a JSON null. Use NullNode.
                        if (valueNode == null) {
                            valueNode = JsonUtils.getJsonNodeFactory().nullNode();
                        }
                    }
                    // Reset back to the beginning of the JSON value.
                    reader.reset();
                    // Skip the number of chars consumed by JSON parser.
                    reader.skip(parser.getCurrentLocation().getCharOffset());
                } catch (IOException e) {
                    final String msg = String.format("Invalid comparison value at position %d: %s", reader.mark,
                            e.getMessage());
                    throw BadRequestException.invalidFilter(msg);
                }

                if (valueNode == null) {
                    throw BadRequestException.invalidFilter("Unexpected end of filter string");
                }

                if (op.equalsIgnoreCase(FilterType.EQUAL.getStringValue())) {
                    outputStack.push(Filter.eq(filterAttribute, valueNode));
                } else if (op.equalsIgnoreCase(FilterType.NOT_EQUAL.getStringValue())) {
                    outputStack.push(Filter.ne(filterAttribute, valueNode));
                } else if (op.equalsIgnoreCase(FilterType.CONTAINS.getStringValue())) {
                    outputStack.push(Filter.co(filterAttribute, valueNode));
                } else if (op.equalsIgnoreCase(FilterType.STARTS_WITH.getStringValue())) {
                    outputStack.push(Filter.sw(filterAttribute, valueNode));
                } else if (op.equalsIgnoreCase(FilterType.ENDS_WITH.getStringValue())) {
                    outputStack.push(Filter.ew(filterAttribute, valueNode));
                } else if (op.equalsIgnoreCase(FilterType.GREATER_THAN.getStringValue())) {
                    outputStack.push(Filter.gt(filterAttribute, valueNode));
                } else if (op.equalsIgnoreCase(FilterType.GREATER_OR_EQUAL.getStringValue())) {
                    outputStack.push(Filter.ge(filterAttribute, valueNode));
                } else if (op.equalsIgnoreCase(FilterType.LESS_THAN.getStringValue())) {
                    outputStack.push(Filter.lt(filterAttribute, valueNode));
                } else if (op.equalsIgnoreCase(FilterType.LESS_OR_EQUAL.getStringValue())) {
                    outputStack.push(Filter.le(filterAttribute, valueNode));
                } else {
                    final String msg = String.format("Unrecognized attribute operator '%s' at position %d. "
                            + "Expected: eq,ne,co,sw,ew,pr,gt,ge,lt,le", op, reader.mark);
                    throw BadRequestException.invalidFilter(msg);
                }
            }
        } else {
            final String msg = String.format("Unexpected character '%s' at position %d", token, reader.mark);
            throw BadRequestException.invalidFilter(msg);
        }
        previousToken = token;
    }

    closeGrouping(precedenceStack, outputStack, true);

    if (outputStack.isEmpty()) {
        throw BadRequestException.invalidFilter("Unexpected end of filter string");
    }
    return outputStack.pop();
}

From source file:io.fabric8.spring.cloud.kubernetes.reload.ConfigurationChangeDetector.java

/**
 * Finds all registered property sources of the given type.
 */
protected <S extends PropertySource<?>> List<S> findPropertySources(Class<S> sourceClass) {
    List<S> managedSources = new LinkedList<>();

    LinkedList<PropertySource<?>> sources = toLinkedList(environment.getPropertySources());
    while (!sources.isEmpty()) {
        PropertySource<?> source = sources.pop();
        if (source instanceof CompositePropertySource) {
            CompositePropertySource comp = (CompositePropertySource) source;
            sources.addAll(comp.getPropertySources());
        } else if (sourceClass.isInstance(source)) {
            managedSources.add(sourceClass.cast(source));
        }
    }

    return managedSources;
}

From source file:org.apache.hadoop.hbase.util.RegionSplitter.java

static void rollingSplit(String tableName, SplitAlgorithm splitAlgo, Configuration conf)
        throws IOException, InterruptedException {
    final int minOS = conf.getInt("split.outstanding", 2);

    HTable table = new HTable(conf, tableName);

    // max outstanding splits. default == 50% of servers
    final int MAX_OUTSTANDING = Math.max(table.getConnection().getCurrentNrHRS() / 2, minOS);

    Path hbDir = FSUtils.getRootDir(conf);
    Path tableDir = FSUtils.getTableDir(hbDir, table.getName());
    Path splitFile = new Path(tableDir, "_balancedSplit");
    FileSystem fs = FileSystem.get(conf);

    // get a list of daughter regions to create
    LinkedList<Pair<byte[], byte[]>> tmpRegionSet = getSplits(table, splitAlgo);
    LinkedList<Pair<byte[], byte[]>> outstanding = Lists.newLinkedList();
    int splitCount = 0;
    final int origCount = tmpRegionSet.size();

    // all splits must compact & we have 1 compact thread, so 2 split
    // requests to the same RS can stall the outstanding split queue.
    // To fix, group the regions into an RS pool and round-robin through it
    LOG.debug("Bucketing regions by regionserver...");
    TreeMap<String, LinkedList<Pair<byte[], byte[]>>> daughterRegions = Maps.newTreeMap();
    for (Pair<byte[], byte[]> dr : tmpRegionSet) {
        String rsLocation = table.getRegionLocation(dr.getSecond()).getHostnamePort();
        if (!daughterRegions.containsKey(rsLocation)) {
            LinkedList<Pair<byte[], byte[]>> entry = Lists.newLinkedList();
            daughterRegions.put(rsLocation, entry);
        }
        daughterRegions.get(rsLocation).add(dr);
    }
    LOG.debug("Done with bucketing.  Split time!");
    long startTime = System.currentTimeMillis();

    // open the split file and modify it as splits finish
    FSDataInputStream tmpIn = fs.open(splitFile);
    byte[] rawData = new byte[tmpIn.available()];
    tmpIn.readFully(rawData);
    tmpIn.close();
    FSDataOutputStream splitOut = fs.create(splitFile);
    splitOut.write(rawData);

    try {
        // *** split code ***
        while (!daughterRegions.isEmpty()) {
            LOG.debug(daughterRegions.size() + " RS have regions to split.");

            // Get RegionServer : region count mapping
            final TreeMap<ServerName, Integer> rsSizes = Maps.newTreeMap();
            Map<HRegionInfo, ServerName> regionsInfo = table.getRegionLocations();
            for (ServerName rs : regionsInfo.values()) {
                if (rsSizes.containsKey(rs)) {
                    rsSizes.put(rs, rsSizes.get(rs) + 1);
                } else {
                    rsSizes.put(rs, 1);
                }
            }

            // sort the RS by the number of regions they have
            List<String> serversLeft = Lists.newArrayList(daughterRegions.keySet());
            Collections.sort(serversLeft, new Comparator<String>() {
                public int compare(String o1, String o2) {
                    return rsSizes.get(o1).compareTo(rsSizes.get(o2));
                }
            });

            // round-robin through the RS list. Choose the lightest-loaded servers
            // first to keep the master from load-balancing regions as we split.
            for (String rsLoc : serversLeft) {
                Pair<byte[], byte[]> dr = null;

                // find a region in the RS list that hasn't been moved
                LOG.debug("Finding a region on " + rsLoc);
                LinkedList<Pair<byte[], byte[]>> regionList = daughterRegions.get(rsLoc);
                while (!regionList.isEmpty()) {
                    dr = regionList.pop();

                    // get current region info
                    byte[] split = dr.getSecond();
                    HRegionLocation regionLoc = table.getRegionLocation(split);

                    // if this region moved locations
                    String newRs = regionLoc.getHostnamePort();
                    if (newRs.compareTo(rsLoc) != 0) {
                        LOG.debug("Region with " + splitAlgo.rowToStr(split) + " moved to " + newRs
                                + ". Relocating...");
                        // relocate it, don't use it right now
                        if (!daughterRegions.containsKey(newRs)) {
                            LinkedList<Pair<byte[], byte[]>> entry = Lists.newLinkedList();
                            daughterRegions.put(newRs, entry);
                        }
                        daughterRegions.get(newRs).add(dr);
                        dr = null;
                        continue;
                    }

                    // make sure this region wasn't already split
                    byte[] sk = regionLoc.getRegionInfo().getStartKey();
                    if (sk.length != 0) {
                        if (Bytes.equals(split, sk)) {
                            LOG.debug("Region already split on " + splitAlgo.rowToStr(split)
                                    + ".  Skipping this region...");
                            ++splitCount;
                            dr = null;
                            continue;
                        }
                        byte[] start = dr.getFirst();
                        Preconditions.checkArgument(Bytes.equals(start, sk),
                                splitAlgo.rowToStr(start) + " != " + splitAlgo.rowToStr(sk));
                    }

                    // passed all checks! found a good region
                    break;
                }
                if (regionList.isEmpty()) {
                    daughterRegions.remove(rsLoc);
                }
                if (dr == null)
                    continue;

                // we have a good region, time to split!
                byte[] split = dr.getSecond();
                LOG.debug("Splitting at " + splitAlgo.rowToStr(split));
                HBaseAdmin admin = new HBaseAdmin(table.getConfiguration());
                admin.split(table.getTableName(), split);

                LinkedList<Pair<byte[], byte[]>> finished = Lists.newLinkedList();
                if (conf.getBoolean("split.verify", true)) {
                    // we need to verify and rate-limit our splits
                    outstanding.addLast(dr);
                    // with too many outstanding splits, wait for some to finish
                    while (outstanding.size() >= MAX_OUTSTANDING) {
                        finished = splitScan(outstanding, table, splitAlgo);
                        if (finished.isEmpty()) {
                            Thread.sleep(30 * 1000);
                        } else {
                            outstanding.removeAll(finished);
                        }
                    }
                } else {
                    finished.add(dr);
                }

                // mark each finished region as successfully split.
                for (Pair<byte[], byte[]> region : finished) {
                    splitOut.writeChars("- " + splitAlgo.rowToStr(region.getFirst()) + " "
                            + splitAlgo.rowToStr(region.getSecond()) + "\n");
                    splitCount++;
                    if (splitCount % 10 == 0) {
                        long tDiff = (System.currentTimeMillis() - startTime) / splitCount;
                        LOG.debug("STATUS UPDATE: " + splitCount + " / " + origCount + ". Avg Time / Split = "
                                + org.apache.hadoop.util.StringUtils.formatTime(tDiff));
                    }
                }
            }
        }
        if (conf.getBoolean("split.verify", true)) {
            while (!outstanding.isEmpty()) {
                LinkedList<Pair<byte[], byte[]>> finished = splitScan(outstanding, table, splitAlgo);
                if (finished.isEmpty()) {
                    Thread.sleep(30 * 1000);
                } else {
                    outstanding.removeAll(finished);
                    for (Pair<byte[], byte[]> region : finished) {
                        splitOut.writeChars("- " + splitAlgo.rowToStr(region.getFirst()) + " "
                                + splitAlgo.rowToStr(region.getSecond()) + "\n");
                    }
                }
            }
        }
        LOG.debug("All regions have been successfully split!");
    } finally {
        long tDiff = System.currentTimeMillis() - startTime;
        LOG.debug("TOTAL TIME = " + org.apache.hadoop.util.StringUtils.formatTime(tDiff));
        LOG.debug("Splits = " + splitCount);
        LOG.debug("Avg Time / Split = " + org.apache.hadoop.util.StringUtils.formatTime(tDiff / splitCount));

        splitOut.close();
        if (table != null) {
            table.close();
        }
    }
    fs.delete(splitFile, false);
}

From source file:com.espertech.esper.epl.core.ResultSetProcessorSimple.java

/**
 * Applies the select-clause to the given events, returning the selected events. The number of events stays the
 * same, i.e. this method does not filter, it just transforms the result set.
 * <p>
 * Also applies a having clause.
 * @param exprProcessor - processes each input event and returns output event
 * @param events - input events
 * @param optionalHavingNode - supplies the having-clause expression
 * @param isNewData - indicates whether we are dealing with new data (istream) or old data (rstream)
 * @param isSynthesize - set to true to indicate that synthetic events are required for an iterator result set
 * @param exprEvaluatorContext context for expression evaluation
 * @return output events, one for each input event
 */
protected static EventBean[] getSelectEventsHaving(SelectExprProcessor exprProcessor, EventBean[] events,
        ExprEvaluator optionalHavingNode, boolean isNewData, boolean isSynthesize,
        ExprEvaluatorContext exprEvaluatorContext) {
    if (events == null) {
        return null;
    }

    LinkedList<EventBean> result = new LinkedList<EventBean>();

    EventBean[] eventsPerStream = new EventBean[1];
    for (EventBean theEvent : events) {
        eventsPerStream[0] = theEvent;

        Boolean passesHaving = (Boolean) optionalHavingNode.evaluate(eventsPerStream, isNewData,
                exprEvaluatorContext);
        if ((passesHaving == null) || (!passesHaving)) {
            continue;
        }

        result.add(exprProcessor.process(eventsPerStream, isNewData, isSynthesize, exprEvaluatorContext));
    }

    if (!result.isEmpty()) {
        return result.toArray(new EventBean[result.size()]);
    } else {
        return null;
    }
}

From source file:org.gradle.api.LocationAwareException.java

/**
 * Returns the reportable causes for this failure.
 *
 * @return The causes. Never returns null, returns an empty list if this exception has no reportable causes.
 */
public List<Throwable> getReportableCauses() {
    List<Throwable> causes = new ArrayList<Throwable>();
    LinkedList<Throwable> queue = new LinkedList<Throwable>();
    addCauses(target, queue);
    while (!queue.isEmpty()) {
        Throwable t = queue.removeFirst();
        causes.add(t);
        addCauses(t, queue);
    }
    return causes;
}

From source file:com.github.helenusdriver.commons.lang3.reflect.ReflectionUtils.java

/**
 * Gets all declared members of a given type (up the super class hierarchy).
 *
 * @author paouelle
 *
 * @param <T> the type of members to retrieve (either {@link Field},
 *            {@link Method}, or {@link Constructor})
 *
 * @param  type the type of members to retrieve
 * @param  clazz the class from which to find all declared members
 * @param  up <code>true</code> to look up the class hierarchy;
 *         <code>false</code> to only look at the specified class level
 * @return a list in the provided order for all declared members
 * @throws NullPointerException if <code>type</code> or
 *         <code>clazz</code> is <code>null</code>
 * @throws IllegalArgumentException if <code>type</code> is not
 *         {@link Field}, {@link Method}, or {@link Constructor}
 */
public static <T extends Member> List<T> getAllDeclaredMembers(Class<T> type, Class<?> clazz, boolean up) {
    org.apache.commons.lang3.Validate.notNull(type, "invalid null member type");
    org.apache.commons.lang3.Validate.notNull(clazz, "invalid null class");
    final LinkedList<Class<?>> classes = new LinkedList<>();

    if (up) {
        while (clazz != null) {
            classes.push(clazz);
            clazz = clazz.getSuperclass();
        }
    } else {
        classes.push(clazz);
    }
    final List<T> members = new ArrayList<>(12);

    while (!classes.isEmpty()) {
        clazz = classes.pop();
        for (final T m : ReflectionUtils.getDeclaredMembers(type, clazz)) {
            members.add(m);
        }
    }
    return members;
}
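
A pattern that recurs throughout the examples above is using a LinkedList as a work queue (or stack) and draining it with a loop guarded by isEmpty(). Here is a minimal sketch of that idiom, distilled from the usages above (the class, variable names, and sample data are illustrative only):

import java.util.LinkedList;

public class WorklistDemo {
    public static void main(String[] args) {
        // Seed the work list with a few items to process.
        LinkedList<String> worklist = new LinkedList<>();
        worklist.add("a");
        worklist.add("b");
        worklist.add("c");

        // Drain the list: loop until isEmpty() reports no remaining elements.
        while (!worklist.isEmpty()) {
            String item = worklist.removeFirst();
            System.out.println("Processing " + item);
            // Newly discovered work could be appended here with addLast(...)
            // for breadth-first order, or addFirst(...) for depth-first order.
        }
    }
}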