Example usage for java.util.LinkedList.isEmpty()

Introduction

On this page you can find usage examples for java.util.LinkedList.isEmpty().

Prototype

boolean isEmpty();

Documentation

Returns true if this list contains no elements.
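
A minimal, self-contained sketch of these semantics (the class name IsEmptyDemo is illustrative, not part of the JDK):

import java.util.LinkedList;

public class IsEmptyDemo {
    public static void main(String[] args) {
        LinkedList<String> list = new LinkedList<>();
        System.out.println(list.isEmpty()); // true: nothing added yet

        list.add("first");
        System.out.println(list.isEmpty()); // false: one element present

        list.clear();
        System.out.println(list.isEmpty()); // true again after clear()
    }
}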

Usage

From source file:org.squashtest.tm.domain.library.structures.LibraryTree.java

/**
 * Removes a node and its subtree.
 *
 * @param key the identifier of the node whose subtree should be removed
 */
public void cut(IDENT key) {
    T node = getNode(key);

    T parent = node.getParent();
    if (parent != null) {
        parent.getChildren().remove(node);
    }

    LinkedList<T> processing = new LinkedList<>();
    processing.add(node);

    while (!processing.isEmpty()) {
        T current = processing.pop();
        List<T> layer = layers.get(current.getDepth());
        layer.remove(current);
        processing.addAll(current.getChildren());
    }

}
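
The loop above is the classic worklist pattern: seed a LinkedList with a starting node, then pop and process elements until isEmpty() reports the list drained. A stripped-down sketch of the same idea, where Node, detach() and getChildren() are placeholders rather than types from the example:

static void removeSubtree(Node root) {
    // Node, detach() and getChildren() are hypothetical stand-ins
    LinkedList<Node> worklist = new LinkedList<>();
    worklist.add(root);
    while (!worklist.isEmpty()) {
        Node current = worklist.pop();          // take the next node from the head
        detach(current);                        // per-node work goes here
        worklist.addAll(current.getChildren()); // enqueue the whole subtree
    }
}

Because pop() removes from the head while addAll() appends at the tail, the traversal is breadth-first; pushing children one by one with push() instead would make it depth-first.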

From source file:org.geoserver.importer.Directory.java

@Override
public void prepare(ProgressMonitor m) throws IOException {
    files = new ArrayList<FileData>();

    //recursively search for spatial files, maintain a queue of directories to recurse into
    LinkedList<File> q = new LinkedList<File>();
    q.add(file);

    while (!q.isEmpty()) {
        File dir = q.poll();

        if (m.isCanceled()) {
            return;
        }
        m.setTask("Scanning " + dir.getPath());

        //get all the regular (non directory) files
        Set<File> all = new LinkedHashSet<File>(Arrays.asList(dir.listFiles(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return !new File(dir, name).isDirectory();
            }
        })));

        //scan all the files looking for spatial ones
        for (File f : dir.listFiles()) {
            if (f.isHidden()) {
                all.remove(f);
                continue;
            }
            if (f.isDirectory()) {
                if (!recursive && !f.equals(file)) {
                    //skip it
                    continue;
                }
                // @hacky - ignore __MACOSX
                // this could probably be dealt with in a better way elsewhere
                // like by having Directory ignore the contents since they
                // are all hidden files anyway
                if (!"__MACOSX".equals(f.getName())) {
                    Directory d = new Directory(f);
                    d.prepare(m);

                    files.add(d);
                }
                //q.push(f);
                continue;
            }

            //special case for .aux files: they are metadata but get picked up as readable
            // by the ERDAS Imagine reader... just ignore them for now
            if ("aux".equalsIgnoreCase(FilenameUtils.getExtension(f.getName()))) {
                continue;
            }

            //determine if this is a spatial format or not
            DataFormat format = DataFormat.lookup(f);

            if (format != null) {
                SpatialFile sf = newSpatialFile(f, format);

                //gather up the related files
                sf.prepare(m);

                files.add(sf);

                all.removeAll(sf.allFiles());
            }
        }

        //take any leftovers and add them as aspatial/unrecognized
        for (File f : all) {
            files.add(new ASpatialFile(f));
        }
    }

    format = format();
}

From source file:ldbc.snb.datagen.generator.BTERKnowsGenerator.java

private void generateRemainingEdges() {
    LinkedList<Integer> stubs = new LinkedList<Integer>();
    for (int i = 0; i < graphSize; ++i) {
        long difference = expectedDegree[i] - adjacencyMatrix[i].getCardinality();
        if (difference > 0) {
            for (int j = 0; j < difference; ++j) {
                stubs.add(i);
            }
        }
    }
    Collections.shuffle(stubs, random);
    while (!stubs.isEmpty()) {
        int node1 = stubs.get(0);
        stubs.remove(0);
        if (!stubs.isEmpty()) {
            int node2 = stubs.get(0);
            stubs.remove(0);
            if (node1 != node2) {
                adjacencyMatrix[node1].add(node2);
                adjacencyMatrix[node2].add(node1);
            }
        }
    }
}
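
Note the second isEmpty() check inside the loop: the outer condition only guarantees one remaining stub, so an odd-length list would otherwise run off the end. A distilled sketch of this pair-draining pattern, where connect() is a hypothetical pairing action:

while (!stubs.isEmpty()) {
    int first = stubs.removeFirst();  // same effect as get(0) followed by remove(0)
    if (stubs.isEmpty()) {
        break;                        // odd stub out: no partner left
    }
    int second = stubs.removeFirst();
    if (first != second) {
        connect(first, second);       // connect() is a placeholder, not from the example
    }
}

removeFirst(), like the no-argument remove(), throws NoSuchElementException on an empty list, which is why every removal here sits behind an isEmpty() guard.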

From source file:gdsc.smlm.results.filter.HysteresisFilter.java

@Override
public void setup(MemoryPeakResults peakResults) {
    ok = new HashSet<PeakResult>();

    // Create a set of candidates and valid peaks
    MemoryPeakResults traceResults = new MemoryPeakResults();

    // Initialise peaks to check
    LinkedList<PeakResult> candidates = new LinkedList<PeakResult>();
    for (PeakResult result : peakResults.getResults()) {
        switch (getStatus(result)) {
        case OK:
            ok.add(result);
            traceResults.add(result);
            break;
        case CANDIDATE:
            candidates.add(result);
            traceResults.add(result);
            break;
        default:
            break;
        }
    }

    if (candidates.isEmpty())
        return;

    // Find average precision of the candidates and use it for the search
    // distance
    SummaryStatistics stats = new SummaryStatistics();
    final double nmPerPixel = peakResults.getNmPerPixel();
    final double gain = peakResults.getGain();
    final boolean emCCD = peakResults.isEMCCD();
    for (PeakResult peakResult : candidates) {
        stats.addValue(peakResult.getPrecision(nmPerPixel, gain, emCCD));
    }
    double distanceThreshold = stats.getMean() * searchDistance / nmPerPixel;

    // Trace through candidates
    TraceManager tm = new TraceManager(traceResults);
    tm.setTraceMode(TraceMode.LATEST_FORERUNNER);
    tm.traceMolecules(distanceThreshold, 1);
    Trace[] traces = tm.getTraces();

    for (Trace trace : traces) {
        if (trace.size() > 1) {
            // Check if the trace touches a valid point
            boolean isOk = false;
            for (PeakResult result : trace.getPoints()) {
                if (ok.contains(result)) {
                    isOk = true;
                    break;
                }
                ok.add(result);
            }
            // Add the entire trace to the OK points
            if (isOk) {
                for (PeakResult result : trace.getPoints()) {
                    ok.add(result);
                }
            }
        }
    }
}
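
The early exit on candidates.isEmpty() near the top of the method is a guard clause: it skips the statistics and tracing work entirely when no candidates were collected. The shape of the pattern, with allResults, isCandidate() and analyse() as hypothetical stand-ins:

LinkedList<PeakResult> candidates = new LinkedList<PeakResult>();
for (PeakResult result : allResults) {    // allResults is a placeholder collection
    if (isCandidate(result)) {
        candidates.add(result);
    }
}
if (candidates.isEmpty()) {
    return;                               // nothing collected: skip the expensive work
}
analyse(candidates);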

From source file:com.twitter.distributedlog.service.balancer.ClusterBalancer.java

void moveRemainingStreamsFromSource(Host source, List<Host> hosts, Optional<RateLimiter> rateLimiter) {
    LinkedList<String> streamsToMove = new LinkedList<String>(source.streams);
    Collections.shuffle(streamsToMove);

    if (logger.isDebugEnabled()) {
        logger.debug("Try to move remaining streams from {} : {}", source, streamsToMove);
    }

    int hostIdx = hosts.size() - 1;

    while (!streamsToMove.isEmpty()) {
        if (rateLimiter.isPresent()) {
            rateLimiter.get().acquire();
        }

        Host target = hosts.get(hostIdx);
        if (!target.address.equals(source.address)) {
            String stream = streamsToMove.remove();
            // move the stream
            if (moveStream(stream, source, target)) {
                source.streams.remove(stream);
                target.streams.add(stream);
            }
        }
        --hostIdx;
        if (hostIdx < 0) {
            hostIdx = hosts.size() - 1;
        }
    }
}
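
Two details worth noting here: the no-argument remove() takes the head of the list and throws NoSuchElementException when the list is empty, so the while (!streamsToMove.isEmpty()) condition is what makes the call safe; and because a stream is only removed when the target differs from the source, the loop relies on hosts containing at least one host other than the source, otherwise it would never terminate.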

From source file:org.rhq.core.pc.operation.OperationThreadPoolGateway.java

/**
 * Called by the {@link OperationInvocation} when it finishes, to notify this gateway that the
 * next pending operation for the resource (if any) is allowed to execute.
 *
 * @param operation the operation that has just completed
 */
public void operationCompleted(OperationInvocation operation) {
    Integer operationResourceId = Integer.valueOf(operation.getResourceId());

    synchronized (resourceQueues) {
        if (stopped) {
            return;
        }

        allOperations.remove(operation.getJobId());

        LinkedList<OperationInvocation> queuedOps = resourceQueues.get(operationResourceId);
        if (queuedOps != null) {
            // if there are no more operations waiting to be invoked on the resource, clean up the linked list;
            // otherwise, pop the next operation from the list and submit it to the thread pool for execution
            if (queuedOps.isEmpty()) {
                resourceQueues.remove(operationResourceId);
            } else {
                OperationInvocation nextOperation = queuedOps.remove();

                try {
                    log.debug("Resource is no longer busy - the next operation in line will be invoked: "
                            + nextOperation);
                    threadPool.execute(nextOperation);
                } catch (Exception e) {
                    log.error("Failed to submit next operation: " + nextOperation);
                }
            }
        }
    }

    return;
}
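
Here isEmpty() drives a keep-or-clean decision on a per-resource queue: an empty queue is removed from the map, while a non-empty one yields its next operation to the thread pool. A distilled sketch of just that decision, reusing the names from the example above:

synchronized (resourceQueues) {
    LinkedList<OperationInvocation> pending = resourceQueues.get(operationResourceId);
    if (pending != null) {
        if (pending.isEmpty()) {
            resourceQueues.remove(operationResourceId); // no pending work: drop the empty queue
        } else {
            threadPool.execute(pending.remove());       // submit the next queued operation
        }
    }
}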

From source file:org.alfresco.module.versionsdiff.VersionsDifferenceWebscript.java

@Override
protected Map<String, Object> executeImpl(WebScriptRequest req, Status status, Cache cache) {
    if (null == req) {
        logger.error("VersionsDifferenceWebscript.java: The request URL is not well formatted");
        throw new WebScriptException("VersionsDifferenceWebscript.java: The request URL is not well formatted");
    } else {

        // generate the returned model object
        Map<String, Object> model = new HashMap<String, Object>();

        // node reference to the last version of the document
        NodeRef lastVersRef = getArgsNodeRef(req);

        // node reference to the selected version of the document
        NodeRef selectVersRef = getArgsVersRef(req);

        // Instantiate the diff_match_patch object
        diff_match_patch diffMatchPatch = new diff_match_patch();

        // selectVersRef comes first so that INSERT and DELETE operations are computed relative to it
        LinkedList<Diff> diffList = diffMatchPatch.diff_main(getPlainTxtTrasformation(selectVersRef),
                getPlainTxtTrasformation(lastVersRef));

        // semantic cleanup post-processing for human readable differentiation
        diffMatchPatch.diff_cleanupSemantic(diffList);

        LinkedList<String[]> diffObjList = new LinkedList<String[]>();

        // loop through the Diffs LinkedList
        while (!diffList.isEmpty()) {
            // Pop of the first element in the list
            Diff element = diffList.pop();
            String[] obj = { element.operation.toString(), element.text.toString() };
            diffObjList.add(obj);
        }

        model.put("result", diffObjList);
        return model;
    }
}

From source file:org.openanzo.glitter.query.Projection.java

private void initialize() throws UnnamedProjectionException {
    // determine if this is an aggregate
    if (!this.groupByVars.isEmpty()) {
        this.isAggregate = true;
    } else {
        // see if any aggregate functions are involved, which means we're grouping in one big (happy) group
        LinkedList<Expression> expressions = new LinkedList<Expression>(this.projectedExpressions);
        while (!expressions.isEmpty()) {
            Expression e = expressions.removeFirst();
            if (e instanceof FunctionCall) {
                FunctionCall fc = (FunctionCall) e;
                if (fc.getFunction() instanceof AggregateFunction) {
                    this.isAggregate = true;
                    break;
                }
                expressions.addAll(fc.getArguments());
            }
        }
    }
    // check that we have an output name for every projected expression
    if (this.projectedAs.size() < this.projectedExpressions.size())
        throw new UnnamedProjectionException(this.projectedExpressions.get(this.projectedAs.size()));
    if (this.projectedAs.size() > this.projectedExpressions.size())
        throw new GlitterRuntimeException(ExceptionConstants.GLITTER.MORE_NAMES);
    int i;
    if ((i = this.projectedAs.indexOf(null)) != -1)
        throw new UnnamedProjectionException(this.projectedExpressions.get(i));
    for (i = 0; i < this.projectedAs.size(); i++)
        this.aliasMap.put(this.projectedAs.get(i), this.projectedExpressions.get(i));
}

From source file:com.hp.alm.ali.idea.services.EntityService.java

public void requestCachedEntity(final EntityRef ref, final List<String> properties,
        final EntityListener callback) {
    ApplicationUtil.executeOnPooledThread(new Runnable() {
        public void run() {
            final LinkedList<Entity> done = new LinkedList<Entity>();
            listeners.fire(new WeakListeners.Action<EntityListener>() {
                public void fire(EntityListener listener) {
                    if (done.isEmpty() && listener instanceof CachingEntityListener) {
                        Entity cached = ((CachingEntityListener) listener).lookup(ref);
                        if (cached != null) {
                            for (String property : properties) {
                                if (!cached.isInitialized(property)) {
                                    return;
                                }
                            }
                            done.add(cached);
                        }
                    }
                }
            });
            if (done.isEmpty()) {
                // not all requested properties are cached: fetch the full entity. A possible
                // optimization would be to request only the properties from the current
                // request plus those already initialized in the cached value (if any).
                getEntityAsync(ref, callback);
            } else {
                callback.entityLoaded(done.getFirst(), EntityListener.Event.CACHE);
            }
        }
    });
}
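
A less common use: isEmpty() doubling as a short-circuit flag. Since fire() invokes every registered listener, the done.isEmpty() check at the top of each callback makes all listeners after the first successful cache lookup return immediately; the list serves as both the result holder and the "already found" signal. A simplified sketch of the same idea, assuming a plain iterable collection of listeners instead of the fire() mechanism:

LinkedList<Entity> done = new LinkedList<Entity>();
for (CachingEntityListener listener : listeners) { // listeners is a placeholder collection
    if (!done.isEmpty()) {
        continue;                                  // a cached entity was already found
    }
    Entity cached = listener.lookup(ref);
    if (cached != null) {
        done.add(cached);
    }
}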

From source file:org.eclipse.che.api.builder.internal.SourcesManagerImpl.java

private void download(String downloadUrl, java.io.File downloadTo) throws IOException {
    HttpURLConnection conn = null;
    try {
        final LinkedList<java.io.File> q = new LinkedList<>();
        q.add(downloadTo);
        final long start = System.currentTimeMillis();
        final List<Pair<String, String>> md5sums = new LinkedList<>();
        while (!q.isEmpty()) {
            java.io.File current = q.pop();
            java.io.File[] list = current.listFiles();
            if (list != null) {
                for (java.io.File f : list) {
                    if (f.isDirectory()) {
                        q.push(f);
                    } else {
                        md5sums.add(Pair.of(com.google.common.io.Files.hash(f, Hashing.md5()).toString(),
                                downloadTo.toPath().relativize(f.toPath()).toString().replace("\\", "/"))); // replacing "\" with "/" is needed for Windows support
                    }
                }
            }
        }
        final long end = System.currentTimeMillis();
        if (md5sums.size() > 0) {
            LOG.debug("computed md5sums of {} files, time: {}ms", md5sums.size(), (end - start));
        }
        conn = (HttpURLConnection) new URL(downloadUrl).openConnection();
        conn.setConnectTimeout(CONNECT_TIMEOUT);
        conn.setReadTimeout(READ_TIMEOUT);
        final EnvironmentContext context = EnvironmentContext.getCurrent();
        if (context.getUser() != null && context.getUser().getToken() != null) {
            conn.setRequestProperty(HttpHeaders.AUTHORIZATION, context.getUser().getToken());
        }
        if (!md5sums.isEmpty()) {
            conn.setRequestMethod(HttpMethod.POST);
            conn.setRequestProperty("Content-type", MediaType.TEXT_PLAIN);
            conn.setRequestProperty(HttpHeaders.ACCEPT, MediaType.MULTIPART_FORM_DATA);
            conn.setDoOutput(true);
            try (OutputStream output = conn.getOutputStream(); Writer writer = new OutputStreamWriter(output)) {
                for (Pair<String, String> pair : md5sums) {
                    writer.write(pair.first);
                    writer.write(' ');
                    writer.write(pair.second);
                    writer.write('\n');
                }
            }
        }
        final int responseCode = conn.getResponseCode();
        if (responseCode == HttpURLConnection.HTTP_OK) {
            final String contentType = conn.getHeaderField("content-type");
            if (contentType.startsWith(MediaType.MULTIPART_FORM_DATA)) {
                final HeaderParameterParser headerParameterParser = new HeaderParameterParser();
                final String boundary = headerParameterParser.parse(contentType).get("boundary");
                try (InputStream in = conn.getInputStream()) {
                    MultipartStream multipart = new MultipartStream(in, boundary.getBytes());
                    boolean hasMore = multipart.skipPreamble();
                    while (hasMore) {
                        final Map<String, List<String>> headers = parseChunkHeader(
                                CharStreams.readLines(new StringReader(multipart.readHeaders())));
                        final List<String> contentDisposition = headers.get("content-disposition");
                        final String name = headerParameterParser.parse(contentDisposition.get(0)).get("name");
                        if ("updates".equals(name)) {
                            int length = -1;
                            List<String> contentLengthHeader = headers.get("content-length");
                            if (contentLengthHeader != null && !contentLengthHeader.isEmpty()) {
                                length = Integer.parseInt(contentLengthHeader.get(0));
                            }
                            if (length < 0 || length > 204800) {
                                java.io.File tmp = java.io.File.createTempFile("tmp", ".zip", directory);
                                try {
                                    try (FileOutputStream fOut = new FileOutputStream(tmp)) {
                                        multipart.readBodyData(fOut);
                                    }
                                    ZipUtils.unzip(tmp, downloadTo);
                                } finally {
                                    if (tmp.exists()) {
                                        tmp.delete();
                                    }
                                }
                            } else {
                                final ByteArrayOutputStream bOut = new ByteArrayOutputStream(length);
                                multipart.readBodyData(bOut);
                                ZipUtils.unzip(new ByteArrayInputStream(bOut.toByteArray()), downloadTo);
                            }
                        } else if ("removed-paths".equals(name)) {
                            final ByteArrayOutputStream bOut = new ByteArrayOutputStream();
                            multipart.readBodyData(bOut);
                            final String[] removed = JsonHelper.fromJson(
                                    new ByteArrayInputStream(bOut.toByteArray()), String[].class, null);
                            for (String path : removed) {
                                java.io.File f = new java.io.File(downloadTo, path);
                                if (!f.delete()) {
                                    throw new IOException(String.format("Unable to delete %s", path));
                                }
                            }
                        } else {
                            // To /dev/null :)
                            multipart.readBodyData(DEV_NULL);
                        }
                        hasMore = multipart.readBoundary();
                    }
                }
            } else {
                try (InputStream in = conn.getInputStream()) {
                    ZipUtils.unzip(in, downloadTo);
                }
            }
        } else if (responseCode != HttpURLConnection.HTTP_NO_CONTENT) {
            throw new IOException(
                    String.format("Invalid response status %d from remote server. ", responseCode));
        }
    } catch (ParseException | JsonParseException e) {
        throw new IOException(e.getMessage(), e);
    } finally {
        if (conn != null) {
            conn.disconnect();
        }
    }
}