Example usage for java.util LinkedList isEmpty

List of usage examples for java.util LinkedList isEmpty

Introduction

On this page you can find usage examples for java.util LinkedList isEmpty.

Prototype

boolean isEmpty();

Document

Returns true if this list contains no elements.
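
Before the project examples, here is a minimal, self-contained sketch (not taken from any project on this page; the class name IsEmptyDemo is purely illustrative) of the most common pattern below: using isEmpty() as the loop guard while draining a LinkedList that serves as a queue.

import java.util.LinkedList;

// IsEmptyDemo is an illustrative class name, not part of any project listed here.
public class IsEmptyDemo {
    public static void main(String[] args) {
        LinkedList<String> queue = new LinkedList<>();
        System.out.println(queue.isEmpty()); // true: a new list contains no elements

        queue.add("first");
        queue.add("second");
        System.out.println(queue.isEmpty()); // false

        // Drain the list as a FIFO queue, the pattern most examples below rely on.
        while (!queue.isEmpty()) {
            System.out.println(queue.poll());
        }
        System.out.println(queue.isEmpty()); // true again once the queue is drained
    }
}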

Usage

From source file:io.druid.firehose.google.StaticGoogleBlobStoreFirehoseFactory.java

@Override
public Firehose connect(StringInputRowParser stringInputRowParser) throws IOException {
    Preconditions.checkNotNull(storage, "null storage");

    final LinkedList<GoogleBlob> objectQueue = Lists.newLinkedList(blobs);

    return new FileIteratingFirehose(new Iterator<LineIterator>() {
        @Override
        public boolean hasNext() {
            return !objectQueue.isEmpty();
        }

        @Override
        public LineIterator next() {
            final GoogleBlob nextURI = objectQueue.poll();

            final String bucket = nextURI.getBucket();
            final String path = nextURI.getPath().startsWith("/") ? nextURI.getPath().substring(1)
                    : nextURI.getPath();

            try {
                final InputStream innerInputStream = new GoogleByteSource(storage, bucket, path).openStream();

                final InputStream outerInputStream = path.endsWith(".gz")
                        ? CompressionUtils.gzipInputStream(innerInputStream)
                        : innerInputStream;

                return IOUtils.lineIterator(
                        new BufferedReader(new InputStreamReader(outerInputStream, Charsets.UTF_8)));
            } catch (Exception e) {
                LOG.error(e, "Exception opening bucket[%s] blob[%s]", bucket, path);

                throw Throwables.propagate(e);
            }
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }, stringInputRowParser);
}
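
In this example isEmpty() backs the iterator's hasNext(): the configured blobs are copied into a LinkedList used as a queue, each next() call polls the head, and the firehose is exhausted exactly when the list reports empty. The Azure and Cloud Files factories below follow the same pattern.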

From source file:io.druid.firehose.azure.StaticAzureBlobStoreFirehoseFactory.java

@Override
public Firehose connect(StringInputRowParser stringInputRowParser) throws IOException {
    Preconditions.checkNotNull(azureStorage, "null azureStorage");

    final LinkedList<AzureBlob> objectQueue = Lists.newLinkedList(blobs);

    return new FileIteratingFirehose(new Iterator<LineIterator>() {
        @Override
        public boolean hasNext() {
            return !objectQueue.isEmpty();
        }

        @Override
        public LineIterator next() {
            final AzureBlob nextURI = objectQueue.poll();

            final String container = nextURI.getContainer();
            final String path = nextURI.getPath().startsWith("/") ? nextURI.getPath().substring(1)
                    : nextURI.getPath();

            try {
                final InputStream innerInputStream = new AzureByteSource(azureStorage, container, path)
                        .openStream();

                final InputStream outerInputStream = path.endsWith(".gz")
                        ? CompressionUtils.gzipInputStream(innerInputStream)
                        : innerInputStream;

                return IOUtils.lineIterator(
                        new BufferedReader(new InputStreamReader(outerInputStream, Charsets.UTF_8)));
            } catch (Exception e) {
                log.error(e, "Exception opening container[%s] blob[%s]", container, path);

                throw Throwables.propagate(e);
            }
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }, stringInputRowParser);
}

From source file:ltsa.jung.TreeLikeGraphLayout.java

/**
 * Computes the depth of every node by a BFS on the graph. Stores
 * the depth in {@code depths} and the maximal depth in {@code maxDepth}.
 */
protected void computeDepth() {
    if (root == null || graph == null)
        return;
    LinkedList<V> queue = new LinkedList<V>();
    queue.add(root);
    depths.put(root, 0);

    while (!queue.isEmpty()) {
        V v = queue.removeFirst(); // FIFO for BFS order

        for (V successor : graph.getSuccessors(v)) {
            if (!depths.containsKey(successor)) {
                int depth = depths.get(v) + 1;
                depths.put(successor, depth);
                if (depth > maxDepth)
                    maxDepth = depth;
                queue.add(successor);
            }
        }
    }
}
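
Here the LinkedList doubles as the FIFO work queue of a breadth-first search: nodes are appended with add, taken from the head with removeFirst, and isEmpty() is the loop's termination condition.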

From source file:com.opera.core.systems.scope.stp.services.ScopeWindowManager.java

public void closeAllWindows() {
    int toBeClosed = windows.size();

    if (supportsClosingWindows()) {
        LinkedList<Integer> list = Lists.newLinkedList(windows.asStack());
        while (!list.isEmpty()) {
            closeWindow(list.removeFirst());
        }
    } else if (services.getExec().getActionList().contains(CLOSE_ALL_PAGES_ACTION)) {
        services.getExec().action(CLOSE_ALL_PAGES_ACTION);
        sleep(OperaIntervals.WINDOW_CLOSE_USING_ACTION_SLEEP.getMs() * toBeClosed);
    } else {
        // This is a different type of exception than the one in closeWindow(i)
        throw new UnsupportedOperationException("Product does not support closing windows");
    }
}

From source file:com.zh.snmp.snmpcore.services.impl.DeviceServiceImpl.java

@Override
public Device setDeviceConfig(String nodeId, List<String> path, List<DinamicValue> dinamicValues, int mode) {
    Device device = findDeviceByDeviceId(nodeId);
    if (device == null) {
        return null;
    }
    LinkedList<String> pathl = new LinkedList<String>(path);
    DeviceNode dconfig = device.getConfigMap();
    if (!pathl.isEmpty()) {
        String rootc = pathl.pop();
        if (!dconfig.getCode().equals(rootc) || pathl.isEmpty()) {
            return null;
        }
    }
    DeviceNode node = dconfig.findChainChild(pathl);
    if (node != null) {
        node.setSelected(mode == 1);
        if (dinamicValues != null) {
            for (DinamicValue dv : dinamicValues) {
                node.setDinamicValue(dv.getCode(), dv.getValue());
            }
        }
        device.setConfigMap(dconfig);
        return save(device);
    } else {
        return null;
    }
}

From source file:org.jsweet.test.transpiler.TranspilerTests.java

@Test
public void testCommandLine() throws Throwable {
    File outDir = new File(new File(TMPOUT_DIR), getCurrentTestName() + "/" + ModuleKind.none);

    Process process = ProcessUtil.runCommand("java", line -> {
        System.out.println(line);
    }, null, "-cp", System.getProperty("java.class.path"), //
            JSweetCommandLineLauncher.class.getName(), //
            "--tsout", outDir.getPath(), //
            "--jsout", outDir.getPath(), //
            "--sourceMap", //
            "-i", TEST_DIRECTORY_NAME + "/org/jsweet/test/transpiler/source/blocksgame");

    assertTrue(process.exitValue() == 0);
    LinkedList<File> files = new LinkedList<>();
    Util.addFiles(".ts", outDir, files);
    assertTrue(!files.isEmpty());
    Util.addFiles(".js", outDir, files);
    assertTrue(!files.isEmpty());
    Util.addFiles(".js.map", outDir, files);
    assertTrue(!files.isEmpty());

}

From source file:org.elasticwarehouse.core.graphite.RRDManager.java

public static boolean expandRRDFile(String tmpfolder, String filenamepath, LinkedList<String> customattributes,
        String targetfilenamepath) throws IOException, ParseException {
    //sources_ = customattributes;
    //RrdDef def = rrdDb_.getRrdDef();
    LinkedList<DsDef> newdefs = new LinkedList<DsDef>();
    //String[] datasources = rrdDb_.getDsNames();
    //initDataSources(customattributes, true);
    for (String sourceName : customattributes/*sources_*/) {
        /*boolean found = false;
        for(String currentsourceName : datasources)
          {
           if( currentsourceName.equals(sourceName) )
           {
              found = true;
              break;
           }
          }*/
        //if( !found )
        //{
        LOGGER.info("Adding: " + sourceName + " in " + targetfilenamepath + "(org:" + filenamepath + ")");
        //def.addDatasource(sourceName, GAUGE, 600, 0, Double.NaN);
        newdefs.add(new DsDef(sourceName, GAUGE, 600, 0, Double.NaN));
        //}
    }
    if (!newdefs.isEmpty()) {
        /*if( rrdDb_ != null )
        {
           rrdDb_.close();
           rrdDb_ = null;
        }*/

        MonitoringManager.closeFilesInElasticSearchMonitors();

        String tmpFilename = tmpfolder + "/" + FilenameUtils.getBaseName(filenamepath) + ".rrd.tmp";
        //String tmpFilenameCopy = tmpfolder+"/"+FilenameUtils.getBaseName(filenamepath)+".rrd.tmpcopy";

        Files.deleteIfExists(new File(tmpFilename).toPath());
        //Files.deleteIfExists(new File(tmpFilenameCopy).toPath());
        FileTools.copy(filenamepath, tmpFilename);
        /*int attemp=0;
        for(;;)
        {
           if( attemp == 5)
              break;
           try{
              attemp++;
              Files.deleteIfExists(new File(filenamepath).toPath());
              break;
           }catch(java.nio.file.FileSystemException e)
           {
              LOGGER.info("Got java.nio.file.FileSystemException, waiting...." + e.getMessage() );
           }
           try
           {
              Thread.sleep(2300);
           }catch(InterruptedException e)
           {
              EWLogger.logerror(e);
           }
        }*/

        //if( attemp < 5) 
        //{
        RrdToolkit.addDatasources(tmpFilename, targetfilenamepath, newdefs);
        //FileTools.copy(tmpFilename, filenamepath);
        MonitoringManager.reopenFilesInElasticSearchMonitors();
        Files.deleteIfExists(new File(tmpFilename).toPath());
        //Files.deleteIfExists(new File(tmpFilenameCopy).toPath());
        //}
    }
    return true;
}

From source file:io.druid.firehose.cloudfiles.StaticCloudFilesFirehoseFactory.java

@Override
public Firehose connect(StringInputRowParser stringInputRowParser) throws IOException, ParseException {
    Preconditions.checkNotNull(cloudFilesApi, "null cloudFilesApi");

    final LinkedList<CloudFilesBlob> objectQueue = Lists.newLinkedList(blobs);

    return new FileIteratingFirehose(new Iterator<LineIterator>() {

        @Override
        public boolean hasNext() {
            return !objectQueue.isEmpty();
        }

        @Override
        public LineIterator next() {
            final CloudFilesBlob nextURI = objectQueue.poll();

            final String region = nextURI.getRegion();
            final String container = nextURI.getContainer();
            final String path = nextURI.getPath();

            log.info("Retrieving file from region[%s], container[%s] and path [%s]", region, container, path);
            CloudFilesObjectApiProxy objectApi = new CloudFilesObjectApiProxy(cloudFilesApi, region, container);
            final CloudFilesByteSource byteSource = new CloudFilesByteSource(objectApi, path);

            try {
                final InputStream innerInputStream = byteSource.openStream();
                final InputStream outerInputStream = path.endsWith(".gz")
                        ? CompressionUtils.gzipInputStream(innerInputStream)
                        : innerInputStream;

                return IOUtils.lineIterator(
                        new BufferedReader(new InputStreamReader(outerInputStream, Charsets.UTF_8)));
            } catch (IOException e) {
                log.error(e, "Exception opening container[%s] blob[%s] from region[%s]", container, path,
                        region);

                throw Throwables.propagate(e);
            }
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }

    }, stringInputRowParser);
}

From source file:org.trnltk.experiment.bruteforce.BruteForceExperiments.java

@Test
public void shouldParseTbmmJournal_b0241h() throws IOException {
    final File tokenizedFile = new File("core/src/test/resources/tokenizer/tbmm_b0241h_tokenized.txt");
    final List<String> lines = Files.readLines(tokenizedFile, Charsets.UTF_8);
    final LinkedList<String> words = new LinkedList<String>();
    for (String line : lines) {
        words.addAll(Lists.newArrayList(Splitter.on(" ").trimResults().omitEmptyStrings().split(line)));
    }

    final StopWatch stopWatch = new StopWatch();
    int parseResultCount = 0;
    final int MAX_WORD_LENGTH = 100;

    int[] wordCountsByLength = new int[MAX_WORD_LENGTH];
    int[] parseResultCountTotalsByTokenLength = new int[MAX_WORD_LENGTH];

    stopWatch.start();
    stopWatch.suspend();

    for (String word : words) {
        stopWatch.resume();
        final LinkedList<MorphemeContainer> morphemeContainers = parser.parse(new TurkishSequence(word));
        stopWatch.suspend();
        if (morphemeContainers.isEmpty())
            System.out.println("Word is not parsable " + word);
        parseResultCount += morphemeContainers.size();
        parseResultCountTotalsByTokenLength[word.length()] += morphemeContainers.size();
        wordCountsByLength[word.length()]++;
    }

    stopWatch.stop();

    final double[] parseResultCountAvgsByLength = new double[MAX_WORD_LENGTH];
    for (int i = 0; i < parseResultCountTotalsByTokenLength.length; i++) {
        int totalParseResultCount = parseResultCountTotalsByTokenLength[i];
        final int wordCount = wordCountsByLength[i];
        parseResultCountAvgsByLength[i] = Double.valueOf(totalParseResultCount) / Double.valueOf(wordCount);
    }

    System.out.println("Total time :" + stopWatch.toString());
    System.out.println("Nr of tokens : " + words.size());
    System.out.println("Nr of parse results : " + parseResultCount);
    System.out.println("Avg time : " + (stopWatch.getTime() * 1.0d) / (words.size() * 1.0d) + " ms");
    System.out.println("Avg parse result count : " + (parseResultCount * 1.0) / (words.size() * 1.0));
    System.out.println("Word counts by token length " + "\n\t" + Arrays.toString(wordCountsByLength));
    System.out.println("Parse result count totals by token length " + "\n\t"
            + Arrays.toString(parseResultCountTotalsByTokenLength));
    System.out.println("Parse result count avgs by token length " + "\n\t"
            + Arrays.toString(parseResultCountAvgsByLength));
}

From source file:com.offbynull.voip.kademlia.GraphHelper.java

private ArrayList<BitString> removePrefixesForNextLevel(LinkedList<BitString> sortedPrefixes) {
    ArrayList<BitString> ret = new ArrayList<>();

    if (sortedPrefixes.isEmpty()) {
        return ret;
    }

    int hitCount = sortedPrefixes.peekFirst().getBitLength();

    while (!sortedPrefixes.isEmpty()) {
        if (sortedPrefixes.peekFirst().getBitLength() == hitCount) {
            ret.add(sortedPrefixes.removeFirst());
        } else {
            break;
        }
    }

    return ret;
}
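
Note the guard at the top of this method: peekFirst() returns null on an empty LinkedList, so checking isEmpty() first avoids a NullPointerException before getBitLength() is called.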