Example usage for java.util LinkedList size

List of usage examples for java.util LinkedList size

Introduction

On this page you can find example usages of java.util.LinkedList.size().

Prototype

public int size()

Click the source link to view the source code for java.util.LinkedList.size().
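
Before the real-world examples below, here is a minimal, self-contained sketch of how size() behaves as a LinkedList grows and shrinks. The class name LinkedListSizeDemo and the list contents are hypothetical and only illustrate the call:

import java.util.LinkedList;

public class LinkedListSizeDemo {
    public static void main(String[] args) {
        LinkedList<String> names = new LinkedList<String>();
        System.out.println(names.size()); // 0: a new list is empty

        names.add("alice");
        names.addFirst("bob");
        System.out.println(names.size()); // 2: add and addFirst both grow the list

        names.removeLast();
        System.out.println(names.size()); // 1: removals shrink it again

        // size() > 0 is a common guard before get(0), as several examples below show
        if (names.size() > 0) {
            System.out.println(names.get(0));
        }
    }
}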

Usage

From source file:com.mirth.connect.plugins.serverlog.ServerLogProvider.java

public LinkedList<String[]> getServerLogs(String sessionId) {
    // Work with a deep-copied clone of the static server logs object in
    // order to avoid a ConcurrentModificationException from concurrent threads.
    LinkedList<String[]> serverLogsCloned = new LinkedList<String[]>();

    try {
        serverLogsCloned = (LinkedList<String[]>) SerializationUtils.clone(serverLogs);
    } catch (SerializationException e) {
        // ignore
    }

    if (lastDisplayedServerLogIdBySessionId.containsKey(sessionId)) {
        // A client exists with this sessionId.
        // -> only display new log entries.
        long lastDisplayedServerLogId = lastDisplayedServerLogIdBySessionId.get(sessionId);

        LinkedList<String[]> newServerLogEntries = new LinkedList<String[]>();
        // FYI, channelLog.size() will never be larger than LOG_SIZE = 100.
        for (String[] aServerLog : serverLogsCloned) {
            if (lastDisplayedServerLogId < Long.parseLong(aServerLog[0])) {
                newServerLogEntries.addLast(aServerLog);
            }
        }

        if (newServerLogEntries.size() > 0) {
            // Put the lastDisplayedLogId into the HashMap. Index 0 is
            // the most recent entry, and index 0 of that entry contains
            // the logId.
            lastDisplayedServerLogIdBySessionId.put(sessionId, Long.parseLong(newServerLogEntries.get(0)[0]));
        }

        try {
            return SerializationUtils.clone(newServerLogEntries);
        } catch (SerializationException e) {
            logger.error(e);
        }
    } else {
        // Brand new client, i.e. a brand new session id, so all log
        // entries are new.
        // -> display all log entries.
        if (serverLogsCloned.size() > 0) {
            lastDisplayedServerLogIdBySessionId.put(sessionId, Long.parseLong(serverLogsCloned.get(0)[0]));
        } else {
            // No logs exist at all. Put the current logId - 1, which is the
            // very latest logId.
            lastDisplayedServerLogIdBySessionId.put(sessionId, logId - 1);
        }

        try {
            return SerializationUtils.clone(serverLogsCloned);
        } catch (SerializationException e) {
            logger.error(e);
        }
    }

    return null;
}

From source file:com.asakusafw.directio.tools.DirectIoDelete.java

@Override
public int run(String[] args) throws Exception {
    LinkedList<String> argList = new LinkedList<>();
    Collections.addAll(argList, args);
    boolean recursive = false;
    while (argList.isEmpty() == false) {
        String arg = argList.removeFirst();
        if (arg.equals("-r") || arg.equals("-recursive")) { //$NON-NLS-1$ //$NON-NLS-2$
            recursive = true;
        } else if (arg.equals("--")) { //$NON-NLS-1$
            break;
        } else {
            argList.addFirst(arg);
            break;
        }
    }
    if (argList.size() < 2) {
        LOG.error(MessageFormat.format("Invalid arguments: {0}", Arrays.toString(args)));
        System.err.println(MessageFormat.format("Usage: hadoop {0} -conf <datasource-conf.xml> [-r] "
                + "base-path resource-pattern [resource-pattern [...]]", getClass().getName()));
        return 1;
    }
    String path = argList.removeFirst();
    List<FilePattern> patterns = new ArrayList<>();
    for (String arg : argList) {
        patterns.add(FilePattern.compile(arg));
    }
    if (repository == null) {
        repository = HadoopDataSourceUtil.loadRepository(getConf());
    }
    String basePath = repository.getComponentPath(path);
    DirectDataSource source = repository.getRelatedDataSource(path);
    for (FilePattern pattern : patterns) {
        source.delete(basePath, pattern, recursive, new Counter());
    }
    return 0;
}

From source file:com.shopzilla.hadoop.mapreduce.MiniMRClusterContextMRTest.java

@Test
public void testWordCount() throws Exception {
    Path input = new Path("/user/test/keywords_data");
    Path output = new Path("/user/test/word_count");

    Job job = new Job(configuration);

    job.setJobName("Word Count Test");

    job.setMapperClass(WordCountMapper.class);
    job.setReducerClass(SumReducer.class);

    job.setInputFormatClass(TextInputFormat.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(LongWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);
    job.setNumReduceTasks(1);
    FileInputFormat.setInputPaths(job, input);
    FileOutputFormat.setOutputPath(job, output);

    assertTrue("All files from /data classpath directory should have been copied into HDFS",
            miniMRClusterContext.getFileSystem().exists(input));

    job.waitForCompletion(true);

    assertTrue("Output file should have been created", miniMRClusterContext.getFileSystem().exists(output));

    final LinkedList<String> expectedLines = new LinkedList<String>();
    expectedLines.add("goodbye\t1");
    expectedLines.add("hello\t1");
    expectedLines.add("world\t2");

    miniMRClusterContext.processData(output, new Function<String, Void>() {
        @Override
        public Void apply(String line) {
            assertEquals(expectedLines.pop(), line);
            return null;
        }
    });
    assertEquals(0, expectedLines.size());
}

From source file:com.stimulus.archiva.domain.EmailFilter.java

public void setPriority(int id, Priority priority) {
    LinkedList<FilterRule> list = filterRules;
    FilterRule ar = filterRules.get(id);
    list.remove(ar);

    switch (priority) {
    case HIGHER:
        if ((id - 1) <= 0)
            list.addFirst(ar);
        else
            list.add(id - 1, ar);
        break;
    case LOWER:
        if ((id + 1) >= list.size())
            list.addLast(ar);
        else
            list.add(id + 1, ar);
        break;
    case HIGHEST:
        list.addFirst(ar);
        break;
    case LOWEST:
        list.addLast(ar);
        break;
    }
}

From source file:cn.keke.travelmix.publictransport.type.EfaConnectionResponseHandler.java

public void handle(HttpResponse response) throws IOException {
    if (this.job.isFinished()) {
        return;
    }
    HttpEntity entity = response.getEntity();
    BufferedInputStream in;
    if (this.zipped) {
        in = new BufferedInputStream(new GZIPInputStream(entity.getContent()));
    } else {
        in = new BufferedInputStream(entity.getContent());
    }

    String responseText = IOUtils.toString(in, CHARSET_ISO_8859_1);
    if (this.job.isFinished()) {
        return;
    }
    // LOG.info("PT response: " + responseText);
    LinkedList<PartialRoute> partialRoutes = parseExternalRouteResponse(responseText);
    if (!partialRoutes.isEmpty()) {
        LOG.info("Got " + partialRoutes.size() + " partial routes");
        if (!this.job.setFinished(this.url)) {
            return;
        }
        RouteResult result = readRouteInfo(partialRoutes);
        createRouteResponse(this.sb, result);
        this.job.setHandled();
    } else {
        LOG.info("No partial routes received: " + url);
    }
}

From source file:com.espertech.esper.regression.support.PatternTestHarness.java

private void runTest(PatternTestStyle testStyle) throws Exception {
    Configuration config = SupportConfigFactory.getConfiguration();
    config.addEventType("A", SupportBean_A.class);
    config.addEventType("B", SupportBean_B.class);
    config.addEventType("C", SupportBean_C.class);
    config.addEventType("D", SupportBean_D.class);
    config.addEventType("E", SupportBean_E.class);
    config.addEventType("F", SupportBean_F.class);
    config.addEventType("G", SupportBean_G.class);
    EPServiceProvider serviceProvider = EPServiceProviderManager.getDefaultProvider(config);
    serviceProvider.initialize();

    EPRuntime runtime = serviceProvider.getEPRuntime();

    // Send the start time to the runtime
    if (sendEventCollection.getTime(EventCollection.ON_START_EVENT_ID) != null) {
        TimerEvent startTime = new CurrentTimeEvent(
                sendEventCollection.getTime(EventCollection.ON_START_EVENT_ID));
        runtime.sendEvent(startTime);
        log.debug(".runTest Start time is " + startTime);
    }

    // Set up expression filters and match listeners

    int index = 0;
    for (EventExpressionCase descriptor : caseList.getResults()) {
        String expressionText = descriptor.getExpressionText();
        EPStatementObjectModel model = descriptor.getObjectModel();

        EPStatement statement = null;

        try {
            if (model != null) {
                statement = serviceProvider.getEPAdministrator().create(model, "name--" + expressionText);
            } else {
                if (testStyle == PatternTestStyle.USE_PATTERN_LANGUAGE) {
                    statement = serviceProvider.getEPAdministrator().createPattern(expressionText,
                            "name--" + expressionText);
                } else if (testStyle == PatternTestStyle.USE_EPL) {
                    String text = "@Audit('pattern') @Audit('pattern-instances') select * from pattern ["
                            + expressionText + "]";
                    statement = serviceProvider.getEPAdministrator().createEPL(text);
                    expressionText = text;
                } else if (testStyle == PatternTestStyle.COMPILE_TO_MODEL) {
                    String text = "select * from pattern [" + expressionText + "]";
                    EPStatementObjectModel mymodel = serviceProvider.getEPAdministrator().compileEPL(text);
                    statement = serviceProvider.getEPAdministrator().create(mymodel);
                    expressionText = text;
                } else if (testStyle == PatternTestStyle.COMPILE_TO_EPL) {
                    String text = "select * from pattern [" + expressionText + "]";
                    EPStatementObjectModel mymodel = serviceProvider.getEPAdministrator().compileEPL(text);
                    String reverse = mymodel.toEPL();
                    statement = serviceProvider.getEPAdministrator().createEPL(reverse);
                    expressionText = reverse;
                } else {
                    throw new IllegalArgumentException("Unknown test style");
                }
            }
        } catch (Exception ex) {
            String text = expressionText;
            if (model != null) {
                text = "Model: " + model.toEPL();
            }
            log.fatal(".runTest Failed to create statement for style " + testStyle + " pattern expression="
                    + text, ex);
            TestCase.fail();
        }

        // We stop the statement again and start after the first listener was added.
        // Thus we can handle patterns that fireStatementStopped on startup.
        statement.stop();

        expressions[index] = statement;
        expressions[index].addListener(listeners[index]);

        // Start the statement again: listeners now got called for on-start events such as for a "not"
        statement.start();

        index++;
    }

    // Some expressions may fireStatementStopped as soon as they are started, such as a "not b()" expression, for example.
    // Check results for any such listeners/expressions.
    // NOTE: For EPL statements we do not support calling listeners for a pattern that fires upon start.
    // The reason is that this should not be relevant functionality of a pattern: the start-pattern
    // event itself cannot carry any information and is thus ignored. Note that subsequent events
    // generated by the same pattern are fine.
    int totalEventsReceived = 0;
    if (testStyle != PatternTestStyle.USE_PATTERN_LANGUAGE) {
        clearListenerEvents();
        totalEventsReceived += countExpectedEvents(EventCollection.ON_START_EVENT_ID);
    } else // Patterns do need to handle event publishing upon pattern expression start (patterns that turn true right away)
    {
        checkResults(testStyle, EventCollection.ON_START_EVENT_ID);
        totalEventsReceived += countListenerEvents();
        clearListenerEvents();
    }

    // Send actual test events
    for (Map.Entry<String, Object> entry : sendEventCollection.entrySet()) {
        String eventId = entry.getKey();

        // Manipulate the time when this event was sent
        if (sendEventCollection.getTime(eventId) != null) {
            TimerEvent currentTimeEvent = new CurrentTimeEvent(sendEventCollection.getTime(eventId));
            runtime.sendEvent(currentTimeEvent);
            log.debug(".runTest Sending event " + entry.getKey() + " = " + entry.getValue() + "  timed "
                    + currentTimeEvent);
        }

        // Send event itself
        runtime.sendEvent(entry.getValue());

        // Check expected results for this event
        checkResults(testStyle, eventId);

        // Count and clear the list of events that each listener has received
        totalEventsReceived += countListenerEvents();
        clearListenerEvents();
    }

    // Count number of expected matches
    int totalExpected = 0;
    for (EventExpressionCase descriptor : caseList.getResults()) {
        for (LinkedList<EventDescriptor> events : descriptor.getExpectedResults().values()) {
            totalExpected += events.size();
        }
    }

    if (totalExpected != totalEventsReceived) {
        log.debug(".test Count expected does not match count received, expected=" + totalExpected + " received="
                + totalEventsReceived);
        TestCase.assertTrue(false);
    }

    // Kill all expressions
    for (EPStatement expression : expressions) {
        expression.removeAllListeners();
    }

    // Send test events again to also test that all were indeed killed
    for (Map.Entry<String, Object> entry : sendEventCollection.entrySet()) {
        runtime.sendEvent(entry.getValue());
    }

    // Make sure all listeners are still at zero
    for (SupportUpdateListener listener : listeners) {
        if (listener.getNewDataList().size() > 0) {
            log.debug(".test A match was received after stopping all expressions");
            TestCase.assertTrue(false);
        }
    }
}

From source file:com.moorestudio.seniorimageprocessing.SeniorSorter.java

public void sortImages() {
    LinkedList<Map.Entry<String, Long>> timestampList = new LinkedList<>(timestampData.entrySet());
    sort(timestampList, (x, y) -> x.getValue() > y.getValue() ? -1 : x.getValue().equals(y.getValue()) ? 0 : 1);
    // Sort in reverse so that the most recent timestamps are first.

    LinkedList<Map.Entry<File, Long>> imageDataList = new LinkedList<>(imageData.entrySet());
    sort(imageDataList, (x, y) -> x.getValue() > y.getValue() ? -1 : x.getValue().equals(y.getValue()) ? 0 : 1); // Sort in reverse so that the most recent timestamps are first.

    // For the gui update
    int idCount = imageDataList.size();

    // Take the first image and the first timestamp scan taken, which are last in their lists,
    // and sync the camera time to the timestamp time. Both are throwaways.
    if (!timestampList.isEmpty() && !imageDataList.isEmpty() && parent.syncTime) {
        Map.Entry<File, Long> iData = imageDataList.pollLast();
        Map.Entry<String, Long> tsData = timestampList.pollLast();

        //Make the offset
        cameraTimeOffset = tsData.getValue() - iData.getValue();
    }

    // Add files to the student with the top timestamp until the image time no longer exceeds that timestamp.
    while (!timestampList.isEmpty() && !imageDataList.isEmpty()) {
        Map.Entry<File, Long> iData = imageDataList.peekFirst();
        Map.Entry<String, Long> tsData = timestampList.pollFirst();
        ArrayList<File> studentImages = new ArrayList<>();
        while (!imageDataList.isEmpty() && iData.getValue() + cameraTimeOffset > tsData.getValue()) {
            iData = imageDataList.pollFirst();
            studentImages.add(iData.getKey());
            iData = imageDataList.peekFirst();
            //update the GUI
            parent.addProgress((.125 / parent.numThreads) / idCount);
        }
        if (!studentImages.isEmpty()) {
            parent.addImagesToStudent(tsData.getKey(), studentImages);
        }
    }

    //add the unsorted images to the parent's unsorted queue
    for (Map.Entry<File, Long> entry : imageDataList) {
        parent.unsortedFiles.add(entry.getKey());
        //update the GUI
        parent.addProgress((.125 / parent.numThreads) / idCount);
    }
}

From source file:io.syndesis.jsondb.impl.SqlJsonDB.java

private int deleteJsonRecords(Handle dbi, String baseDBPath, String like) {

    LinkedList<String> params = getAllParentPaths(baseDBPath);

    StringBuilder sql = new StringBuilder("DELETE from jsondb where path LIKE ?");
    if (!params.isEmpty()) {
        sql.append(" OR path in ( ").append(String.join(", ", Collections.nCopies(params.size(), "?")))
                .append(" )");
    }

    params.addFirst(like);
    return dbi.update(sql.toString(), params.toArray());
}

From source file:com.axway.ebxml.CertificateChain.java

/**
 * Constructor
 * @param certificatePath path to a p7b or DER encoded file
 * @return Array of X509Certificate
 * @throws java.io.FileNotFoundException
 * @throws java.security.cert.CertificateException
 */
public CertificateChain(String certificatePath) throws CertificateException, IOException {
    if (certificatePath == null)
        throw new IllegalArgumentException("certificatePath expected");

    logger.debug("Loading certificate from: " + certificatePath);

    LinkedList<X509Certificate> returnList = new LinkedList<X509Certificate>();
    FileInputStream fis = new FileInputStream(certificatePath);
    try {
        CertificateFactory cf = CertificateFactory.getInstance("X.509");
        Collection certificates = cf.generateCertificates(fis);
        for (Object cert : certificates) {
            returnList.add((X509Certificate) cert);
            logger.debug("Certificate: " + cert);
        }
    } finally {
        fis.close();
    }

    chain = returnList.toArray(new X509Certificate[returnList.size()]);
}

From source file:com.snowplowanalytics.snowplow.tracker.emitter.Emitter.java

/**
 * Sends all events in the buffer to the collector.
 */
public void flushBuffer() {
    if (buffer.isEmpty()) {
        logger.debug("Buffer is empty, exiting flush operation..");
        return;
    }

    if (httpMethod == HttpMethod.GET) {
        int success_count = 0;
        LinkedList<Payload> unsentPayloads = new LinkedList<Payload>();

        for (Payload payload : buffer) {
            int status_code = sendGetData(payload).getStatusLine().getStatusCode();
            if (status_code == 200)
                success_count++;
            else
                unsentPayloads.add(payload);
        }

        if (unsentPayloads.size() == 0) {
            if (requestCallback != null)
                requestCallback.onSuccess(success_count);
        } else if (requestCallback != null)
            requestCallback.onFailure(success_count, unsentPayloads);

    } else if (httpMethod == HttpMethod.POST) {
        LinkedList<Payload> unsentPayload = new LinkedList<Payload>();

        SchemaPayload postPayload = new SchemaPayload();
        postPayload.setSchema(Constants.SCHEMA_PAYLOAD_DATA);

        ArrayList<Map> eventMaps = new ArrayList<Map>();
        for (Payload payload : buffer) {
            eventMaps.add(payload.getMap());
        }
        postPayload.setData(eventMaps);

        int status_code = sendPostData(postPayload).getStatusLine().getStatusCode();
        if (status_code == 200 && requestCallback != null)
            requestCallback.onSuccess(buffer.size());
        else if (requestCallback != null) {
            unsentPayload.add(postPayload);
            requestCallback.onFailure(0, unsentPayload);
        }
    }

    // Empties current buffer
    buffer.clear();
}