Example usage for java.util LinkedList getFirst

List of usage examples for java.util LinkedList getFirst

Introduction

This page lists example usages of java.util.LinkedList.getFirst().

Prototype

public E getFirst() 

Document

Returns the first element in this list. Throws NoSuchElementException if this list is empty.
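
Before the project examples below, a minimal self-contained sketch of the behaviour described above (not taken from any of the sources on this page; the class and variable names are invented for illustration), including the contrast with peekFirst():

import java.util.LinkedList;
import java.util.NoSuchElementException;

public class GetFirstDemo {
    public static void main(String[] args) {
        LinkedList<String> names = new LinkedList<>();
        names.add("alpha");
        names.add("beta");

        // getFirst() returns the head of the list without removing it.
        System.out.println(names.getFirst()); // alpha
        System.out.println(names.size());     // still 2

        // On an empty list getFirst() throws NoSuchElementException,
        // whereas peekFirst() returns null.
        names.clear();
        System.out.println(names.peekFirst()); // null
        try {
            names.getFirst();
        } catch (NoSuchElementException e) {
            System.out.println("getFirst() on an empty list: " + e);
        }
    }
}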

Usage

From source file:org.batoo.jpa.core.pool.GenericKeyedPool.java

/**
 * @param maxSize
 *            the maximum size of the pool
 * @param pool
 *            the pool
 * @param key
 *            the key
 * @throws Exception
 * 
 * @since $version
 * @author hceylan
 */
private void shrinkTo(K key, LinkedList<V> pool, int maxSize) throws Exception {
    while (pool.size() > maxSize) {
        // Peek at the oldest pooled object, destroy it, then drop it from the pool
        // (without the removal the loop would never terminate).
        final V obj = pool.getFirst();
        this.factory.destroyObject(key, obj);
        pool.removeFirst();
    }
}

From source file:org.jbpm.db.JbpmSession.java

public void popCurrentSession() {
    LinkedList stack = (LinkedList) currentJbpmSessionStack.get();
    if ((stack == null) || (stack.isEmpty()) || (stack.getFirst() != this)) {
        log.warn("can't pop current session: are you calling JbpmSession.close() multiple times ?");
    } else {
        stack.removeFirst();
    }
}

From source file:com.servioticy.queueclient.SimpleQueueClient.java

@Override
protected Object getImpl() {
    LinkedList<Object> queue;
    Object returnValue;
    try {
        queue = readQueue();
    } catch (Exception e) {
        return null;
    }
    if (queue.isEmpty()) {
        return null;
    }
    returnValue = queue.getFirst();
    queue.removeFirst();

    try {
        writeQueue(queue);
    } catch (Exception e) {
        // Best-effort persist: a failure to rewrite the queue is ignored and
        // the already-dequeued element is still returned.
    }

    return returnValue;
}
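
The getFirst()/removeFirst() pair in getImpl() dequeues the head element. As a side note, a minimal sketch (names invented for illustration) of the equivalent single call pollFirst(), which also returns null on an empty queue instead of throwing:

import java.util.LinkedList;

public class PollFirstSketch {
    public static void main(String[] args) {
        LinkedList<Object> queue = new LinkedList<>();
        queue.add("job-1");
        queue.add("job-2");

        // getFirst() followed by removeFirst(), as in getImpl() above ...
        Object head = queue.getFirst();
        queue.removeFirst();

        // ... dequeues the same way as a single pollFirst(), which additionally
        // returns null instead of throwing when the queue is empty.
        Object next = queue.pollFirst();

        System.out.println(head);              // job-1
        System.out.println(next);              // job-2
        System.out.println(queue.pollFirst()); // null
    }
}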

From source file:edu.cornell.mannlib.vedit.controller.BaseEditController.java

protected EditProcessObject createEpo(HttpServletRequest request, boolean forceNew) {
    /* this is actually a bit of a misnomer, because we will reuse an epo
    if an epoKey parameter is passed */
    EditProcessObject epo = null;
    HashMap epoHash = getEpoHash(request);
    String existingEpoKey = request.getParameter("_epoKey");
    if (!forceNew && existingEpoKey != null && epoHash.get(existingEpoKey) != null) {
        epo = (EditProcessObject) epoHash.get(existingEpoKey);
        epo.setKey(existingEpoKey);
        epo.setUseRecycledBean(true);
    } else {
        LinkedList epoKeylist = getEpoKeylist(request);
        if (epoHash.size() == MAX_EPOS) {
            try {
                epoHash.remove(epoKeylist.getFirst());
                epoKeylist.removeFirst();
            } catch (Exception e) {
                // see JIRA issue VITRO-340, "Odd exception from backend editing"
                // possible rare concurrency issue here
                log.error("Error removing old EPO", e);
            }
        }
        Random rand = new Random();
        String epoKey = createEpoKey();
        while (epoHash.get(epoKey) != null) {
            epoKey += Integer.toHexString(rand.nextInt());
        }
        epo = new EditProcessObject();
        epoHash.put(epoKey, epo);
        epoKeylist.add(epoKey);
        epo.setKey(epoKey);
        epo.setReferer(
                (forceNew) ? request.getRequestURL().append('?').append(request.getQueryString()).toString()
                        : request.getHeader("Referer"));
        epo.setSession(request.getSession());
    }
    return epo;
}
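
Here epoKeylist keeps the EPO keys in insertion order, so getFirst() yields the oldest key to evict once the map is full. A stripped-down sketch of that bounded-map pattern (class name, field names and the size limit are invented for illustration):

import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;

public class BoundedMapSketch {
    private static final int MAX_ENTRIES = 3;                  // hypothetical limit
    private final Map<String, Object> values = new HashMap<>();
    private final LinkedList<String> insertionOrder = new LinkedList<>();

    public void put(String key, Object value) {
        if (values.size() == MAX_ENTRIES) {
            // The oldest key sits at the head of the insertion-order list; evict it.
            String oldest = insertionOrder.getFirst();
            insertionOrder.removeFirst();
            values.remove(oldest);
        }
        values.put(key, value);
        insertionOrder.add(key);
    }

    public static void main(String[] args) {
        BoundedMapSketch cache = new BoundedMapSketch();
        for (int i = 1; i <= 5; i++) {
            cache.put("k" + i, i);
        }
        System.out.println(cache.values.keySet()); // k3, k4, k5 (iteration order not guaranteed)
    }
}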

From source file:org.mbs3.duplicatefinder.DriverClass.java

public void run() {
    try {
        Vector stop = new Vector();
        DuplicateFinder df = new DuplicateFinder(path, recurse, stop);
        long wastedSpaceTotal = 0;

        System.out.println("Looking for duplicate files in " + path);
        Thread t = new Thread(null, df, "Search Thread");
        t.start();
        // while(df.getDuplicates().size() < 50)
        // {
        // Thread.yield();
        // System.err.println("loop");
        // }

        // System.out.println("Halting the search thread");
        // stop.add(new Object());

        t.join();
        // System.out.println("done searcing for files, checking for
        // dupes");
        @SuppressWarnings("unchecked")
        Hashtable<String, LinkedList<String>> dupes = (Hashtable<String, LinkedList<String>>) df.getAllFiles();

        Enumeration<String> e = dupes.keys();
        while (e.hasMoreElements()) {
            String key = (String) e.nextElement();

            Object temp = dupes.get(key);
            @SuppressWarnings("unchecked")
            LinkedList<String> dupeFiles = (LinkedList<String>) temp;

            int num = dupeFiles.size();
            if (num <= 1)
                continue;

            FileEntry first = new FileEntry(dupeFiles.getFirst());
            long singleFileSize = first.getFile().length();
            long wastedSpaceThisFile = (num - 1) * first.getFile().length();
            wastedSpaceTotal += wastedSpaceThisFile;

            System.out.println(num + " files of size " + singleFileSize + " bytes wasting "
                    + wastedSpaceThisFile + " bytes with identical checksums:");

            Iterator<String> i = dupeFiles.iterator();
            while (i.hasNext()) {
                FileEntry fe = new FileEntry(i.next());
                System.out.println("\t" + fe.getFile().getAbsolutePath());
                wastedSpaceThisFile += (fe.getFile().length() / 1024.0d);
            }
        }

        System.out
                .println("End of list - Total wasted space in duplicate files: " + wastedSpaceTotal + " bytes");
    } catch (Exception ex) {
        ex.printStackTrace();
    }

}

From source file:org.apache.hadoop.hdfs.server.namenode.FSImageTransactionalStorageInspector.java

/**
 * @return the image files that have the most recent associated 
 * transaction IDs.  If there are multiple storage directories which 
 * contain equal images, we'll return them all.
 *
 * @throws FileNotFoundException if no images are found.
 */
@Override
List<FSImageFile> getLatestImages() throws IOException {
    LinkedList<FSImageFile> ret = new LinkedList<FSImageFile>();
    for (FSImageFile img : foundImages) {
        if (ret.isEmpty()) {
            ret.add(img);
        } else {
            FSImageFile cur = ret.getFirst();
            if (cur.txId == img.txId) {
                ret.add(img);
            } else if (cur.txId < img.txId) {
                ret.clear();
                ret.add(img);
            }
        }
    }
    if (ret.isEmpty()) {
        throw new FileNotFoundException("No valid image files found");
    }
    return ret;
}
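
In getLatestImages() the head of the list, read with getFirst(), stands for the current maximum transaction ID: ties are appended and a strictly larger value clears the list. A minimal generic sketch of that collect-all-maxima pattern (method and variable names invented for illustration):

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

public class MaxTiesSketch {
    static LinkedList<Long> collectMaxima(List<Long> txIds) {
        LinkedList<Long> best = new LinkedList<>();
        for (Long txId : txIds) {
            if (best.isEmpty()) {
                best.add(txId);
            } else if (best.getFirst().equals(txId)) {
                best.add(txId);   // ties with the current maximum are kept
            } else if (best.getFirst() < txId) {
                best.clear();     // a strictly larger value resets the result
                best.add(txId);
            }
        }
        return best;
    }

    public static void main(String[] args) {
        System.out.println(collectMaxima(Arrays.asList(3L, 7L, 7L, 5L))); // [7, 7]
    }
}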

From source file:org.alfresco.bm.event.AbstractResultService.java

/**
 * Reports the oldest stats for each event and pops them off the list.
 *
 * @param windowMultiple        the number of reporting entries to hold per event
 * @return                      <tt>true</tt> to stop processing
 */
private boolean reportAndCycleStats(Map<String, LinkedList<DescriptiveStatistics>> statsByEventName,
        Map<String, LinkedList<AtomicInteger>> failuresByEventName, long currentWindowStartTime,
        long currentWindowEndTime, int windowMultiple, ResultHandler handler) {
    // Handle stats
    Map<String, DescriptiveStatistics> stats = new HashMap<String, DescriptiveStatistics>(
            statsByEventName.size() + 7);
    for (Map.Entry<String, LinkedList<DescriptiveStatistics>> entry : statsByEventName.entrySet()) {
        // Grab the OLDEST stats from the beginning of the list
        String eventName = entry.getKey();
        LinkedList<DescriptiveStatistics> ll = entry.getValue();
        try {
            DescriptiveStatistics eventStats = ll.getFirst();
            stats.put(eventName, eventStats);
            if (ll.size() == windowMultiple) {
                // We have enough reporting points for the window, so pop the first and add a new to the end
                ll.pop();
            }
            ll.add(new DescriptiveStatistics());
        } catch (NoSuchElementException e) {
            throw new RuntimeException(
                    "An event name did not have a result for the reporting period: " + statsByEventName);
        }
    }

    // Handle failures
    Map<String, Integer> failures = new HashMap<String, Integer>(statsByEventName.size() + 7);
    for (Map.Entry<String, LinkedList<AtomicInteger>> entry : failuresByEventName.entrySet()) {
        // Grab the OLDEST stats from the beginning of the list
        String eventName = entry.getKey();
        LinkedList<AtomicInteger> ll = entry.getValue();
        try {
            AtomicInteger eventFailures = ll.getFirst();
            failures.put(eventName, Integer.valueOf(eventFailures.get()));
            if (ll.size() == windowMultiple) {
                // We have enough reporting points for the window, so pop the first and add a new to the end
                ll.pop();
            }
            ll.add(new AtomicInteger());
        } catch (NoSuchElementException e) {
            throw new RuntimeException("An event name did not have a failure count for the reporting period: "
                    + failuresByEventName);
        }
    }

    boolean stop = false;
    try {
        boolean go = handler.processResult(currentWindowStartTime, currentWindowEndTime, stats, failures);
        stop = !go;
    } catch (Throwable e) {
        logger.error("Exception while making callback.", e);
    }
    return stop;
}
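
Both loops above follow the same rolling-window pattern: read the oldest entry with getFirst(), pop the head once the window holds windowMultiple entries, and append a fresh accumulator. A stripped-down sketch of that pattern using plain integers instead of DescriptiveStatistics (names and the window size are invented for illustration):

import java.util.LinkedList;

public class RollingWindowSketch {
    public static void main(String[] args) {
        final int windowMultiple = 3;                // hypothetical window size
        LinkedList<Integer> window = new LinkedList<>();
        window.add(0);                               // initial accumulator

        for (int period = 1; period <= 5; period++) {
            // Report the OLDEST entry, which sits at the head of the list.
            System.out.println("reporting " + window.getFirst());

            if (window.size() == windowMultiple) {
                window.pop();                        // window is full: drop the reported head
            }
            window.add(period);                      // fresh accumulator for the next period
        }
        System.out.println("window now holds " + window);
    }
}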

From source file:no.norrs.projects.andronary.service.DokproUioService.java

public HashMap<String, List<String>> lookup(String dict) {
    HashMap<String, List<String>> results = new HashMap<String, List<String>>();
    try {
        HttpResponse response;
        switch (selectedDict) {
        case nbNO: {
            response = HttpUtil.GET(
                    new URL(String.format("%s?OPP=%s&ordbok=%s&s=%s",
                            "http://www.dokpro.uio.no/perl/ordboksoek/ordbok.cgi", dict, "bokmaal", "n")),
                    null);
            break;
        }
        case nnNO: {
            response = HttpUtil.GET(
                    new URL(String.format("%s?OPP=%s&ordbok=%s&s=%s",
                            "http://www.dokpro.uio.no/perl/ordboksoek/ordbok.cgi", dict, "nynorsk", "n")),
                    null);
            break;
        }
        default:
            return null;
        }
        String responseString = HttpUtil.readString(response.getEntity().getContent(), "ISO-8859-1");

        //System.out.println(responseString);
        Parser parser = new Parser(responseString);

        NodeList list = parser
                .parse(new AndFilter(new TagNameFilter("TR"), new HasAttributeFilter("valign", "top")));

        // refuses to find colspan 2        Tag (1665[0,1665],1694[0,1694]): TD align="left" colspan="2"   LAME
        // Problems with parser like finding attribute "colspan" etc, makes filtering annoying.
        // Apply ugly results hack begins here:
        //@todo GET A CLEANER SOLUTION ....

        Node[] nodes = list.toNodeArray();

        // Skipping first and 2 last results by the filter, static content not removed by filter.
        for (int i = 1; i < nodes.length - 2; i++) {
            Node test = nodes[i].getFirstChild().getFirstChild();

            if (test.getParent().getNextSibling().getFirstChild().getText().equals("TABLE")) {
                LinkedList<String> dataToAdd = new LinkedList<String>(Arrays
                        .asList(test.getParent().getNextSibling().toPlainTextString().split("&nbsp;&nbsp")));
                String topic = dataToAdd.getFirst().trim();
                dataToAdd.removeFirst();
                results.put(topic, dataToAdd);
            }

        }

        //System.out.println("Results: " + results.size());
    } catch (URISyntaxException ex) {
        Logger.getLogger(DokproUioService.class.getName()).log(Level.SEVERE, null, ex);
    } catch (ParserException ex) {
        Logger.getLogger(DokproUioService.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(DokproUioService.class.getName()).log(Level.SEVERE, null, ex);
    }
    return results;
}

From source file:org.apache.jackrabbit.standalone.cli.JcrParser.java

/**
 * Parse the user's input.
 * @param input
 *        user's input
 * @throws JcrParserException
 *         if the input is illegal
 * @throws ConfigurationException
 *         if the mapped command can't be mapped to a Commons Chain Command
 */
public void parse(String input) throws JcrParserException, ConfigurationException {
    this.cl = null;
    this.cmd = null;

    // Validate input
    if (input == null || input.length() == 0) {
        throw new JcrParserException("exception.parse.input.empty");
    }

    // Extract arguments
    LinkedList args = this.getArguments(input);

    // The first arg is the command name
    String cmdName = (String) args.getFirst();
    args.removeFirst();

    // Get the command line descriptor
    cl = CommandLineFactory.getInstance().getCommandLine(cmdName);

    // populate with the given params
    populate(cl, args);

    // validate the command line
    validate(cl);

    // Create Chain Command
    String impl = cl.getImpl();
    if (impl == null) {
        impl = cl.getName();
    }
    cmd = catalog.getCommand(impl);

    if (cmd == null) {
        throw new JcrParserException("no chain command for name " + impl);
    }

}
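
In parse() the first argument is the command name: it is read with getFirst() and then removed so that only the command's parameters remain. A minimal sketch of that head-as-command pattern (the input string and names are invented for illustration; getArguments() is not reproduced here):

import java.util.Arrays;
import java.util.LinkedList;

public class CommandNameSketch {
    public static void main(String[] args) {
        // Hypothetical stand-in for this.getArguments(input)
        LinkedList<String> arguments =
                new LinkedList<>(Arrays.asList("checkout -b feature/x".split("\\s+")));

        // The first argument is the command name ...
        String cmdName = arguments.getFirst();
        // ... and is removed so that only the command's parameters remain.
        arguments.removeFirst();

        System.out.println("command: " + cmdName);  // checkout
        System.out.println("params:  " + arguments); // [-b, feature/x]
    }
}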

From source file:org.edmcouncil.rdf_toolkit.SesameRdfFormatter.java

/** Main method, but throws exceptions for use from inside other Java code. */
public static void run(String[] args) throws Exception {
    URI baseUri = null;
    String baseUriString = "";
    String uriPattern = null;
    String uriReplacement = null;
    boolean useDtdSubset = false;
    boolean inlineBlankNodes = false;
    boolean inferBaseUri = false;
    URI inferredBaseUri = null;
    String[] leadingComments = null;
    String[] trailingComments = null;
    String indent = "\t";
    SesameSortedRDFWriterFactory.StringDataTypeOptions stringDataTypeOption = SesameSortedRDFWriterFactory.StringDataTypeOptions.implicit;

    // Parse the command line options.
    CommandLineParser parser = new BasicParser();
    CommandLine line = parser.parse(options, args);

    // Print out help, if requested.
    if (line.hasOption("h")) {
        usage(options);
        return;
    }

    // Check if required arguments provided.
    if (!line.hasOption("s")) {
        logger.error("No source (input) file specified, nothing to format.  Use --help for help.");
        return;
    }
    if (!line.hasOption("t")) {
        logger.error("No target (target) file specified, cannot format source.  Use --help for help.");
        return;
    }

    // Check if source files exists.
    String sourceFilePath = line.getOptionValue("s");
    File sourceFile = new File(sourceFilePath);
    if (!sourceFile.exists()) {
        logger.error("Source file does not exist: " + sourceFilePath);
        return;
    }
    if (!sourceFile.isFile()) {
        logger.error("Source file is not a file: " + sourceFilePath);
        return;
    }
    if (!sourceFile.canRead()) {
        logger.error("Source file is not readable: " + sourceFilePath);
        return;
    }

    // Check if target file can be written.
    String targetFilePath = line.getOptionValue("t");
    File targetFile = new File(targetFilePath);
    if (targetFile.exists()) {
        if (!targetFile.isFile()) {
            logger.error("Target file is not a file: " + targetFilePath);
            return;
        }
        if (!targetFile.canWrite()) {
            logger.error("Target file is not writable: " + targetFilePath);
            return;
        }
    }

    // Create directory for target file, if required.
    File targetFileDir = targetFile.getParentFile();
    if (targetFileDir != null) {
        targetFileDir.mkdirs();
        if (!targetFileDir.exists()) {
            logger.error("Target file directory could not be created: " + targetFileDir.getAbsolutePath());
            return;
        }
    }

    // Check if a base URI was provided
    try {
        if (line.hasOption("bu")) {
            baseUriString = line.getOptionValue("bu");
            baseUri = new URIImpl(baseUriString);
            if (baseUriString.endsWith("#")) {
                logger.warn("base URI ends in '#', which is unusual: " + baseUriString);
            }
        }
    } catch (Throwable t) {
        baseUri = null;
        baseUriString = "";
    }

    // Check if there is a valid URI pattern/replacement pair
    if (line.hasOption("up")) {
        if (line.hasOption("ur")) {
            if (line.getOptionValue("up").length() < 1) {
                logger.error("A URI pattern cannot be an empty string.  Use --help for help.");
                return;
            }
            uriPattern = line.getOptionValue("up");
            uriReplacement = line.getOptionValue("ur");
        } else {
            logger.error(
                    "If a URI pattern is specified, a URI replacement must also be specified.  Use --help for help.");
            return;
        }
    } else {
        if (line.hasOption("ur")) {
            logger.error(
                    "If a URI replacement is specified, a URI pattern must also be specified.  Use --help for help.");
            return;
        }
    }

    // Check if a DTD subset should be used for namespace shortening in XML
    if (line.hasOption("dtd")) {
        useDtdSubset = true;
    }

    // Check if blank nodes should be rendered inline
    if (line.hasOption("ibn")) {
        inlineBlankNodes = true;
    }

    // Check if the base URI should be set to be the same as the OWL ontology URI
    if (line.hasOption("ibu")) {
        inferBaseUri = true;
    }

    // Check if there are leading comments
    if (line.hasOption("lc")) {
        leadingComments = line.getOptionValues("lc");
    }

    // Check if there are trailing comments
    if (line.hasOption("tc")) {
        trailingComments = line.getOptionValues("tc");
    }

    // Check if there is a string data type option
    if (line.hasOption("sdt")) {
        stringDataTypeOption = SesameSortedRDFWriterFactory.StringDataTypeOptions
                .getByOptionValue(line.getOptionValue("sdt"));
    }

    // Check if an explicit indent string has been provided
    if (line.hasOption("i")) {
        indent = "ABC".replaceFirst("ABC", line.getOptionValue("i")); // use 'replaceFirst' to get cheap support for escaped characters like tabs
    }

    // Load RDF file.
    SesameSortedRDFWriterFactory.SourceFormats sourceFormat = null;
    if (line.hasOption("sfmt")) {
        sourceFormat = SesameSortedRDFWriterFactory.SourceFormats.getByOptionValue(line.getOptionValue("sfmt"));
    } else {
        sourceFormat = SesameSortedRDFWriterFactory.SourceFormats.auto;
    }
    if (sourceFormat == null) {
        logger.error("Unsupported or unrecognised source format: " + line.getOptionValue("sfmt"));
        return;
    }
    RDFFormat sesameSourceFormat = null;
    if (SesameSortedRDFWriterFactory.SourceFormats.auto.equals(sourceFormat)) {
        sesameSourceFormat = Rio.getParserFormatForFileName(sourceFilePath, RDFFormat.TURTLE);
    } else {
        sesameSourceFormat = sourceFormat.getRDFFormat();
    }
    if (sesameSourceFormat == null) {
        logger.error("Unsupported or unrecognised source format enum: " + sourceFormat);
    }
    Model sourceModel = Rio.parse(new FileInputStream(sourceFile), baseUriString, sesameSourceFormat);

    // Do any URI replacements
    if ((uriPattern != null) && (uriReplacement != null)) {
        Model replacedModel = new TreeModel();
        for (Statement st : sourceModel) {
            Resource replacedSubject = st.getSubject();
            if (replacedSubject instanceof URI) {
                replacedSubject = new URIImpl(
                        replacedSubject.stringValue().replaceFirst(uriPattern, uriReplacement));
            }

            URI replacedPredicate = st.getPredicate();
            replacedPredicate = new URIImpl(
                    replacedPredicate.stringValue().replaceFirst(uriPattern, uriReplacement));

            Value replacedObject = st.getObject();
            if (replacedObject instanceof URI) {
                replacedObject = new URIImpl(
                        replacedObject.stringValue().replaceFirst(uriPattern, uriReplacement));
            }

            Statement replacedStatement = new StatementImpl(replacedSubject, replacedPredicate, replacedObject);
            replacedModel.add(replacedStatement);
        }
        // Do URI replacements in namespaces as well.
        Set<Namespace> namespaces = sourceModel.getNamespaces();
        for (Namespace nmsp : namespaces) {
            replacedModel.setNamespace(nmsp.getPrefix(),
                    nmsp.getName().replaceFirst(uriPattern, uriReplacement));
        }
        sourceModel = replacedModel;
        // This is also the right time to do URI replacement in the base URI, if appropriate
        if (baseUri != null) {
            baseUriString = baseUriString.replaceFirst(uriPattern, uriReplacement);
            baseUri = new URIImpl(baseUriString);
        }
    }

    // Infer the base URI, if requested
    if (inferBaseUri) {
        LinkedList<URI> owlOntologyUris = new LinkedList<URI>();
        for (Statement st : sourceModel) {
            if ((SesameSortedRDFWriter.rdfType.equals(st.getPredicate()))
                    && (SesameSortedRDFWriter.owlOntology.equals(st.getObject()))
                    && (st.getSubject() instanceof URI)) {
                owlOntologyUris.add((URI) st.getSubject());
            }
        }
        if (owlOntologyUris.size() >= 1) {
            Comparator<URI> uriComparator = new Comparator<URI>() {
                @Override
                public int compare(URI uri1, URI uri2) {
                    return uri1.toString().compareTo(uri2.toString());
                }
            };
            owlOntologyUris.sort(uriComparator);
            inferredBaseUri = owlOntologyUris.getFirst();
        }
    }

    // Write sorted RDF file.
    SesameSortedRDFWriterFactory.TargetFormats targetFormat = null;
    if (line.hasOption("tfmt")) {
        targetFormat = SesameSortedRDFWriterFactory.TargetFormats.getByOptionValue(line.getOptionValue("tfmt"));
    } else {
        targetFormat = SesameSortedRDFWriterFactory.TargetFormats.turtle;
    }
    if (targetFormat == null) {
        logger.error("Unsupported or unrecognised target format: " + line.getOptionValue("tfmt"));
        return;
    }
    SesameSortedRDFWriter.ShortUriPreferences shortUriPref = null;
    if (line.hasOption("sup")) {
        shortUriPref = SesameSortedRDFWriter.ShortUriPreferences.getByOptionValue(line.getOptionValue("sup"));
    } else {
        shortUriPref = SesameSortedRDFWriter.ShortUriPreferences.prefix;
    }
    if (shortUriPref == null) {
        logger.error("Unsupported or unrecognised short URI preference: " + line.getOptionValue("sup"));
        return;
    }

    Writer targetWriter = new OutputStreamWriter(new FileOutputStream(targetFile), "UTF-8");
    SesameSortedRDFWriterFactory factory = new SesameSortedRDFWriterFactory(targetFormat);
    Map<String, Object> writerOptions = new HashMap<String, Object>();
    if (baseUri != null) {
        writerOptions.put("baseUri", baseUri);
    } else if (inferBaseUri && (inferredBaseUri != null)) {
        writerOptions.put("baseUri", inferredBaseUri);
    }
    if (indent != null) {
        writerOptions.put("indent", indent);
    }
    if (shortUriPref != null) {
        writerOptions.put("shortUriPref", shortUriPref);
    }
    writerOptions.put("useDtdSubset", useDtdSubset);
    writerOptions.put("inlineBlankNodes", inlineBlankNodes);
    writerOptions.put("leadingComments", leadingComments);
    writerOptions.put("trailingComments", trailingComments);
    writerOptions.put("stringDataTypeOption", stringDataTypeOption);
    RDFWriter rdfWriter = factory.getWriter(targetWriter, writerOptions);
    Rio.write(sourceModel, rdfWriter);
    targetWriter.flush();
    targetWriter.close();
}
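
When several owl:Ontology subjects are found, the list is sorted and getFirst() then yields the lexicographically smallest URI as the inferred base URI. A minimal sketch of that sort-then-take-head selection (URIs and names invented for illustration); with the same comparator it is equivalent to Collections.min:

import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;

public class SmallestUriSketch {
    public static void main(String[] args) {
        LinkedList<String> ontologyUris = new LinkedList<>(Arrays.asList(
                "http://example.org/ontology/b",
                "http://example.org/ontology/a"));

        Comparator<String> uriComparator = Comparator.naturalOrder();

        // Sort ascending, then the head of the list is the smallest URI.
        ontologyUris.sort(uriComparator);
        String inferredBase = ontologyUris.getFirst();

        System.out.println(inferredBase); // http://example.org/ontology/a
        System.out.println(inferredBase.equals(Collections.min(ontologyUris, uriComparator))); // true
    }
}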