List of usage examples for java.util.LinkedList.isEmpty()
boolean isEmpty();
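isEmpty() returns true if and only if the list contains no elements. In the examples below it almost always serves as the loop guard while a LinkedList is drained as a queue or work stack. A minimal, self-contained sketch of that pattern (the class name and the string values are illustrative only, not taken from the examples):

import java.util.LinkedList;

public class IsEmptyDemo {
    public static void main(String[] args) {
        LinkedList<String> queue = new LinkedList<>();
        System.out.println(queue.isEmpty()); // true: nothing added yet

        queue.add("alpha");
        queue.add("beta");
        System.out.println(queue.isEmpty()); // false: two elements present

        // the common pattern from the examples below: drain until empty
        while (!queue.isEmpty()) {
            System.out.println(queue.poll());
        }
        System.out.println(queue.isEmpty()); // true again after draining
    }
}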
From source file:de.cubeisland.engine.core.util.McUUID.java
private static void getProfiles(List<Profile> profiles, LinkedList<String> players) {
    int amount = players.size();
    CubeEngine.getLog().debug("Query UUID for: " + StringUtils.implode(",", players));
    // drain the player queue, building a JSON array of name/agent criteria
    ArrayNode node = mapper.createArrayNode();
    while (!players.isEmpty()) {
        ObjectNode criteria = mapper.createObjectNode();
        criteria.put("name", players.poll());
        criteria.put("agent", AGENT);
        node.add(criteria);
    }
    int page = 1;
    try {
        CubeEngine.getLog().info("Query Mojang for {} UUIDs", amount);
        // post the query page by page until all requested profiles have been read
        while (amount > 0) {
            int read = readProfilesFromInputStream(postQuery(node, page++).getInputStream(), profiles);
            if (read == 0) {
                CubeEngine.getLog().info("No Answer for {} players", amount);
            } else if (read != amount) {
                amount -= read;
                continue;
            }
            return;
        }
    } catch (IOException e) {
        CubeEngine.getLog().error(e, "Could not retrieve UUID for given names!");
    }
}
From source file:org.accelio.jxio.jxioConnection.impl.JxioResourceManager.java
public static MsgPool getMsgPool(int size, int in, int out) {
    MsgPool pool = null;
    String key = getMsgPoolKey(size, in, out);
    synchronized (msgPools) {
        LinkedList<MsgPool> list = msgPools.get(key);
        if (list == null) {
            list = new LinkedList<MsgPool>();
            msgPools.put(key, list);
            pool = new MsgPool(size, in, out);
        } else if (list.isEmpty()) {
            pool = new MsgPool(size, in, out);
        } else {
            pool = list.poll();
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("returning " + pool);
    }
    return pool;
}
From source file:de.hasait.clap.impl.CLAPClassNode.java
private static <A extends Annotation> A findAnnotation(final Class<?> pClass, final Class<A> pAnnotationClass) {
    // breadth-first search over the superclass and interface hierarchy for the annotation
    final LinkedList<Class<?>> queue = new LinkedList<Class<?>>();
    queue.add(pClass);
    while (!queue.isEmpty()) {
        final Class<?> clazz = queue.removeFirst();
        if (clazz != null) {
            final A result = clazz.getAnnotation(pAnnotationClass);
            if (result != null) {
                return result;
            }
            queue.add(clazz.getSuperclass());
            for (final Class<?> interfaze : clazz.getInterfaces()) {
                queue.add(interfaze);
            }
        }
    }
    return null;
}
From source file:mx.unam.ecologia.gye.coalescence.model.UniParentalGene.java
public static final void traverse(UniParentalGene upgene, UniParentalGeneVisitor visitor) {
    //log.debug("traverse()");
    LinkedList<UniParentalGene> queue = new LinkedList<UniParentalGene>();
    if (upgene.isAncestor()) {
        queue.add(upgene);
    } else {
        return; //nothing to be done!
    }
    while (!queue.isEmpty()) {
        UniParentalGene upg = queue.removeFirst();
        visitor.visit(upg);
        //log.debug("traverse()::Visited " + upg.toNHXString());
        if (upg.m_LDescendant != null) {
            queue.add(upg.m_LDescendant);
        }
        if (upg.m_RDescendant != null) {
            queue.add(upg.m_RDescendant);
        }
    }
}
From source file:Main.java
public static LinkedList<File> listLinkedFiles(String strPath) {
    LinkedList<File> list = new LinkedList<File>();
    File dir = new File(strPath);
    if (!dir.isDirectory()) {
        return null;
    }
    File[] file = dir.listFiles();
    for (int i = 0; i < file.length; i++) {
        if (file[i].isDirectory())
            list.add(file[i]);
        else
            System.out.println(file[i].getAbsolutePath());
    }
    File tmp;
    while (!list.isEmpty()) {
        tmp = list.removeFirst();
        if (tmp.isDirectory()) {
            file = tmp.listFiles();
            if (file == null)
                continue;
            for (int i = 0; i < file.length; i++) {
                if (file[i].isDirectory())
                    list.add(file[i]);
                else
                    System.out.println(file[i].getAbsolutePath());
            }
        } else {
            System.out.println(tmp.getAbsolutePath());
        }
    }
    return list;
}
From source file:org.xchain.namespaces.jsl.AbstractTemplateCommand.java
protected static ElementOutputState[] getElementOutputState() {
    LinkedList<ElementOutputState[]> stack = elementOutputStateStackTL.get();
    if (stack == null || stack.isEmpty()) {
        throw new IllegalStateException("getElementOutputState() called outside of execute method.");
    }
    return stack.getFirst();
}
From source file:com.tamingtext.classifier.bayes.ExtractTrainingData.java
/**
 * Extract training data from a lucene index.
 * <p>
 * Iterates over documents in the lucene index; the values in the categoryFields are inspected and, if found to
 * contain any of the strings found in the category file, a training data item will be emitted, assigned to the
 * matching category and containing the terms found in the fields listed in textFields. Output is written to
 * the output directory with one file per category.
 * <p>
 * The category file contains one line per category; each line contains a number of whitespace-delimited strings.
 * The first string on each line is the category name, while subsequent strings will be used to identify documents
 * that belong in that category.
 * <p>
 * 'Technology Computers Macintosh' will cause documents that contain either 'Technology', 'Computers' or 'Macintosh'
 * in one of their categoryFields to be assigned to the 'Technology' category.
 *
 * @param indexDir
 *          directory of lucene index to extract from
 * @param maxDocs
 *          the maximum number of documents to process.
 * @param categoryFile
 *          file containing category strings to extract
 * @param categoryFields
 *          list of fields to match against category data
 * @param textFields
 *          list of fields containing terms to extract
 * @param outputDir
 *          directory to write output to
 * @throws IOException
 */
public static void extractTraininingData(File indexDir, File categoryFile, Collection<String> categoryFields,
        Collection<String> textFields, File outputDir, boolean useTermVectors) throws IOException {

    log.info("Index dir: " + indexDir);
    log.info("Category file: " + categoryFile);
    log.info("Output dir: " + outputDir);
    log.info("Category fields: " + categoryFields.toString());
    log.info("Text fields: " + textFields.toString());
    log.info("Use Term Vectors?: " + useTermVectors);

    OpenObjectIntHashMap<String> categoryCounts = new OpenObjectIntHashMap<String>();
    Map<String, List<String>> categories = readCategoryFile(categoryFile);

    Directory dir = FSDirectory.open(indexDir);
    IndexReader reader = IndexReader.open(dir, true);
    int max = reader.maxDoc();

    StringBuilder buf = new StringBuilder();

    for (int i = 0; i < max; i++) {
        if (!reader.isDeleted(i)) {
            Document d = reader.document(i);
            String category = null;
            // determine whether any of the fields in this document contain a
            // category in the category list
            fields: for (String field : categoryFields) {
                for (Field f : d.getFields(field)) {
                    if (f.isStored() && !f.isBinary()) {
                        String fieldValue = f.stringValue().toLowerCase();
                        for (String cat : categories.keySet()) {
                            List<String> cats = categories.get(cat);
                            for (String c : cats) {
                                if (fieldValue.contains(c)) {
                                    category = cat;
                                    break fields;
                                }
                            }
                        }
                    }
                }
            }

            if (category == null)
                continue;

            // append the terms from each of the textFields to the training data for this document.
            buf.setLength(0);
            for (String field : textFields) {
                if (useTermVectors) {
                    appendVectorTerms(buf, reader.getTermFreqVector(i, field));
                } else {
                    appendFieldText(buf, d.getField(field));
                }
            }
            getWriterForCategory(outputDir, category).printf("%s\t%s\n", category, buf.toString());
            categoryCounts.adjustOrPutValue(category, 1, 1);
        }
    }

    if (log.isInfoEnabled()) {
        StringBuilder b = new StringBuilder();
        b.append("\nCategory document counts:\n");
        LinkedList<String> keyList = new LinkedList<String>();
        categoryCounts.keysSortedByValue(keyList);
        String key;
        while (!keyList.isEmpty()) {
            key = keyList.removeLast();
            b.append(categoryCounts.get(key)).append('\t').append(key).append('\n');
        }
        log.info(b.toString());
    }
}
From source file:org.xchain.namespaces.jsl.AbstractTemplateCommand.java
/**
 * Returns the command execute state array for the current thread.
 */
protected static CommandExecutionState[] getCommandExecutionState() {
    LinkedList<CommandExecutionState[]> stack = commandExecutionStateStackTL.get();
    if (stack == null || stack.isEmpty()) {
        throw new IllegalStateException("getCommandExecutionState() called outside of execute method.");
    }
    return stack.getFirst();
}
From source file:com.github.dozermapper.core.util.MappingUtils.java
@SuppressWarnings("unchecked")
public static List<Class<?>> getInterfaceHierarchy(Class<?> srcClass, BeanContainer beanContainer) {
    final List<Class<?>> result = new LinkedList<>();
    Class<?> realClass = getRealClass(srcClass, beanContainer);

    final LinkedList<Class> interfacesToProcess = new LinkedList<>();

    Class[] interfaces = realClass.getInterfaces();
    interfacesToProcess.addAll(Arrays.asList(interfaces));

    while (!interfacesToProcess.isEmpty()) {
        Class<?> iface = interfacesToProcess.remove();
        if (!result.contains(iface)) {
            result.add(iface);
            for (Class subiface : iface.getInterfaces()) {
                // if we haven't processed this interface yet then add it to be processed
                if (!result.contains(subiface)) {
                    interfacesToProcess.add(subiface);
                }
            }
        }
    }
    return result;
}
From source file:org.dozer.util.MappingUtils.java
@SuppressWarnings("unchecked")
public static List<Class<?>> getInterfaceHierarchy(Class<?> srcClass) {
    final List<Class<?>> result = new LinkedList<Class<?>>();
    Class<?> realClass = getRealClass(srcClass);

    final LinkedList<Class> interfacesToProcess = new LinkedList<Class>();

    Class[] interfaces = realClass.getInterfaces();
    interfacesToProcess.addAll(Arrays.asList(interfaces));

    while (!interfacesToProcess.isEmpty()) {
        Class<?> iface = interfacesToProcess.remove();
        if (!result.contains(iface)) {
            result.add(iface);
            for (Class subiface : iface.getInterfaces()) {
                // if we haven't processed this interface yet then add it to be processed
                if (!result.contains(subiface)) {
                    interfacesToProcess.add(subiface);
                }
            }
        }
    }
    return result;
}