List of usage examples for java.util LinkedList addFirst
public void addFirst(E e)
From source file: com.yahoo.bard.webservice.util.SimplifiedIntervalList.java
/**
 * Backs elements off the tail of the list until the new interval belongs at the
 * end, merge-appends the interval, then merge-appends the displaced elements
 * back in their original order.
 *
 * @param interval  the interval to merge into this list
 */
private void mergeInner(Interval interval) {
    // Pop tail elements that start after the new interval, preserving their order.
    LinkedList<Interval> displaced = new LinkedList<>();
    for (Interval tail = peekLast();
            tail != null && interval.getStart().isBefore(tail.getStart());
            tail = peekLast()) {
        displaced.addFirst(tail);
        removeLast();
    }
    appendWithMerge(interval);
    // Re-append everything that was backed off, merging where intervals abut or overlap.
    for (Interval backedOff : displaced) {
        appendWithMerge(backedOff);
    }
}
From source file: org.apache.xmlgraphics.image.loader.pipeline.PipelineFactory.java
private ImageProviderPipeline findPipeline(DefaultEdgeDirectory dir, ImageFlavor originFlavor, ImageRepresentation destination) { DijkstraAlgorithm dijkstra = new DijkstraAlgorithm(dir); ImageRepresentation origin = new ImageRepresentation(originFlavor); dijkstra.execute(origin, destination); if (log.isTraceEnabled()) { log.trace("Lowest penalty: " + dijkstra.getLowestPenalty(destination)); }//from w w w . j a va 2s . c om Vertex prev = destination; Vertex pred = dijkstra.getPredecessor(destination); if (pred == null) { if (log.isTraceEnabled()) { log.trace("No route found!"); } return null; } else { LinkedList stops = new LinkedList(); while ((pred = dijkstra.getPredecessor(prev)) != null) { ImageConversionEdge edge = (ImageConversionEdge) dir.getBestEdge(pred, prev); stops.addFirst(edge); prev = pred; } ImageProviderPipeline pipeline = new ImageProviderPipeline(manager.getCache(), null); Iterator iter = stops.iterator(); while (iter.hasNext()) { ImageConversionEdge edge = (ImageConversionEdge) iter.next(); pipeline.addConverter(edge.getImageConverter()); } return pipeline; } }
From source file: net.sourceforge.subsonic.controller.MainController.java
private List<MediaFile> getAncestors(MediaFile dir) throws IOException { LinkedList<MediaFile> result = new LinkedList<MediaFile>(); try {/*from w ww .ja v a 2s.c om*/ MediaFile parent = mediaFileService.getParentOf(dir); while (parent != null && !mediaFileService.isRoot(parent)) { result.addFirst(parent); parent = mediaFileService.getParentOf(parent); } } catch (SecurityException x) { // Happens if Podcast directory is outside music folder. } return result; }
From source file: org.dcm4chee.archive.conf.defaults.DeepEquals.java
/** * Deeply compare two Map instances. After quick short-circuit tests, this method * uses a temporary Map so that this method can run in O(N) time. * @param dualKey Maps/* w ww. j a va 2s. co m*/ * @param stack add items to compare to the Stack (Stack versus recursion) * @param visited Set containing items that have already been compared, to prevent cycles. * @return false if the Maps are for certain not equals. 'true' indicates that 'on the surface' the maps * are equal, however, it will place the contents of the Maps on the stack for further comparisons. */ private static boolean compareUnorderedMap(DualKey dualKey, LinkedList stack, Set visited) { Map map1 = (Map) dualKey._key1; Map map2 = (Map) dualKey._key2; // Same instance check already performed... if (map1.size() != map2.size()) { return false; } Map fastLookup = new HashMap(); for (Map.Entry entry : (Set<Map.Entry>) map2.entrySet()) { fastLookup.put(entry.getKey(), entry); } for (Map.Entry entry : (Set<Map.Entry>) map1.entrySet()) { Map.Entry other = (Map.Entry) fastLookup.get(entry.getKey()); if (other == null) { return false; } DualKey dk = new DualKey(entry.getKey(), other.getKey(), entry.getKey().toString(), dualKey); if (!visited.contains(dk)) { // Push keys for further comparison stack.addFirst(dk); } dk = new DualKey(entry.getValue(), other.getValue(), entry.getKey().toString(), dualKey); if (!visited.contains(dk)) { // Push values for further comparison stack.addFirst(dk); } } return true; }
From source file: org.apache.sling.jcr.resource.internal.helper.jcr.JcrItemResourceFactory.java
private Item getHistoricItem(Item item, String versionSpecifier) throws RepositoryException { Item currentItem = item;//from w w w . j av a 2 s. c o m LinkedList<String> relPath = new LinkedList<String>(); Node version = null; while (!"/".equals(currentItem.getPath())) { if (isVersionable(currentItem)) { version = getFrozenNode((Node) currentItem, versionSpecifier); break; } else { relPath.addFirst(currentItem.getName()); currentItem = currentItem.getParent(); } } if (version != null) { return getSubitem(version, StringUtils.join(relPath.iterator(), '/')); } return null; }
From source file: org.apache.impala.infra.tableflattener.SchemaFlattener.java
private void createChildDataset(String name, Schema srcSchema, LinkedList<Field> parentFields, FlattenedSchema parentDataset) { // Ensure that the parent schema has an id field so the child can reference the // parent. A single id field is sufficient. if (parentFields.isEmpty() || !parentFields.getFirst().name().equals(parentDataset.getIdFieldName())) { parentFields.addFirst(SchemaUtil.createField(parentDataset.getIdFieldName(), Type.LONG)); }/*from w w w.ja v a2s . co m*/ FlattenedSchema childDataset = new FlattenedSchema(name, parentDataset); LinkedList<Field> fields = Lists.newLinkedList(); String parentIdFieldName = parentDataset.getName() + childDataset.getNameSeparator() + childDataset.getIdFieldName(); Field parentIdField = SchemaUtil.createField(parentIdFieldName, Type.LONG); childDataset.setParentIdField(parentIdField); fields.add(parentIdField); Schema valueSchema; if (srcSchema.getType() == Type.ARRAY) { fields.add(SchemaUtil.createField(childDataset.getArrayIdxFieldName(), Type.LONG)); valueSchema = srcSchema.getElementType(); } else { Preconditions.checkState(srcSchema.getType() == Type.MAP); fields.add(SchemaUtil.createField(childDataset.getMapKeyFieldName(), Type.STRING)); valueSchema = srcSchema.getValueType(); } if (SchemaUtil.isSimpleType(valueSchema)) { fields.add(SchemaUtil.createField(childDataset.getCollectionValueFieldName(), valueSchema)); } else { if (SchemaUtil.isNullable(valueSchema)) { fields.add(SchemaUtil.createField( childDataset.getIsNullFieldName(childDataset.getCollectionValueFieldName()), Type.BOOLEAN)); valueSchema = SchemaUtil.reduceUnionToNonNull(valueSchema); } if (SchemaUtil.requiresChildDataset(valueSchema)) { createChildDataset(childDataset.getChildOfCollectionName(), valueSchema, fields, childDataset); } else { addRecordFields(valueSchema, childDataset, fields, childDataset.getCollectionValueFieldName() + childDataset.getNameSeparator()); } } finishCreatingDataset(fields, childDataset); }
From source file: net.spfbl.data.Generic.java
/**
 * Finds the generic-address entry (suffix or mask) that matches {@code token}.
 *
 * Hostnames are checked suffix-by-suffix against MAP, then via their
 * domain/host masks; e-mail addresses are reduced to their domain part and
 * retried recursively. As a last step, a REGEX criterion looked up from the
 * accumulated suffix list may be returned, and for masked hostnames a matching
 * sub-mask may be registered as a new GENERIC entry.
 *
 * NOTE(review): the exact semantics of MAP, REGEX and the '#'/'.H.' mask
 * placeholders are defined elsewhere in this class — assumptions below are
 * inferred from usage and should be confirmed against the rest of the file.
 *
 * @param token a hostname, an e-mail address, or a plain token; may be null
 * @return the matching generic entry (suffix, mask, or regex), or null
 */
public static String findGeneric(String token) {
    String mask = null;
    // Candidate suffixes, shortest last, used as the REGEX lookup key.
    LinkedList<String> regexList = new LinkedList<String>();
    if (token == null) {
        return null;
    } else if (Domain.isHostname(token)) {
        token = Domain.normalizeHostname(token, true);
        String host = token;
        // Try every dot-suffix of the hostname against the exact-match map.
        do {
            int index = host.indexOf('.') + 1;
            host = host.substring(index);
            String token2 = '.' + host;
            if (MAP.containsGeneric(token2)) {
                return token2;
            }
            regexList.addFirst(token2);
        } while (host.contains("."));
        // Then try the domain converted to its mask form.
        if ((host = convertDomainToMask(token)) != null) {
            if (MAP.containsGeneric(host)) {
                return host;
            }
        }
        // Then try every dot-suffix of the host's mask form.
        if ((host = convertHostToMask(token)) != null) {
            mask = host;
            do {
                int index = host.indexOf('.') + 1;
                host = host.substring(index);
                String token2 = '.' + host;
                if (MAP.containsGeneric(token2)) {
                    return token2;
                }
                regexList.addFirst(token2);
            } while (host.contains("."));
        }
    } else if (token.contains("@")) {
        // E-mail address: retry with the domain part only.
        int index = token.lastIndexOf('@') + 1;
        token = token.substring(index);
        token = Domain.normalizeHostname(token, true);
        return findGeneric(token);
    } else {
        regexList.add(token);
    }
    try {
        // Check for a REGEX criterion matching the collected suffixes.
        String regex;
        if ((regex = REGEX.get(regexList)) != null) {
            if (mask != null) {
                // The stored value appears to be "key=pattern"; the pattern
                // starts after '=' — TODO confirm against REGEX's storage format.
                int index = regex.indexOf('=') + 1;
                if (!regex.contains("[0-9a-f]+") && !regex.contains("[0-9a-z]+")
                        && !regex.contains("[a-z]+")) {
                    Pattern pattern = Pattern.compile(regex.substring(index));
                    // Walk the mask's dot-suffixes from longest to shortest.
                    index = mask.length();
                    while ((index = mask.lastIndexOf('.', index - 1)) >= 0) {
                        String subMask = mask.substring(index);
                        if (Domain.isOfficialTLD(subMask)) {
                            // Do nothing.
                        } else if (Domain.isDomain(subMask)) {
                            // Domain-level sub-mask: match with '#' digits
                            // substituted, and register it on success.
                            if ((subMask = Generic.convertDomainToMask(subMask)) != null) {
                                Matcher matcher = pattern.matcher(subMask.replace('#', '0'));
                                if (matcher.matches()) {
                                    if (addGenericExact(subMask)) {
                                        Block.clear(subMask.replace('#', '0'), "GENERIC");
                                        Server.logDebug(
                                                "new GENERIC '" + subMask + "' added by '" + regex + "'.");
                                        return subMask;
                                    }
                                }
                            }
                        } else {
                            // Host-level sub-mask: also substitute the ".H."
                            // placeholder before matching.
                            Matcher matcher = pattern.matcher(subMask.replace('#', '0').replace(".H.", ".0a."));
                            if (matcher.matches()) {
                                if (addGenericExact(subMask)) {
                                    Block.clear(subMask.replace('#', '0').replace(".H.", ".0a."), "GENERIC");
                                    Server.logDebug("new GENERIC '" + subMask + "' added by '" + regex + "'.");
                                    return subMask;
                                }
                                // Matched but not added: stop scanning sub-masks.
                                break;
                            }
                        }
                    }
                }
            }
            return regex;
        }
    } catch (Exception ex) {
        Server.logError(ex);
    }
    return null;
}
From source file: org.dcm4che3.conf.core.misc.DeepEquals.java
/** * Deeply compare to Arrays []. Both arrays must be of the same type, same length, and all * elements within the arrays must be deeply equal in order to return true. * @param array1 [] type (Object[], String[], etc.) * @param array2 [] type (Object[], String[], etc.) * @param stack add items to compare to the Stack (Stack versus recursion) * @param visited Set of objects already compared (prevents cycles) * @return true if the two arrays are the same length and contain deeply equivalent items. */// w w w .j a va2 s . c o m private static boolean compareArrays(Object array1, Object array2, LinkedList stack, Set visited) { // Same instance check already performed... int len = Array.getLength(array1); if (len != Array.getLength(array2)) { return false; } // try sorting /*if (len >0 ) { if (Array.get(array1, 0) instanceof Comparable) { Class<?> c = Array.get(array1, 0).getClass(); if (ClassUtils.isPrimitiveOrWrapper(c)) { } else { Arrays.sort((Object[]) array1); Arrays.sort((Object[]) array2); } } }*/ for (int i = 0; i < len; i++) { DualKey dk = new DualKey(Array.get(array1, i), Array.get(array2, i)); if (!visited.contains(dk)) { // push contents for further comparison stack.addFirst(dk); } } return true; }
From source file: com.hp.alm.ali.idea.ui.chooser.FilterableTree.java
public void handle(HierarchicalEntityModel model, String entityType, String filter) { EntityQuery query = new EntityQuery(entityType); query.addColumn("parent-id", 75); query.setValue("name", "'*" + filter + "*'"); LinkedList<List<Entity>> queue = new LinkedList<List<Entity>>(); while (query != null) { List<Entity> todo = model.queryForNodes(query); query = handle(model, todo);//from w w w . j a v a2s . c om if (!todo.isEmpty()) { // wait until we know parents queue.addFirst(todo); } } for (List<Entity> todo : queue) { for (Entity entity : todo) { if (model.getEntityNode(entity.getId(), entity.getType()) == null) { int parentId = Integer.valueOf(entity.getPropertyValue("parent-id")); EntityNode parent = model.getEntityNode(parentId, Metadata.getParentEntity(entity.getType())); if (parent != null) { handleNode(model, parent, entity); } } } } }
From source file: com.asakusafw.directio.tools.DirectIoDelete.java
@Override public int run(String[] args) throws Exception { LinkedList<String> argList = new LinkedList<>(); Collections.addAll(argList, args); boolean recursive = false; while (argList.isEmpty() == false) { String arg = argList.removeFirst(); if (arg.equals("-r") || arg.equals("-recursive")) { //$NON-NLS-1$ //$NON-NLS-2$ recursive = true;/* w w w.j a v a 2 s . c om*/ } else if (arg.equals("--")) { //$NON-NLS-1$ break; } else { argList.addFirst(arg); break; } } if (argList.size() < 2) { LOG.error(MessageFormat.format("Invalid arguments: {0}", Arrays.toString(args))); System.err.println(MessageFormat.format("Usage: hadoop {0} -conf <datasource-conf.xml> [-r] " + "base-path resource-pattern [resource-pattern [...]]", getClass().getName())); return 1; } String path = argList.removeFirst(); List<FilePattern> patterns = new ArrayList<>(); for (String arg : argList) { patterns.add(FilePattern.compile(arg)); } if (repository == null) { repository = HadoopDataSourceUtil.loadRepository(getConf()); } String basePath = repository.getComponentPath(path); DirectDataSource source = repository.getRelatedDataSource(path); for (FilePattern pattern : patterns) { source.delete(basePath, pattern, recursive, new Counter()); } return 0; }