List of usage examples for java.util.LinkedList.removeFirst()
public E removeFirst()
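removeFirst() removes and returns the first element (the head) of the list, and throws NoSuchElementException if the list is empty. Before the project examples below, here is a minimal self-contained sketch of that basic behavior (the RemoveFirstDemo class name is illustrative only):

import java.util.LinkedList;
import java.util.NoSuchElementException;

public class RemoveFirstDemo {
    public static void main(String[] args) {
        LinkedList<String> queue = new LinkedList<>();
        queue.add("a");
        queue.add("b");

        // removeFirst() removes and returns the head of the list.
        String head = queue.removeFirst();
        System.out.println(head + " / remaining: " + queue); // prints: a / remaining: [b]

        // On an empty list it throws NoSuchElementException.
        queue.clear();
        try {
            queue.removeFirst();
        } catch (NoSuchElementException e) {
            System.out.println("empty list: " + e);
        }
    }
}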
From source file:org.eclim.plugin.jdt.command.impl.ImplCommand.java
/**
 * Gets the type to be edited.
 *
 * @param src The ICompilationUnit of the source file to edit.
 * @param commandLine The command line.
 * @return The IType to be edited.
 */
protected IType getType(ICompilationUnit src, CommandLine commandLine) throws Exception {
    IType type = null;
    // If a qualified name for the type being modified was supplied
    if (commandLine.hasOption(Options.TYPE_OPTION)) {
        IJavaProject javaProject = src.getJavaProject();
        String typeFQN = commandLine.getValue(Options.TYPE_OPTION);
        int indexOfDollar = typeFQN.indexOf("$");
        // If we are dealing with an anonymous inner class, findType does not work,
        // so let's find it starting at the class containing the anonymous class.
        if (indexOfDollar > 0) {
            String primaryTypeFQN = typeFQN.substring(0, indexOfDollar);
            IType primaryType = javaProject.findType(primaryTypeFQN);

            LinkedList<IJavaElement> todo = new LinkedList<IJavaElement>();
            todo.add(primaryType);
            while (!todo.isEmpty()) {
                IJavaElement element = todo.removeFirst();
                if (element instanceof IType) {
                    IType tempType = (IType) element;
                    String name = tempType.getFullyQualifiedName();
                    if (name.equals(typeFQN)) {
                        type = tempType;
                        break;
                    }
                }
                if (element instanceof IParent) {
                    for (IJavaElement child : ((IParent) element).getChildren()) {
                        todo.add(child);
                    }
                }
            }
        // Else it is a normal class/interface, so findType works
        } else {
            type = javaProject.findType(typeFQN);
        }
    // If not, we need to find it based on the current selection
    } else {
        type = TypeUtils.getType(src, getOffset(commandLine));
    }
    return type;
}
From source file:com.asakusafw.runtime.stage.launcher.LauncherOptionsParser.java
private String consumeApplicationClassName(LinkedList<String> rest) {
    if (rest.isEmpty()) {
        throw new IllegalArgumentException("the first argument must be target application class name");
    }
    return rest.removeFirst();
}
From source file:org.gephi.statistics.plugin.GraphDistance.java
public void execute(HierarchicalGraph hgraph, AttributeModel attributeModel) {
    isCanceled = false;

    AttributeTable nodeTable = attributeModel.getNodeTable();
    AttributeColumn eccentricityCol = nodeTable.getColumn(ECCENTRICITY);
    AttributeColumn closenessCol = nodeTable.getColumn(CLOSENESS);
    AttributeColumn betweenessCol = nodeTable.getColumn(BETWEENNESS);
    if (eccentricityCol == null) {
        eccentricityCol = nodeTable.addColumn(ECCENTRICITY, "Eccentricity", AttributeType.DOUBLE,
                AttributeOrigin.COMPUTED, new Double(0));
    }
    if (closenessCol == null) {
        closenessCol = nodeTable.addColumn(CLOSENESS, "Closeness Centrality", AttributeType.DOUBLE,
                AttributeOrigin.COMPUTED, new Double(0));
    }
    if (betweenessCol == null) {
        betweenessCol = nodeTable.addColumn(BETWEENNESS, "Betweenness Centrality", AttributeType.DOUBLE,
                AttributeOrigin.COMPUTED, new Double(0));
    }

    hgraph.readLock();

    N = hgraph.getNodeCount();

    betweenness = new double[N];
    eccentricity = new double[N];
    closeness = new double[N];
    diameter = 0;
    avgDist = 0;
    shortestPaths = 0;
    radius = Integer.MAX_VALUE;
    HashMap<Node, Integer> indicies = new HashMap<Node, Integer>();
    int index = 0;
    for (Node s : hgraph.getNodes()) {
        indicies.put(s, index);
        index++;
    }

    Progress.start(progress, hgraph.getNodeCount());
    int count = 0;
    for (Node s : hgraph.getNodes()) {
        Stack<Node> S = new Stack<Node>();

        LinkedList<Node>[] P = new LinkedList[N];
        double[] theta = new double[N];
        int[] d = new int[N];
        for (int j = 0; j < N; j++) {
            P[j] = new LinkedList<Node>();
            theta[j] = 0;
            d[j] = -1;
        }

        int s_index = indicies.get(s);

        theta[s_index] = 1;
        d[s_index] = 0;

        LinkedList<Node> Q = new LinkedList<Node>();
        Q.addLast(s);
        while (!Q.isEmpty()) {
            Node v = Q.removeFirst();
            S.push(v);
            int v_index = indicies.get(v);

            EdgeIterable edgeIter = null;
            if (isDirected) {
                edgeIter = ((HierarchicalDirectedGraph) hgraph).getOutEdgesAndMetaOutEdges(v);
            } else {
                edgeIter = hgraph.getEdgesAndMetaEdges(v);
            }

            for (Edge edge : edgeIter) {
                Node reachable = hgraph.getOpposite(v, edge);

                int r_index = indicies.get(reachable);
                if (d[r_index] < 0) {
                    Q.addLast(reachable);
                    d[r_index] = d[v_index] + 1;
                }
                if (d[r_index] == (d[v_index] + 1)) {
                    theta[r_index] = theta[r_index] + theta[v_index];
                    P[r_index].addLast(v);
                }
            }
        }
        double reachable = 0;
        for (int i = 0; i < N; i++) {
            if (d[i] > 0) {
                avgDist += d[i];
                eccentricity[s_index] = (int) Math.max(eccentricity[s_index], d[i]);
                closeness[s_index] += d[i];
                diameter = Math.max(diameter, d[i]);
                reachable++;
            }
        }

        radius = (int) Math.min(eccentricity[s_index], radius);

        if (reachable != 0) {
            closeness[s_index] /= reachable;
        }

        shortestPaths += reachable;

        double[] delta = new double[N];
        while (!S.empty()) {
            Node w = S.pop();
            int w_index = indicies.get(w);
            ListIterator<Node> iter1 = P[w_index].listIterator();
            while (iter1.hasNext()) {
                Node u = iter1.next();
                int u_index = indicies.get(u);
                delta[u_index] += (theta[u_index] / theta[w_index]) * (1 + delta[w_index]);
            }
            if (w != s) {
                betweenness[w_index] += delta[w_index];
            }
        }
        count++;
        if (isCanceled) {
            hgraph.readUnlockAll();
            return;
        }
        Progress.progress(progress, count);
    }

    avgDist /= shortestPaths; // mN * (mN - 1.0f);

    for (Node s : hgraph.getNodes()) {
        AttributeRow row = (AttributeRow) s.getNodeData().getAttributes();
        int s_index = indicies.get(s);

        if (!isDirected) {
            betweenness[s_index] /= 2;
        }
        if (isNormalized) {
            closeness[s_index] = (closeness[s_index] == 0) ? 0 : 1.0 / closeness[s_index];
            betweenness[s_index] /= isDirected ? (N - 1) * (N - 2) : (N - 1) * (N - 2) / 2;
        }
        row.setValue(eccentricityCol, eccentricity[s_index]);
        row.setValue(closenessCol, closeness[s_index]);
        row.setValue(betweenessCol, betweenness[s_index]);
    }
    hgraph.readUnlock();
}
From source file:org.geoserver.importer.Directory.java
public List<Directory> flatten() {
    List<Directory> flat = new ArrayList<Directory>();

    LinkedList<Directory> q = new LinkedList<Directory>();
    q.addLast(this);
    while (!q.isEmpty()) {
        Directory dir = q.removeFirst();
        flat.add(dir);

        for (Iterator<FileData> it = dir.getFiles().iterator(); it.hasNext();) {
            FileData f = it.next();
            if (f instanceof Directory) {
                Directory d = (Directory) f;
                it.remove();
                q.addLast(d);
            }
        }
    }

    return flat;
}
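Several of these examples share the same idiom: a LinkedList serves as a FIFO work queue, with addLast() enqueueing newly discovered items and removeFirst() dequeueing the next one to process, which yields a breadth-first traversal. A stripped-down, self-contained sketch of that idiom (the TreeNode and BreadthFirst types are made up for illustration and do not come from any of the projects above):

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

class TreeNode {
    final String name;
    final List<TreeNode> children = new ArrayList<>();
    TreeNode(String name) { this.name = name; }
}

class BreadthFirst {
    /** Visits nodes level by level using a LinkedList as a FIFO queue. */
    static List<String> traverse(TreeNode root) {
        List<String> visited = new ArrayList<>();
        LinkedList<TreeNode> queue = new LinkedList<>();
        queue.addLast(root);
        while (!queue.isEmpty()) {
            TreeNode current = queue.removeFirst(); // dequeue from the head
            visited.add(current.name);
            for (TreeNode child : current.children) {
                queue.addLast(child); // enqueue at the tail
            }
        }
        return visited;
    }
}

For graphs rather than trees, a visited set is needed to avoid processing the same node twice, as the Flowable example further below does with its visitedFlowElements set.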
From source file:org.springframework.amqp.rabbit.connection.CachingConnectionFactory.java
private Channel getChannel(boolean transactional) {
    LinkedList<ChannelProxy> channelList = transactional ? this.cachedChannelsTransactional
            : this.cachedChannelsNonTransactional;
    Channel channel = null;
    synchronized (channelList) {
        if (!channelList.isEmpty()) {
            channel = channelList.removeFirst();
        }
    }
    if (channel != null) {
        if (logger.isTraceEnabled()) {
            logger.trace("Found cached Rabbit Channel");
        }
    } else {
        channel = getCachedChannelProxy(channelList, transactional);
    }
    return channel;
}
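Here removeFirst() is guarded by an isEmpty() check inside a synchronized block, so an empty cache falls through to creating a new channel proxy instead of throwing NoSuchElementException. A generic sketch of that check-then-pop pooling pattern (the ResourcePool class is illustrative only and is not part of Spring AMQP):

import java.util.LinkedList;
import java.util.function.Supplier;

class ResourcePool<T> {
    private final LinkedList<T> idle = new LinkedList<>();
    private final Supplier<T> factory;

    ResourcePool(Supplier<T> factory) {
        this.factory = factory;
    }

    /** Returns a cached resource if one is available, otherwise creates a new one. */
    T acquire() {
        T resource = null;
        synchronized (idle) {
            if (!idle.isEmpty()) {
                resource = idle.removeFirst(); // pop from the head of the cache
            }
        }
        return resource != null ? resource : factory.get();
    }

    /** Returns a resource to the cache for later reuse. */
    void release(T resource) {
        synchronized (idle) {
            idle.addLast(resource);
        }
    }
}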
From source file:opennlp.model.OnePassDataIndexer.java
protected List index(LinkedList<Event> events, Map<String, Integer> predicateIndex) {
    Map<String, Integer> omap = new HashMap<String, Integer>();

    int numEvents = events.size();
    int outcomeCount = 0;
    List eventsToCompare = new ArrayList(numEvents);
    List<Integer> indexedContext = new ArrayList<Integer>();

    for (int eventIndex = 0; eventIndex < numEvents; eventIndex++) {
        Event ev = (Event) events.removeFirst();
        String[] econtext = ev.getContext();
        ComparableEvent ce;

        int ocID;
        String oc = ev.getOutcome();

        if (omap.containsKey(oc)) {
            ocID = omap.get(oc);
        } else {
            ocID = outcomeCount++;
            omap.put(oc, ocID);
        }

        for (int i = 0; i < econtext.length; i++) {
            String pred = econtext[i];
            if (predicateIndex.containsKey(pred)) {
                indexedContext.add(predicateIndex.get(pred));
            }
        }

        // drop events with no active features
        if (indexedContext.size() > 0) {
            int[] cons = new int[indexedContext.size()];
            for (int ci = 0; ci < cons.length; ci++) {
                cons[ci] = indexedContext.get(ci);
            }
            ce = new ComparableEvent(ocID, cons);
            eventsToCompare.add(ce);
        } else {
            LOG.debug("Dropped event " + ev.getOutcome() + ":" + Arrays.asList(ev.getContext()));
        }
        // recycle the TIntArrayList
        indexedContext.clear();
    }
    outcomeLabels = toIndexedStringArray(omap);
    predLabels = toIndexedStringArray(predicateIndex);
    return eventsToCompare;
}
From source file:cross.io.misc.WorkflowZipper.java
/**
 * Saves the currently assigned workflow elements, matching the currently
 * assigned FileFilter, to File. Marks all files for deletion on exit.
 *
 * @param f the file to save to
 * @return true if the workflow was zipped, false otherwise
 * @throws RuntimeException if IOExceptions are encountered
 */
public boolean save(final File f) {
    if (this.zipWorkflow) {
        HashSet<String> zipEntries = new HashSet<>();
        final int bufsize = 1024;
        final File zipFile = f;
        ZipOutputStream zos;
        try {
            final FileOutputStream fos = new FileOutputStream(zipFile);
            zos = new ZipOutputStream(new BufferedOutputStream(fos));
            log.info("Created zip output stream");
            final byte[] input_buffer = new byte[bufsize];
            File basedir = FileTools.prependDefaultDirsWithPrefix("", null, this.iw.getStartupDate());
            if (this.deleteOnExit) {
                log.info("marked basedir for deletion on exit: {}", basedir);
                basedir.deleteOnExit();
            }
            if (flatten) {
                log.info("setting basedir to parent file: {}", basedir.getParentFile());
                basedir = basedir.getParentFile();
                final Iterator<IWorkflowResult> iter = this.iw.getResults();
                while (iter.hasNext()) {
                    final IWorkflowResult iwr = iter.next();
                    if (iwr instanceof IWorkflowFileResult) {
                        final IWorkflowFileResult iwfr = (IWorkflowFileResult) iwr;
                        final File file = iwfr.getFile();
                        log.info("Retrieving file result {}", file);
                        // mark file for deletion
                        final File parent = file.getParentFile();
                        log.info("Retrieving parent of file result {}", parent);
                        // Also delete the parent directory in which file was contained,
                        // unless it is the base directory + possibly additional defaultDirs
                        if (parent.getAbsolutePath().startsWith(basedir.getAbsolutePath())
                                && !parent.getAbsolutePath().equals(basedir.getAbsolutePath())) {
                            log.info("Marking file and parent for deletion");
                            if (this.deleteOnExit) {
                                parent.deleteOnExit();
                                file.deleteOnExit();
                            }
                        }
                        if (file.getAbsolutePath().startsWith(basedir.getAbsolutePath())) {
                            log.info("Marking file for deletion");
                            if (this.deleteOnExit) {
                                file.deleteOnExit();
                            }
                        }
                        if ((this.ff != null) && !this.ff.accept(file)) {
                            // Skip file if file filter does not accept it
                            continue;
                        } else {
                            log.info("Adding zip entry!");
                            addZipEntry(bufsize, zos, input_buffer, file, zipEntries);
                        }
                    }
                }
            } else {
                LinkedList<File> files = new LinkedList<>(Arrays.asList(basedir.listFiles(ff)));
                File archiveBase = basedir.getParentFile();
                while (!files.isEmpty()) {
                    File currentFile = files.removeFirst();
                    if (currentFile.isDirectory()) {
                        files.addAll(Arrays.asList(currentFile.listFiles(ff)));
                    } else {
                        try {
                            String relativePath = FileTools.getRelativeFile(archiveBase, currentFile).getPath()
                                    .replaceAll("\\\\", "/");
                            log.info("Adding zip entry for {} below {}", relativePath, archiveBase);
                            addRelativeZipEntry(bufsize, zos, input_buffer, relativePath, currentFile, zipEntries);
                        } catch (Exception ex) {
                            log.warn("Caught exception while retrieving relative path:", ex);
                        }
                    }
                    if (this.deleteOnExit) {
                        log.info("Marking file for deletion");
                        currentFile.deleteOnExit();
                    }
                }
            }
            try {
                zos.flush();
                zos.close();
            } catch (final IOException e) {
                throw new RuntimeException(e);
            }
        } catch (final IOException e) {
            throw new RuntimeException(e);
        }
        return true;
    } else {
        log.debug("Configured to not zip Workflow results!");
        return false;
    }
}
From source file:org.flowable.decision.DecisionAnalysisService.java
private List<UserTask> findUserTasksBefore(UserTask startUserTask) {
    List<UserTask> result = new ArrayList<>();

    Set<String> visitedFlowElements = new HashSet<>();
    LinkedList<FlowElement> elementsToVisit = new LinkedList<>();
    for (SequenceFlow flow : startUserTask.getIncomingFlows()) {
        elementsToVisit.add(flow.getSourceFlowElement());
    }

    while (!elementsToVisit.isEmpty()) {
        FlowElement flowElement = elementsToVisit.removeFirst();
        if (!visitedFlowElements.contains(flowElement.getId())) {
            if (flowElement instanceof FlowNode) {
                FlowNode flowNode = (FlowNode) flowElement;
                if (!flowNode.getIncomingFlows().isEmpty()) {
                    for (SequenceFlow flow : flowNode.getIncomingFlows()) {
                        elementsToVisit.add(flow.getSourceFlowElement());
                    }
                }
                if (flowElement instanceof UserTask) {
                    UserTask userTask = (UserTask) flowElement;
                    if (userTask.getFormKey() != null) {
                        result.add(userTask);
                    }
                }
            }
        }
        visitedFlowElements.add(flowElement.getId());
    }

    return result;
}
From source file:fi.hsl.parkandride.back.UtilizationDao.java
@Transactional(readOnly = true, isolation = READ_COMMITTED, propagation = MANDATORY)
@Override
public List<Utilization> findUtilizationsWithResolution(UtilizationKey utilizationKey, DateTime start,
        DateTime end, Minutes resolution) {
    ArrayList<Utilization> results = new ArrayList<>();
    Optional<Utilization> first = findUtilizationAtInstant(utilizationKey, start);
    try (CloseableIterator<Utilization> rest = findUtilizationsBetween(utilizationKey, start, end)) {
        LinkedList<Utilization> utilizations = Stream
                .concat(StreamUtil.asStream(first), StreamUtil.asStream(rest))
                .collect(Collectors.toCollection(LinkedList::new));
        Utilization current = null;
        for (DateTime instant = start; !instant.isAfter(end); instant = instant.plus(resolution)) {
            while (!utilizations.isEmpty() && !utilizations.getFirst().timestamp.isAfter(instant)) {
                current = utilizations.removeFirst();
            }
            if (current != null) {
                current.timestamp = instant;
                results.add(current.copy());
            }
        }
    }
    return results;
}
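In this example the LinkedList holds timestamp-ordered utilizations, and removeFirst() advances a cursor whenever the next entry is not after the current sampling instant, so the latest known value is carried forward onto a regular time grid. A simplified, self-contained sketch of that resampling idea (the Sample and Resampler types are made up for illustration and are not part of the parkandride codebase):

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

class Sample {
    final long timestamp; // epoch millis, hypothetical
    final int value;
    Sample(long timestamp, int value) {
        this.timestamp = timestamp;
        this.value = value;
    }
}

class Resampler {
    /** Emits one sample per step, carrying the latest known value forward. */
    static List<Sample> resample(LinkedList<Sample> sorted, long start, long end, long step) {
        List<Sample> out = new ArrayList<>();
        Sample current = null;
        for (long t = start; t <= end; t += step) {
            // Consume every queued sample whose timestamp is not after t.
            while (!sorted.isEmpty() && sorted.getFirst().timestamp <= t) {
                current = sorted.removeFirst();
            }
            if (current != null) {
                out.add(new Sample(t, current.value));
            }
        }
        return out;
    }
}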
From source file:org.apache.hadoop.hdfs.server.datanode.TestDirectoryScannerDelta.java
private void messWithDelta(LinkedList<FileAndBlockId> blocksToBeRemoved,
        LinkedList<FileAndBlockId> blocksToBeUpdated, LinkedList<FileAndBlockId> blocksToBeAdded)
        throws IOException {
    for (int i = 0, n = blocksToBeAdded.size() / 4; i < n; i++) {
        FileAndBlockId f = blocksToBeAdded.removeFirst();
        if (i % 2 == 0) {
            delta.addBlock(nsid, f.block);
        } else {
            delta.removeBlock(nsid, f.block);
        }
    }
    for (int i = 0, n = blocksToBeRemoved.size() / 4; i < n; i++) {
        FileAndBlockId f = blocksToBeAdded.removeFirst();
        if (i % 2 == 0) {
            delta.addBlock(nsid, f.block);
        } else {
            delta.removeBlock(nsid, f.block);
        }
    }
    for (int i = 0, n = blocksToBeUpdated.size() / 4; i < n; i++) {
        FileAndBlockId f = blocksToBeUpdated.removeFirst();
        removeFile(fs, f.fileName);
    }
}