Usage examples for java.util.LinkedList#isEmpty() collected from open-source projects.
boolean isEmpty();
From source file:cross.io.misc.WorkflowZipper.java
/**
 * Saves the currently assigned workflow elements, matching the currently
 * assigned FileFilter, to the given file as a zip archive. When
 * {@code deleteOnExit} is set, marks the written source files (and, where
 * applicable, their parent directories) for deletion on JVM exit.
 *
 * @param f the file to save to
 * @return true if the workflow was zipped, false otherwise
 * @throws RuntimeException if IOExceptions are encountered
 */
public boolean save(final File f) {
    if (this.zipWorkflow) {
        // Tracks entry names already written so duplicates can be avoided by the add* helpers.
        HashSet<String> zipEntries = new HashSet<>();
        final int bufsize = 1024;
        final File zipFile = f;
        ZipOutputStream zos;
        try {
            final FileOutputStream fos = new FileOutputStream(zipFile);
            zos = new ZipOutputStream(new BufferedOutputStream(fos));
            log.info("Created zip output stream");
            final byte[] input_buffer = new byte[bufsize];
            File basedir = FileTools.prependDefaultDirsWithPrefix("", null, this.iw.getStartupDate());
            if (this.deleteOnExit) {
                log.info("marked basedir for deletion on exit: {}", basedir);
                basedir.deleteOnExit();
            }
            if (flatten) {
                // Flatten mode: walk the workflow's file results directly and store
                // them without their directory structure.
                log.info("setting basedir to parent file: {}", basedir.getParentFile());
                basedir = basedir.getParentFile();
                final Iterator<IWorkflowResult> iter = this.iw.getResults();
                while (iter.hasNext()) {
                    final IWorkflowResult iwr = iter.next();
                    if (iwr instanceof IWorkflowFileResult) {
                        final IWorkflowFileResult iwfr = (IWorkflowFileResult) iwr;
                        final File file = iwfr.getFile();
                        log.info("Retrieving file result {}", file);
                        // mark file for deletion
                        final File parent = file.getParentFile();
                        log.info("Retrieving parent of file result {}", parent);
                        // Also delete the parent directory in which file was contained,
                        // unless it is the base directory + possibly additional defaultDirs
                        if (parent.getAbsolutePath().startsWith(basedir.getAbsolutePath())
                                && !parent.getAbsolutePath().equals(basedir.getAbsolutePath())) {
                            log.info("Marking file and parent for deletion");
                            if (this.deleteOnExit) {
                                parent.deleteOnExit();
                                file.deleteOnExit();
                            }
                        }
                        if (file.getAbsolutePath().startsWith(basedir.getAbsolutePath())) {
                            log.info("Marking file for deletion");
                            if (this.deleteOnExit) {
                                file.deleteOnExit();
                            }
                        }
                        if ((this.ff != null) && !this.ff.accept(file)) {
                            // Skip file if file filter does not accept it
                            continue;
                        } else {
                            log.info("Adding zip entry!");
                            addZipEntry(bufsize, zos, input_buffer, file, zipEntries);
                        }
                    }
                }
            } else {
                // Non-flatten mode: breadth-first walk of basedir, preserving paths
                // relative to basedir's parent inside the archive.
                LinkedList<File> files = new LinkedList<>(Arrays.asList(basedir.listFiles(ff)));
                File archiveBase = basedir.getParentFile();
                while (!files.isEmpty()) {
                    File currentFile = files.removeFirst();
                    if (currentFile.isDirectory()) {
                        files.addAll(Arrays.asList(currentFile.listFiles(ff)));
                    } else {
                        try {
                            // Normalize Windows separators so entry names are always '/'-delimited.
                            String relativePath = FileTools.getRelativeFile(archiveBase, currentFile).getPath()
                                    .replaceAll("\\\\", "/");
                            log.info("Adding zip entry for {} below {}", relativePath, archiveBase);
                            addRelativeZipEntry(bufsize, zos, input_buffer, relativePath, currentFile, zipEntries);
                        } catch (Exception ex) {
                            // Best-effort: a single unresolvable path must not abort the whole archive.
                            log.warn("Caught exception while retrieving relative path:", ex);
                        }
                    }
                    if (this.deleteOnExit) {
                        log.info("Marking file for deletion");
                        currentFile.deleteOnExit();
                    }
                }
            }
            try {
                zos.flush();
                zos.close();
            } catch (final IOException e) {
                throw new RuntimeException(e);
            }
        } catch (final IOException e) {
            throw new RuntimeException(e);
        }
        return true;
    } else {
        log.debug("Configured to not zip Workflow results!");
        return false;
    }
}
From source file:cn.keke.travelmix.publictransport.type.EfaConnectionResponseHandler.java
public void handle(HttpResponse response) throws IOException { if (this.job.isFinished()) { return;//from w w w . java 2 s . co m } HttpEntity entity = response.getEntity(); BufferedInputStream in; if (this.zipped) { in = new BufferedInputStream(new GZIPInputStream(entity.getContent())); } else { in = new BufferedInputStream(entity.getContent()); } String responseText = IOUtils.toString(in, CHARSET_ISO_8859_1); if (this.job.isFinished()) { return; } // LOG.info("PT response: " + responseText); LinkedList<PartialRoute> partialRoutes = parseExternalRouteResponse(responseText); if (!partialRoutes.isEmpty()) { LOG.info("Got " + partialRoutes.size() + " partial routes"); if (!this.job.setFinished(this.url)) { return; } RouteResult result = readRouteInfo(partialRoutes); createRouteResponse(this.sb, result); this.job.setHandled(); } else { LOG.info("No partial routes received: " + url); } }
From source file:org.springframework.amqp.rabbit.connection.CachingConnectionFactory.java
private Channel getChannel(boolean transactional) { LinkedList<ChannelProxy> channelList = transactional ? this.cachedChannelsTransactional : this.cachedChannelsNonTransactional; Channel channel = null;//from www . j av a2 s . c o m synchronized (channelList) { if (!channelList.isEmpty()) { channel = channelList.removeFirst(); } } if (channel != null) { if (logger.isTraceEnabled()) { logger.trace("Found cached Rabbit Channel"); } } else { channel = getCachedChannelProxy(channelList, transactional); } return channel; }
From source file:org.flowable.decision.DecisionAnalysisService.java
/**
 * Walks the process graph backwards from the given user task and collects
 * every reachable UserTask that has a form key.
 *
 * @param startUserTask the task whose upstream user tasks are wanted
 * @return user tasks with a non-null form key found before the start task
 */
private List<UserTask> findUserTasksBefore(UserTask startUserTask) {
    List<UserTask> found = new ArrayList<>();
    Set<String> seen = new HashSet<>();
    LinkedList<FlowElement> queue = new LinkedList<>();
    // Seed the traversal with the sources of all incoming sequence flows.
    for (SequenceFlow incoming : startUserTask.getIncomingFlows()) {
        queue.add(incoming.getSourceFlowElement());
    }
    while (!queue.isEmpty()) {
        FlowElement current = queue.removeFirst();
        // Set.add returns false when the id was already recorded — skip revisits.
        boolean firstVisit = seen.add(current.getId());
        if (!firstVisit || !(current instanceof FlowNode)) {
            continue;
        }
        FlowNode node = (FlowNode) current;
        for (SequenceFlow incoming : node.getIncomingFlows()) {
            queue.add(incoming.getSourceFlowElement());
        }
        if (node instanceof UserTask) {
            UserTask userTask = (UserTask) node;
            if (userTask.getFormKey() != null) {
                found.add(userTask);
            }
        }
    }
    return found;
}
From source file:playground.johannes.socialnets.NetworkGenerator2.java
/**
 * Generates a social network over the given population by repeatedly wiring
 * "stubs" (remaining degree counts) between egos, component by component,
 * then closing triads wave by wave. Terminates the JVM if an invalid
 * neighbour connection is encountered.
 *
 * @param population the persons to turn into network egos
 * @return the generated social network
 */
public SocialNetwork generate(Population population) {
    random = new Random();
    logger.info("Initializing social network...");
    SocialNetwork net = new SocialNetwork();
    for (Person p : population) {
        egoList.add(net.addEgo(p));
    }
    logger.info("Initializing degree distribution...");
    // stubsMap tracks how many connections each ego still needs.
    TObjectIntHashMap<Ego> stubsMap = initDegreeDistribution(net.getVertices());
    LinkedList<Ego> pendingNodes = new LinkedList<Ego>(net.getVertices());
    Collections.shuffle(pendingNodes);
    while (!pendingNodes.isEmpty()) {
        logger.info("Starting new component...");
        Ego v1 = pendingNodes.getFirst();
        Collection<Ego> nextWave = new HashSet<Ego>();
        int stubs = stubsMap.get(v1);
        for (int i = 0; i < stubs; i++) {
            Ego n1 = findNeighbour(v1, egoList, stubsMap, false);
            if (n1 == null) {
                // No valid neighbour left for v1 — drop it and start a new component.
                pendingNodes.remove(v1);
                break;
            }
            if (!formConnection(v1, n1, net, stubsMap)) {
                /*
                 * Should never happen (?)
                 */
                int v2stubs = stubsMap.get(v1);
                int v3stubs = stubsMap.get(n1);
                System.err.println("The selected neighbour is not valid!" + " v2stubs=" + v2stubs
                        + ", v3stubs=" + v3stubs);
                System.exit(-1);
            } else {
                nextWave.add(n1);
                // Fully wired nodes leave the pending list.
                if (stubsMap.get(n1) == 0)
                    pendingNodes.remove(n1);
            }
        }
        if (stubsMap.get(v1) == 0)
            pendingNodes.remove(v1);
        // Expand the component by closing triads until no new wave is produced.
        while (!nextWave.isEmpty()) {
            nextWave = closeTriads(nextWave, stubsMap, pendingNodes, net);
        }
    }
    // Report how many stubs could not be matched.
    int sum = 0;
    for (int val : stubsMap.getValues()) {
        sum += val;
    }
    System.err.println(sum + " stubs left!");
    return net;
}
From source file:org.gephi.statistics.plugin.GraphDistance.java
/**
 * Computes distance-based centralities (eccentricity, closeness, betweenness)
 * plus diameter, radius and average path length for the graph, using a
 * BFS-based accumulation over shortest paths (Brandes-style back-propagation
 * for betweenness — assumes unweighted edges), and writes the per-node values
 * into the attribute columns, creating them if absent.
 *
 * @param hgraph         the graph to analyze (read-locked during computation)
 * @param attributeModel holds the node table receiving the result columns
 */
public void execute(HierarchicalGraph hgraph, AttributeModel attributeModel) {
    isCanceled = false;
    // Ensure the three output columns exist.
    AttributeTable nodeTable = attributeModel.getNodeTable();
    AttributeColumn eccentricityCol = nodeTable.getColumn(ECCENTRICITY);
    AttributeColumn closenessCol = nodeTable.getColumn(CLOSENESS);
    AttributeColumn betweenessCol = nodeTable.getColumn(BETWEENNESS);
    if (eccentricityCol == null) {
        eccentricityCol = nodeTable.addColumn(ECCENTRICITY, "Eccentricity", AttributeType.DOUBLE,
                AttributeOrigin.COMPUTED, new Double(0));
    }
    if (closenessCol == null) {
        closenessCol = nodeTable.addColumn(CLOSENESS, "Closeness Centrality", AttributeType.DOUBLE,
                AttributeOrigin.COMPUTED, new Double(0));
    }
    if (betweenessCol == null) {
        betweenessCol = nodeTable.addColumn(BETWEENNESS, "Betweenness Centrality", AttributeType.DOUBLE,
                AttributeOrigin.COMPUTED, new Double(0));
    }
    hgraph.readLock();
    N = hgraph.getNodeCount();
    betweenness = new double[N];
    eccentricity = new double[N];
    closeness = new double[N];
    diameter = 0;
    avgDist = 0;
    shortestPaths = 0;
    radius = Integer.MAX_VALUE;
    // Map each node to a dense array index.
    HashMap<Node, Integer> indicies = new HashMap<Node, Integer>();
    int index = 0;
    for (Node s : hgraph.getNodes()) {
        indicies.put(s, index);
        index++;
    }
    Progress.start(progress, hgraph.getNodeCount());
    int count = 0;
    // One BFS per source node s.
    for (Node s : hgraph.getNodes()) {
        Stack<Node> S = new Stack<Node>();            // nodes in order of non-decreasing distance
        LinkedList<Node>[] P = new LinkedList[N];     // shortest-path predecessors
        double[] theta = new double[N];               // number of shortest paths from s
        int[] d = new int[N];                         // BFS distance from s (-1 = unvisited)
        for (int j = 0; j < N; j++) {
            P[j] = new LinkedList<Node>();
            theta[j] = 0;
            d[j] = -1;
        }
        int s_index = indicies.get(s);
        theta[s_index] = 1;
        d[s_index] = 0;
        LinkedList<Node> Q = new LinkedList<Node>();
        Q.addLast(s);
        while (!Q.isEmpty()) {
            Node v = Q.removeFirst();
            S.push(v);
            int v_index = indicies.get(v);
            EdgeIterable edgeIter = null;
            if (isDirected) {
                edgeIter = ((HierarchicalDirectedGraph) hgraph).getOutEdgesAndMetaOutEdges(v);
            } else {
                edgeIter = hgraph.getEdgesAndMetaEdges(v);
            }
            for (Edge edge : edgeIter) {
                Node reachable = hgraph.getOpposite(v, edge);
                int r_index = indicies.get(reachable);
                if (d[r_index] < 0) {
                    Q.addLast(reachable);
                    d[r_index] = d[v_index] + 1;
                }
                // Count shortest paths and record predecessors.
                if (d[r_index] == (d[v_index] + 1)) {
                    theta[r_index] = theta[r_index] + theta[v_index];
                    P[r_index].addLast(v);
                }
            }
        }
        // Aggregate distances into eccentricity/closeness/diameter.
        double reachable = 0;
        for (int i = 0; i < N; i++) {
            if (d[i] > 0) {
                avgDist += d[i];
                eccentricity[s_index] = (int) Math.max(eccentricity[s_index], d[i]);
                closeness[s_index] += d[i];
                diameter = Math.max(diameter, d[i]);
                reachable++;
            }
        }
        radius = (int) Math.min(eccentricity[s_index], radius);
        if (reachable != 0) {
            closeness[s_index] /= reachable;
        }
        shortestPaths += reachable;
        // Back-propagate dependencies in reverse BFS order (Brandes accumulation).
        double[] delta = new double[N];
        while (!S.empty()) {
            Node w = S.pop();
            int w_index = indicies.get(w);
            ListIterator<Node> iter1 = P[w_index].listIterator();
            while (iter1.hasNext()) {
                Node u = iter1.next();
                int u_index = indicies.get(u);
                delta[u_index] += (theta[u_index] / theta[w_index]) * (1 + delta[w_index]);
            }
            if (w != s) {
                betweenness[w_index] += delta[w_index];
            }
        }
        count++;
        if (isCanceled) {
            hgraph.readUnlockAll();
            return;
        }
        Progress.progress(progress, count);
    }
    avgDist /= shortestPaths;
    // Write results back to node attributes, with optional normalization.
    for (Node s : hgraph.getNodes()) {
        AttributeRow row = (AttributeRow) s.getNodeData().getAttributes();
        int s_index = indicies.get(s);
        if (!isDirected) {
            // Each undirected shortest path was counted from both endpoints.
            betweenness[s_index] /= 2;
        }
        if (isNormalized) {
            closeness[s_index] = (closeness[s_index] == 0) ? 0 : 1.0 / closeness[s_index];
            betweenness[s_index] /= isDirected ? (N - 1) * (N - 2) : (N - 1) * (N - 2) / 2;
        }
        row.setValue(eccentricityCol, eccentricity[s_index]);
        row.setValue(closenessCol, closeness[s_index]);
        row.setValue(betweenessCol, betweenness[s_index]);
    }
    hgraph.readUnlock();
}
From source file:org.geoserver.importer.Directory.java
public List<Directory> flatten() { List<Directory> flat = new ArrayList<Directory>(); LinkedList<Directory> q = new LinkedList<Directory>(); q.addLast(this); while (!q.isEmpty()) { Directory dir = q.removeFirst(); flat.add(dir);//from w ww . j av a 2 s.c o m for (Iterator<FileData> it = dir.getFiles().iterator(); it.hasNext();) { FileData f = it.next(); if (f instanceof Directory) { Directory d = (Directory) f; it.remove(); q.addLast(d); } } } return flat; }
From source file:org.apache.jackrabbit.oak.console.impl.Console.java
private NodeState browseDown(NodeState father, LinkedList<String> path) { String node = path.poll();//from w w w.j av a2 s . c o m NodeState child = null; if (!StringUtils.isEmpty(node)) { if (father.hasChildNode(node)) { child = father.getChildNode(node); } else { System.err.println("Wrong path. Node doesn't exists. " + node); return null; } } else { child = father; } if (!path.isEmpty()) { child = browseDown(child, path); } return child; }
From source file:net.rptools.tokentool.controller.ManageOverlays_Controller.java
/**
 * Shows a confirmation dialog for deleting the given overlay files.
 * The message names the single file, or states the count for several.
 *
 * @param overlayFiles the overlays selected for deletion
 * @return true only when the user confirmed with OK; false for an empty
 *         selection or any other dialog outcome
 */
private boolean confirmDelete(LinkedList<File> overlayFiles) {
    StringBuilder message = new StringBuilder(
            I18N.getString("ManageOverlays.dialog.delete.confirmation"));
    if (overlayFiles.isEmpty()) {
        return false;
    }
    if (overlayFiles.size() == 1) {
        message.append(overlayFiles.get(0).getName()).append("?");
    } else {
        message.append(I18N.getString("ManageOverlays.dialog.delete.confirmation.these"))
                .append(overlayFiles.size())
                .append(I18N.getString("ManageOverlays.dialog.delete.confirmation.overlays"));
    }
    Alert alert = new Alert(AlertType.CONFIRMATION);
    alert.setTitle(I18N.getString("ManageOverlays.dialog.delete.title"));
    alert.setContentText(message.toString());
    Optional<ButtonType> answer = alert.showAndWait();
    return answer.isPresent() && answer.get() == ButtonType.OK;
}
From source file:org.apache.drill.exec.compile.ClassTransformer.java
/**
 * Compiles the generated source, merges each generated class with its
 * precompiled template counterpart (optionally applying scalar replacement),
 * injects the resulting bytecode into the class loader, and returns the
 * materialized class.
 *
 * @param classBytes are verified against the template's external interface before return
 * @param classLoader            loader that compiles and receives the bytecode
 * @param templateDefinition     describes the template class and its interface
 * @param entireClass            full source of the generated class
 * @param materializedClassName  name the merged class is materialized under
 * @return the loaded, merged implementation class
 * @throws ClassTransformationException on compile, I/O, or lookup failure,
 *         or when the result does not implement the expected interface
 */
public Class<?> getImplementationClass(final QueryClassLoader classLoader,
        final TemplateClassDefinition<?> templateDefinition, final String entireClass,
        final String materializedClassName) throws ClassTransformationException {
    // unfortunately, this hasn't been set up at construction time, so we have to do it here
    final ScalarReplacementOption scalarReplacementOption = ScalarReplacementOption
            .fromString(optionManager.getOption(SCALAR_REPLACEMENT_VALIDATOR));
    try {
        final long t1 = System.nanoTime();
        final ClassSet set = new ClassSet(null, templateDefinition.getTemplateClassName(),
                materializedClassName);
        final byte[][] implementationClasses = classLoader.getClassByteCode(set.generated, entireClass);
        long totalBytecodeSize = 0;
        // Index every compiled class by its internal (slash-separated) name.
        Map<String, Pair<byte[], ClassNode>> classesToMerge = Maps.newHashMap();
        for (byte[] clazz : implementationClasses) {
            totalBytecodeSize += clazz.length;
            final ClassNode node = AsmUtil.classFromBytes(clazz, ClassReader.EXPAND_FRAMES);
            if (!AsmUtil.isClassOk(logger, "implementationClasses", node)) {
                throw new IllegalStateException("Problem found with implementationClasses");
            }
            classesToMerge.put(node.name, Pair.of(clazz, node));
        }
        // Worklist of class sets to merge; inner classes are appended as discovered.
        final LinkedList<ClassSet> names = Lists.newLinkedList();
        final Set<ClassSet> namesCompleted = Sets.newHashSet();
        names.add(set);
        while (!names.isEmpty()) {
            final ClassSet nextSet = names.removeFirst();
            if (namesCompleted.contains(nextSet)) {
                continue;
            }
            final ClassNames nextPrecompiled = nextSet.precompiled;
            final byte[] precompiledBytes = byteCodeLoader.getClassByteCodeFromPath(nextPrecompiled.clazz);
            final ClassNames nextGenerated = nextSet.generated;
            // keeps only classes that have not been merged
            Pair<byte[], ClassNode> classNodePair = classesToMerge.remove(nextGenerated.slash);
            final ClassNode generatedNode;
            if (classNodePair != null) {
                generatedNode = classNodePair.getValue();
            } else {
                generatedNode = null;
            }
            /*
             * TODO
             * We're having a problem with some cases of scalar replacement, but we want to get
             * the code in so it doesn't rot anymore.
             *
             * Here, we use the specified replacement option. The loop will allow us to retry if
             * we're using TRY.
             */
            MergedClassResult result = null;
            boolean scalarReplace = scalarReplacementOption != ScalarReplacementOption.OFF
                    && entireClass.length() < MAX_SCALAR_REPLACE_CODE_SIZE;
            while (true) {
                try {
                    result = MergeAdapter.getMergedClass(nextSet, precompiledBytes, generatedNode,
                            scalarReplace);
                    break;
                } catch (RuntimeException e) {
                    // if we had a problem without using scalar replacement, then rethrow
                    if (!scalarReplace) {
                        throw e;
                    }
                    // if we did try to use scalar replacement, decide if we need to retry or not
                    if (scalarReplacementOption == ScalarReplacementOption.ON) {
                        // option is forced on, so this is a hard error
                        throw e;
                    }
                    /*
                     * We tried to use scalar replacement, with the option to fall back to not using it.
                     * Log this failure before trying again without scalar replacement.
                     */
                    logger.info("scalar replacement failure (retrying)\n", e);
                    scalarReplace = false;
                }
            }
            // Queue inner classes of the merged class for their own merge pass.
            for (String s : result.innerClasses) {
                s = s.replace(DrillFileUtils.SEPARATOR_CHAR, '.');
                names.add(nextSet.getChild(s));
            }
            classLoader.injectByteCode(nextGenerated.dot, result.bytes);
            namesCompleted.add(nextSet);
        }
        // adds byte code of the classes that have not been merged to make them accessible for outer class
        for (Map.Entry<String, Pair<byte[], ClassNode>> clazz : classesToMerge.entrySet()) {
            classLoader.injectByteCode(clazz.getKey().replace(DrillFileUtils.SEPARATOR_CHAR, '.'),
                    clazz.getValue().getKey());
        }
        Class<?> c = classLoader.findClass(set.generated.dot);
        if (templateDefinition.getExternalInterface().isAssignableFrom(c)) {
            logger.debug("Compiled and merged {}: bytecode size = {}, time = {} ms.", c.getSimpleName(),
                    DrillStringUtils.readable(totalBytecodeSize),
                    (System.nanoTime() - t1 + 500_000) / 1_000_000);
            return c;
        }
        throw new ClassTransformationException("The requested class did not implement the expected interface.");
    } catch (CompileException | IOException | ClassNotFoundException e) {
        throw new ClassTransformationException(
                String.format("Failure generating transformation classes for value: \n %s", entireClass), e);
    }
}