List of usage examples for java.util.Stack.pop()
public synchronized E pop()
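Stack.pop() removes the object at the top of the stack and returns it; it throws EmptyStackException if the stack is empty. Before the real-world examples below, here is a minimal standalone sketch (class and variable names are illustrative only):

import java.util.Stack;

public class StackPopExample {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<String>();
        stack.push("first");
        stack.push("second");
        // pop() removes and returns the most recently pushed element ("second")
        String top = stack.pop();
        System.out.println(top);
        // guard with isEmpty() to avoid EmptyStackException when draining the stack
        while (!stack.isEmpty()) {
            System.out.println(stack.pop());
        }
    }
}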
From source file:org.apache.fontbox.cff.Type1CharStringParser.java
private List<Object> parse(byte[] bytes, List<byte[]> subrs, List<Object> sequence) throws IOException {
    DataInput input = new DataInput(bytes);
    while (input.hasRemaining()) {
        int b0 = input.readUnsignedByte();
        if (b0 == CALLSUBR) {
            // callsubr command
            Object obj = sequence.remove(sequence.size() - 1);
            if (!(obj instanceof Integer)) {
                LOG.warn("Parameter " + obj + " for CALLSUBR is ignored, integer expected in glyph '"
                        + glyphName + "' of font " + fontName);
                continue;
            }
            Integer operand = (Integer) obj;
            if (operand >= 0 && operand < subrs.size()) {
                byte[] subrBytes = subrs.get(operand);
                parse(subrBytes, subrs, sequence);
                Object lastItem = sequence.get(sequence.size() - 1);
                if (lastItem instanceof CharStringCommand
                        && ((CharStringCommand) lastItem).getKey().getValue()[0] == RETURN) {
                    sequence.remove(sequence.size() - 1); // remove "return" command
                }
            } else {
                LOG.warn("CALLSUBR is ignored, operand: " + operand + ", subrs.size(): " + subrs.size()
                        + " in glyph '" + glyphName + "' of font " + fontName);
                // remove all parameters (there can be more than one)
                while (sequence.get(sequence.size() - 1) instanceof Integer) {
                    sequence.remove(sequence.size() - 1);
                }
            }
        } else if (b0 == TWO_BYTE && input.peekUnsignedByte(0) == CALLOTHERSUBR) {
            // callothersubr command (needed in order to expand Subrs)
            input.readByte();
            Integer othersubrNum = (Integer) sequence.remove(sequence.size() - 1);
            Integer numArgs = (Integer) sequence.remove(sequence.size() - 1);
            // othersubrs 0-3 have their own semantics
            Stack<Integer> results = new Stack<Integer>();
            switch (othersubrNum) {
            case 0:
                results.push(removeInteger(sequence));
                results.push(removeInteger(sequence));
                sequence.remove(sequence.size() - 1);
                // end flex
                sequence.add(0);
                sequence.add(new CharStringCommand(TWO_BYTE, CALLOTHERSUBR));
                break;
            case 1:
                // begin flex
                sequence.add(1);
                sequence.add(new CharStringCommand(TWO_BYTE, CALLOTHERSUBR));
                break;
            case 3:
                // allows hint replacement
                results.push(removeInteger(sequence));
                break;
            default:
                // all remaining othersubrs use this fallback mechanism
                for (int i = 0; i < numArgs; i++) {
                    results.push(removeInteger(sequence));
                }
                break;
            }
            // pop must follow immediately
            while (input.peekUnsignedByte(0) == TWO_BYTE && input.peekUnsignedByte(1) == POP) {
                input.readByte(); // B0_POP
                input.readByte(); // B1_POP
                sequence.add(results.pop());
            }
            if (results.size() > 0) {
                LOG.warn("Value left on the PostScript stack in glyph " + glyphName + " of font " + fontName);
            }
        } else if (b0 >= 0 && b0 <= 31) {
            sequence.add(readCommand(input, b0));
        } else if (b0 >= 32 && b0 <= 255) {
            sequence.add(readNumber(input, b0));
        } else {
            throw new IllegalArgumentException();
        }
    }
    return sequence;
}
From source file:org.apache.tajo.engine.planner.PhysicalPlannerImpl.java
private PhysicalExec createPlanRecursive(TaskAttemptContext ctx, LogicalNode logicalNode,
        Stack<LogicalNode> stack) throws IOException {
    PhysicalExec leftExec;
    PhysicalExec rightExec;
    switch (logicalNode.getType()) {
    case ROOT:
        LogicalRootNode rootNode = (LogicalRootNode) logicalNode;
        stack.push(rootNode);
        leftExec = createPlanRecursive(ctx, rootNode.getChild(), stack);
        stack.pop();
        return leftExec;
    case EXPRS:
        EvalExprNode evalExpr = (EvalExprNode) logicalNode;
        return new EvalExprExec(ctx, evalExpr);
    case CREATE_TABLE:
    case INSERT:
    case STORE:
        StoreTableNode storeNode = (StoreTableNode) logicalNode;
        stack.push(storeNode);
        leftExec = createPlanRecursive(ctx, storeNode.getChild(), stack);
        stack.pop();
        return createStorePlan(ctx, storeNode, leftExec);
    case SELECTION:
        SelectionNode selNode = (SelectionNode) logicalNode;
        stack.push(selNode);
        leftExec = createPlanRecursive(ctx, selNode.getChild(), stack);
        stack.pop();
        return new SelectionExec(ctx, selNode, leftExec);
    case PROJECTION:
        ProjectionNode prjNode = (ProjectionNode) logicalNode;
        stack.push(prjNode);
        leftExec = createPlanRecursive(ctx, prjNode.getChild(), stack);
        stack.pop();
        return new ProjectionExec(ctx, prjNode, leftExec);
    case TABLE_SUBQUERY: {
        TableSubQueryNode subQueryNode = (TableSubQueryNode) logicalNode;
        stack.push(subQueryNode);
        leftExec = createPlanRecursive(ctx, subQueryNode.getSubQuery(), stack);
        stack.pop();
        return new ProjectionExec(ctx, subQueryNode, leftExec);
    }
    case PARTITIONS_SCAN:
    case SCAN:
        leftExec = createScanPlan(ctx, (ScanNode) logicalNode, stack);
        return leftExec;
    case GROUP_BY:
        GroupbyNode grpNode = (GroupbyNode) logicalNode;
        stack.push(grpNode);
        leftExec = createPlanRecursive(ctx, grpNode.getChild(), stack);
        stack.pop();
        return createGroupByPlan(ctx, grpNode, leftExec);
    case WINDOW_AGG:
        WindowAggNode windowAggNode = (WindowAggNode) logicalNode;
        stack.push(windowAggNode);
        leftExec = createPlanRecursive(ctx, windowAggNode.getChild(), stack);
        stack.pop();
        return createWindowAgg(ctx, windowAggNode, leftExec);
    case DISTINCT_GROUP_BY:
        DistinctGroupbyNode distinctNode = (DistinctGroupbyNode) logicalNode;
        stack.push(distinctNode);
        leftExec = createPlanRecursive(ctx, distinctNode.getChild(), stack);
        stack.pop();
        return createDistinctGroupByPlan(ctx, distinctNode, leftExec);
    case HAVING:
        HavingNode havingNode = (HavingNode) logicalNode;
        stack.push(havingNode);
        leftExec = createPlanRecursive(ctx, havingNode.getChild(), stack);
        stack.pop();
        return new HavingExec(ctx, havingNode, leftExec);
    case SORT:
        SortNode sortNode = (SortNode) logicalNode;
        stack.push(sortNode);
        leftExec = createPlanRecursive(ctx, sortNode.getChild(), stack);
        stack.pop();
        return createSortPlan(ctx, sortNode, leftExec);
    case JOIN:
        JoinNode joinNode = (JoinNode) logicalNode;
        stack.push(joinNode);
        leftExec = createPlanRecursive(ctx, joinNode.getLeftChild(), stack);
        rightExec = createPlanRecursive(ctx, joinNode.getRightChild(), stack);
        stack.pop();
        return createJoinPlan(ctx, joinNode, leftExec, rightExec);
    case UNION:
        UnionNode unionNode = (UnionNode) logicalNode;
        stack.push(unionNode);
        leftExec = createPlanRecursive(ctx, unionNode.getLeftChild(), stack);
        rightExec = createPlanRecursive(ctx, unionNode.getRightChild(), stack);
        stack.pop();
        return new UnionExec(ctx, leftExec, rightExec);
    case LIMIT:
        LimitNode limitNode = (LimitNode) logicalNode;
        stack.push(limitNode);
        leftExec = createPlanRecursive(ctx, limitNode.getChild(), stack);
        stack.pop();
        return new LimitExec(ctx, limitNode.getInSchema(), limitNode.getOutSchema(), leftExec, limitNode);
    case INDEX_SCAN:
        IndexScanNode indexScanNode = (IndexScanNode) logicalNode;
        leftExec = createIndexScanExec(ctx, indexScanNode);
        return leftExec;
    case CREATE_INDEX:
        CreateIndexNode createIndexNode = (CreateIndexNode) logicalNode;
        stack.push(createIndexNode);
        leftExec = createPlanRecursive(ctx, createIndexNode.getChild(), stack);
        stack.pop();
        return new StoreIndexExec(ctx, createIndexNode, leftExec);
    default:
        return null;
    }
}
From source file:kilim.analysis.BasicBlock.java
public ArrayList<BasicBlock> getSubBlocks() throws KilimException {
    if (subBlocks == null) {
        if (!hasFlag(IS_SUBROUTINE))
            return null;
        subBlocks = new ArrayList<BasicBlock>(10);
        Stack<BasicBlock> stack = new Stack<BasicBlock>();
        this.setFlag(SUB_BLOCK);
        stack.add(this);
        while (!stack.isEmpty()) {
            BasicBlock b = stack.pop();
            subBlocks.add(b);
            if (b.lastInstruction() == JSR) {
                // add the following block, but not its target
                BasicBlock follower = b.getFollowingBlock();
                if (!follower.hasFlag(SUB_BLOCK)) {
                    follower.setFlag(SUB_BLOCK);
                    stack.push(follower);
                }
                continue;
            }
            for (BasicBlock succ : b.successors) {
                if (succ == this) {
                    throw new KilimException("JSRs looping back to themselves are not supported");
                }
                if (!succ.hasFlag(SUB_BLOCK)) {
                    succ.setFlag(SUB_BLOCK);
                    stack.push(succ);
                }
            }
        }
        Collections.sort(subBlocks);
    }
    return subBlocks;
}
From source file:net.dv8tion.jda.core.entities.impl.MessageImpl.java
@Override
public synchronized String getStrippedContent() {
    if (strippedContent == null) {
        String tmp = getContent();
        // all the formatting keys to keep track of
        String[] keys = new String[] { "*", "_", "`", "~~" };
        // find all tokens (formatting strings described above)
        TreeSet<FormatToken> tokens = new TreeSet<>((t1, t2) -> Integer.compare(t1.start, t2.start));
        for (String key : keys) {
            Matcher matcher = Pattern.compile(Pattern.quote(key)).matcher(tmp);
            while (matcher.find()) {
                tokens.add(new FormatToken(key, matcher.start()));
            }
        }
        // iterate over all tokens, find all matching pairs, and add them to the list toRemove
        Stack<FormatToken> stack = new Stack<>();
        List<FormatToken> toRemove = new ArrayList<>();
        boolean inBlock = false;
        for (FormatToken token : tokens) {
            if (stack.empty() || !stack.peek().format.equals(token.format)
                    || stack.peek().start + token.format.length() == token.start) {
                // we are at opening tag
                if (!inBlock) {
                    // we are outside of block -> handle normally
                    if (token.format.equals("`")) {
                        // block start... invalidate all previous tags
                        stack.clear();
                        inBlock = true;
                    }
                    stack.push(token);
                } else if (token.format.equals("`")) {
                    // we are inside of a block -> handle only block tag
                    stack.push(token);
                }
            } else if (!stack.empty()) {
                // we found a matching close-tag
                toRemove.add(stack.pop());
                toRemove.add(token);
                if (token.format.equals("`") && stack.empty()) {
                    // close tag closed the block
                    inBlock = false;
                }
            }
        }
        // sort tags to remove by their start-index and iteratively build the remaining string
        Collections.sort(toRemove, (t1, t2) -> Integer.compare(t1.start, t2.start));
        StringBuilder out = new StringBuilder();
        int currIndex = 0;
        for (FormatToken formatToken : toRemove) {
            if (currIndex < formatToken.start) {
                out.append(tmp.substring(currIndex, formatToken.start));
            }
            currIndex = formatToken.start + formatToken.format.length();
        }
        if (currIndex < tmp.length()) {
            out.append(tmp.substring(currIndex));
        }
        // return the stripped text, escape all remaining formatting characters
        // (did not have matching open/close before or were left/right of block)
        strippedContent = out.toString().replace("*", "\\*").replace("_", "\\_").replace("~", "\\~");
    }
    return strippedContent;
}
From source file:gdt.data.entity.ArchiveHandler.java
private boolean append(Entigrator entigrator, String root$, String source$, TarArchiveOutputStream aos) {
    try {
        File[] fa = null;
        File source = new File(source$);
        if (source.exists())
            if (source.isFile())
                fa = new File[] { source };
            else
                fa = source.listFiles();
        if (fa == null)
            return true;
        File recordFile = null;
        Stack<TarArchiveEntry> s = new Stack<TarArchiveEntry>();
        int cnt = 0;
        TarArchiveEntry entry = null;
        for (File aFa : fa) {
            recordFile = aFa;
            entry = new TarArchiveEntry(recordFile);
            entry.setSize(recordFile.length());
            s.clear();
            getTarEntries(entry, s, root$);
            cnt = s.size();
            // System.out.println("EximpExpert:append:cnt=" + cnt);
            File nextFile = null;
            for (int j = 0; j < cnt; j++) {
                entry = (TarArchiveEntry) s.pop();
                try {
                    String nextFile$ = entigrator.getEntihome() + "/" + entry.getName();
                    // System.out.println("EximpExpert:append:try next file=" + nextFile$);
                    nextFile = new File(nextFile$);
                    if (!nextFile.exists() || nextFile.length() < 1) {
                        System.out.println("ArchiveHandler:append:wrong next file=" + nextFile$);
                        continue;
                    }
                    aos.putArchiveEntry(entry);
                    IOUtils.copy(new FileInputStream(nextFile$), aos);
                    // System.out.println("EximpExpert:tar_write:j=" + j);
                    aos.closeArchiveEntry();
                } catch (Exception ee) {
                    // System.out.println("EximpExpert:append:" + ee.toString());
                    LOGGER.severe(":append:" + ee.toString());
                }
            }
        }
        // System.out.println("EximpExpert:tar_write:finish");
        return true;
        // System.out.println("EximpExpert:tar_write:exit");
    } catch (Exception e) {
        LOGGER.severe(":append:" + e.toString());
        return false;
    }
}
From source file:gdt.jgui.entity.index.JIndexPanel.java
/**
 * Execute the response locator.
 * @param console the main console.
 * @param locator$ the locator string.
 */
@Override
public void response(JMainConsole console, String locator$) {
    // System.out.println("JIndexPanel:response:locator="+locator$);
    try {
        Properties locator = Locator.toProperties(locator$);
        String action$ = locator.getProperty(JRequester.REQUESTER_ACTION);
        // System.out.println("IndexPanel:response:action="+action$);
        if (ACTION_CREATE_INDEX.equals(action$)) {
            String entihome$ = locator.getProperty(Entigrator.ENTIHOME);
            String entityKey$ = locator.getProperty(EntityHandler.ENTITY_KEY);
            String text$ = locator.getProperty(JTextEditor.TEXT);
            Entigrator entigrator = console.getEntigrator(entihome$);
            Sack index = entigrator.ent_new("index", text$);
            index = entigrator.ent_assignProperty(index, "index", index.getProperty("label"));
            index.putAttribute(new Core(null, "icon", "index.png"));
            entigrator.save(index);
            entigrator.saveHandlerIcon(JEntitiesPanel.class, "index.png");
            entityKey$ = index.getKey();
            JIndexPanel ip = new JIndexPanel();
            String ipLocator$ = ip.getLocator();
            ipLocator$ = Locator.append(ipLocator$, Entigrator.ENTIHOME, entihome$);
            ipLocator$ = Locator.append(ipLocator$, EntityHandler.ENTITY_KEY, entityKey$);
            JEntityPrimaryMenu.reindexEntity(console, ipLocator$);
            Stack<String> s = console.getTrack();
            s.pop();
            console.setTrack(s);
            JConsoleHandler.execute(console, ipLocator$);
            return;
        }
        if (ACTION_CREATE_GROUP.equals(action$)) {
            // System.out.println("IndexPanel:response:create group.locator=");
            String entihome$ = locator.getProperty(Entigrator.ENTIHOME);
            String entityKey$ = locator.getProperty(EntityHandler.ENTITY_KEY);
            String text$ = locator.getProperty(JTextEditor.TEXT);
            Entigrator entigrator = console.getEntigrator(entihome$);
            Sack index = entigrator.getEntityAtKey(entityKey$);
            String encodedSelection$ = locator.getProperty(SELECTION);
            byte[] ba = Base64.decodeBase64(encodedSelection$);
            String selection$ = new String(ba, "UTF-8");
            // System.out.println("IndexPanel:response:create group:selection="
            //         + Locator.remove(Locator.LOCATOR_ICON, selection$));
            locator = Locator.toProperties(selection$);
            String nodeType$ = locator.getProperty(NODE_TYPE);
            String parentKey$ = locator.getProperty(NODE_KEY);
            // System.out.println("IndexPanel:response:create group:parent key="+parentKey$);
            if (NODE_TYPE_GROUP.equals(nodeType$)) {
                if (!index.existsElement("index.jlocator"))
                    index.createElement("index.jlocator");
                String groupKey$ = Identity.key();
                Properties groupLocator = new Properties();
                groupLocator.setProperty(Locator.LOCATOR_TITLE, text$);
                groupLocator.setProperty(Locator.LOCATOR_ICON,
                        Support.readHandlerIcon(null, JEntitiesPanel.class, "group.png"));
                groupLocator.setProperty(NODE_TYPE, NODE_TYPE_GROUP);
                groupLocator.setProperty(NODE_KEY, groupKey$);
                groupLocator.setProperty(NODE_GROUP_KEY, parentKey$);
                groupLocator.setProperty(INDEX_KEY, entityKey$);
                String groupLocator$ = Locator.toString(groupLocator);
                index.putElementItem("index.jlocator", new Core(null, groupKey$, groupLocator$));
                index.putElementItem("index.selection", new Core(null, "selection", groupKey$));
                entigrator.save(index);
                JConsoleHandler.execute(console, getLocator());
            }
        }
        if (ACTION_RENAME_GROUP.equals(action$)) {
            String entihome$ = locator.getProperty(Entigrator.ENTIHOME);
            String entityKey$ = locator.getProperty(EntityHandler.ENTITY_KEY);
            String text$ = locator.getProperty(JTextEditor.TEXT);
            Entigrator entigrator = console.getEntigrator(entihome$);
            Sack index = entigrator.getEntityAtKey(entityKey$);
            String encodedSelection$ = locator.getProperty(SELECTION);
            byte[] ba = Base64.decodeBase64(encodedSelection$);
            String selection$ = new String(ba, "UTF-8");
            // System.out.println("IndexPanel:response:rename group:selection="+selection$);
            locator = Locator.toProperties(selection$);
            String nodeKey$ = locator.getProperty(NODE_KEY);
            locator.setProperty(Locator.LOCATOR_TITLE, text$);
            Core core = index.getElementItem("index.jlocator", nodeKey$);
            core.value = Locator.toString(locator);
            index.putElementItem("index.jlocator", core);
            entigrator.save(index);
            JConsoleHandler.execute(console, getLocator());
            return;
        }
        if (ACTION_RENAME_REFERENCE.equals(action$)) {
            String entihome$ = locator.getProperty(Entigrator.ENTIHOME);
            String entityKey$ = locator.getProperty(EntityHandler.ENTITY_KEY);
            String text$ = locator.getProperty(JTextEditor.TEXT);
            Entigrator entigrator = console.getEntigrator(entihome$);
            Sack index = entigrator.getEntityAtKey(entityKey$);
            String encodedSelection$ = locator.getProperty(SELECTION);
            byte[] ba = Base64.decodeBase64(encodedSelection$);
            String selection$ = new String(ba, "UTF-8");
            locator = Locator.toProperties(selection$);
            String nodeKey$ = locator.getProperty(NODE_KEY);
            if (!index.existsElement("index.title"))
                index.createElement("index.title");
            Core core = index.getElementItem("index.title", nodeKey$);
            if (core == null)
                core = new Core(null, nodeKey$, text$);
            else
                core.value = text$;
            index.putElementItem("index.title", core);
            entigrator.save(index);
            JConsoleHandler.execute(console, getLocator());
            return;
        }
        if (ACTION_SET_ICON_GROUP.equals(action$)) {
            String icon$ = locator.getProperty(JIconSelector.ICON);
            Entigrator entigrator = console.getEntigrator(entihome$);
            index = entigrator.getEntityAtKey(entityKey$);
            String encodedSelection$ = locator.getProperty(SELECTION);
            byte[] ba = Base64.decodeBase64(encodedSelection$);
            String selection$ = new String(ba, "UTF-8");
            // System.out.println("IndexPanel:response:set icon group:selection="+selection$);
            locator = Locator.toProperties(selection$);
            String nodeKey$ = locator.getProperty(NODE_KEY);
            locator.setProperty(Locator.LOCATOR_ICON, entigrator.readIconFromIcons(icon$));
            Core core = index.getElementItem("index.jlocator", nodeKey$);
            core.value = Locator.toString(locator);
            index.putElementItem("index.jlocator", core);
            entigrator.save(index);
            JConsoleHandler.execute(console, getLocator());
            return;
        }
        if (ACTION_SET_ICON_REFERENCE.equals(action$)) {
            String icon$ = locator.getProperty(JIconSelector.ICON);
            Entigrator entigrator = console.getEntigrator(entihome$);
            index = entigrator.getEntityAtKey(entityKey$);
            String encodedSelection$ = locator.getProperty(SELECTION);
            byte[] ba = Base64.decodeBase64(encodedSelection$);
            String selection$ = new String(ba, "UTF-8");
            locator = Locator.toProperties(selection$);
            String nodeKey$ = locator.getProperty(NODE_KEY);
            if (!index.existsElement("index.title"))
                index.createElement("index.title");
            Core core = index.getElementItem("index.title", nodeKey$);
            if (core != null)
                core.type = entigrator.readIconFromIcons(icon$);
            else
                core = new Core(entigrator.readIconFromIcons(icon$), nodeKey$, null);
            index.putElementItem("index.title", core);
            entigrator.save(index);
            JConsoleHandler.execute(console, getLocator());
            return;
        }
    } catch (Exception e) {
        LOGGER.severe(e.toString());
    }
}
From source file:com.cloudbees.hudson.plugins.folder.AbstractFolder.java
@Exported(name = "healthReport")
public List<HealthReport> getBuildHealthReports() {
    if (healthMetrics == null || healthMetrics.isEmpty()) {
        return Collections.<HealthReport>emptyList();
    }
    List<HealthReport> reports = healthReports;
    if (reports != null && nextHealthReportsRefreshMillis > System.currentTimeMillis()) {
        // cache is still valid
        return reports;
    }
    // ensure we refresh on average once every HEALTH_REPORT_CACHE_REFRESH_MIN but not all at once
    nextHealthReportsRefreshMillis = System.currentTimeMillis()
            + TimeUnit.MINUTES.toMillis(HEALTH_REPORT_CACHE_REFRESH_MIN * 3 / 4)
            + ENTROPY.nextInt((int) TimeUnit.MINUTES.toMillis(HEALTH_REPORT_CACHE_REFRESH_MIN / 2));
    reports = new ArrayList<HealthReport>();
    List<FolderHealthMetric.Reporter> reporters = new ArrayList<FolderHealthMetric.Reporter>(
            healthMetrics.size());
    boolean recursive = false;
    boolean topLevelOnly = true;
    for (FolderHealthMetric metric : healthMetrics) {
        recursive = recursive || metric.getType().isRecursive();
        topLevelOnly = topLevelOnly && metric.getType().isTopLevelItems();
        reporters.add(metric.reporter());
    }
    for (AbstractFolderProperty<?> p : getProperties()) {
        for (FolderHealthMetric metric : p.getHealthMetrics()) {
            recursive = recursive || metric.getType().isRecursive();
            topLevelOnly = topLevelOnly && metric.getType().isTopLevelItems();
            reporters.add(metric.reporter());
        }
    }
    if (recursive) {
        Stack<Iterable<? extends Item>> stack = new Stack<Iterable<? extends Item>>();
        stack.push(getItems());
        if (topLevelOnly) {
            while (!stack.isEmpty()) {
                for (Item item : stack.pop()) {
                    if (item instanceof TopLevelItem) {
                        for (FolderHealthMetric.Reporter reporter : reporters) {
                            reporter.observe(item);
                        }
                        if (item instanceof Folder) {
                            stack.push(((Folder) item).getItems());
                        }
                    }
                }
            }
        } else {
            while (!stack.isEmpty()) {
                for (Item item : stack.pop()) {
                    for (FolderHealthMetric.Reporter reporter : reporters) {
                        reporter.observe(item);
                    }
                    if (item instanceof Folder) {
                        stack.push(((Folder) item).getItems());
                    }
                }
            }
        }
    } else {
        for (Item item : getItems()) {
            for (FolderHealthMetric.Reporter reporter : reporters) {
                reporter.observe(item);
            }
        }
    }
    for (FolderHealthMetric.Reporter reporter : reporters) {
        reports.addAll(reporter.report());
    }
    for (AbstractFolderProperty<?> p : getProperties()) {
        reports.addAll(p.getHealthReports());
    }
    Collections.sort(reports);
    healthReports = reports; // idempotent write
    return reports;
}
From source file:com.amazonaws.mobileconnectors.s3.transfermanager.TransferManager.java
/**
 * Downloads all objects in the virtual directory designated by the
 * keyPrefix given to the destination directory given. All virtual
 * subdirectories will be downloaded recursively.
 *
 * @param bucketName The bucket containing the virtual directory
 * @param keyPrefix The key prefix for the virtual directory, or null for
 *            the entire bucket. All subdirectories will be downloaded
 *            recursively.
 * @param destinationDirectory The directory to place downloaded files.
 *            Subdirectories will be created as necessary.
 */
public MultipleFileDownload downloadDirectory(String bucketName, String keyPrefix, File destinationDirectory) {
    if (keyPrefix == null) {
        keyPrefix = "";
    }
    final List<S3ObjectSummary> objectSummaries = new LinkedList<S3ObjectSummary>();
    final Stack<String> commonPrefixes = new Stack<String>();
    commonPrefixes.add(keyPrefix);
    long totalSize = 0;
    // Recurse all virtual subdirectories to get a list of object summaries.
    // This is a depth-first search.
    do {
        final String prefix = commonPrefixes.pop();
        ObjectListing listObjectsResponse = null;
        do {
            if (listObjectsResponse == null) {
                final ListObjectsRequest listObjectsRequest = new ListObjectsRequest()
                        .withBucketName(bucketName).withDelimiter(DEFAULT_DELIMITER).withPrefix(prefix);
                listObjectsResponse = s3.listObjects(listObjectsRequest);
            } else {
                listObjectsResponse = s3.listNextBatchOfObjects(listObjectsResponse);
            }
            for (final S3ObjectSummary s : listObjectsResponse.getObjectSummaries()) {
                // Skip any files that are also virtual directories, since
                // we can't save both a directory and a file of the same
                // name.
                if (!s.getKey().equals(prefix)
                        && !listObjectsResponse.getCommonPrefixes().contains(s.getKey() + DEFAULT_DELIMITER)) {
                    objectSummaries.add(s);
                    totalSize += s.getSize();
                } else {
                    log.debug("Skipping download for object " + s.getKey()
                            + " since it is also a virtual directory");
                }
            }
            commonPrefixes.addAll(listObjectsResponse.getCommonPrefixes());
        } while (listObjectsResponse.isTruncated());
    } while (!commonPrefixes.isEmpty());
    /* This is the hook for adding additional progress listeners */
    final ProgressListenerChain additionalListeners = new ProgressListenerChain();
    final TransferProgress transferProgress = new TransferProgress();
    transferProgress.setTotalBytesToTransfer(totalSize);
    /*
     * Bind additional progress listeners to this
     * MultipleFileTransferProgressUpdatingListener to receive
     * ByteTransferred events from each single-file download implementation.
     */
    final ProgressListener listener = new MultipleFileTransferProgressUpdatingListener(transferProgress,
            additionalListeners);
    final List<DownloadImpl> downloads = new ArrayList<DownloadImpl>();
    final String description = "Downloading from " + bucketName + "/" + keyPrefix;
    final MultipleFileDownloadImpl multipleFileDownload = new MultipleFileDownloadImpl(description,
            transferProgress, additionalListeners, keyPrefix, bucketName, downloads);
    multipleFileDownload.setMonitor(new MultipleFileTransferMonitor(multipleFileDownload, downloads));
    final CountDownLatch latch = new CountDownLatch(1);
    final MultipleFileTransferStateChangeListener transferListener = new MultipleFileTransferStateChangeListener(
            latch, multipleFileDownload);
    for (final S3ObjectSummary summary : objectSummaries) {
        // TODO: non-standard delimiters
        final File f = new File(destinationDirectory, summary.getKey());
        final File parentFile = f.getParentFile();
        if (!parentFile.exists() && !parentFile.mkdirs()) {
            throw new RuntimeException("Couldn't create parent directories for " + f.getAbsolutePath());
        }
        // All the single-file downloads share the same
        // MultipleFileTransferProgressUpdatingListener and
        // MultipleFileTransferStateChangeListener
        downloads.add((DownloadImpl) doDownload(
                new GetObjectRequest(summary.getBucketName(), summary.getKey())
                        .withGeneralProgressListener(listener),
                f, transferListener, null, false));
    }
    if (downloads.isEmpty()) {
        multipleFileDownload.setState(TransferState.Completed);
        return multipleFileDownload;
    }
    // Notify all state changes waiting for the downloads to all be queued
    // to wake up and continue.
    latch.countDown();
    return multipleFileDownload;
}
From source file:com.amazonaws.services.s3.transfer.TransferManager.java
/**
 * Downloads all objects in the virtual directory designated by the
 * keyPrefix given to the destination directory given. All virtual
 * subdirectories will be downloaded recursively.
 *
 * @param bucketName
 *            The bucket containing the virtual directory
 * @param keyPrefix
 *            The key prefix for the virtual directory, or null for the
 *            entire bucket. All subdirectories will be downloaded
 *            recursively.
 * @param destinationDirectory
 *            The directory to place downloaded files. Subdirectories will
 *            be created as necessary.
 */
public MultipleFileDownload downloadDirectory(String bucketName, String keyPrefix, File destinationDirectory) {
    if (keyPrefix == null)
        keyPrefix = "";
    List<S3ObjectSummary> objectSummaries = new LinkedList<S3ObjectSummary>();
    Stack<String> commonPrefixes = new Stack<String>();
    commonPrefixes.add(keyPrefix);
    long totalSize = 0;
    // Recurse all virtual subdirectories to get a list of object summaries.
    // This is a depth-first search.
    do {
        String prefix = commonPrefixes.pop();
        ObjectListing listObjectsResponse = null;
        do {
            if (listObjectsResponse == null) {
                ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucketName)
                        .withDelimiter(DEFAULT_DELIMITER).withPrefix(prefix);
                listObjectsResponse = s3.listObjects(listObjectsRequest);
            } else {
                listObjectsResponse = s3.listNextBatchOfObjects(listObjectsResponse);
            }
            for (S3ObjectSummary s : listObjectsResponse.getObjectSummaries()) {
                // Skip any files that are also virtual directories, since
                // we can't save both a directory and a file of the same
                // name.
                if (!s.getKey().equals(prefix)
                        && !listObjectsResponse.getCommonPrefixes().contains(s.getKey() + DEFAULT_DELIMITER)) {
                    objectSummaries.add(s);
                    totalSize += s.getSize();
                } else {
                    log.debug("Skipping download for object " + s.getKey()
                            + " since it is also a virtual directory");
                }
            }
            commonPrefixes.addAll(listObjectsResponse.getCommonPrefixes());
        } while (listObjectsResponse.isTruncated());
    } while (!commonPrefixes.isEmpty());
    /* This is the hook for adding additional progress listeners */
    ProgressListenerChain additionalListeners = new ProgressListenerChain();
    TransferProgress transferProgress = new TransferProgress();
    transferProgress.setTotalBytesToTransfer(totalSize);
    /*
     * Bind additional progress listeners to this
     * MultipleFileTransferProgressUpdatingListener to receive
     * ByteTransferred events from each single-file download implementation.
     */
    ProgressListener listener = new MultipleFileTransferProgressUpdatingListener(transferProgress,
            additionalListeners);
    List<DownloadImpl> downloads = new ArrayList<DownloadImpl>();
    String description = "Downloading from " + bucketName + "/" + keyPrefix;
    final MultipleFileDownloadImpl multipleFileDownload = new MultipleFileDownloadImpl(description,
            transferProgress, additionalListeners, keyPrefix, bucketName, downloads);
    multipleFileDownload.setMonitor(new MultipleFileTransferMonitor(multipleFileDownload, downloads));
    final CountDownLatch latch = new CountDownLatch(1);
    MultipleFileTransferStateChangeListener transferListener = new MultipleFileTransferStateChangeListener(
            latch, multipleFileDownload);
    for (S3ObjectSummary summary : objectSummaries) {
        // TODO: non-standard delimiters
        File f = new File(destinationDirectory, summary.getKey());
        File parentFile = f.getParentFile();
        if (!parentFile.exists() && !parentFile.mkdirs()) {
            throw new RuntimeException("Couldn't create parent directories for " + f.getAbsolutePath());
        }
        // All the single-file downloads share the same
        // MultipleFileTransferProgressUpdatingListener and
        // MultipleFileTransferStateChangeListener
        downloads.add((DownloadImpl) doDownload(
                new GetObjectRequest(summary.getBucketName(), summary.getKey())
                        .<GetObjectRequest>withGeneralProgressListener(listener),
                f, transferListener, null, false));
    }
    if (downloads.isEmpty()) {
        multipleFileDownload.setState(TransferState.Completed);
        return multipleFileDownload;
    }
    // Notify all state changes waiting for the downloads to all be queued
    // to wake up and continue.
    latch.countDown();
    return multipleFileDownload;
}
From source file:org.alfresco.repo.model.filefolder.FileFolderServiceImpl.java
/**
 * A deep version of listSimple, which recursively walks down the tree from a given starting point, returning
 * the node refs of files or folders found along the way.
 * <p>
 * The folder filter is called for each sub-folder to determine whether to search in that sub-folder; should a subfolder be excluded
 * then all its children are excluded as well.
 *
 * @param contextNodeRef the starting point.
 * @param files return nodes of type files.
 * @param folders return nodes of type folders.
 * @param folderFilter filter controls which folders to search. If null then all subfolders are searched.
 * @return list of node references
 */
/*
 * <p>
 * MER: I've added this rather than changing listSimple to minimise the risk of breaking
 * the existing code. This is a quick performance improvement between using
 * XPath which is awful or adding new methods to the NodeService/DB. This is also a dangerous method in that it can return a
 * lot of data and take a long time.
 */
private List<NodeRef> listSimpleDeep(NodeRef contextNodeRef, boolean files, boolean folders,
        SubFolderFilter folderFilter) {
    if (logger.isDebugEnabled()) {
        logger.debug("searchSimpleDeep contextNodeRef:" + contextNodeRef);
    }
    // To hold the results.
    List<NodeRef> result = new ArrayList<NodeRef>();
    // Build a list of folder types
    Set<QName> folderTypeQNames = buildFolderTypes();
    Set<QName> fileTypeQNames = (files ? buildFileTypes() : new HashSet<QName>(0));
    if (!folders && !files) {
        return Collections.emptyList();
    }
    // Shortcut
    if (folderTypeQNames.size() == 0) {
        return Collections.emptyList();
    }
    Stack<NodeRef> toSearch = new Stack<NodeRef>();
    toSearch.push(contextNodeRef);
    // Now we need to walk down the folders.
    while (!toSearch.empty()) {
        NodeRef currentDir = toSearch.pop();
        List<ChildAssociationRef> folderAssocRefs = nodeService.getChildAssocs(currentDir, folderTypeQNames);
        for (ChildAssociationRef folderRef : folderAssocRefs) {
            // We have some child folders
            boolean include = true;
            if (folderFilter != null) {
                include = folderFilter.isEnterSubfolder(folderRef);
                if (include) {
                    // yes search in these subfolders
                    toSearch.push(folderRef.getChildRef());
                }
            } else {
                // No filter - Add the folders in the currentDir
                toSearch.push(folderRef.getChildRef());
            }
            if (folders && include) {
                result.add(folderRef.getChildRef());
            }
        }
        if (files) {
            // Add the files in the current dir
            List<ChildAssociationRef> fileAssocRefs = nodeService.getChildAssocs(currentDir, fileTypeQNames);
            for (ChildAssociationRef fileRef : fileAssocRefs) {
                result.add(fileRef.getChildRef());
            }
        }
    }
    if (logger.isDebugEnabled()) {
        logger.debug("searchSimpleDeep finished size:" + result.size());
    }
    // Done
    return result;
}
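Several of the examples above (kilim's BasicBlock, the Jenkins AbstractFolder, the S3 TransferManager, and this Alfresco method) use the same idiom: an iterative depth-first traversal driven by push() and pop() instead of recursion. A minimal generic sketch of that pattern follows; the Node type and visitAll method are hypothetical stand-ins for the various tree, block, and folder types used above.

import java.util.ArrayList;
import java.util.List;
import java.util.Stack;

public class IterativeDfsExample {
    // Hypothetical node type used only for illustration.
    static class Node {
        final String name;
        final List<Node> children = new ArrayList<Node>();
        Node(String name) { this.name = name; }
    }

    // Visit every node reachable from root, depth-first, without recursion.
    static List<String> visitAll(Node root) {
        List<String> visited = new ArrayList<String>();
        Stack<Node> stack = new Stack<Node>();
        stack.push(root);
        while (!stack.isEmpty()) {
            Node current = stack.pop(); // take the most recently discovered node
            visited.add(current.name);
            for (Node child : current.children) {
                stack.push(child); // children are processed before earlier siblings
            }
        }
        return visited;
    }

    public static void main(String[] args) {
        Node root = new Node("root");
        Node a = new Node("a");
        Node b = new Node("b");
        root.children.add(a);
        root.children.add(b);
        a.children.add(new Node("a1"));
        System.out.println(visitAll(root)); // prints [root, b, a, a1]
    }
}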