List of usage examples for java.util.Stack.push
public E push(E item)
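Stack.push pushes an item onto the top of the stack and returns the item argument itself; since Stack extends Vector, the call has exactly the same effect as addElement(item). (The Stack Javadoc recommends Deque implementations such as ArrayDeque for new code; the examples below come from codebases that use the classic class.) Before the real-world examples, a minimal, self-contained sketch of the basic LIFO behavior:

import java.util.Stack;

public class StackPushDemo {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<>();
        stack.push("first");              // bottom of the stack
        stack.push("second");
        String top = stack.push("third"); // push returns its argument
        System.out.println(top);          // third
        System.out.println(stack.peek()); // third -- last pushed is on top
        System.out.println(stack.pop());  // third
        System.out.println(stack);        // [first, second]
    }
}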
From source file: forge.limited.BoosterDraft.java
protected boolean generateProduct() {
    switch (this.draftFormat) {
    case Full: // Draft from all cards in Forge
        final Supplier<List<PaperCard>> s = new UnOpenedProduct(SealedProduct.Template.genericBooster);
        for (int i = 0; i < 3; i++) {
            this.product.add(s);
        }
        IBoosterDraft.LAND_SET_CODE[0] = CardEdition.Predicates
                .getRandomSetWithAllBasicLands(FModel.getMagicDb().getEditions());
        break;

    case Block: // Draft from cards by block or set
    case FantasyBlock:
        final List<CardBlock> blocks = new ArrayList<>();
        final IStorage<CardBlock> storage = this.draftFormat == LimitedPoolType.Block ? FModel.getBlocks()
                : FModel.getFantasyBlocks();

        for (final CardBlock b : storage) {
            if (b.getCntBoostersDraft() > 0) {
                blocks.add(b);
            }
        }

        final CardBlock block = SGuiChoose.oneOrNone("Choose Block", blocks);
        if (block == null) {
            return false;
        }

        final List<CardEdition> cardSets = block.getSets();
        if (cardSets.isEmpty()) {
            SOptionPane.showErrorDialog(block.toString() + " does not contain any set combinations.");
            return false;
        }

        final Stack<String> sets = new Stack<>();
        for (int k = cardSets.size() - 1; k >= 0; k--) {
            sets.add(cardSets.get(k).getCode());
        }

        for (final String setCode : block.getMetaSetNames()) {
            if (block.getMetaSet(setCode).isDraftable()) {
                sets.push(setCode); // to the beginning
            }
        }

        final int nPacks = block.getCntBoostersDraft();

        if (sets.size() > 1) {
            final Object p = SGuiChoose.oneOrNone("Choose Set Combination", getSetCombos(sets));
            if (p == null) {
                return false;
            }

            final String[] pp = p.toString().split("/");
            for (int i = 0; i < nPacks; i++) {
                this.product.add(block.getBooster(pp[i]));
            }
        } else {
            final IUnOpenedProduct product1 = block.getBooster(sets.get(0));
            for (int i = 0; i < nPacks; i++) {
                this.product.add(product1);
            }
        }

        IBoosterDraft.LAND_SET_CODE[0] = block.getLandSet();
        break;

    case Custom:
        final List<CustomLimited> myDrafts = loadCustomDrafts();
        if (myDrafts.isEmpty()) {
            SOptionPane.showMessageDialog("No custom draft files found.");
        } else {
            final CustomLimited customDraft = SGuiChoose.oneOrNone("Choose Custom Draft", myDrafts);
            if (customDraft == null) {
                return false;
            }
            this.setupCustomDraft(customDraft);
        }
        break;

    default:
        throw new NoSuchElementException("Draft for mode " + this.draftFormat + " has not been set up!");
    }

    this.pack = this.get8BoosterPack();
    return true;
}
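A detail worth noting in this example: sets.add(...) and sets.push(...) do the same thing, because Stack extends Vector and push is defined as addElement. Both append at the end of the vector, which is the top of the stack, so the in-source comment "// to the beginning" is best read as "onto the top" -- the meta-set codes pushed in the second loop sit above the edition codes added in the first. A minimal sketch of the equivalence:

import java.util.Stack;

class PushVersusAdd {
    public static void main(String[] args) {
        Stack<String> sets = new Stack<>();
        sets.add("EditionB");  // Vector.add appends: bottom of the stack
        sets.add("EditionA");
        sets.push("MetaSet");  // push also appends, landing on top
        System.out.println(sets);        // [EditionB, EditionA, MetaSet]
        System.out.println(sets.peek()); // MetaSet -- pushed last, popped first
    }
}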
From source file: com.haulmont.cuba.core.app.EntityDiffManager.java
/**
 * Generate class difference for selected not null object
 *
 * @param diffObject   Object
 * @param firstValue   First value
 * @param secondValue  Second value
 * @param viewProperty View property
 * @param metaProperty Meta property
 * @param diffBranch   Diff branch
 * @return Property difference
 */
protected EntityPropertyDiff generateClassDiffFor(Object diffObject, @Nullable Object firstValue,
        @Nullable Object secondValue, ViewProperty viewProperty, MetaProperty metaProperty,
        Stack<Object> diffBranch) {
    // link
    boolean isLinkChange = !Objects.equals(firstValue, secondValue);
    isLinkChange = !(diffObject instanceof EmbeddableEntity) && isLinkChange;

    EntityClassPropertyDiff classPropertyDiff = new EntityClassPropertyDiff(firstValue, secondValue,
            metaProperty, isLinkChange);

    boolean isInternalChange = false;
    diffBranch.push(diffObject);

    List<EntityPropertyDiff> propertyDiffs = getPropertyDiffs(viewProperty.getView(), (Entity) firstValue,
            (Entity) secondValue, diffBranch);

    diffBranch.pop();

    if (!propertyDiffs.isEmpty()) {
        isInternalChange = true;
        classPropertyDiff.setPropertyDiffs(propertyDiffs);
    }

    if (isInternalChange || isLinkChange)
        return classPropertyDiff;
    else
        return null;
}
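The push/pop pair around the recursive getPropertyDiffs call is the classic way to keep a stack holding the current ancestor chain during a depth-first comparison -- for instance, to detect that an object is already being processed further up the branch. A minimal, self-contained sketch of the idiom (the Node type is an assumption for illustration):

import java.util.Stack;

class AncestorTracking {
    static class Node {
        String name;
        Node[] children = new Node[0];
        Node(String name) { this.name = name; }
    }

    // Visits the tree; 'branch' always holds the ancestors of 'node'.
    static void visit(Node node, Stack<Node> branch) {
        if (branch.contains(node)) {
            return; // cycle: this node is already on the current branch
        }
        System.out.println("  ".repeat(branch.size()) + node.name);
        branch.push(node); // enter: node becomes the innermost ancestor
        for (Node child : node.children) {
            visit(child, branch);
        }
        branch.pop();      // leave: restore the branch for siblings
    }

    public static void main(String[] args) {
        Node root = new Node("root");
        root.children = new Node[] { new Node("a"), new Node("b") };
        visit(root, new Stack<>());
    }
}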
From source file: org.apache.flink.streaming.api.datastream.KeyedStream.java
/**
 * Validates that a given type of element (as encoded by the provided {@link TypeInformation}) can be
 * used as a key in the {@code DataStream.keyBy()} operation. This is done by searching depth-first the
 * key type and checking if each of the composite types satisfies the required conditions
 * (see {@link #validateKeyTypeIsHashable(TypeInformation)}).
 *
 * @param keyType The {@link TypeInformation} of the key.
 */
private TypeInformation<KEY> validateKeyType(TypeInformation<KEY> keyType) {
    Stack<TypeInformation<?>> stack = new Stack<>();
    stack.push(keyType);

    List<TypeInformation<?>> unsupportedTypes = new ArrayList<>();

    while (!stack.isEmpty()) {
        TypeInformation<?> typeInfo = stack.pop();

        if (!validateKeyTypeIsHashable(typeInfo)) {
            unsupportedTypes.add(typeInfo);
        }

        if (typeInfo instanceof TupleTypeInfoBase) {
            for (int i = 0; i < typeInfo.getArity(); i++) {
                stack.push(((TupleTypeInfoBase) typeInfo).getTypeAt(i));
            }
        }
    }

    if (!unsupportedTypes.isEmpty()) {
        throw new InvalidProgramException("Type " + keyType + " cannot be used as key. Contained "
                + "UNSUPPORTED key types: " + StringUtils.join(unsupportedTypes, ", ") + ". Look "
                + "at the keyBy() documentation for the conditions a type has to satisfy in order to be "
                + "eligible for a key.");
    }

    return keyType;
}
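The pattern above -- seed a stack with the root, then pop a node and push its children until the stack drains -- is the standard iterative depth-first traversal. A minimal, self-contained sketch of the same idea over a hypothetical tree node type (TreeNode and its fields are assumptions for illustration):

import java.util.List;
import java.util.Stack;

class TreeNode {
    final String name;
    final List<TreeNode> children;

    TreeNode(String name, List<TreeNode> children) {
        this.name = name;
        this.children = children;
    }
}

class DepthFirstWalk {
    static void visitAll(TreeNode root) {
        Stack<TreeNode> stack = new Stack<>();
        stack.push(root);                    // seed the traversal
        while (!stack.isEmpty()) {
            TreeNode node = stack.pop();     // most recently pushed first
            System.out.println(node.name);
            for (TreeNode child : node.children) {
                stack.push(child);           // defer children; depth-first order
            }
        }
    }

    public static void main(String[] args) {
        TreeNode leaf1 = new TreeNode("leaf1", List.of());
        TreeNode leaf2 = new TreeNode("leaf2", List.of());
        visitAll(new TreeNode("root", List.of(leaf1, leaf2)));
    }
}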
From source file: org.apache.fop.render.rtf.rtflib.rtfdoc.RtfTextrun.java
/**
 * Inserts paragraph break before all close group marks.
 *
 * @throws IOException for I/O problems
 * @return The paragraph break element
 */
public RtfParagraphBreak addParagraphBreak() throws IOException {
    // get copy of children list
    List children = getChildren();
    Stack tmp = new Stack();
    RtfParagraphBreak par = null;

    // delete all previous CloseGroupMark
    int deletedCloseGroupCount = 0;

    ListIterator lit = children.listIterator(children.size());
    while (lit.hasPrevious() && (lit.previous() instanceof RtfCloseGroupMark)) {
        tmp.push(Integer.valueOf(((RtfCloseGroupMark) lit.next()).getBreakType()));
        lit.remove();
        deletedCloseGroupCount++;
    }

    if (children.size() != 0) {
        // add paragraph break and restore all deleted close group marks
        setChildren(children);
        par = new RtfParagraphBreak(this, writer);
        for (int i = 0; i < deletedCloseGroupCount; i++) {
            addCloseGroupMark(((Integer) tmp.pop()).intValue());
        }
    }
    return par;
}
From source file: com.abstratt.mdd.internal.frontend.textuml.TextUMLCompiler.java
/**
 * Given a position in a compilation unit, finds the contextual package name.
 *
 * @param toParse source of compilation unit
 * @param line line number, starting from 1
 * @param col column number, starting from 1
 * @return the name of the contextual package
 */
public String findPackageName(String toParse, final int line, final int col) {
    Token token = findTokenAt(toParse, line, col);
    if (token == null)
        return null;
    final Stack<String> segments = new Stack<String>();
    for (Node current = token; current != null; current = current.parent()) {
        current.apply(new AnalysisAdapter() {
            @Override
            public void caseAStart(AStart node) {
                segments.push(TextUMLCore.getSourceMiner().getQualifiedIdentifier(
                        ((APackageHeading) node.getPackageHeading()).getQualifiedIdentifier()));
            }

            public void caseASubNamespace(ASubNamespace node) {
                segments.push(TextUMLCore.getSourceMiner()
                        .getQualifiedIdentifier(((APackageHeading) node.getPackageHeading())));
            }
        });
    }
    if (segments.isEmpty())
        return null;
    StringBuffer result = new StringBuffer();
    while (!segments.isEmpty()) {
        result.append(segments.pop());
        result.append(NamedElement.SEPARATOR);
    }
    result.delete((result.length() - NamedElement.SEPARATOR.length()), result.length());
    return result.toString();
}
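The stack here exists purely to reverse order: segments are pushed innermost-first while walking up the AST via parent(), then popped outermost-first to build the qualified name. A minimal, self-contained sketch of that reversal idiom (the "::" separator is an assumption standing in for NamedElement.SEPARATOR):

import java.util.Stack;

class QualifiedName {
    // Builds "com::example::util" from segments discovered innermost-first.
    static String join(String... innermostFirst) {
        Stack<String> segments = new Stack<>();
        for (String segment : innermostFirst) {
            segments.push(segment);          // last pushed = outermost
        }
        StringBuilder result = new StringBuilder();
        while (!segments.isEmpty()) {
            result.append(segments.pop());   // pops in reverse insertion order
            if (!segments.isEmpty()) {
                result.append("::");         // assumed separator
            }
        }
        return result.toString();
    }

    public static void main(String[] args) {
        System.out.println(join("util", "example", "com")); // com::example::util
    }
}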
From source file: com.cyclopsgroup.waterview.tool.PopulateToolsValve.java
/**
 * Override or implement method of parent class or interface
 *
 * @see com.cyclopsgroup.waterview.Valve#invoke(com.cyclopsgroup.waterview.UIRuntime)
 */
public void invoke(UIRuntime runtime) throws Exception {
    Stack processedTools = new Stack();
    for (Iterator i = toolDefinitions.values().iterator(); i.hasNext();) {
        ToolDef def = (ToolDef) i.next();
        UITool tool = null;
        try {
            if (def.lifecycle == ToolLifecycle.REQUEST) {
                tool = createRequestTool(runtime, def);
            } else if (def.lifecycle == ToolLifecycle.SESSION) {
                tool = createSessionTool(runtime, def);
            } else if (def.lifecycle == ToolLifecycle.APPLICATION) {
                tool = createApplicationTool(runtime, def);
            }
            tool.setName(def.name);
            processedTools.push(tool);
            runtime.getPageContext().put(def.name, tool);
            if (tool instanceof RequestListener) {
                ((RequestListener) tool).prepareForRequest(runtime);
            }
        } catch (Exception e) {
            getLogger().warn("Tool initialization error", e);
        }
    }
    try {
        invokeNext(runtime);
    } catch (Exception e) {
        throw e;
    } finally {
        while (!processedTools.isEmpty()) {
            UITool tool = (UITool) processedTools.pop();
            runtime.getPageContext().remove(tool.getName());
            try {
                if (tool instanceof RequestListener) {
                    ((RequestListener) tool).disposeForRequest(runtime);
                }
            } catch (Exception e) {
                getLogger().warn("Tool disposing error", e);
            }
        }
    }
}
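Pushing each successfully initialized tool lets the finally block pop and dispose them in reverse initialization order -- the usual LIFO cleanup idiom. A minimal, runnable sketch of the shape (names are illustrative, not from the Waterview API):

import java.util.Stack;

class LifoCleanup {
    public static void main(String[] args) {
        Stack<String> opened = new Stack<>();
        try {
            for (String name : new String[] { "config", "session", "renderer" }) {
                System.out.println("open " + name);
                opened.push(name);            // record in initialization order
            }
            // ... do work that may throw ...
        } finally {
            while (!opened.isEmpty()) {
                // dispose in reverse order: renderer, session, config
                System.out.println("close " + opened.pop());
            }
        }
    }
}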
From source file: org.apache.hadoop.tools.DistCpV1.java
/**
 * Initialize DFSCopyFileMapper specific job-configuration.
 * @param conf : The dfs/mapred configuration.
 * @param jobConf : The handle to the jobConf object to be initialized.
 * @param args Arguments
 * @return true if it is necessary to launch a job.
 */
static boolean setup(Configuration conf, JobConf jobConf, final Arguments args) throws IOException {
    jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());

    // set boolean values
    final boolean update = args.flags.contains(Options.UPDATE);
    final boolean skipCRCCheck = args.flags.contains(Options.SKIPCRC);
    final boolean overwrite = !update && args.flags.contains(Options.OVERWRITE) && !args.dryrun;
    jobConf.setBoolean(Options.UPDATE.propertyname, update);
    jobConf.setBoolean(Options.SKIPCRC.propertyname, skipCRCCheck);
    jobConf.setBoolean(Options.OVERWRITE.propertyname, overwrite);
    jobConf.setBoolean(Options.IGNORE_READ_FAILURES.propertyname,
            args.flags.contains(Options.IGNORE_READ_FAILURES));
    jobConf.setBoolean(Options.PRESERVE_STATUS.propertyname, args.flags.contains(Options.PRESERVE_STATUS));

    final String randomId = getRandomId();
    JobClient jClient = new JobClient(jobConf);
    Path stagingArea;
    try {
        stagingArea = JobSubmissionFiles.getStagingDir(jClient.getClusterHandle(), conf);
    } catch (InterruptedException ie) {
        throw new IOException(ie);
    }
    Path jobDirectory = new Path(stagingArea + NAME + "_" + randomId);
    FsPermission mapredSysPerms = new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION);
    FileSystem.mkdirs(jClient.getFs(), jobDirectory, mapredSysPerms);
    jobConf.set(JOB_DIR_LABEL, jobDirectory.toString());

    long maxBytesPerMap = conf.getLong(BYTES_PER_MAP_LABEL, BYTES_PER_MAP);

    FileSystem dstfs = args.dst.getFileSystem(conf);

    // get tokens for all the required FileSystems..
    TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(), new Path[] { args.dst }, conf);

    boolean dstExists = dstfs.exists(args.dst);
    boolean dstIsDir = false;
    if (dstExists) {
        dstIsDir = dstfs.getFileStatus(args.dst).isDirectory();
    }

    // default logPath
    Path logPath = args.log;
    if (logPath == null) {
        String filename = "_distcp_logs_" + randomId;
        if (!dstExists || !dstIsDir) {
            Path parent = args.dst.getParent();
            if (null == parent) {
                // If dst is '/' on S3, it might not exist yet, but dst.getParent()
                // will return null. In this case, use '/' as its own parent to prevent
                // NPE errors below.
                parent = args.dst;
            }
            if (!dstfs.exists(parent)) {
                dstfs.mkdirs(parent);
            }
            logPath = new Path(parent, filename);
        } else {
            logPath = new Path(args.dst, filename);
        }
    }
    FileOutputFormat.setOutputPath(jobConf, logPath);

    // create src list, dst list
    FileSystem jobfs = jobDirectory.getFileSystem(jobConf);

    Path srcfilelist = new Path(jobDirectory, "_distcp_src_files");
    Path dstfilelist = new Path(jobDirectory, "_distcp_dst_files");
    Path dstdirlist = new Path(jobDirectory, "_distcp_dst_dirs");
    jobConf.set(SRC_LIST_LABEL, srcfilelist.toString());
    jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString());
    int srcCount = 0, cnsyncf = 0, dirsyn = 0;
    long fileCount = 0L, dirCount = 0L, byteCount = 0L, cbsyncs = 0L, skipFileCount = 0L, skipByteCount = 0L;
    try (SequenceFile.Writer src_writer = SequenceFile.createWriter(jobConf, Writer.file(srcfilelist),
            Writer.keyClass(LongWritable.class), Writer.valueClass(FilePair.class),
            Writer.compression(SequenceFile.CompressionType.NONE));
            SequenceFile.Writer dst_writer = SequenceFile.createWriter(jobConf, Writer.file(dstfilelist),
                    Writer.keyClass(Text.class), Writer.valueClass(Text.class),
                    Writer.compression(SequenceFile.CompressionType.NONE));
            SequenceFile.Writer dir_writer = SequenceFile.createWriter(jobConf, Writer.file(dstdirlist),
                    Writer.keyClass(Text.class), Writer.valueClass(FilePair.class),
                    Writer.compression(SequenceFile.CompressionType.NONE))) {
        // handle the case where the destination directory doesn't exist
        // and we've only a single src directory OR we're updating/overwriting
        // the contents of the destination directory.
        final boolean special = (args.srcs.size() == 1 && !dstExists) || update || overwrite;

        Path basedir = null;
        HashSet<Path> parentDirsToCopy = new HashSet<Path>();
        if (args.basedir != null) {
            FileSystem basefs = args.basedir.getFileSystem(conf);
            basedir = args.basedir.makeQualified(basefs.getUri(), basefs.getWorkingDirectory());
            if (!basefs.isDirectory(basedir)) {
                throw new IOException("Basedir " + basedir + " is not a directory.");
            }
        }

        for (Iterator<Path> srcItr = args.srcs.iterator(); srcItr.hasNext();) {
            final Path src = srcItr.next();
            FileSystem srcfs = src.getFileSystem(conf);
            FileStatus srcfilestat = srcfs.getFileStatus(src);
            Path root = special && srcfilestat.isDirectory() ? src : src.getParent();
            if (dstExists && !dstIsDir && (args.srcs.size() > 1 || srcfilestat.isDirectory())) {
                // destination should not be a file
                throw new IOException("Destination " + args.dst + " should be a dir"
                        + " if multiple source paths are there OR if"
                        + " the source path is a dir");
            }

            if (basedir != null) {
                root = basedir;
                Path parent = src.getParent().makeQualified(srcfs.getUri(), srcfs.getWorkingDirectory());
                while (parent != null && !parent.equals(basedir)) {
                    if (!parentDirsToCopy.contains(parent)) {
                        parentDirsToCopy.add(parent);
                        String dst = makeRelative(root, parent);
                        FileStatus pst = srcfs.getFileStatus(parent);
                        src_writer.append(new LongWritable(0), new FilePair(pst, dst));
                        dst_writer.append(new Text(dst), new Text(parent.toString()));
                        dir_writer.append(new Text(dst), new FilePair(pst, dst));
                        if (++dirsyn > SYNC_FILE_MAX) {
                            dirsyn = 0;
                            dir_writer.sync();
                        }
                    }
                    parent = parent.getParent();
                }

                if (parent == null) {
                    throw new IOException("Basedir " + basedir + " is not a prefix of source path " + src);
                }
            }

            if (srcfilestat.isDirectory()) {
                ++srcCount;
                final String dst = makeRelative(root, src);
                if (!update || !dirExists(conf, new Path(args.dst, dst))) {
                    ++dirCount;
                    src_writer.append(new LongWritable(0), new FilePair(srcfilestat, dst));
                }
                dst_writer.append(new Text(dst), new Text(src.toString()));
            }

            Stack<FileStatus> pathstack = new Stack<FileStatus>();
            for (pathstack.push(srcfilestat); !pathstack.empty();) {
                FileStatus cur = pathstack.pop();
                FileStatus[] children = srcfs.listStatus(cur.getPath());
                for (int i = 0; i < children.length; i++) {
                    boolean skipPath = false;
                    final FileStatus child = children[i];
                    final String dst = makeRelative(root, child.getPath());
                    ++srcCount;

                    if (child.isDirectory()) {
                        pathstack.push(child);
                        if (!update || !dirExists(conf, new Path(args.dst, dst))) {
                            ++dirCount;
                        } else {
                            skipPath = true; // skip creating dir at destination
                        }
                    } else {
                        Path destPath = new Path(args.dst, dst);
                        if (cur.isFile() && (args.srcs.size() == 1)) {
                            // Copying a single file; use dst path provided by user as
                            // destination file rather than destination directory
                            Path dstparent = destPath.getParent();
                            FileSystem destFileSys = destPath.getFileSystem(jobConf);
                            if (!(destFileSys.exists(dstparent)
                                    && destFileSys.getFileStatus(dstparent).isDirectory())) {
                                destPath = dstparent;
                            }
                        }
                        // skip path if the src and the dst files are the same.
                        skipPath = update && sameFile(srcfs, child, dstfs, destPath, skipCRCCheck);
                        // skip path if it exceed file limit or size limit
                        skipPath |= fileCount == args.filelimit || byteCount + child.getLen() > args.sizelimit;

                        if (!skipPath) {
                            ++fileCount;
                            byteCount += child.getLen();

                            if (LOG.isTraceEnabled()) {
                                LOG.trace("adding file " + child.getPath());
                            }

                            ++cnsyncf;
                            cbsyncs += child.getLen();
                            if (cnsyncf > SYNC_FILE_MAX || cbsyncs > maxBytesPerMap) {
                                src_writer.sync();
                                dst_writer.sync();
                                cnsyncf = 0;
                                cbsyncs = 0L;
                            }
                        } else {
                            ++skipFileCount;
                            skipByteCount += child.getLen();
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("skipping file " + child.getPath());
                            }
                        }
                    }

                    if (!skipPath) {
                        src_writer.append(new LongWritable(child.isDirectory() ? 0 : child.getLen()),
                                new FilePair(child, dst));
                    }

                    dst_writer.append(new Text(dst), new Text(child.getPath().toString()));
                }

                if (cur.isDirectory()) {
                    String dst = makeRelative(root, cur.getPath());
                    dir_writer.append(new Text(dst), new FilePair(cur, dst));
                    if (++dirsyn > SYNC_FILE_MAX) {
                        dirsyn = 0;
                        dir_writer.sync();
                    }
                }
            }
        }
    }
    LOG.info("sourcePathsCount(files+directories)=" + srcCount);
    LOG.info("filesToCopyCount=" + fileCount);
    LOG.info("bytesToCopyCount=" + TraditionalBinaryPrefix.long2String(byteCount, "", 1));
    if (update) {
        LOG.info("filesToSkipCopyCount=" + skipFileCount);
        LOG.info("bytesToSkipCopyCount=" + TraditionalBinaryPrefix.long2String(skipByteCount, "", 1));
    }
    if (args.dryrun) {
        return false;
    }
    int mapCount = setMapCount(byteCount, jobConf);
    // Increase the replication of _distcp_src_files, if needed
    setReplication(conf, jobConf, srcfilelist, mapCount);

    FileStatus dststatus = null;
    try {
        dststatus = dstfs.getFileStatus(args.dst);
    } catch (FileNotFoundException fnfe) {
        LOG.info(args.dst + " does not exist.");
    }

    // create dest path dir if copying > 1 file
    if (dststatus == null) {
        if (srcCount > 1 && !dstfs.mkdirs(args.dst)) {
            throw new IOException("Failed to create" + args.dst);
        }
    }

    final Path sorted = new Path(jobDirectory, "_distcp_sorted");
    checkDuplication(jobfs, dstfilelist, sorted, conf);

    if (dststatus != null && args.flags.contains(Options.DELETE)) {
        long deletedPathsCount = deleteNonexisting(dstfs, dststatus, sorted, jobfs, jobDirectory, jobConf,
                conf);
        LOG.info("deletedPathsFromDestCount(files+directories)=" + deletedPathsCount);
    }

    Path tmpDir = new Path(
            (dstExists && !dstIsDir) || (!dstExists && srcCount == 1) ? args.dst.getParent() : args.dst,
            "_distcp_tmp_" + randomId);
    jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());

    // Explicitly create the tmpDir to ensure that it can be cleaned
    // up by fullyDelete() later.
    tmpDir.getFileSystem(conf).mkdirs(tmpDir);

    LOG.info("sourcePathsCount=" + srcCount);
    LOG.info("filesToCopyCount=" + fileCount);
    LOG.info("bytesToCopyCount=" + TraditionalBinaryPrefix.long2String(byteCount, "", 1));
    jobConf.setInt(SRC_COUNT_LABEL, srcCount);
    jobConf.setLong(TOTAL_SIZE_LABEL, byteCount);

    return (fileCount + dirCount) > 0;
}
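Buried in the listing above is the core traversal: push the source root onto pathstack, then pop a directory, enumerate its children, and push subdirectories back so the whole tree is walked without recursion. A minimal, self-contained sketch of the same pattern over the local file system (java.io.File standing in for Hadoop's FileStatus):

import java.io.File;
import java.util.Stack;

class DirWalk {
    public static void main(String[] args) {
        Stack<File> pathstack = new Stack<>();
        pathstack.push(new File(args.length > 0 ? args[0] : "."));
        while (!pathstack.empty()) {
            File cur = pathstack.pop();
            File[] children = cur.listFiles();
            if (children == null) {
                continue; // not a directory, or not readable
            }
            for (File child : children) {
                if (child.isDirectory()) {
                    pathstack.push(child); // descend later
                } else {
                    System.out.println(child.getPath());
                }
            }
        }
    }
}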
From source file: com.projity.pm.graphic.model.transform.NodeCacheTransformer.java
private Map<GraphicNode, List<GraphicNode>> extractAssignments(List list) {
    Map<GraphicNode, List<GraphicNode>> map = new HashMap<GraphicNode, List<GraphicNode>>();
    GraphicNode current, last;
    Stack<GraphicNode> path = new Stack<GraphicNode>();
    for (ListIterator i = list.listIterator(); i.hasNext();) {
        current = (GraphicNode) i.next();
        if (current.getLevel() == 1) {
            path.clear();
            path.push(current);
            continue;
        }
        while ((last = path.peek()).getLevel() >= current.getLevel())
            path.pop();
        if (current.isAssignment()) {
            GraphicNode task = path.peek();
            List<GraphicNode> ass = map.get(task);
            if (ass == null) {
                ass = new LinkedList<GraphicNode>();
                map.put(task, ass);
            }
            ass.add(current);
            i.remove();
        }
        path.push(current);
    }
    return map;
}
From source file: com.abstratt.mdd.internal.frontend.textuml.TextUMLCompiler.java
/**
 * Given a position in a compilation unit, finds the contextual namespace.
 *
 * @param toParse source of compilation unit
 * @param line line number, starting from 1
 * @param col column number, starting from 1
 * @return the name of the contextual namespace
 */
public String findNamespace(String toParse, final int line, final int col) {
    Token token = findTokenAt(toParse, line, col, false, true);
    if (token == null)
        return null;
    final Stack<String> segments = new Stack<String>();
    for (Node current = token; current != null; current = current.parent()) {
        current.apply(new AnalysisAdapter() {
            @Override
            public void caseAStart(AStart node) {
                segments.push(TextUMLCore.getSourceMiner().getQualifiedIdentifier(
                        ((APackageHeading) node.getPackageHeading()).getQualifiedIdentifier()));
            }

            public void caseASubNamespace(ASubNamespace node) {
                segments.push(TextUMLCore.getSourceMiner()
                        .getQualifiedIdentifier(((APackageHeading) node.getPackageHeading())));
            }

            @Override
            public void caseAClassDef(AClassDef node) {
                segments.push(((AClassHeader) node.getClassHeader()).getIdentifier().getText());
            }

            @Override
            public void caseAStereotypeDef(AStereotypeDef node) {
                segments.push(((AStereotypeDefHeader) node.getStereotypeDefHeader()).getIdentifier().getText());
            }

            @Override
            public void caseAAssociationDef(AAssociationDef node) {
                final String associationName = ((AAssociationHeader) node.getAssociationHeader())
                        .getIdentifier().getText();
                if (associationName.length() > 0)
                    segments.push(associationName);
            }
        });
    }
    if (segments.isEmpty())
        return null;
    StringBuffer result = new StringBuffer();
    while (!segments.isEmpty()) {
        result.append(segments.pop());
        result.append(NamedElement.SEPARATOR);
    }
    result.delete((result.length() - NamedElement.SEPARATOR.length()), result.length());
    return result.toString();
}
From source file: com.anite.antelope.zebra.om.AntelopeProcessInstance.java
/**
 * @param results
 * @param q
 * @throws HibernateException
 */
private void recursivelyQueryChildProcesses(List results, Query q) throws HibernateException {
    // Recursive Process children
    Stack checkList = new Stack();
    checkList.push(this);
    while (!checkList.isEmpty()) {
        AntelopeProcessInstance processInstance = (AntelopeProcessInstance) checkList.pop();
        q.setLong("guid", processInstance.getProcessInstanceId().longValue());
        for (Iterator it = q.iterate(); it.hasNext();) {
            AntelopeProcessInstance childProcess = (AntelopeProcessInstance) it.next();
            results.add(childProcess);
            checkList.push(childProcess);
        }
    }
}
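This last example replaces recursion with an explicit worklist: push the starting node, then repeatedly pop one and push every child it yields, until nothing is left. A minimal, self-contained sketch with an in-memory parent-to-children map standing in for the Hibernate query (the map and node names are assumptions for illustration):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Stack;

class WorklistTraversal {
    // Collects every descendant of root, using a stack instead of recursion.
    static List<String> descendants(String root, Map<String, List<String>> childrenOf) {
        List<String> results = new ArrayList<>();
        Stack<String> checkList = new Stack<>();
        checkList.push(root);
        while (!checkList.isEmpty()) {
            String current = checkList.pop();
            for (String child : childrenOf.getOrDefault(current, List.of())) {
                results.add(child);
                checkList.push(child); // its children are queried in a later iteration
            }
        }
        return results;
    }

    public static void main(String[] args) {
        Map<String, List<String>> tree = Map.of(
                "root", List.of("a", "b"),
                "a", List.of("a1"));
        System.out.println(descendants("root", tree)); // [a, b, a1]
    }
}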