List of usage examples for java.util.Stack.pop()
public synchronized E pop()
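Before the real-world examples below, here is a minimal, self-contained sketch of the pop() contract: pop() removes and returns the element at the top of the stack (LIFO order) and throws EmptyStackException when the stack is empty, which is why most of the examples below guard calls with isEmpty(). The class name StackPopDemo and the pushed values are illustrative only.

import java.util.EmptyStackException;
import java.util.Stack;

public class StackPopDemo {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<String>();
        stack.push("first");
        stack.push("second");

        // pop() removes and returns the top element in LIFO order.
        while (!stack.isEmpty()) {
            System.out.println(stack.pop()); // prints "second", then "first"
        }

        // Popping an empty stack throws EmptyStackException.
        try {
            stack.pop();
        } catch (EmptyStackException e) {
            System.out.println("stack was empty");
        }
    }
}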
From source file:org.apache.bigtop.bigpetstore.qstream.HttpLoadGen.java
/**
 * Appends via REST calls.
 */
public LinkedBlockingQueue<Transaction> startWriteQueue(final int milliseconds) {
    // Write queue, drained every `milliseconds` ms by the writer thread below.
    final LinkedBlockingQueue<Transaction> transactionQueue =
            new LinkedBlockingQueue<Transaction>(getQueueSize());
    new Thread() {
        @Override
        public void run() {
            int fileNumber = 0;
            while (true) {
                waitFor(milliseconds, transactionQueue);
                System.out.println("CLEARING " + transactionQueue.size() + " elements from queue.");
                Stack<Transaction> transactionsToWrite = new Stack<Transaction>();
                transactionQueue.drainTo(transactionsToWrite);
                // Pop transactions from the stack and send them over HTTP as JSON.
                while (!transactionsToWrite.isEmpty()) {
                    try {
                        String trAsJson = URLEncoder.encode(Utils.toJson(transactionsToWrite.pop()));
                        // i.e. wget http://localhost:3000/rpush/guestbook/{"name":"cos boudnick", "state":"...",...}
                        HttpResponse resp = Utils.get(path + "/" + trAsJson);
                        if (total % 20 == 0)
                            System.out.println("wrote customer " + trAsJson);
                        total++;
                    } catch (Throwable t) {
                        System.err.println("transaction failed.... !");
                        t.printStackTrace();
                    }
                    System.out.println("TRANSACTIONS SO FAR " + total++ + " RATE "
                            + total / ((System.currentTimeMillis() - startTime) / 1000));
                }
            }
        }
    }.start();
    return transactionQueue;
}
From source file:ru.emdev.ldap.util.EmDevSchemaLdifExtractor.java
/**
 * Assembles the destination file by appending file components previously
 * pushed on the fileComponentStack argument.
 *
 * @param fileComponentStack stack containing pushed file components
 * @return the assembled destination file
 */
private File assembleDestinationFile(Stack<String> fileComponentStack) {
    File destinationFile = outputDirectory.getAbsoluteFile();
    while (!fileComponentStack.isEmpty()) {
        destinationFile = new File(destinationFile, fileComponentStack.pop());
    }
    return destinationFile;
}
From source file:alluxio.job.move.MoveDefinition.java
/**
 * Returns {@link URIStatus} for all paths under the specified path, including the path itself.
 *
 * The statuses will be listed in the order they are visited by depth-first search.
 *
 * @param path the target path
 * @return a list of the {@link URIStatus} for all paths under the given path
 * @throws Exception if an exception occurs
 */
private List<URIStatus> getPathStatuses(AlluxioURI path) throws Exception {
    // Depth-first search to find all files under path.
    Stack<AlluxioURI> pathsToConsider = new Stack<>();
    pathsToConsider.add(path);
    List<URIStatus> allStatuses = Lists.newArrayList();
    while (!pathsToConsider.isEmpty()) {
        AlluxioURI nextPath = pathsToConsider.pop();
        URIStatus status = mFileSystem.getStatus(nextPath);
        allStatuses.add(status);
        if (status.isFolder()) {
            List<URIStatus> childStatuses = mFileSystem.listStatus(nextPath);
            for (URIStatus childStatus : childStatuses) {
                if (childStatus.isFolder()) {
                    pathsToConsider.push(new AlluxioURI(childStatus.getPath()));
                } else {
                    allStatuses.add(childStatus);
                }
            }
        }
    }
    return ImmutableList.copyOf(allStatuses);
}
From source file:gdt.jgui.entity.contact.JContactEditor.java
@Override
public void response(JMainConsole console, String locator$) {
    System.out.println("JContactEditor.response:locator=" + locator$);
    try {
        Properties locator = Locator.toProperties(locator$);
        String action$ = locator.getProperty(JRequester.REQUESTER_ACTION);
        if (ACTION_CREATE_CONTACT.equals(action$)) {
            String entihome$ = locator.getProperty(Entigrator.ENTIHOME);
            String text$ = locator.getProperty(JTextEditor.TEXT);
            Entigrator entigrator = console.getEntigrator(entihome$);
            Sack contact = entigrator.ent_new("contact", text$);
            contact = entigrator.ent_assignProperty(contact, "contact", text$);
            contact = entigrator.ent_assignProperty(contact, "email", "a@b.com");
            contact = entigrator.ent_assignProperty(contact, "phone", "123456");
            contact.putAttribute(new Core(null, "icon", "contact.png"));
            entigrator.save(contact);
            entigrator.saveHandlerIcon(JContactEditor.class, "contact.png");
            entityKey$ = contact.getKey();
            JContactEditor ce = new JContactEditor();
            String ceLocator$ = ce.getLocator();
            ceLocator$ = Locator.append(ceLocator$, Entigrator.ENTIHOME, entihome$);
            ceLocator$ = Locator.append(ceLocator$, EntityHandler.ENTITY_KEY, entityKey$);
            JEntityPrimaryMenu.reindexEntity(console, ceLocator$);
            Stack<String> s = console.getTrack();
            s.pop();
            console.setTrack(s);
            JConsoleHandler.execute(console, ceLocator$);
        }
    } catch (Exception e) {
        Logger.getLogger(JContactEditor.class.getName()).severe(e.toString());
    }
}
From source file:eu.annocultor.converters.geonames.GeonamesCsvToRdf.java
Set<String> allParents(String uri, String country) {
    Set<String> all = new HashSet<String>();
    Collection<String> directParents = broader.getCollection(uri);
    Stack<String> toCheckForParents = new Stack<String>();
    if (directParents != null) {
        toCheckForParents.addAll(directParents);
    }
    while (!toCheckForParents.isEmpty()) {
        String parent = toCheckForParents.pop();
        if (parent != null && all.add(parent)) {
            Collection<String> parents = broader.getCollection(parent);
            if (parents != null) {
                // Queue the newly found parents so their own ancestors are visited too
                // (the original snippet re-added directParents here, which loses grandparents).
                toCheckForParents.addAll(parents);
            }
        }
    }
    String countryUri = countryCodeToGeonamesCode.get(country);
    if (countryUri != null) {
        all.add(countryUri);
    }
    return all;
}
From source file:com.spidertracks.datanucleus.query.runtime.EqualityOperand.java
@Override
public Operand optimizeDescriminator(Bytes descriminatorColumnValue, List<Bytes> possibleValues) {
    // the equality node is always a leaf, so we don't need to recurse
    if (possibleValues.size() == 1) {
        IndexExpression leaf = new IndexExpression();
        leaf.setColumn_name(descriminatorColumnValue.getBytes());
        leaf.setValue(possibleValues.get(0).getBytes());
        leaf.setOp(IndexOperator.EQ);
        // discriminator fields are always indexed.
        addExpression(leaf, true);
        return this;
    }

    Stack<EqualityOperand> eqOps = new Stack<EqualityOperand>();
    Stack<OrOperand> orOps = new Stack<OrOperand>();

    for (Bytes value : possibleValues) {
        if (orOps.size() == 2) {
            OrOperand orOp = new OrOperand();
            orOp.setLeft(orOps.pop());
            orOp.setRight(orOps.pop());
            orOps.push(orOp);
        }
        if (eqOps.size() == 2) {
            OrOperand orOp = new OrOperand();
            orOp.setLeft(eqOps.pop());
            orOp.setRight(eqOps.pop());
            orOps.push(orOp);
        }

        EqualityOperand subClass = new EqualityOperand(clause.getCount());
        // add the existing clause
        subClass.addAll(this.getIndexClause().getExpressions(), this.isIndexed());

        IndexExpression expression = new IndexExpression();
        expression.setColumn_name(descriminatorColumnValue.getBytes());
        expression.setValue(value.getBytes());
        expression.setOp(IndexOperator.EQ);

        // now add the discriminator; discriminator fields are always indexed.
        subClass.addExpression(expression, true);

        // push onto the stack
        eqOps.push(subClass);
    }

    // fold the remaining equality operands into OR operands
    while (eqOps.size() > 0) {
        OrOperand orOp = new OrOperand();
        if (eqOps.size() % 2 == 0) {
            orOp.setLeft(eqOps.pop());
            orOp.setRight(eqOps.pop());
        } else {
            orOp.setLeft(eqOps.pop());
            orOp.setRight(orOps.pop());
        }
        orOps.push(orOp);
    }

    // reduce the OR operands until a single root remains
    while (orOps.size() > 1) {
        OrOperand orOp = new OrOperand();
        orOp.setLeft(orOps.pop());
        orOp.setRight(orOps.pop());
        orOps.push(orOp);
    }

    // everything has been folded into a single OR operand; return it as the root
    return orOps.pop();
}
From source file:com.intuit.tank.harness.functions.JexlStringFunctions.java
/**
 * Generate a random user id in the range provided.
 *
 * @param ominId
 *            the minimum id in the range
 * @param omaxId
 *            the maximum id in the range
 * @return The random user id
 */
public String userIdFromRange(Object ominId, Object omaxId) {
    int minId = FunctionHandler.getInt(ominId);
    int maxId = FunctionHandler.getInt(omaxId);
    Stack<Integer> stack = getStack(minId, maxId);
    if (stack.size() > 0) {
        return Integer.toString(stack.pop());
    }
    throw new IllegalArgumentException(
            "Exhausted random User Ids. Range not large enough for the number of calls.");
}
From source file:de.codesourcery.jasm16.compiler.Main.java
private void handleCommandlineOption(String option, Stack<String> arguments) {
    if ("-d".equalsIgnoreCase(option) || "--debug".equalsIgnoreCase(option)) {
        this.printStackTraces = true;
        this.printDebugStats = true;
        this.verboseOutput = true;
        arguments.pop();
    } else if ("--local-labels".equalsIgnoreCase(option)) {
        this.enableLocalLabelSupport = true;
        arguments.pop();
    } else if ("--relaxed-validation".equalsIgnoreCase(option)) {
        this.relaxedValidation = true;
        arguments.pop();
    } else if ("--disable-literal-inlining".equalsIgnoreCase(option)) {
        this.disableLiteralInlining = true;
        arguments.pop();
    } else if ("--relaxed-parsing".equalsIgnoreCase(option)) {
        this.relaxedParsing = true;
        arguments.pop();
    } else if ("--print".equalsIgnoreCase(option)) {
        this.printSourceCode = true;
        arguments.pop();
    } else if ("--print-symbols".equalsIgnoreCase(option)) {
        this.printSymbolTable = true;
        arguments.pop();
    } else if ("-v".equalsIgnoreCase(option) || "--verbose".equalsIgnoreCase(option)) {
        this.verboseOutput = true;
        arguments.pop();
    } else if ("--dump".equalsIgnoreCase(option)) {
        this.dumpObjectCode = true;
        arguments.pop();
    } else if ("-o".equalsIgnoreCase(option)) {
        arguments.pop();
        this.outputFile = new File(arguments.pop());
    } else if ("-h".equalsIgnoreCase(option) || "--help".equalsIgnoreCase(option)) {
        printUsage();
        System.exit(1);
    } else {
        printError("ERROR: Unrecognized option '" + option + "'\n\n");
        printUsage();
        System.exit(1);
    }
}
From source file:org.apache.hadoop.tools.DistCp.java
/**
 * Initialize DFSCopyFileMapper specific job-configuration.
 * @param conf : The dfs/mapred configuration.
 * @param jobConf : The handle to the jobConf object to be initialized.
 * @param args Arguments
 * @return true if it is necessary to launch a job.
 */
private static boolean setup(Configuration conf, JobConf jobConf, final Arguments args) throws IOException {
    jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());

    // set boolean values
    final boolean update = args.flags.contains(Options.UPDATE);
    final boolean skipCRCCheck = args.flags.contains(Options.SKIPCRC);
    final boolean overwrite = !update && args.flags.contains(Options.OVERWRITE);
    jobConf.setBoolean(Options.UPDATE.propertyname, update);
    jobConf.setBoolean(Options.SKIPCRC.propertyname, skipCRCCheck);
    jobConf.setBoolean(Options.OVERWRITE.propertyname, overwrite);
    jobConf.setBoolean(Options.IGNORE_READ_FAILURES.propertyname,
            args.flags.contains(Options.IGNORE_READ_FAILURES));
    jobConf.setBoolean(Options.PRESERVE_STATUS.propertyname, args.flags.contains(Options.PRESERVE_STATUS));

    final String randomId = getRandomId();
    JobClient jClient = new JobClient(jobConf);
    Path stagingArea;
    try {
        stagingArea = JobSubmissionFiles.getStagingDir(jClient, conf);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
    Path jobDirectory = new Path(stagingArea + NAME + "_" + randomId);
    FsPermission mapredSysPerms = new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION);
    // FileSystem.mkdirs(jClient.getFs(), jobDirectory, mapredSysPerms);
    FileSystem.mkdirs(FileSystem.get(jobDirectory.toUri(), conf), jobDirectory, mapredSysPerms);
    jobConf.set(JOB_DIR_LABEL, jobDirectory.toString());

    FileSystem dstfs = args.dst.getFileSystem(conf);

    // get tokens for all the required FileSystems..
    TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(), new Path[] { args.dst }, conf);

    boolean dstExists = dstfs.exists(args.dst);
    boolean dstIsDir = false;
    if (dstExists) {
        dstIsDir = dstfs.getFileStatus(args.dst).isDir();
    }

    // default logPath
    Path logPath = args.log;
    if (logPath == null) {
        String filename = "_distcp_logs_" + randomId;
        if (!dstExists || !dstIsDir) {
            Path parent = args.dst.getParent();
            if (!dstfs.exists(parent)) {
                dstfs.mkdirs(parent);
            }
            logPath = new Path(parent, filename);
        } else {
            logPath = new Path(args.dst, filename);
        }
    }
    FileOutputFormat.setOutputPath(jobConf, logPath);

    // create src list, dst list
    FileSystem jobfs = jobDirectory.getFileSystem(jobConf);

    Path srcfilelist = new Path(jobDirectory, "_distcp_src_files");
    jobConf.set(SRC_LIST_LABEL, srcfilelist.toString());
    SequenceFile.Writer src_writer = SequenceFile.createWriter(jobfs, jobConf, srcfilelist,
            LongWritable.class, FilePair.class, SequenceFile.CompressionType.NONE);

    Path dstfilelist = new Path(jobDirectory, "_distcp_dst_files");
    SequenceFile.Writer dst_writer = SequenceFile.createWriter(jobfs, jobConf, dstfilelist, Text.class,
            Text.class, SequenceFile.CompressionType.NONE);

    Path dstdirlist = new Path(jobDirectory, "_distcp_dst_dirs");
    jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString());
    SequenceFile.Writer dir_writer = SequenceFile.createWriter(jobfs, jobConf, dstdirlist, Text.class,
            FilePair.class, SequenceFile.CompressionType.NONE);

    // handle the case where the destination directory doesn't exist
    // and we've only a single src directory OR we're updating/overwriting
    // the contents of the destination directory.
    final boolean special = (args.srcs.size() == 1 && !dstExists) || update || overwrite;
    int srcCount = 0, cnsyncf = 0, dirsyn = 0;
    long fileCount = 0L, byteCount = 0L, cbsyncs = 0L;
    try {
        for (Iterator<Path> srcItr = args.srcs.iterator(); srcItr.hasNext();) {
            final Path src = srcItr.next();
            FileSystem srcfs = src.getFileSystem(conf);
            FileStatus srcfilestat = srcfs.getFileStatus(src);
            Path root = special && srcfilestat.isDir() ? src : src.getParent();
            if (srcfilestat.isDir()) {
                ++srcCount;
            }

            Stack<FileStatus> pathstack = new Stack<FileStatus>();
            for (pathstack.push(srcfilestat); !pathstack.empty();) {
                FileStatus cur = pathstack.pop();
                FileStatus[] children = srcfs.listStatus(cur.getPath());
                for (int i = 0; i < children.length; i++) {
                    boolean skipfile = false;
                    final FileStatus child = children[i];
                    final String dst = makeRelative(root, child.getPath());
                    ++srcCount;

                    if (child.isDir()) {
                        pathstack.push(child);
                    } else {
                        // skip file if the src and the dst files are the same.
                        skipfile = update
                                && sameFile(srcfs, child, dstfs, new Path(args.dst, dst), skipCRCCheck);
                        // skip file if it exceeds the file limit or size limit
                        skipfile |= fileCount == args.filelimit
                                || byteCount + child.getLen() > args.sizelimit;

                        if (!skipfile) {
                            ++fileCount;
                            byteCount += child.getLen();

                            if (LOG.isTraceEnabled()) {
                                LOG.trace("adding file " + child.getPath());
                            }

                            ++cnsyncf;
                            cbsyncs += child.getLen();
                            if (cnsyncf > SYNC_FILE_MAX || cbsyncs > BYTES_PER_MAP) {
                                src_writer.sync();
                                dst_writer.sync();
                                cnsyncf = 0;
                                cbsyncs = 0L;
                            }
                        }
                    }

                    if (!skipfile) {
                        src_writer.append(new LongWritable(child.isDir() ? 0 : child.getLen()),
                                new FilePair(child, dst));
                    }

                    dst_writer.append(new Text(dst), new Text(child.getPath().toString()));
                }

                if (cur.isDir()) {
                    String dst = makeRelative(root, cur.getPath());
                    dir_writer.append(new Text(dst), new FilePair(cur, dst));
                    if (++dirsyn > SYNC_FILE_MAX) {
                        dirsyn = 0;
                        dir_writer.sync();
                    }
                }
            }
        }
    } finally {
        checkAndClose(src_writer);
        checkAndClose(dst_writer);
        checkAndClose(dir_writer);
    }

    FileStatus dststatus = null;
    try {
        dststatus = dstfs.getFileStatus(args.dst);
    } catch (FileNotFoundException fnfe) {
        LOG.info(args.dst + " does not exist.");
    }

    // create dest path dir if copying > 1 file
    if (dststatus == null) {
        if (srcCount > 1 && !dstfs.mkdirs(args.dst)) {
            throw new IOException("Failed to create " + args.dst);
        }
    }

    final Path sorted = new Path(jobDirectory, "_distcp_sorted");
    checkDuplication(jobfs, dstfilelist, sorted, conf);

    if (dststatus != null && args.flags.contains(Options.DELETE)) {
        deleteNonexisting(dstfs, dststatus, sorted, jobfs, jobDirectory, jobConf, conf);
    }

    Path tmpDir = new Path(
            (dstExists && !dstIsDir) || (!dstExists && srcCount == 1) ? args.dst.getParent() : args.dst,
            "_distcp_tmp_" + randomId);
    jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());
    LOG.info("sourcePathsCount=" + srcCount);
    LOG.info("filesToCopyCount=" + fileCount);
    LOG.info("bytesToCopyCount=" + StringUtils.humanReadableInt(byteCount));
    jobConf.setInt(SRC_COUNT_LABEL, srcCount);
    jobConf.setLong(TOTAL_SIZE_LABEL, byteCount);
    setMapCount(byteCount, jobConf);
    return fileCount > 0;
}
From source file:com.altoukhov.svsync.fileviews.SmbFileSpace.java
@Override
protected Snapshot scan(List<Pattern> filters) {
    try {
        Map<String, FileSnapshot> files = new LinkedHashMap<>();
        Set<String> dirs = new HashSet<>();
        SmbFile root = new SmbFile(rootPath, auth);

        if (root.exists()) {
            Stack<SmbFile> stack = new Stack<>();
            stack.push(root);
            dirs.add("");

            while (!stack.isEmpty()) {
                SmbFile currentFolder = stack.pop();

                for (final SmbFile file : listFiles(currentFolder)) {
                    String path = file.getPath();
                    boolean isFile = isFile(file);
                    boolean isDirectory = isDirectory(file);

                    if (isFile && !isExcluded(path) && !isFiltered(toRelativePath(path), filters)) {
                        FileSnapshot fileSnapshot = createFileSnapshot(file, path);
                        files.put(fileSnapshot.getRelativePath(), fileSnapshot);
                    } else if (isDirectory && !isExcluded(path)
                            && !isFiltered(toRelativePath(path, true), filters)) {
                        stack.push(file);
                        dirs.add(toRelativePath(path));
                        System.out.println("Scanning " + path);
                    }
                }
            }
        }

        Snapshot snapshot = new Snapshot(files, dirs);
        return snapshot;
    } catch (MalformedURLException | SmbException ex) {
        System.out.println("Failed to scan file space");
        System.out.println(ex.getMessage());
        System.out.println(ex);
    }
    return null;
}