Example usage for java.util Stack Stack

List of usage examples for java.util Stack Stack

Introduction

On this page you can find example usage of the java.util Stack constructor, Stack().

Prototype

public Stack() 

Source Link

Document

Creates an empty Stack.

Usage

From source file:logica.Estacion.java

/**
 * Updates every sub-station in this sub-network and returns a stack with all
 * of their measurements.
 *
 * @return stack of PaqueteDatos with the measurements of this whole sub-network
 */
public Stack<PaqueteDatos> actualizar() {
    // Holds the data received from a single sub-station per iteration.
    Stack<PaqueteDatos> newData = new Stack<>();
    LOGGER.log(Level.INFO, String.format("Actualizando estacion %s %d", clase.toString(), ID));

    medidasPila.clear(); // Reset the accumulated measurements.

    for (Estacion subestacion : redEstaciones) {
        if (subestacion != null) {
            newData = subestacion.actualizar();

            // Copy newData (a stack) onto the base of medidasPila (another stack).
            // Guard with isEmpty(): Stack.peek() throws EmptyStackException on an
            // empty stack rather than returning null, so the previous
            // `peek() != null` check could never detect an empty result and
            // would crash instead.
            if (!newData.isEmpty()) {
                medidasPila.addAll(newData);
            }
        }
    }

    return medidasPila;
}

From source file:alluxio.underfs.hdfs.HdfsUnderFileSystem.java

/**
 * Creates the directory at {@code path}, together with any missing ancestor
 * directories, retrying the whole operation on IOException.
 * <p>
 * Directories are created one level at a time (shallowest missing ancestor
 * first) so the explicit permission from {@code options} is applied to every
 * created directory; a single mkdirs call would apply it only to the last one.
 *
 * @param path the directory path to create
 * @param options carries the mode, owner and group for each created directory
 * @return true on success; false if the path already exists or an
 *         intermediate mkdirs call reports failure
 * @throws IOException the last failure, once all retry attempts are exhausted
 */
@Override
public boolean mkdirs(String path, MkdirsOptions options) throws IOException {
    IOException te = null; // remembers the last failure so it can be rethrown below
    RetryPolicy retryPolicy = new CountingRetry(MAX_TRY);
    while (retryPolicy.attemptRetry()) {
        try {
            Path hdfsPath = new Path(path);
            if (mFileSystem.exists(hdfsPath)) {
                LOG.debug("Trying to create existing directory at {}", path);
                return false;
            }
            // Create directories one by one with explicit permissions to ensure no umask is applied,
            // using mkdirs will apply the permission only to the last directory
            Stack<Path> dirsToMake = new Stack<>();
            dirsToMake.push(hdfsPath);
            Path parent = hdfsPath.getParent();
            // Collect every missing ancestor; popping the stack then yields
            // them shallowest-first so parents exist before their children.
            while (!mFileSystem.exists(parent)) {
                dirsToMake.push(parent);
                parent = parent.getParent();
            }
            while (!dirsToMake.empty()) {
                Path dirToMake = dirsToMake.pop();
                if (!FileSystem.mkdirs(mFileSystem, dirToMake, new FsPermission(options.getMode().toShort()))) {
                    return false;
                }
                // Set the owner to the Alluxio client user to achieve permission delegation.
                // Alluxio server-side user is required to be a HDFS superuser. If it fails to set owner,
                // proceeds with mkdirs and print out an warning message.
                try {
                    setOwner(dirToMake.toString(), options.getOwner(), options.getGroup());
                } catch (IOException e) {
                    LOG.warn("Failed to update the ufs dir ownership, default values will be used. " + e);
                }
            }
            return true;
        } catch (IOException e) {
            LOG.warn("{} try to make directory for {} : {}", retryPolicy.getRetryCount(), path, e.getMessage());
            te = e;
        }
    }
    // NOTE(review): if the retry policy grants no attempt at all, te is still
    // null here and this throws NullPointerException instead of an IOException.
    // Presumably CountingRetry(MAX_TRY) always allows at least one attempt —
    // confirm against the RetryPolicy implementation.
    throw te;
}

From source file:com.espertech.esper.epl.parse.EPLTreeWalkerListener.java

/**
 * Ctor.
 * @param tokenStream the token stream the walker operates on
 * @param engineImportService is required to resolve lib-calls into static methods or configured aggregation functions
 * @param variableService for variable access
 * @param schedulingService provides the time source and scheduling access
 * @param defaultStreamSelector - the configuration for which insert or remove streams (or both) to produce
 * @param engineURI engine URI
 * @param configurationInformation configuration info
 * @param patternNodeFactory factory for pattern nodes
 * @param contextManagementService context management access
 * @param scriptBodies script bodies collected during parsing
 * @param exprDeclaredService declared-expression access
 * @param tableService table access
 * @throws ASTWalkException if the default stream selector is null
 */
public EPLTreeWalkerListener(CommonTokenStream tokenStream, EngineImportService engineImportService,
        VariableService variableService, SchedulingService schedulingService,
        SelectClauseStreamSelectorEnum defaultStreamSelector, String engineURI,
        ConfigurationInformation configurationInformation, PatternNodeFactory patternNodeFactory,
        ContextManagementService contextManagementService, List<String> scriptBodies,
        ExprDeclaredService exprDeclaredService, TableService tableService) {
    // Fail fast before any state is initialized from the selector: the
    // original code assigned a dozen fields first and validated afterwards.
    if (defaultStreamSelector == null) {
        throw ASTWalkException.from("Default stream selector is null");
    }

    this.tokenStream = tokenStream;
    this.engineImportService = engineImportService;
    this.variableService = variableService;
    this.defaultStreamSelector = defaultStreamSelector;
    this.timeProvider = schedulingService;
    this.patternNodeFactory = patternNodeFactory;
    this.exprEvaluatorContext = new ExprEvaluatorContextTimeOnly(timeProvider);
    this.engineURI = engineURI;
    this.configurationInformation = configurationInformation;
    this.schedulingService = schedulingService;
    this.contextManagementService = contextManagementService;
    this.scriptBodies = scriptBodies;
    this.exprDeclaredService = exprDeclaredService;
    this.tableService = tableService;

    statementSpec = new StatementSpecRaw(defaultStreamSelector);
    statementSpecStack = new Stack<StatementSpecRaw>();
    astExprNodeMapStack = new Stack<Map<Tree, ExprNode>>();

    // statement-global items
    expressionDeclarations = new ExpressionDeclDesc();
    statementSpec.setExpressionDeclDesc(expressionDeclarations);
    scriptExpressions = new ArrayList<ExpressionScriptProvided>(1);
    statementSpec.setScriptExpressions(scriptExpressions);
}

From source file:com.anite.antelope.zebra.om.AntelopeProcessInstance.java

/**
 * @param results/*w ww.j  a  v a 2  s .c om*/
 * @param q
 * @throws HibernateException
 */
private void recursivelyQueryChildProcesses(List results, Query q) throws HibernateException {
    // Recursive Process children
    Stack checkList = new Stack();
    checkList.push(this);
    while (!checkList.isEmpty()) {
        AntelopeProcessInstance processInstance = (AntelopeProcessInstance) checkList.pop();
        q.setLong("guid", processInstance.getProcessInstanceId().longValue());
        for (Iterator it = q.iterate(); it.hasNext();) {
            AntelopeProcessInstance childProcess = (AntelopeProcessInstance) it.next();
            results.add(childProcess);
            checkList.push(childProcess);
        }
    }
}

From source file:com.frostwire.android.gui.Librarian.java

/**
 * Given a folder path it'll return all the files contained within it and its
 * subfolders as a flat set of Files.
 * <p>
 * Non-recursive implementation, up to 20% faster in tests than recursive implementation. :)
 *
 * @param folder the root folder to scan
 * @param extensions If you only need certain files filtered by their extensions, use this string array (without the "."). or set to null if you want all files. e.g. ["txt","jpg"] if you only want text files and jpegs.
 * @return The set of files.
 * @author gubatron
 */
private static Collection<File> getAllFolderFiles(File folder, String[] extensions) {
    Set<File> results = new HashSet<>();
    Stack<File> subFolders = new Stack<>();
    File currentFolder = folder;
    while (currentFolder != null && currentFolder.isDirectory() && currentFolder.canRead()) {
        File[] fs = null;
        try {
            fs = currentFolder.listFiles();
        } catch (SecurityException ignored) {
            // Best-effort scan: folders we are not allowed to list are skipped
            // silently (fs stays null and the loop moves on to the next folder).
        }

        if (fs != null && fs.length > 0) {
            for (File f : fs) {
                if (!f.isDirectory()) {
                    // Keep the file when no filter is given or its extension matches.
                    if (extensions == null || FilenameUtils.isExtension(f.getName(), extensions)) {
                        results.add(f);
                    }
                } else {
                    subFolders.push(f);
                }
            }
        }

        // Depth-first: continue with the most recently discovered subfolder.
        if (!subFolders.isEmpty()) {
            currentFolder = subFolders.pop();
        } else {
            currentFolder = null; // nothing left to visit; ends the loop
        }
    }
    return results;
}

From source file:com.marklogic.dom.NodeImpl.java

/**
 * Returns a lazily-built NodeList of descendant elements, matched either by
 * node name (when {@code nodeName} is true) or by namespace URI plus local
 * name. The start node itself is excluded from the results, and "*" acts as
 * a wildcard for name, namespace and local name.
 *
 * @param namespaceURI namespace to match, or "*" for any (ignored when matching by node name)
 * @param name node name or local name to match, or "*" for any
 * @param nodeName true to compare against getNodeName(), false to compare namespace/local name
 * @return a NodeList whose contents are computed on first access
 */
protected NodeList getElementsByTagNameNSOrNodeName(String namespaceURI, String name, final boolean nodeName) {

    final String tagname = name;
    final String ns = namespaceURI;
    final Node thisNode = this;

    return new NodeList() {
        protected ArrayList<Node> elementList = new ArrayList<Node>();
        protected boolean done = false; // guards init() so the traversal runs only once

        // Performs the subtree traversal on first use (lazy initialization).
        protected void init() {
            if (done)
                return;
            // Iterative depth-first walk over element children.
            Stack<Node> childrenStack = new Stack<Node>();
            childrenStack.push(thisNode);
            boolean root = true;
            while (!childrenStack.isEmpty()) {
                Node curr = childrenStack.pop();
                NodeList children = curr.getChildNodes();
                // Push children in reverse index order so they pop in document order.
                for (int childi = children.getLength() - 1; childi >= 0; childi--)
                    if (children.item(childi).getNodeType() == Node.ELEMENT_NODE)
                        childrenStack.push(children.item(childi));
                if (root) {
                    root = false; // the start node itself is never part of the result
                    continue;
                }
                if (nodeName) {
                    if (curr.getNodeName().equals(tagname) || tagname.equals("*"))
                        elementList.add(curr);
                } else {
                    // do nothing if only one of the two is null
                    if ("*".equals(ns) && "*".equals(tagname)) {
                        elementList.add(curr);
                        continue;
                    }
                    if (ns != null) {
                        if ((ns.equals("*") || ns.equals(curr.getNamespaceURI()))
                                && (tagname.equals("*") || tagname.equals(curr.getLocalName())))
                            elementList.add(curr);
                    } else if (tagname.equals("*") || tagname.equals(curr.getLocalName()))
                        // Null namespace filter: match on local name only.
                        elementList.add(curr);
                }
            }
            done = true;
        }

        public int getLength() {
            init();
            return elementList.size();
        }

        public Node item(int index) {
            init();
            return (index < getLength()) ? elementList.get(index) : null;
        }

    };
}

From source file:net.mojodna.searchable.AbstractBeanIndexer.java

/**
 * Processes a bean into the given document, starting from an empty
 * property-path stack.
 *
 * @param doc Document to add fields to.
 * @param bean Bean to process.
 * @return Document with additional fields.
 * @throws IndexingException if processing the bean fails
 */
protected Document processBean(final Document doc, final Searchable bean) throws IndexingException {
    // Delegate with a fresh, empty path stack for the top-level bean.
    final Stack<String> emptyPath = new Stack<String>();
    return processBean(doc, bean, emptyPath);
}

From source file:com.mmnaseri.dragonfly.data.impl.DefaultDataAccess.java

/**
 * Executes (or, in batch mode, enqueues) the given update statement with the
 * supplied parameter values and returns the prepared statement used.
 * <p>
 * In batch mode, consecutive operations sharing the same SQL text are folded
 * onto a single PreparedStatement via addBatch(); a change in SQL starts a new
 * batch step. Outside batch mode the statement is executed immediately.
 *
 * @param statement the statement to run; dynamic statements are re-rendered
 *                  against the current values before the SQL comparison
 * @param values named parameter values for the statement
 * @return the PreparedStatement that was executed or added to the batch
 */
private PreparedStatement internalExecuteUpdate(Statement statement, Map<String, Object> values) {
    waitForSessionInitialization(); // the session must be ready before touching the connection
    if (isInBatchMode()) {
        // Lazily create the batch descriptor stack on first batched operation.
        if (batchOperation.get() == null) {
            batchOperation.set(new Stack<BatchOperationDescriptor>());
        }
        final List<BatchOperationDescriptor> operationDescriptors = batchOperation.get();
        boolean firstStep = operationDescriptors.isEmpty();
        if (!firstStep) {
            String sql = statement.getSql();
            if (statement.isDynamic()) {
                // Dynamic SQL must be rendered with the actual values before comparing.
                sql = new FreemarkerSecondPassStatementBuilder(statement, session.getDatabaseDialect(), values)
                        .getStatement(statement.getTableMetadata()).getSql();
            }
            // A new batch step begins whenever the SQL differs from the previous step's SQL.
            firstStep = !sql.equals(operationDescriptors.get(operationDescriptors.size() - 1).getSql());
        }
        final PreparedStatement preparedStatement;
        if (!firstStep) {
            // Same SQL as the previous step: reuse its prepared statement.
            preparedStatement = operationDescriptors.get(operationDescriptors.size() - 1)
                    .getPreparedStatement();
            statementPreparator.prepare(preparedStatement, statement.getTableMetadata(), values,
                    operationDescriptors.get(operationDescriptors.size() - 1).getSql());
        } else {
            // New SQL: open a fresh batch step with its own prepared statement.
            final BatchOperationDescriptor operationDescriptor = getPreparedStatement(statement, values);
            operationDescriptors.add(operationDescriptor);
            preparedStatement = operationDescriptor.getPreparedStatement();
        }
        try {
            preparedStatement.addBatch();
        } catch (SQLException e) {
            throw new BatchOperationExecutionError("Failed to add batch operation", e);
        }
        return preparedStatement;
    } else {
        // Not batching: prepare and execute right away.
        final PreparedStatement preparedStatement = getPreparedStatement(statement, values)
                .getPreparedStatement();
        try {
            preparedStatement.executeUpdate();
        } catch (SQLException e) {
            throw new UnsuccessfulOperationError("Failed to execute update", e);
        }
        return preparedStatement;
    }
}

From source file:de.ingrid.external.gemet.GEMETService.java

/**
 * Builds the hierarchy path from the given term up to the top of the
 * thesaurus. Only ONE path is produced: for each term on the path just the
 * first parent found is attached, even when multiple parents exist.
 *
 * @param url not referenced in this implementation
 * @param termId id of the concept to start from
 * @param locale used to derive the GEMET language filter
 * @return the term with its single parent chain set, or an empty TreeTermImpl
 *         when the concept cannot be fetched
 */
@Override
public TreeTerm getHierarchyPathToTop(String url, String termId, Locale locale) {
    String language = getGEMETLanguageFilter(locale);

    // get concept and map to TreeTerm
    JSONObject inConcept = gemetClient.getConceptAsJSON(termId, language);
    // we check on null, cause some concepts are buggy in service !
    // (e.g. concept/15041)
    if (inConcept == null) {
        log.error("Problems fetching " + termId + " we return empty TreeTerm !");
        return new TreeTermImpl();
    }

    TreeTerm resultTreeTerm = gemetMapper.mapToTreeTerm(inConcept, null, null);

    // Set parents up to the top. We only produce ONE PATH, so no multiple
    // parents are set. Terms are processed off the stack until it is empty.
    Stack<TreeTerm> parentStack = new Stack<TreeTerm>();
    parentStack.add(resultTreeTerm);

    while (!parentStack.empty()) {
        TreeTerm currentTerm = parentStack.pop();
        if (currentTerm.getParents() == null) {
            // No parents processed yet: fetch and attach the first parent found.
            processParentsOfTerm(currentTerm, language, true);
            // Parents may still be null here — that means currentTerm is a top node.
            if (currentTerm.getParents() != null) {
                parentStack.addAll(currentTerm.getParents());
            }
        }
    }

    return resultTreeTerm;
}

From source file:au.edu.jcu.fascinator.plugin.harvester.directory.DirectoryNameHarvester.java

/**
 * Initialisation of Directory harvester plugin: validates the configured
 * targets, reads the default/override metadata filenames and metadata types,
 * resets the loop state, primes the Derby cache and fetches the first file.
 *
 * @throws HarvesterException if no targets are configured, a metadata
 *         filename is missing, or the cache cannot be instantiated
 */
@Override
public void init() throws HarvesterException {
    // Check for valid targets
    targets = getJsonConfig().getJsonSimpleList("harvester", "directory", "targets");
    if (targets.isEmpty()) {
        throw new HarvesterException("No targets specified");
    }

    //obtain the metadata filenames
    JsonSimple defaultFilenames = new JsonSimple(getJsonConfig().getObject("harvester", "default-files"));

    //this filename is expected to exist in the main folder, it provides the 
    //standard metadata for all directories
    // NOTE(review): the literal string "null" is passed as the default value
    // here and below — presumably a null default was intended; verify against
    // JsonSimple.getString's contract (the null checks below suggest the
    // config lookup can indeed return null).
    defaultMetadataFilename = defaultFilenames.getString("null", "default-metadata-filename");
    //this filename is optional, it may exist in the directories being 
    //processed. It's contents can override and add to the default metadata
    overrideMetadataFilename = defaultFilenames.getString("null", "override-metadata-filename");

    if (defaultMetadataFilename == null) {
        throw new HarvesterException("No default metadata filename specified");
    }
    if (overrideMetadataFilename == null) {
        throw new HarvesterException("No override metadata filename specified");
    }

    //obtain the metadata types
    metadataTypes = getJsonConfig().getJsonSimpleList("harvester", "metadata-types");

    // Loop processing variables
    fileStack = new Stack<File>();
    targetIndex = null;
    hasMore = true;

    // Caching
    try {
        cache = new DerbyCache(getJsonConfig());
        // Reset flags for deletion support
        cache.resetFlags();
    } catch (Exception ex) {
        log.error("Error instantiating cache: ", ex);
        throw new HarvesterException(ex);
    }

    // Prep the first file
    nextFile = getNextFile();
}