Example usage for java.util Stack pop

List of usage examples for java.util Stack pop

Introduction

On this page you can find example usage for java.util Stack pop.

Prototype

public synchronized E pop() 

Document

Removes the object at the top of this stack and returns that object as the value of this function.
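
The minimal sketch below (class and variable names are ours, not from any project) shows the basic contract: pop() removes and returns the most recently pushed element, and throws java.util.EmptyStackException once the stack is empty.

import java.util.EmptyStackException;
import java.util.Stack;

public class StackPopDemo {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<String>();
        stack.push("first");
        stack.push("second");

        // pop() removes and returns the most recently pushed element (LIFO).
        System.out.println(stack.pop()); // prints "second"
        System.out.println(stack.pop()); // prints "first"

        // pop() on an empty stack throws EmptyStackException.
        try {
            stack.pop();
        } catch (EmptyStackException e) {
            System.out.println("stack is empty");
        }
    }
}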

Usage

From source file:com.taobao.tdhs.jdbc.sqlparser.ParseSQL.java

private void analyzeUpdateSetColumns(String substring) {
    if (substring == null)
        return;

    /*String[] array_setColumn = substring.split(",");
      for (String setColumn : array_setColumn) {
      int addr = StringUtils.indexOfIgnoreCase(setColumn, "=");
      String column = setColumn.substring(0, addr).trim();
      String value = setColumn.substring(addr + 1).trim();
      this.updateEntries.add(new Entry<String, String>(column, value));
      }*/

    // push each character of the substring onto a stack so it can be scanned from the end
    Stack<String> updateColumnValueStack = new Stack<String>();
    for (int i = 0; i < substring.length(); i++) {
        updateColumnValueStack.push(substring.substring(i, i + 1));
    }

    String column = "";
    String value = "";
    while (updateColumnValueStack.isEmpty() == false) {
        column = "";
        value = "";
        // accumulate the value string (everything right of '=')
        while (updateColumnValueStack.peek().equals("=") == false
                || checkRealEqual(updateColumnValueStack) == false) {
            value = updateColumnValueStack.pop() + value;
        }
        // discard the '='
        updateColumnValueStack.pop();
        // accumulate the column name (everything left of '=')
        try {
            while (updateColumnValueStack.peek().equals(",") == false) {
                column = updateColumnValueStack.pop() + column;
            }
        } catch (EmptyStackException e) {
            // reached the start of the string: record the final (leftmost) pair
            this.updateEntries.add(new Entry<String, String>(column, value));
            break;
        }

        // discard the ',' between pairs
        updateColumnValueStack.pop();
        // record the parsed column/value pair
        this.updateEntries.add(new Entry<String, String>(column, value));
    }

}
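
The example above relies on the peek()/pop() idiom: peek() decides whether to keep consuming, pop() does the consuming, and both throw EmptyStackException on an empty stack. A minimal, self-contained sketch of that pop-until-delimiter pattern (our own code, not from the project):

import java.util.Stack;

public class PopUntilDelimiter {
    // Pops characters off the stack until the delimiter is on top,
    // rebuilding them in their original left-to-right order.
    static String popUntil(Stack<String> stack, String delimiter) {
        StringBuilder out = new StringBuilder();
        while (!stack.isEmpty() && !stack.peek().equals(delimiter)) {
            out.insert(0, stack.pop()); // pop() returns the most recent character
        }
        return out.toString();
    }

    public static void main(String[] args) {
        Stack<String> stack = new Stack<String>();
        for (char c : "a=1".toCharArray()) {
            stack.push(String.valueOf(c));
        }
        System.out.println(popUntil(stack, "=")); // prints "1"
    }
}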

From source file:aula1.Aula1.java

public static String conversor(String entrada, String info) throws Exception {
    Pilha<String> input = new Pilha<>();
    Pilha<String> simbolos = new Pilha<>();
    Stack<Op> operadores = new Stack<>();
    Pilha<String> saida = new Pilha<>();
    String[] operadoresSuportados = { "+", "-", "*", "/", "^", "(", ")", "sen" };

    for (int i = 0; i < entrada.length(); i++) {
        String s = "";
        try {
            if (Character.isDigit(entrada.charAt(i))) {
                s = String.valueOf(entrada.charAt(i));
                while (Character.isDigit(entrada.charAt(i + 1))) {
                    s += String.valueOf(entrada.charAt(i + 1));
                    i++;
                }
            } else {
                if (entrada.charAt(i) == 's' && entrada.contains("sen")) {
                    int ind = entrada.indexOf("sen");
                    String ent = entrada.substring(ind);
                    int ini = ent.indexOf("sen(") + 4;
                    int fim = ent.indexOf(")/");
                    CharSequence x = ent.subSequence(ini, fim);
                    if (entrada.contains("sen(" + x + ")/" + x)) {
                        entrada = entrada.replace("sen(" + x + ")/" + x, "1");
                        s = "1";
                    } else {
                        ind += 2;
                        i = -1;
                        entrada = entrada.substring(ind + 1);
                        if (entrada.charAt(0) != '(')
                            throw new Exception("Falta de '(' aps sen");
                        s = "sen";
                    }
                } else
                    s = String.valueOf(entrada.charAt(i));
            }
            simbolos.push(s);
            input.push(s);
        } catch (IndexOutOfBoundsException ex) {
            s = String.valueOf(entrada.charAt(i));
            simbolos.push(s);
            input.push(s);
        }
    }
    while (!simbolos.isEMpty()) {
        String simbolo = simbolos.pop();

        if (Character.isDigit(simbolo.charAt(0)) || simbolo.charAt(0) == 'x')
            saida.push(simbolo);
        else if (Arrays.asList(operadoresSuportados).contains(simbolo)) {
            Op operador = new Op(simbolo);
            Op topOperador;
            try {
                topOperador = operadores.peek();
            } catch (EmptyStackException e) {
                topOperador = null;
            }
            if (simbolo.equals(")")) {
                while (topOperador != null && !topOperador.op().equals("(")) {
                    saida.push(topOperador.op());
                    operadores.pop();
                    topOperador = operadores.peek();
                }
                operadores.pop();
            } else if (simbolo.equals("(")) {
                operadores.push(operador);
            } else {
                while (topOperador != null && topOperador.Precedencia() > operador.Precedencia()) {
                    saida.push(topOperador.op());
                    operadores.pop();
                    try {
                        topOperador = operadores.peek();
                    } catch (EmptyStackException e) {
                        topOperador = null;
                    }
                }
                operadores.push(operador);
            }
        }
    }
    while (!operadores.isEmpty()) {
        Op operador = operadores.pop();
        saida.push(operador.op());
    }
    String resultado = "";
    for (String s : saida) {
        System.out.println("saida: " + s);
        resultado += s + " ";
    }
    resultado = calculaPolonesaINversa(resultado, info);
    return resultado;

}
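
The method above is a shunting-yard conversion to reverse Polish notation; pop() drains the operator stack into the output, and evaluation is delegated to calculaPolonesaINversa, which is not shown. As a complement, here is a minimal RPN evaluator built on the same pop() discipline (our own sketch, not the project's code):

import java.util.Stack;

public class RpnEval {
    // Evaluates a space-separated reverse Polish expression, e.g. "3 4 + 2 *".
    static double eval(String rpn) {
        Stack<Double> stack = new Stack<Double>();
        for (String token : rpn.trim().split("\\s+")) {
            if (token.length() == 1 && "+-*/".contains(token)) {
                double right = stack.pop(); // operands come off in reverse order
                double left = stack.pop();
                switch (token.charAt(0)) {
                case '+': stack.push(left + right); break;
                case '-': stack.push(left - right); break;
                case '*': stack.push(left * right); break;
                default:  stack.push(left / right); break;
                }
            } else {
                stack.push(Double.parseDouble(token));
            }
        }
        return stack.pop(); // the result is the last value left on the stack
    }

    public static void main(String[] args) {
        System.out.println(eval("3 4 + 2 *")); // prints 14.0
    }
}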

From source file:com.espertech.esper.epl.join.plan.NStreamOuterQueryPlanBuilder.java

/**
 * Recursively builds a substream-per-stream ordered tree graph using the
 * join information supplied for outer joins and from the query graph (where clause).
 * <p>
 * Required streams are considered first and their lookup is placed first in the list
 * to gain performance.
 * @param streamNum is the root stream number that supplies the incoming event to build the tree for
 * @param queryGraph contains where-clause stream relationship info
 * @param completedStreams is a temporary holder for streams already considered
 * @param substreamsPerStream is the ordered, tree-like structure to be filled
 * @param streamCallStack the query plan call stack of streams available via cursor
 * @param dependencyGraph - dependencies between historical streams
 * @throws ExprValidationException if the query planning failed
 */
protected static void recursiveBuildInnerJoin(int streamNum, Stack<Integer> streamCallStack,
        QueryGraph queryGraph, Set<Integer> completedStreams, LinkedHashMap<Integer, int[]> substreamsPerStream,
        DependencyGraph dependencyGraph) throws ExprValidationException {
    // add this stream to the set of completed streams
    completedStreams.add(streamNum);

    // check if the dependencies have been satisfied
    if (dependencyGraph.hasDependency(streamNum)) {
        Set<Integer> dependencies = dependencyGraph.getDependenciesForStream(streamNum);
        for (Integer dependentStream : dependencies) {
            if (!streamCallStack.contains(dependentStream)) {
                throw new ExprValidationException(
                        "Historical stream " + streamNum + " parameter dependency originating in stream "
                                + dependentStream + " cannot or may not be satisfied by the join");
            }
        }
    }

    // Determine the streams we can navigate to from this stream
    Set<Integer> navigableStreams = queryGraph.getNavigableStreams(streamNum);

    // remove streams with a dependency on other streams not yet processed
    Integer[] navigableStreamArr = navigableStreams.toArray(new Integer[navigableStreams.size()]);
    for (int navigableStream : navigableStreamArr) {
        if (dependencyGraph.hasUnsatisfiedDependency(navigableStream, completedStreams)) {
            navigableStreams.remove(navigableStream);
        }
    }

    // remove those already done
    navigableStreams.removeAll(completedStreams);

    // if we are a leaf node, we are done
    if (navigableStreams.isEmpty()) {
        substreamsPerStream.put(streamNum, new int[0]);
        return;
    }

    // First the outer (required) streams to this stream, then the inner (optional) streams
    int[] substreams = new int[navigableStreams.size()];
    substreamsPerStream.put(streamNum, substreams);
    int count = 0;
    for (int stream : navigableStreams) {
        substreams[count++] = stream;
        completedStreams.add(stream);
    }

    for (int stream : navigableStreams) {
        streamCallStack.push(stream);
        recursiveBuildInnerJoin(stream, streamCallStack, queryGraph, completedStreams, substreamsPerStream,
                dependencyGraph);
        streamCallStack.pop();
    }
}
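
Here streamCallStack mirrors the recursion: each stream is pushed before descending and popped after returning, so the stack always holds the current path of streams. A minimal sketch of that push/recurse/pop pattern (the Node type is hypothetical, not from Esper):

import java.util.Arrays;
import java.util.List;
import java.util.Stack;

public class PathTracker {
    static class Node {
        final int id;
        final List<Node> children;
        Node(int id, Node... children) {
            this.id = id;
            this.children = Arrays.asList(children);
        }
    }

    // Depth-first visit that keeps the current root-to-node path on a stack.
    static void visit(Node node, Stack<Integer> path) {
        path.push(node.id); // extend the path before descending
        System.out.println("at " + node.id + " via " + path);
        for (Node child : node.children) {
            visit(child, path);
        }
        path.pop(); // restore the path on the way back up
    }

    public static void main(String[] args) {
        Node root = new Node(1, new Node(2, new Node(3)), new Node(4));
        visit(root, new Stack<Integer>());
        // prints: at 1 via [1], at 2 via [1, 2], at 3 via [1, 2, 3], at 4 via [1, 4]
    }
}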

From source file:ca.weblite.xmlvm.XMLVM.java

/**
 * Loads all of the dependencies associated with a collection of input
 * files. Dependencies will be loaded as stub files, so that XMLVM can be
 * run without producing a full transitive dependency chain.
 * @deprecated
 * @param dir The directory containing the current .xmlvm files to be
 * converted. Dependent stub files will be copied to this directory by this
 * method.
 * @param files Collection of .xmlvm files to be parsed for dependencies. It
 * is assumed that these files are already located inside the "dir"
 * directory.
 * @throws ParserConfigurationException
 * @throws SAXException
 * @throws IOException
 * @throws FileNotFoundException
 * @throws UnsatisfiedDependencyException If a dependency cannot be found in
 * the cache. If you receive this exception, you should probably just run a
 * recursive dependent build with XMLVM to generate the cache.
 * @throws TransformerException
 */
public void loadDependencies(File dir, Collection<File> files) throws ParserConfigurationException,
        SAXException, IOException, FileNotFoundException, UnsatisfiedDependencyException, TransformerException {
    Set<String> loadedClasses = new HashSet<String>();

    // First find out which classes are already loaded.
    for (File f : dir.listFiles()) {
        if (f.getName().endsWith(".xmlvm")) {
            ClassFile cf = getClassFile(f.getParentFile(), f);
            if (cf == null) {
                throw new RuntimeException("Failed to get class file for " + f);
            }
            loadedClasses.add(cf.getName());
        }
    }

    System.out.println(loadedClasses.size() + " classes loaded.  Looking for dependencies...");

    // Now go through each of the loaded classes, and find dependencies.
    Set<File> newFiles = new HashSet<File>();
    File xmlvmstubDir = getXmlvmCacheDir("xmlvmstubs");
    File xmlvmDir = getXmlvmCacheDir("xmlvm");
    Stack<File> stack = new Stack<File>();
    stack.addAll(files);
    while (!stack.isEmpty()) {
        File f = stack.pop();
        String[] deps = getRequiredClasses(f, true);
        System.out.println(deps.length + " required classes in " + f.getName());
        for (String dep : deps) {
            if (!loadedClasses.contains(dep)) {
                // Check for cached stub
                System.out.println("Unloaded dependency found " + dep + ". Looking for cached version...");
                ClassFile cf = new ClassFile(dep);
                String fileName = cf.getcPrefix() + ".xmlvm";
                File cachedStubFile = new File(xmlvmstubDir, fileName);
                File cachedFullFile = new File(xmlvmDir, fileName);
                if (cachedStubFile.exists()) {
                    File destStubFile = new File(dir, fileName);
                    FileUtils.copyFile(cachedStubFile, destStubFile);
                    stack.push(destStubFile);
                    loadedClasses.add(cf.getName());

                } else if (cachedFullFile.exists()) {
                    File tempStubFile = new File(xmlvmstubDir, fileName);
                    createXMLVMClassStub(cachedFullFile, tempStubFile);
                    File destFile = new File(dir, fileName);
                    FileUtils.copyFile(tempStubFile, destFile);
                    stack.push(destFile);
                    loadedClasses.add(cf.getName());
                } else {

                    // At this point, we'll just give up this foolish mission
                    // If the caller receives this exception, it should
                    // just run xmlvm with transitive dependency loading.
                    throw new UnsatisfiedDependencyException(dep);
                }
            }
        }

    }

}
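
loadDependencies uses the Stack as a worklist: files are popped one at a time, and any newly discovered dependencies are pushed back for later processing until the stack drains. A minimal, generic sketch of the same traversal (graph shape and names are ours):

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.Stack;

public class Worklist {
    // Returns every node reachable from start by following edges in graph.
    static Set<String> reachable(String start, Map<String, Set<String>> graph) {
        Set<String> seen = new HashSet<String>();
        Stack<String> stack = new Stack<String>();
        stack.push(start);
        while (!stack.isEmpty()) {
            String current = stack.pop(); // take the next unit of work
            if (!seen.add(current)) {
                continue; // already processed, skip
            }
            Set<String> next = graph.get(current);
            if (next != null) {
                for (String n : next) {
                    stack.push(n); // schedule newly discovered work
                }
            }
        }
        return seen;
    }

    public static void main(String[] args) {
        Map<String, Set<String>> graph = new HashMap<String, Set<String>>();
        graph.put("a", new HashSet<String>(Arrays.asList("b", "c")));
        graph.put("b", new HashSet<String>(Arrays.asList("c")));
        System.out.println(reachable("a", graph)); // [a, b, c] in some order
    }
}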

From source file:javalin.draw.Drawing.java

public void draw(final DrawingGraphics dg) {
    float[] pos = new float[] { 0, 0, 0 };
    final Stack<FloatList> polygonStack = new Stack<FloatList>();
    FloatList polygon = null;
    Map<Integer, float[][][]> patches = null;

    int argIndex = 0;
    final Iterator<DrawingOp> opIter = drawing.iterator();
    while (opIter.hasNext()) {
        final DrawingOp op = opIter.next();

        switch (op) {
        case LINE_TO:
            float x = args.get(argIndex++);
            float y = args.get(argIndex++);
            float z = args.get(argIndex++);
            dg.drawLine(pos[0], pos[1], pos[2], x, y, z);

            pos[0] = x;
            pos[1] = y;
            pos[2] = z;
            break;
        case MOVE_TO:
            pos[0] = args.get(argIndex++);
            pos[1] = args.get(argIndex++);
            pos[2] = args.get(argIndex++);
            break;
        case SETCOLOR:
            dg.setColor(args.get(argIndex++), args.get(argIndex++), args.get(argIndex++), args.get(argIndex++));
            break;
        case START_POLY:
            if (polygon != null)
                polygonStack.push(polygon);
            polygon = new FloatList(64);
            break;
        case VERTEX:
            if (polygon == null)
                throw new IllegalStateException("You can't create a vertex before a polygon is begun.");
            polygon.add(pos[0], pos[1], pos[2]);
            break;
        case END_POLY:
            if (polygon == null)
                throw new IllegalStateException("You can't end a polygon before a polygon is begun.");
            dg.drawPolygon(polygon.toArray());
            polygon = polygonStack.size() > 0 ? polygonStack.pop() : null;
            break;

        case INIT_PATCH:
            int patchIndex = (int) args.get(argIndex++);
            if (patches == null)
                patches = new HashMap<Integer, float[][][]>();
            float[][][] patch;
            if (patches.containsKey(patchIndex))
                patch = patches.get(patchIndex);
            else {
                patch = new float[4][4][3];
                patches.put(patchIndex, patch);
            }
            for (int row = 0; row < 4; row++)
                for (int col = 0; col < 4; col++)
                    for (int d = 0; d < 3; d++)
                        patch[row][col][d] = 0;
            break;
        case CONTROL_POINT:
            patchIndex = (int) args.get(argIndex++);
            patch = patches.get(patchIndex);
            if (patch == null)
                throw new IllegalStateException("Patch " + patchIndex + " has not been initialized.");
            int row = (int) args.get(argIndex++);
            int col = (int) args.get(argIndex++);
            patch[row][col][0] = args.get(argIndex++);
            patch[row][col][1] = args.get(argIndex++);
            patch[row][col][2] = args.get(argIndex++);
            break;
        case DRAW_PATCH:
            patchIndex = (int) args.get(argIndex++);
            patch = patches.get(patchIndex);
            if (patch == null)
                throw new IllegalStateException(
                        "You can't draw patch " + patchIndex + " before it is initialized.");
            final int gridSteps = (int) args.get(argIndex++);
            dg.drawPatch(patch, gridSteps);
            break;
        }
    }
    if (polygon != null || polygonStack.size() > 0)
        throw new IllegalStateException("Unfinished polygon.");
}

From source file:ca.weblite.xmlvm.XMLVM.java

/**
 * Finds all of the classes that may be dirty due to changes in 
 * an initial set of classes.
 * @param changedClasses An initial set of classes that were modified.
 * @param dirtyClasses The full set of classes that may be dirty due to these
 * changes.  This will necessarily be a superset of changedClasses.  
 * @param depsDir The directory that contains the deps.
 * @throws IOException 
 */
public void collectDirtyClasses(Set<String> changedClasses, Set<String> dirtyClasses, File depsDir)
        throws IOException {

    Set<String> processed = new HashSet<String>();
    Stack<String> stack = new Stack<>();
    stack.addAll(changedClasses);
    dirtyClasses.addAll(changedClasses);
    while (!stack.isEmpty()) {
        String cls = stack.pop();
        if (processed.contains(cls)) {
            continue;
        } else {
            processed.add(cls);
        }
        File depsFile = new File(depsDir, cls + ".deps");
        if (depsFile.exists()) {
            List<String> lines = FileUtils.readLines(depsFile);
            for (String line : lines) {
                String[] parts = line.split(" ");
                String clsName = parts[0];
                if (dirtyClasses.contains(clsName)) {
                    // This class is already marked dirty.
                    continue;
                }
                String kind = parts[1];

                switch (kind) {
                case "usage":
                    dirtyClasses.add(clsName);
                    break;
                case "super":
                case "interface":
                    dirtyClasses.add(clsName);
                    stack.push(clsName);
                    break;
                }
            }
        }
    }

}

From source file:com.datatorrent.stram.plan.physical.PhysicalPlan.java

/**
 *
 * @param dag
 * @param ctx
 */
public PhysicalPlan(LogicalPlan dag, PlanContext ctx) {

    this.dag = dag;
    this.ctx = ctx;
    this.maxContainers = Math.max(dag.getMaxContainerCount(), 1);
    LOG.debug("Max containers: {}", this.maxContainers);

    Stack<OperatorMeta> pendingNodes = new Stack<OperatorMeta>();

    // Add logging operators for streams if not added already
    updatePersistOperatorStreamCodec(dag);

    for (OperatorMeta n : dag.getAllOperators()) {
        pendingNodes.push(n);
    }

    while (!pendingNodes.isEmpty()) {
        OperatorMeta n = pendingNodes.pop();

        if (this.logicalToPTOperator.containsKey(n)) {
            // already processed as upstream dependency
            continue;
        }

        boolean upstreamDeployed = true;

        for (Map.Entry<InputPortMeta, StreamMeta> entry : n.getInputStreams().entrySet()) {
            StreamMeta s = entry.getValue();
            boolean delay = entry.getKey().getValue(LogicalPlan.IS_CONNECTED_TO_DELAY_OPERATOR);
            // skip delay sources since it's going to be handled as downstream
            if (!delay && s.getSource() != null
                    && !this.logicalToPTOperator.containsKey(s.getSource().getOperatorMeta())) {
                pendingNodes.push(n);
                pendingNodes.push(s.getSource().getOperatorMeta());
                upstreamDeployed = false;
                break;
            }
        }

        if (upstreamDeployed) {
            addLogicalOperator(n);
        }
    }

    updatePartitionsInfoForPersistOperator(dag);

    // assign operators to containers
    int groupCount = 0;
    Set<PTOperator> deployOperators = Sets.newHashSet();
    for (Map.Entry<OperatorMeta, PMapping> e : logicalToPTOperator.entrySet()) {
        for (PTOperator oper : e.getValue().getAllOperators()) {
            if (oper.container == null) {
                PTContainer container = getContainer((groupCount++) % maxContainers);
                if (!container.operators.isEmpty()) {
                    LOG.warn(
                            "Operator {} shares container without locality contraint due to insufficient resources.",
                            oper);
                }
                Set<PTOperator> inlineSet = oper.getGrouping(Locality.CONTAINER_LOCAL).getOperatorSet();
                if (!inlineSet.isEmpty()) {
                    // process inline operators
                    for (PTOperator inlineOper : inlineSet) {
                        setContainer(inlineOper, container);
                    }
                } else {
                    setContainer(oper, container);
                }
                deployOperators.addAll(container.operators);
            }
        }
    }

    for (PTContainer container : containers) {
        updateContainerMemoryWithBufferServer(container);
        container.setRequiredVCores(getVCores(container.getOperators()));
    }

    for (Map.Entry<PTOperator, Operator> operEntry : this.newOpers.entrySet()) {
        initCheckpoint(operEntry.getKey(), operEntry.getValue(), Checkpoint.INITIAL_CHECKPOINT);
    }
    // request initial deployment
    ctx.deploy(Collections.<PTContainer>emptySet(), Collections.<PTOperator>emptySet(),
            Sets.newHashSet(containers), deployOperators);
    this.newOpers.clear();
    this.deployOpers.clear();
    this.undeployOpers.clear();
}
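
Note the re-push idiom in the loop above: when a popped operator still has an unplaced upstream source, it is pushed back beneath that source so the dependency is handled first, yielding a dependency-respecting placement order. A minimal sketch of the idiom (assumes an acyclic dependency map; names are ours):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;

public class DependencyOrder {
    // Emits nodes so that each node's dependencies come before it.
    // Assumes the dependency map is acyclic; a cycle would loop forever.
    static List<String> order(Map<String, List<String>> deps) {
        List<String> out = new ArrayList<String>();
        Set<String> done = new HashSet<String>();
        Stack<String> pending = new Stack<String>();
        for (String n : deps.keySet()) {
            pending.push(n);
        }
        while (!pending.isEmpty()) {
            String n = pending.pop();
            if (done.contains(n)) {
                continue; // already placed as someone else's dependency
            }
            String missing = null;
            List<String> ds = deps.get(n);
            if (ds != null) {
                for (String d : ds) {
                    if (!done.contains(d)) {
                        missing = d;
                        break;
                    }
                }
            }
            if (missing != null) {
                pending.push(n);       // revisit after the dependency is placed
                pending.push(missing); // handle the dependency first
            } else {
                done.add(n);
                out.add(n);
            }
        }
        return out;
    }

    public static void main(String[] args) {
        Map<String, List<String>> deps = new HashMap<String, List<String>>();
        deps.put("a", Arrays.<String>asList());
        deps.put("b", Arrays.asList("a"));
        System.out.println(order(deps)); // prints [a, b]
    }
}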

From source file:com.mirth.connect.model.converters.NCPDPReader.java

private void parseSegment(String segment, ContentHandler contentHandler) throws SAXException {
    if (StringUtils.isBlank(segment)) {
        return;
    }

    boolean inCounter = false;
    boolean inCount = false;
    boolean hasMoreFields = true;
    String segmentId = StringUtils.EMPTY;
    String subSegment = StringUtils.EMPTY;
    Stack<String> fieldStack = new Stack<String>();

    int fieldDelimeterIndex = segment.indexOf(fieldDelimeter);

    if (fieldDelimeterIndex == 0) {
        segment = segment.substring(fieldDelimeterIndex + fieldDelimeter.length());
        fieldDelimeterIndex = segment.indexOf(fieldDelimeter);
    }

    if (fieldDelimeterIndex == -1) {
        logger.warn("Empty segment with no field seperators. Make sure batch file processing is disabled.");
        hasMoreFields = false;
        segmentId = segment;
    } else {
        segmentId = segment.substring(0, fieldDelimeterIndex);
        subSegment = segment.substring(fieldDelimeterIndex + fieldDelimeter.length(), segment.length());
    }

    contentHandler.startElement("", NCPDPReference.getInstance().getSegment(segmentId, version), "", null);

    while (hasMoreFields) {
        fieldDelimeterIndex = subSegment.indexOf(fieldDelimeter);
        // not last field
        String field;

        if (fieldDelimeterIndex != -1) {
            field = subSegment.substring(0, subSegment.indexOf(fieldDelimeter));
            subSegment = subSegment.substring(fieldDelimeterIndex + fieldDelimeter.length());
        } else {
            field = subSegment;
            hasMoreFields = false;
        }

        String fieldId = field.substring(0, 2);
        String fieldDescription = NCPDPReference.getInstance().getDescription(fieldId, version);
        String fieldMessage = field.substring(2);

        if (inCount && !isRepeatingField(fieldDescription) && !fieldDescription.endsWith("Count")) {
            // if we were in a count field, end the element
            contentHandler.endElement("", fieldStack.pop(), "");

            if (fieldStack.size() == 0) {
                inCount = false;
            }
        }

        if (fieldDescription.endsWith("Counter")) {
            if (inCounter) {
                contentHandler.endElement("", fieldStack.pop(), "");
            }

            inCounter = true;
            AttributesImpl attr = new AttributesImpl();
            attr.addAttribute("", "counter", "counter", "", fieldMessage);
            contentHandler.startElement("", fieldDescription, "", attr);
            fieldStack.push(fieldDescription);
        } else if (fieldDescription.endsWith("Count")) {
            // count field, add complex element
            inCount = true;
            AttributesImpl attr = new AttributesImpl();
            attr.addAttribute("", fieldDescription, fieldDescription, "", fieldMessage);
            // start the repeating field element
            contentHandler.startElement("", fieldDescription, "", attr);
            fieldStack.push(fieldDescription);
        } else {
            contentHandler.startElement("", fieldDescription, "", null);
            contentHandler.characters(fieldMessage.toCharArray(), 0, fieldMessage.length());
            contentHandler.endElement("", fieldDescription, "");
        }
    }

    while (fieldStack.size() > 0) {
        // close remaining count and counters
        contentHandler.endElement("", fieldStack.pop(), "");
    }

    contentHandler.endElement("", NCPDPReference.getInstance().getSegment(segmentId, version), "");
}
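
fieldStack guarantees that nested counter and count elements are closed in the reverse order of opening, which is exactly the LIFO discipline pop() provides. A minimal sketch of that open/close pairing (element names are ours):

import java.util.Stack;

public class ElementCloser {
    public static void main(String[] args) {
        Stack<String> open = new Stack<String>();
        for (String name : new String[] { "batch", "record", "field" }) {
            System.out.println("<" + name + ">");
            open.push(name); // remember what still needs closing
        }
        // pop() yields the most recently opened element first,
        // so the closing tags nest correctly.
        while (!open.isEmpty()) {
            System.out.println("</" + open.pop() + ">");
        }
    }
}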

From source file:com.datatorrent.stram.plan.physical.PhysicalPlan.java

private void redoPartitions(PMapping currentMapping, String note) {
    Partitioner<Operator> partitioner = getPartitioner(currentMapping);
    if (partitioner == null) {
        LOG.warn("No partitioner for {}", currentMapping.logicalOperator);
        return;
    }

    RepartitionContext mainPC = new RepartitionContext(partitioner, currentMapping, 0);
    if (mainPC.newPartitions.isEmpty()) {
        LOG.warn("Empty partition list after repartition: {}", currentMapping.logicalOperator);
        return;
    }

    int memoryPerPartition = currentMapping.logicalOperator.getValue(OperatorContext.MEMORY_MB);
    for (Map.Entry<OutputPortMeta, StreamMeta> stream : currentMapping.logicalOperator.getOutputStreams()
            .entrySet()) {
        if (stream.getValue().getLocality() != Locality.THREAD_LOCAL
                && stream.getValue().getLocality() != Locality.CONTAINER_LOCAL) {
            memoryPerPartition += stream.getKey().getValue(PortContext.BUFFER_MEMORY_MB);
        }
    }
    for (OperatorMeta pp : currentMapping.parallelPartitions) {
        for (Map.Entry<OutputPortMeta, StreamMeta> stream : pp.getOutputStreams().entrySet()) {
            if (stream.getValue().getLocality() != Locality.THREAD_LOCAL
                    && stream.getValue().getLocality() != Locality.CONTAINER_LOCAL) {
                memoryPerPartition += stream.getKey().getValue(PortContext.BUFFER_MEMORY_MB);
            }
        }
        memoryPerPartition += pp.getValue(OperatorContext.MEMORY_MB);
    }
    int requiredMemoryMB = (mainPC.newPartitions.size() - mainPC.currentPartitions.size()) * memoryPerPartition;
    if (requiredMemoryMB > availableMemoryMB) {
        LOG.warn("Insufficient headroom for repartitioning: available {}m required {}m", availableMemoryMB,
                requiredMemoryMB);
        return;
    }

    List<Partition<Operator>> addedPartitions = new ArrayList<Partition<Operator>>();
    // determine modifications of partition set, identify affected operator instance(s)
    for (Partition<Operator> newPartition : mainPC.newPartitions) {
        PTOperator op = mainPC.currentPartitionMap.remove(newPartition);
        if (op == null) {
            addedPartitions.add(newPartition);
        } else {
            // check whether mapping was changed
            for (DefaultPartition<Operator> pi : mainPC.currentPartitions) {
                if (pi == newPartition && pi.isModified()) {
                    // existing partition changed (operator or partition keys)
                    // remove/add to update subscribers and state
                    mainPC.currentPartitionMap.put(newPartition, op);
                    addedPartitions.add(newPartition);
                }
            }
        }
    }

    // remaining entries represent deprecated partitions
    this.undeployOpers.addAll(mainPC.currentPartitionMap.values());
    // downstream dependencies require redeploy, resolve prior to modifying plan
    Set<PTOperator> deps = this.getDependents(mainPC.currentPartitionMap.values());
    this.undeployOpers.addAll(deps);
    // dependencies need redeploy, except operators excluded in remove
    this.deployOpers.addAll(deps);

    // process parallel partitions before removing operators from the plan
    LinkedHashMap<PMapping, RepartitionContext> partitionContexts = Maps.newLinkedHashMap();
    Stack<OperatorMeta> parallelPartitions = new Stack<LogicalPlan.OperatorMeta>();
    parallelPartitions.addAll(currentMapping.parallelPartitions);
    pendingLoop: while (!parallelPartitions.isEmpty()) {
        OperatorMeta ppMeta = parallelPartitions.pop();
        for (StreamMeta s : ppMeta.getInputStreams().values()) {
            if (currentMapping.parallelPartitions.contains(s.getSource().getOperatorMeta())
                    && parallelPartitions.contains(s.getSource().getOperatorMeta())) {
                parallelPartitions.push(ppMeta);
                parallelPartitions.remove(s.getSource().getOperatorMeta());
                parallelPartitions.push(s.getSource().getOperatorMeta());
                continue pendingLoop;
            }
        }
        LOG.debug("Processing parallel partition {}", ppMeta);

        PMapping ppm = this.logicalToPTOperator.get(ppMeta);
        Partitioner<Operator> ppp = getPartitioner(ppm);
        if (ppp == null) {
            partitionContexts.put(ppm, null);
        } else {
            RepartitionContext pc = new RepartitionContext(ppp, ppm, mainPC.newPartitions.size());
            if (pc.newPartitions == null) {
                throw new IllegalStateException(
                        "Partitioner returns null for parallel partition " + ppm.logicalOperator);
            }
            partitionContexts.put(ppm, pc);
        }
    }

    // plan updates start here, after all changes were identified
    // remove obsolete operators first, any freed resources
    // can subsequently be used for new/modified partitions
    List<PTOperator> copyPartitions = Lists.newArrayList(currentMapping.partitions);
    // remove deprecated partitions from plan
    for (PTOperator p : mainPC.currentPartitionMap.values()) {
        copyPartitions.remove(p);
        removePartition(p, currentMapping);
        mainPC.operatorIdToPartition.remove(p.getId());
    }
    currentMapping.partitions = copyPartitions;

    // add new operators
    for (Partition<Operator> newPartition : addedPartitions) {
        PTOperator p = addPTOperator(currentMapping, newPartition, mainPC.minCheckpoint);
        mainPC.operatorIdToPartition.put(p.getId(), newPartition);
    }

    // process parallel partition changes
    for (Map.Entry<PMapping, RepartitionContext> e : partitionContexts.entrySet()) {
        if (e.getValue() == null) {
            // no partitioner, add required operators
            for (int i = 0; i < addedPartitions.size(); i++) {
                LOG.debug("Automatically adding to parallel partition {}", e.getKey());
                // set activation windowId to conform to upstream checkpoints
                addPTOperator(e.getKey(), null, mainPC.minCheckpoint);
            }
        } else {
            RepartitionContext pc = e.getValue();
            // track previous parallel partition mapping
            Map<Partition<Operator>, Partition<Operator>> prevMapping = Maps.newHashMap();
            for (int i = 0; i < mainPC.currentPartitions.size(); i++) {
                prevMapping.put(pc.currentPartitions.get(i), mainPC.currentPartitions.get(i));
            }
            // determine which new partitions match upstream, remaining to be treated as new operator
            Map<Partition<Operator>, Partition<Operator>> newMapping = Maps.newHashMap();
            Iterator<Partition<Operator>> itMain = mainPC.newPartitions.iterator();
            Iterator<Partition<Operator>> itParallel = pc.newPartitions.iterator();
            while (itMain.hasNext() && itParallel.hasNext()) {
                newMapping.put(itParallel.next(), itMain.next());
            }

            for (Partition<Operator> newPartition : pc.newPartitions) {
                PTOperator op = pc.currentPartitionMap.remove(newPartition);
                if (op == null) {
                    pc.addedPartitions.add(newPartition);
                } else if (prevMapping.get(newPartition) != newMapping.get(newPartition)) {
                    // upstream partitions don't match, remove/add to replace with new operator
                    pc.currentPartitionMap.put(newPartition, op);
                    pc.addedPartitions.add(newPartition);
                } else {
                    // check whether mapping was changed - based on DefaultPartition implementation
                    for (DefaultPartition<Operator> pi : pc.currentPartitions) {
                        if (pi == newPartition && pi.isModified()) {
                            // existing partition changed (operator or partition keys)
                            // remove/add to update subscribers and state
                            mainPC.currentPartitionMap.put(newPartition, op);
                            pc.addedPartitions.add(newPartition);
                        }
                    }
                }
            }

            if (!pc.currentPartitionMap.isEmpty()) {
                // remove obsolete partitions
                List<PTOperator> cowPartitions = Lists.newArrayList(e.getKey().partitions);
                for (PTOperator p : pc.currentPartitionMap.values()) {
                    cowPartitions.remove(p);
                    removePartition(p, e.getKey());
                    pc.operatorIdToPartition.remove(p.getId());
                }
                e.getKey().partitions = cowPartitions;
            }
            // add new partitions
            for (Partition<Operator> newPartition : pc.addedPartitions) {
                PTOperator oper = addPTOperator(e.getKey(), newPartition, mainPC.minCheckpoint);
                pc.operatorIdToPartition.put(oper.getId(), newPartition);
            }

            getPartitioner(e.getKey()).partitioned(pc.operatorIdToPartition);
        }
    }

    updateStreamMappings(currentMapping);
    for (PMapping pp : partitionContexts.keySet()) {
        updateStreamMappings(pp);
    }

    deployChanges();

    if (mainPC.currentPartitions.size() != mainPC.newPartitions.size()) {
        StramEvent ev = new StramEvent.PartitionEvent(currentMapping.logicalOperator.getName(),
                mainPC.currentPartitions.size(), mainPC.newPartitions.size());
        ev.setReason(note);
        this.ctx.recordEventAsync(ev);
    }

    partitioner.partitioned(mainPC.operatorIdToPartition);
}

From source file:com.vmware.identity.idm.server.provider.ldap.LdapProvider.java

Set<Group> getNestedGroups(ILdapConnectionEx connection, String membershipId, boolean groupNameOnly)
        throws NoSuchGroupException, InvalidPrincipalException {
    Set<Group> groups = new HashSet<Group>();
    if (ServerUtils.isNullOrEmpty(membershipId) == false) {
        final String ATTR_NAME_GROUP_CN = _ldapSchemaMapping
                .getGroupAttribute(IdentityStoreAttributeMapping.AttributeIds.GroupAttributeAccountName);
        final String ATTR_DESCRIPTION = _ldapSchemaMapping
                .getGroupAttribute(IdentityStoreAttributeMapping.AttributeIds.GroupAttributeDescription);
        final String ATTR_ENTRY_UUID = _ldapSchemaMapping
                .getGroupAttribute(IdentityStoreAttributeMapping.AttributeIds.GroupAttributeObjectId);
        ArrayList<String> attributeNames = getAttributesList(ATTR_NAME_GROUP_CN, ATTR_ENTRY_UUID,
                ATTR_DESCRIPTION, !groupNameOnly);

        HashSet<String> groupsProcessed = new HashSet<String>();
        Stack<String> groupsToProcess = new Stack<String>();
        groupsToProcess.push(membershipId);

        while (groupsToProcess.isEmpty() == false) {
            String currentMembershipId = groupsToProcess.pop();
            if (groupsProcessed.contains(currentMembershipId) == false) {
                String filter = String.format(_ldapSchemaMapping.getDirectParentGroupsQuery(),
                        LdapFilterString.encode(currentMembershipId));

                Collection<ILdapMessage> messages = null;
                try {
                    messages = ldap_search(connection, getStoreDataEx().getGroupBaseDn(),
                            LdapScope.SCOPE_SUBTREE, filter, attributeNames, DEFAULT_PAGE_SIZE, -1);

                    String groupMembershipId = null;

                    if (messages != null && messages.size() > 0) {
                        for (ILdapMessage message : messages) {
                            ILdapEntry[] entries = message.getEntries();
                            if ((entries != null) && (entries.length > 0)) {
                                for (ILdapEntry entry : entries) {
                                    Group g = buildGroupObject(entry, ATTR_NAME_GROUP_CN, ATTR_ENTRY_UUID,
                                            ATTR_DESCRIPTION, !groupNameOnly);

                                    if (this._groupGroupMembersListLinkIsDn) {
                                        groupMembershipId = entry.getDN();
                                    } else if (this._groupGroupMembersListLinkExists) {
                                        groupMembershipId = getOptionalFirstStringValue(entry
                                                .getAttributeValues(GROUP_GROUP_MEMBERS_LIST_LINK_ATTRIBUTE));
                                    }

                                    groups.add(g);

                                    if (ServerUtils.isNullOrEmpty(groupMembershipId) == false) {
                                        groupsToProcess.push(groupMembershipId);
                                    }
                                }
                            }
                        }
                    }
                } catch (NoSuchObjectLdapException e) {
                    log.error(
                            String.format("Failed to search for grup membership for [%s]", currentMembershipId),
                            e);
                    throw e;
                } finally {
                    ServerUtils.disposeLdapMessages(messages);
                } // try

                groupsProcessed.add(currentMembershipId);
            }
        }
    }
    return groups;
}