Example usage for java.util.Stack Stack()

List of usage examples for the java.util.Stack constructor Stack().

Introduction

On this page you can find usage examples for the java.util.Stack constructor Stack().

Prototype

public Stack() 

Document

Creates an empty Stack.
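
The prototype above is the no-argument constructor; here is a minimal, self-contained sketch of it before the real-world examples below (the String element type and the values are chosen only for illustration):

import java.util.Stack;

public class StackConstructorDemo {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<String>(); // creates an empty Stack

        stack.push("first");
        stack.push("second");

        // LIFO order: the last element pushed is the first one popped.
        System.out.println(stack.pop());     // second
        System.out.println(stack.peek());    // first
        System.out.println(stack.isEmpty()); // false
    }
}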

Usage

From source file:org.apache.hadoop.gateway.filter.rewrite.impl.json.JsonFilterReader.java

public JsonFilterReader(Reader reader, UrlRewriteFilterContentDescriptor config) throws IOException {
    this.reader = reader;
    factory = new JsonFactory();
    mapper = new ObjectMapper();
    parser = factory.createParser(reader);
    writer = new StringWriter();
    buffer = writer.getBuffer();
    offset = 0;
    generator = factory.createGenerator(writer);
    stack = new Stack<Level>();
    bufferingLevel = null;
    bufferingConfig = null;
    this.config = config;
}
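
The stack here is presumably used to track how deeply the reader has descended into the JSON document as it is streamed through the parser; Level is the filter's own type and is not shown above. A minimal sketch of that push-on-start / pop-on-end pattern with a plain Jackson parser and a Stack<String> standing in for Level (the names and the printed output are illustrative, not the filter's actual behavior):

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

import java.io.StringReader;
import java.util.Stack;

public class JsonNestingSketch {
    public static void main(String[] args) throws Exception {
        JsonParser parser = new JsonFactory().createParser(new StringReader("{\"a\":{\"b\":[1,2]}}"));
        Stack<String> nesting = new Stack<String>();
        JsonToken token;
        while ((token = parser.nextToken()) != null) {
            if (token == JsonToken.START_OBJECT || token == JsonToken.START_ARRAY) {
                nesting.push(token.name());   // descend one level
            } else if (token == JsonToken.END_OBJECT || token == JsonToken.END_ARRAY) {
                nesting.pop();                // climb back out
            }
            System.out.println("depth=" + nesting.size() + " token=" + token);
        }
    }
}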

From source file:com.madrobot.di.wizard.json.JSONDeserializer.java

/**
 * Deserializes the JSON data from the input into the corresponding entity type.<br/>
 * If an error occurs while parsing, it is ignored where possible; otherwise a null value is returned.
 *
 * @param objType
 *            Type of the entity to deserialize the data into
 * @param jsonObject
 *            JSON object to read the data from
 *
 * @return Deserialized object if successful, null otherwise
 * @see #deserialize(InputStream, Class)
 */
public <T> T deserialize(final Class<T> objType, final JSONObject jsonObject) throws JSONException {

    try {

        Stack<Class<?>> stack = new Stack<Class<?>>();
        stack.push(objType);

        T resultObject = objType.newInstance();

        deserialize(resultObject, jsonObject, stack);

        return resultObject;
    } catch (IllegalAccessException e) {
        Log.e(TAG, e.getMessage());
    } catch (InstantiationException e) {
        Log.e(TAG, e.getMessage());
    }

    return null;
}
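
The Stack<Class<?>> seeds the recursive deserialize overload with the type currently being populated; presumably nested entity types are pushed before recursing into a child object and popped afterwards. A library-independent sketch of that type-tracking pattern (User and Address are made-up classes and the traversal is heavily simplified; this is not the madrobot implementation):

import java.lang.reflect.Field;
import java.util.Stack;

public class TypeStackSketch {
    static class Address { String city; }
    static class User { String name; Address address; }

    static void walk(Class<?> type, Stack<Class<?>> stack) {
        stack.push(type); // remember which entity's fields are being filled
        System.out.println("deserializing " + type.getSimpleName() + " at depth " + stack.size());
        for (Field field : type.getDeclaredFields()) {
            if (field.getType() != String.class && !field.getType().isPrimitive()) {
                walk(field.getType(), stack); // recurse into a nested entity type
            }
        }
        stack.pop(); // done with this nesting level
    }

    public static void main(String[] args) {
        walk(User.class, new Stack<Class<?>>());
    }
}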

From source file:com.jaspersoft.jasperserver.war.themes.ThemeCache.java

public ThemeCache() {
    name2uid = new ConcurrentHashMap<String, String>();
    uid2name = new ConcurrentHashMap<String, String>();
    themeMap = new ConcurrentHashMap<String, HierarchicalTheme>();
    resourceMap = new ConcurrentHashMap<String, ThemeResource>();

    themeRefreshThread = new RefreshThread();
    themeRefreshThread.setDaemon(true);
    themeRefreshThread.start();
    updatedURIs = new Stack<String>();
}
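
The updatedURIs stack is shared with the background refresh thread started just above; java.util.Stack inherits its synchronization from Vector, so individual push and pop calls are thread-safe, but check-then-act sequences still need external locking. A small sketch of that kind of hand-off (the URI strings and the polling loop are illustrative, not the ThemeCache logic):

import java.util.Stack;

public class SharedStackSketch {
    private static final Stack<String> updated = new Stack<String>();

    public static void main(String[] args) throws InterruptedException {
        Thread refresher = new Thread(() -> {
            while (!Thread.currentThread().isInterrupted()) {
                synchronized (updated) {             // guard the empty-check + pop pair
                    while (!updated.isEmpty()) {
                        System.out.println("refreshing " + updated.pop());
                    }
                }
                try { Thread.sleep(100); } catch (InterruptedException e) { return; }
            }
        });
        refresher.setDaemon(true);
        refresher.start();

        updated.push("/themes/default/theme.css");   // push itself is synchronized via Vector
        updated.push("/themes/default/buttons.css");
        Thread.sleep(300);
    }
}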

From source file:edu.uci.ics.jung.algorithms.scoring.BetweennessCentrality.java

protected void computeBetweenness(Queue<V> queue, Transformer<E, ? extends Number> edge_weights) {
    for (V v : graph.getVertices()) {
        // initialize the betweenness data for this new vertex
        for (V s : graph.getVertices())
            this.vertex_data.put(s, new BetweennessData());

        //         if (v.equals(new Integer(0)))
        //            System.out.println("pause");

        vertex_data.get(v).numSPs = 1;
        vertex_data.get(v).distance = 0;

        Stack<V> stack = new Stack<V>();
        //            Buffer<V> queue = new UnboundedFifoBuffer<V>();
        //            queue.add(v);
        queue.offer(v);

        while (!queue.isEmpty()) {
            //                V w = queue.remove();
            V w = queue.poll();
            stack.push(w);
            BetweennessData w_data = vertex_data.get(w);

            for (E e : graph.getOutEdges(w)) {
                // TODO (jrtom): change this to getOtherVertices(w, e)
                V x = graph.getOpposite(w, e);
                if (x.equals(w))
                    continue;
                double wx_weight = edge_weights.transform(e).doubleValue();

                //                for(V x : graph.getSuccessors(w)) 
                //                {
                //                   if (x.equals(w))
                //                      continue;

                // FIXME: the other problem is that I need to 
                // keep putting the neighbors of things we've just 
                // discovered in the queue, if they're undiscovered or
                // at greater distance.

                // FIXME: this is the problem, right here, I think: 
                // need to update position in queue if distance changes
                // (which can only happen with weighted edges).
                // for each outgoing edge e from w, get other end x
                // if x not already visited (dist x < 0)
                //   set x's distance to w's dist + edge weight
                //   add x to queue; pri in queue is x's dist
                // if w's dist + edge weight < x's dist 
                //   update x's dist
                //   update x in queue (MapBinaryHeap)
                //   clear x's incoming edge list
                // if w's dist + edge weight = x's dist
                //   add e to x's incoming edge list

                BetweennessData x_data = vertex_data.get(x);
                double x_potential_dist = w_data.distance + wx_weight;

                if (x_data.distance < 0) {
                    //                        queue.add(x);
                    //                        vertex_data.get(x).distance = vertex_data.get(w).distance + 1;
                    x_data.distance = x_potential_dist;
                    queue.offer(x);
                }

                // note:
                // (1) this can only happen with weighted edges
                // (2) x's SP count and incoming edges are updated below 
                if (x_data.distance > x_potential_dist) {
                    x_data.distance = x_potential_dist;
                    // invalidate previously identified incoming edges
                    // (we have a new shortest path distance to x)
                    x_data.incomingEdges.clear();
                    // update x's position in queue
                    ((MapBinaryHeap<V>) queue).update(x);
                }
                //                  if (vertex_data.get(x).distance == vertex_data.get(w).distance + 1) 
                // 
                //                    if (x_data.distance == x_potential_dist) 
                //                    {
                //                        x_data.numSPs += w_data.numSPs;
                ////                        vertex_data.get(x).predecessors.add(w);
                //                        x_data.incomingEdges.add(e);
                //                    }
            }
            for (E e : graph.getOutEdges(w)) {
                V x = graph.getOpposite(w, e);
                if (x.equals(w))
                    continue;
                double e_weight = edge_weights.transform(e).doubleValue();
                BetweennessData x_data = vertex_data.get(x);
                double x_potential_dist = w_data.distance + e_weight;
                if (x_data.distance == x_potential_dist) {
                    x_data.numSPs += w_data.numSPs;
                    //                        vertex_data.get(x).predecessors.add(w);
                    x_data.incomingEdges.add(e);
                }
            }
        }
        while (!stack.isEmpty()) {
            V x = stack.pop();

            //              for (V w : vertex_data.get(x).predecessors) 
            for (E e : vertex_data.get(x).incomingEdges) {
                V w = graph.getOpposite(x, e);
                double partialDependency = vertex_data.get(w).numSPs / vertex_data.get(x).numSPs
                        * (1.0 + vertex_data.get(x).dependency);
                vertex_data.get(w).dependency += partialDependency;
                //                  E w_x = graph.findEdge(w, x);
                //                  double w_x_score = edge_scores.get(w_x).doubleValue();
                //                  w_x_score += partialDependency;
                //                  edge_scores.put(w_x, w_x_score);
                double e_score = edge_scores.get(e).doubleValue();
                edge_scores.put(e, e_score + partialDependency);
            }
            if (!x.equals(v)) {
                double x_score = vertex_scores.get(x).doubleValue();
                x_score += vertex_data.get(x).dependency;
                vertex_scores.put(x, x_score);
            }
        }
    }

    if (graph instanceof UndirectedGraph) {
        for (V v : graph.getVertices()) {
            double v_score = vertex_scores.get(v).doubleValue();
            v_score /= 2.0;
            vertex_scores.put(v, v_score);
        }
        for (E e : graph.getEdges()) {
            double e_score = edge_scores.get(e).doubleValue();
            e_score /= 2.0;
            edge_scores.put(e, e_score);
        }
    }

    vertex_data.clear();
}
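
The stack in this example drives the accumulation phase of Brandes' betweenness algorithm: vertices are pushed in the order they are settled, so popping them later visits them in non-increasing distance from the source and lets dependencies flow backwards along shortest paths. A stripped-down, unweighted version of that pattern on a tiny hard-coded graph (a simplification for illustration, not a substitute for the JUNG implementation above):

import java.util.*;

public class BrandesSketch {
    public static void main(String[] args) {
        // Undirected path graph 0 - 1 - 2, as adjacency lists.
        List<List<Integer>> adj = Arrays.asList(
                Arrays.asList(1), Arrays.asList(0, 2), Arrays.asList(1));
        int n = adj.size();
        double[] betweenness = new double[n];

        for (int s = 0; s < n; s++) {
            Stack<Integer> stack = new Stack<Integer>();
            List<List<Integer>> pred = new ArrayList<List<Integer>>();
            for (int i = 0; i < n; i++) pred.add(new ArrayList<Integer>());
            double[] sigma = new double[n];
            int[] dist = new int[n];
            Arrays.fill(dist, -1);
            sigma[s] = 1;
            dist[s] = 0;

            Queue<Integer> queue = new ArrayDeque<Integer>();
            queue.offer(s);
            while (!queue.isEmpty()) {
                int v = queue.poll();
                stack.push(v);                       // settled order = push order
                for (int w : adj.get(v)) {
                    if (dist[w] < 0) { dist[w] = dist[v] + 1; queue.offer(w); }
                    if (dist[w] == dist[v] + 1) { sigma[w] += sigma[v]; pred.get(w).add(v); }
                }
            }

            double[] delta = new double[n];
            while (!stack.isEmpty()) {               // pop order = farthest vertices first
                int w = stack.pop();
                for (int v : pred.get(w)) delta[v] += sigma[v] / sigma[w] * (1 + delta[w]);
                if (w != s) betweenness[w] += delta[w];
            }
        }
        System.out.println(Arrays.toString(betweenness)); // vertex 1 lies on the 0-2 shortest path
    }
}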

From source file:com.quiltplayer.core.storage.neo.NeoStorage.java

@Override
@Transactional
public Stack<Album> getAlbumsAsStack(final Collection<Artist> artists) {
    final Stack<Album> albums = new Stack<Album>();

    for (final Artist artist : artists) {
        for (Album album : artist.getAlbums()) {
            albums.push(album);
        }
    }

    return albums;
}

From source file:hrider.hbase.Scanner.java

/**
 * Initializes a new instance of the {@link Scanner} class.
 *
 * @param connection The reference to the connection.
 * @param tableName  The name of the table to be scanned.
 */
public Scanner(Connection connection, String tableName) {
    this.connection = connection;
    this.tableName = tableName;
    this.rowsCount = 0;
    this.lastRow = 0;
    this.markers = new Stack<Marker>();
}

From source file:org.hdiv.dataComposer.AbstractDataComposer.java

/**
 * DataComposer initialization with new stack to store all states of the page <code>page</code>.
 */
public void init() {
    this.setPage(new Page());
    this.statesStack = new Stack<IState>();
}

From source file:com.mewmew.fairy.v1.cli.AnnotatedCLI.java

public static <T> AnnotatedCLI getMagicCLI(Class<T> c) {
    Stack<Class> stack = new Stack<Class>();
    Class p = c;
    while (p != Object.class) {
        stack.push(p);
        p = p.getSuperclass();
    }
    return new AnnotatedCLI(stack.toArray(new Class[stack.size()]));
}
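
One subtlety the snippet above relies on: Stack extends Vector, so toArray and iteration return elements in insertion order (bottom of the stack first), not in pop order, which is why the most-derived class ends up first in the resulting array. A quick demonstration of the same superclass walk (ArrayList is used only because it has a convenient hierarchy):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Stack;

public class StackOrderSketch {
    public static void main(String[] args) {
        Stack<Class<?>> stack = new Stack<Class<?>>();
        Class<?> p = ArrayList.class;
        while (p != Object.class) {
            stack.push(p);                 // the most-derived class is pushed first
            p = p.getSuperclass();
        }
        // toArray preserves push order: [ArrayList, AbstractList, AbstractCollection]
        System.out.println(Arrays.toString(stack.toArray(new Class<?>[stack.size()])));
        // pop order is the reverse of that array
        System.out.println(stack.pop());   // AbstractCollection (the last class pushed)
    }
}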

From source file:com.medigy.persist.EntitySaveListener.java

public void newEntityList() {
    entityList = new Stack();
}

From source file:PmpEditor.java

/**
 * Constructs a PmpEditor
 */
public PmpEditor() {
    app = this;
    changes = new Stack();

    // Set up the printing options
    options = new StyledTextPrintOptions();
    options.footer = StyledTextPrintOptions.SEPARATOR + StyledTextPrintOptions.PAGE_TAG
            + StyledTextPrintOptions.SEPARATOR + "Confidential";
}