Example usage for java.util ArrayList removeAll

List of usage examples for java.util ArrayList removeAll

Introduction

On this page you can find an example usage for java.util ArrayList removeAll.

Prototype

public boolean removeAll(Collection<?> c) 

Source Link

Document

Removes from this list all of its elements that are contained in the specified collection.

Usage

From source file:sbu.srl.rolextract.ArgumentClassifier.java

/**
 * Runs a feature-ablation experiment.
 *
 * <p>For each ablation group listed in {@code features.ablation}, this method removes the
 * group's features from the full feature list, writes the reduced list to the active feature
 * config, retrains and predicts on every cross-validation fold, copies the results to the
 * external ILP project, invokes its evaluation script, and appends the resulting scores to
 * {@code ablationNew.txt}. The removed features are restored before the next group is tried.
 *
 * @param outputDir       root directory holding the per-fold train/test data
 * @param crossValidation number of cross-validation folds to evaluate
 * @throws IOException          if a config/feature/report file cannot be read or written
 * @throws InterruptedException if the pause between ablation runs is interrupted
 */
public void performAblation(String outputDir, int crossValidation)
        throws IOException, FileNotFoundException, ClassNotFoundException, NoSuchMethodException,
        IllegalAccessException, IllegalArgumentException, InvocationTargetException, InterruptedException {
    ArrayList<String> triedFeatures = new ArrayList<String>(
            Arrays.asList(FileUtil.readLinesFromFile("./configSBUProcRel/features.ori")));
    List<String> ablationFeatures = getAblationFeatures("./configSBUProcRel/features.ablation");

    for (int idxAblation = 0; idxAblation < ablationFeatures.size(); idxAblation++) {
        System.out.println("Removing features : " + ablationFeatures.get(idxAblation));
        Thread.sleep(3000); // brief pause so the log line is visible before training starts
        List<String> removedFeatures = Arrays.asList(ablationFeatures.get(idxAblation).split(","));
        triedFeatures.removeAll(removedFeatures);
        FileUtil.dumpToFile(triedFeatures, "./configSBUProcRel/features");
        for (int idxFold = 1; idxFold <= crossValidation; idxFold++) {
            File trainFoldDir = new File(outputDir.concat("/fold-").concat("" + idxFold).concat("/train"));
            File testFoldDir = new File(outputDir.concat("/fold-").concat("" + idxFold).concat("/test"));
            SBURoleTrain trainer = new SBURoleTrain(trainFoldDir.getAbsolutePath().concat("/train.ser"),
                    isMultiClass);
            trainer.train(trainFoldDir.getAbsolutePath());

            SBURolePredict predict = new SBURolePredict(trainFoldDir.getAbsolutePath(),
                    testFoldDir.getAbsolutePath().concat("/test.arggold.ser"), isMultiClass);
            predict.performPrediction(testFoldDir.getAbsolutePath().concat("/test.arggold.ser"));

            ArrayList<Sentence> predictedSentences = (ArrayList<Sentence>) FileUtil
                    .deserializeFromFile(testFoldDir.getAbsolutePath().concat("/test.argpredict.ser"));
            Map<String, List<Sentence>> groupByProcess = predictedSentences.stream()
                    .collect(Collectors.groupingBy(Sentence::getProcessName));

            ArrayList<JSONData> jsonData = SentenceUtil.generateJSONData(groupByProcess);
            SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.srlout.json"),
                    false);
            SentenceUtil.flushDataToJSON(jsonData,
                    testFoldDir.getAbsolutePath().concat("/test.srlpredict.json"), true);
            // The three files below are placeholders so downstream tooling finds them.
            SentenceUtil.flushDataToJSON(jsonData,
                    testFoldDir.getAbsolutePath().concat("/test.ilppredict.json"), true); // dummy
            SentenceUtil.flushDataToJSON(jsonData,
                    testFoldDir.getAbsolutePath().concat("/test.semaforpredict.json"), true); // dummy
            SentenceUtil.flushDataToJSON(jsonData,
                    testFoldDir.getAbsolutePath().concat("/test.easysrlpredict.json"), true); // dummy

        }
        // copy all data to ILP's data folder
        // cp -r outputDir /home/slouvan/NetBeansProjects/ILP/data/
        try {
            ProcessBuilder pb = new ProcessBuilder(
                    "/home/slouvan/NetBeansProjects/SRL-Integrated/script/cpDir.sh", outputDir,
                    "/home/slouvan/NetBeansProjects/ILP/data/");
            Process p = pb.start(); // Start the process.
            p.waitFor(); // Wait for the process to finish.
            StdUtil.printOutput(p);

            pb = new ProcessBuilder("/usr/bin/python", "/home/slouvan/NetBeansProjects/ILP/evaluate.py");
            p = pb.start(); // Start the process.
            p.waitFor(); // Wait for the process to finish.
            StdUtil.printOutput(p);

            System.out.println("Script executed successfully");
        } catch (Exception e) {
            // Best-effort: evaluation failures are logged but do not abort the ablation loop.
            e.printStackTrace();
        }
        String[] lines = FileUtil.readLinesFromFile("/home/slouvan/NetBeansProjects/ILP/stats.txt");
        // try-with-resources guarantees the report writer is closed even if a write fails
        // (the original leaked the PrintWriter on any exception between open and close()).
        try (PrintWriter out = new PrintWriter(
                new BufferedWriter(new FileWriter(GlobalV.PROJECT_DIR + "/ablationNew.txt", true)))) {
            out.println((new Date()).toString() + " Removed features " + removedFeatures);
            out.println("Eval : " + Arrays.toString(lines));
        }

        // Restore the removed features so the next ablation starts from the full set.
        triedFeatures.addAll(removedFeatures);
    }
}

From source file:net.sf.jclal.experiment.ExperimentBuilder.java

/**
 * Expands the experiments for the configuration file
 *
 * @param experimentFileName The name of the experiment file.
 * @return The experiments for the configuration file.
 *///from  w w w  .ja v  a2 s .  c o m
/**
 * Expands the experiments for the configuration file.
 *
 * <p>Repeatedly expands multi-valued elements, then multi-valued attributes, until a fixed
 * point is reached (the expansion pass produces no new configurations). Intermediate
 * configuration files generated along the way are deleted, and when more than one experiment
 * was produced the final files are moved into the {@code experiments} directory.
 *
 * @param experimentFileName The name of the experiment file.
 * @return The experiments for the configuration file.
 */
public ArrayList<String> buildExperiment(String experimentFileName) {
    ArrayList<String> configurations = expandElements(experimentFileName);
    ArrayList<String> allCreatedExperiments = new ArrayList<String>();
    int numberExperiments = 1;

    allCreatedExperiments.addAll(configurations);

    /**
     * Expand multi-valued elements until no pass adds a configuration.
     */
    do {
        numberExperiments = configurations.size();

        ArrayList<String> createdExperiments = new ArrayList<String>();

        for (String experiment : configurations) {
            createdExperiments.addAll(expandElements(experiment));
        }

        allCreatedExperiments.addAll(createdExperiments);

        configurations = createdExperiments;

    } while (configurations.size() != numberExperiments);

    /**
     * Expand multi-valued attributes until no pass adds a configuration.
     */
    do {
        numberExperiments = configurations.size();

        ArrayList<String> createdExperiments = new ArrayList<String>();

        for (String experiment : configurations) {
            createdExperiments.addAll(expandAttributes(experiment));
        }

        allCreatedExperiments.addAll(createdExperiments);

        configurations = createdExperiments;

    } while (configurations.size() != numberExperiments);

    // Everything left in allCreatedExperiments after this is an intermediate file.
    allCreatedExperiments.removeAll(configurations);

    /**
     * Remove temp files (but never the caller's original file).
     */
    for (String temp : allCreatedExperiments) {
        if (!temp.equals(experimentFileName)) {
            new File(temp).delete();
        }
    }

    /**
     * Move the expanded configuration files to the experiments folder
     */
    if (configurations.size() > 1) {
        File dir = new File("experiments");

        /**
         * If the directory exists, delete all files
         */
        if (dir.exists()) {
            File[] experimentFiles = dir.listFiles();
            // listFiles() returns null when "experiments" exists but is not a
            // directory (or on an I/O error); guard against the NPE.
            if (experimentFiles != null) {
                for (File f : experimentFiles) {
                    f.delete();
                }
            }
        } /**
          * Else, create the directory
          */
        else {
            dir.mkdir();
        }

        for (int i = 0; i < configurations.size(); i++) {
            File file = new File(configurations.get(i));
            file.renameTo(new File(dir, file.getName()));
            String[] files = configurations.get(i).split("/");
            String fileName = files[files.length - 1];
            configurations.set(i, dir.getPath() + "/" + fileName);
        }
    }
    /**
     * Return the configuration filenames
     */
    return configurations;
}

From source file:com.bigdata.dastor.service.StorageService.java

/**
 * Computes which replica ranges change hands when the given endpoint leaves the ring
 * for the given table.
 *
 * <p>For every range the leaving endpoint is responsible for, the current natural
 * endpoints are compared against the natural endpoints computed on a token-metadata
 * clone with the endpoint removed; any node present only in the new set must newly
 * receive that range.
 *
 * @param table    the table whose replication strategy is consulted
 * @param endpoint the node that is leaving (or being force-removed via removetoken)
 * @return a multimap from each affected range to the nodes that newly become replicas for it
 */
private Multimap<Range, InetAddress> getChangedRangesForLeaving(String table, InetAddress endpoint) {
    // First get all ranges the leaving endpoint is responsible for
    Collection<Range> ranges = getRangesForEndPoint(table, endpoint);

    if (logger_.isDebugEnabled())
        logger_.debug("Node " + endpoint + " ranges [" + StringUtils.join(ranges, ", ") + "]");

    Map<Range, ArrayList<InetAddress>> currentReplicaEndpoints = new HashMap<Range, ArrayList<InetAddress>>();

    // Find (for each range) all nodes that store replicas for these ranges as well
    for (Range range : ranges)
        currentReplicaEndpoints.put(range,
                getReplicationStrategy(table).getNaturalEndpoints(range.right, tokenMetadata_, table));

    TokenMetadata temp = tokenMetadata_.cloneAfterAllLeft();

    // endpoint might or might not be 'leaving'. If it was not leaving (that is, removetoken
    // command was used), it is still present in temp and must be removed.
    if (temp.isMember(endpoint))
        temp.removeEndpoint(endpoint);

    Multimap<Range, InetAddress> changedRanges = HashMultimap.create();

    // Go through the ranges and for each range check who will be
    // storing replicas for these ranges when the leaving endpoint
    // is gone. Whoever is present in newReplicaEndpoins list, but
    // not in the currentReplicaEndpoins list, will be needing the
    // range.
    for (Range range : ranges) {
        ArrayList<InetAddress> newReplicaEndpoints = getReplicationStrategy(table)
                .getNaturalEndpoints(range.right, temp, table);
        // Drop nodes that already replicate this range; what remains must acquire it.
        newReplicaEndpoints.removeAll(currentReplicaEndpoints.get(range));
        if (logger_.isDebugEnabled())
            if (newReplicaEndpoints.isEmpty())
                logger_.debug("Range " + range + " already in all replicas");
            else
                logger_.debug("Range " + range + " will be responsibility of "
                        + StringUtils.join(newReplicaEndpoints, ", "));
        changedRanges.putAll(range, newReplicaEndpoints);
    }

    return changedRanges;
}

From source file:net.spy.memcached.CouchbaseConnection.java

/**
 * Applies a new bucket configuration: computes the new server set, keeps nodes whose
 * address is unchanged, opens connections to newly-added servers, swaps the node list,
 * and schedules the removed ("odd") nodes for shutdown.
 *
 * <p>The {@code reconfiguring} flag is raised for the duration and always cleared in
 * the finally block, even on failure.
 *
 * @param bucket the bucket whose config supplies the new server list
 */
@Override
public void reconfigure(Bucket bucket) {
    reconfiguring = true;
    try {
        // get a new collection of addresses from the received config
        List<String> servers = bucket.getConfig().getServers();
        HashSet<SocketAddress> newServerAddresses = new HashSet<SocketAddress>();
        ArrayList<InetSocketAddress> newServers = new ArrayList<InetSocketAddress>();
        for (String server : servers) {
            int finalColon = server.lastIndexOf(':');
            if (finalColon < 1) {
                throw new IllegalArgumentException(
                        "Invalid server ``" + server + "'' in vbucket's server list");
            }
            String hostPart = server.substring(0, finalColon);
            // NOTE(review): the port carried in the config entry is ignored and the
            // CouchDB default port 5984 used instead -- confirm this is intentional.
            // (Original wrapped the constant in Integer.parseInt("5984"); the literal
            // is equivalent without the runtime parse.)
            InetSocketAddress address = new InetSocketAddress(hostPart, 5984);
            // add parsed address to our collections
            newServerAddresses.add(address);
            newServers.add(address);
        }

        // split current nodes to "odd nodes" (to be dropped) and "stay nodes" (kept)
        ArrayList<CouchbaseNode> oddNodes = new ArrayList<CouchbaseNode>();
        ArrayList<CouchbaseNode> stayNodes = new ArrayList<CouchbaseNode>();
        ArrayList<InetSocketAddress> stayServers = new ArrayList<InetSocketAddress>();
        for (CouchbaseNode current : nodes) {
            if (newServerAddresses.contains(current.getSocketAddress())) {
                stayNodes.add(current);
                stayServers.add((InetSocketAddress) current.getSocketAddress());
            } else {
                oddNodes.add(current);
            }
        }

        // prepare a collection of addresses for new nodes only
        newServers.removeAll(stayServers);

        // create a collection of new nodes
        List<CouchbaseNode> newNodes = createConnections(newServers);

        // merge stay nodes with new nodes
        List<CouchbaseNode> mergedNodes = new ArrayList<CouchbaseNode>();
        mergedNodes.addAll(stayNodes);
        mergedNodes.addAll(newNodes);

        // call update locator with new nodes list and vbucket config
        nodes = mergedNodes;

        // schedule shutdown for the oddNodes
        nodesToShutdown.addAll(oddNodes);
    } catch (IOException e) {
        getLogger().error("Connection reconfiguration failed", e);
    } finally {
        reconfiguring = false;
    }
}

From source file:edu.mit.media.funf.probe.Probe.java

/**
 * Updates request list with items in queue, replacing duplicate pending intents for this probe.
 * @param requests/* ww  w. jav  a  2s .c om*/
 */
private void updateRequests(boolean removeRunOnce) {
    assert requestsIntent != null;
    boolean hasChanges = false;
    ArrayList<Intent> requests = requestsIntent.getParcelableArrayListExtra(INTERNAL_REQUESTS_KEY);
    if (requests == null) {
        hasChanges = true;
        requests = new ArrayList<Intent>();
    }

    // Remove run once requests
    Parameter periodParam = Parameter.getAvailableParameter(getAvailableParameters(), Parameter.Builtin.PERIOD);
    if (periodParam != null && removeRunOnce) {
        for (Intent request : requests) {
            ArrayList<Bundle> dataRequests = Utils.getArrayList(request.getExtras(), REQUESTS_KEY);
            List<Bundle> runOnceDataRequests = new ArrayList<Bundle>();
            for (Bundle dataRequest : dataRequests) {
                long periodValue = Utils.getLong(dataRequest, Parameter.Builtin.PERIOD.name,
                        (Long) periodParam.getValue());
                if (periodValue == 0L) {
                    Log.d(TAG, "Removing run once dataRequest: " + dataRequest);
                    runOnceDataRequests.add(dataRequest);
                }
            }
            dataRequests.removeAll(runOnceDataRequests);
            if (dataRequests.isEmpty()) {
                deadRequests.add(request);
            } else {
                request.putExtra(REQUESTS_KEY, dataRequests);
            }
        }
    }

    // Remove all requests that we aren't able to (or supposed to) send to anymore
    if (!deadRequests.isEmpty()) {
        hasChanges = true;
        for (Intent deadRequest = deadRequests.poll(); deadRequest != null; deadRequest = deadRequests.poll()) {
            Log.d(TAG, "Removing dead request: " + deadRequest);
            requests.remove(deadRequest);
        }
    }
    // Add any pending requests
    if (!pendingRequests.isEmpty()) {
        hasChanges = true;
        Map<PendingIntent, Intent> existingCallbacksToRequests = new HashMap<PendingIntent, Intent>();
        for (Intent existingRequest : requests) {
            PendingIntent callback = existingRequest.getParcelableExtra(CALLBACK_KEY);
            existingCallbacksToRequests.put(callback, existingRequest);
        }
        for (Intent request = pendingRequests.poll(); request != null; request = pendingRequests.poll()) {
            PendingIntent callback = request.getParcelableExtra(CALLBACK_KEY);
            if (packageHasRequiredPermissions(this, callback.getTargetPackage(), getRequiredPermissions())) {
                existingCallbacksToRequests.containsKey(callback);
                int existingRequestIndex = requests.indexOf(existingCallbacksToRequests.get(callback));
                ArrayList<Bundle> dataRequests = Utils.getArrayList(request.getExtras(), REQUESTS_KEY);
                Log.d(TAG, "Adding pending intent with data requests: " + dataRequests);
                if (existingRequestIndex >= 0) {
                    if (dataRequests == null || dataRequests.isEmpty()) {
                        Log.d(TAG, "Adding pending intent, removing because empty or null");
                        requests.remove(existingRequestIndex);
                    } else {
                        requests.set(existingRequestIndex, request);
                    }
                } else {
                    if (dataRequests != null && !dataRequests.isEmpty()) { // Only add requests with nonempty data requests
                        Log.d(TAG, "Adding new pending intent: " + request);
                        requests.add(request);
                    }
                }
            } else {
                Log.w(TAG, "Package '" + callback.getTargetPackage()
                        + "' does not have the required permissions to get data from this probe.");
            }
        }
    }

    if (hasChanges) {
        requestsIntent.putExtra(INTERNAL_REQUESTS_KEY, requests);
        updateInternalRequestsPendingIntent();
    }
}

From source file:br.unicamp.cst.behavior.bn.Behavior.java

/**
 * @return the amount of activation that is spread backwards from other modules in the direction of this module
 *
 *         Note: this approach is slightly different from the one proposed at the article by [Maes 1989] since here we try to avoid meddling with another codelet's states.
 */
public double spreadBw() {

    // In this case x= other modules, y= this module
    double activation = 0;
    // synchronized(this.successors){
    if (!this.getSuccessors().isEmpty()) {
        Enumeration e = this.getSuccessors().keys();
        // iterate through Hashtable keys Enumeration
        while (e.hasMoreElements()) {
            Behavior module = (Behavior) e.nextElement();
            // NOTE(review): impendingAccess(module) appears to acquire both this
            // module's lock and `module`'s lock (they are released in the finally
            // block below) -- confirm against its implementation.
            if (impendingAccess(module)) {
                try {
                    double amount = 0;
                    if (!module.isExecutable()) {// A competence module x that is not executable spreads activation backward.

                        ArrayList<MemoryObject> intersection = new ArrayList<MemoryObject>();
                        ArrayList<MemoryObject> preconPlusSoftPrecon = new ArrayList<MemoryObject>();

                        // NOTE(review): unlike spreadFw(), only the hard preconditions are
                        // collected here; getSoftPreconList() is not added despite the
                        // variable's name. Confirm whether that omission is intentional.
                        preconPlusSoftPrecon.addAll(module.getListOfPreconditions());

                        // Successor preconditions that this module can achieve and that
                        // are not already satisfied by the current world state.
                        intersection.addAll(getIntersectionSet(preconPlusSoftPrecon, this.getAddList()));
                        intersection.removeAll(worldState);
                        for (MemoryObject item : intersection) {
                            amount = amount + ((1.0 / this.competencesWithPropInAdd(item))
                                    * (1.0 / (double) this.getAddList().size()));
                        }
                        amount = amount * module.getActivation()
                                * (globalVariables.getPhi() / globalVariables.getGamma());
                        if (showActivationSpread) {
                            System.out.println(this.getName() + " receives " + amount
                                    + " backwarded energy from " + module.getName() + " [which has A= "
                                    + module.getActivation() + " ]");
                        }

                    }
                    // --------------------------
                    activation = activation + amount;
                } finally {
                    lock.unlock();
                    module.lock.unlock();
                }
            }
        }
    }

    return activation;
}

From source file:br.unicamp.cst.behavior.bn.Behavior.java

/**
 * @return the amount of activation that is spread forward from other modules in the direction of this module
 *
 *         Note: this approach is slightly different from the one proposed at the article by [Maes 1989] since here we try to avoid meddling with another codelet's states.
 */
public double spreadFw() {
    // In this case x= other modules, y= this module
    double activation = 0;
    // synchronized(this.predecessors){
    if (!this.getPredecessors().isEmpty()) {
        Enumeration e = this.getPredecessors().keys();
        // iterate through Hashtable keys Enumeration
        while (e.hasMoreElements()) {
            Behavior module = (Behavior) e.nextElement();
            // NOTE(review): impendingAccess(module) appears to acquire both locks
            // released in the finally block below -- confirm against its implementation.
            if (impendingAccess(module)) {
                try {
                    double amount = 0;
                    if (module.isExecutable()) {// An executable competence module x spreads activation forward.
                        ArrayList<MemoryObject> intersection = new ArrayList<MemoryObject>();

                        ArrayList<MemoryObject> preconPlusSoftPrecon = new ArrayList<MemoryObject>();

                        // Hard and soft preconditions of *this* module.
                        preconPlusSoftPrecon.addAll(this.getListOfPreconditions());
                        preconPlusSoftPrecon.addAll(this.getSoftPreconList());

                        // NOTE(review): the intersection is taken with the predecessor's
                        // *delete* list; Maes-style forward spread is usually keyed on the
                        // add list -- confirm getDeleteList() is intended here.
                        intersection.addAll(getIntersectionSet(module.getDeleteList(), preconPlusSoftPrecon));
                        intersection.removeAll(worldState);
                        for (MemoryObject item : intersection) {
                            amount = amount + ((1.0 / this.competencesWithPropInPrecon(item))
                                    * (1.0 / (double) preconPlusSoftPrecon.size()));
                        }
                        amount = amount * module.getActivation()
                                * (globalVariables.getPhi() / globalVariables.getGamma());
                        if (showActivationSpread) {
                            System.out.println(this.getName() + " receives " + amount
                                    + " forwarded energy from " + module.getName() + " [which has A= "
                                    + module.getActivation() + " ]");
                        }

                    }
                    // ------------------------------------------------
                    activation = activation + amount;
                } finally {
                    lock.unlock();
                    module.lock.unlock();
                }
            }
        }
    }
    // }//end synch
    return activation;
}

From source file:edu.uga.cs.fluxbuster.clustering.ClusterGenerator.java

/**
 * Copies candidate flux domains into a list if its candidate score is greater
 * than a threshold up to a limit on the size of the list.  The candidate flux 
 * domains are copied from a map of candidate flux domains.  Domains are only 
 * considered if they appear in the all domains list.   Once a candidate flux 
 * domain is copied it's corresponding domain name is removed from the all 
 * domains list./*from www  . java2s. com*/
 * 
 * @param maxCandidateDomains the limit on the total number of domains to add
 * @param goodCandidateThreshold the candidate score threshold
 * @param resultBuf the list in which to store the candidate flux domains
 * @param seenDomains the map of candidate flux domains.
 * @param allDomains this list of domains to consider
 */
private void addThresholdMeetingDomains(int maxCandidateDomains, double goodCandidateThreshold,
        List<CandidateFluxDomain> resultBuf, HashMap<String, CandidateFluxDomain> seenDomains,
        ArrayList<String> allDomains) {
    ArrayList<CandidateFluxDomain> sortedDomains = new ArrayList<CandidateFluxDomain>();
    ArrayList<String> removeDomains = new ArrayList<String>();
    // get all cfd's whose score is over the threshold
    for (String domain : allDomains) {
        CandidateFluxDomain temp = seenDomains.get(domain);
        if (this.calcCandidateScore(temp) > goodCandidateThreshold) {
            sortedDomains.add(temp);
        }
    }

    // sort them in descending order by score
    Collections.sort(sortedDomains, new Comparator<CandidateFluxDomain>() {
        @Override
        public int compare(CandidateFluxDomain o1, CandidateFluxDomain o2) {
            Double o1score = calcCandidateScore(o1);
            Double o2score = calcCandidateScore(o2);
            return o2score.compareTo(o1score); // Descending
            // order
        }
    });

    for (CandidateFluxDomain cfd2 : sortedDomains) {
        if (resultBuf.size() == maxCandidateDomains) {
            break;
        }
        resultBuf.add(cfd2);
        removeDomains.add(cfd2.getDomainName());
    }
    allDomains.removeAll(removeDomains);
}

From source file:org.apache.hadoop.hbase.regionserver.TestHStore.java

// Verifies that a StoreScanner can switch from pread to stream mode while a
// compaction discharger concurrently replaces the store files it is reading:
// the scanner's KeyValueHeap must be rebuilt (heap != heap2) after the switch.
@Test
public void testSwitchingPreadtoStreamParallelyWithCompactionDischarger() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.hstore.engine.class", DummyStoreEngine.class.getName());
    // Pread-max of 0 forces the switch to stream mode as soon as anything is read.
    conf.setLong(StoreScanner.STORESCANNER_PREAD_MAX_BYTES, 0);
    // Set the lower threshold to invoke the "MERGE" policy
    MyStore store = initMyStore(name.getMethodName(), conf, new MyStoreHook() {
    });
    MemStoreSize memStoreSize = new MemStoreSize();
    long ts = System.currentTimeMillis();
    long seqID = 1l;
    // Add some data to the region and do some flushes
    for (int i = 1; i < 10; i++) {
        store.add(createCell(Bytes.toBytes("row" + i), qf1, ts, seqID++, Bytes.toBytes("")), memStoreSize);
    }
    // flush them
    flushStore(store, seqID);
    for (int i = 11; i < 20; i++) {
        store.add(createCell(Bytes.toBytes("row" + i), qf1, ts, seqID++, Bytes.toBytes("")), memStoreSize);
    }
    // flush them
    flushStore(store, seqID);
    for (int i = 21; i < 30; i++) {
        store.add(createCell(Bytes.toBytes("row" + i), qf1, ts, seqID++, Bytes.toBytes("")), memStoreSize);
    }
    // flush them
    flushStore(store, seqID);

    assertEquals(3, store.getStorefilesCount());
    Scan scan = new Scan();
    scan.addFamily(family);
    Collection<HStoreFile> storefiles2 = store.getStorefiles();
    ArrayList<HStoreFile> actualStorefiles = Lists.newArrayList(storefiles2);
    StoreScanner storeScanner = (StoreScanner) store.getScanner(scan, scan.getFamilyMap().get(family),
            Long.MAX_VALUE);
    // get the current heap
    KeyValueHeap heap = storeScanner.heap;
    // create more store files
    for (int i = 31; i < 40; i++) {
        store.add(createCell(Bytes.toBytes("row" + i), qf1, ts, seqID++, Bytes.toBytes("")), memStoreSize);
    }
    // flush them
    flushStore(store, seqID);

    for (int i = 41; i < 50; i++) {
        store.add(createCell(Bytes.toBytes("row" + i), qf1, ts, seqID++, Bytes.toBytes("")), memStoreSize);
    }
    // flush them
    flushStore(store, seqID);
    storefiles2 = store.getStorefiles();
    ArrayList<HStoreFile> actualStorefiles1 = Lists.newArrayList(storefiles2);
    // Isolate only the files created after the scanner was opened.
    actualStorefiles1.removeAll(actualStorefiles);
    // Do compaction concurrently with the scanner's pread->stream switch.
    MyThread thread = new MyThread(storeScanner);
    thread.start();
    store.replaceStoreFiles(actualStorefiles, actualStorefiles1);
    thread.join();
    KeyValueHeap heap2 = thread.getHeap();
    // The heap must have been replaced during the switch.
    assertFalse(heap.equals(heap2));
}

From source file:com.globalsight.everest.webapp.pagehandler.edit.inctxrv.EditorPageHandler.java

/**
 * Moves the editor state to the previous page pair, if one exists, updating
 * first/last-page flags and (for PMs) the page's editability.
 *
 * @param p_state         the editor state being navigated
 * @param p_session       the HTTP session used to resolve the page-pair list
 * @param p_fromActivity  when true, walks backwards with the empty-page bookkeeping
 *                        below instead of a simple single-step move
 * @param isContextReview when true, pages with nothing to review are excluded first
 */
private void previousPage(EditorState p_state, HttpSession p_session, boolean p_fromActivity,
        boolean isContextReview) throws EnvoyServletException {
    ArrayList<EditorState.PagePair> pages = p_state.getPages();
    pages = (ArrayList<EditorState.PagePair>) getPagePairList(p_session, pages);
    if (isContextReview) {
        // In-context review hides pages that have nothing to review.
        pages.removeAll(getRemovePages(pages));
    }
    int i_index = pages.indexOf(p_state.getCurrentPage());

    if (p_fromActivity) {
        boolean foundNonempty = false;
        boolean allEmptyBefore = true;
        // NOTE(review): after the previous page is selected the loop runs at most
        // one more iteration, which only flips allEmptyBefore and breaks; the
        // post-loop setIsFirstPage(true) therefore fires only when no page exists
        // before the selected one. Confirm this matches the intended semantics.
        while (i_index > 0) {
            --i_index;
            EditorState.PagePair pp = (EditorState.PagePair) pages.get(i_index);

            if (!foundNonempty) {
                p_state.setCurrentPage(pp);
                p_state.setIsFirstPage(i_index == 0);
                p_state.setIsLastPage(false);

                initState(p_state, p_session);

                if (p_state.getUserIsPm() && s_pmCanEditTargetPages) {
                    applyPmEditability(p_state);
                }
                foundNonempty = true;
                continue;
            }

            if (foundNonempty && allEmptyBefore) {
                allEmptyBefore = false;
                break;
            }

        }
        if (foundNonempty && allEmptyBefore) {
            p_state.setIsFirstPage(true);
        }
    } else {
        if (i_index > 0) {
            --i_index;

            p_state.setCurrentPage((EditorState.PagePair) pages.get(i_index));

            p_state.setIsFirstPage(i_index == 0);
            p_state.setIsLastPage(false);

            initState(p_state, p_session);

            if (p_state.getUserIsPm() && s_pmCanEditTargetPages) {
                applyPmEditability(p_state);
            }
        }
    }

}

/**
 * Grants a PM full edit rights on the current page when permitted, otherwise
 * forces the page read-only. (Extracted from two identical inline copies in
 * previousPage.)
 */
private void applyPmEditability(EditorState p_state) {
    if (EditorHelper.pmCanEditCurrentPage(p_state)) {
        p_state.setReadOnly(false);
        p_state.setAllowEditAll(true);
        p_state.setEditAllState(EDIT_ALL);
    } else {
        p_state.setReadOnly(true);
    }
}