Example usage for java.util ArrayList remove

List of usage examples for java.util ArrayList remove

Introduction

In this page you can find the example usage for java.util ArrayList remove.

Prototype

public boolean remove(Object o) 

Source Link

Document

Removes the first occurrence of the specified element from this list, if it is present.

Usage

From source file:com.kbot2.scriptable.methods.data.Walking.java

/**
 * Computes the shortest path between two walker nodes using Dijkstra's
 * algorithm over the loaded node graph.
 *
 * @param startNode the node to start from
 * @param endNode   the node to reach
 * @return the path from startNode to endNode (both inclusive), or null when
 *         endNode is unreachable or an error occurred
 */
private WalkerNode[] findPath(WalkerNode startNode, WalkerNode endNode) {
    try {
        if (!loadNodes || !loadLisks)
            load();
        // Unvisited set; every node starts "infinitely" far away.
        ArrayList<WalkerNode> unvisited = new ArrayList<WalkerNode>();
        for (int i = 0; i < nodes.size(); i++) {
            WalkerNode thisNode = nodes.get(i);
            // Sentinel for "unreached"; Integer.MAX_VALUE would overflow
            // when edge costs are added below.
            thisNode.distance = 999999;
            thisNode.previous = null;
            unvisited.add(thisNode);
        }
        startNode.distance = 0;
        while (!unvisited.isEmpty()) {
            // Pick the unvisited node with the smallest tentative distance.
            WalkerNode nearestNode = unvisited.get(0);
            for (int i = 0; i < unvisited.size(); i++) {
                WalkerNode thisNode = unvisited.get(i);
                if (thisNode.distance <= nearestNode.distance)
                    nearestNode = thisNode;
            }
            // remove(Object) suffices; the original indexOf() round trip
            // performed the same scan twice.
            unvisited.remove(nearestNode);
            if (nearestNode == endNode)
                break; // target settled; remaining nodes are irrelevant
            for (int i = 0; i < nearestNode.neighbours.size(); i++) {
                WalkerNode neighbourNode = nearestNode.neighbours.get(i);
                int alt = nearestNode.distance + nearestNode.distance(neighbourNode);
                if (alt < neighbourNode.distance) {
                    neighbourNode.distance = alt;
                    neighbourNode.previous = nearestNode;
                }
            }
        }
        // Walk the previous-pointers back from the end node, then reverse
        // into an array so the result runs start -> end.
        ArrayList<WalkerNode> nodePath = new ArrayList<WalkerNode>();
        nodePath.add(endNode);
        WalkerNode previousNode = endNode.previous;
        while (previousNode != null) {
            nodePath.add(previousNode);
            previousNode = previousNode.previous;
        }
        if (nodePath.size() == 1)
            return null; // endNode was never reached
        WalkerNode[] nodeArray = new WalkerNode[nodePath.size()];
        for (int i = nodePath.size() - 1; i >= 0; i--)
            nodeArray[nodePath.size() - i - 1] = nodePath.get(i);
        return nodeArray;
    } catch (Exception e) {
        // NOTE(review): exception deliberately swallowed so pathfinding is
        // best-effort; consider logging before returning null.
    }
    return null;
}

From source file:org.ala.repository.Validator.java

/**
 * Validate a DC file (parsed into list of String[])
 *
 * @param lines /*w ww  . j a v  a 2  s.c o m*/
 * @throws MalformedURLException
 * @throws IllegalArgumentException
 * @throws NoSuchFieldError
 * @throws Exception
 */
protected void validateDcFile(List<String[]> lines)
        throws MalformedURLException, IllegalArgumentException, NoSuchFieldError, Exception {
    // initialise requiredDcFields
    ArrayList<String> requiredDcFields = new ArrayList<String>();
    requiredDcFields.add(Field.IDENTIFIER.name); // alt value: Predicates.DC_IDENTIFIER.getLocalPart()
    requiredDcFields.add(Field.FORMAT.name);
    requiredDcFields.add(Field.MODIFIED.name);
    //requiredDcFields.add(Field.URI.name);

    for (String[] data : lines) {
        logger.debug("DC entries (" + data.length + ") = " + StringUtils.join(data, "|"));
        // Check for expected number of tab fields
        Assert.isTrue(data.length == FileType.DC.getFieldCount(), "Entry not expected size of "
                + FileType.DC.getFieldCount() + ", got " + data.length + " - " + StringUtils.join(data, "|"));

        if (data[0].endsWith(Field.FORMAT.name)) {
            // Check "format" field
            requiredDcFields.remove(Field.FORMAT.name);
            Assert.isTrue(MimeType.getAllMimeTypes().contains(data[1]),
                    Field.FORMAT.name + " does not contain an accepted value: " + data[1] + " - "
                            + StringUtils.join(MimeType.getAllMimeTypes(), "|"));
        } else if (data[0].endsWith(Field.IDENTIFIER.name)) {
            // Check "identifier" field
            requiredDcFields.remove(Field.IDENTIFIER.name);
            Assert.isTrue(data[1].length() > 0, Field.IDENTIFIER.name + " is empty");
        } else if (data[0].endsWith(Field.MODIFIED.name)) {
            // Check "modified" date field
            requiredDcFields.remove(Field.MODIFIED.name);
            Assert.isTrue(data[1].length() > 0, Field.MODIFIED.name + " date is empty");
            DateValidator validator = DateValidator.getInstance();
            if (!validator.isValid(data[1], "yyyy-MM-dd", true)) {
                throw new IllegalArgumentException(
                        Field.MODIFIED.name + " date is not a valid date: " + data[1]);
            }
        } else if (data[0].endsWith(Field.URI.name)) {
            // Check "URI" field
            requiredDcFields.remove(Field.URI.name);
            new URL(data[1]); // throws MalformedURLException if not valid URL
        }
    }

    if (!requiredDcFields.isEmpty()) {
        throw new NoSuchFieldError("Required fields not found: " + StringUtils.join(requiredDcFields, ", "));
    }
}

From source file:hudson.scm.CVSChangeLogSet.java

/**
 * Parses a CVS changelog XML file into a CVSChangeLogSet.
 *
 * Incomplete entries are dropped, and duplicate entries (which the Ant task
 * sometimes reports) are merged into a single entry.
 *
 * @param build the build the changelog belongs to
 * @param f     the changelog XML file to parse
 * @return the parsed change set
 * @throws IOException  if the file cannot be read or parsed
 * @throws SAXException on XML errors surfaced by the digester setup
 */
public static CVSChangeLogSet parse(final AbstractBuild<?, ?> build, final java.io.File f)
        throws IOException, SAXException {
    Digester digester = new Digester2();
    ArrayList<CVSChangeLog> r = new ArrayList<CVSChangeLog>();
    digester.push(r);

    // One CVSChangeLog per <entry>, populated from its child elements.
    digester.addObjectCreate("*/entry", CVSChangeLog.class);
    digester.addBeanPropertySetter("*/entry/changeDate", "changeDateString");
    digester.addBeanPropertySetter("*/entry/date");
    digester.addBeanPropertySetter("*/entry/time");
    digester.addBeanPropertySetter("*/entry/author", "user");
    digester.addBeanPropertySetter("*/entry/msg");
    digester.addSetNext("*/entry", "add");

    // One File per <entry>/<file>, attached to its owning entry.
    digester.addObjectCreate("*/entry/file", File.class);
    digester.addBeanPropertySetter("*/entry/file/name");
    digester.addBeanPropertySetter("*/entry/file/fullName");
    digester.addBeanPropertySetter("*/entry/file/revision");
    digester.addBeanPropertySetter("*/entry/file/prevrevision");
    digester.addCallMethod("*/entry/file/dead", "setDead");
    digester.addSetNext("*/entry/file", "addFile");

    try {
        digester.parse(f);
    } catch (IOException e) {
        throw new IOException2("Failed to parse " + f, e);
    } catch (SAXException e) {
        throw new IOException2("Failed to parse " + f, e);
    }

    // merge duplicate entries. Ant task somehow seems to report duplicate
    // entries. Iterating backward keeps earlier indices valid after removal.
    for (int i = r.size() - 1; i >= 0; i--) {
        CVSChangeLog log = r.get(i);
        boolean merged = false;
        if (!log.isComplete()) {
            // Remove by index: we already know the position, so the
            // equals()-based scan of remove(Object) is unnecessary.
            r.remove(i);
            continue;
        }
        for (int j = 0; j < i; j++) {
            CVSChangeLog c = r.get(j);
            if (c.canBeMergedWith(log)) {
                c.merge(log);
                merged = true;
                break;
            }
        }
        if (merged) {
            r.remove(i);
        }
    }

    return new CVSChangeLogSet(build, r);
}

From source file:org.opendatakit.common.android.utilities.TableUtil.java

/**
 * Wrapper for handling database interactions for removing the indicated column from
 * the groupBy columns list./*from   ww w  .j a v  a  2s. co m*/
 *
 * @param ctxt
 * @param appName
 * @param tableId
 * @param elementKey
 * @throws RemoteException
 */
public void atomicRemoveGroupByColumn(CommonApplication ctxt, String appName, String tableId, String elementKey)
        throws RemoteException {
    OdkDbHandle db = null;
    try {
        db = ctxt.getDatabase().openDatabase(appName);

        ArrayList<String> elementKeys = getGroupByColumns(ctxt, appName, db, tableId);
        elementKeys.remove(elementKey);
        setGroupByColumns(ctxt, appName, db, tableId, elementKeys);
    } catch (RemoteException e) {
        WebLogger.getLogger(appName).printStackTrace(e);
        throw e;
    } finally {
        if (db != null) {
            try {
                ctxt.getDatabase().closeDatabase(appName, db);
            } catch (RemoteException e) {
                WebLogger.getLogger(appName).printStackTrace(e);
                throw e;
            }
        }
    }
}

From source file:org.opendatakit.common.android.utilities.TableUtil.java

/**
 * Wrapper for handling database interactions for adding the indicate column to the
 * end of the groupBy columns array. If it is already in the array, it is removed and
 * appended to the end./*from  w  w w . ja v  a  2  s.c om*/
 *
 * @param ctxt
 * @param appName
 * @param tableId
 * @param elementKey
 * @throws RemoteException
 */
public void atomicAddGroupByColumn(CommonApplication ctxt, String appName, String tableId, String elementKey)
        throws RemoteException {
    OdkDbHandle db = null;
    try {
        db = ctxt.getDatabase().openDatabase(appName);

        ArrayList<String> elementKeys = getGroupByColumns(ctxt, appName, db, tableId);
        elementKeys.remove(elementKey);
        elementKeys.add(elementKey);
        setGroupByColumns(ctxt, appName, db, tableId, elementKeys);
    } catch (RemoteException e) {
        WebLogger.getLogger(appName).printStackTrace(e);
        throw e;
    } finally {
        if (db != null) {
            try {
                ctxt.getDatabase().closeDatabase(appName, db);
            } catch (RemoteException e) {
                WebLogger.getLogger(appName).printStackTrace(e);
                throw e;
            }
        }
    }
}

From source file:com.mylife.hbase.mapper.HBaseEntityMapper.java

/**
 * Helper method to lookup getter methods for each annotated Field
 * /*w  w w.j  a  v a2 s.  c  o  m*/
 * Note: This requires a proper bean pattern getter method in the annotatedClass for the annotatedField.
 * 
 * @param annotatedClass
 * @param annotatedFields
 * @return
 */
private ImmutableMap<Field, Method> fieldsToGetterMap(final Class<?> annotatedClass,
        final ImmutableSet<Field> annotatedFields) {
    final ImmutableMap.Builder<Field, Method> mappings = new ImmutableMap.Builder<Field, Method>();
    final BeanInfo beanInfo;
    try {
        beanInfo = Introspector.getBeanInfo(annotatedClass);
    } catch (IntrospectionException e) {
        // should never happen
        LOG.error(e);
        throw new RuntimeException(e);
    }

    final ArrayList<PropertyDescriptor> propertyDescriptors = Lists
            .newArrayList(beanInfo.getPropertyDescriptors());

    for (final Field field : annotatedFields) {
        for (int i = 0; i < propertyDescriptors.size(); i++) {
            if (field.getName().equals(propertyDescriptors.get(i).getName())) {
                mappings.put(field, propertyDescriptors.get(i).getReadMethod());
                propertyDescriptors.remove(i);
                i--;
            }
        }
    }

    return mappings.build();
}

From source file:com.google.sampling.experiential.server.MapServiceImpl.java

/**
 * Returns the experiments the logged-in user has joined, derived from the
 * user's events. Experiments whose definitions have been deleted are
 * returned as placeholder "Deleted Experiment Definition" DAOs.
 *
 * @return DAOs for all joined experiments (possibly empty, never null)
 */
public List<ExperimentDAO> getUsersJoinedExperiments() {
    List<com.google.sampling.experiential.server.Query> queries = new QueryParser()
            .parse("who=" + getWhoFromLogin().getEmail());
    List<Event> events = EventRetriever.getInstance().getEvents(queries, getWho(),
            EventServlet.getTimeZoneForClient(getThreadLocalRequest()));
    // Collect the distinct experiment ids the user's events reference.
    Set<Long> experimentIds = Sets.newHashSet();
    for (Event event : events) {
        if (event.getExperimentId() == null) {
            continue; // legacy check
        }
        experimentIds.add(Long.parseLong(event.getExperimentId()));
    }
    List<ExperimentDAO> experimentDAOs = Lists.newArrayList();
    if (experimentIds.isEmpty()) {
        return experimentDAOs;
    }

    ArrayList<Long> idList = Lists.newArrayList(experimentIds);
    System.out.println("Found " + experimentIds.size() + " unique experiments where joined.");
    System.out.println(Joiner.on(",").join(idList));

    PersistenceManager pm = null;
    try {
        pm = PMF.get().getPersistenceManager();
        Query q = pm.newQuery(Experiment.class, ":p.contains(id)");

        List<Experiment> experiments = (List<Experiment>) q.execute(idList);
        // FIX: the null check must precede the size() call; the original
        // dereferenced experiments before testing it for null.
        if (experiments != null) {
            System.out.println("Got back " + experiments.size() + " experiments");
            for (Experiment experiment : experiments) {
                experimentDAOs.add(DAOConverter.createDAO(experiment));
                // remove(Object): the long is boxed, so this removes by
                // value, not by index.
                idList.remove(experiment.getId().longValue());
            }
        }
        // Any ids left over have no stored definition; emit placeholders.
        for (Long id : idList) {
            experimentDAOs.add(new ExperimentDAO(id, "Deleted Experiment Definition", "", "", "", null, null,
                    null, null, null, null, null, null, null, null, null, null));
        }
    } finally {
        if (pm != null) {
            pm.close();
        }
    }
    return experimentDAOs;
}

From source file:com.hygenics.parser.SpecDumpWithReference.java

/**
 * Runs the Dump/* ww  w  . j a  va 2s  .com*/
 */
public void run() {

    if (archive) {
        Archiver zip = new Archiver();
        String[] barr = baseFile.split("\\/");
        String basefile = "";
        for (int i = 0; i > barr.length - 1; i++) {
            basefile += (i == 0) ? barr[i] : "/" + barr[i];
        }
        if (basefile.trim().length() > 0) {
            zip.setBasedirectory(basefile);
            zip.setZipDirectory(basefile + "archive.zip");
            zip.setAvoidanceString(".zip|archive");
            zip.setDelFiles(true);
            zip.run();
        }
    }

    int dumped = 0;
    log.info("Tables Found: " + tables.size());
    ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
    boolean checkedTables = (this.tablesMustHave == null);
    for (String tf : tables.keySet()) {
        String[] split = (this.baseschema + "." + tf + "|" + this.baseFile + tf).split("\\|");
        log.info("Dumping for " + split[0]);
        String schema = null;
        try {
            schema = split[0].split("\\.")[0];

            if (!checkedTables) {
                ArrayList<String> mustHaveTemp = (ArrayList<String>) this.tablesMustHave.clone();
                ArrayList<String> existingTables = this.template.getJsonData(
                        "SELECT table_name FROM information_schema.tables WHERE table_schema ILIKE '%" + schema
                                + "%'");
                for (String tdict : existingTables) {

                    String table = Json.parse(tdict).asObject().get("table_name").asString();
                    if (mustHaveTemp.contains(table)) {
                        mustHaveTemp.remove(table);

                        // get count
                        if (this.template.getCount(schema + "." + table) == 0) {
                            try {
                                throw new MissingData(
                                        "Data Missing from Required Table: " + schema + "." + table);
                            } catch (MissingData e) {
                                e.printStackTrace();
                                if (tablesMustHave.contains(table)) {
                                    log.error("Critical Table Missing Data! Terminating!");
                                    System.exit(-1);
                                }
                            }
                        }

                    }
                }

                if (mustHaveTemp.size() > 0) {
                    log.error("Drop Schema " + schema + "  is missing the following tables:\n");
                    for (String table : mustHaveTemp) {
                        log.error(table + "\n");
                    }

                    try {
                        throw new TableMissingException();
                    } catch (TableMissingException e) {
                        e.printStackTrace();
                        System.exit(-1);
                    }
                }
            }

        } catch (IndexOutOfBoundsException e) {
            try {
                throw new SQLMalformedException("FATAL ERROR: Table name " + split[0] + " malformed");
            } catch (SQLMalformedException e2) {
                e2.printStackTrace();
                System.exit(-1);
            }
        }

        log.info("Checking  table: " + split[0] + "&& schema: " + schema);

        if (template.checkTable(split[0], schema)) {
            // check if there are records

            if (template.getCount(schema + "." + split[0].replace(schema + ".", "")) > 0) {
                dumped += 1;
                Set<String> keys = tables.get(tf).keySet();
                String sql;
                String select = "SELECT ";
                String distinct = null;
                String attrs = null;
                String where = null;
                String group = null;
                String order = null;

                /**
                 * SET THE ATTRIBUTES WHICH CAN BE SPECIFIED WITH
                 * distinct-for concacting distinct part of query not0-for
                 * specifiying that the length must be greater than 0 in the
                 * WHERE clause group-for grouping the attribute not
                 * null-for specifying that the attr cannot be null
                 * orderby-for specifying our one order attr
                 */
                for (String k : keys) {
                    if (k.toLowerCase().contains("distinct")) {
                        distinct = (distinct == null)
                                ? "distinct on(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                : distinct + "," + tables.get(tf).get(k).replaceAll("\\sas.*", "");
                    }

                    if (k.toLowerCase().contains("group")) {
                        group = (group == null) ? "GROUP BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                : group + "," + tables.get(tf).get(k).replaceAll("\\sas.*", "");
                    }

                    if (k.toLowerCase().contains("not0")) {
                        if (k.contains("not0OR")) {
                            where = (where == null)
                                    ? "WHERE length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ") >0 "
                                    : where + "OR length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ")";
                        } else {
                            where = (where == null)
                                    ? "WHERE length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ") >0 "
                                    : where + "AND length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ")";
                        }
                    }

                    if (k.toLowerCase().contains("notnull")) {
                        if (k.toLowerCase().contains("notnullor")) {
                            where = (where == null)
                                    ? "WHERE " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL"
                                    : where + " OR " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL";
                        } else {
                            where = (where == null)
                                    ? "WHERE " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL"
                                    : where + " AND " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL";
                        }
                    }

                    if (k.toLowerCase().contains("order")) {
                        if (k.toLowerCase().contains("orderdesc")) {
                            order = (order == null)
                                    ? "ORDER BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "") + " ASC"
                                    : order;
                        } else {
                            order = (order == null)
                                    ? "ORDER BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "") + " DESC"
                                    : order;
                        }
                    }

                    String field = tables.get(tf).get(k);
                    if (k.toLowerCase().contains("attr")) {
                        if (unicoderemove == true) {
                            field = "regexp_replace(trim(replace(regexp_replace(cast(" + field + " as text)"
                                    + ",'[^\\u0020-\\u007e,\\(\\);\\-\\[\\]]+',' '),'" + this.delimiter + "','"
                                    + this.replacedel + "')),'[\\r|\\n]+','   ','gm') as " + field;
                        } else {
                            field = "regexp_replace(trim(replace(cast(" + field + " as text),'" + this.delimiter
                                    + "','" + this.replacedel + "')),'[\\r|\\n]+','   ','gm')";
                        }

                        attrs = (attrs == null) ? field : attrs + "," + field;
                    }
                }

                select = (distinct == null) ? select : select.trim() + " " + distinct.trim() + ")";
                select += " " + attrs.trim();
                select += " FROM " + split[0].trim();
                select = (where == null) ? select : select.trim() + " " + where.trim();
                select = (group == null) ? select : select.trim() + " " + group.trim();
                select = (order == null) ? select : select.trim() + " " + order.trim();

                if (extracondition != null) {
                    select += (select.contains(" WHERE ") == true) ? " AND" + extracondition
                            : " WHERE " + extracondition;
                }

                select = select.trim();

                log.info("Dump Select Command: " + select);

                sql = "COPY  (" + select + ") TO STDOUT WITH DELIMITER '" + delimiter.trim()
                        + "' NULL as '' CSV HEADER";
                fjp.execute(new ToFile(sql, split[1].trim()));

                select = "SELECT ";
                distinct = null;
                attrs = null;
                where = null;
                group = null;
                order = null;
            } else {
                try {

                    throw new NoDataException("WARNING: Table " + split[0] + " has no Data");

                } catch (NoDataException e) {
                    e.printStackTrace();
                    if (tablesMustHave != null && tablesMustHave.contains(split[0])) {
                        log.error("Table is a Must Have Table by has not Data. Terminating!");
                        System.exit(-1);
                    }
                }
            }
        } else {
            try {
                throw new SQLMalformedException("WARNING: Table " + split[0] + " is missing");
            } catch (SQLMalformedException e) {
                e.printStackTrace();
            }
        }
    }

    try {
        fjp.awaitTermination(60000, TimeUnit.MILLISECONDS);
        fjp.shutdown();
    } catch (InterruptedException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    if (dumped == 0) {
        log.info("No Data Found in any Table");
        System.exit(-1);
    }

}

From source file:com.magnet.mmx.server.plugin.mmxmgmt.api.tags.MMXUserTagsResourceTest.java

@Test
public void testSetGetDelTags() throws Exception {
    String username = Helper.removeSuffix(userEntityList.get(0).getUsername(), "%");
    List<String> tagList1 = Arrays.asList("tag1", "tag2", "tag3");
    List<String> tagList2 = Arrays.asList("tag4", "tag5", "tag6");

    // set tags 1
    {
        Response setResp = setTags(username, tagList1);
        assertEquals(Response.Status.CREATED.getStatusCode(), setResp.getStatus());
        setResp.close();
    }

    // set tags 2
    {
        Response setResp = setTags(username, tagList2);
        assertEquals(Response.Status.CREATED.getStatusCode(), setResp.getStatus());
        setResp.close();
    }

    // get the tags: both batches should now be present
    {
        Response getResp = getTags(username);
        assertEquals(Response.Status.OK.getStatusCode(), getResp.getStatus());
        String jsonString = getResp.readEntity(String.class);
        ArrayList<String> newList = new ArrayList<String>();
        newList.addAll(tagList1);
        newList.addAll(tagList2);
        // FIX: validate the combined list (the original only checked
        // tagList1, leaving newList unused) and close the response like
        // every other block in this test.
        validateContainsTags(jsonString, username, newList);
        getResp.close();
    }

    /**
     * delete one tag
     */
    {
        Response delResp = deleteTag(username, "tag1");
        // expected value first, matching the other assertions in this test
        assertEquals(Response.Status.OK.getStatusCode(), delResp.getStatus());
        delResp.close();
    }

    // get the tags: everything except "tag1" should remain
    {
        Response getResp = getTags(username);
        assertEquals(Response.Status.OK.getStatusCode(), getResp.getStatus());
        String jsonString = getResp.readEntity(String.class);
        ArrayList<String> newList = new ArrayList<String>();
        newList.addAll(tagList1);
        newList.addAll(tagList2);
        newList.remove("tag1");
        validateContainsTags(jsonString, username, newList);
        validateNotContainsTags(jsonString, username, Arrays.asList("tag1"));
        getResp.close();
    }

    /**
     * delete all tags
     */

    {
        Response delResp = deleteAllTags(username);
        assertEquals(Response.Status.OK.getStatusCode(), delResp.getStatus());
        delResp.close();
    }

    // get the tags and check none remain
    {
        Response getResp = getTags(username);
        assertEquals(Response.Status.OK.getStatusCode(), getResp.getStatus());
        String jsonString = getResp.readEntity(String.class);
        ArrayList<String> newList = new ArrayList<String>();
        newList.addAll(tagList1);
        newList.addAll(tagList2);
        validateNotContainsTags(jsonString, username, newList);
        getResp.close();
    }
}

From source file:net.sourceforge.mipa.predicatedetection.lattice.sequence.SequenceWindowedLatticeChecker.java

/**
 * Re-evaluates which automaton states are reachable at each lattice node,
 * starting from the minimal consistent global state {@code minCGS} and
 * propagating in breadth-first order through successor nodes whose reachable
 * sets actually changed.
 *
 * <p>Side effects: mutates the reached-state sets of visited lattice nodes,
 * and records {@code updateNumber} (nodes visited) and {@code updateTime}
 * (elapsed milliseconds) on this checker.
 *
 * @param minCGS minimal consistent global state of the current window; if
 *               {@code null} there is nothing to update and the method returns
 * @param maxCGS maximal consistent global state; currently unused by this
 *               implementation (kept for interface compatibility)
 * @param id     index of the process whose window advance triggered the update
 */
private void updateReachableState(SequenceLatticeIDNode minCGS, AbstractLatticeIDNode maxCGS, int id) {
    if (minCGS == null) {
        return;
    }

    long time = (new Date()).getTime();
    // BFS work list of nodes whose reachable states may be stale, plus a
    // visited set keyed by the space-joined node ID.
    ArrayList<SequenceLatticeIDNode> set = new ArrayList<SequenceLatticeIDNode>();
    ArrayList<String> setID = new ArrayList<String>();
    set.add(minCGS);
    while (!set.isEmpty()) {
        SequenceLatticeIDNode node = set.remove(0);
        String nodeID = StringUtils.join(node.getID(), ' ');
        if (!setID.contains(nodeID)) {
            setID.add(nodeID);
            // Snapshot the node's current reachable states so we can detect
            // whether recomputation changed them.
            HashSet<State> oriState = new HashSet<State>();
            Iterator<State> iterator = node.getReachedStates().iterator();
            while (iterator.hasNext()) {
                oriState.add(iterator.next());
            }
            if (node.equals(minCGS)) {
                // The minimal CGS is re-seeded directly from the automaton's
                // initial state, stepping once per satisfied predicate symbol.
                node.getReachedStates().clear();
                String[] string = node.getSatisfiedPredicates().split(" ");
                for (int i = 0; i < string.length; i++) {
                    State state = automaton.getInitialState().step(string[i].charAt(0));
                    node.addReachedStates(state);
                }
                if (DEBUG) {
                    // Debug printing is excluded from the timed update cost
                    // by accumulating its duration into wastedTime.
                    long time_t = (new Date()).getTime();
                    out.print("[ ");
                    for (int i = 0; i < node.getID().length; i++) {
                        out.print(node.getID()[i] + " ");
                    }
                    out.print("]: satisfied predicates: " + node.getSatisfiedPredicates());
                    out.print(" reachable states: ");
                    Iterator<State> it = node.getReachedStates().iterator();
                    while (it.hasNext()) {
                        State state = it.next();
                        out.print(state.getName() + " ");
                    }
                    out.println();
                    out.flush();
                    wastedTime += (new Date()).getTime() - time_t;
                }
            } else {
                // Interior nodes derive their reachable states from their
                // predecessors via the checker's standard recomputation.
                node.getReachedStates().clear();
                computeReachableStates(node);
            }

            // flag == true iff the recomputed set equals the snapshot, where
            // states are compared by name (same size and every original name
            // present among the new names).
            boolean flag = true;
            if (oriState.size() == node.getReachedStates().size()) {
                String ori = "";
                iterator = oriState.iterator();
                while (iterator.hasNext()) {
                    State state = iterator.next();
                    ori += state.getName() + " ";

                }
                String news = "";
                iterator = node.getReachedStates().iterator();
                while (iterator.hasNext()) {
                    State state = iterator.next();
                    news += state.getName() + " ";
                }

                String[] oriStates = ori.trim().split(" ");
                String[] newStates = news.trim().split(" ");
                for (int j = 0; j < oriStates.length; j++) {
                    String s = oriStates[j];
                    boolean f = false;
                    for (int k = 0; k < newStates.length; k++) {
                        if (s.equals(newStates[k])) {
                            f = true;
                            break;
                        }
                    }
                    if (f == false) {
                        flag = false;
                        break;
                    }
                }
            } else {
                flag = false;
            }
            if (flag == false) {
                // The set changed: every immediate successor (ID incremented
                // by one in each dimension) must be re-examined.
                for (int i = 0; i < children.length; i++) {
                    String[] index = new String[children.length];
                    for (int j = 0; j < children.length; j++) {
                        index[j] = node.getID()[j];
                    }
                    index[i] = Integer.toString(Integer.valueOf(index[i]) + 1);
                    String ID = StringUtils.join(index, ' ');
                    if (getMappedLattice().get(ID) != null) {
                        set.add((SequenceLatticeIDNode) getMappedLattice().get(ID));
                    }
                }
            } else {
                // Unchanged set: propagation can stop here, except along the
                // lattice boundary of process `id`. NOTE(review): the guard
                // below presumably identifies nodes whose id-component is the
                // earliest local state still in the window — confirm against
                // windowedLocalStateSet's ordering.
                if (node.getID()[id].equals(windowedLocalStateSet.get(id).get(0).getID())) {
                    for (int i = 0; i < children.length; i++) {
                        if (i == id) {
                            continue;
                        }
                        String[] index = new String[children.length];
                        for (int j = 0; j < children.length; j++) {
                            index[j] = node.getID()[j];
                        }
                        index[i] = Integer.toString(Integer.valueOf(index[i]) + 1);
                        String ID = StringUtils.join(index, ' ');
                        if (getMappedLattice().get(ID) != null) {
                            set.add((SequenceLatticeIDNode) getMappedLattice().get(ID));
                        }
                    }
                }
            }
        }
    }
    updateNumber = setID.size();
    updateTime = (new Date()).getTime() - time;
}