Example usage for java.util ArrayList remove

Introduction

This page lists usage examples for java.util.ArrayList.remove, collected from open-source projects.

Prototype

public boolean remove(Object o)
public E remove(int index)

Document

remove(Object o) removes the first occurrence of the specified element from this list, if it is present, and returns true if the list contained the element. remove(int index) removes the element at the specified position and returns it. Both overloads appear in the examples below.
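
As a quick, self-contained sketch of the two overloads (not taken from any of the projects below), note in particular the overload-resolution pitfall with a List<Integer>:

import java.util.ArrayList;
import java.util.List;

public class RemoveDemo {
    public static void main(String[] args) {
        List<Integer> list = new ArrayList<>(List.of(10, 20, 10, 30));
        list.remove(Integer.valueOf(10)); // remove(Object): drops the first 10 -> [20, 10, 30]
        list.remove(1);                   // remove(int): drops the element at index 1 -> [20, 30]
        // Beware: on a List<Integer>, remove(1) binds to the int overload (an index),
        // not to the element 1; use Integer.valueOf(1) to remove by value.
        System.out.println(list);         // prints [20, 30]
    }
}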

Usage

From source file:at.ac.tuwien.dsg.csdg.DependencyGraph.java

public ArrayList<Node> getAllVMs() {
    ArrayList<Node> units = new ArrayList<Node>();
    ArrayList<Node> unexploredNodes = new ArrayList<Node>();
    for (Node n : cloudService.getAllRelatedNodes()) {
        if (n.getNodeType() == NodeType.SERVICE_TOPOLOGY) {
            unexploredNodes.add(n);
        }
    }
    while (unexploredNodes.size() > 0) {
        Node n = unexploredNodes.get(0);
        if (n.getAllRelatedNodes() != null) {
            for (Node n1 : n.getAllRelatedNodes()) {
                unexploredNodes.add(n1);
            }
        }

        if (n.getNodeType() == NodeType.VIRTUAL_MACHINE)
            units.add(n);

        unexploredNodes.remove(0);
    }

    return units;
}
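
A note on this example: unexploredNodes.remove(0) polls the head of an ArrayList, which shifts every remaining element and costs O(n) per call. A minimal sketch of the same breadth-first traversal over a hypothetical Node type (not the project's class), using ArrayDeque for O(1) polls:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

class BfsDemo {
    // Hypothetical minimal node type, for illustration only.
    static class Node {
        List<Node> children = new ArrayList<>();
        boolean isVm;
    }

    static List<Node> collectVms(Node root) {
        List<Node> vms = new ArrayList<>();
        Deque<Node> queue = new ArrayDeque<>();
        queue.add(root);
        while (!queue.isEmpty()) {
            Node n = queue.poll(); // O(1), unlike ArrayList.remove(0)
            if (n.isVm) {
                vms.add(n);
            }
            queue.addAll(n.children);
        }
        return vms;
    }
}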

From source file:com.edgenius.wiki.gwt.server.SpaceControllerImpl.java

public BlogMetaList updateLinkedBlog(String spaceUname, ArrayList<BlogMeta> blogs) {
    BlogMetaList list = new BlogMetaList();

    Space space = spaceService.getSpaceByUname(spaceUname);
    List<BlogMeta> exists = space.getSetting().getLinkedMetas();
    if (exists != null) {
        List<BlogMeta> needRemove = new ArrayList<BlogMeta>();
        // check whether any existing entries were removed or updated
        for (BlogMeta exist : exists) {
            if (blogs != null) {
                if (blogs.contains(exist)) {
                    // already linked; drop it from the input list so it is not re-linked below
                    blogs.remove(exist);
                } else {
                    // not in the new list, so mark the existing one for removal
                    needRemove.add(exist);
                }
            } else {
                //remove all existing
                needRemove.add(exist);
            }

        }
        // disconnectFromSpace() is not called inside the loop above because it also updates
        // the list from space.getSetting().getLinkedMetas(), which would cause a ConcurrentModificationException
        if (needRemove.size() > 0) {
            for (BlogMeta blogMeta : needRemove) {
                blogSyncService.disconnectFromSpace(spaceUname, blogMeta.getKey());
            }
        }

    }
    if (blogs != null) {
        for (BlogMeta blog : blogs) {
            if (!blogSyncService.verifyBlog(blog)) {
                blog.setError(messageService.getMessage("err.invalid.blog.info"));
                list.errorMsg = messageService.getMessage("err.invalid.blog.info");
            } else {
                //stop create space!
                try {
                    blogSyncService.linkToSpace(blog, spaceUname);
                } catch (BlogSyncException e) {
                    log.error("Update blog failed " + blog, e);
                    blog.setError(messageService.getMessage("err.invalid.blog.info"));
                    list.errorMsg = messageService.getMessage("err.invalid.blog.info");
                }
            }
        }
    }

    // read back from the space rather than from the input parameter, as some blogs may not have been saved successfully
    Collection<BlogMeta> blogMetas = SpaceUtil.getSpaceLinkMetaToModel(space);
    if (blogMetas != null)
        list.blogList = new ArrayList<BlogMeta>(blogMetas);
    return list;

}
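
The needRemove buffer above defers mutation until after the loop, which avoids the ConcurrentModificationException mentioned in the comment. When the elements to drop come from the list being iterated itself, Iterator.remove() is the standard alternative; a generic sketch (illustrative helper, not this project's API):

import java.util.Iterator;
import java.util.List;

class IteratorRemoveDemo {
    static void dropEmpty(List<String> items) {
        Iterator<String> it = items.iterator();
        while (it.hasNext()) {
            if (it.next().isEmpty()) {
                it.remove(); // structural removal that keeps the iterator valid
            }
        }
    }
}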

From source file:org.herrlado.engeo.Utils.java

/**
 * Update cookies from response.
 * 
 * @param cookies
 *            old {@link Cookie} list
 * @param headers
 *            {@link Header}s from {@link HttpResponse}
 * @param url
 *            requested URL
 * @throws URISyntaxException
 *             malformed URI
 * @throws MalformedCookieException
 *             malformed {@link Cookie}
 */
@Deprecated
public static void updateCookies(final ArrayList<Cookie> cookies, final Header[] headers, final String url)
        throws URISyntaxException, MalformedCookieException {
    final URI uri = new URI(url);
    int port = uri.getPort();
    if (port < 0) {
        if (url.startsWith("https")) {
            port = PORT_HTTPS;
        } else {
            port = PORT_HTTP;
        }
    }
    final CookieOrigin origin = new CookieOrigin(uri.getHost(), port, uri.getPath(), false);
    final CookieSpecBase cookieSpecBase = new BrowserCompatSpec();
    String name;
    String value;
    for (final Header header : headers) {
        for (final Cookie cookie : cookieSpecBase.parse(header, origin)) {
            // THE cookie
            name = cookie.getName();
            value = cookie.getValue();
            if (value == null || value.equals("")) {
                continue;
            }
            for (final Cookie c : cookies) {
                if (name.equals(c.getName())) {
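                    // replacing while iterating is safe here only because of the immediate break,
                    // which prevents a ConcurrentModificationException on the next iteration step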
                    cookies.remove(c);
                    cookies.add(cookie);
                    name = null;
                    break;
                }
            }
            if (name != null) {
                cookies.add(cookie);
            }
        }
    }
}
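
On Java 8+, the scan-replace-break loop above can be collapsed with removeIf, which iterates safely on the caller's behalf; a minimal sketch over a simple name/value type (illustrative only, not the HttpClient Cookie API):

import java.util.List;

class ReplaceByNameDemo {
    static class Pair {
        final String name;
        final String value;

        Pair(String name, String value) {
            this.name = name;
            this.value = value;
        }
    }

    static void put(List<Pair> pairs, Pair incoming) {
        // drop any entry with the same name, then append the replacement
        pairs.removeIf(p -> p.name.equals(incoming.name));
        pairs.add(incoming);
    }
}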

From source file:com.xpn.xwiki.plugin.chronopolys.FolderManager.java

public ArrayList<Object> getProjectContainerBrothers(String uid, XWikiContext context) throws XWikiException {
    Object container = getProjectContainer(uid, context);
    ArrayList<Object> containers;

    if (FOLDERS_ROOT.equals(container.display("type", "view"))) {
        containers = this.getRootFolders(context);
    } else {
        containers = getProjectContainerChilds(container.getProperty("parent").getValue().toString(), context);
    }
    Object obj;
    for (int i = 0; i < containers.size(); i++) {
        obj = containers.get(i);
        if (uid.equals(obj.display("uid", "view"))) {
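            // removing by index mid-loop skips the element that slides into position i;
            // harmless only if uid matches at most one container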
            containers.remove(i);
        }
    }
    return containers;
}

From source file:com.gcrm.action.crm.BaseEditAction.java

/**
 * Gets the base information for entity.
 *
 * @param entity instance
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
protected void getBaseInfo(BaseEntity entity, String entityName, String namespace) {
    User createdUser = entity.getCreated_by();
    if (createdUser != null) {
        this.setCreatedBy(createdUser.getName());
    }
    User updatedUser = entity.getUpdated_by();
    if (updatedUser != null) {
        this.setUpdatedBy(updatedUser.getName());
    }
    SimpleDateFormat dateFormat = new SimpleDateFormat(Constant.DATE_TIME_FORMAT);
    Date createdOn = entity.getCreated_on();
    if (createdOn != null) {
        this.setCreatedOn(dateFormat.format(createdOn));
    }
    Date updatedOn = entity.getUpdated_on();
    if (updatedOn != null) {
        this.setUpdatedOn(dateFormat.format(updatedOn));
    }
    User owner = entity.getOwner();
    if (owner != null) {
        ownerID = owner.getId();
        ownerText = owner.getName();
    }

    // Sets navigation history
    HttpServletRequest request = ServletActionContext.getRequest();
    HttpSession session = request.getSession();
    ArrayList navigationList = (ArrayList) session.getAttribute(Constant.NAVIGATION_HISTORY);
    if (navigationList == null) {
        navigationList = new ArrayList();
    }
    String entityLabel = getText("entity." + CommonUtil.lowerCaseString(entityName) + ".label");
    if (!CommonUtil.isNullOrEmpty(entity.getName())) {
        entityLabel += " - " + entity.getName();
    }
    String navigation = "<a href='" + namespace + "edit" + entityName + ".action?id=" + entity.getId() + "'>"
            + entityLabel + "</a>";
    if (navigationList.contains(navigation)) {
        navigationList.remove(navigation);
    }
    navigationList.add(navigation);
    if (navigationList.size() > Constant.NAVIGATION_HISTORY_COUNT) {
        navigationList.remove(0);
    }
    session.setAttribute(Constant.NAVIGATION_HISTORY, navigationList);
}
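
The contains/remove/add/trim sequence above maintains a small most-recently-used history. The same policy in isolation, exercising both remove overloads (names are illustrative, not this project's constants):

import java.util.List;

class MruHistoryDemo {
    static final int MAX_ENTRIES = 10; // stand-in for Constant.NAVIGATION_HISTORY_COUNT

    static void touch(List<String> history, String entry) {
        history.remove(entry); // remove(Object): drop the old position, if present
        history.add(entry); // re-append as most recent
        if (history.size() > MAX_ENTRIES) {
            history.remove(0); // remove(int): evict the oldest entry
        }
    }
}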

From source file:edu.isi.karma.er.helper.SPARQLGeneratorUtil.java

private String generate_sparql(TriplesMap node, String node_symbol, String graph) {

    ArrayList<Object> queue = new ArrayList<>();
    queue.add(node);
    StringBuffer query = new StringBuffer();
    this.var_count = 1;
    this.prefix_list = new HashMap<>();
    this.select_params = new StringBuffer();
    HashMap<TriplesMap, String> markedTriples = new HashMap<>();

    this.ParentMapingInfoList = new HashMap<>();

    HashMap<Predicate, String> predicateList = new HashMap<>();

    // using a BFS approach, we traverse the tree from the root node and add triples/predicates to the queue
    while (!queue.isEmpty()) {
        Object currentObj = queue.remove(0);

        // if this is a tripleMap, then add all its RefObjects to the queue
        // for the predicates, add only the ones that satisfy the criteria of being <...hasValue>
        if (currentObj instanceof TriplesMap) {
            String var = "x" + var_count;
            TriplesMap triple = (TriplesMap) currentObj;
            boolean foundHasValue = false;
            List<PredicateObjectMap> predicates = triple.getPredicateObjectMaps();

            for (PredicateObjectMap p_map : predicates) {

                // if there are tripleMaps linked to the current tripleMap, then
                // we need to save their relation/linkage between them
                if (p_map.getObject().hasRefObjectMap()) {
                    RefObjectMap objMap = p_map.getObject().getRefObjectMap();
                    queue.add(objMap.getParentTriplesMap());

                    logger.info(triple.getSubject().getId() + "  ---> "
                            + objMap.getParentTriplesMap().getSubject().getId());

                    // maintain a list of mapping properties between triples
                    ParentMapingInfoList.put(objMap.getParentTriplesMap().getSubject().getId(),
                            new ParentMapingInfo(triple, p_map.getPredicate()));

                } else if (!foundHasValue) {
                    if (p_map.getPredicate().getTemplate().toString()
                            .equalsIgnoreCase("<http://www.opengis.net/gml/hasValue>")) {
                        queue.add(p_map.getPredicate());
                        predicateList.put(p_map.getPredicate(), var);
                        foundHasValue = true;
                    }
                }
            }
            // if this triple is marked to be included in the query,
            // we add it to the markedTriples list and add to the query string
            // for its class type, e.g.
            // Prefix pref1: <.../.../Input>
            // x2 a pref1:
            if (foundHasValue) {
                markedTriples.put(triple, var);
                String rdfsTypes = triple.getSubject().getRdfsType().get(0).toString();
                this.prefix_list.put(rdfsTypes, "pref" + var_count);
                query.append(" ?" + var + " a pref" + var_count + ": .");

                // if the parent of this triple is also marked for the query
                // then we add the relation between the triples to the query, e.g.

                //               TriplesMap parentTriple = parent.get(triple.getSubject().getId());
                ParentMapingInfo parentTriple = ParentMapingInfoList.get(triple.getSubject().getId());

                if (parentTriple != null && markedTriples.containsKey(parentTriple.parent)) {
                    String predicate = parentTriple.predicate.getTemplate().toString();
                    //                  PredicateObjectMap parentPredicate = getPredicateBetweenTriples(triple, parentTriple);
                    if (predicate != null) {
                        query.append(" ?" + markedTriples.get(parentTriple.parent) + " " + predicate + " ?"
                                + var + " . ");
                    } else {
                        logger.error("predicate is null from parent : "
                                + triple.getSubject().getRdfsType().toString());
                    }
                }

            }
            var_count++;
        }
        // if it is a Predicate object, create a variable in the query string
        else if (currentObj instanceof Predicate) {
            Predicate predicate = (Predicate) currentObj;
            query.append(" ?" + predicateList.get(predicate) + " " + predicate.getTemplate() + " ?z" + var_count
                    + " . ");
            select_params.append(" ?z" + var_count);
            var_count++;

        }
        // if this is a RefObject add the Child Triple to the queue
        else if (currentObj instanceof RefObjectMap) {
            RefObjectMap refObj = (RefObjectMap) currentObj;
            TriplesMap t = refObj.getParentTriplesMap();
            queue.add(t);

        }
    }

    // generate the query from the list of prefix and the param lists
    Iterator<String> itr = this.prefix_list.keySet().iterator();
    StringBuffer sQuery = new StringBuffer();
    while (itr.hasNext()) {
        String key = itr.next();
        sQuery.append(" PREFIX ").append(this.prefix_list.get(key)).append(": ").append(key);
    }
    if (graph == null || graph.isEmpty()) {
        sQuery.append(" select ").append(select_params).append(" where { ").append(query.toString())
                .append(" } ");
    } else {
        sQuery.append(" select ").append(select_params).append(" where { GRAPH <").append(graph).append("> { ")
                .append(query.toString()).append(" } }");
    }
    logger.info("Genreated Query : " + sQuery);
    return sQuery.toString();
}

From source file:com.linkedin.pinot.integration.tests.Pql2CompilerTest.java

private boolean brokerRequestsAreEquivalent(BrokerRequest left, BrokerRequest right) {
    boolean queryTypeIsEqual = EqualityUtils.isEqual(left.getQueryType(), right.getQueryType());
    boolean querySourceIsEqual = EqualityUtils.isEqual(left.getQuerySource(), right.getQuerySource());
    boolean timeIntervalIsEqual = EqualityUtils.isEqual(left.getTimeInterval(), right.getTimeInterval());
    boolean durationIsEqual = EqualityUtils.isEqual(left.getDuration(), right.getDuration());
    boolean selectionsAreEqual = EqualityUtils.isEqual(left.getSelections(), right.getSelections());
    boolean bucketHashKeyIsEqual = EqualityUtils.isEqual(left.getBucketHashKey(), right.getBucketHashKey());
    boolean basicFieldsAreEquivalent = queryTypeIsEqual && querySourceIsEqual && timeIntervalIsEqual
            && durationIsEqual && selectionsAreEqual && bucketHashKeyIsEqual;

    boolean aggregationsAreEquivalent = true;

    List<AggregationInfo> leftAggregationsInfo = left.getAggregationsInfo();
    List<AggregationInfo> rightAggregationsInfo = right.getAggregationsInfo();
    if (!EqualityUtils.isEqual(leftAggregationsInfo, rightAggregationsInfo)) {
        if (leftAggregationsInfo == null || rightAggregationsInfo == null
                || leftAggregationsInfo.size() != rightAggregationsInfo.size()) {
            aggregationsAreEquivalent = false;
        } else {
            ArrayList<AggregationInfo> leftAggregationsInfoCopy = new ArrayList<>(leftAggregationsInfo);
            ArrayList<AggregationInfo> rightAggregationsInfoCopy = new ArrayList<>(rightAggregationsInfo);
            int aggregationsInfoCount = leftAggregationsInfoCopy.size();
            for (int i = 0; i < aggregationsInfoCount; i++) {
                AggregationInfo leftInfo = leftAggregationsInfoCopy.get(i);

                for (int j = 0; j < rightAggregationsInfoCopy.size(); ++j) {
                    AggregationInfo rightInfo = rightAggregationsInfoCopy.get(j);

                    // Check if the aggregationsInfo are the same or they're the count function
                    if (EqualityUtils.isEqual(leftInfo, rightInfo)) {
                        rightAggregationsInfoCopy.remove(j);
                        break;
                    } else {
                        if ("count".equalsIgnoreCase(rightInfo.getAggregationType())
                                && "count".equalsIgnoreCase(leftInfo.getAggregationType())) {
                            rightAggregationsInfoCopy.remove(j);
                            break;
                        }
                    }
                }
            }

            aggregationsAreEquivalent = rightAggregationsInfoCopy.isEmpty();
        }
    }

    // Group by clauses might not be in the same order
    boolean groupByClauseIsEquivalent = EqualityUtils.isEqual(left.getGroupBy(), right.getGroupBy());

    if (!groupByClauseIsEquivalent) {
        groupByClauseIsEquivalent = (EqualityUtils.isEqualIgnoringOrder(left.getGroupBy().getColumns(),
                right.getGroupBy().getColumns())
                && EqualityUtils.isEqual(left.getGroupBy().getTopN(), right.getGroupBy().getTopN()));
    }

    boolean filtersAreEquivalent = EqualityUtils.isEqual(left.isSetFilterQuery(), right.isSetFilterQuery());

    if (left.isSetFilterQuery()) {
        int leftRootId = left.getFilterQuery().getId();
        int rightRootId = right.getFilterQuery().getId();
        // The Pql 1 compiler merges ranges, the Pql 2 compiler doesn't, so we skip the filter comparison if either side
        // has more than one range filter for the same column
        filtersAreEquivalent = !filtersHaveAtMostOneRangeFilterPerColumn(left, right)
                || filterQueryIsEquivalent(Collections.singletonList(leftRootId),
                        Collections.singletonList(rightRootId), left.getFilterSubQueryMap(),
                        right.getFilterSubQueryMap());
    }

    boolean areEqual = basicFieldsAreEquivalent && aggregationsAreEquivalent && groupByClauseIsEquivalent
            && filtersAreEquivalent;

    if (!areEqual) {
        System.out.println("queryTypeIsEqual = " + queryTypeIsEqual);
        System.out.println("querySourceIsEqual = " + querySourceIsEqual);
        System.out.println("timeInterlalIsEqual = " + timeInterlalIsEqual);
        System.out.println("durationIsEqual = " + durationIsEqual);
        System.out.println("selectionsAreEqual = " + selectionsAreEqual);
        System.out.println("bucketHashKeyIsEqual = " + bucketHashKeyIsEqual);
        System.out.println("basicFieldsAreEquivalent = " + basicFieldsAreEquivalent);
        System.out.println("aggregationsAreEquivalent = " + aggregationsAreEquivalent);
        System.out.println("groupByClauseIsEquivalent = " + groupByClauseIsEquivalent);
        System.out.println("filtersAreEquivalent = " + filtersAreEquivalent);

        if (!filtersAreEquivalent) {
            int leftRootId = left.getFilterQuery().getId();
            int rightRootId = right.getFilterQuery().getId();
            displayFilterDifference(Collections.singletonList(leftRootId),
                    Collections.singletonList(rightRootId), left.getFilterSubQueryMap(),
                    right.getFilterSubQueryMap());
        }
    }

    return areEqual;
}
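
The nested loop over rightAggregationsInfoCopy is an order-insensitive multiset comparison that consumes matches with remove. Stripped of the count-function special case, the underlying pattern looks like this (generic sketch):

import java.util.ArrayList;
import java.util.List;

class MultisetEqualsDemo {
    static <T> boolean sameElements(List<T> left, List<T> right) {
        List<T> remaining = new ArrayList<>(right);
        for (T item : left) {
            if (!remaining.remove(item)) { // remove(Object) returns false when no match remains
                return false;
            }
        }
        return remaining.isEmpty();
    }
}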

From source file:com.krawler.spring.hrms.employee.hrmsEmpDAOImpl.java

public KwlReturnObject getPayHistory(HashMap<String, Object> requestParams) {
    boolean success = true;
    List tabledata = null;
    try {
        //            String userid =(String) requestParams.get("userid");
        //            User userobj = (User) hibernateTemplate.get(User.class, userid);
        //            Date createdfor =(Date) requestParams.get("createdfor");
        ArrayList orderby = null;
        ArrayList ordertype = null;
        ArrayList name = null;
        ArrayList value = null;
        String hql = "from Payhistory ";
        if (requestParams.get("filter_names") != null && requestParams.get("filter_values") != null) {
            name = new ArrayList((List<String>) requestParams.get("filter_names"));
            value = new ArrayList((List<Object>) requestParams.get("filter_values"));
            hql += com.krawler.common.util.StringUtil.filterQuery(name, "where");
            int ind = hql.indexOf("(");
            if (ind > -1) {
                int index = Integer.valueOf(hql.substring(ind + 1, ind + 2));
                // escape the parentheses: replaceAll takes a regex, so an unescaped "(0)" would match a bare digit
                hql = hql.replaceAll("\\(" + index + "\\)", value.get(index).toString());
                value.remove(index);
            }
        }
        if (requestParams.get("order_by") != null && requestParams.get("order_type") != null) {
            orderby = new ArrayList((List<String>) requestParams.get("order_by"));
            ordertype = new ArrayList((List<Object>) requestParams.get("order_type"));
            hql += StringUtil.orderQuery(orderby, ordertype);
        }
        tabledata = HibernateUtil.executeQuery(hibernateTemplate, hql, value.toArray());
        success = true;
    } catch (Exception e) {
        success = false;
        e.printStackTrace();
    } finally {
        // note: returning from finally discards any pending exception; guard against a null result
        return new KwlReturnObject(success, "", "-1", tabledata, tabledata == null ? 0 : tabledata.size());
    }
}

From source file:eu.europa.esig.dss.cades.signature.CadesLevelBaselineLTATimestampExtractor.java

/**
 * The field certificatesHashIndex is a sequence of octet strings. Each one contains the hash value of one
 * instance of CertificateChoices within certificates field of the root SignedData. A hash value for
 * every instance of CertificateChoices, as present at the time when the corresponding archive time-stamp is
 * requested, shall be included in certificatesHashIndex. No other hash value shall be included in this field.
 *
 * @return the certificatesHashIndex sequence from the timestamp, or an empty DERSequence if it contains hashes matching no certificate
 * @throws eu.europa.esig.dss.DSSException
 */
@SuppressWarnings("unchecked")
private ASN1Sequence getVerifiedCertificatesHashIndex(TimestampToken timestampToken) throws DSSException {

    final ASN1Sequence certHashes = getCertificatesHashIndex(timestampToken);
    final ArrayList<DEROctetString> certHashesList = Collections.list(certHashes.getObjects());

    final List<CertificateToken> certificates = cadesSignature.getCertificatesWithinSignatureAndTimestamps();
    for (final CertificateToken certificateToken : certificates) {

        final byte[] encodedCertificate = certificateToken.getEncoded();
        final byte[] digest = DSSUtils.digest(hashIndexDigestAlgorithm, encodedCertificate);
        final DEROctetString derOctetStringDigest = new DEROctetString(digest);
        if (certHashesList.remove(derOctetStringDigest)) {
            // attribute present in signature and in timestamp
            LOG.debug("Cert {} present in timestamp", certificateToken.getAbbreviation());
        } else {
            LOG.debug("Cert {} not present in timestamp", certificateToken.getAbbreviation());
        }
    }
    if (!certHashesList.isEmpty()) {
        LOG.error("{} attribute hash in Cert Hashes have not been found in document attributes: {}",
                certHashesList.size(), certHashesList);
        // return an empty DERSequence so that the hash check fails
        return new DERSequence();
    }
    return certHashes;
}

From source file:com.hygenics.parser.SpecifiedDump.java

/**
 * Runs the dump.
 */
public void run() {

    if (archive) {
        if (tables.keySet().size() > 0) {

            Archiver zip = new Archiver();
            String basefile = tables.keySet().iterator().next().split("\\|")[1];

            if (basefile.trim().length() > 0) {
                zip.setBasedirectory(basefile);
                zip.setZipDirectory(basefile + "archive.zip");
                zip.setAvoidanceString(".zip|archive");
                zip.setDelFiles(true);
                zip.run();
            }
        }
    }

    int dumped = 0;
    ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
    boolean checkedTables = (this.tablesMustHave == null);
    for (String tf : tables.keySet()) {
        String[] split = tf.split("\\|");
        log.info("Dumping for " + split[0]);
        String schema = null;
        try {
            schema = split[0].split("\\.")[0];

            if (!checkedTables) {
                ArrayList<String> mustHaveTemp = (ArrayList<String>) this.tablesMustHave.clone();
                ArrayList<String> existingTables = this.template.getJsonData(
                        "SELECT table_name FROM information_schema.tables WHERE table_schema ILIKE '%" + schema
                                + "%'");
                for (String tdict : existingTables) {
                    String table = JsonObject.readFrom(tdict).get("table_name").asString();
                    if (mustHaveTemp.contains(table)) {
                        mustHaveTemp.remove(table);

                        // get count
                        if (this.template.getCount(schema + "." + table) == 0) {
                            try {
                                throw new MissingData(
                                        "Data Missing from Required Table: " + schema + "." + table);
                            } catch (MissingData e) {
                                e.printStackTrace();
                            }
                        }
                    }
                }

                if (mustHaveTemp.size() > 0) {
                    log.error("Drop Schema " + schema + "  is missing the following tables:\n");
                    for (String table : mustHaveTemp) {
                        log.error(table + "\n");
                    }

                    try {
                        throw new TableMissingException();
                    } catch (TableMissingException e) {
                        e.printStackTrace();
                        System.exit(-1);
                    }
                }

            }

        } catch (IndexOutOfBoundsException e) {
            try {
                throw new SQLMalformedException("FATAL ERROR: Table name " + split[0] + " malformed");
            } catch (SQLMalformedException e2) {
                e2.printStackTrace();
                System.exit(-1);
            }
        }

        log.info("Checking  table: " + split[0] + "&& schema: " + schema);
        if (template.checkTable(split[0], schema)) {
            if (template.getCount(schema + "." + split[0].replace(schema + ".", "")) > 0) {
                Set<String> keys = tables.get(tf).keySet();
                String sql;
                String select = "SELECT ";
                String distinct = null;
                String attrs = null;
                String where = null;
                String group = null;
                String order = null;

                /**
                 * Set the attributes which can be specified:
                 *   distinct - concatenates the DISTINCT part of the query
                 *   not0     - requires length greater than 0 in the WHERE clause
                 *   group    - groups by the attribute
                 *   notnull  - requires that the attribute is not null
                 *   orderby  - specifies the single ORDER BY attribute
                 */
                for (String k : keys) {
                    if (k.toLowerCase().contains("distinct")) {
                        distinct = (distinct == null)
                                ? "distinct on(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                : distinct + "," + tables.get(tf).get(k).replaceAll("\\sas.*", "");
                    }

                    if (k.toLowerCase().contains("group")) {
                        group = (group == null) ? "GROUP BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                : group + "," + tables.get(tf).get(k).replaceAll("\\sas.*", "");
                    }

                    if (k.toLowerCase().contains("not0")) {
                        if (k.contains("not0OR")) {
                            where = (where == null)
                                    ? "WHERE length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ") >0 "
                                    : where + "OR length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ")";
                        } else {
                            where = (where == null)
                                    ? "WHERE length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ") >0 "
                                    : where + "AND length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ")";
                        }
                    }

                    if (k.toLowerCase().contains("notnull")) {
                        if (k.toLowerCase().contains("notnullor")) {
                            where = (where == null)
                                    ? "WHERE " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL"
                                    : where + " OR " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL";
                        } else {
                            where = (where == null)
                                    ? "WHERE " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL"
                                    : where + " AND " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL";
                        }
                    }

                    if (k.toLowerCase().contains("order")) {
                        if (k.toLowerCase().contains("orderdesc")) {
                            order = (order == null)
                                    ? "ORDER BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "") + " ASC"
                                    : order;
                        } else {
                            order = (order == null)
                                    ? "ORDER BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "") + " DESC"
                                    : order;
                        }
                    }

                    String field = tables.get(tf).get(k);
                    if (k.toLowerCase().contains("attr")) {
                        if (unicoderemove == true) {
                            field = "trim(replace(regexp_replace(" + field
                                    + ",'[^\\u0020-\\u007e,\\(\\);\\-\\[\\]]+',' '),'" + this.delimiter + "','"
                                    + this.replacedel + "')) as " + field;
                        } else {
                            field = "trim(replace(" + field + ",'" + this.delimiter + "','" + this.replacedel
                                    + "'))";
                        }

                        attrs = (attrs == null) ? field : attrs + "," + field;
                    }
                }

                select = (distinct == null) ? select : select.trim() + " " + distinct.trim() + ")";
                select += " " + attrs.trim();
                select += " FROM " + split[0].trim();
                select = (where == null) ? select : select.trim() + " " + where.trim();
                select = (group == null) ? select : select.trim() + " " + group.trim();
                select = (order == null) ? select : select.trim() + " " + order.trim();

                if (extracondition != null) {
                    select += select.contains(" WHERE ") ? " AND" + extracondition
                            : " WHERE " + extracondition;
                }

                select = select.trim();

                log.info("Dump Select Command: " + select);

                sql = "COPY  (" + select + ") TO STDOUT WITH DELIMITER '" + delimiter.trim()
                        + "' NULL as '' CSV HEADER";
                fjp.execute(new ToFile(sql, split[1].trim()));

                select = "SELECT ";
                distinct = null;
                attrs = null;
                where = null;
                group = null;
                order = null;
                dumped += 1;
            } else {
                try {
                    throw new NoDataException("No Data found in " + split[0]);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        } else {
            try {
                throw new SQLMalformedException("WARNING: Table " + split[0] + " is missing");
            } catch (SQLMalformedException e) {
                e.printStackTrace();
            }
        }
    }

    try {
        fjp.awaitTermination(60000, TimeUnit.MILLISECONDS);
        fjp.shutdown();
    } catch (InterruptedException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    if (dumped == 0) {
        log.error("No Date found in any tables");
        System.exit(-1);
    }

}