Example usage for java.util HashSet toString

List of usage examples for java.util HashSet toString

Introduction

On this page you can find example usages of java.util.HashSet.toString().

Prototype

public String toString() 

Document

Returns a string representation of the object.
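
HashSet inherits toString() from AbstractCollection, which renders the elements in square brackets, separated by ", ", in the set's iteration order (unspecified for a hash-based set). A minimal, self-contained sketch:

import java.util.HashSet;

public class HashSetToStringDemo {
    public static void main(String[] args) {
        HashSet<String> colors = new HashSet<String>();
        colors.add("red");
        colors.add("green");
        colors.add("blue");

        // Prints something like "[red, green, blue]" -- element
        // order is not guaranteed for a hash-based set.
        System.out.println(colors.toString());

        // An empty set prints as "[]".
        System.out.println(new HashSet<Integer>());
    }
}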

Usage

From source file:es.caib.seycon.ng.servei.AutoritzacioServiceImpl.java

protected Collection<AutoritzacioRol> handleGetDescriptionUserAuthorizations(String codiUsuari)
        throws Exception {

    Collection autoritzacionsRolUsuari = handleGetUserAuthorizations(codiUsuari);

    // IMPORTANT: its key is the role name + codiAutoritzacio +
    // description [domain_value]
    HashMap<String, AutoritzacioRol> autoritzacionsSenseRepeticions = new HashMap<String, AutoritzacioRol>();

    // Add extra information:
    if (autoritzacionsRolUsuari != null) {
        for (Iterator it = autoritzacionsRolUsuari.iterator(); it.hasNext();) {
            AutoritzacioRol auto = (AutoritzacioRol) it.next();
            AutoritzacioSEU autoSEU = (AutoritzacioSEU) getAuthorizations().get(auto.getAutoritzacio());
            if (autoSEU != null) {
                // format the domain values
                String valorDominiUsuari = ""; //$NON-NLS-1$
                if (auto.getValorDominiRolUsuari() != null && auto.getValorDominiRolUsuari().size() > 0) {
                    HashSet valors = new HashSet();
                    for (Iterator vit = auto.getValorDominiRolUsuari().iterator(); vit.hasNext();) {
                        ValorDomini vd = (ValorDomini) vit.next();
                        valors.add(vd.getValor());
                    }
                    // a set containing only the wildcard "*" is not displayed
                    if (!(valors.size() == 1 && valors.contains("*"))) //$NON-NLS-1$
                        valorDominiUsuari = " " + valors.toString(); //$NON-NLS-1$
                }

                auto.setDescripcio(autoSEU.getDescripcio() //$NON-NLS-1$
                        + valorDominiUsuari);
                auto.setTipusDomini(autoSEU.getTipusDomini());
                auto.setScope(autoSEU.getScope());
                auto.setAmbit(autoSEU.getAmbit());
                auto.setHereta(autoSEU.getHereta()); // comma-separated
                autoritzacionsSenseRepeticions
                        .put(auto.getRol().getNom() + auto.getAutoritzacio() + auto.getDescripcio(), auto);
            }

        }
        // Sort them
        LinkedList autosOrdenades = new LinkedList(autoritzacionsSenseRepeticions.values());
        Collections.sort(autosOrdenades, new ComparaAutos());
        return autosOrdenades;
    }

    return autoritzacionsSenseRepeticions.values();
}
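
In this example, toString() renders the set of collected domain values in HashSet's bracketed, comma-separated form (for example, [value1, value2]), which is appended to the authorization description; a set containing only the wildcard "*" is deliberately left out of the display.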

From source file:web.DirldapdeleteauthorController.java

/**
 * This method is called by the spring framework. The configuration
 * for this controller to be invoked is based on the pagetype and
 * is set in the urlMapping property in the spring config file.
 *
 * @param request the <code>HttpServletRequest</code>
 * @param response the <code>HttpServletResponse</code>
 * @throws ServletException
 * @throws IOException
 * @return ModelAndView this instance is returned to spring
 */
public synchronized ModelAndView handleRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {

    // ***************************************************************************
    // This initializes common data in the abstract class; the return value is not used.
    // The abstract class initializes the protected variables login, collabrum, and loginInfo,
    // which can be accessed in any controller.
    // ***************************************************************************

    try {
        ModelAndView m = super.handleRequest(request, response);
    } catch (Exception e) {
        return handleError("error in handleRequest", e);
    }

    if (!WebUtil.isLicenseProfessional(login)) {
        return handleError("Cannot manage directory feature in deluxe version.");
    }

    // ***************************************************************************
    // This is the only line of code needed to initialize all session info.
    // It must always run first, before anything else. Add it to each controller's
    // handleRequest method. Also remember to extend SessionObject.
    // ***************************************************************************
    outOfSession(request, response);
    if (RegexStrUtil.isNull(login)) {
        return handleUserpageError("Login is null in DirldapdeleteauthorController.");
    }

    String directoryId = request.getParameter(DbConstants.DIRECTORY_ID);
    if (!RegexStrUtil.isNull(directoryId) && directoryId.length() > GlobalConst.directoryidSize) {
        return handleError(
                "directoryId.length() > WebConstants.directoryidSize, DirldapdeleteauthorController");
    }
    directoryId = RegexStrUtil.goodNameStr(directoryId);

    /**
     * delete this user as author from directory 
     */
    DirectoryAuthorDao authorDao = (DirectoryAuthorDao) daoMapper.getDao(DbConstants.DIRECTORY_AUTHOR);
    if (authorDao == null) {
        return handleError("DirectoryAuthorDao is null in deleteauthorController directory, " + login);
    }

    DirectoryDao dirDao = (DirectoryDao) daoMapper.getDao(DbConstants.DIRECTORY);
    if (dirDao == null) {
        return handleError("DirectoryDao is null, DirshowadduserController  " + login);
    }

    try {
        authorDao.deleteAuthor(directoryId, member, loginInfo.getValue(DbConstants.LOGIN_ID), login);
    } catch (BaseDaoException e) {
        return handleError("Exception occured in deleteAuthor for login " + login, e);
    }

    if (RegexStrUtil.isNull(login)) {
        return handleUserpageError("Login is null, DirldapdeleteauthorController");
    }

    String pageNumStr = request.getParameter(DbConstants.PAGE_NUM);
    String pageSizeStr = request.getParameter(DbConstants.PAGE_SIZE);

    int pageSize = GlobalConst.defaultPageSize;
    int pageNum = GlobalConst.defaultPageNum;

    if (!RegexStrUtil.isNull(pageSizeStr)) {
        pageSize = Integer.parseInt(pageSizeStr);
    } else {
        pageSizeStr = Integer.toString(pageSize);
    }

    logger.info("pageSizeStr = " + pageSizeStr);

    if (!RegexStrUtil.isNull(pageNumStr)) {
        pageNum = Integer.parseInt(pageNumStr);
    } else {
        pageNumStr = Integer.toString(pageNum);
    }

    logger.info("pageNumStr = " + pageNumStr);

    String startStr = GlobalConst.startStr;
    String endStr = GlobalConst.endStr;
    String alphabets = request.getParameter(DbConstants.ALPHABET);
    String[] arrStr = null;
    if (!RegexStrUtil.isNull(alphabets)) {
        arrStr = alphabets.split("-");
    } else {
        alphabets = startStr + "-" + endStr;
    }

    char startChar = GlobalConst.startChar;
    char endChar = GlobalConst.endChar;

    if (arrStr != null && arrStr.length > 1) {
        logger.info("arrStr = " + arrStr);
        startChar = arrStr[0].charAt(0);
        startStr = arrStr[0];
        endChar = arrStr[1].charAt(0);
        endStr = arrStr[1];
    }

    logger.info("startChar = " + startChar + " endChar = " + endChar);

    List users = null;
    HashSet authorSet = null;
    Directory directory = null;
    String loginId = null;
    List authorLoginList = null;
    try {
        if (loginInfo != null) {
            loginId = loginInfo.getValue(DbConstants.LOGIN_ID);
            directory = dirDao.viewDirectory(directoryId, loginId, login, DbConstants.READ_FROM_SLAVE,
                    DbConstants.BLOB_READ_FROM_SLAVE, DbConstants.WEBSITE_READ_FROM_SLAVE);
            authorSet = authorDao.listAuthorsOfDirectory(directoryId, loginId, login,
                    DbConstants.READ_FROM_SLAVE);
            logger.info("authorSet = " + authorSet.toString());
            authorLoginList = authorDao.getLoginsOfAuthors(authorSet);
        }
    } catch (BaseDaoException e) {
        return handleError(
                "Exception occurred in viewDirectory()/listAuthorsOfDirectory(), DirldapdeleteauthorController, for login "
                        + login,
                e);
    }

    /** 
     * cobrand
     */
    Userpage cobrand = null;
    CobrandDao cobrandDao = (CobrandDao) getDaoMapper().getDao(DbConstants.COBRAND);
    if (cobrandDao == null) {
        return handleError("CobrandDao is null, DirldapdeleteauthorController");
    }
    try {
        cobrand = cobrandDao.getUserCobrand(loginId);
    } catch (BaseDaoException e) {
        return handleError("cobrand is null, DirldapdeleteauthorController.", e);
    }

    /**
     * Get all ldap users based on certain attributes.
     * Get all ldap group users based on the user grouptype for
     * this directory. Includes name, email, group.
     */
    List allLdapUsersName, allLdapUsersGroup, allLdapUsersMail, allLdapUsersUid;
    List groupUsersGroup, groupUsersMail, groupUsersUid;

    allLdapUsersName = allLdapUsersGroup = allLdapUsersMail = null;
    groupUsersGroup = groupUsersMail = groupUsersUid = null;
    allLdapUsersUid = null;

    List totalLdapUsers = null;
    String maxPageNumStr = null;
    String maxPageSizeStr = null;
    boolean enableUserGroup = false;
    boolean enableShareGroup = false;
    String ldapGroup = null;

    if (WebUtil.isLdapActive()) {
        int userMailIndex = 0;
        int userGroupIndex = 1;
        int userUidIndex = 2;
        try {
            enableUserGroup = dirDao.isUserGroupEnabled(login, loginId, directoryId);
            if (!enableUserGroup) {
                enableShareGroup = dirDao.isShareGroupEnabled(login, loginId, directoryId);
            }
            LdapApi ldapConnection = new LdapApi();
            if (ldapConnection == null) {
                return handleError("ldapConnection is null, DirldapdeleteauthorController");
            } else {

                /**
                 * Returns a list of user logins and a list of emails (login).
                 * Get the group of the author, then get the list of users
                 * who belong to this group.
                 */
                String[] attrs = { LdapConstants.ldapAttrMail, LdapConstants.ldapGroup,
                        LdapConstants.ldapAttrUid };
                List groupUsers = null;
                String groupArea = null;

                logger.info("enableUserGroup = " + enableUserGroup);
                logger.info("enableShareGroup = " + enableShareGroup);

                if (enableUserGroup || enableShareGroup) {
                    groupArea = dirDao.getDirectoryGroupAndAreaInfo(login, loginId, directoryId);
                    ldapGroup = dirDao.getDirectoryGroup(login);
                    logger.info("groupArea = " + groupArea + " ldapGroup = " + ldapGroup);
                    if (!RegexStrUtil.isNull(groupArea)) {
                        String searchDn = LdapUtil.getUserSearchDn(groupArea);
                        logger.info("searchDn = " + searchDn);
                        groupUsers = ldapConnection.getLdapUsers(searchDn, attrs, startChar, endChar, pageSize,
                                pageNum);
                    }
                    if (groupUsers == null) {
                        return handleError("ldap groupUsers is null, groupArea= " + groupArea
                                + " directoryid = " + directoryId);
                    } else {
                        if (groupUsers.size() > userMailIndex) {
                            groupUsersMail = (List) groupUsers.get(userMailIndex);
                            if (groupUsersMail != null) {
                                logger.info("groupUsersMail = " + groupUsersMail.toString());
                            }
                        }
                        if (groupUsers.size() > userGroupIndex) {
                            groupUsersGroup = (List) groupUsers.get(userGroupIndex);
                            if (groupUsersGroup != null) {
                                logger.info("groupUsersGroup = " + groupUsersGroup.toString());
                            }
                        }
                        if (groupUsers.size() > userUidIndex) {
                            groupUsersUid = (List) groupUsers.get(userUidIndex);
                            if (groupUsersUid != null) {
                                logger.info("groupUsersUid = " + groupUsersUid.toString());
                            }
                        }
                        logger.info("groupArea = " + groupArea);
                        logger.info("groupUsers = " + groupUsers.toString());
                    }
                }

                /*
                * When the scope is !enableUserGroup
                */
                List allLdapUsers = null;
                if (!enableUserGroup) {
                    logger.info("!enableUserGroup ");
                    allLdapUsers = ldapConnection.getLdapUsers(LdapConstants.ldapAdminRoleDn, attrs, startChar,
                            endChar, pageSize, pageNum);
                    totalLdapUsers = ldapConnection.getLdapUsers(LdapConstants.ldapAdminRoleDn, attrs,
                            startChar, endChar);
                    if (allLdapUsers == null) {
                        return handleError("allLdapUsers  is null, directoryid = " + directoryId);
                    } else {
                        logger.info("allLdapUsers = " + allLdapUsers.toString());
                    }

                    if (totalLdapUsers != null) {
                        logger.info("container has lists, totalLdapUsers.size() " + totalLdapUsers.size());
                        List totalLdapUsersMail = null;
                        if (totalLdapUsers.size() > userMailIndex) {
                            totalLdapUsersMail = (List) totalLdapUsers.get(userMailIndex);
                        }

                        if (totalLdapUsersMail != null) {
                            int maxPageNum = WebUtil.getMaxPageNumber(totalLdapUsersMail.size(), pageSize);
                            int maxPageSize = WebUtil.getMaxPageSize(totalLdapUsersMail.size(), pageSize);
                            logger.info("maxPageSize = " + maxPageSize);
                            logger.info("maxPageNum = " + maxPageNum);
                            maxPageSizeStr = Integer.toString(maxPageSize);
                            maxPageNumStr = Integer.toString(maxPageNum);
                            logger.info("maxPageSizeStr = " + maxPageSizeStr);
                            logger.info("maxPageNumStr = " + maxPageNumStr);
                        }
                    }
                }

                /**
                 * List the users only when we want to display
                 * enableShareGroup or show the list of users.
                 */
                /* 
                         if (enableShareGroup && groupUsers != null) {
                            allLdapUsersMail = LdapUtil.removeElements((List)allLdapUsers.get(userMailIndex), (List)groupUsers.get(userMailIndex));
                            allLdapUsersGroup = LdapUtil.removeElements((List)allLdapUsers.get(userGroupIndex), (List)groupUsers.get(userGroupIndex));
                         } else { 
                         }
                */

                if (allLdapUsers != null) {
                    if (allLdapUsers.size() > userMailIndex) {
                        allLdapUsersMail = (List) allLdapUsers.get(userMailIndex);
                    }
                    if (allLdapUsers.size() > userGroupIndex) {
                        allLdapUsersGroup = (List) allLdapUsers.get(userGroupIndex);
                    }
                    if (allLdapUsers.size() > userUidIndex) {
                        allLdapUsersUid = (List) allLdapUsers.get(userUidIndex);
                    }
                }

                if (!enableShareGroup && !enableUserGroup) {
                    logger.info("enableShareGroup = " + !enableShareGroup);
                    logger.info("enableUserGroup = " + !enableUserGroup);

                    if (allLdapUsersMail == null || allLdapUsersUid == null || allLdapUsersGroup == null) {
                        logger.info(
                                "allLdapUserseMail/allLdapUsersUid/allLdapUsersGroup is null, indicates that all users of ldap belong to the group "
                                        + login);
                        return handleError("allLdapUsersMail/allLdapUsersUid/allLdapUsersGroup is null");
                    } else {
                        if (allLdapUsersMail != null) {
                            logger.info("allLdapUsersMail = " + allLdapUsersMail.toString());
                        }
                        if (allLdapUsersGroup != null) {
                            logger.info("allLdapUsersGroup = " + allLdapUsersGroup.toString());
                        }
                        if (allLdapUsersUid != null) {
                            logger.info("allLdapUsersUid = " + allLdapUsersUid.toString());
                        }
                    }
                }
            }
        } catch (Exception e) {
            return handleError("ldap Exception getUsers()" + e.getMessage(), e);
        }
    }

    String viewName = DbConstants.SHOW_DB_AUTHORS;
    Map myModel = new HashMap();
    myModel.put(DbConstants.LOGIN_INFO, loginInfo);
    myModel.put(DbConstants.PAGE_NUM, pageNumStr);
    myModel.put(DbConstants.DIRECTORY, directory);
    myModel.put(DbConstants.MEMBERS, authorSet);
    myModel.put(DbConstants.COBRAND, cobrand);
    myModel.put(DbConstants.DIR_EXISTS, rbDirectoryExists);
    myModel.put(DbConstants.START, startStr);
    myModel.put(DbConstants.END, endStr);
    myModel.put(DbConstants.USER_PAGE, userpage);
    myModel.put(DbConstants.BUSINESS_EXISTS, isBizExists(login));
    myModel.put(DbConstants.MAX_PAGE_NUM, maxPageNumStr);
    myModel.put(DbConstants.MAX_PAGE_SIZE, maxPageSizeStr);
    myModel.put(DbConstants.PAGE_SIZE, pageSizeStr);
    myModel.put(DbConstants.ALPHABET, alphabets);
    //  myModel.put(DbConstants.USERS, users);
    //  myModel.put(DbConstants.USERS_ALPHABET, alphabetUsers);

    if (WebUtil.isLdapActive()) {
        if (enableUserGroup) {
            myModel.put(DbConstants.LDAP_GROUP, ldapGroup);
            myModel.put(DbConstants.ALL_LDAP_USERS_MAIL, groupUsersMail);
            myModel.put(DbConstants.ALL_LDAP_USERS_GROUP, groupUsersGroup);
            myModel.put(DbConstants.ALL_LDAP_USERS_UID, groupUsersUid);
            myModel.put(DbConstants.ENABLE_USER_GROUP, "1");
        } else {
            myModel.put(DbConstants.ALL_LDAP_USERS_MAIL, allLdapUsersMail);
            myModel.put(DbConstants.ALL_LDAP_USERS_GROUP, allLdapUsersGroup);
            myModel.put(DbConstants.ALL_LDAP_USERS_UID, allLdapUsersUid);
            myModel.put(DbConstants.ENABLE_USER_GROUP, "0");
        }
        /*
              if (enableShareGroup) {
                 myModel.put(DbConstants.ENABLE_SHARE_GROUP, "1");
              } else {
                 myModel.put(DbConstants.ENABLE_SHARE_GROUP, "0");
              }
        */
        myModel.put(DbConstants.TOTAL_LDAP_USERS, totalLdapUsers);
    }
    myModel.put(DbConstants.LOGIN_LIST, authorLoginList);
    return new ModelAndView(viewName, "model", myModel);
}

From source file:dao.SearchDaoDb.java

/**
 * cleanForBizAccess - filters the resultSet based on business access
 * @param resultSet - resultSet of objects
 * @param bid - bid to which the login belongs
 * @param login - login who is searching for info
 * @param modelType - type of the model (Userpage/Blog/Photo)
 * @return HashSet - resultSet cleaned up for biz access
 * @throws BaseDaoException If we have a problem interpreting the data or the data is missing
 * or incorrect
 */
private HashSet cleanForBizAccess(HashSet resultSet, String bid, String login, int modelType) {

    if (RegexStrUtil.isNull(bid) || RegexStrUtil.isNull(login) || resultSet == null) {
        throw new BaseDaoException("params are null");
    }

    logger.info("bid = " + bid + " login=" + login);
    logger.info("resultSet = " + resultSet.toString());

    /**
     * don't create a new set - in future, fix this
     */
    HashSet bizHashSet = new HashSet();
    Iterator it1 = resultSet.iterator();
    while (it1.hasNext()) {
        Object obj = it1.next();
        String mbid, bsearch;
        bsearch = mbid = null;
        if (modelType == USER_PAGE) {
            mbid = ((Userpage) obj).getValue(DbConstants.BID);
            bsearch = ((Userpage) obj).getValue(DbConstants.BSEARCH);
        }
        if (modelType == BLOG) {
            mbid = ((Blog) obj).getValue(DbConstants.BID);
            bsearch = ((Blog) obj).getValue(DbConstants.BSEARCH);
        }

        if (modelType == PHOTO) {
            mbid = ((Photo) obj).getValue(DbConstants.BID);
            bsearch = ((Photo) obj).getValue(DbConstants.BSEARCH);
        }

        if ((bsearch != null && bsearch.equals(DbConstants.BSEARCH_ALLOW))
                || (mbid != null && mbid.equals(bid))) {
            /** this member's information is accessible,
             * so keep it in the result
             */
            bizHashSet.add(obj);
        }
    }
    logger.info("bizHashSet = " + bizHashSet.toString());
    return bizHashSet;
}

From source file:com.novartis.opensource.yada.format.Joiner.java

/**
 * The meaty bit. Uses hsqldb to create in-memory db tables for the combined rows of converted results in each yqr.
 * Then uses the join spec to build data structures, mapping columns to tables, tables to columns, and table pairs to columns.
 * Then builds a select join query from the structures, executes it, wraps and returns the results.
 * @return a {@link JSONArray} containing structured results, or a {@link StringBuffer} containing delimited results
 * @throws YADAResponseException if there is a problem with the in-memory database
 */
public Object join() throws YADAResponseException {
    Object result = null;
    try {
        Class.forName("org.hsqldb.jdbcDriver");
    } catch (ClassNotFoundException e1) {
        //TODO exception handling
    }
    try (Connection c = DriverManager.getConnection("jdbc:hsqldb:mem:mymemdb", "SA", "");) {
        StringBuffer sql = null;
        StringBuffer buffer = new StringBuffer();
        JSONArray rows = new JSONArray();
        boolean isFormatStructured = isFormatStructured();
        // create tables and insert data
        for (YADAQueryResult yqr : getYadaQueryResults()) {
            // create tables
            sql = new StringBuffer();
            sql.append("CREATE TABLE");
            sql.append(" T" + yqr.hashCode());
            sql.append(" (");
            for (int col = 0; col < yqr.getConvertedHeader().size(); col++) {
                sql.append(yqr.getConvertedHeader().get(col).replaceAll("\"", ""));
                sql.append(" VARCHAR(4000)");
                if (col < yqr.getConvertedHeader().size() - 1)
                    sql.append(",");
            }
            sql.append(")");
            l.debug(sql.toString());
            try (PreparedStatement create = c.prepareStatement(sql.toString());) {
                create.executeUpdate();
            } catch (SQLException e) {
                String msg = "Unable to create in-memory tables";
                throw new YADAResponseException(msg, e);
            }

            StringBuilder header = new StringBuilder();
            StringBuilder params = new StringBuilder();
            String delim = "";

            //TODO build these in first iteration of converted header during CREATE construction
            for (String hdr : yqr.getConvertedHeader()) {
                header.append(delim).append(hdr);
                params.append(delim).append("?");
                delim = ",";
            }

            // inserts

            sql = new StringBuffer();
            sql.append("INSERT INTO T" + yqr.hashCode());
            sql.append(" (");
            sql.append(header.toString().replaceAll("\"", ""));
            sql.append(") VALUES (");
            sql.append(params);
            sql.append(")");
            l.debug(sql.toString());
            try (PreparedStatement insert = c.prepareStatement(sql.toString());) {

                for (int i = 0; i < yqr.getConvertedResults().size(); i++) {
                    //TODO xml
                    if (isFormatStructured) // json (and someday xml)
                    {
                        @SuppressWarnings("unchecked")
                        List<String> results = (List<String>) yqr.getConvertedResult(i);
                        for (String res : results) {
                            JSONObject row = new JSONObject(res);
                            for (int k = 1; k <= yqr.getConvertedHeader().size(); k++) {
                                String key = yqr.getConvertedHeader().get(k - 1);
                                insert.setString(k, row.getString(key));
                            }
                            insert.addBatch();
                        }
                    } else // delimited
                    {
                        @SuppressWarnings("unchecked")
                        List<List<String>> results = (List<List<String>>) yqr.getConvertedResult(i);
                        for (int j = 0; j < results.size(); j++) {
                            for (int k = 1; k <= yqr.getConvertedHeader().size(); k++) {
                                insert.setString(k, results.get(j).get(k - 1));
                            }
                            insert.addBatch();
                        }
                    }
                }
                insert.executeBatch();
            } catch (SQLException e) {
                String msg = "Unable to populate in-memory tables";
                throw new YADAResponseException(msg, e);
            }
        }

        // derive/apply the join spec
        // get columns from converted headers
        // TODO create this list in previous YQR iteration
        List<List<String>> localHeaders = new ArrayList<>();
        for (int i = 0; i < getYadaQueryResults().length; i++) {
            localHeaders.add(getYadaQueryResults()[i].getConvertedHeader());
        }

        String specStr = "";
        if (isOuter())
            specStr = getYADAQueryParamValue(YADARequest.PS_LEFTJOIN);
        else
            specStr = getYADAQueryParamValue(YADARequest.PS_JOIN);

        HashSet<String> specSet = null;

        if (!specStr.equals("")) {
            if (specStr.equals("true")) {
                specSet = new HashSet<>();
                for (int i = 0; i < localHeaders.size() - 1; i++) {
                    for (int j = 0; j < localHeaders.get(i).size(); j++) {
                        String hdr = localHeaders.get(i).get(j);
                        for (int k = i + 1; k < localHeaders.size(); k++) {
                            if (localHeaders.get(k).contains(hdr))
                                specSet.add(hdr.replaceAll("\"", ""));
                        }
                    }
                }
            } else {
                specSet = new HashSet<>(Arrays.asList(specStr.split(",")));
            }
            l.debug("specStr = " + specStr);
            l.debug("specSet = " + specSet.toString());
        }

        // hash the column indexes by request
        Map<String, Set<String>> S_t2c = new LinkedHashMap<>(); // the cols mapped to tables
        Map<String, Set<String>> S_c2t = new HashMap<>(); // the tables mapped to the columns
        for (int i = 0; i < localHeaders.size() - 1; i++) {
            String table = "T" + getYadaQueryResults()[i].hashCode();
            String nextTable = "T" + getYadaQueryResults()[i + 1].hashCode();
            HashSet<String> dupeCheck = new HashSet<>();
            List<String> iHdr = localHeaders.get(i);
            List<String> jHdr = localHeaders.get(i + 1);
            for (String hdr : iHdr) {
                String _hdr = hdr.replaceAll("\"", "");
                dupeCheck.add(_hdr);
            }
            for (String hdr : jHdr) {
                String _hdr = hdr.replaceAll("\"", "");
                if (dupeCheck.contains(_hdr) && (specSet == null || (specSet.contains(_hdr)
                        || specSet.contains(_hdr.toLowerCase()) || specSet.contains(_hdr.toUpperCase())))) {
                    // table to columns
                    if (!S_t2c.containsKey(table)) {
                        S_t2c.put(table, new HashSet<String>());
                    }
                    S_t2c.get(table).add(_hdr);

                    // column to tables
                    if (!S_c2t.containsKey(_hdr)) {
                        S_c2t.put(_hdr, new HashSet<String>());
                    }
                    S_c2t.get(_hdr).add(table);

                    // nextTable to columns
                    if (!S_t2c.containsKey(nextTable)) {
                        S_t2c.put(nextTable, new HashSet<String>());
                    }
                    S_t2c.get(nextTable).add(_hdr);
                    // column to tables
                    S_c2t.get(_hdr).add(nextTable);
                }
            }
        }

        // hash the table combo to the col
        HashMap<List<String>, List<String>> S_tt2c = new HashMap<>();
        for (String col : S_c2t.keySet()) {
            List<String> tables = new ArrayList<>(S_c2t.get(col));
            if (tables.size() == 2) {
                if (S_tt2c.get(tables) == null)
                    S_tt2c.put(tables, new ArrayList<>(Arrays.asList(col)));
                else
                    S_tt2c.get(tables).add(col);
            } else {
                for (int i = 0; i < tables.size() - 1; i++) {
                    List<String> biTabs = new ArrayList<>();
                    biTabs.add(tables.get(i));
                    biTabs.add(tables.get(++i));
                    if (S_tt2c.get(biTabs) == null)
                        S_tt2c.put(biTabs, new ArrayList<>(Arrays.asList(col)));
                    else
                        S_tt2c.get(biTabs).add(col);
                }
            }
        }

        /*
         *   i=0, table = t1,
         *   i=1, table = t2,
         *   joinTable = (
         */

        // build join
        sql = new StringBuffer();
        sql.append("SELECT");
        String delim = " ";
        StringBuilder gh = new StringBuilder();
        Set<String> globalHeader = getGlobalHeader();
        for (String hdr : globalHeader) {
            //TODO consider using COALESCE in here to return empty strings instead of 'null' on LEFT JOINs, maybe make that a parameter as well
            gh.append(delim + hdr.replaceAll("\"", ""));
            delim = ", ";
        }
        sql.append(gh);
        sql.append(" FROM");
        String[] tables = S_t2c.keySet().toArray(new String[S_t2c.size()]);
        //      Arrays.sort(tables);
        for (int i = 0; i < tables.length; i++) {
            String table = tables[i];
            if (i == 0) {
                sql.append(" " + table);
            } else {
                List<String> joinTables = Arrays.asList(tables[i - 1], tables[i]);
                List<String> columns = S_tt2c.get(joinTables);
                if (columns == null) {
                    joinTables = Arrays.asList(tables[i], tables[i - 1]);
                    columns = S_tt2c.get(joinTables);
                }
                if (isOuter())
                    sql.append(" LEFT");
                sql.append(" JOIN " + table + " ON");
                for (int j = 0; j < columns.size(); j++) {
                    String col = columns.get(j);
                    if (j > 0)
                        sql.append(" AND");
                    sql.append(" " + joinTables.get(0) + "." + col + " = " + joinTables.get(1) + "." + col);
                }
            }
        }
        sql.append(" GROUP BY");
        sql.append(gh);
        l.debug(sql.toString());
        ResultSet rs = null;
        String colsep = getYADAQueryParamValue(YADARequest.PS_DELIMITER);
        String recsep = getYADAQueryParamValue(YADARequest.PS_ROW_DELIMITER);
        try (PreparedStatement select = c.prepareStatement(sql.toString());) {
            rs = select.executeQuery();
            if (isFormatStructured) {
                while (rs.next()) {
                    JSONObject j = new JSONObject();
                    for (String key : globalHeader) {
                        j.put(key, rs.getString(key));
                    }
                    rows.put(j);
                }
                result = rows;
            } else {
                while (rs.next()) {
                    delim = "";
                    for (int j = 0; j < rs.getMetaData().getColumnCount(); j++) {
                        buffer.append(delim + rs.getString(j + 1));
                        delim = colsep;
                    }
                    buffer.append(recsep);
                }
                result = buffer;
            }
        } catch (SQLException e) {
            String msg = "Unable to format result sets.";
            throw new YADAResponseException(msg, e);
        } finally {
            try {
                if (rs != null)
                    rs.close();
            } catch (SQLException e) {
                String msg = "There was a problem releasing resources.";
                throw new YADAResponseException(msg, e);
            }
        }
    } catch (SQLException e) {
        String msg = "Unable to connect to in-memory database.";
        throw new YADAResponseException(msg, e);
    }
    return result;
}
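
The join() method above relies on HSQLDB's in-memory JDBC mode: each result set becomes a throwaway table that lives only as long as the connection. A minimal, self-contained sketch of that pattern, assuming the hsqldb jar is on the classpath (the table and column names are illustrative, not taken from the Joiner source):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class InMemoryTableSketch {
    public static void main(String[] args) throws Exception {
        // Register the driver explicitly, as join() does (recent hsqldb
        // versions also auto-register via the JDBC service loader).
        Class.forName("org.hsqldb.jdbcDriver");
        try (Connection c = DriverManager.getConnection("jdbc:hsqldb:mem:demo", "SA", "")) {
            // create an in-memory table
            try (PreparedStatement create = c
                    .prepareStatement("CREATE TABLE T1 (ID VARCHAR(4000), NAME VARCHAR(4000))")) {
                create.executeUpdate();
            }
            // batch-insert a row, mirroring the INSERT loop above
            try (PreparedStatement insert = c.prepareStatement("INSERT INTO T1 (ID, NAME) VALUES (?, ?)")) {
                insert.setString(1, "1");
                insert.setString(2, "alpha");
                insert.addBatch();
                insert.executeBatch();
            }
            // read the rows back, as the final SELECT does
            try (PreparedStatement select = c.prepareStatement("SELECT ID, NAME FROM T1");
                    ResultSet rs = select.executeQuery()) {
                while (rs.next()) {
                    System.out.println(rs.getString("ID") + "," + rs.getString("NAME"));
                }
            }
        }
    }
}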

From source file:web.ZipUploadController.java

/**
 * This method is called by the spring framework. The configuration
 * for this controller to be invoked is based on the pagetype and
 * is set in the urlMapping property in the spring config file.
 *
 * @param request the <code>HttpServletRequest</code>
 * @param response the <code>HttpServletResponse</code>
 * @throws ServletException
 * @throws IOException
 * @return ModelAndView this instance is returned to spring
 */
public ModelAndView handleRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {

    try {
        ModelAndView modelAndView = super.handleRequest(request, response);
    } catch (Exception e) {
        return handleError("error in handleRequest", e);
    }

    outOfSession(request, response);

    if (RegexStrUtil.isNull(login) && RegexStrUtil.isNull(member)) {
        return handleUserpageError("Login & member are null.");
    }

    String category = request.getParameter(DbConstants.CATEGORY);
    boolean isCobrand = false;
    if (!RegexStrUtil.isNull(request.getParameter(DbConstants.IS_COBRAND))) {
        isCobrand = request.getParameter(DbConstants.IS_COBRAND).equals("1");
    }

    if ((!RegexStrUtil.isNull(category) && category.equals(DbConstants.FILE_CATEGORY)) || isCobrand) {
        if (!WebUtil.isLicenseProfessional(login)) {
            return handleError(
                    "Cannot access user carryon features or cobrand user in deluxe version." + login);
        }
    }
    if (RegexStrUtil.isNull(category)) {
        return handleError("category is null in CarryonupdateController. " + login);
    }

    if (daoMapper == null) {
        return handleError("DaoMapper is null in carryon update.");
    }

    CarryonDao carryonDao = (CarryonDao) daoMapper.getDao(DbConstants.CARRYON);
    if (carryonDao == null) {
        return handleError("CarryonDao is null for carryon update.");
    }

    byte[] blob = null;
    String mtype = null;
    if (!RegexStrUtil.isNull(category)) {
        int catVal = Integer.parseInt(category);
        if (catVal < GlobalConst.categoryMinSize || catVal > GlobalConst.categoryMaxSize) {
            return handleError("category values are not correct" + catVal);
        }
    }

    CobrandDao cobrandDao = (CobrandDao) daoMapper.getDao(DbConstants.COBRAND);
    if (cobrandDao == null) {
        return handleError("cobrandDao is null for CarryonupdateController");
    }

    DisplaypageDao displayDao = (DisplaypageDao) daoMapper.getDao(DbConstants.DISPLAY_PAGE);
    if (displayDao == null) {
        return handleError("displayDao is null for CarryonupdateController");
    }

    Displaypage displaypage = null;
    Userpage cobrand = null;
    try {
        displaypage = displayDao.getDisplaypage(login, DbConstants.READ_FROM_SLAVE);
        cobrand = cobrandDao.getUserCobrand(loginInfo.getValue(DbConstants.LOGIN_ID));
    } catch (BaseDaoException e) {
        return handleError("Exception occurred in getDisplaypage() for login " + login, e);
    }

    System.setProperty("jmagick.systemclassloader", "no");

    List fileList = null;
    ServletFileUpload upload = null;
    try {
        // Check that we have a file upload request
        boolean isMultipart = FileUpload.isMultipartContent(request);
        if (isMultipart) {
            // Create a factory for disk-based file items
            DiskFileItemFactory factory = new DiskFileItemFactory();

            // Set factory constraints
            factory.setSizeThreshold(maxMemorySize.intValue());
            //factory.setRepository(new File(tempDirectory));

            // Create a new file upload handler
            upload = new ServletFileUpload(factory);

            // Set overall request size constraint
            upload.setSizeMax(maxRequestSize.longValue());

            // Parse the request
            fileList = upload.parseRequest(request);

            long fieldsize = 0;
            String fieldname, fieldvalue;
            fieldname = fieldvalue = null;

            // assign the field names for this form using setFieldName()
            String label = "btitle";
            String caption = "";
            String tagsLabel = DbConstants.USER_TAGS;
            String fileName = null;
            String usertags = null;
            String btitle = null;

            // Process the uploaded items
            Iterator iter = fileList.iterator();
            while (iter.hasNext()) {
                FileItem fileItem = (FileItem) iter.next();
                if (fileItem.isFormField()) {
                    fileItem.setFieldName(label);
                    fieldname = fileItem.getFieldName();
                    logger.info("fieldname = " + fieldname);
                    if (fieldname.equalsIgnoreCase(DbConstants.USER_TAGS)) {
                        usertags = fileItem.getString();
                        label = "";
                    } else {
                        if (fieldname.equalsIgnoreCase("btitle")) {
                            btitle = fileItem.getString();
                            label = DbConstants.CAPTION;
                        } else {
                            if (fieldname.equalsIgnoreCase("caption")) {
                                caption = fileItem.getString();
                                label = DbConstants.USER_TAGS;
                            } else {
                                fieldvalue = fileItem.getString();
                            }
                        }
                    }
                } else {
                    logger.info("contentType = " + fileItem.getContentType());
                    if (fileItem.getContentType().contains("zip")) {
                        List entries = zipUtil.getEntries(fileItem.get());
                        logger.info("num entries = " + entries.size());
                        Iterator iter1 = entries.iterator();
                        while (iter1.hasNext()) {
                            Media media = (Media) iter1.next();
                            blob = media.getData();
                            mtype = mimeMap.getMimeType(zipUtil.getSuffix(media.getName()));
                            fileName = media.getName();
                            fieldsize = media.getData().length;
                            if (RegexStrUtil.isNull(btitle)) {
                                btitle = fileName;
                            }
                            if ((fieldsize <= 0) || (RegexStrUtil.isNull(mtype))
                                    || (RegexStrUtil.isNull(btitle)) || (blob == null)) {
                                return handleError(
                                        "fieldsize/mtype/btitle/blob one of them is empty, cannot upload files.");
                            }
                            if (!isCobrand) {
                                if (btitle.length() > GlobalConst.blobTitleSize) {
                                    btitle = btitle.substring(0, GlobalConst.blobTitleSize);
                                }
                                int zoom = 100;
                                if (!RegexStrUtil.isNull(usertags)) {
                                    if (usertags.length() > GlobalConst.usertagsSize) {
                                        usertags = usertags.substring(0, GlobalConst.usertagsSize);
                                    }
                                    usertags = RegexStrUtil.goodText(usertags);
                                }
                                if (!RegexStrUtil.isNull(caption)) {
                                    if (caption.length() > GlobalConst.refererSize) {
                                        caption = caption.substring(0, GlobalConst.refererSize);
                                    }
                                    caption = RegexStrUtil.goodText(caption);
                                }
                                boolean publishPhoto = displayDao.getDisplayPhotos(login,
                                        DbConstants.READ_FROM_SLAVE);
                                carryonDao.addCarryon(fieldsize, category, mtype, RegexStrUtil.goodText(btitle),
                                        blob, zoom, loginInfo.getValue(DbConstants.LOGIN_ID), login, usertags,
                                        caption, publishPhoto);

                            }
                        }
                    } else {
                        if (!validImage.isValid(fileItem.getContentType())) {
                            logger.warn("Found unexpected content type in upload, ignoring  "
                                    + fileItem.getContentType());
                            continue;
                        }
                        logger.debug("Is not a form field");
                        blob = fileItem.get();
                        mtype = fileItem.getContentType();
                        fileName = fileItem.getName();
                        fieldsize = fileItem.getSize();
                        if (RegexStrUtil.isNull(btitle)) {
                            btitle = fileName;
                        }
                        if ((fieldsize <= 0) || (RegexStrUtil.isNull(mtype)) || (RegexStrUtil.isNull(btitle))
                                || (blob == null)) {
                            return handleError(
                                    "fieldsize/mtype/btitle/blob one of them is empty, cannot upload files.");
                        }
                        if (isCobrand)
                            break;
                        if (!isCobrand) {
                            if (btitle.length() > GlobalConst.blobTitleSize) {
                                btitle = btitle.substring(0, GlobalConst.blobTitleSize);
                            }
                            int zoom = 100;
                            if (!RegexStrUtil.isNull(usertags)) {
                                if (usertags.length() > GlobalConst.usertagsSize) {
                                    usertags = usertags.substring(0, GlobalConst.usertagsSize);
                                }
                                usertags = RegexStrUtil.goodText(usertags);
                            }
                            if (!RegexStrUtil.isNull(caption)) {
                                if (caption.length() > GlobalConst.refererSize) {
                                    caption = caption.substring(0, GlobalConst.refererSize);
                                }
                                caption = RegexStrUtil.goodText(caption);
                            }
                            boolean publishPhoto = displayDao.getDisplayPhotos(login,
                                    DbConstants.READ_FROM_SLAVE);
                            carryonDao.addCarryon(fieldsize, category, mtype, RegexStrUtil.goodText(btitle),
                                    blob, zoom, loginInfo.getValue(DbConstants.LOGIN_ID), login, usertags,
                                    caption, publishPhoto);

                        }
                    }
                }
            }
        } else {
            return handleError("Did not get a multipart request");
        }
    } catch (Exception e) {
        return handleError("Exception occurred in addCarryon/addCobrandUserStreamBlo()", e);
    }

    if (isCobrand) {
        try {
            String ftype = request.getParameter(DbConstants.TYPE);
            if (RegexStrUtil.isNull(ftype)) {
                return handleError("ftype is null, CarryonUpdateController() ");
            }
            if (ftype.equals(DbConstants.COBRAND_HEADER) || ftype.equals(DbConstants.COBRAND_FOOTER)) {
                cobrandDao.addUserCobrand(blob, ftype, loginInfo.getValue(DbConstants.LOGIN_ID), login);
            } else {
                return handleError("cobrand type is not a header or footer in CarryonUpdateController ");
            }
        } catch (BaseDaoException e) {
            return handleError("Exception occurred in addCobrandUserStreamBlo()", e);
        }
    }

    /**
     * list the files
     */
    String loginId = loginInfo.getValue(DbConstants.LOGIN_ID);
    List carryon = null;
    List tagList = null;
    HashSet tagSet = null;
    try {
        carryon = carryonDao.getCarryonByCategory(loginId, category, DbConstants.READ_FROM_MASTER);
        tagList = carryonDao.getTags(loginId, DbConstants.READ_FROM_MASTER);
        tagSet = carryonDao.getUniqueTags(tagList);
    } catch (BaseDaoException e) {
        return handleError(
                "Exception occurred in getCarryonByCategory()/getTags carryon update for login " + login, e);
    }

    /**
     * display information about the files, if the category of the blobs is files category (1)
     */
    String viewName = DbConstants.EDIT_PHOTOS;
    if (category.equals(DbConstants.FILE_CATEGORY)) {
        viewName = DbConstants.EDIT_FILES;
    }

    Map myModel = new HashMap();
    myModel.put(viewName, carryon);
    myModel.put(DbConstants.COBRAND, cobrand);
    if (tagSet != null) {
        myModel.put(DbConstants.USER_TAGS, RegexStrUtil.goodText(tagSet.toString()));
    }
    myModel.put(DbConstants.LOGIN_INFO, loginInfo);
    myModel.put(DbConstants.DISPLAY_PAGE, displaypage);
    myModel.put(DbConstants.USER_PAGE, userpage);
    myModel.put(DbConstants.SHARE_INFO, shareInfo);
    myModel.put(DbConstants.VISITOR_PAGE, memberUserpage);
    myModel.put(DbConstants.DIR_EXISTS, rbDirectoryExists);
    myModel.put(DbConstants.BUSINESS_EXISTS, isBizExists(login));
    return new ModelAndView(viewName, "model", myModel);
}

From source file:com.ikanow.aleph2.search_service.elasticsearch.services.ElasticsearchIndexService.java

@Override
public Tuple2<String, List<BasicMessageBean>> validateSchema(final SearchIndexSchemaBean schema,
        final DataBucketBean bucket) {
    final LinkedList<BasicMessageBean> errors = new LinkedList<BasicMessageBean>(); // (Warning mutable code)
    try {
        Map<String, DataSchemaBean.ColumnarSchemaBean> tokenization_overrides = Optionals
                .of(() -> schema.tokenization_override()).orElse(Collections.emptyMap());
        final HashSet<String> unsupported_tokenization_overrides = new HashSet<String>(
                tokenization_overrides.keySet());
        unsupported_tokenization_overrides
                .removeAll(Arrays.asList(ElasticsearchIndexUtils.DEFAULT_TOKENIZATION_TYPE,
                        ElasticsearchIndexUtils.NO_TOKENIZATION_TYPE));
        if (!unsupported_tokenization_overrides.isEmpty()) {
            errors.add(ErrorUtils.buildErrorMessage(bucket.full_name(), "validateSchema",
                    SearchIndexErrorUtils.NOT_YET_SUPPORTED,
                    "tokenization_overrides: " + unsupported_tokenization_overrides.toString()));
        }
        Map<String, DataSchemaBean.ColumnarSchemaBean> type_overrides = Optionals
                .of(() -> schema.type_override()).orElse(Collections.emptyMap());
        type_overrides.keySet().stream().filter(type -> !_supported_types.contains(type))
                .forEach(type -> errors.add(ErrorUtils.buildErrorMessage(bucket.full_name(), "validateSchema",
                        SearchIndexErrorUtils.NOT_YET_SUPPORTED, "type: " + type)));

        // If the user is trying to override the index name then they have to be admin:
        final Optional<String> manual_index_name = Optionals
                .<String>of(() -> ((String) bucket.data_schema().search_index_schema()
                        .technology_override_schema().get(SearchIndexSchemaDefaultBean.index_name_override_)));

        if (manual_index_name.isPresent()) { // (then must be admin)
            if (!_service_context.getSecurityService().hasUserRole(bucket.owner_id(),
                    ISecurityService.ROLE_ADMIN)) {
                errors.add(ErrorUtils.buildErrorMessage(bucket.full_name(), "validateSchema",
                        SearchIndexErrorUtils.NON_ADMIN_BUCKET_NAME_OVERRIDE));
            }
        }

        final String index_name = ElasticsearchIndexUtils.getBaseIndexName(bucket, Optional.empty());
        boolean error = false; // (Warning mutable code)
        final boolean is_verbose = is_verbose(schema);
        final ElasticsearchIndexServiceConfigBean schema_config = ElasticsearchIndexConfigUtils
                .buildConfigBeanFromSchema(bucket, _config, _mapper);

        // 1) Check the schema:

        try {
            final Optional<String> type = Optional.ofNullable(schema_config.search_technology_override())
                    .map(t -> t.type_name_or_prefix());
            final String index_type = CollidePolicy.new_type == Optional
                    .ofNullable(schema_config.search_technology_override()).map(t -> t.collide_policy())
                    .orElse(CollidePolicy.new_type) ? "_default_"
                            : type.orElse(ElasticsearchIndexServiceConfigBean.DEFAULT_FIXED_TYPE_NAME);

            final XContentBuilder mapping = ElasticsearchIndexUtils.createIndexMapping(bucket, Optional.empty(),
                    true, schema_config, _mapper, index_type);
            if (is_verbose) {
                errors.add(ErrorUtils.buildSuccessMessage(bucket.full_name(), "validateSchema",
                        mapping.bytes().toUtf8()));
            }
        } catch (Throwable e) {
            errors.add(ErrorUtils.buildErrorMessage(bucket.full_name(), "validateSchema",
                    ErrorUtils.getLongForm("{0}", e)));
            error = true;
        }

        // 2) Sanity check the max size

        final Optional<Long> index_max_size = Optional
                .ofNullable(schema_config.search_technology_override().target_index_size_mb());
        if (index_max_size.isPresent()) {
            final long max = index_max_size.get();
            if ((max > 0) && (max < 25)) {
                errors.add(ErrorUtils.buildErrorMessage(bucket.full_name(), "validateSchema",
                        SearchIndexErrorUtils.INVALID_MAX_INDEX_SIZE, max));
                error = true;
            } else if (is_verbose) {
                errors.add(ErrorUtils.buildSuccessMessage(bucket.full_name(), "validateSchema",
                        "Max index size = {0} MB", max));
            }
        }
        return Tuples._2T(error ? "" : index_name, errors);
    } catch (Exception e) { // Very early error has occurred, just report that:
        return Tuples._2T("", Arrays.asList(ErrorUtils.buildErrorMessage(bucket.full_name(), "validateSchema",
                ErrorUtils.getLongForm("{0}", e))));
    }
}

From source file:cz.cas.lib.proarc.webapp.server.rest.DigitalObjectResource.java

/**
 * Removes given children from a parent.
 * <p><b>Requires handler commit!</b>
 * @param parent parent PID
 * @param toRemovePidSet PIDs of children to remove
 */
private void deleteMembers(DigitalObjectHandler parent, Set<String> toRemovePidSet)
        throws DigitalObjectException {
    RelationEditor editor = parent.relations();
    List<String> members = editor.getMembers();
    // check that PIDs being removed are members of parent object
    HashSet<String> toRemovePidSetCopy = new HashSet<String>(toRemovePidSet);
    toRemovePidSetCopy.removeAll(members);
    if (!toRemovePidSetCopy.isEmpty()) {
        String msg = String.format("Parent: %s does not contain members: %s", parent.getFedoraObject().getPid(),
                toRemovePidSetCopy.toString());
        throw RestException.plainText(Status.BAD_REQUEST, msg);
    }
    // remove
    if (members.removeAll(toRemovePidSet)) {
        editor.setMembers(members);
        editor.write(editor.getLastModified(), session.asFedoraLog());
    }
}

From source file:cz.cas.lib.proarc.webapp.server.rest.DigitalObjectResource.java

private void checkSearchedMembers(Set<String> pids, Map<String, Item> memberSearchMap) throws RestException {
    if (!pids.equals(memberSearchMap.keySet())) {
        HashSet<String> notMembers = new HashSet<String>(pids);
        notMembers.removeAll(memberSearchMap.keySet());
        HashSet<String> missingPids = new HashSet<String>(memberSearchMap.keySet());
        missingPids.removeAll(pids);
        throw RestException.plainNotFound(DigitalObjectResourceApi.MEMBERS_ITEM_PID,
                "Not member PIDs: " + notMembers.toString() + "\nMissing PIDs: " + missingPids.toString());
    }
}
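
checkSearchedMembers() uses a common HashSet idiom: copy one set, then removeAll() the other to get a one-sided difference, with toString() supplying a readable summary for the error text. A minimal sketch of the idiom (the names here are illustrative):

import java.util.Arrays;
import java.util.HashSet;

public class SetDifferenceSketch {
    public static void main(String[] args) {
        HashSet<String> requested = new HashSet<String>(Arrays.asList("a", "b", "c"));
        HashSet<String> found = new HashSet<String>(Arrays.asList("b", "c", "d"));

        HashSet<String> notFound = new HashSet<String>(requested); // copy, so the input survives
        notFound.removeAll(found); // requested but not found -> [a]

        HashSet<String> unexpected = new HashSet<String>(found);
        unexpected.removeAll(requested); // found but not requested -> [d]

        // toString() gives a readable summary for an error message
        System.out.println("Not found: " + notFound + "; unexpected: " + unexpected);
    }
}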

From source file:edu.usf.cutr.fdot7.main.Test.java

/**
 * Constructor of the main program. We use this to avoid static variables.
 */
public Test() {
    //initialize logger
    org.apache.log4j.BasicConfigurator.configure();

    _log.info("Please log-in to upload data.");
    SessionForm sf = new SessionForm();

    sf.showDialog();
    try {
        mutex.acquire();
    } catch (InterruptedException ie) {
        _log.error(ie.getMessage());
    }

    sf.dispose();

    if (mainUsername == null || mainPassword == null) {
        _log.error("You must log-in sucessfully before continuing.");
        _log.info("Exit Program!");
        System.exit(0);
    }

    boolean isInputError = false;
    HashSet<String> errorFeeds = new HashSet<String>();
    ArrayList<AgencyInfo> ais = new ArrayList<AgencyInfo>();
    ArrayList<ArrayList<GenericGtfsData>> gtfsAgenciesData = new ArrayList<ArrayList<GenericGtfsData>>();
    _log.info("Reading 'AgencyInfo.csv'");
    ais.addAll(readAgencyInfo(
            System.getProperty("user.dir") + System.getProperty("file.separator") + "AgencyInfo.csv"));
    _log.info(ais.size() + " GTFS feeds to be processed.");

    factory = new XmlBeanFactory(new FileSystemResource(
            System.getProperty("user.dir") + System.getProperty("file.separator") + "data-source.xml"));

    for (int i = 0; i < ais.size(); i++) {
        AgencyInfo ai = ais.get(i);
        try {
            ArrayList<GenericGtfsData> gtfsAgencyData = new ArrayList<GenericGtfsData>();
            gtfsAgencyData.addAll(getDataFromAgency(ai));
            gtfsAgenciesData.add(gtfsAgencyData);
        } catch (IOException e) {
            errorFeeds.add(ai.getName());
            _log.error("Error reading input from " + ai.getName());
            _log.error(e.getMessage());
            isInputError = true;
            continue;
        }
    }

    if (!isInputError) {
        _log.info("Complete checking and reading " + ais.size() + " GTFS feeds.");
        _log.info("Start to upload data.");
        uploadAgenciesData(gtfsAgenciesData, mainUsername, mainPassword);
    } else {
        _log.info("Please check agency dataset from " + errorFeeds.toString()
                + " again! No data will be uploaded.");
    }
}

From source file:org.wso2.carbon.integration.common.tests.utils.DistributionValidationTestUtils.java

public static void identifyDuplicateJars(List<File> jarFileListInDistribution, File distributionVersion,
        HashSet<String> distributionDuplicateJarList, ArrayList<String> unidentifiedVersionJars) {
    Iterator<File> itJarList = jarFileListInDistribution.iterator();
    ArrayList<String> tempArr = new ArrayList<String>();
    ArrayList<File> pathListForAddedJarToJarVersions = new ArrayList<File>();
    HashMap<String, String> jarVersions = new HashMap<String, String>();
    StringBuilder builder = new StringBuilder();
    Pattern numeric = Pattern.compile("[^0-9_.-]");
    Pattern nonNumeric = Pattern.compile("[a-zA-Z]");
    while (itJarList.hasNext()) {
        File jarFilePath = itJarList.next();
        String jarName = (jarFilePath).getName();
        if (!jarFilePath.getAbsolutePath().contains(distributionVersion.getName().replace(".zip", "")
                + File.separator + TEMP_DIRECTORY + File.separator)) {
            for (int letter = jarName.length() - 1; letter >= 0; letter--) {
                char singleChar = jarName.charAt(letter);
                Matcher matcher = numeric.matcher(Character.toString(singleChar));
                // Find all matches
                if (!matcher.find()) {
                    // Get the matching string
                    builder.append(singleChar);
                } else if (nonNumeric.matcher(Character.toString(singleChar)).find()) {
                    if (builder.length() > 1) {
                        tempArr.add(builder.toString());
                        builder.setLength(0);
                    } else {
                        builder.setLength(0);
                    }
                }
            }
            int max;
            int previousMax = 0;
            String[] version = new String[1];
            for (String element : tempArr) {
                max = element.length();
                if (max > previousMax) {
                    previousMax = max;
                    version[0] = element;
                }
            }
            tempArr.clear();
            if (version[0] != null) {
                String jar = jarName.split((StringUtils.reverse(version[0])))[0];
                if (jar.length() >= 2) {
                    if (jarVersions.containsKey(jar)) {
                        if (!jarVersions.get(jar).equals(jarName.split(jar)[1])) {
                            // removing patches - plugins duplication
                            if (distributionDuplicateJarList.toString().contains(jarName)) {
                                for (String itemDistributionDuplicateJarList : distributionDuplicateJarList) {
                                    if (itemDistributionDuplicateJarList.contains(jarName)
                                            && (itemDistributionDuplicateJarList.contains("patches")
                                                    || itemDistributionDuplicateJarList.contains("plugins"))) {
                                        if (!(jarFilePath.getAbsolutePath().contains("patches")
                                                || jarFilePath.getAbsolutePath().contains("plugins"))) {
                                            distributionDuplicateJarList.add(jarFilePath.getAbsolutePath());
                                        }
                                    }
                                }
                            } else {
                                distributionDuplicateJarList.add(jarFilePath.getAbsolutePath());
                            }
                            for (File pathListForAddedJarToJarVersion : pathListForAddedJarToJarVersions) {
                                String path = pathListForAddedJarToJarVersion.toString();
                                if (path.contains(jar + jarVersions.get(jar))) {
                                    distributionDuplicateJarList.add(path);
                                    break;
                                }
                            }
                        }
                    } else {
                        jarVersions.put(jar, jarName.split(jar)[1]);
                        pathListForAddedJarToJarVersions.add(jarFilePath);
                    }
                } else {
                    log.info("Unable to identify the version " + jar);
                    unidentifiedVersionJars.add(jarFilePath.getAbsolutePath());
                }
            } else {
                jarVersions.put(jarName, null);
                pathListForAddedJarToJarVersions.add(jarFilePath);
            }
        }
    }
}