Example usage for java.util.LinkedHashMap values()

List of usage examples for java.util.LinkedHashMap values()

Introduction

On this page you can find example usage of java.util.LinkedHashMap.values().

Prototype

public Collection<V> values() 

Document

Returns a Collection view of the values contained in this map.
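Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the projects on this page) showing the two properties most of these examples rely on: values() on a LinkedHashMap iterates in insertion order, and the returned Collection is a live view backed by the map.

import java.util.Collection;
import java.util.LinkedHashMap;

public class ValuesExample {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> map = new LinkedHashMap<>();
        map.put("first", 1);
        map.put("second", 2);
        map.put("third", 3);

        // The Collection view reflects the map's insertion order.
        Collection<Integer> values = map.values();
        System.out.println(values); // prints [1, 2, 3]

        // The view is live: removing a value also removes its mapping from the map.
        values.remove(2);
        System.out.println(map); // prints {first=1, third=3}
    }
}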

Usage

From source file:org.jamwiki.db.AnsiQueryHandler.java

/**
 * Returns role mappings for all users whose login contains the given fragment.
 */
public List<RoleMap> getRoleMapByLogin(String loginFragment) throws SQLException {
    if (StringUtils.isBlank(loginFragment)) {
        return new ArrayList<RoleMap>();
    }
    Connection conn = null;
    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        conn = DatabaseConnection.getConnection();
        stmt = conn.prepareStatement(STATEMENT_SELECT_AUTHORITIES_LOGIN);
        loginFragment = '%' + loginFragment.toLowerCase() + '%';
        stmt.setString(1, loginFragment);
        rs = stmt.executeQuery();
        LinkedHashMap<Integer, RoleMap> roleMaps = new LinkedHashMap<Integer, RoleMap>();
        while (rs.next()) {
            Integer userId = rs.getInt("wiki_user_id");
            RoleMap roleMap = new RoleMap();
            if (roleMaps.containsKey(userId)) {
                roleMap = roleMaps.get(userId);
            } else {
                roleMap.setUserId(userId);
                roleMap.setUserLogin(rs.getString("username"));
            }
            roleMap.addRole(rs.getString("authority"));
            roleMaps.put(userId, roleMap);
        }
        return new ArrayList<RoleMap>(roleMaps.values());
    } finally {
        DatabaseConnection.closeConnection(conn, stmt, rs);
    }
}
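The idiom above (accumulating into a LinkedHashMap keyed by user id, then copying values() into a list) collapses multiple result-set rows per user into a single RoleMap while preserving the order in which users were first returned. A stripped-down sketch of the same idea, using hypothetical types in place of JAMWiki's ResultSet handling:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class GroupRolesSketch {
    /** Collapses (userId, role) pairs into one role list per user, in first-seen order. */
    static List<List<String>> rolesGroupedByUser(List<Map.Entry<Integer, String>> rows) {
        LinkedHashMap<Integer, List<String>> byUser = new LinkedHashMap<>();
        for (Map.Entry<Integer, String> row : rows) {
            // computeIfAbsent keeps exactly one bucket per user id.
            byUser.computeIfAbsent(row.getKey(), id -> new ArrayList<>()).add(row.getValue());
        }
        // values() yields the buckets in the order the user ids were first inserted.
        return new ArrayList<>(byUser.values());
    }
}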

From source file:com.jsonstore.api.JSONStoreCollection.java

/**
 * Find documents in the collection that match the given query.
 *
 * @param query
 *             The find query that restricts the search.
 * @param options
 *             Additional options to modify the find operation.
 * @return   A list of documents.
 * @throws JSONStoreFindException
 *             An error occurred when trying to execute the find.
 * @throws JSONStoreFilterException
 *             An error occurred when trying to apply a filter to the query.
 * @throws JSONStoreDatabaseClosedException
 *             The JSONStore database is closed, and cannot be accessed to
 *             execute the find.
 */
public List<JSONObject> findDocuments(JSONStoreQueryParts query, JSONStoreFindOptions options)
        throws JSONStoreFindException, JSONStoreFilterException, JSONStoreDatabaseClosedException {
    JSONStoreAnalyticsLogInstance logInst = JSONStoreLogger.startAnalyticsInstance(getUsername(), getName(),
            JSONStoreLogger.OPERATION_FIND);
    try {
        getAccessor(); // This does some closed checks.
        if (options == null) {
            options = new JSONStoreFindOptions();
        }

        if (query == null) {
            query = new JSONStoreQueryParts();
        }

        QueryBuilderSelect selectQuery = new QueryBuilderSelect(this, query);
        selectQuery.setLimit(options.getLimit());
        selectQuery.setOffset(options.getOffset());
        selectQuery.setSort(options.getSort());

        if (options.shouldIncludeDeletedDocuments()) {
            selectQuery.setSearchIncludeDeleted();
        }

        LinkedHashMap<Integer, JSONObject> resultHash = new LinkedHashMap<Integer, JSONObject>();
        List<JSONObject> filterResults = new ArrayList<JSONObject>();

        // Set the fields to select in the query, if specified, otherwise default to _id and _json:
        Map<String, Boolean> filters = options.getSearchFilters();
        if (filters != null && filters.size() > 0) {
            for (String filter : filters.keySet()) {
                boolean isSpecial = filters.get(filter);
                selectQuery.addSelectStatement(filter, isSpecial);
            }
        } else {
            selectQuery.addSelectStatement(DatabaseConstants.FIELD_ID, false);
            selectQuery.addSelectStatement(DatabaseConstants.FIELD_JSON, false);
        }

        Cursor cursor = null;
        List<JSONObject> result = null;
        try {
            cursor = runQuery(selectQuery);
            if (cursor != null) {
                result = new LinkedList<JSONObject>();

                for (int j = 0; j < cursor.getCount(); ++j) {
                    JSONObject item = new JacksonSerializedJSONObject();

                    cursor.moveToNext();

                    for (int k = 0; k < cursor.getColumnNames().length; ++k) {
                        if (cursor.getColumnName(k).equals(DatabaseConstants.FIELD_ID)) {
                            item.put(cursor.getColumnName(k), cursor.getInt(k));
                        } else if (cursor.getColumnName(k).equals(DatabaseConstants.FIELD_JSON)) {
                            item.put(DatabaseConstants.FIELD_JSON,
                                    JsonOrgModule.deserializeJSONObject(cursor.getString(k)));
                        } else if (isJSONCreatedColumn(cursor.getColumnName(k))) {
                            item.put(cursor.getColumnName(k), cursor.getString(k));
                        } else {
                            item.put(cursor.getColumnName(k).replace("_", "."), cursor.getString(k)); //$NON-NLS-1$ //$NON-NLS-2$
                        }
                    }

                    result.add(item);
                }
            }
        } catch (Throwable e) {
            String message = "Error when attempting to find a document. An error occurred when reading from the database.";
            JSONStoreFindException jsException = new JSONStoreFindException(message, e);
            logger.logError(message, jsException);
            throw jsException;

        } finally {
            if (cursor != null)
                cursor.close();
        }

        if (options.getSearchFilters() != null) {
            for (JSONObject obj : result) {
                filterResults.add(obj);
            }
        } else {
            addNonDuplicates(resultHash, result);
        }

        List<JSONObject> results = null;

        if (options.getSearchFilters() != null) {
            results = removeFilterDuplicates(filterResults);
        } else {
            results = new ArrayList<JSONObject>(resultHash.values());
        }

        return results;
    } finally {
        logInst.end();
    }
}

From source file:org.finra.herd.service.helper.Hive13DdlGenerator.java

/**
 * Gets a list of Hive partitions. For single level partitioning, no auto-discovery of sub-partitions (sub-directories) is needed - the business object data
 * will be represented by a single Hive partition instance. For multiple level partitioning, this method performs an auto-discovery of all sub-partitions
 * (sub-directories) and creates a Hive partition object instance for each partition.
 *
 * @param businessObjectDataKey the business object data key
 * @param autoDiscoverableSubPartitionColumns the auto-discoverable sub-partition columns
 * @param s3KeyPrefix the S3 key prefix
 * @param storageFiles the storage files
 * @param storageName the storage name
 *
 * @return the list of Hive partitions
 */
public List<HivePartitionDto> getHivePartitions(BusinessObjectDataKey businessObjectDataKey,
        List<SchemaColumn> autoDiscoverableSubPartitionColumns, String s3KeyPrefix,
        Collection<String> storageFiles, String storageName) {
    // We are using linked hash map to preserve the order of the discovered partitions.
    LinkedHashMap<List<String>, HivePartitionDto> linkedHashMap = new LinkedHashMap<>();

    Pattern pattern = getHivePathPattern(autoDiscoverableSubPartitionColumns);
    for (String storageFile : storageFiles) {
        // Remove S3 key prefix from the file path. Please note that the storage files are already validated to start with S3 key prefix.
        String relativeFilePath = storageFile.substring(s3KeyPrefix.length());

        // Try to match the relative file path to the expected subpartition folders.
        Matcher matcher = pattern.matcher(relativeFilePath);
        Assert.isTrue(matcher.matches(), String.format(
                "Registered storage file or directory does not match the expected Hive sub-directory pattern. "
                        + "Storage: {%s}, file/directory: {%s}, business object data: {%s}, S3 key prefix: {%s}, pattern: {%s}",
                storageName, storageFile,
                businessObjectDataHelper.businessObjectDataKeyToString(businessObjectDataKey), s3KeyPrefix,
                pattern.pattern()));

        // Create a list of partition values.
        List<String> partitionValues = new ArrayList<>();

        // Add partition values per business object data key.
        partitionValues.add(businessObjectDataKey.getPartitionValue());
        partitionValues.addAll(businessObjectDataKey.getSubPartitionValues());

        // Extract relative partition values.
        for (int i = 1; i <= matcher.groupCount(); i++) {
            partitionValues.add(matcher.group(i));
        }

        // If we did not match all expected partition values, then this storage file path is not part
        // of a fully qualified partition (this is an intermediate folder marker) and we ignore it.
        if (!partitionValues.contains(null)) {
            // Get path for this partition by removing trailing "/" plus an optional file name from the relative file path,
            // or the trailing "_$folder$", which represents an empty partition in S3.
            String partitionPath = relativeFilePath.endsWith(S3_EMPTY_PARTITION)
                    ? relativeFilePath.substring(0, relativeFilePath.length() - S3_EMPTY_PARTITION.length())
                    : relativeFilePath.replaceAll("\\/[^/]*$", "");

            // Check if we already have that partition discovered - that would happen if partition contains multiple data files.
            HivePartitionDto hivePartition = linkedHashMap.get(partitionValues);

            if (hivePartition != null) {
                // Partition is already discovered, so just validate that the relative paths match.
                Assert.isTrue(hivePartition.getPath().equals(partitionPath), String.format(
                        "Found two different locations for the same Hive partition. Storage: {%s}, business object data: {%s}, "
                                + "S3 key prefix: {%s}, path[1]: {%s}, path[2]: {%s}",
                        storageName,
                        businessObjectDataHelper.businessObjectDataKeyToString(businessObjectDataKey),
                        s3KeyPrefix, hivePartition.getPath(), partitionPath));
            } else {
                // Add this partition to the hash map of discovered partitions.
                linkedHashMap.put(partitionValues, new HivePartitionDto(partitionPath, partitionValues));
            }
        }
    }

    List<HivePartitionDto> hivePartitions = new ArrayList<>();
    hivePartitions.addAll(linkedHashMap.values());

    return hivePartitions;
}
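A side note on the map above: the key type is List<String>, which works because java.util.List defines equals and hashCode over its elements, and the LinkedHashMap keeps partitions in discovery order so values() returns them in that same order. A minimal, hypothetical sketch of the same idea (the paths and values are made up for illustration):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;

class CompositeKeyExample {
    public static void main(String[] args) {
        // A list of partition values used directly as the map key.
        LinkedHashMap<List<String>, String> partitions = new LinkedHashMap<>();
        partitions.put(Arrays.asList("2024-01-01", "US"), "date=2024-01-01/country=US");
        partitions.put(Arrays.asList("2024-01-01", "CA"), "date=2024-01-01/country=CA");

        // An equal List (same elements, same order) finds the existing entry.
        System.out.println(partitions.containsKey(Arrays.asList("2024-01-01", "US"))); // true

        // values() yields the partition paths in discovery order.
        List<String> paths = new ArrayList<>(partitions.values());
        System.out.println(paths); // prints [date=2024-01-01/country=US, date=2024-01-01/country=CA]
    }
}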

From source file:org.apache.axis2.context.MessageContext.java

/**
 * Flatten the handler list into just unique handler instances
 * including phase instances.
 *
 * @param list the list of handlers/phases
 * @param map  users should pass null as this is just a holder for the recursion
 * @return a list of unique object instances
 */
private ArrayList<Handler> flattenHandlerList(List<Handler> list, LinkedHashMap<String, Handler> map) {

    if (map == null) {
        map = new LinkedHashMap<String, Handler>();
    }

    Iterator<Handler> it = list.iterator();
    while (it.hasNext()) {
        Handler handler = (Handler) it.next();

        String key = null;
        if (handler != null) {
            key = handler.getClass().getName() + "@" + handler.hashCode();
        }

        if (handler instanceof Phase) {
            // put the phase in the list
            map.put(key, handler);

            // add its handlers to the list
            flattenHandlerList(((Phase) handler).getHandlers(), map);
        } else {
            // if the same object is already in the list,
            // then it won't be in the list multiple times
            map.put(key, handler);
        }
    }

    return new ArrayList<Handler>(map.values());
}
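The key the method builds (class name plus hash code) makes the LinkedHashMap act as an order-preserving set of handler instances: re-putting an entry for the same handler overwrites it rather than duplicating it, so values() yields each handler once, in the order first encountered. A small sketch of that behaviour with plain strings (hypothetical, not Axis2 code):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

class DedupPreservingOrder {
    public static void main(String[] args) {
        String[] input = {"security", "logging", "security", "dispatch", "logging"};

        LinkedHashMap<String, String> seen = new LinkedHashMap<>();
        for (String handler : input) {
            // Re-putting an existing key replaces the value but keeps the original position.
            seen.put(handler, handler);
        }

        List<String> flattened = new ArrayList<>(seen.values());
        System.out.println(flattened); // prints [security, logging, dispatch]
    }
}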

From source file:org.jamwiki.db.AnsiQueryHandler.java

/**
 * Returns role mappings for the users and groups that hold the given authority.
 */
public List<RoleMap> getRoleMapByRole(String authority, boolean includeInheritedRoles) throws SQLException {
    Connection conn = null;
    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        conn = DatabaseConnection.getConnection();
        if (includeInheritedRoles) {
            stmt = conn.prepareStatement(STATEMENT_SELECT_AUTHORITIES_AUTHORITY_ALL);
            stmt.setString(1, authority);
            stmt.setString(2, authority);
            stmt.setString(3, authority);
            stmt.setString(4, authority);
        } else {
            stmt = conn.prepareStatement(STATEMENT_SELECT_AUTHORITIES_AUTHORITY);
            stmt.setString(1, authority);
            stmt.setString(2, authority);
        }
        rs = stmt.executeQuery();
        LinkedHashMap<String, RoleMap> roleMaps = new LinkedHashMap<String, RoleMap>();
        while (rs.next()) {
            int userId = rs.getInt("wiki_user_id");
            int groupId = rs.getInt("group_id");
            RoleMap roleMap = new RoleMap();
            String key = userId + "|" + groupId;
            if (roleMaps.containsKey(key)) {
                roleMap = roleMaps.get(key);
            } else {
                if (userId > 0) {
                    roleMap.setUserId(userId);
                    roleMap.setUserLogin(rs.getString("username"));
                }
                if (groupId > 0) {
                    roleMap.setGroupId(groupId);
                    roleMap.setGroupName(rs.getString("group_name"));
                }
            }
            String roleName = rs.getString("authority");
            if (roleName != null) {
                roleMap.addRole(roleName);
            }
            // roleMap.addRole(rs.getString("authority"));
            roleMaps.put(key, roleMap);
        }
        return new ArrayList<RoleMap>(roleMaps.values());
    } finally {
        DatabaseConnection.closeConnection(conn, stmt, rs);
    }
}

From source file:org.apache.hadoop.hive.ql.exec.DDLTask.java

/**
 * Rename a partition in a table.
 *
 * @param db
 *          Database in which to rename the partition.
 * @param renamePartitionDesc
 *          Descriptor mapping the old partition spec to the new one.
 * @return Returns 0 when execution succeeds and above 0 if it fails.
 * @throws HiveException
 */
private int renamePartition(Hive db, RenamePartitionDesc renamePartitionDesc) throws HiveException {
    String tableName = renamePartitionDesc.getTableName();
    LinkedHashMap<String, String> oldPartSpec = renamePartitionDesc.getOldPartSpec();

    if (!allowOperationInReplicationScope(db, tableName, oldPartSpec,
            renamePartitionDesc.getReplicationSpec())) {
        // No rename: the table is missing (due to a drop/rename that follows the current rename),
        // or the existing table is newer than our update.
        LOG.debug("DDLTask: Rename Partition is skipped as table {} / partition {} is newer than update",
                tableName, FileUtils.makePartName(new ArrayList<String>(oldPartSpec.keySet()),
                        new ArrayList<String>(oldPartSpec.values())));
        return 0;
    }

    Table tbl = db.getTable(tableName);
    Partition oldPart = db.getPartition(tbl, oldPartSpec, false);
    if (oldPart == null) {
        String partName = FileUtils.makePartName(new ArrayList<String>(oldPartSpec.keySet()),
                new ArrayList<String>(oldPartSpec.values()));
        throw new HiveException("Rename partition: source partition [" + partName + "] does not exist.");
    }
    Partition part = db.getPartition(tbl, oldPartSpec, false);
    part.setValues(renamePartitionDesc.getNewPartSpec());
    db.renamePartition(tbl, oldPartSpec, part);
    Partition newPart = db.getPartition(tbl, renamePartitionDesc.getNewPartSpec(), false);
    work.getInputs().add(new ReadEntity(oldPart));
    // We've already obtained a lock on the table, don't lock the partition too
    addIfAbsentByName(new WriteEntity(newPart, WriteEntity.WriteType.DDL_NO_LOCK));
    return 0;
}
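FileUtils.makePartName pairs oldPartSpec.keySet() with oldPartSpec.values(); this relies on the fact that a LinkedHashMap's keySet() and values() views both iterate in insertion order, so the i-th column name lines up with the i-th value. A minimal sketch of that alignment (makePartName itself is Hive's; the loop below is a hypothetical stand-in):

import java.util.Iterator;
import java.util.LinkedHashMap;

class PartNameExample {
    public static void main(String[] args) {
        LinkedHashMap<String, String> partSpec = new LinkedHashMap<>();
        partSpec.put("ds", "2024-01-01");
        partSpec.put("hr", "14");

        // keySet() and values() iterate in the same (insertion) order, so zipping them is safe.
        StringBuilder partName = new StringBuilder();
        Iterator<String> values = partSpec.values().iterator();
        for (String column : partSpec.keySet()) {
            if (partName.length() > 0) {
                partName.append('/');
            }
            partName.append(column).append('=').append(values.next());
        }
        System.out.println(partName); // prints ds=2024-01-01/hr=14
    }
}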

From source file:org.apache.axis2.context.MessageContext.java

/**
 * Flatten the phase list into a list of just unique handler instances
 *
 * @param list the list of handlers
 * @param map  users should pass null as this is just a holder for the recursion
 * @return a list of unique object instances
 */
private ArrayList<Handler> flattenPhaseListToHandlers(ArrayList<Handler> list,
        LinkedHashMap<String, Handler> map) {

    if (map == null) {
        map = new LinkedHashMap<String, Handler>();
    }

    Iterator<Handler> it = list.iterator();
    while (it.hasNext()) {
        Handler handler = (Handler) it.next();

        String key = null;
        if (handler != null) {
            key = handler.getClass().getName() + "@" + handler.hashCode();
        }

        if (handler instanceof Phase) {
            // add its handlers to the list
            flattenHandlerList(((Phase) handler).getHandlers(), map);
        } else {
            // if the same object is already in the list,
            // then it won't be in the list multiple times
            map.put(key, handler);
        }
    }

    if (DEBUG_ENABLED && log.isTraceEnabled()) {
        Iterator<String> it2 = map.keySet().iterator();
        while (it2.hasNext()) {
            Object key = it2.next();
            Handler value = (Handler) map.get(key);
            String name = value.getName();
            log.trace(getLogIDString() + ":flattenPhaseListToHandlers():  key [" + key + "]    handler name ["
                    + name + "]");
        }
    }

    return new ArrayList<Handler>(map.values());
}

From source file:com.bytelightning.opensource.pokerface.PokerFace.java

/**
 * Configures all the needed components, but does not actually start the server.
 * @param config   Contains all information needed to fully wire up the http, https, and httpclient components of this reverse proxy.
 * @throws Exception   Yeah, a lot can go wrong here, but at least it will be caught immediately :-)
 */
public void config(HierarchicalConfiguration config) throws Exception {
    List<HierarchicalConfiguration> lconf;
    HttpAsyncRequester executor = null;
    BasicNIOConnPool connPool = null;
    ObjectPool<ByteBuffer> byteBufferPool = null;
    LinkedHashMap<String, TargetDescriptor> mappings = null;
    ConcurrentMap<String, HttpHost> hosts = null;

    handlerRegistry = new UriHttpAsyncRequestHandlerMapper();

    // Initialize the keystore (if one was specified)
    KeyStore keystore = null;
    char[] keypass = null;
    String keystoreUri = config.getString("keystore");
    if ((keystoreUri != null) && (keystoreUri.trim().length() > 0)) {
        Path keystorePath = Utils.MakePath(keystoreUri);
        if (!Files.exists(keystorePath))
            throw new ConfigurationException("Keystore does not exist.");
        if (Files.isDirectory(keystorePath))
            throw new ConfigurationException("Keystore is not a file");
        String storepass = config.getString("storepass");
        if ((storepass != null) && "null".equals(storepass))
            storepass = null;
        keystore = KeyStore.getInstance(KeyStore.getDefaultType());
        try (InputStream keyStoreStream = Files.newInputStream(keystorePath)) {
            keystore.load(keyStoreStream, storepass == null ? null : storepass.trim().toCharArray());
        } catch (IOException ex) {
            Logger.error("Unable to load https server keystore from " + keystoreUri);
            return;
        }
        keypass = config.getString("keypass").trim().toCharArray();
    }

    // Wire up the listening reactor
    lconf = config.configurationsAt("server");
    if ((lconf == null) || (lconf.size() != 1))
        throw new ConfigurationException("One (and only one) server configuration element is allowed.");
    else {
        Builder builder = IOReactorConfig.custom();
        builder.setIoThreadCount(ComputeReactorProcessors(config.getDouble("server[@cpu]", 0.667)));
        builder.setSoTimeout(config.getInt("server[@soTimeout]", 0));
        builder.setSoLinger(config.getInt("server[@soLinger]", -1));
        builder.setSoReuseAddress(true);
        builder.setTcpNoDelay(false);
        builder.setSelectInterval(100);

        IOReactorConfig rconfig = builder.build();
        Logger.info("Configuring server with options: " + rconfig.toString());
        listeningReactor = new DefaultListeningIOReactor(rconfig);

        lconf = config.configurationsAt("server.listen");
        InetSocketAddress addr;
        boolean hasNonWildcardSecure = false;
        LinkedHashMap<SocketAddress, SSLContext> addrSSLContext = new LinkedHashMap<SocketAddress, SSLContext>();
        if ((lconf == null) || (lconf.size() == 0)) {
            addr = new InetSocketAddress("127.0.0.1", 8080);
            ListenerEndpoint ep = listeningReactor.listen(addr);
            Logger.warn("Configured " + ep.getAddress());
        } else {
            TrustManager[] trustManagers = null;
            KeyManagerFactory kmf = null;
            // Create all the specified listeners.
            for (HierarchicalConfiguration hc : lconf) {
                String addrStr = hc.getString("[@address]");
                if ((addrStr == null) || (addrStr.length() == 0))
                    addrStr = "0.0.0.0";
                String alias = hc.getString("[@alias]");
                int port = hc.getInt("[@port]", alias != null ? 443 : 80);
                addr = new InetSocketAddress(addrStr, port);
                ListenerEndpoint ep = listeningReactor.listen(addr);
                String protocol = hc.containsKey("[@protocol]") ? hc.getString("[@protocol]") : null;
                Boolean secure = hc.containsKey("[@secure]") ? hc.getBoolean("[@secure]") : null;
                if ((alias != null) && (secure == null))
                    secure = true;
                if ((protocol != null) && (secure == null))
                    secure = true;
                if ((secure != null) && secure) {
                    if (protocol == null)
                        protocol = "TLS";
                    if (keystore == null)
                        throw new ConfigurationException(
                                "An https listening socket was requested, but no keystore was specified.");
                    if (kmf == null) {
                        kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
                        kmf.init(keystore, keypass);
                    }
                    // Are we going to trust all clients or just specific ones?
                    if (hc.getBoolean("[@trustAny]", true))
                        trustManagers = new TrustManager[] { new X509TrustAllManager() };
                    else {
                        TrustManagerFactory instance = TrustManagerFactory
                                .getInstance(TrustManagerFactory.getDefaultAlgorithm());
                        instance.init(keystore);
                        trustManagers = instance.getTrustManagers();
                    }
                    KeyManager[] keyManagers = kmf.getKeyManagers();
                    if (alias != null)
                        for (int i = 0; i < keyManagers.length; i++) {
                            if (keyManagers[i] instanceof X509ExtendedKeyManager)
                                keyManagers[i] = new PokerFaceKeyManager(alias,
                                        (X509ExtendedKeyManager) keyManagers[i]);
                        }
                    SSLContext sslCtx = SSLContext.getInstance(protocol);
                    sslCtx.init(keyManagers, trustManagers, new SecureRandom());
                    if (addr.getAddress().isAnyLocalAddress()) {
                        // This little optimization helps us respond faster for every connection as we don't have to extrapolate a local connection address to wild card.
                        for (Enumeration<NetworkInterface> en = NetworkInterface.getNetworkInterfaces(); en
                                .hasMoreElements();) {
                            NetworkInterface intf = en.nextElement();
                            for (Enumeration<InetAddress> enumIpAddr = intf.getInetAddresses(); enumIpAddr
                                    .hasMoreElements();) {
                                addr = new InetSocketAddress(enumIpAddr.nextElement(), port);
                                addrSSLContext.put(addr, sslCtx);
                            }
                        }
                    } else {
                        addrSSLContext.put(addr, sslCtx);
                        hasNonWildcardSecure = true;
                    }
                }
                Logger.warn("Configured " + (alias == null ? "" : (protocol + " on")) + ep.getAddress());
            }
        }
        // We will need an HTTP protocol processor for the incoming connections
        String serverAgent = config.getString("server.serverAgent", "PokerFace/" + Utils.Version);
        HttpProcessor inhttpproc = new ImmutableHttpProcessor(
                new HttpResponseInterceptor[] { new ResponseDateInterceptor(), new ResponseServer(serverAgent),
                        new ResponseContent(), new ResponseConnControl() });
        HttpAsyncService serviceHandler = new HttpAsyncService(inhttpproc, new DefaultConnectionReuseStrategy(),
                null, handlerRegistry, null) {
            public void exception(final NHttpServerConnection conn, final Exception cause) {
                Logger.warn(cause.getMessage());
                super.exception(conn, cause);
            }
        };
        if (addrSSLContext.size() > 0) {
            final SSLContext defaultCtx = addrSSLContext.values().iterator().next();
            final Map<SocketAddress, SSLContext> sslMap;
            if ((!hasNonWildcardSecure) || (addrSSLContext.size() == 1))
                sslMap = null;
            else
                sslMap = addrSSLContext;
            listeningDispatcher = new DefaultHttpServerIODispatch(serviceHandler,
                    new SSLNHttpServerConnectionFactory(defaultCtx, null, ConnectionConfig.DEFAULT) {
                        protected SSLIOSession createSSLIOSession(IOSession iosession, SSLContext sslcontext,
                                SSLSetupHandler sslHandler) {
                            SSLIOSession retVal;
                            SSLContext sktCtx = sslcontext;
                            if (sslMap != null) {
                                SocketAddress la = iosession.getLocalAddress();
                                if (la != null) {
                                    sktCtx = sslMap.get(la);
                                    if (sktCtx == null)
                                        sktCtx = sslcontext;
                                }
                                retVal = new SSLIOSession(iosession, SSLMode.SERVER, sktCtx, sslHandler);
                            } else
                                retVal = super.createSSLIOSession(iosession, sktCtx, sslHandler);
                            if (sktCtx != null)
                                retVal.setAttribute("com.bytelightning.opensource.pokerface.secure", true);
                            return retVal;
                        }
                    });
        } else
            listeningDispatcher = new DefaultHttpServerIODispatch(serviceHandler, ConnectionConfig.DEFAULT);
    }

    // Configure the httpclient reactor that will be used to do reverse proxying to the specified targets.
    lconf = config.configurationsAt("targets");
    if ((lconf != null) && (lconf.size() > 0)) {
        HierarchicalConfiguration conf = lconf.get(0);
        Builder builder = IOReactorConfig.custom();
        builder.setIoThreadCount(ComputeReactorProcessors(config.getDouble("targets[@cpu]", 0.667)));
        builder.setSoTimeout(conf.getInt("targets[@soTimeout]", 0));
        builder.setSoLinger(config.getInt("targets[@soLinger]", -1));
        builder.setConnectTimeout(conf.getInt("targets[@connectTimeout]", 0));
        builder.setSoReuseAddress(true);
        builder.setTcpNoDelay(false);
        connectingReactor = new DefaultConnectingIOReactor(builder.build());

        final int bufferSize = conf.getInt("targets[@bufferSize]", 1024) * 1024;
        byteBufferPool = new SoftReferenceObjectPool<ByteBuffer>(new BasePooledObjectFactory<ByteBuffer>() {
            @Override
            public ByteBuffer create() throws Exception {
                return ByteBuffer.allocateDirect(bufferSize);
            }

            @Override
            public PooledObject<ByteBuffer> wrap(ByteBuffer buffer) {
                return new DefaultPooledObject<ByteBuffer>(buffer);
            }
        });

        KeyManager[] keyManagers = null;
        TrustManager[] trustManagers = null;

        if (keystore != null) {
            KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
            kmf.init(keystore, keypass);
            keyManagers = kmf.getKeyManagers();
        }
        // Decide whether the httpclients will trust any remote target, or only specific ones.
        if (conf.getBoolean("targets[@trustAny]", false))
            trustManagers = new TrustManager[] { new X509TrustAllManager() };
        else if (keystore != null) {
            TrustManagerFactory instance = TrustManagerFactory
                    .getInstance(TrustManagerFactory.getDefaultAlgorithm());
            instance.init(keystore);
            trustManagers = instance.getTrustManagers();
        }
        SSLContext clientSSLContext = SSLContext.getInstance(conf.getString("targets[@protocol]", "TLS"));
        clientSSLContext.init(keyManagers, trustManagers, new SecureRandom());

        // Setup an SSL capable connection pool for the httpclients.
        connPool = new BasicNIOConnPool(connectingReactor,
                new BasicNIOConnFactory(clientSSLContext, null, ConnectionConfig.DEFAULT),
                conf.getInt("targets[@connectTimeout]", 0));
        connPool.setMaxTotal(conf.getInt("targets[@connMaxTotal]", 1023));
        connPool.setDefaultMaxPerRoute(conf.getInt("targets[@connMaxPerRoute]", 1023));

        // Set up HTTP protocol processor for outgoing connections
        String userAgent = conf.getString("targets.userAgent", "PokerFace/" + Utils.Version);
        HttpProcessor outhttpproc = new ImmutableHttpProcessor(new HttpRequestInterceptor[] {
                new RequestContent(), new RequestTargetHost(), new RequestConnControl(),
                new RequestUserAgent(userAgent), new RequestExpectContinue(true) });
        executor = new HttpAsyncRequester(outhttpproc, new DefaultConnectionReuseStrategy());

        // Now set up all the configured targets.
        mappings = new LinkedHashMap<String, TargetDescriptor>();
        hosts = new ConcurrentHashMap<String, HttpHost>();
        String[] scheme = { null };
        String[] host = { null };
        int[] port = { 0 };
        String[] path = { null };
        int[] stripPrefixCount = { 0 };
        for (HierarchicalConfiguration targetConfig : conf.configurationsAt("target")) {
            String match = targetConfig.getString("[@pattern]");
            if ((match == null) || (match.trim().length() < 1)) {
                Logger.error("Unable to configure target;  Invalid url match pattern");
                continue;
            }
            String key = RequestForTargetConsumer.UriToTargetKey(targetConfig.getString("[@url]"), scheme, host,
                    port, path, stripPrefixCount);
            if (key == null) {
                Logger.error("Unable to configure target");
                continue;
            }
            HttpHost targetHost = hosts.get(key);
            if (targetHost == null) {
                targetHost = new HttpHost(host[0], port[0], scheme[0]);
                hosts.put(key, targetHost);
            }
            TargetDescriptor desc = new TargetDescriptor(targetHost, path[0], stripPrefixCount[0]);
            mappings.put(match, desc);
        }
        connectionDispatcher = new DefaultHttpClientIODispatch(new HttpAsyncRequestExecutor(),
                ConnectionConfig.DEFAULT);
    }
    // Allocate the script map, which will be populated by its own executor thread.
    if (config.containsKey("scripts.rootDirectory")) {
        Path tmp = Utils.MakePath(config.getProperty("scripts.rootDirectory"));
        if (!Files.exists(tmp))
            throw new FileNotFoundException("Scripts directory does not exist.");
        if (!Files.isDirectory(tmp))
            throw new FileNotFoundException("'scripts' path is not a directory.");
        scripts = new ConcurrentSkipListMap<String, ScriptObjectMirror>();
        boolean watch = config.getBoolean("scripts.dynamicWatch", false);
        List<Path> jsLibs;
        Object prop = config.getProperty("scripts.library");
        if (prop != null) {
            jsLibs = new ArrayList<Path>();
            if (prop instanceof Collection<?>) {
                @SuppressWarnings("unchecked")
                Collection<Object> oprop = (Collection<Object>) prop;
                for (Object obj : oprop)
                    jsLibs.add(Utils.MakePath(obj));
            } else {
                jsLibs.add(Utils.MakePath(prop));
            }
        } else
            jsLibs = null;

        lconf = config.configurationsAt("scripts.scriptConfig");
        if (lconf != null) {
            if (lconf.size() > 1)
                throw new ConfigurationException("Only one scriptConfig element is allowed.");
            if (lconf.size() == 0)
                lconf = null;
        }

        HierarchicalConfiguration scriptConfig;
        if (lconf == null)
            scriptConfig = new HierarchicalConfiguration();
        else
            scriptConfig = lconf.get(0);
        scriptConfig.setProperty("pokerface.scripts.rootDirectory", tmp.toString());

        configureScripts(jsLibs, scriptConfig, tmp, watch);
        if (watch)
            ScriptDirectoryWatcher = new DirectoryWatchService();
    }

    // Configure the static file directory (if any)
    Path staticFilesPath = null;
    if (config.containsKey("files.rootDirectory")) {
        Path tmp = Utils.MakePath(config.getProperty("files.rootDirectory"));
        if (!Files.exists(tmp))
            throw new FileNotFoundException("Files directory does not exist.");
        if (!Files.isDirectory(tmp))
            throw new FileNotFoundException("'files' path is not a directory.");
        staticFilesPath = tmp;
        List<HierarchicalConfiguration> mimeEntries = config.configurationsAt("files.mime-entry");
        if (mimeEntries != null) {
            for (HierarchicalConfiguration entry : mimeEntries) {
                entry.setDelimiterParsingDisabled(true);
                String type = entry.getString("[@type]", "").trim();
                if (type.length() == 0)
                    throw new ConfigurationException("Invalid mime type entry");
                String extensions = entry.getString("[@extensions]", "").trim();
                if (extensions.length() == 0)
                    throw new ConfigurationException("Invalid mime extensions for: " + type);
                ScriptHelperImpl.AddMimeEntry(type, extensions);
            }
        }
    }

    handlerRegistry.register("/*",
            new RequestHandler(executor, connPool, byteBufferPool, staticFilesPath, mappings,
                    scripts != null ? Collections.unmodifiableNavigableMap(scripts) : null,
                    config.getBoolean("scripts.allowScriptsToSpecifyDynamicHosts", false) ? hosts : null));
}

From source file:org.cerberus.servlet.crud.testexecution.ReadTestCaseExecution.java

private AnswerItem findExecutionListByTag(ApplicationContext appContext, HttpServletRequest request, String Tag)
        throws CerberusException, ParseException, JSONException {
    AnswerItem answer = new AnswerItem(new MessageEvent(MessageEventEnum.DATA_OPERATION_OK));
    testCaseLabelService = appContext.getBean(ITestCaseLabelService.class);

    int startPosition = Integer
            .valueOf(ParameterParserUtil.parseStringParam(request.getParameter("iDisplayStart"), "0"));
    int length = Integer
            .valueOf(ParameterParserUtil.parseStringParam(request.getParameter("iDisplayLength"), "0"));

    String searchParameter = ParameterParserUtil.parseStringParam(request.getParameter("sSearch"), "");
    String sColumns = ParameterParserUtil.parseStringParam(request.getParameter("sColumns"),
            "test,testCase,application,priority,status,description,bugId,function");
    String columnToSort[] = sColumns.split(",");

    //Get Sorting information
    int numberOfColumnToSort = Integer
            .parseInt(ParameterParserUtil.parseStringParam(request.getParameter("iSortingCols"), "1"));
    int columnToSortParameter = 0;
    String sort = "asc";
    StringBuilder sortInformation = new StringBuilder();
    for (int c = 0; c < numberOfColumnToSort; c++) {
        columnToSortParameter = Integer
                .parseInt(ParameterParserUtil.parseStringParam(request.getParameter("iSortCol_" + c), "0"));
        sort = ParameterParserUtil.parseStringParam(request.getParameter("sSortDir_" + c), "asc");
        String columnName = columnToSort[columnToSortParameter];
        sortInformation.append(columnName).append(" ").append(sort);

        if (c != numberOfColumnToSort - 1) {
            sortInformation.append(" , ");
        }
    }

    Map<String, List<String>> individualSearch = new HashMap<String, List<String>>();
    for (int a = 0; a < columnToSort.length; a++) {
        if (null != request.getParameter("sSearch_" + a) && !request.getParameter("sSearch_" + a).isEmpty()) {
            List<String> search = new ArrayList(Arrays.asList(request.getParameter("sSearch_" + a).split(",")));
            individualSearch.put(columnToSort[a], search);
        }
    }

    List<TestCaseExecution> testCaseExecutions = readExecutionByTagList(appContext, Tag, startPosition, length,
            sortInformation.toString(), searchParameter, individualSearch);

    JSONArray executionList = new JSONArray();
    JSONObject statusFilter = getStatusList(request);
    JSONObject countryFilter = getCountryList(request, appContext);
    LinkedHashMap<String, JSONObject> ttc = new LinkedHashMap<String, JSONObject>();

    String globalStart = "";
    String globalEnd = "";
    String globalStatus = "Finished";

    /**
     * Find the list of labels
     */
    AnswerList testCaseLabelList = testCaseLabelService.readByTestTestCase(null, null);

    for (TestCaseExecution testCaseExecution : testCaseExecutions) {
        try {
            if (testCaseExecution.getStart() != 0) {
                if ((globalStart.isEmpty())
                        || (globalStart.compareTo(String.valueOf(testCaseExecution.getStart())) > 0)) {
                    globalStart = String.valueOf(testCaseExecution.getStart());
                }
            }
            if (testCaseExecution.getEnd() != 0) {
                if ((globalEnd.isEmpty())
                        || (globalEnd.compareTo(String.valueOf(testCaseExecution.getEnd())) < 0)) {
                    globalEnd = String.valueOf(testCaseExecution.getEnd());
                }
            }
            if (testCaseExecution.getControlStatus().equalsIgnoreCase("PE")) {
                globalStatus = "Pending...";
            }
            String controlStatus = testCaseExecution.getControlStatus();
            if (statusFilter.get(controlStatus).equals("on")
                    && countryFilter.get(testCaseExecution.getCountry()).equals("on")) {
                JSONObject execution = testCaseExecutionToJSONObject(testCaseExecution);
                String execKey = testCaseExecution.getEnvironment() + " " + testCaseExecution.getCountry() + " "
                        + testCaseExecution.getBrowser();
                String testCaseKey = testCaseExecution.getTest() + "_" + testCaseExecution.getTestCase();
                JSONObject execTab = new JSONObject();

                executionList.put(testCaseExecutionToJSONObject(testCaseExecution));
                JSONObject ttcObject = new JSONObject();

                if (ttc.containsKey(testCaseKey)) {
                    ttcObject = ttc.get(testCaseKey);
                    execTab = ttcObject.getJSONObject("execTab");
                    execTab.put(execKey, execution);
                    ttcObject.put("execTab", execTab);
                } else {
                    ttcObject.put("test", testCaseExecution.getTest());
                    ttcObject.put("testCase", testCaseExecution.getTestCase());
                    ttcObject.put("function", testCaseExecution.getTestCaseObj().getFunction());
                    ttcObject.put("shortDesc", testCaseExecution.getTestCaseObj().getDescription());
                    ttcObject.put("status", testCaseExecution.getStatus());
                    ttcObject.put("application", testCaseExecution.getApplication());
                    ttcObject.put("priority", testCaseExecution.getTestCaseObj().getPriority());
                    ttcObject.put("bugId",
                            new JSONObject(
                                    "{\"bugId\":\"" + testCaseExecution.getTestCaseObj().getBugID()
                                            + "\",\"bugTrackerUrl\":\""
                                            + testCaseExecution.getApplicationObj().getBugTrackerUrl().replace(
                                                    "%BUGID%", testCaseExecution.getTestCaseObj().getBugID())
                                            + "\"}"));
                    ttcObject.put("comment", testCaseExecution.getTestCaseObj().getComment());
                    execTab.put(execKey, execution);
                    ttcObject.put("execTab", execTab);

                    /**
                     * Iterate over the retrieved labels and generate a HashMap
                     * keyed by Test_TestCase
                     */
                    LinkedHashMap<String, JSONArray> testCaseWithLabel = new LinkedHashMap<>();
                    for (TestCaseLabel label : (List<TestCaseLabel>) testCaseLabelList.getDataList()) {
                        String key = label.getTest() + "_" + label.getTestcase();

                        if (testCaseWithLabel.containsKey(key)) {
                            JSONObject jo = new JSONObject().put("name", label.getLabel().getLabel())
                                    .put("color", label.getLabel().getColor())
                                    .put("description", label.getLabel().getDescription());
                            testCaseWithLabel.get(key).put(jo);
                        } else {
                            JSONObject jo = new JSONObject().put("name", label.getLabel().getLabel())
                                    .put("color", label.getLabel().getColor())
                                    .put("description", label.getLabel().getDescription());
                            testCaseWithLabel.put(key, new JSONArray().put(jo));
                        }
                    }
                    ttcObject.put("labels", testCaseWithLabel
                            .get(testCaseExecution.getTest() + "_" + testCaseExecution.getTestCase()));
                }
                ttc.put(testCaseExecution.getTest() + "_" + testCaseExecution.getTestCase(), ttcObject);
            }
        } catch (JSONException ex) {
            Logger.getLogger(ReadTestCaseExecution.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    JSONObject jsonResponse = new JSONObject();

    jsonResponse.put("globalEnd", globalEnd.toString());
    jsonResponse.put("globalStart", globalStart.toString());
    jsonResponse.put("globalStatus", globalStatus);

    jsonResponse.put("testList", ttc.values());
    jsonResponse.put("iTotalRecords", ttc.size());
    jsonResponse.put("iTotalDisplayRecords", ttc.size());

    answer.setItem(jsonResponse);
    answer.setResultMessage(new MessageEvent(MessageEventEnum.DATA_OPERATION_OK));
    return answer;
}