com.glaf.core.jdbc.connection.DruidConnectionProvider.java Source code

Introduction

Here is the source code for com.glaf.core.jdbc.connection.DruidConnectionProvider.java, a ConnectionProvider implementation that obtains JDBC connections from an Alibaba Druid connection pool.

Source

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.glaf.core.jdbc.connection;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.Properties;

import javax.sql.DataSource;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidDataSourceFactory;
import com.glaf.core.config.BaseConfiguration;
import com.glaf.core.config.Configuration;
import com.glaf.core.util.PropertiesHelper;
import com.glaf.core.util.ReflectUtils;

/**
 * ConnectionProvider backed by the Alibaba Druid connection pool. Example
 * pool properties:
 *
 *   druid.minPoolSize=5
 *   druid.maxPoolSize=50
 *   druid.initialPoolSize=5
 *   druid.acquireIncrement=1
 *   druid.maxWait=600
 *   druid.idleConnectionTestPeriod=120
 */
public class DruidConnectionProvider implements ConnectionProvider {

    private static final Log log = LogFactory.getLog(DruidConnectionProvider.class);

    protected static Configuration conf = BaseConfiguration.create();

    private volatile DruidDataSource ds;

    private volatile Integer isolation;

    private volatile boolean autocommit;

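    /**
     * Shuts down the Druid pool. Failures are logged as a warning rather
     * than rethrown.
     */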
    public void close() {
        try {
            ds.close();
        } catch (Exception sqle) {
            log.warn("could not destroy Druid connection pool", sqle);
        }
    }

    public void closeConnection(Connection conn) throws SQLException {
        conn.close();
    }

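    /**
     * Builds and initializes the underlying DruidDataSource from the supplied
     * properties. The jdbc.driver, jdbc.url, jdbc.user, jdbc.password,
     * jdbc.autocommit and jdbc.isolation keys are read directly; pool tuning
     * keys come from ConnectionConstants, with defaults applied below when a
     * value is missing.
     */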
    public void configure(Properties props) {
        Properties properties = new Properties();
        properties.putAll(props);

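        // Copy every "druid."-prefixed key a second time with the prefix
        // stripped, so later lookups can use the bare property names.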
        for (Iterator<Object> ii = props.keySet().iterator(); ii.hasNext();) {
            String key = (String) ii.next();
            if (key.startsWith("druid.")) {
                String newKey = key.substring(6);
                properties.put(newKey, props.get(key));
            }
        }

        Properties connectionProps = ConnectionProviderFactory.getConnectionProperties(properties);
        log.info("Connection properties: " + PropertiesHelper.maskOut(connectionProps, "password"));

        String jdbcDriverClass = properties.getProperty("jdbc.driver");
        String jdbcUrl = properties.getProperty("jdbc.url");

        log.info("Druid using driver: " + jdbcDriverClass + " at URL: " + jdbcUrl);

        autocommit = PropertiesHelper.getBoolean("jdbc.autocommit", properties);
        log.info("autocommit mode: " + autocommit);

        if (jdbcDriverClass == null) {
            log.warn("No JDBC Driver class was specified by property jdbc.driver");
        } else {
            try {
                Class.forName(jdbcDriverClass);
            } catch (ClassNotFoundException cnfe) {
                try {
                    ReflectUtils.instantiate(jdbcDriverClass);
                } catch (Exception e) {
                    String msg = "JDBC Driver class not found: " + jdbcDriverClass;
                    log.error(msg, e);
                    throw new RuntimeException(msg, e);
                }
            }
        }

        try {
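            // Optional pool tuning values; defaults are filled in below when a
            // property is not supplied.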

            Integer maxPoolSize = PropertiesHelper.getInteger(ConnectionConstants.PROP_MAXACTIVE, properties);
            Integer maxStatements = PropertiesHelper.getInteger(ConnectionConstants.PROP_MAXSTATEMENTS, properties);

            Integer timeBetweenEvictionRuns = PropertiesHelper
                    .getInteger(ConnectionConstants.PROP_TIMEBETWEENEVICTIONRUNS, properties);

            Integer maxWait = PropertiesHelper.getInteger(ConnectionConstants.PROP_MAXWAIT, properties);

            String validationQuery = properties.getProperty(ConnectionConstants.PROP_VALIDATIONQUERY);

            if (maxPoolSize == null) {
                maxPoolSize = 50;
            }

            if (timeBetweenEvictionRuns == null) {
                timeBetweenEvictionRuns = 60;
            }

            if (maxWait == null) {
                maxWait = 60;
            }

            String dbUser = properties.getProperty("jdbc.user");
            String dbPassword = properties.getProperty("jdbc.password");

            if (dbUser == null) {
                dbUser = "";
            }

            if (dbPassword == null) {
                dbPassword = "";
            }

            ds = new DruidDataSource();

            DruidDataSourceFactory.config(ds, properties);
            ds.setConnectProperties(properties);
            ds.setDriverClassName(jdbcDriverClass);
            ds.setUrl(jdbcUrl);
            ds.setUsername(dbUser);
            ds.setPassword(dbPassword);

            ds.setInitialSize(1);
            ds.setMinIdle(3);
            ds.setMaxActive(maxPoolSize);
            ds.setMaxWait(maxWait * 1000L);

            ds.setConnectionErrorRetryAttempts(30);
            ds.setDefaultAutoCommit(true);

            ds.setTestOnReturn(false);
            ds.setTestOnBorrow(false);
            ds.setTestWhileIdle(false);

            if (StringUtils.isNotEmpty(validationQuery)) {
                log.debug("validationQuery:" + validationQuery);
                ds.setValidationQuery(validationQuery);
                ds.setTestWhileIdle(true); // validate idle connections with the query above
            }

            ds.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRuns * 1000L); // eviction run interval (seconds converted to milliseconds)
            ds.setMinEvictableIdleTimeMillis(1000L * 60L * 120L); // a connection must be idle for 120 minutes before it can be evicted

            if (maxStatements != null) {
                ds.setPoolPreparedStatements(true);
                ds.setMaxOpenPreparedStatements(maxStatements);
                ds.setMaxPoolPreparedStatementPerConnectionSize(200);
            }

            ds.setRemoveAbandoned(false); // abandoned-connection removal is disabled
            ds.setRemoveAbandonedTimeout(7200); // 7200 seconds (120 minutes) before a connection counts as abandoned
            ds.setLogAbandoned(true); // log abandoned connections if removal is ever enabled

            ds.init();
        } catch (Exception ex) {
            log.error("could not instantiate Druid connection pool", ex);
            throw new RuntimeException("Could not instantiate Druid connection pool", ex);
        }

        String isolationLevel = properties.getProperty("jdbc.isolation");
        if (isolationLevel == null) {
            isolation = null;
        } else {
            isolation = Integer.valueOf(isolationLevel);
        }

    }

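    /**
     * Obtains a connection from the pool, retrying up to
     * jdbc.connection.retryCount times (default 10) and sleeping
     * jdbc.connection.retryTimeMs milliseconds (default 500) between attempts.
     * The configured isolation level and autocommit mode are applied before
     * the connection is returned.
     */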
    public Connection getConnection() throws SQLException {
        Connection connection = null;
        int count = 0;
        while (count < conf.getInt("jdbc.connection.retryCount", 10)) {
            try {
                connection = ds.getConnection();
                if (connection != null) {
                    if (isolation != null) {
                        connection.setTransactionIsolation(isolation.intValue());
                    }
                    if (connection.getAutoCommit() != autocommit) {
                        connection.setAutoCommit(autocommit);
                    }
                    log.debug("druid connection: " + connection.toString());
                    return connection;
                } else {
                    count++;
                    try {
                        Thread.sleep(conf.getInt("jdbc.connection.retryTimeMs", 500));
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt(); // restore the interrupt flag instead of swallowing it
                    }
                }
            } catch (SQLException ex) {
                count++;
                try {
                    Thread.sleep(conf.getInt("jdbc.connection.retryTimeMs", 500));
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // restore the interrupt flag instead of swallowing it
                }
                if (count >= conf.getInt("jdbc.connection.retryCount", 10)) {
                    ex.printStackTrace();
                    throw ex;
                }
            }
        }
        return connection;
    }

    public DataSource getDataSource() {
        return ds;
    }

    public boolean supportsAggressiveRelease() {
        return false;
    }

}
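
Usage example

The demo class below is a minimal sketch of how this provider might be wired up; it is not part of the original source. It assumes the jdbc.* property keys read by configure() above, and the H2 driver and in-memory URL are placeholder values chosen purely for illustration.

import java.sql.Connection;
import java.util.Properties;

import com.glaf.core.jdbc.connection.DruidConnectionProvider;

public class DruidConnectionProviderDemo {

    public static void main(String[] args) throws Exception {
        // Property keys match those read in configure(); the values are illustrative only.
        Properties props = new Properties();
        props.setProperty("jdbc.driver", "org.h2.Driver");
        props.setProperty("jdbc.url", "jdbc:h2:mem:demo");
        props.setProperty("jdbc.user", "sa");
        props.setProperty("jdbc.password", "");
        props.setProperty("jdbc.autocommit", "true");

        DruidConnectionProvider provider = new DruidConnectionProvider();
        provider.configure(props);

        // Borrow a connection, use it, and return it to the pool.
        Connection conn = provider.getConnection();
        try {
            System.out.println("connected: " + !conn.isClosed());
        } finally {
            provider.closeConnection(conn);
        }

        // Shut the pool down when the application stops.
        provider.close();
    }
}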