I. Reading the JdbcUtils source

This is an excellent and very useful method:

public static List<Map<String, Object>> executeQuery(Connection conn, String sql, List<Object> parameters)

It returns a very convenient data structure: in List<Map<Key, Value>>, each Map represents one row (one object); every key in the Map is a property name and the corresponding value is that property's value.
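To see the shape of the result, here is a minimal usage sketch. The connection settings, the table "user" and its columns are hypothetical; any javax.sql.DataSource would work the same way.

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.util.JdbcUtils;

import java.util.List;
import java.util.Map;

public class ExecuteQueryDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical connection settings.
        DruidDataSource ds = new DruidDataSource();
        ds.setUrl("jdbc:mysql://localhost:3306/test");
        ds.setUsername("root");
        ds.setPassword("secret");

        // The varargs overload delegates to executeQuery(Connection, String, List<Object>).
        // Each Map in the returned list is one row: key = column label, value = column value.
        List<Map<String, Object>> rows =
                JdbcUtils.executeQuery(ds, "select id, name from user where age > ?", 18);
        for (Map<String, Object> row : rows) {
            System.out.println(row.get("id") + " -> " + row.get("name"));
        }

        ds.close();
    }
}

The full JdbcUtils source is listed below.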
package com.alibaba.druid.util;
import com.alibaba.druid.support.logging.Log;
import com.alibaba.druid.support.logging.LogFactory;
import javax.sql.DataSource;
import java.io.Closeable;
import java.io.InputStream;
import java.io.PrintStream;
import java.net.URL;
import java.sql.Connection;
import java.sql.Date;
import java.sql.Driver;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Enumeration;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
* @author wenshao [szujobs@hotmail.com]
*/
public final class JdbcUtils implements JdbcConstants {
private final static Log LOG = LogFactory.getLog(JdbcUtils.class);
private static final Properties DRIVER_URL_MAPPING = new Properties();
private static Boolean mysql_driver_version_6 = null;
static {
try {
// load database driver mappings from META-INF/druid-driver.properties
ClassLoader ctxClassLoader = Thread.currentThread().getContextClassLoader();
if (ctxClassLoader != null) {
for (Enumeration<URL> e = ctxClassLoader.getResources("META-INF/druid-driver.properties"); e.hasMoreElements();) {
URL url = e.nextElement();
Properties property = new Properties();
InputStream is = null;
try {
is = url.openStream();
property.load(is);
} finally {
JdbcUtils.close(is);
}
DRIVER_URL_MAPPING.putAll(property);
}
}
} catch (Exception e) {
LOG.error("load druid-driver.properties error", e);
}
}
public static void close(Connection x) {
if (x == null) {
return;
}
try {
x.close();
} catch (Exception e) {
LOG.debug("close connection error", e);
}
}
public static void close(Statement x) {
if (x == null) {
return;
}
try {
x.close();
} catch (Exception e) {
LOG.debug("close statement error", e);
}
}
public static void close(ResultSet x) {
if (x == null) {
return;
}
try {
x.close();
} catch (Exception e) {
LOG.debug("close result set error", e);
}
}
public static void close(Closeable x) {
if (x == null) {
return;
}
try {
x.close();
} catch (Exception e) {
LOG.debug("close error", e);
}
}
public static void printResultSet(ResultSet rs) throws SQLException {
printResultSet(rs, System.out);
}
public static void printResultSet(ResultSet rs, PrintStream out) throws SQLException {
printResultSet(rs, out, true, "\t");
}
public static void printResultSet(ResultSet rs, PrintStream out, boolean printHeader, String seperator) throws SQLException {
ResultSetMetaData metadata = rs.getMetaData();
int columnCount = metadata.getColumnCount();
if (printHeader) {
for (int columnIndex = 1; columnIndex <= columnCount; ++columnIndex) {
if (columnIndex != 1) {
out.print(seperator);
}
out.print(metadata.getColumnName(columnIndex));
}
}
out.println();
while (rs.next()) {
for (int columnIndex = 1; columnIndex <= columnCount; ++columnIndex) {
if (columnIndex != 1) {
out.print(seperator);
}
int type = metadata.getColumnType(columnIndex);
if (type == Types.VARCHAR || type == Types.CHAR || type == Types.NVARCHAR || type == Types.NCHAR) {
out.print(rs.getString(columnIndex));
} else if (type == Types.DATE) {
Date date = rs.getDate(columnIndex);
if (rs.wasNull()) {
out.print("null");
} else {
out.print(date.toString());
}
} else if (type == Types.BIT) {
boolean value = rs.getBoolean(columnIndex);
if (rs.wasNull()) {
out.print("null");
} else {
out.print(Boolean.toString(value));
}
} else if (type == Types.BOOLEAN) {
boolean value = rs.getBoolean(columnIndex);
if (rs.wasNull()) {
out.print("null");
} else {
out.print(Boolean.toString(value));
}
} else if (type == Types.TINYINT) {
byte value = rs.getByte(columnIndex);
if (rs.wasNull()) {
out.print("null");
} else {
out.print(Byte.toString(value));
}
} else if (type == Types.SMALLINT) {
short value = rs.getShort(columnIndex);
if (rs.wasNull()) {
out.print("null");
} else {
out.print(Short.toString(value));
}
} else if (type == Types.INTEGER) {
int value = rs.getInt(columnIndex);
if (rs.wasNull()) {
out.print("null");
} else {
out.print(Integer.toString(value));
}
} else if (type == Types.BIGINT) {
long value = rs.getLong(columnIndex);
if (rs.wasNull()) {
out.print("null");
} else {
out.print(Long.toString(value));
}
} else if (type == Types.TIMESTAMP) {
out.print(String.valueOf(rs.getTimestamp(columnIndex)));
} else if (type == Types.DECIMAL) {
out.print(String.valueOf(rs.getBigDecimal(columnIndex)));
} else if (type == Types.CLOB) {
out.print(String.valueOf(rs.getString(columnIndex)));
} else if (type == Types.JAVA_OBJECT) {
Object object = rs.getObject(columnIndex);
if (rs.wasNull()) {
out.print("null");
} else {
out.print(String.valueOf(object));
}
} else if (type == Types.LONGVARCHAR) {
Object object = rs.getString(columnIndex);
if (rs.wasNull()) {
out.print("null");
} else {
out.print(String.valueOf(object));
}
} else if (type == Types.NULL) {
out.print("null");
} else {
Object object = rs.getObject(columnIndex);
if (rs.wasNull()) {
out.print("null");
} else {
if (object instanceof byte[]) {
byte[] bytes = (byte[]) object;
String text = HexBin.encode(bytes);
out.print(text);
} else {
out.print(String.valueOf(object));
}
}
}
}
out.println();
}
}
public static String getTypeName(int sqlType) {
switch (sqlType) {
case Types.ARRAY:
return "ARRAY";
case Types.BIGINT:
return "BIGINT";
case Types.BINARY:
return "BINARY";
case Types.BIT:
return "BIT";
case Types.BLOB:
return "BLOB";
case Types.BOOLEAN:
return "BOOLEAN";
case Types.CHAR:
return "CHAR";
case Types.CLOB:
return "CLOB";
case Types.DATALINK:
return "DATALINK";
case Types.DATE:
return "DATE";
case Types.DECIMAL:
return "DECIMAL";
case Types.DISTINCT:
return "DISTINCT";
case Types.DOUBLE:
return "DOUBLE";
case Types.FLOAT:
return "FLOAT";
case Types.INTEGER:
return "INTEGER";
case Types.JAVA_OBJECT:
return "JAVA_OBJECT";
case Types.LONGNVARCHAR:
return "LONGNVARCHAR";
case Types.LONGVARBINARY:
return "LONGVARBINARY";
case Types.NCHAR:
return "NCHAR";
case Types.NCLOB:
return "NCLOB";
case Types.NULL:
return "NULL";
case Types.NUMERIC:
return "NUMERIC";
case Types.NVARCHAR:
return "NVARCHAR";
case Types.REAL:
return "REAL";
case Types.REF:
return "REF";
case Types.ROWID:
return "ROWID";
case Types.SMALLINT:
return "SMALLINT";
case Types.SQLXML:
return "SQLXML";
case Types.STRUCT:
return "STRUCT";
case Types.TIME:
return "TIME";
case Types.TIMESTAMP:
return "TIMESTAMP";
case Types.TINYINT:
return "TINYINT";
case Types.VARBINARY:
return "VARBINARY";
case Types.VARCHAR:
return "VARCHAR";
default:
return "OTHER";
}
}
public static String getDriverClassName(String rawUrl) throws SQLException {
if (rawUrl == null) {
return null;
}
if (rawUrl.startsWith("jdbc:derby:")) {
return "org.apache.derby.jdbc.EmbeddedDriver";
} else if (rawUrl.startsWith("jdbc:mysql:")) {
if (mysql_driver_version_6 == null) {
mysql_driver_version_6 = Utils.loadClass("com.mysql.cj.jdbc.Driver") != null;
}
if (mysql_driver_version_6) {
return MYSQL_DRIVER_6;
} else {
return MYSQL_DRIVER;
}
} else if (rawUrl.startsWith("jdbc:log4jdbc:")) {
return LOG4JDBC_DRIVER;
} else if (rawUrl.startsWith("jdbc:mariadb:")) {
return MARIADB_DRIVER;
} else if (rawUrl.startsWith("jdbc:oracle:") //
|| rawUrl.startsWith("JDBC:oracle:")) {
return ORACLE_DRIVER;
} else if (rawUrl.startsWith("jdbc:alibaba:oracle:")) {
return ALI_ORACLE_DRIVER;
} else if (rawUrl.startsWith("jdbc:microsoft:")) {
return "com.microsoft.jdbc.sqlserver.SQLServerDriver";
} else if (rawUrl.startsWith("jdbc:sqlserver:")) {
return "com.microsoft.sqlserver.jdbc.SQLServerDriver";
} else if (rawUrl.startsWith("jdbc:sybase:Tds:")) {
return "com.sybase.jdbc2.jdbc.SybDriver";
} else if (rawUrl.startsWith("jdbc:jtds:")) {
return "net.sourceforge.jtds.jdbc.Driver";
} else if (rawUrl.startsWith("jdbc:fake:") || rawUrl.startsWith("jdbc:mock:")) {
return "com.alibaba.druid.mock.MockDriver";
} else if (rawUrl.startsWith("jdbc:postgresql:")) {
return POSTGRESQL_DRIVER;
} else if (rawUrl.startsWith("jdbc:odps:")) {
return ODPS_DRIVER;
} else if (rawUrl.startsWith("jdbc:hsqldb:")) {
return "org.hsqldb.jdbcDriver";
} else if (rawUrl.startsWith("jdbc:db2:")) {
return DB2_DRIVER;
} else if (rawUrl.startsWith("jdbc:sqlite:")) {
return "org.sqlite.JDBC";
} else if (rawUrl.startsWith("jdbc:ingres:")) {
return "com.ingres.jdbc.IngresDriver";
} else if (rawUrl.startsWith("jdbc:h2:")) {
return H2_DRIVER;
} else if (rawUrl.startsWith("jdbc:mckoi:")) {
return "com.mckoi.JDBCDriver";
} else if (rawUrl.startsWith("jdbc:cloudscape:")) {
return "COM.cloudscape.core.JDBCDriver";
} else if (rawUrl.startsWith("jdbc:informix-sqli:")) {
return "com.informix.jdbc.IfxDriver";
} else if (rawUrl.startsWith("jdbc:timesten:")) {
return "com.timesten.jdbc.TimesTenDriver";
} else if (rawUrl.startsWith("jdbc:as400:")) {
return "com.ibm.as400.access.AS400JDBCDriver";
} else if (rawUrl.startsWith("jdbc:sapdb:")) {
return "com.sap.dbtech.jdbc.DriverSapDB";
} else if (rawUrl.startsWith("jdbc:JSQLConnect:")) {
return "com.jnetdirect.jsql.JSQLDriver";
} else if (rawUrl.startsWith("jdbc:JTurbo:")) {
return "com.newatlanta.jturbo.driver.Driver";
} else if (rawUrl.startsWith("jdbc:firebirdsql:")) {
return "org.firebirdsql.jdbc.FBDriver";
} else if (rawUrl.startsWith("jdbc:interbase:")) {
return "interbase.interclient.Driver";
} else if (rawUrl.startsWith("jdbc:pointbase:")) {
return "com.pointbase.jdbc.jdbcUniversalDriver";
} else if (rawUrl.startsWith("jdbc:edbc:")) {
return "ca.edbc.jdbc.EdbcDriver";
} else if (rawUrl.startsWith("jdbc:mimer:multi1:")) {
return "com.mimer.jdbc.Driver";
} else if (rawUrl.startsWith("jdbc:dm:")) {
return JdbcConstants.DM_DRIVER;
} else if (rawUrl.startsWith("jdbc:kingbase:")) {
return JdbcConstants.KINGBASE_DRIVER;
} else if (rawUrl.startsWith("jdbc:hive:")) {
return JdbcConstants.HIVE_DRIVER;
} else if (rawUrl.startsWith("jdbc:hive2:")) {
return JdbcConstants.HIVE_DRIVER;
} else if (rawUrl.startsWith("jdbc:phoenix:thin:")) {
return "org.apache.phoenix.queryserver.client.Driver";
} else if (rawUrl.startsWith("jdbc:phoenix://")) {
return JdbcConstants.PHOENIX_DRIVER;
} else {
throw new SQLException("unkow jdbc driver : " + rawUrl);
}
}
/**
* Determine the database type from rawUrl.
* @param rawUrl
* @param driverClassName this parameter is currently unused
* @return
*/
public static String getDbType(String rawUrl, String driverClassName) {
if (rawUrl == null) {
return null;
}
if (rawUrl.startsWith("jdbc:derby:") || rawUrl.startsWith("jdbc:log4jdbc:derby:")) {
return DERBY;
} else if (rawUrl.startsWith("jdbc:mysql:") || rawUrl.startsWith("jdbc:cobar:")
|| rawUrl.startsWith("jdbc:log4jdbc:mysql:")) {
return MYSQL;
} else if (rawUrl.startsWith("jdbc:mariadb:")) {
return MARIADB;
} else if (rawUrl.startsWith("jdbc:oracle:") || rawUrl.startsWith("jdbc:log4jdbc:oracle:")) {
return ORACLE;
} else if (rawUrl.startsWith("jdbc:alibaba:oracle:")) {
return ALI_ORACLE;
} else if (rawUrl.startsWith("jdbc:microsoft:") || rawUrl.startsWith("jdbc:log4jdbc:microsoft:")) {
return SQL_SERVER;
} else if (rawUrl.startsWith("jdbc:sqlserver:") || rawUrl.startsWith("jdbc:log4jdbc:sqlserver:")) {
return SQL_SERVER;
} else if (rawUrl.startsWith("jdbc:sybase:Tds:") || rawUrl.startsWith("jdbc:log4jdbc:sybase:")) {
return SYBASE;
} else if (rawUrl.startsWith("jdbc:jtds:") || rawUrl.startsWith("jdbc:log4jdbc:jtds:")) {
return JTDS;
} else if (rawUrl.startsWith("jdbc:fake:") || rawUrl.startsWith("jdbc:mock:")) {
return MOCK;
} else if (rawUrl.startsWith("jdbc:postgresql:") || rawUrl.startsWith("jdbc:log4jdbc:postgresql:")) {
return POSTGRESQL;
} else if (rawUrl.startsWith("jdbc:hsqldb:") || rawUrl.startsWith("jdbc:log4jdbc:hsqldb:")) {
return HSQL;
} else if (rawUrl.startsWith("jdbc:odps:")) {
return ODPS;
} else if (rawUrl.startsWith("jdbc:db2:")) {
return DB2;
} else if (rawUrl.startsWith("jdbc:sqlite:")) {
return "sqlite";
} else if (rawUrl.startsWith("jdbc:ingres:")) {
return "ingres";
} else if (rawUrl.startsWith("jdbc:h2:") || rawUrl.startsWith("jdbc:log4jdbc:h2:")) {
return H2;
} else if (rawUrl.startsWith("jdbc:mckoi:")) {
return "mckoi";
} else if (rawUrl.startsWith("jdbc:cloudscape:")) {
return "cloudscape";
} else if (rawUrl.startsWith("jdbc:informix-sqli:") || rawUrl.startsWith("jdbc:log4jdbc:informix-sqli:")) {
return "informix";
} else if (rawUrl.startsWith("jdbc:timesten:")) {
return "timesten";
} else if (rawUrl.startsWith("jdbc:as400:")) {
return "as400";
} else if (rawUrl.startsWith("jdbc:sapdb:")) {
return "sapdb";
} else if (rawUrl.startsWith("jdbc:JSQLConnect:")) {
return "JSQLConnect";
} else if (rawUrl.startsWith("jdbc:JTurbo:")) {
return "JTurbo";
} else if (rawUrl.startsWith("jdbc:firebirdsql:")) {
return "firebirdsql";
} else if (rawUrl.startsWith("jdbc:interbase:")) {
return "interbase";
} else if (rawUrl.startsWith("jdbc:pointbase:")) {
return "pointbase";
} else if (rawUrl.startsWith("jdbc:edbc:")) {
return "edbc";
} else if (rawUrl.startsWith("jdbc:mimer:multi1:")) {
return "mimer";
} else if (rawUrl.startsWith("jdbc:dm:")) {
return JdbcConstants.DM;
} else if (rawUrl.startsWith("jdbc:kingbase:")) {
return JdbcConstants.KINGBASE;
} else if (rawUrl.startsWith("jdbc:log4jdbc:")) {
return LOG4JDBC;
} else if (rawUrl.startsWith("jdbc:hive:")) {
return HIVE;
} else if (rawUrl.startsWith("jdbc:hive2:")) {
return HIVE;
} else if (rawUrl.startsWith("jdbc:phoenix:")) {
return PHOENIX;
} else {
return null;
}
}
public static Driver createDriver(String driverClassName) throws SQLException {
return createDriver(null, driverClassName);
}
/**
* Load and instantiate the JDBC driver class.
* @param classLoader
* @param driverClassName
* @return
* @throws SQLException
*/
public static Driver createDriver(ClassLoader classLoader, String driverClassName) throws SQLException {
Class<?> clazz = null;
if (classLoader != null) {
try {
clazz = classLoader.loadClass(driverClassName);
} catch (ClassNotFoundException e) {
// skip
}
}
if (clazz == null) {
try {
ClassLoader contextLoader = Thread.currentThread().getContextClassLoader();
if (contextLoader != null) {
clazz = contextLoader.loadClass(driverClassName);
}
} catch (ClassNotFoundException e) {
// skip
}
}
if (clazz == null) {
try {
clazz = Class.forName(driverClassName);
} catch (ClassNotFoundException e) {
throw new SQLException(e.getMessage(), e);
}
}
try {
return (Driver) clazz.newInstance();
} catch (IllegalAccessException e) {
throw new SQLException(e.getMessage(), e);
} catch (InstantiationException e) {
throw new SQLException(e.getMessage(), e);
}
}
public static int executeUpdate(DataSource dataSource, String sql, Object... parameters) throws SQLException {
return executeUpdate(dataSource, sql, Arrays.asList(parameters));
}
public static int executeUpdate(DataSource dataSource, String sql, List<Object> parameters) throws SQLException {
Connection conn = null;
try {
conn = dataSource.getConnection();
return executeUpdate(conn, sql, parameters);
} finally {
close(conn);
}
}
public static int executeUpdate(Connection conn, String sql, List<Object> parameters) throws SQLException {
PreparedStatement stmt = null;
int updateCount;
try {
stmt = conn.prepareStatement(sql);
setParameters(stmt, parameters);
updateCount = stmt.executeUpdate();
} finally {
JdbcUtils.close(stmt);
}
return updateCount;
}
public static void execute(DataSource dataSource, String sql, Object... parameters) throws SQLException {
execute(dataSource, sql, Arrays.asList(parameters));
}
public static void execute(DataSource dataSource, String sql, List<Object> parameters) throws SQLException {
Connection conn = null;
try {
conn = dataSource.getConnection();
execute(conn, sql, parameters);
} finally {
close(conn);
}
}
public static void execute(Connection conn, String sql, List<Object> parameters) throws SQLException {
PreparedStatement stmt = null;
try {
stmt = conn.prepareStatement(sql);
setParameters(stmt, parameters);
stmt.executeUpdate();
} finally {
JdbcUtils.close(stmt);
}
}
public static List<Map<String, Object>> executeQuery(DataSource dataSource, String sql, Object... parameters)
throws SQLException {
return executeQuery(dataSource, sql, Arrays.asList(parameters));
}
public static List<Map<String, Object>> executeQuery(DataSource dataSource, String sql, List<Object> parameters)
throws SQLException {
Connection conn = null;
try {
conn = dataSource.getConnection();
return executeQuery(conn, sql, parameters);
} finally {
close(conn);
}
}
/**
* A very handy result structure:
* List<Map<Key,Value>> — each Map is one row (one object); every key in the Map is a property name and the value is that property's value.
* @param conn
* @param sql
* @param parameters
* @return
* @throws SQLException
*/
public static List<Map<String, Object>> executeQuery(Connection conn, String sql, List<Object> parameters)
throws SQLException {
List<Map<String, Object>> rows = new ArrayList<Map<String, Object>>();
PreparedStatement stmt = null;
ResultSet rs = null;
try {
stmt = conn.prepareStatement(sql);
setParameters(stmt, parameters);
rs = stmt.executeQuery();
ResultSetMetaData rsMeta = rs.getMetaData();
while (rs.next()) {
Map<String, Object> row = new LinkedHashMap<String, Object>();
for (int i = 0, size = rsMeta.getColumnCount(); i < size; ++i) {
String columName = rsMeta.getColumnLabel(i + 1);
Object value = rs.getObject(i + 1);
row.put(columName, value);
}
rows.add(row);
}
} finally {
JdbcUtils.close(rs);
JdbcUtils.close(stmt);
}
return rows;
}
/**
* Bind the parameters to the statement's placeholders.
* @param stmt
* @param parameters
* @throws SQLException
*/
private static void setParameters(PreparedStatement stmt, List<Object> parameters) throws SQLException {
for (int i = 0, size = parameters.size(); i < size; ++i) {
Object param = parameters.get(i);
stmt.setObject(i + 1, param);
}
}
public static void insertToTable(DataSource dataSource, String tableName, Map<String, Object> data)
throws SQLException {
Connection conn = null;
try {
conn = dataSource.getConnection();
insertToTable(conn, tableName, data);
} finally {
close(conn);
}
}
public static void insertToTable(Connection conn, String tableName, Map<String, Object> data) throws SQLException {
String sql = makeInsertToTableSql(tableName, data.keySet());
List<Object> parameters = new ArrayList<Object>(data.values());
execute(conn, sql, parameters);
}
/**
* Build an SQL string of the form: insert into TabName (field1,field2) values(?,?)
* @param tableName
* @param names
* @return
*/
public static String makeInsertToTableSql(String tableName, Collection<String> names) {
StringBuilder sql = new StringBuilder() //
.append("insert into ") //
.append(tableName) //
.append("("); //
int nameCount = 0;
for (String name : names) {
if (nameCount > 0) {
sql.append(",");
}
sql.append(name);
nameCount++;
}
sql.append(") values (");
for (int i = 0; i < nameCount; ++i) {
if (i != 0) {
sql.append(",");
}
sql.append("?");
}
sql.append(")");
return sql.toString();
}
}
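To round out the JdbcUtils walkthrough, here is a minimal sketch of how the insert helpers compose. The table name "t_user" and its columns are hypothetical.

import com.alibaba.druid.util.JdbcUtils;

import java.util.LinkedHashMap;
import java.util.Map;

public class InsertDemo {
    public static void main(String[] args) {
        // A LinkedHashMap keeps column order stable, matching the generated placeholder order.
        Map<String, Object> data = new LinkedHashMap<String, Object>();
        data.put("id", 1L);
        data.put("name", "druid");

        // makeInsertToTableSql only concatenates the SQL text:
        // insert into t_user(id,name) values (?,?)
        String sql = JdbcUtils.makeInsertToTableSql("t_user", data.keySet());
        System.out.println(sql);

        // insertToTable(dataSource, "t_user", data) would then bind data.values()
        // to these placeholders and run the statement via execute(conn, sql, parameters).
    }
}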
II. Reading the JdbcConstants source
package com.alibaba.druid.util;
public interface JdbcConstants {
public static final String JTDS = "jtds";
public static final String MOCK = "mock";
public static final String HSQL = "hsql";
public static final String DB2 = "db2";
public static final String DB2_DRIVER = "COM.ibm.db2.jdbc.app.DB2Driver";
public static final String POSTGRESQL = "postgresql";
public static final String POSTGRESQL_DRIVER = "org.postgresql.Driver";
public static final String SYBASE = "sybase";
public static final String SQL_SERVER = "sqlserver";
public static final String SQL_SERVER_DRIVER = "com.microsoft.jdbc.sqlserver.SQLServerDriver";
public static final String SQL_SERVER_DRIVER_SQLJDBC4 = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
public static final String SQL_SERVER_DRIVER_JTDS = "net.sourceforge.jtds.jdbc.Driver";
public static final String ORACLE = "oracle";
public static final String ORACLE_DRIVER = "oracle.jdbc.OracleDriver";
public static final String ALI_ORACLE = "AliOracle";
public static final String ALI_ORACLE_DRIVER = "com.alibaba.jdbc.AlibabaDriver";
public static final String MYSQL = "mysql";
public static final String MYSQL_DRIVER = "com.mysql.jdbc.Driver";
public static final String MYSQL_DRIVER_6 = "com.mysql.cj.jdbc.Driver";
public static final String MARIADB = "mariadb";
public static final String MARIADB_DRIVER = "org.mariadb.jdbc.Driver";
public static final String DERBY = "derby";
public static final String HBASE = "hbase";
public static final String HIVE = "hive";
public static final String HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver";
public static final String H2 = "h2";
public static final String H2_DRIVER = "org.h2.Driver";
public static final String DM = "dm";
public static final String DM_DRIVER = "dm.jdbc.driver.DmDriver";
public static final String KINGBASE = "kingbase";
public static final String KINGBASE_DRIVER = "com.kingbase.Driver";
public static final String OCEANBASE = "oceanbase";
public static final String OCEANBASE_DRIVER = "com.mysql.jdbc.Driver";
public static final String ODPS = "odps";
public static final String ODPS_DRIVER = "com.aliyun.odps.jdbc.OdpsDriver";
public static final String TERADATA = "teradata";
public static final String TERADATA_DRIVER = "com.teradata.jdbc.TeraDriver";
/**
* Log4JDBC
*/
public static final String LOG4JDBC = "log4jdbc";
public static final String LOG4JDBC_DRIVER = "net.sf.log4jdbc.DriverSpy";
public static final String PHOENIX = "phoenix";
public static final String PHOENIX_DRIVER = "org.apache.phoenix.jdbc.PhoenixDriver";
}
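A quick sketch of how JdbcUtils resolves these constants from a JDBC URL; the URL itself is hypothetical, and the printed driver class depends on which MySQL driver is on the classpath.

import com.alibaba.druid.util.JdbcConstants;
import com.alibaba.druid.util.JdbcUtils;

public class DbTypeDemo {
    public static void main(String[] args) throws Exception {
        String url = "jdbc:mysql://127.0.0.1:3306/test";

        // getDbType matches the URL prefix and returns the short type name, here "mysql".
        String dbType = JdbcUtils.getDbType(url, null);
        System.out.println(JdbcConstants.MYSQL.equals(dbType));   // true

        // getDriverClassName returns MYSQL_DRIVER_6 if com.mysql.cj.jdbc.Driver is present,
        // otherwise MYSQL_DRIVER (com.mysql.jdbc.Driver).
        System.out.println(JdbcUtils.getDriverClassName(url));
    }
}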