A high-performance JDBC connection pool and database monitoring library with SQL parsing and security features
—
Comprehensive performance monitoring and statistics collection framework providing detailed insights into database connections, SQL execution, and system performance with JMX integration and web interfaces.
// Primary statistics service
// Singleton facade exposing Druid runtime statistics through a
// REST-style string API (see endpoint list below) and through JMX.
class DruidStatService {
public static DruidStatService getInstance();
// REST-style API access
// Takes a URL-style path (e.g. "/sql.json?orderBy=ExecuteCount") and
// returns the matching statistics serialized as a JSON string.
public String service(String url);
// Configuration
// Gates the reset endpoints ("/reset-all.json" etc.); when disabled,
// reset requests are ignored.
public boolean isResetEnable();
public void setResetEnable(boolean value);
// JMX integration
public static void registerMBean();
public static void unregisterMBean();
}
// Available service endpoints via service(String url) method
// Core statistics
"/basic.json" // Basic system statistics
"/datasource.json" // All DataSource statistics
"/sql.json" // SQL execution statistics
"/sql.json?orderBy=ExecuteCount" // Ordered SQL statistics
"/activeConnectionStackTrace.json" // Active connection debugging
// Security and web statistics
"/wall.json" // Security wall statistics
"/weburi.json" // Web URI access statistics
"/webapp.json" // Web application statistics
"/websession.json" // Web session statistics
"/spring.json" // Spring method statistics
// Management operations
"/reset-all.json" // Reset all statistics
"/log-and-reset.json" // Log current stats and reset
// DataSource statistics manager
// Singleton registry of every Druid DataSource known to the JVM.
class DruidDataSourceStatManager {
public static DruidDataSourceStatManager getInstance();
// DataSource registration
public boolean addDataSource(Object dataSource, String name);
public boolean removeDataSource(Object dataSource);
// Statistics access
// NOTE(review): this method and the TabularData getDataSourceList() below
// differ only in return type, which Java does not allow on one class; one
// of the two names must be wrong. In Druid the JMX MBean variant is the
// TabularData one — verify the real name of the Set accessor.
public Set<DruidDataSourceStatValue> getDataSourceList();
public static Set<Object> getDruidDataSourceInstances();
// Management operations
public void reset();
public void logAndResetDataSource();
public long getResetCount();
// JMX support (exposed via the manager's MBean interface)
public TabularData getDataSourceList();
}
// DataSource-level statistics collector
// Aggregates per-DataSource counters: per-SQL stats, connection lifecycle
// and timing, plus LOB/stream resource usage.
class JdbcDataSourceStat {
// SQL statistics management
public Map<String, JdbcSqlStat> getSqlStatMap();
public JdbcSqlStat getSqlStat(String sql);
public JdbcSqlStat createSqlStat(String sql);
// Returns the current per-SQL stats and clears them in one call.
public Map<String, JdbcSqlStat> getSqlStatMapAndReset();
// Connection statistics
public long getConnectionActiveCount();
public long getConnectionActiveCountMax();
public long getConnectionCloseCount();
public long getConnectionCommitCount();
public long getConnectionRollbackCount();
public long getConnectionConnectCount();
public long getConnectionConnectErrorCount();
// Timing statistics (connection establishment, in milliseconds)
public long getConnectionConnectMillisTotal();
public long getConnectionConnectMillisMax();
public long getConnectionConnectMillisMin();
public long[] getConnectionHistogramValues();
public long[] getConnectionHoldTimeHistogramValues();
// Resource management (LOB and stream usage counters)
public long getBlobOpenCount();
public long getClobOpenCount();
public long getReadStringLength();
public long getReadBytesLength();
public long getInputStreamOpenCount();
public long getReaderOpenCount();
// Management operations
public void reset();
public long getResetCount();
}
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.stat.JdbcDataSourceStat;
// Example: reading connection-level metrics from a live DataSource.
// Access DataSource statistics
DruidDataSource dataSource = getDataSource();
JdbcDataSourceStat stat = dataSource.getDataSourceStat();
// Connection metrics
long activeConnections = stat.getConnectionActiveCount();
long maxConnections = stat.getConnectionActiveCountMax();
long totalConnections = stat.getConnectionConnectCount();
long connectionErrors = stat.getConnectionConnectErrorCount();
// Connection timing
long totalConnectTime = stat.getConnectionConnectMillisTotal();
long maxConnectTime = stat.getConnectionConnectMillisMax();
long[] connectionHistogram = stat.getConnectionHistogramValues();
// Transaction statistics
long commits = stat.getConnectionCommitCount();
long rollbacks = stat.getConnectionRollbackCount();
// SQL statement statistics
// Per-statement counters collected by StatFilter; one instance per
// (merged) SQL string held in JdbcDataSourceStat's map.
class JdbcSqlStat {
// Execution counts
public long getExecuteCount();
public long getExecuteSuccessCount();
public long getExecuteErrorCount();
public long getRunningCount();
public long getConcurrentMax();
// Timing statistics (milliseconds)
public long getExecuteMillisTotal();
public long getExecuteMillisMax();
public long getExecuteMillisMin();
public double getExecuteMillisMean();
// Row operations
public long getFetchRowCount();
public long getFetchRowCountMax();
public long getUpdateCount();
public long getUpdateCountMax();
// Histogram data
public long[] getHistogramValues(); // Execution time distribution
public long[] getFetchRowCountHistogramValues();
public long[] getUpdateCountHistogramValues();
public long[] getExecuteAndResultHoldTimeHistogramValues();
// Transaction context
public long getInTransactionCount();
// Error information (most recent failure)
public Throwable getLastError();
public long getLastErrorCount();
public Date getLastErrorTime();
public String getLastErrorMessage();
public String getLastErrorClass();
public String getLastErrorStackTrace();
// Timing details
public Date getLastTime();
// NOTE(review): a Date return for a "timespan" looks suspect — in Druid,
// getMaxTimespan() returns a long duration and the Date accessor is
// getMaxTimespanOccurTime(); verify against the actual API.
public Date getMaxTimespan();
public long getLastExecuteTimeNano();
// Data access
public Map<String, Object> getData();
public JdbcSqlStatValue getValue(boolean reset);
}
import com.alibaba.druid.stat.JdbcSqlStat;
// Example: iterate every tracked SQL statement and read its metrics.
// Access SQL statistics
Map<String, JdbcSqlStat> sqlStats = dataSourceStat.getSqlStatMap();
for (Map.Entry<String, JdbcSqlStat> entry : sqlStats.entrySet()) {
String sql = entry.getKey();
JdbcSqlStat stat = entry.getValue();
// Performance metrics
long executions = stat.getExecuteCount();
long errors = stat.getExecuteErrorCount();
long totalTime = stat.getExecuteMillisTotal();
long maxTime = stat.getExecuteMillisMax();
double avgTime = stat.getExecuteMillisMean();
// Row operations
long rowsFetched = stat.getFetchRowCount();
long rowsUpdated = stat.getUpdateCount();
// Concurrency
long currentRunning = stat.getRunningCount();
long maxConcurrent = stat.getConcurrentMax();
// Error information (null until the first failure)
if (stat.getLastError() != null) {
System.out.println("Last error: " + stat.getLastErrorMessage());
System.out.println("Error time: " + stat.getLastErrorTime());
}
}
// Statistics collection filter
// JDBC proxy filter that records the statistics surfaced by the classes
// above; attach it to a DruidDataSource (filters="stat" or programmatically).
class StatFilter extends FilterEventAdapter implements StatFilterMBean {
// Configuration
public void setSlowSqlMillis(long millis); // Default: 3000ms
public long getSlowSqlMillis();
public void setLogSlowSql(boolean enabled); // Default: false
public boolean isLogSlowSql();
public void setMergeSql(boolean enabled); // Default: false
public boolean isMergeSql();
// Connection tracking
public void setConnectionStackTraceEnable(boolean enabled);
public boolean isConnectionStackTraceEnable();
// SQL merging and parameterization
// Rewrites a statement into its merged (parameterized) form — the form
// used as the stats key when mergeSql is enabled.
public String mergeSql(String sql);
public String mergeSql(String sql, DbType dbType);
// Statistics access
public JdbcDataSourceStat getDataSourceStat();
public TabularData getSqlList();
public CompositeData getSqlStat(String sql);
// Management operations
public void reset();
public long getResetCount();
}
import com.alibaba.druid.filter.stat.StatFilter;
import com.alibaba.druid.pool.DruidDataSource;
// Configure statistics collection
DruidDataSource dataSource = new DruidDataSource();
// Method 1: Via filters string
dataSource.setFilters("stat");
// Method 2: Programmatic configuration
StatFilter statFilter = new StatFilter();
statFilter.setSlowSqlMillis(5000); // 5 second slow SQL threshold
statFilter.setLogSlowSql(true); // Log slow SQL statements
statFilter.setMergeSql(true); // Merge similar SQL statements
statFilter.setConnectionStackTraceEnable(true); // Capture connection stack traces
// NOTE(review): register the filter before the pool is initialized/first
// used — verify against Druid docs whether late registration takes effect.
dataSource.getProxyFilters().add(statFilter);
// Web interface servlet
// Serves the built-in monitoring web UI (typically mapped at /druid/*).
class StatViewServlet extends HttpServlet {
// Configuration parameters
// resetEnable - Allow statistics reset via web interface
// jmxUrl, jmxUsername, jmxPassword - Remote JMX connection
// allow, deny - IP access control; restrict these in production so the
// monitoring UI is not reachable from untrusted networks
}
<!-- web.xml configuration -->
<servlet>
<servlet-name>DruidStatView</servlet-name>
<servlet-class>com.alibaba.druid.support.http.StatViewServlet</servlet-class>
<init-param>
<param-name>resetEnable</param-name>
<param-value>true</param-value>
</init-param>
<!-- NOTE: the credentials below are placeholders for illustration only;
     supply strong values from external configuration in real deployments. -->
<init-param>
<param-name>loginUsername</param-name>
<param-value>admin</param-value>
</init-param>
<init-param>
<param-name>loginPassword</param-name>
<param-value>password</param-value>
</init-param>
</servlet>
<servlet-mapping>
<servlet-name>DruidStatView</servlet-name>
<url-pattern>/druid/*</url-pattern>
</servlet-mapping>
// JMX ObjectNames
// com.alibaba.druid:type=DruidStatService
// com.alibaba.druid:type=DruidDataSourceStat
// com.alibaba.druid:type=DruidDataSource,id={name}
import javax.management.MBeanServer;
import javax.management.ObjectName;
import java.lang.management.ManagementFactory;
// Access statistics via JMX
MBeanServer server = ManagementFactory.getPlatformMBeanServer();
// DruidStatService MBean
ObjectName statServiceName = new ObjectName("com.alibaba.druid:type=DruidStatService");
// invoke(name, operation, params, signature): params carries the endpoint
// path; signature names the parameter type ("java.lang.String").
String datasourceStats = (String) server.invoke(statServiceName, "service",
new Object[]{"/datasource.json"}, new String[]{String.class.getName()});
// SQL statistics with pagination
String sqlStats = (String) server.invoke(statServiceName, "service",
new Object[]{"/sql.json?page=1&perPageCount=100&orderBy=ExecuteCount&orderType=desc"},
new String[]{String.class.getName()});
// DataSource manager MBean
ObjectName managerName = new ObjectName("com.alibaba.druid:type=DruidDataSourceStat");
TabularData dataSourceList = (TabularData) server.getAttribute(managerName, "DataSourceList");
Long resetCount = (Long) server.getAttribute(managerName, "ResetCount");
// Reset all statistics (no-arg operation, so params/signature are null)
server.invoke(managerName, "reset", null, null);
// Execution time histogram buckets
long[] histogram = sqlStat.getHistogramValues();
// Index 0: 0-1ms
// Index 1: 1-10ms
// Index 2: 10-100ms
// Index 3: 100ms-1s
// Index 4: 1s-10s
// Index 5: 10s-100s
// Index 6: 100s-1000s
// Index 7: >1000s
// Analyze performance distribution
long totalExecutions = sqlStat.getExecuteCount();
// Buckets 4..7 cover everything slower than one second.
long slowExecutions = histogram[4] + histogram[5] + histogram[6] + histogram[7];
// Guard: getExecuteCount() is zero for a fresh or just-reset stat, which
// would otherwise make the 0/0 division below yield NaN.
double slowPercentage = totalExecutions > 0
        ? (double) slowExecutions / totalExecutions * 100
        : 0.0;
System.out.println("Slow executions (>1s): " + slowPercentage + "%");
// Monitor active connections and detect leaks
DruidStatService statService = DruidStatService.getInstance();
String activeConnectionsJson = statService.service("/activeConnectionStackTrace.json");
// Parse JSON to analyze connection stack traces
// NOTE(review): presumably only useful when connection stack-trace capture
// (StatFilter.setConnectionStackTraceEnable) is on — verify.
// Identify potential connection leaks and problematic code paths
// Reset individual DataSource statistics
// Example: the three levels at which statistics can be reset.
JdbcDataSourceStat stat = dataSource.getDataSourceStat();
stat.reset();
// Reset all DataSource statistics via manager
DruidDataSourceStatManager.getInstance().reset();
// Reset via StatService
DruidStatService statService = DruidStatService.getInstance();
// Honor the resetEnable flag so resets can be disabled centrally.
if (statService.isResetEnable()) {
statService.service("/reset-all.json");
statService.service("/log-and-reset.json"); // Log before reset
}
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
// Automated statistics collection every 60 seconds.
// NOTE: call scheduler.shutdown() during application shutdown.
ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
scheduler.scheduleAtFixedRate(() -> {
    // Guard the task body: per the ScheduledExecutorService contract an
    // uncaught exception silently cancels all subsequent executions.
    try {
        DruidStatService statService = DruidStatService.getInstance();
        // Collect current statistics
        String datasourceStats = statService.service("/datasource.json");
        String sqlStats = statService.service("/sql.json?orderBy=ExecuteCount");
        // Send to monitoring system
        sendToMonitoringSystem(datasourceStats, sqlStats);
        // Optional: Reset statistics after collection
        if (shouldReset()) {
            statService.service("/reset-all.json");
        }
    } catch (Exception e) {
        // Log and keep the schedule alive; never let the task die silently.
        e.printStackTrace();
    }
}, 0, 60, TimeUnit.SECONDS);
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.filter.stat.StatFilter;
import com.alibaba.druid.stat.DruidStatService;
/**
 * Example wiring for a fully monitored Druid DataSource, plus a helper
 * that dumps the collected statistics to stdout.
 */
public class DruidMonitoringConfiguration {

    /** Builds a DataSource with the stat filter attached and JMX publishing enabled. */
    public static DruidDataSource createMonitoredDataSource() {
        DruidDataSource ds = new DruidDataSource();
        ds.setUrl("jdbc:mysql://localhost:3306/mydb");
        ds.setUsername("dbuser");
        ds.setPassword("dbpass");

        // Statistics filter: slow-SQL logging at 3s, merged (parameterized)
        // SQL entries, and connection stack traces for leak hunting.
        StatFilter filter = new StatFilter();
        filter.setSlowSqlMillis(3000);
        filter.setLogSlowSql(true);
        filter.setMergeSql(true);
        filter.setConnectionStackTraceEnable(true);
        ds.getProxyFilters().add(filter);

        // Allow statistics resets and publish the stat service over JMX.
        DruidStatService.getInstance().setResetEnable(true);
        DruidStatService.registerMBean();
        return ds;
    }

    /** Prints connection-level and per-SQL statistics for the given pool. */
    public static void printStatistics(DruidDataSource dataSource) {
        JdbcDataSourceStat stat = dataSource.getDataSourceStat();

        // DataSource-level metrics
        System.out.println("Active Connections: " + stat.getConnectionActiveCount());
        System.out.println("Total Connections: " + stat.getConnectionConnectCount());
        System.out.println("Connection Errors: " + stat.getConnectionConnectErrorCount());
        System.out.println("Commits: " + stat.getConnectionCommitCount());
        System.out.println("Rollbacks: " + stat.getConnectionRollbackCount());

        // Per-SQL metrics
        Map<String, JdbcSqlStat> statsBySql = stat.getSqlStatMap();
        System.out.println("Unique SQL statements: " + statsBySql.size());
        for (Map.Entry<String, JdbcSqlStat> e : statsBySql.entrySet()) {
            JdbcSqlStat s = e.getValue();
            if (s.getExecuteCount() <= 0) {
                continue; // never executed — nothing to report
            }
            System.out.printf("SQL: %s%n", e.getKey());
            System.out.printf(" Executions: %d, Errors: %d%n",
                    s.getExecuteCount(), s.getExecuteErrorCount());
            System.out.printf(" Avg Time: %.2fms, Max Time: %dms%n",
                    s.getExecuteMillisMean(), s.getExecuteMillisMax());
        }
    }
}
This comprehensive monitoring framework provides detailed visibility into database performance, connection usage, SQL execution patterns, and system health, enabling effective performance tuning, troubleshooting, and capacity planning.
Install with Tessl CLI
npx tessl i tessl/maven-com-alibaba--druid