H2 Database Engine - A very fast, open source, JDBC API database with embedded and server modes, transaction support, multi-version concurrency, browser-based console application, encrypted databases, fulltext search, and pure Java implementation with small footprint
—
H2 Database Engine provides comprehensive extension APIs that allow developers to customize and extend database functionality. These include triggers for database events, custom aggregate functions, table engines for alternative storage, event listeners for monitoring, and authentication providers.
Core interface for implementing database triggers that fire on table operations.
/**
 * Core callback interface for database triggers fired on table operations.
 * The operation constants are bit-mask values and may be combined when a
 * trigger is registered for several operations at once.
 */
public interface Trigger {
// Trigger types
int INSERT = 1;
int UPDATE = 2;
int DELETE = 4;
int SELECT = 8;
// Lifecycle methods
// Called once when the trigger is created or loaded; 'before' indicates
// whether it fires before or after the row operation, 'type' is a mask of
// the constants above.
default void init(Connection conn, String schemaName, String triggerName,
String tableName, boolean before, int type) throws SQLException {
// Default implementation - override if needed
}
// Called for each affected row. Convention (see fire() implementations
// below): oldRow == null means INSERT, newRow == null means DELETE.
void fire(Connection conn, Object[] oldRow, Object[] newRow) throws SQLException;
// Called when the database is closed.
default void close() throws SQLException {
// Default implementation - override if needed
}
// Called when the trigger is dropped.
default void remove() throws SQLException {
// Default implementation - override if needed
}
}Usage Examples:
// Audit trigger implementation
// Audit trigger implementation: records every INSERT/UPDATE/DELETE on the
// monitored table into a companion "<table>_audit" table.
public class AuditTrigger implements Trigger {
private String auditTable;
// Captured in init() so fire() can record the real table name instead of
// a hard-coded value.
private String tableName;
@Override
public void init(Connection conn, String schemaName, String triggerName,
String tableName, boolean before, int type) throws SQLException {
this.tableName = tableName;
this.auditTable = tableName + "_audit";
// Create audit table if it doesn't exist
String createAudit = "CREATE TABLE IF NOT EXISTS " + auditTable + " (" +
"audit_id IDENTITY PRIMARY KEY, " +
"operation VARCHAR(10), " +
"table_name VARCHAR(255), " +
"old_values CLOB, " +
"new_values CLOB, " +
"changed_by VARCHAR(255), " +
"changed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP" +
")";
// FIX: the Statement was previously created and never closed (leak).
try (java.sql.Statement stmt = conn.createStatement()) {
stmt.execute(createAudit);
}
}
@Override
public void fire(Connection conn, Object[] oldRow, Object[] newRow) throws SQLException {
// H2 convention: oldRow == null => INSERT, newRow == null => DELETE.
String operation;
if (oldRow == null) {
operation = "INSERT";
} else if (newRow == null) {
operation = "DELETE";
} else {
operation = "UPDATE";
}
// FIX: close the PreparedStatement (was leaked), and log the actual
// table name captured in init() instead of the hard-coded "customers".
try (PreparedStatement stmt = conn.prepareStatement(
"INSERT INTO " + auditTable +
" (operation, table_name, old_values, new_values, changed_by) VALUES (?, ?, ?, ?, ?)")) {
stmt.setString(1, operation);
stmt.setString(2, tableName);
stmt.setString(3, oldRow != null ? Arrays.toString(oldRow) : null);
stmt.setString(4, newRow != null ? Arrays.toString(newRow) : null);
stmt.setString(5, System.getProperty("user.name"));
stmt.executeUpdate();
}
}
}
// Register trigger
Connection conn = DriverManager.getConnection("jdbc:h2:~/test", "sa", "");
conn.createStatement().execute(
"CREATE TRIGGER audit_customers AFTER INSERT, UPDATE, DELETE ON customers " +
"FOR EACH ROW CALL \"com.example.AuditTrigger\"");Abstract adapter class that simplifies trigger implementation.
/**
 * Adapter that splits the single fire() callback into per-operation
 * template methods, so subclasses override only what they need.
 */
public abstract class TriggerAdapter implements Trigger {
// Template methods for specific operations
protected void fireInsert(Connection conn, Object[] newRow) throws SQLException {
// Override in subclass
}
protected void fireUpdate(Connection conn, Object[] oldRow, Object[] newRow) throws SQLException {
// Override in subclass
}
protected void fireDelete(Connection conn, Object[] oldRow) throws SQLException {
// Override in subclass
}
// NOTE(review): fireSelect is never dispatched by fire() below, so it is
// unreachable through this adapter - confirm the intended wiring.
protected void fireSelect(Connection conn, Object[] row) throws SQLException {
// Override in subclass
}
// Dispatches on the null-ness of the row arrays:
// oldRow == null => INSERT, newRow == null => DELETE, both present => UPDATE.
@Override
public final void fire(Connection conn, Object[] oldRow, Object[] newRow) throws SQLException {
if (oldRow == null) {
fireInsert(conn, newRow);
} else if (newRow == null) {
fireDelete(conn, oldRow);
} else {
fireUpdate(conn, oldRow, newRow);
}
}
}Usage Examples:
// Simplified trigger using adapter
// Simplified trigger using adapter: rejects inserts/updates whose email
// column does not contain '@'.
public class ValidationTrigger extends TriggerAdapter {
@Override
protected void fireInsert(Connection conn, Object[] newRow) throws SQLException {
validateRow(newRow);
}
@Override
protected void fireUpdate(Connection conn, Object[] oldRow, Object[] newRow) throws SQLException {
validateRow(newRow);
}
// Throws SQLException (failing the statement) when the email is malformed.
private void validateRow(Object[] row) throws SQLException {
// Assume the SECOND column (index 1) is email - the code checks row[1],
// not the first column as the original comment claimed.
if (row.length > 1 && row[1] != null) {
String email = row[1].toString();
if (!email.contains("@")) {
throw new SQLException("Invalid email format: " + email);
}
}
}
}Interface for creating custom aggregate functions that work with standard SQL types.
/**
 * Contract for user-defined aggregate functions operating on standard
 * JDBC/SQL types (java.sql.Types).
 */
public interface AggregateFunction {
// Lifecycle
// Called once before a new aggregation run starts.
default void init(Connection conn) throws SQLException {
// Default implementation - override if needed
}
// Type information
// Returns the java.sql.Types code of the result for the given input types.
int getType(int[] inputTypes) throws SQLException;
// Aggregation
// Called once per row with the value to fold into the aggregate.
void add(Object value) throws SQLException;
// Returns the final aggregated value.
Object getResult() throws SQLException;
}Usage Examples:
// String concatenation aggregate
// String concatenation aggregate: joins all non-null input values with ",".
public class ConcatAggregate implements AggregateFunction {
// Non-null values collected so far; joined when the result is requested.
private java.util.List<String> parts = new java.util.ArrayList<>();
@Override
public void init(Connection conn) throws SQLException {
// Start a fresh aggregation run.
parts = new java.util.ArrayList<>();
}
@Override
public int getType(int[] inputTypes) throws SQLException {
// The result is always a VARCHAR regardless of the input column types.
return Types.VARCHAR;
}
@Override
public void add(Object value) throws SQLException {
// Null values are skipped, matching SQL aggregate semantics.
if (value != null) {
parts.add(value.toString());
}
}
@Override
public Object getResult() throws SQLException {
return String.join(",", parts);
}
}
// Register and use aggregate
Connection conn = DriverManager.getConnection("jdbc:h2:~/test", "sa", "");
conn.createStatement().execute(
"CREATE AGGREGATE CONCAT_AGG FOR \"com.example.ConcatAggregate\"");
// Use in SQL
ResultSet rs = conn.createStatement().executeQuery(
"SELECT department, CONCAT_AGG(name) as employees FROM staff GROUP BY department");Alternative interface for aggregate functions that work with H2's internal Value types.
/**
 * Alternative aggregate contract that reports its result type using H2's
 * internal type codes (Value constants) instead of java.sql.Types.
 */
public interface Aggregate {
// Lifecycle
default void init(Connection conn) throws SQLException {
// Default implementation
}
// Type information (returns H2 internal type)
int getInternalType(int[] inputTypes) throws SQLException;
// Aggregation
void add(Object value) throws SQLException;
Object getResult() throws SQLException;
}Usage Examples:
// Statistical aggregate using H2 Value types
// Statistical aggregate using H2 Value types: reports count, mean and
// standard deviation of the numeric inputs as a formatted string.
public class StatsAggregate implements Aggregate {
// Raw numeric samples collected during the aggregation run.
private List<Double> values = new ArrayList<>();
@Override
public void init(Connection conn) throws SQLException {
values.clear();
}
@Override
public int getInternalType(int[] inputTypes) throws SQLException {
return Value.VARCHAR; // H2 internal type
}
@Override
public void add(Object value) throws SQLException {
// Non-numeric (and null) inputs are silently ignored.
if (value instanceof Number) {
values.add(((Number) value).doubleValue());
}
}
@Override
public Object getResult() throws SQLException {
if (values.isEmpty()) return "No data";
double sum = values.stream().mapToDouble(Double::doubleValue).sum();
double mean = sum / values.size();
// Population variance (divides by N, not N-1).
double variance = values.stream()
.mapToDouble(v -> Math.pow(v - mean, 2))
.sum() / values.size();
double stdDev = Math.sqrt(variance);
return String.format("Count: %d, Mean: %.2f, StdDev: %.2f",
values.size(), mean, stdDev);
}
}Interface for monitoring database events, exceptions, and progress.
/**
 * Callback interface for observing database lifecycle events, SQL
 * exceptions, and progress of long-running maintenance operations.
 */
public interface DatabaseEventListener extends EventListener {
// Lifecycle events
default void init(String url) {
// Called when listener is registered
}
default void opened() {
// Called when database is opened
}
default void closingDatabase() {
// Called before database closes
}
// Error handling
default void exceptionThrown(SQLException e, String sql) {
// Called when SQL exception occurs
}
// Progress monitoring
// 'state' is one of the STATE_* constants below; x out of max indicates
// how far the operation has progressed.
default void setProgress(int state, String name, long x, long max) {
// Called during long-running operations
}
// State constants
int STATE_SCAN_FILE = 1;
int STATE_CREATE_INDEX = 2;
int STATE_RECOVER = 3;
int STATE_BACKUP_FILE = 4;
int STATE_RECONNECTED = 5;
}Usage Examples:
// Comprehensive database event logger
// Comprehensive database event logger: logs lifecycle events, escalates
// critical errors, and reports progress of long-running operations.
public class DatabaseEventLogger implements DatabaseEventListener {
private static final Logger logger = LoggerFactory.getLogger(DatabaseEventLogger.class);
@Override
public void init(String url) {
logger.info("Database event listener initialized for: {}", url);
}
@Override
public void opened() {
logger.info("Database connection opened");
}
@Override
public void closingDatabase() {
logger.info("Database is closing");
}
@Override
public void exceptionThrown(SQLException e, String sql) {
logger.error("SQL Exception occurred. SQL: {}, Error: {}", sql, e.getMessage(), e);
// Could send alerts, metrics, etc.
if (e.getErrorCode() == ErrorCode.OUT_OF_MEMORY) {
sendCriticalAlert("Database out of memory", e);
}
}
@Override
public void setProgress(int state, String name, long x, long max) {
String operation = getOperationName(state);
double percentage = max > 0 ? (double) x / max * 100 : 0;
// FIX: SLF4J only understands plain "{}" placeholders; the previous
// "{:.1f}" was never interpolated and logged literally. Pre-format
// the percentage instead.
logger.debug("Progress - {}: {} ({}%)", operation, name,
String.format("%.1f", percentage));
// Update monitoring dashboards
updateProgressMetrics(operation, percentage);
}
// Maps a STATE_* constant to a human-readable operation name.
private String getOperationName(int state) {
switch (state) {
case STATE_SCAN_FILE: return "File Scan";
case STATE_CREATE_INDEX: return "Index Creation";
case STATE_RECOVER: return "Recovery";
case STATE_BACKUP_FILE: return "Backup";
case STATE_RECONNECTED: return "Reconnection";
default: return "Unknown Operation";
}
}
// Hook for an external alerting system (implementation elided).
private void sendCriticalAlert(String message, Exception e) {
// Implementation for alerting system
}
// Hook for a metrics/dashboard backend (implementation elided).
private void updateProgressMetrics(String operation, double percentage) {
// Implementation for metrics collection
}
}
// Register event listener
Connection conn = DriverManager.getConnection(
"jdbc:h2:~/test;DATABASE_EVENT_LISTENER='com.example.DatabaseEventLogger'", "sa", "");Interface for custom credential validation logic.
// Validates user credentials during login; return true to accept the user.
public interface CredentialsValidator extends Configurable {
boolean validateCredentials(String userName, String password, String realm) throws Exception;
}Interface for mapping authenticated users to database roles.
// Maps an authenticated user (within a realm) to the database roles to grant.
public interface UserToRolesMapper extends Configurable {
String[] mapUserToRoles(String userName, String realm) throws Exception;
}Base interface for configurable components.
// Receives key/value settings, e.g. parsed from the JDBC connection URL.
public interface Configurable {
void configure(String key, String value);
}Usage Examples:
// LDAP-based credentials validator
// LDAP-based credentials validator: authenticates by attempting a simple
// bind as the user; a successful bind means the credentials are valid.
public class LdapCredentialsValidator implements CredentialsValidator {
private String ldapUrl;
private String baseDn;
// Receives settings from the connection URL (CREDENTIALS_VALIDATOR.key=value).
@Override
public void configure(String key, String value) {
switch (key) {
case "ldapUrl":
this.ldapUrl = value;
break;
case "baseDn":
this.baseDn = value;
break;
}
}
@Override
public boolean validateCredentials(String userName, String password, String realm) throws Exception {
// SECURITY FIX: an empty password makes many LDAP servers perform an
// anonymous/unauthenticated bind that "succeeds", which would let any
// user in without a password. Reject blank credentials up front.
if (userName == null || userName.isEmpty() || password == null || password.isEmpty()) {
return false;
}
// NOTE(review): userName is concatenated into the DN unescaped; ensure
// it cannot contain LDAP DN metacharacters (, + " \ < > ;) or escape it.
Hashtable<String, String> env = new Hashtable<>();
env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
env.put(Context.PROVIDER_URL, ldapUrl);
env.put(Context.SECURITY_AUTHENTICATION, "simple");
env.put(Context.SECURITY_PRINCIPAL, "uid=" + userName + "," + baseDn);
env.put(Context.SECURITY_CREDENTIALS, password);
javax.naming.directory.DirContext ctx = null;
try {
ctx = new InitialDirContext(env);
return true; // Authentication successful
} catch (AuthenticationException e) {
return false; // Authentication failed
} finally {
// FIX: close the context (was previously leaked on success).
if (ctx != null) {
ctx.close();
}
}
}
}
// Role mapper based on user attributes
// Role mapper driven by a configured user->roles table.
public class AttributeBasedRoleMapper implements UserToRolesMapper {
private Map<String, String[]> userRoles = new HashMap<>();
// Parses configuration of the form "user1=role1,role2;user2=role3"
// into the userRoles lookup table.
@Override
public void configure(String key, String value) {
for (String entry : value.split(";")) {
String[] kv = entry.split("=");
if (kv.length != 2) {
continue; // malformed entry - ignored, as before
}
String[] roles = kv[1].split(",");
for (int i = 0; i < roles.length; i++) {
roles[i] = roles[i].trim();
}
userRoles.put(kv[0].trim(), roles);
}
}
// Unknown users fall back to the PUBLIC role.
@Override
public String[] mapUserToRoles(String userName, String realm) throws Exception {
return userRoles.getOrDefault(userName, new String[]{"PUBLIC"});
}
}
// Connection with custom authentication
String url = "jdbc:h2:~/secure;" +
"CREDENTIALS_VALIDATOR='com.example.LdapCredentialsValidator';" +
"CREDENTIALS_VALIDATOR.ldapUrl='ldap://localhost:389';" +
"CREDENTIALS_VALIDATOR.baseDn='ou=users,dc=example,dc=com';" +
"USER_TO_ROLES_MAPPER='com.example.AttributeBasedRoleMapper';" +
// NOTE(review): this line reassigns USER_TO_ROLES_MAPPER instead of using a
// ".property" key like the validator settings above - probably intended as
// something like USER_TO_ROLES_MAPPER.roles='...'; confirm the expected key.
"USER_TO_ROLES_MAPPER=admin=DBA,ADMIN;user1=SELECT_ROLE,INSERT_ROLE";
Connection conn = DriverManager.getConnection(url, "admin", "password");Interface for implementing custom table storage engines.
// Factory for tables backed by a custom storage engine.
public interface TableEngine {
// Creates the table described by 'data' (H2-internal CreateTableData).
TableBase createTable(CreateTableData data);
}This is an advanced extension point typically used for integrating external storage systems.
Usage Examples:
// Simple in-memory table engine
// Simple in-memory table engine
public class MemoryTableEngine implements TableEngine {
@Override
public TableBase createTable(CreateTableData data) {
return new MemoryTable(data);
}
// NOTE(review): TableBase/CreateTableData/Row are H2-internal types; this
// sketch omits the many abstract methods a real TableBase subclass must
// implement, so it will not compile as-is.
private static class MemoryTable extends TableBase {
private final List<Row> rows = new ArrayList<>();
public MemoryTable(CreateTableData data) {
// Initialize table structure
}
// Implement required table operations
// This is a simplified example - full implementation would be extensive
}
}
// Register table engine
conn.createStatement().execute(
"CREATE TABLE memory_table (...) ENGINE \"com.example.MemoryTableEngine\"");Interface for custom serialization of Java objects stored in the database.
// Custom (de)serialization for Java objects stored in the database.
public interface JavaObjectSerializer {
byte[] serialize(Object obj) throws Exception;
Object deserialize(byte[] bytes) throws Exception;
}Usage Examples:
// JSON-based object serializer
// JSON-based object serializer
public class JsonObjectSerializer implements JavaObjectSerializer {
// Jackson mapper, reused for all calls.
private final ObjectMapper objectMapper = new ObjectMapper();
@Override
public byte[] serialize(Object obj) throws Exception {
return objectMapper.writeValueAsBytes(obj);
}
@Override
public Object deserialize(byte[] bytes) throws Exception {
// NOTE(review): deserializing to Object.class yields generic Maps/Lists/
// primitives, not the originally stored class - confirm this round-trip
// is acceptable for callers.
return objectMapper.readValue(bytes, Object.class);
}
}
// Use custom serializer
String url = "jdbc:h2:~/test;JAVA_OBJECT_SERIALIZER='com.example.JsonObjectSerializer'";
Connection conn = DriverManager.getConnection(url, "sa", "");
// Store objects that will use custom serialization
PreparedStatement stmt = conn.prepareStatement("INSERT INTO objects (data) VALUES (?)");
stmt.setObject(1, new MyCustomObject());
stmt.executeUpdate();public class ExtensionManager {
// Registers the example extensions via DDL.
// NOTE(review): the Statements created here are never closed - prefer
// try-with-resources in production code.
public static void registerExtensions(Connection conn) throws SQLException {
// Register triggers
conn.createStatement().execute(
"CREATE TRIGGER audit_users AFTER INSERT, UPDATE, DELETE ON users " +
"FOR EACH ROW CALL \"com.example.AuditTrigger\"");
// Register aggregates
conn.createStatement().execute(
"CREATE AGGREGATE CONCAT_AGG FOR \"com.example.ConcatAggregate\"");
// Register functions (if using function interface)
conn.createStatement().execute(
"CREATE ALIAS CUSTOM_HASH FOR \"com.example.CustomHashFunction.hash\"");
}
// Drops everything registerExtensions created; IF EXISTS makes it idempotent.
public static void unregisterExtensions(Connection conn) throws SQLException {
conn.createStatement().execute("DROP TRIGGER IF EXISTS audit_users");
conn.createStatement().execute("DROP AGGREGATE IF EXISTS CONCAT_AGG");
conn.createStatement().execute("DROP ALIAS IF EXISTS CUSTOM_HASH");
}
}Extensions can also be registered via connection parameters:
// Multiple extensions via connection URL
String url = "jdbc:h2:~/test;" +
"DATABASE_EVENT_LISTENER='com.example.DatabaseEventLogger';" +
"CREDENTIALS_VALIDATOR='com.example.LdapCredentialsValidator';" +
"USER_TO_ROLES_MAPPER='com.example.AttributeBasedRoleMapper';" +
"JAVA_OBJECT_SERIALIZER='com.example.JsonObjectSerializer'";
Connection conn = DriverManager.getConnection(url, "sa", "");public class RobustTrigger implements Trigger {
private static final Logger logger = LoggerFactory.getLogger(RobustTrigger.class);
// Logs every failure and rethrows as SQLException so the surrounding
// transaction still fails - triggers must not swallow errors silently.
@Override
public void fire(Connection conn, Object[] oldRow, Object[] newRow) throws SQLException {
try {
performTriggerLogic(conn, oldRow, newRow);
} catch (SQLException e) {
logger.error("Trigger execution failed", e);
throw e; // Re-throw to fail the transaction
} catch (Exception e) {
logger.error("Unexpected error in trigger", e);
throw new SQLException("Trigger failed due to unexpected error", e);
}
}
// Placeholder for the real per-row work.
private void performTriggerLogic(Connection conn, Object[] oldRow, Object[] newRow) throws SQLException {
// Actual trigger implementation
}
}
// Aggregate that defers the expensive result computation until it is
// actually requested, caching it between calls.
public class PerformantAggregate implements AggregateFunction {
private final List<Object> values = new ArrayList<>();
private Object cachedResult;
private boolean resultValid = false;
// FIX: AggregateFunction declares getType(int[]) abstract; without this
// method the class would not compile. The string result maps to VARCHAR.
@Override
public int getType(int[] inputTypes) throws SQLException {
return java.sql.Types.VARCHAR;
}
@Override
public void add(Object value) throws SQLException {
values.add(value);
resultValid = false; // Invalidate cache
}
@Override
public Object getResult() throws SQLException {
// Recompute only when new values arrived since the last call.
if (!resultValid) {
cachedResult = computeResult();
resultValid = true;
}
return cachedResult;
}
// Expensive computation only when needed: renders the non-null values
// as a list-style string.
private Object computeResult() {
return values.stream()
.filter(Objects::nonNull)
.collect(Collectors.toList())
.toString();
}
}public class ThreadSafeEventListener implements DatabaseEventListener {
// Total number of exception events seen. NOTE(review): despite the general
// name, only exceptionThrown() increments this counter.
private final AtomicLong eventCount = new AtomicLong(0);
// Per-exception-class counters, keyed by simple class name.
private final ConcurrentHashMap<String, AtomicLong> errorCounts = new ConcurrentHashMap<>();
@Override
public void exceptionThrown(SQLException e, String sql) {
eventCount.incrementAndGet();
String errorType = e.getClass().getSimpleName();
errorCounts.computeIfAbsent(errorType, k -> new AtomicLong(0)).incrementAndGet();
// Thread-safe logging and monitoring
logError(e, sql);
}
// Placeholder for a logging backend safe to call from multiple threads.
private void logError(SQLException e, String sql) {
// Thread-safe logging implementation
}
public long getEventCount() {
return eventCount.get();
}
// Snapshot of the error counters as plain longs.
public Map<String, Long> getErrorCounts() {
return errorCounts.entrySet().stream()
.collect(Collectors.toMap(
Map.Entry::getKey,
entry -> entry.getValue().get()));
}
}Install with Tessl CLI
npx tessl i tessl/maven-com-h2database--h2