Higher-level abstraction over Java's JDBC technology providing simplified database operations with resource management
—
High-performance batch processing with automatic batching, configurable batch sizes, and support for both Statement and PreparedStatement batching.
Execute multiple database operations efficiently using automatic batching with configurable batch sizes.
// Basic batch operations
public Object withBatch(Closure closure) throws SQLException;
public Object withBatch(int batchSize, Closure closure) throws SQLException;
// Prepared statement batching
public Object withBatch(String sql, Closure closure) throws SQLException;
public Object withBatch(int batchSize, String sql, Closure closure) throws SQLException;
// Batch state checking
public boolean isWithinBatch();

Example:
// Basic batch with default batch size
sql.withBatch { stmt ->
// All operations are automatically batched
stmt.addBatch("INSERT INTO users (name, email) VALUES ('Alice', 'alice@example.com')")
stmt.addBatch("INSERT INTO users (name, email) VALUES ('Bob', 'bob@example.com')")
stmt.addBatch("INSERT INTO users (name, email) VALUES ('Charlie', 'charlie@example.com')")
// Batch is automatically executed when closure ends
}
// Batch with custom batch size (execute every 100 operations)
sql.withBatch(100) { stmt ->
1000.times { i ->
stmt.addBatch("INSERT INTO test_data (value) VALUES ($i)")
// Automatically executes batch every 100 insertions
}
}
// Prepared statement batching
sql.withBatch('INSERT INTO products (name, price, category) VALUES (?, ?, ?)') { stmt ->
products.each { product ->
stmt.addBatch([product.name, product.price, product.category])
}
}
// Check if currently in batch mode
if (sql.isWithinBatch()) {
println "Currently processing batch operations"
}

Use Statement-based batching for dynamic SQL operations with automatic resource management.
public class BatchingStatementWrapper extends GroovyObjectSupport implements AutoCloseable {
public void addBatch(String sql) throws SQLException;
public int[] executeBatch() throws SQLException;
public void clearBatch() throws SQLException;
public void close() throws SQLException;
}

Example:
// Manual Statement batch control
sql.withBatch { stmt ->
// stmt is a BatchingStatementWrapper
// Add various SQL statements to batch
stmt.addBatch("INSERT INTO audit_log (action, timestamp) VALUES ('login', NOW())")
stmt.addBatch('UPDATE user_stats SET login_count = login_count + 1 WHERE user_id = 123')
stmt.addBatch('INSERT INTO session_log (user_id, session_start) VALUES (123, NOW())')
// Manually execute batch before closure ends (optional)
int[] results = stmt.executeBatch()
println "Batch executed: ${results.length} statements"
// Add more statements
stmt.addBatch('UPDATE users SET last_login = NOW() WHERE id = 123')
// Clear batch without executing (if needed)
// stmt.clearBatch()
}
// Conditional batching
// NOTE: interpolating values directly into SQL strings is vulnerable to SQL
// injection — prefer the prepared statement form withBatch(sql) { ... } for
// any data that is not fully trusted.
sql.withBatch { stmt ->
users.each { user ->
if (user.active) {
stmt.addBatch("INSERT INTO active_users (id, name) VALUES (${user.id}, '${user.name}')")
} else {
stmt.addBatch("INSERT INTO inactive_users (id, name) VALUES (${user.id}, '${user.name}')")
}
}
}

Use PreparedStatement-based batching for parameterized operations with improved performance and security.
public class BatchingPreparedStatementWrapper extends BatchingStatementWrapper {
public void addBatch(Object[] parameters) throws SQLException;
public void addBatch(List<Object> parameters) throws SQLException;
}

Example:
// PreparedStatement batch with parameter arrays
sql.withBatch('INSERT INTO orders (customer_id, product_id, quantity, price) VALUES (?, ?, ?, ?)') { stmt ->
orders.each { order ->
// Add parameters as array
stmt.addBatch([order.customerId, order.productId, order.quantity, order.price] as Object[])
}
}
// PreparedStatement batch with parameter lists
sql.withBatch('UPDATE inventory SET quantity = quantity - ? WHERE product_id = ?') { stmt ->
orderItems.each { item ->
// Add parameters as list
stmt.addBatch([item.quantity, item.productId])
}
}
// Mixed parameter types
sql.withBatch('INSERT INTO events (user_id, event_type, event_data, created_at) VALUES (?, ?, ?, ?)') { stmt ->
events.each { event ->
stmt.addBatch([
event.userId, // INTEGER
event.type, // VARCHAR
event.data.toString(), // TEXT/CLOB
new Timestamp(event.timestamp) // TIMESTAMP
])
}
}

Efficiently process large datasets using batching with memory management and progress tracking.
Example:
// Process large CSV import with batching
def processCsvImport(csvFile, batchSize = 1000) {
def totalRows = 0
def batchCount = 0
sql.withBatch(batchSize, 'INSERT INTO imported_data (col1, col2, col3, col4) VALUES (?, ?, ?, ?)') { stmt ->
csvFile.eachLine { line, lineNumber ->
if (lineNumber == 1) return // Skip header
def columns = line.split(',')
stmt.addBatch([
columns[0]?.trim(),
columns[1]?.trim(),
columns[2]?.trim() ?: null,
columns[3]?.trim() ?: null
])
totalRows++
// Progress tracking (executed automatically every batchSize)
if (totalRows % batchSize == 0) {
batchCount++
println "Processed batch $batchCount ($totalRows rows so far)"
}
}
}
println "Import complete: $totalRows rows imported in ${batchCount} batches"
}
// Process with transaction and error handling
def batchProcessWithErrorHandling(dataList) {
def successCount = 0
def errorCount = 0
sql.withTransaction {
sql.withBatch(500, 'INSERT INTO processed_data (data, status, processed_at) VALUES (?, ?, ?)') { stmt ->
dataList.each { data ->
try {
// Validate data before adding to batch
if (validateData(data)) {
stmt.addBatch([data.content, 'valid', new Timestamp(System.currentTimeMillis())])
successCount++
} else {
// Handle invalid data separately
sql.executeUpdate('INSERT INTO error_log (data, error, timestamp) VALUES (?, ?, ?)',
[data.content, 'validation_failed', new Timestamp(System.currentTimeMillis())])
errorCount++
}
} catch (Exception e) {
println "Error processing data item: ${e.message}"
errorCount++
}
}
}
}
println "Processing complete: $successCount successful, $errorCount errors"
}

Optimize batch operations for maximum throughput and minimal resource usage.
Example:
// Optimal batch size determination
def findOptimalBatchSize(testDataSize = 10000) {
def batchSizes = [100, 500, 1000, 2000, 5000]
def results = [:]
batchSizes.each { batchSize ->
def startTime = System.currentTimeMillis()
sql.withBatch(batchSize, 'INSERT INTO performance_test (id, data) VALUES (?, ?)') { stmt ->
testDataSize.times { i ->
stmt.addBatch([i, "test_data_$i"])
}
}
def endTime = System.currentTimeMillis()
results[batchSize] = endTime - startTime
// Cleanup
sql.executeUpdate('DELETE FROM performance_test')
}
def optimal = results.min { it.value }
println "Optimal batch size: ${optimal.key} (${optimal.value}ms for $testDataSize records)"
return optimal.key
}
// Memory-efficient large data processing
def processLargeDataset(query, batchSize = 1000) {
def processedCount = 0
def offset = 0
// Page through the source query in chunks to avoid loading everything into memory
while (true) {
def batch = sql.rows(query, offset, batchSize)
if (batch.isEmpty()) break
sql.withBatch(batchSize, 'INSERT INTO processed_table (original_id, processed_data) VALUES (?, ?)') { stmt ->
batch.each { row ->
def processedData = processRow(row)
stmt.addBatch([row.id, processedData])
processedCount++
}
}
offset += batchSize
// Progress tracking
if (processedCount % 10000 == 0) {
println "Processed $processedCount rows..."
}
}
}

Handle errors in batch operations with proper recovery mechanisms and partial success tracking.
Example:
// Batch with individual error handling
def batchWithErrorRecovery(dataList) {
def successfulItems = []
def failedItems = []
try {
sql.withBatch(100, 'INSERT INTO target_table (data1, data2, data3) VALUES (?, ?, ?)') { stmt ->
dataList.each { item ->
try {
stmt.addBatch([item.data1, item.data2, item.data3])
successfulItems << item
} catch (Exception e) {
println "Failed to add item to batch: ${e.message}"
failedItems << [item: item, error: e.message]
}
}
}
} catch (SQLException e) {
println "Batch execution failed: ${e.message}"
// Retry failed items individually
failedItems.each { failed ->
try {
sql.executeUpdate('INSERT INTO target_table (data1, data2, data3) VALUES (?, ?, ?)',
[failed.item.data1, failed.item.data2, failed.item.data3])
successfulItems << failed.item
} catch (Exception retryError) {
println "Individual retry also failed for item: ${retryError.message}"
}
}
}
return [successful: successfulItems.size(), failed: dataList.size() - successfulItems.size()]
}
// Transactional batch with rollback
def transactionalBatch(operations) {
sql.withTransaction {
try {
sql.withBatch(50) { stmt ->
operations.each { operation ->
stmt.addBatch(operation.sql)
}
}
println "All batch operations committed successfully"
} catch (Exception e) {
println "Batch failed, rolling back transaction: ${e.message}"
throw e // Re-throw to trigger rollback
}
}
}

Install with Tessl CLI
npx tessl i tessl/maven-org-apache-groovy--groovy-sql