or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

Files

docs

batch-operations.md · core-operations.md · dataset.md · index.md · parameters.md · result-processing.md · stored-procedures.md · transactions.md

docs/batch-operations.md

0

# Batch Operations

1

2

High-performance batch processing with automatic batching, configurable batch sizes, and support for both Statement and PreparedStatement batching.

3

4

## Capabilities

5

6

### Automatic Batch Processing

7

8

Execute multiple database operations efficiently using automatic batching with configurable batch sizes.

9

10

```java { .api }

11

// Basic batch operations

12

public Object withBatch(Closure closure) throws SQLException;

13

public Object withBatch(int batchSize, Closure closure) throws SQLException;

14

15

// Prepared statement batching

16

public Object withBatch(String sql, Closure closure) throws SQLException;

17

public Object withBatch(int batchSize, String sql, Closure closure) throws SQLException;

18

19

// Batch state checking

20

public boolean isWithinBatch();

21

```

22

23

**Example**:

24

```groovy

25

// Basic batch with default batch size

26

sql.withBatch { stmt ->

27

// All operations are automatically batched

28

stmt.addBatch("INSERT INTO users (name, email) VALUES ('Alice', 'alice@example.com')")

29

stmt.addBatch("INSERT INTO users (name, email) VALUES ('Bob', 'bob@example.com')")

30

stmt.addBatch("INSERT INTO users (name, email) VALUES ('Charlie', 'charlie@example.com')")

31

// Batch is automatically executed when closure ends

32

}

33

34

// Batch with custom batch size (execute every 100 operations)

35

sql.withBatch(100) { stmt ->

36

1000.times { i ->

37

stmt.addBatch("INSERT INTO test_data (value) VALUES ($i)")

38

// Automatically executes batch every 100 insertions

39

}

40

}

41

42

// Prepared statement batching

43

sql.withBatch('INSERT INTO products (name, price, category) VALUES (?, ?, ?)') { stmt ->

44

products.each { product ->

45

stmt.addBatch([product.name, product.price, product.category])

46

}

47

}

48

49

// Check if currently in batch mode

50

if (sql.isWithinBatch()) {

51

println "Currently processing batch operations"

52

}

53

```

54

55

### Statement Batching

56

57

Use Statement-based batching for dynamic SQL operations with automatic resource management.

58

59

```java { .api }

60

public class BatchingStatementWrapper extends GroovyObjectSupport implements AutoCloseable {

61

public void addBatch(String sql) throws SQLException;

62

public int[] executeBatch() throws SQLException;

63

public void clearBatch() throws SQLException;

64

public void close() throws SQLException;

65

}

66

```

67

68

**Example**:

69

```groovy

70

// Manual Statement batch control

71

sql.withBatch { stmt ->

72

// stmt is a BatchingStatementWrapper

73

74

// Add various SQL statements to batch

75

stmt.addBatch("INSERT INTO audit_log (action, timestamp) VALUES ('login', NOW())")

76

stmt.addBatch('UPDATE user_stats SET login_count = login_count + 1 WHERE user_id = 123')

77

stmt.addBatch('INSERT INTO session_log (user_id, session_start) VALUES (123, NOW())')

78

79

// Manually execute batch before closure ends (optional)

80

int[] results = stmt.executeBatch()

81

println "Batch executed: ${results.length} statements"

82

83

// Add more statements

84

stmt.addBatch('UPDATE users SET last_login = NOW() WHERE id = 123')

85

86

// Clear batch without executing (if needed)

87

// stmt.clearBatch()

88

}

89

90

// Conditional batching

91

sql.withBatch { stmt ->

92

users.each { user ->

93

if (user.active) {

94

stmt.addBatch("INSERT INTO active_users (id, name) VALUES (${user.id}, '${user.name}')")

95

} else {

96

stmt.addBatch("INSERT INTO inactive_users (id, name) VALUES (${user.id}, '${user.name}')")

97

}

98

}

99

}

100

```

101

102

### PreparedStatement Batching

103

104

Use PreparedStatement-based batching for parameterized operations with improved performance and security.

105

106

```java { .api }

107

public class BatchingPreparedStatementWrapper extends BatchingStatementWrapper {

108

public void addBatch(Object[] parameters) throws SQLException;

109

public void addBatch(List<Object> parameters) throws SQLException;

110

}

111

```

112

113

**Example**:

114

```groovy

115

// PreparedStatement batch with parameter arrays

116

sql.withBatch('INSERT INTO orders (customer_id, product_id, quantity, price) VALUES (?, ?, ?, ?)') { stmt ->

117

orders.each { order ->

118

// Add parameters as array

119

stmt.addBatch([order.customerId, order.productId, order.quantity, order.price] as Object[])

120

}

121

}

122

123

// PreparedStatement batch with parameter lists

124

sql.withBatch('UPDATE inventory SET quantity = quantity - ? WHERE product_id = ?') { stmt ->

125

orderItems.each { item ->

126

// Add parameters as list

127

stmt.addBatch([item.quantity, item.productId])

128

}

129

}

130

131

// Mixed parameter types

132

sql.withBatch('INSERT INTO events (user_id, event_type, event_data, created_at) VALUES (?, ?, ?, ?)') { stmt ->

133

events.each { event ->

134

stmt.addBatch([

135

event.userId, // INTEGER

136

event.type, // VARCHAR

137

event.data.toString(), // TEXT/CLOB

138

new Timestamp(event.timestamp) // TIMESTAMP

139

])

140

}

141

}

142

```

143

144

### Large Dataset Processing

145

146

Efficiently process large datasets using batching with memory management and progress tracking.

147

148

**Example**:

149

```groovy

150

// Process large CSV import with batching

151

def processCsvImport(csvFile, batchSize = 1000) {

152

def totalRows = 0

153

def batchCount = 0

154

155

sql.withBatch(batchSize, 'INSERT INTO imported_data (col1, col2, col3, col4) VALUES (?, ?, ?, ?)') { stmt ->

156

csvFile.eachLine { line, lineNumber ->

157

if (lineNumber == 1) return // Skip header

158

159

def columns = line.split(',')

160

stmt.addBatch([

161

columns[0]?.trim(),

162

columns[1]?.trim(),

163

columns[2]?.trim() ?: null,

164

columns[3]?.trim() ?: null

165

])

166

167

totalRows++

168

169

// Progress tracking (the batch itself is executed automatically every batchSize rows)

170

if (totalRows % batchSize == 0) {

171

batchCount++

172

println "Processed batch $batchCount ($totalRows rows so far)"

173

}

174

}

175

}

176

177

println "Import complete: $totalRows rows imported in ${batchCount} batches"

178

}

179

180

// Process with transaction and error handling

181

def batchProcessWithErrorHandling(dataList) {

182

def successCount = 0

183

def errorCount = 0

184

185

sql.withTransaction {

186

sql.withBatch(500, 'INSERT INTO processed_data (data, status, processed_at) VALUES (?, ?, ?)') { stmt ->

187

dataList.each { data ->

188

try {

189

// Validate data before adding to batch

190

if (validateData(data)) {

191

stmt.addBatch([data.content, 'valid', new Timestamp(System.currentTimeMillis())])

192

successCount++

193

} else {

194

// Handle invalid data separately

195

sql.executeUpdate('INSERT INTO error_log (data, error, timestamp) VALUES (?, ?, ?)',

196

[data.content, 'validation_failed', new Timestamp(System.currentTimeMillis())])

197

errorCount++

198

}

199

} catch (Exception e) {

200

println "Error processing data item: ${e.message}"

201

errorCount++

202

}

203

}

204

}

205

}

206

207

println "Processing complete: $successCount successful, $errorCount errors"

208

}

209

```

210

211

### Performance Optimization

212

213

Optimize batch operations for maximum throughput and minimal resource usage.

214

215

**Example**:

216

```groovy

217

// Optimal batch size determination

218

def findOptimalBatchSize(testDataSize = 10000) {

219

def batchSizes = [100, 500, 1000, 2000, 5000]

220

def results = [:]

221

222

batchSizes.each { batchSize ->

223

def startTime = System.currentTimeMillis()

224

225

sql.withBatch(batchSize, 'INSERT INTO performance_test (id, data) VALUES (?, ?)') { stmt ->

226

testDataSize.times { i ->

227

stmt.addBatch([i, "test_data_$i"])

228

}

229

}

230

231

def endTime = System.currentTimeMillis()

232

results[batchSize] = endTime - startTime

233

234

// Cleanup

235

sql.executeUpdate('DELETE FROM performance_test')

236

}

237

238

def optimal = results.min { it.value }

239

println "Optimal batch size: ${optimal.key} (${optimal.value}ms for $testDataSize records)"

240

return optimal.key

241

}

242

243

// Memory-efficient large data processing

244

def processLargeDataset(query, batchSize = 1000) {
    def processedCount = 0
    def offset = 0

    // Page through the source query in fixed-size chunks to avoid
    // loading the whole result set into memory at once
    while (true) {
        def batch = sql.rows(query, offset, batchSize)
        if (batch.isEmpty()) break

        sql.withBatch(batchSize, 'INSERT INTO processed_table (original_id, processed_data) VALUES (?, ?)') { stmt ->
            batch.each { row ->
                def processedData = processRow(row)
                stmt.addBatch([row.id, processedData])
                processedCount++
            }
        }

        offset += batchSize

        // Memory cleanup and progress reporting
        if (processedCount % 10000 == 0) {
            System.gc()
            println "Processed $processedCount rows..."
        }
    }
}

273

```

274

275

### Error Handling and Recovery

276

277

Handle errors in batch operations with proper recovery mechanisms and partial success tracking.

278

279

**Example**:

280

```groovy

281

// Batch with individual error handling

282

def batchWithErrorRecovery(dataList) {

283

def successfulItems = []

284

def failedItems = []

285

286

try {

287

sql.withBatch(100, 'INSERT INTO target_table (data1, data2, data3) VALUES (?, ?, ?)') { stmt ->

288

dataList.each { item ->

289

try {

290

stmt.addBatch([item.data1, item.data2, item.data3])

291

successfulItems << item

292

} catch (Exception e) {

293

println "Failed to add item to batch: ${e.message}"

294

failedItems << [item: item, error: e.message]

295

}

296

}

297

}

298

} catch (SQLException e) {

299

println "Batch execution failed: ${e.message}"

300

301

// Retry items that could not be added to the batch one at a time
// (note: items already queued when executeBatch failed may need separate reconciliation)

302

failedItems.each { failed ->

303

try {

304

sql.executeUpdate('INSERT INTO target_table (data1, data2, data3) VALUES (?, ?, ?)',

305

[failed.item.data1, failed.item.data2, failed.item.data3])

306

successfulItems << failed.item

307

} catch (Exception retryError) {

308

println "Individual retry also failed for item: ${retryError.message}"

309

}

310

}

311

}

312

313

return [successful: successfulItems.size(), failed: dataList.size() - successfulItems.size()]

314

}

315

316

// Transactional batch with rollback

317

def transactionalBatch(operations) {

318

sql.withTransaction {

319

try {

320

sql.withBatch(50) { stmt ->

321

operations.each { operation ->

322

stmt.addBatch(operation.sql)

323

}

324

}

325

println "All batch operations committed successfully"

326

} catch (Exception e) {

327

println "Batch failed, rolling back transaction: ${e.message}"

328

throw e // Re-throw to trigger rollback

329

}

330

}

331

}

332

```