Batch operations and object-oriented JDBC abstractions that package SQL statements as reusable, thread-safe operation objects.
Batching sends many parameter sets for one SQL statement to the database together, avoiding a network round-trip per row.
List<Object[]> batchArgs = List.of(
    new Object[]{"John", "john@example.com", "active"},
    new Object[]{"Jane", "jane@example.com", "active"},
    new Object[]{"Bob", "bob@example.com", "pending"}
);
int[] updateCounts = jdbc.batchUpdate(
    "INSERT INTO users (name, email, status) VALUES (?, ?, ?)",
    batchArgs
);
For full control over how each row is bound, implement BatchPreparedStatementSetter.
List<User> users = getUsers();
int[] updateCounts = jdbc.batchUpdate(
    "INSERT INTO users (name, email, status) VALUES (?, ?, ?)",
    new BatchPreparedStatementSetter() {
        public void setValues(PreparedStatement ps, int i) throws SQLException {
            User user = users.get(i);
            ps.setString(1, user.getName());
            ps.setString(2, user.getEmail());
            ps.setString(3, user.getStatus());
        }
        public int getBatchSize() {
            return users.size();
        }
    }
);
Automatic chunking into batches with ParameterizedPreparedStatementSetter.
List<User> users = getUsers();
int[][] updateCounts = jdbc.batchUpdate(
    "INSERT INTO users (name, email) VALUES (?, ?)",
    users,
    100, // batch size: 100 rows per batch
    (ps, user) -> {
        ps.setString(1, user.getName());
        ps.setString(2, user.getEmail());
    }
);
// updateCounts[i] contains the results for batch i
int totalRows = Arrays.stream(updateCounts).mapToInt(batch -> batch.length).sum();
Named parameter batch updates with NamedParameterJdbcTemplate.
// Map array
Map<String, Object>[] batchValues = new Map[]{
    Map.of("name", "User1", "email", "user1@example.com"),
    Map.of("name", "User2", "email", "user2@example.com")
};
int[] counts = namedTemplate.batchUpdate(
    "INSERT INTO users (name, email) VALUES (:name, :email)",
    batchValues
);
// SqlParameterSource array
// createBatch wraps each bean (or Map) so its properties supply the named parameters
SqlParameterSource[] batch = SqlParameterSourceUtils.createBatch(users.toArray());
counts = namedTemplate.batchUpdate(
    "INSERT INTO users (name, email) VALUES (:name, :email)",
    batch
);
Manual chunking keeps memory bounded for very large data sets.
List<User> allUsers = getLargeUserList(); // 100,000 users
int batchSize = 1000;
for (int i = 0; i < allUsers.size(); i += batchSize) {
    int end = Math.min(i + batchSize, allUsers.size());
    List<User> chunk = allUsers.subList(i, end);
    List<Object[]> batchArgs = chunk.stream()
        .map(u -> new Object[]{u.getName(), u.getEmail(), u.getStatus()})
        .collect(Collectors.toList());
    jdbc.batchUpdate(
        "INSERT INTO users (name, email, status) VALUES (?, ?, ?)",
        batchArgs
    );
    logger.info("Processed {}/{}", end, allUsers.size());
}
Each chunk above commits independently; wrap the loop in a transaction to make the whole import atomic.
@Transactional
public void importUsers(List<User> users) {
    int batchSize = 1000;
    for (int i = 0; i < users.size(); i += batchSize) {
        int end = Math.min(i + batchSize, users.size());
        List<User> chunk = users.subList(i, end);
        SqlParameterSource[] batch = SqlParameterSourceUtils.createBatch(chunk.toArray());
        namedTemplate.batchUpdate(
            "INSERT INTO users (name, email, status) VALUES (:name, :email, :status)",
            batch
        );
    }
    // All batches committed together or rolled back on error
}
Handle failures through the per-row update counts and the underlying BatchUpdateException.
try {
    int[] updateCounts = jdbc.batchUpdate(
        "INSERT INTO users (name, email) VALUES (?, ?)",
        batchArgs
    );
    // Check individual results
    for (int i = 0; i < updateCounts.length; i++) {
        if (updateCounts[i] == Statement.EXECUTE_FAILED) {
            logger.error("Failed to insert row {}", i);
        }
    }
} catch (DataAccessException e) {
    if (e.getCause() instanceof BatchUpdateException) {
        BatchUpdateException bue = (BatchUpdateException) e.getCause();
        int[] updateCounts = bue.getUpdateCounts();
        // Process partial results: these are the per-row counts up to the failure
        logger.error("Batch failed after {} rows", updateCounts.length);
    }
    throw e;
}
Stop a batch early based on a condition.
int[] counts = jdbc.batchUpdate(
    "INSERT INTO users (name, email) VALUES (?, ?)",
    new InterruptibleBatchPreparedStatementSetter() {
        public void setValues(PreparedStatement ps, int i) throws SQLException {
            User user = users.get(i);
            ps.setString(1, user.getName());
            ps.setString(2, user.getEmail());
        }
        public int getBatchSize() {
            return users.size();
        }
        // Called after setValues(ps, i); returning true discards row i and executes the batch
        public boolean isBatchExhausted(int i) {
            return "invalid".equals(users.get(i).getStatus()); // stop on first invalid row
        }
    }
);
Reusable, thread-safe SQL operation objects.
import org.springframework.jdbc.object.MappingSqlQuery;
public class UserQuery extends MappingSqlQuery<User> {
    public UserQuery(DataSource ds) {
        super(ds, "SELECT id, name, email FROM users WHERE status = ?");
        declareParameter(new SqlParameter("status", Types.VARCHAR));
        compile();
    }
    protected User mapRow(ResultSet rs, int rowNum) throws SQLException {
        return new User(
            rs.getLong("id"),
            rs.getString("name"),
            rs.getString("email")
        );
    }
}
// Usage - thread-safe, reusable
UserQuery userQuery = new UserQuery(dataSource);
List<User> activeUsers = userQuery.execute("active");
User singleUser = userQuery.findObject("active"); // expects at most one matching row
SqlUpdate encapsulates a single update statement.
import org.springframework.jdbc.object.SqlUpdate;
public class UserUpdate extends SqlUpdate {
    public UserUpdate(DataSource ds) {
        super(ds, "UPDATE users SET status = ? WHERE id = ?");
        declareParameter(new SqlParameter("status", Types.VARCHAR));
        declareParameter(new SqlParameter("id", Types.BIGINT));
        compile();
    }
}
// Usage
UserUpdate userUpdate = new UserUpdate(dataSource);
int rows = userUpdate.update("inactive", 123L);
SqlUpdate can also return database-generated keys.
public class UserInsert extends SqlUpdate {
    public UserInsert(DataSource ds) {
        super(ds, "INSERT INTO users (name, email, status) VALUES (?, ?, ?)");
        declareParameter(new SqlParameter("name", Types.VARCHAR));
        declareParameter(new SqlParameter("email", Types.VARCHAR));
        declareParameter(new SqlParameter("status", Types.VARCHAR));
        setReturnGeneratedKeys(true);
        setGeneratedKeysColumnNames("id");
        compile();
    }
}
// Usage
UserInsert userInsert = new UserInsert(dataSource);
KeyHolder keyHolder = new GeneratedKeyHolder();
userInsert.update(
    new Object[]{"John", "john@example.com", "active"},
    keyHolder
);
Long generatedId = keyHolder.getKey().longValue();
BatchSqlUpdate queues parameter sets and executes them in batches.
import org.springframework.jdbc.object.BatchSqlUpdate;
public class UserBatchInsert extends BatchSqlUpdate {
    public UserBatchInsert(DataSource ds) {
        super(ds, "INSERT INTO users (name, email, status) VALUES (?, ?, ?)");
        declareParameter(new SqlParameter("name", Types.VARCHAR));
        declareParameter(new SqlParameter("email", Types.VARCHAR));
        declareParameter(new SqlParameter("status", Types.VARCHAR));
        setBatchSize(100);
        compile();
    }
}
// Usage
UserBatchInsert batchInsert = new UserBatchInsert(dataSource);
for (User user : users) {
    batchInsert.update(user.getName(), user.getEmail(), user.getStatus());
}
// Flush the final partial batch; full batches execute automatically once batchSize is reached
int[] lastCounts = batchInsert.flush();
int totalRows = Arrays.stream(batchInsert.getRowsAffected()).sum();
// Reset accumulated state before reusing the instance
batchInsert.reset();
StoredProcedure wraps a stored procedure call in an operation object.
import org.springframework.jdbc.object.StoredProcedure;
public class CreateUserProcedure extends StoredProcedure {
    public CreateUserProcedure(DataSource ds) {
        super(ds, "create_user");
        declareParameter(new SqlParameter("name", Types.VARCHAR));
        declareParameter(new SqlParameter("email", Types.VARCHAR));
        declareParameter(new SqlOutParameter("user_id", Types.INTEGER));
        compile();
    }
    public Long createUser(String name, String email) {
        Map<String, Object> in = Map.of("name", name, "email", email);
        Map<String, Object> out = execute(in);
        return ((Number) out.get("user_id")).longValue();
    }
}
// Usage
CreateUserProcedure proc = new CreateUserProcedure(dataSource);
Long userId = proc.createUser("John", "john@example.com");
Combining operation objects in a DAO.
@Repository
public class UserDao {
    private final UserQuery findByStatus;
    private final UserUpdate updateStatus;
    private final UserBatchInsert batchInsert;
    public UserDao(DataSource dataSource) {
        this.findByStatus = new UserQuery(dataSource);
        this.updateStatus = new UserUpdate(dataSource);
        this.batchInsert = new UserBatchInsert(dataSource);
    }
    public List<User> getActiveUsers() {
        return findByStatus.execute("active");
    }
    public void deactivateUser(Long userId) {
        updateStatus.update("inactive", userId);
    }
    public void bulkInsertUsers(List<User> users) {
        // Caution: BatchSqlUpdate holds mutable batch state and is not thread-safe;
        // synchronize here or build a fresh instance per call if this DAO is shared
        for (User user : users) {
            batchInsert.update(user.getName(), user.getEmail(), user.getStatus());
        }
        batchInsert.flush();
    }
}
Operation objects can also be defined as Spring beans.
@Configuration
public class SqlObjectConfig {
    @Bean
    public UserQuery userQuery(DataSource dataSource) {
        return new UserQuery(dataSource);
    }
    @Bean
    public UserUpdate userUpdate(DataSource dataSource) {
        return new UserUpdate(dataSource);
    }
    @Bean
    public BatchSqlUpdate userBatchInsert(DataSource dataSource) {
        // Note: a singleton BatchSqlUpdate bean is stateful; see the thread-safety notes below
        BatchSqlUpdate batch = new BatchSqlUpdate();
        batch.setDataSource(dataSource);
        batch.setSql("INSERT INTO users (name, email) VALUES (?, ?)");
        batch.declareParameter(new SqlParameter("name", Types.VARCHAR));
        batch.declareParameter(new SqlParameter("email", Types.VARCHAR));
        batch.setBatchSize(100);
        batch.compile();
        return batch;
    }
}
Externalize the batch size to make it tunable per environment.
@Configuration
public class BatchConfig {
    @Value("${app.batch.size:1000}")
    private int batchSize;
    @Bean
    public BatchProcessor batchProcessor(JdbcTemplate jdbcTemplate) {
        BatchProcessor processor = new BatchProcessor(jdbcTemplate);
        processor.setBatchSize(batchSize);
        return processor;
    }
}
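The app.batch.size property can then be tuned per environment without touching code; for example, assuming standard Spring Boot property loading, in application.properties (the value here is just an example):
app.batch.size=5000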
public class BatchProcessor {
    private final JdbcTemplate jdbc;
    private int batchSize = 1000;
    public BatchProcessor(JdbcTemplate jdbc) {
        this.jdbc = jdbc;
    }
    public void setBatchSize(int batchSize) {
        this.batchSize = batchSize;
    }
    public void processLargeDataset(List<User> users) {
        for (int i = 0; i < users.size(); i += batchSize) {
            int end = Math.min(i + batchSize, users.size());
            List<User> chunk = users.subList(i, end);
            processBatch(chunk);
        }
    }
    private void processBatch(List<User> batch) {
        // Process batch, for example:
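        // Illustrative body: map each user to a parameter array and batch-insert the chunk
        // (the INSERT is an example; substitute the real per-batch work)
        List<Object[]> args = batch.stream()
                .map(u -> new Object[]{u.getName(), u.getEmail()})
                .collect(Collectors.toList());
        jdbc.batchUpdate("INSERT INTO users (name, email) VALUES (?, ?)", args);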
    }
}
Batch size guidelines:
- Start around 100-1000 rows per batch and measure; the best value depends on row size, driver, and network latency.
- Too small wastes round-trips; too large increases memory use and transaction/lock duration.
Performance gains:
- The benefit comes mainly from eliminating a network round-trip per row; for bulk inserts, an order-of-magnitude speedup over row-by-row statements is common, though the exact figure is workload-dependent.
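A rough way to pick a size is to time a chunked run at several candidates. This is a hypothetical sketch (method name is illustrative; table and columns are from the earlier examples):
long timeBatchInsert(JdbcTemplate jdbc, List<Object[]> rows, int batchSize) {
    long start = System.nanoTime();
    for (int i = 0; i < rows.size(); i += batchSize) {
        List<Object[]> chunk = rows.subList(i, Math.min(i + batchSize, rows.size()));
        jdbc.batchUpdate("INSERT INTO users (name, email) VALUES (?, ?)", chunk);
    }
    return (System.nanoTime() - start) / 1_000_000; // elapsed milliseconds
}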
Memory considerations:
// Bad: load everything into memory
List<User> allUsers = repository.findAll(); // 1M rows
jdbc.batchUpdate(sql, toBatchArgs(allUsers)); // toBatchArgs (not shown) maps each User to an Object[]
// Good: page through the data in chunks
int offset = 0;
int limit = 1000;
while (true) {
    List<User> chunk = repository.findChunk(offset, limit);
    if (chunk.isEmpty()) break;
    jdbc.batchUpdate(sql, toBatchArgs(chunk));
    offset += limit;
}
Database-specific optimizations:
// PostgreSQL: COPY for maximum performance
@Component
public class PostgresCopyHelper {
    public void bulkInsert(List<User> users, DataSource dataSource) throws SQLException, IOException {
        try (Connection conn = dataSource.getConnection()) {
            // unwrap() reaches the driver connection even through pool proxies, unlike a direct cast
            CopyManager copyManager = conn.unwrap(PGConnection.class).getCopyAPI();
            StringReader reader = createCsvData(users); // builds "name,email" CSV rows (not shown)
            copyManager.copyIn("COPY users (name, email) FROM STDIN WITH CSV", reader);
        }
    }
}
// MySQL: LOAD DATA INFILE (requires allowLoadLocalInfile=true on the driver)
jdbc.execute("LOAD DATA LOCAL INFILE '/path/to/data.csv' INTO TABLE users FIELDS TERMINATED BY ','");
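Also for MySQL: Connector/J sends batched INSERTs one statement at a time unless told to rewrite them into multi-row statements. Enabling that is a one-line URL change; host and database below are placeholders, shown with Spring Boot's datasource property:
spring.datasource.url=jdbc:mysql://localhost:3306/app?rewriteBatchedStatements=true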
Thread-safe (after compilation):
- MappingSqlQuery, SqlUpdate, and StoredProcedure: once compile() has run, their execute/update methods share no mutable state.
Not thread-safe:
- BatchSqlUpdate: it accumulates queued parameter sets between flushes, so confine each instance to a single thread or synchronize access.
Best practices:
- Call compile() in the constructor so instances are fully initialized before they are shared.
- Declare parameters in the order they appear in the SQL.
- Build operation objects once and reuse them rather than recreating them per call.
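Because BatchSqlUpdate is the exception to the reuse rule, one safe pattern is a short-lived instance per operation. A minimal sketch, assuming the same users table as above:
public void bulkInsert(DataSource dataSource, List<User> users) {
    // Fresh instance per call: queued batch state never crosses threads
    BatchSqlUpdate insert = new BatchSqlUpdate(dataSource,
            "INSERT INTO users (name, email) VALUES (?, ?)");
    insert.declareParameter(new SqlParameter("name", Types.VARCHAR));
    insert.declareParameter(new SqlParameter("email", Types.VARCHAR));
    insert.setBatchSize(100);
    insert.compile();
    for (User user : users) {
        insert.update(user.getName(), user.getEmail());
    }
    insert.flush(); // execute the final partial batch
}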