High-performance Java library for reading and writing Excel files with minimal memory usage
—
EasyExcel processes Excel files through an event-driven listener system: rows are parsed and delivered to registered listeners one at a time, so even very large datasets can be handled without loading them entirely into memory.
Primary interface for handling read events during Excel processing.
/**
 * Main interface for handling read events fired while EasyExcel parses a file.
 * Implementations receive one callback per data row, plus optional callbacks
 * for header rows, extra cell information, exceptions, and flow control.
 * @param <T> Type of data object for each row
 */
public interface ReadListener<T> extends Listener {
/**
 * Process one row of data
 * @param data Parsed data object for current row
 * @param context Analysis context with metadata
 */
void invoke(T data, AnalysisContext context);
/**
 * Called once after all data has been analyzed
 * @param context Analysis context with final metadata
 */
void doAfterAllAnalysed(AnalysisContext context);
/**
 * Handle exceptions during reading.
 * The default implementation re-throws, which aborts the read; override to
 * log and continue instead.
 * @param exception Exception that occurred
 * @param context Analysis context when exception occurred
 * @throws Exception Can re-throw or handle the exception
 */
default void onException(Exception exception, AnalysisContext context) throws Exception {
throw exception;
}
/**
 * Process header row data. The default implementation is a no-op.
 * @param headMap Map of column index to header cell data
 * @param context Analysis context
 */
default void invokeHead(Map<Integer, ReadCellData<?>> headMap, AnalysisContext context) {
// Default implementation does nothing
}
/**
 * Process extra information (comments, hyperlinks, merged regions).
 * The default implementation is a no-op.
 * @param extra Extra cell information
 * @param context Analysis context
 */
default void extra(CellExtra extra, AnalysisContext context) {
// Default implementation does nothing
}
/**
 * Control whether to continue reading. The default always continues.
 * @param context Analysis context
 * @return true to continue reading, false to stop
 */
default boolean hasNext(AnalysisContext context) {
return true;
}
}Context interface providing metadata and state information during reading.
/**
 * Provides context information during read operations.
 * An instance is passed to every ReadListener callback so listeners can
 * inspect the current row, sheet, and configuration, or stop the read.
 */
public interface AnalysisContext {
/**
 * Get the current row's analysis result
 * @return Current row data object
 */
Object getCurrentRowAnalysisResult();
/**
 * Get current row number (0-based)
 * @return Row number
 */
Integer getCurrentRowNum();
/**
 * Get current sheet information
 * @return ReadSheet configuration
 */
ReadSheet getCurrentSheet();
/**
 * Get Excel file type
 * @return ExcelTypeEnum (XLS, XLSX, CSV)
 */
ExcelTypeEnum getExcelType();
/**
 * Get the read holder carrying the active configuration
 * @return ReadHolder with settings
 */
ReadHolder currentReadHolder();
/**
 * Get global configuration
 * @return GlobalConfiguration settings
 */
GlobalConfiguration getGlobalConfiguration();
/**
 * Get current workbook holder
 * @return ReadWorkbook configuration
 */
ReadWorkbookHolder getReadWorkbookHolder();
/**
 * Get current sheet holder
 * @return ReadSheetHolder configuration
 */
ReadSheetHolder getReadSheetHolder();
/**
 * Interrupt the reading process (stops further row delivery)
 */
void interrupt();
}Abstract base implementation providing common functionality.
/**
 * Abstract base implementation of ReadListener.
 * Forces subclasses to implement the two required callbacks while keeping the
 * interface defaults for extra(...) and hasNext(...).
 * @param <T> Type of data object for each row
 */
public abstract class AnalysisEventListener<T> implements ReadListener<T> {
/**
 * Process each row of data (must be implemented)
 * @param data Parsed data object for current row
 * @param context Analysis context with metadata
 */
public abstract void invoke(T data, AnalysisContext context);
/**
 * Called after all data has been analyzed (must be implemented)
 * @param context Analysis context with final metadata
 */
public abstract void doAfterAllAnalysed(AnalysisContext context);
/**
 * Handle exceptions during reading.
 * Re-throws by default, which stops the read; override to log and continue.
 * @param exception Exception that occurred
 * @param context Analysis context when exception occurred
 */
@Override
public void onException(Exception exception, AnalysisContext context) throws Exception {
throw exception;
}
/**
 * Process header row data. No-op by default; override to capture headers.
 * @param headMap Map of column index to header cell data
 * @param context Analysis context
 */
@Override
public void invokeHead(Map<Integer, ReadCellData<?>> headMap, AnalysisContext context) {
// Default implementation does nothing
}
}Pre-built listeners for common scenarios.
/**
 * Synchronous listener that accumulates every parsed row into an in-memory list.
 * Suitable only for datasets small enough to hold entirely in memory.
 * @param <T> Type of data object
 */
public class SyncReadListener<T> implements ReadListener<T> {

    /** All rows received so far, in sheet order. */
    private final List<T> list = new ArrayList<>();

    /** Append the current row's data object to the collected list. */
    @Override
    public void invoke(T data, AnalysisContext context) {
        this.list.add(data);
    }

    /** Nothing to finalize; rows were collected one by one in invoke(...). */
    @Override
    public void doAfterAllAnalysed(AnalysisContext context) {
        // no-op
    }

    /**
     * Get all collected data
     * @return List of all data objects
     */
    public List<T> getList() {
        return this.list;
    }
}
/**
 * Paginated listener for processing large datasets in batches.
 * Rows are buffered until {@code batchCount} rows have accumulated; the full
 * page is then handed to {@link #invokeAfterBatch(List, AnalysisContext)} and
 * a fresh buffer is started. Any partial final page is flushed when analysis
 * completes, before {@link #doAfterAllPageRead(AnalysisContext)} is called.
 * @param <T> Type of data object
 */
public abstract class PageReadListener<T> extends AnalysisEventListener<T> {

    /** Number of rows buffered before a batch is dispatched. */
    private final int batchCount;

    /** Rows buffered for the current, not-yet-dispatched page. */
    private List<T> cachedDataList;

    /**
     * Create paginated listener with batch size.
     * @param batchCount Number of records per batch; must be at least 1
     * @throws IllegalArgumentException if {@code batchCount} is less than 1
     */
    public PageReadListener(int batchCount) {
        // Fix: reject non-positive sizes. batchCount == 0 would previously
        // dispatch every single row as its own batch, and a negative value
        // would never flush until the very end.
        if (batchCount < 1) {
            throw new IllegalArgumentException("batchCount must be >= 1, got: " + batchCount);
        }
        this.batchCount = batchCount;
        // Presize so a full page never triggers an internal array grow.
        this.cachedDataList = new ArrayList<>(batchCount);
    }

    @Override
    public void invoke(T data, AnalysisContext context) {
        cachedDataList.add(data);
        if (cachedDataList.size() >= batchCount) {
            invokeAfterBatch(cachedDataList, context);
            cachedDataList = new ArrayList<>(batchCount);
        }
    }

    @Override
    public void doAfterAllAnalysed(AnalysisContext context) {
        // Flush the trailing partial page, if any, then signal completion.
        if (!cachedDataList.isEmpty()) {
            invokeAfterBatch(cachedDataList, context);
        }
        doAfterAllPageRead(context);
    }

    /**
     * Process a batch of data.
     * @param dataList Batch of data objects (at most batchCount entries)
     * @param context Analysis context
     */
    public abstract void invokeAfterBatch(List<T> dataList, AnalysisContext context);

    /**
     * Called once after all pages have been processed. No-op by default.
     * @param context Analysis context
     */
    public void doAfterAllPageRead(AnalysisContext context) {
        // Default implementation does nothing
    }
}
/**
 * Listener that ignores exceptions and continues reading.
 * Wraps another listener and swallows (after logging) any exception thrown by
 * the delegate's callbacks, so one bad row cannot abort the whole read.
 * @param <T> Type of data object
 */
public class IgnoreExceptionReadListener<T> implements ReadListener<T> {

    /** The wrapped listener every callback is forwarded to. */
    private final ReadListener<T> delegate;

    /**
     * Create listener that wraps another listener and ignores exceptions.
     * @param delegate Listener to delegate to; must not be null
     * @throws NullPointerException if {@code delegate} is null
     */
    public IgnoreExceptionReadListener(ReadListener<T> delegate) {
        if (delegate == null) {
            throw new NullPointerException("delegate must not be null");
        }
        this.delegate = delegate;
    }

    @Override
    public void invoke(T data, AnalysisContext context) {
        try {
            delegate.invoke(data, context);
        } catch (Exception e) {
            // Log and ignore the exception
            System.err.println("Exception ignored: " + e.getMessage());
        }
    }

    @Override
    public void doAfterAllAnalysed(AnalysisContext context) {
        delegate.doAfterAllAnalysed(context);
    }

    @Override
    public void onException(Exception exception, AnalysisContext context) {
        // Ignore exceptions
        System.err.println("Exception ignored: " + exception.getMessage());
    }

    // Bug fix: the original did not forward the optional callbacks, silently
    // dropping the delegate's header/extra handling and its stop condition.

    @Override
    public void invokeHead(Map<Integer, ReadCellData<?>> headMap, AnalysisContext context) {
        try {
            delegate.invokeHead(headMap, context);
        } catch (Exception e) {
            System.err.println("Exception ignored: " + e.getMessage());
        }
    }

    @Override
    public void extra(CellExtra extra, AnalysisContext context) {
        try {
            delegate.extra(extra, context);
        } catch (Exception e) {
            System.err.println("Exception ignored: " + e.getMessage());
        }
    }

    @Override
    public boolean hasNext(AnalysisContext context) {
        // Flow control is the delegate's decision; do not mask it.
        return delegate.hasNext(context);
    }
}Classes for handling additional cell information.
/**
 * Extra cell information (comments, hyperlinks, merged regions).
 * Delivered to ReadListener.extra(...) when extra reading is enabled.
 * The row/column indexes describe the cell range the extra applies to.
 */
public class CellExtra {
/**
 * Type of extra information (see CellExtraTypeEnum)
 */
private CellExtraTypeEnum type;
/**
 * Text content (for comments and hyperlinks)
 */
private String text;
/**
 * First row index (0-based) of the covered range
 */
private Integer firstRowIndex;
/**
 * Last row index (0-based) of the covered range
 */
private Integer lastRowIndex;
/**
 * First column index (0-based) of the covered range
 */
private Integer firstColumnIndex;
/**
 * Last column index (0-based) of the covered range
 */
private Integer lastColumnIndex;
// Getters and setters elided for brevity...
}
/**
 * Types of extra cell information.
 * Passed to extraRead(...) to opt in to each kind of extra data.
 */
public enum CellExtraTypeEnum {
/**
 * Cell comments
 */
COMMENT,
/**
 * Hyperlinks
 */
HYPERLINK,
/**
 * Merged cell regions
 */
MERGE
}Base interfaces for the listener system.
/**
 * Base marker interface for all listeners.
 * Declares no methods; it only tags types participating in the event system.
 */
public interface Listener {
}
/**
 * Base marker interface for event handlers.
 * Declares no methods; it only tags handler types.
 */
public interface Handler {
}
/**
 * Interface for controlling execution order among listeners/handlers.
 */
public interface Order {
/**
 * Get execution order (lower values execute first)
 * @return Order value
 */
int order();
}import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.event.AnalysisEventListener;

/**
 * Example listener that validates each row as it arrives, collects the valid
 * users, and persists them once the whole sheet has been read.
 */
public class UserDataListener extends AnalysisEventListener<UserData> {

    /** Rows that passed validation, in sheet order. */
    private final List<UserData> users = new ArrayList<>();

    @Override
    public void invoke(UserData user, AnalysisContext context) {
        // Guard clause: skip rows missing either required field.
        if (user.getName() == null || user.getEmail() == null) {
            System.err.println("Invalid user data at row: " + context.getCurrentRowNum());
            return;
        }
        users.add(user);
        System.out.println("Processed user: " + user.getName());
    }

    @Override
    public void doAfterAllAnalysed(AnalysisContext context) {
        System.out.println("Analysis completed. Total valid users: " + users.size());
        // Process all users (save to database, etc.)
        saveUsers(users);
    }

    @Override
    public void onException(Exception exception, AnalysisContext context) {
        System.err.println("Error at row " + context.getCurrentRowNum() +
            ": " + exception.getMessage());
        // Continue processing instead of stopping
    }

    /** Persistence hook; implementation omitted in this example. */
    private void saveUsers(List<UserData> users) {
        // Implementation for saving users
    }
}
// Usage
EasyExcel.read("users.xlsx", UserData.class, new UserDataListener())
    .sheet()
    .doRead();
import com.alibaba.excel.read.listener.PageReadListener;
import com.alibaba.excel.context.AnalysisContext;

/**
 * Example batch processor: buffers 1000 rows per page, filters out invalid
 * users, and saves each valid page through the injected UserService.
 */
public class UserBatchProcessor extends PageReadListener<UserData> {

    /** Service that persists each validated page. */
    private final UserService userService;

    public UserBatchProcessor(UserService userService) {
        super(1000); // Process in batches of 1000
        this.userService = userService;
    }

    @Override
    public void invokeAfterBatch(List<UserData> userBatch, AnalysisContext context) {
        System.out.println("Processing batch of " + userBatch.size() + " users");
        // Process batch (validate, transform, save)
        List<UserData> validUsers = new ArrayList<>();
        for (UserData candidate : userBatch) {
            if (isValid(candidate)) {
                validUsers.add(candidate);
            }
        }
        userService.saveBatch(validUsers);
        System.out.println("Saved " + validUsers.size() + " valid users");
    }

    @Override
    public void doAfterAllPageRead(AnalysisContext context) {
        System.out.println("Completed processing all data");
        userService.finalizeImport();
    }

    /** A row is valid when name is present and the email looks plausible. */
    private boolean isValid(UserData user) {
        if (user.getName() == null) {
            return false;
        }
        String email = user.getEmail();
        return email != null && email.contains("@");
    }
}
// Usage
EasyExcel.read("large-users.xlsx", UserData.class, new UserBatchProcessor(userService))
    .sheet()
    .doRead();
import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.event.AnalysisEventListener;
import com.alibaba.excel.metadata.CellExtra;
import com.alibaba.excel.metadata.data.ReadCellData;

/**
 * Example listener that records header names, prints each row, and captures
 * extra cell information (comments, hyperlinks, merged regions).
 */
public class DetailedDataListener extends AnalysisEventListener<UserData> {

    /** Header text keyed by 0-based column index. */
    private final Map<Integer, String> headers = new HashMap<>();

    /** Human-readable description of every comment encountered. */
    private final List<String> comments = new ArrayList<>();

    @Override
    public void invokeHead(Map<Integer, ReadCellData<?>> headMap, AnalysisContext context) {
        System.out.println("Processing headers:");
        for (Map.Entry<Integer, ReadCellData<?>> entry : headMap.entrySet()) {
            Integer index = entry.getKey();
            String headerName = entry.getValue().getStringValue();
            headers.put(index, headerName);
            System.out.println("Column " + index + ": " + headerName);
        }
    }

    @Override
    public void invoke(UserData data, AnalysisContext context) {
        System.out.println("Row " + context.getCurrentRowNum() + ": " + data);
    }

    @Override
    public void extra(CellExtra extra, AnalysisContext context) {
        switch (extra.getType()) {
            case MERGE:
                System.out.println("Merged region: " +
                    extra.getFirstRowIndex() + "-" + extra.getLastRowIndex() +
                    ", " + extra.getFirstColumnIndex() + "-" + extra.getLastColumnIndex());
                break;
            case HYPERLINK:
                System.out.println("Hyperlink found: " + extra.getText());
                break;
            case COMMENT:
                comments.add("Comment at (" + extra.getFirstRowIndex() +
                    "," + extra.getFirstColumnIndex() + "): " + extra.getText());
                break;
        }
    }

    @Override
    public void doAfterAllAnalysed(AnalysisContext context) {
        System.out.println("Headers processed: " + headers.size());
        System.out.println("Comments found: " + comments.size());
        for (String comment : comments) {
            System.out.println(comment);
        }
    }
}
// Usage with extra information reading
EasyExcel.read("data-with-extras.xlsx", UserData.class, new DetailedDataListener())
    .extraRead(CellExtraTypeEnum.COMMENT)
    .extraRead(CellExtraTypeEnum.HYPERLINK)
    .extraRead(CellExtraTypeEnum.MERGE)
    .sheet()
    .doRead();
import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.event.AnalysisEventListener;

/**
 * Example listener that keeps reading through bad rows, tallying successes
 * and failures and reporting a summary once analysis finishes.
 */
public class RobustDataListener extends AnalysisEventListener<UserData> {

    private int successCount = 0;
    private int errorCount = 0;
    private final List<String> errors = new ArrayList<>();

    @Override
    public void invoke(UserData data, AnalysisContext context) {
        try {
            // Process data with potential validation
            validateAndProcess(data);
            successCount++;
        } catch (Exception e) {
            recordError("Row " + context.getCurrentRowNum() + ": " + e.getMessage());
        }
    }

    @Override
    public void onException(Exception exception, AnalysisContext context) {
        recordError("Parse error at row " + context.getCurrentRowNum() +
            ": " + exception.getMessage());
        // Continue processing instead of stopping
    }

    /** Count, remember, and print a single error line. */
    private void recordError(String error) {
        errorCount++;
        errors.add(error);
        System.err.println(error);
    }

    @Override
    public void doAfterAllAnalysed(AnalysisContext context) {
        System.out.println("Processing completed:");
        System.out.println("- Successful rows: " + successCount);
        System.out.println("- Error rows: " + errorCount);
        if (!errors.isEmpty()) {
            System.out.println("Errors encountered:");
            for (String error : errors) {
                System.out.println(error);
            }
        }
    }

    /** Reject rows with a blank name or implausible email, then save. */
    private void validateAndProcess(UserData data) {
        String name = data.getName();
        if (name == null || name.trim().isEmpty()) {
            throw new IllegalArgumentException("Name is required");
        }
        String email = data.getEmail();
        if (email == null || !email.contains("@")) {
            throw new IllegalArgumentException("Valid email is required");
        }
        // Process valid data
        saveUser(data);
    }

    /** Persistence hook; implementation omitted in this example. */
    private void saveUser(UserData user) {
        // Implementation for saving user
    }
}
import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.event.AnalysisEventListener;

/**
 * Example listener that stops reading early, either after a configurable
 * number of rows or when a "STOP" marker row is encountered.
 */
public class ConditionalReadListener extends AnalysisEventListener<UserData> {

    /** Default row cap, matching the previously hard-coded limit. */
    private static final int DEFAULT_MAX_ROWS = 1000;

    /** Maximum number of rows to process before stopping. */
    private final int maxRows;
    private int processedRows = 0;
    private boolean stopReading = false;

    /** Create a listener with the default limit of 1000 rows. */
    public ConditionalReadListener() {
        this(DEFAULT_MAX_ROWS);
    }

    /**
     * Generalization: the row cap is now a parameter instead of a constant.
     * @param maxRows Maximum rows to process before stopping; must be at least 1
     * @throws IllegalArgumentException if {@code maxRows} is less than 1
     */
    public ConditionalReadListener(int maxRows) {
        if (maxRows < 1) {
            throw new IllegalArgumentException("maxRows must be >= 1, got: " + maxRows);
        }
        this.maxRows = maxRows;
    }

    @Override
    public void invoke(UserData data, AnalysisContext context) {
        processedRows++;
        // Process data
        System.out.println("Processing row " + processedRows + ": " + data.getName());
        // Check stopping condition
        if (processedRows >= maxRows) {
            System.out.println("Reached maximum rows limit: " + maxRows);
            stopReading = true;
        }
        // Could also stop based on data content
        if ("STOP".equals(data.getName())) {
            System.out.println("Found stop marker, ending processing");
            stopReading = true;
        }
    }

    @Override
    public boolean hasNext(AnalysisContext context) {
        // Consulted by the reader; returning false ends the read early.
        return !stopReading;
    }

    @Override
    public void doAfterAllAnalysed(AnalysisContext context) {
        System.out.println("Processing ended. Total rows processed: " + processedRows);
    }
}
Install with Tessl CLI
npx tessl i tessl/maven-com-alibaba--easyexcel