Test utilities for Apache Flink's Table API and SQL ecosystem enabling robust testing of table operations and data transformations.
—
Specialized assertions for collections of RowData with bulk operations and type-safe comparisons. These assertions enable efficient testing of multiple rows with automatic type conversion and flexible comparison modes.
Comprehensive assertions for collections of RowData objects supporting various collection types and bulk operations.
public class RowDataListAssert extends AbstractListAssert<RowDataListAssert, List<RowData>, RowData, RowDataAssert> {
public RowDataListAssert(List<RowData> rowDataList);
// Type conversion for bulk comparison
public RowDataListAssert asGeneric(DataType dataType);
public RowDataListAssert asGeneric(LogicalType logicalType);
/** Requires flink-table-runtime in classpath */
public ListAssert<Row> asRows(DataType dataType);
}

Multiple factory methods support different collection types:
// From TableAssertions class
public static RowDataListAssert assertThatRows(Iterator<RowData> actual);
public static RowDataListAssert assertThatRows(Iterable<RowData> actual);
public static RowDataListAssert assertThatRows(Stream<RowData> actual);
public static RowDataListAssert assertThatRows(RowData... rows);

import static org.apache.flink.table.test.TableAssertions.assertThatRows;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.StringData;

List<RowData> rows = Arrays.asList(
GenericRowData.of(1, StringData.fromString("Alice")),
GenericRowData.of(2, StringData.fromString("Bob")),
GenericRowData.of(3, StringData.fromString("Charlie"))
);
// Basic collection validation
assertThatRows(rows)
.hasSize(3)
.isNotEmpty()
.doesNotContainNull();
// Using inherited AssertJ list assertions
assertThatRows(rows)
.extracting(row -> row.getArity())
.containsOnly(2);

import org.apache.flink.table.types.DataType;
import org.apache.flink.table.api.DataTypes;
DataType rowType = DataTypes.ROW(
DataTypes.FIELD("id", DataTypes.INT()),
DataTypes.FIELD("name", DataTypes.STRING())
);
List<RowData> actualRows = /* ... from table operation */;
List<RowData> expectedRows = Arrays.asList(
GenericRowData.of(1, StringData.fromString("Alice")),
GenericRowData.of(2, StringData.fromString("Bob"))
);
// Convert to generic format for comparison
assertThatRows(actualRows)
.asGeneric(rowType)
.containsExactly(expectedRows.toArray(new RowData[0]));
// Using logical type
LogicalType logicalType = rowType.getLogicalType();
assertThatRows(actualRows)
.asGeneric(logicalType)
.containsOnly(expectedRows.toArray(new RowData[0]));

import org.apache.flink.types.Row;
// Convert to external Row objects for comparison
List<Row> expectedExternalRows = Arrays.asList(
Row.of(1, "Alice"),
Row.of(2, "Bob")
);
assertThatRows(actualRows)
.asRows(rowType)
.containsExactly(expectedExternalRows.toArray(new Row[0]));

// From Iterator
Iterator<RowData> iterator = rows.iterator();
assertThatRows(iterator)
.hasSize(3);
// From Stream
Stream<RowData> stream = rows.stream();
assertThatRows(stream)
.allSatisfy(row -> assertThat(row).hasArity(2));
// From varargs
RowData row1 = GenericRowData.of(1, StringData.fromString("Alice"));
RowData row2 = GenericRowData.of(2, StringData.fromString("Bob"));
assertThatRows(row1, row2)
.hasSize(2);
// From Iterable (Set, etc.)
Set<RowData> rowSet = new HashSet<>(rows);
assertThatRows(rowSet)
.hasSizeGreaterThan(0);

// Validate rows with mixed operations
assertThatRows(actualRows)
.hasSize(expectedCount)
.asGeneric(rowType)
.satisfies(genericRows -> {
// All rows should be INSERT kind
assertThat(genericRows)
.allSatisfy(row -> assertThat(row).hasKind(RowKind.INSERT));
// Specific field validations
assertThat(genericRows)
.extracting(row -> row.getInt(0))
.containsExactly(1, 2, 3);
});

// For streaming table results
CloseableIterator<Row> streamingResults = /* ... */;
// Convert to list for comprehensive testing
List<RowData> collectedRows = new ArrayList<>();
while (streamingResults.hasNext()) {
Row row = streamingResults.next();
collectedRows.add(/* convert row to RowData */);
}
assertThatRows(collectedRows)
.asGeneric(resultType)
.satisfies(rows -> {
// Validate streaming-specific properties
assertThat(rows)
.hasSize(expectedStreamingCount)
.allSatisfy(row ->
assertThat(row).isNotNullAt(0) // timestamp field
);
});

import org.apache.flink.types.RowKind;
// Validate change stream results
List<RowData> changeStreamRows = /* ... */;
assertThatRows(changeStreamRows)
.asGeneric(rowType)
.satisfies(rows -> {
// Count different row kinds
long insertCount = rows.stream()
.filter(row -> row.getRowKind() == RowKind.INSERT)
.count();
long deleteCount = rows.stream()
.filter(row -> row.getRowKind() == RowKind.DELETE)
.count();
assertThat(insertCount).isEqualTo(expectedInserts);
assertThat(deleteCount).isEqualTo(expectedDeletes);
});

// Partial matching with flexible ordering
List<RowData> subset = Arrays.asList(
GenericRowData.of(1, StringData.fromString("Alice")),
GenericRowData.of(3, StringData.fromString("Charlie"))
);
assertThatRows(actualRows)
.asGeneric(rowType)
.containsAll(subset);
// Exclusive content validation
assertThatRows(actualRows)
.asGeneric(rowType)
.doesNotContainAnyElementsOf(forbiddenRows);
// Size-based validation with content sampling
assertThatRows(actualRows)
.hasSizeGreaterThan(1000)
.satisfies(rows -> {
// Sample validation for performance
List<RowData> sample = rows.subList(0, Math.min(100, rows.size()));
assertThatRows(sample)
.asGeneric(rowType)
.allSatisfy(row ->
assertThat(row)
.hasArity(expectedArity)
.isNotNullAt(keyFieldIndex)
);
});

Since RowDataListAssert extends AssertJ's AbstractListAssert, all standard list assertions are available:
// Standard AssertJ list operations
assertThatRows(rows)
.hasSize(3)
.isNotEmpty()
.doesNotContainNull()
.startsWith(firstExpectedRow)
.endsWith(lastExpectedRow)
.contains(middleExpectedRow)
.doesNotHaveDuplicates();
// Element extraction and mapping
assertThatRows(rows)
.extracting(RowData::getArity)
.containsOnly(2);
assertThatRows(rows)
.extracting(row -> row.getString(1).toString())
.contains("Alice", "Bob");

Install with Tessl CLI
npx tessl i tessl/maven-org-apache-flink--flink-table-test-utils