CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/maven-org-apache-flink--flink-table-api-java-uber

Comprehensive uber JAR that consolidates all Java APIs for Apache Flink's Table/SQL ecosystem, enabling developers to write table programs and integrate with other Flink APIs through a single dependency.

Pending
Overview
Eval results
Files

data-types.mddocs/

Data Types System

Comprehensive type system for defining table schemas, supporting primitive types, temporal types, and complex nested structures in Apache Flink's Table API.

Capabilities

DataTypes Factory

Central factory class for creating all Table API data types with full type safety and validation.

// Primitive numeric types
public static DataType BOOLEAN();
public static DataType TINYINT();
public static DataType SMALLINT();
public static DataType INT();
public static DataType BIGINT();
public static DataType FLOAT();
public static DataType DOUBLE();

/**
 * Create a decimal type with precision and scale
 * @param precision Total number of digits
 * @param scale Number of digits after decimal point
 * @return DECIMAL data type
 */
public static DataType DECIMAL(int precision, int scale);

// String and binary types
public static DataType CHAR(int length);
public static DataType VARCHAR(int length);
public static DataType STRING();
public static DataType BINARY(int length);
public static DataType VARBINARY(int length);
public static DataType BYTES();

// Temporal types
public static DataType DATE();
public static DataType TIME();
public static DataType TIME(int precision);
public static DataType TIMESTAMP();
public static DataType TIMESTAMP(int precision);
public static DataType TIMESTAMP_WITH_TIME_ZONE();
public static DataType TIMESTAMP_WITH_TIME_ZONE(int precision);
public static DataType TIMESTAMP_WITH_LOCAL_TIME_ZONE();
public static DataType TIMESTAMP_WITH_LOCAL_TIME_ZONE(int precision);
public static DataType TIMESTAMP_LTZ();              // shorthand alias for TIMESTAMP_WITH_LOCAL_TIME_ZONE()
public static DataType TIMESTAMP_LTZ(int precision); // shorthand alias, used in the examples below

// Interval types
public static DataType INTERVAL(DataTypes.Resolution resolution);
public static DataType INTERVAL(DataTypes.Resolution from, DataTypes.Resolution to);

/**
 * Create an array type
 * @param elementType Type of array elements
 * @return ARRAY data type
 */
public static DataType ARRAY(DataType elementType);

/**
 * Create a map type
 * @param keyType Type of map keys
 * @param valueType Type of map values
 * @return MAP data type
 */
public static DataType MAP(DataType keyType, DataType valueType);

/**
 * Create a multiset type (bag of elements)
 * @param elementType Type of multiset elements
 * @return MULTISET data type
 */
public static DataType MULTISET(DataType elementType);

/**
 * Create a row/struct type with named fields
 * @param fields Field definitions
 * @return ROW data type
 */
public static DataType ROW(Field... fields);

/**
 * Create a row type with field names and types
 * @param fieldNames Array of field names
 * @param fieldTypes Array of corresponding field types
 * @return ROW data type
 */
public static DataType ROW(String[] fieldNames, DataType[] fieldTypes);

/**
 * Create a field definition for row types
 * @param name Field name
 * @param type Field data type
 * @return Field definition
 */
public static Field FIELD(String name, DataType type);

/**
 * Create data type from Java class
 * @param clazz Java class to convert
 * @return Corresponding DataType
 */
public static DataType of(Class<?> clazz);

/**
 * Create data type from type string
 * @param typeString String representation of the type
 * @return Parsed DataType
 */
public static DataType of(String typeString);

Usage Examples:

// Primitive types
DataType userId = DataTypes.BIGINT();
DataType userName = DataTypes.STRING();
DataType balance = DataTypes.DECIMAL(10, 2);
DataType isActive = DataTypes.BOOLEAN();
DataType createdAt = DataTypes.TIMESTAMP(3);

// Complex types
DataType tags = DataTypes.ARRAY(DataTypes.STRING());
DataType attributes = DataTypes.MAP(DataTypes.STRING(), DataTypes.STRING());

// Row type for nested structure
DataType address = DataTypes.ROW(
    DataTypes.FIELD("street", DataTypes.STRING()),
    DataTypes.FIELD("city", DataTypes.STRING()),
    DataTypes.FIELD("zipcode", DataTypes.STRING())
);

// User record with nested address
DataType userRecord = DataTypes.ROW(
    DataTypes.FIELD("id", DataTypes.BIGINT()),
    DataTypes.FIELD("name", DataTypes.STRING()),
    DataTypes.FIELD("address", address),
    DataTypes.FIELD("tags", tags)
);

Schema Definition

Builder pattern for defining table schemas with columns, watermarks, and constraints.

/**
 * Create a new schema builder
 * @return Builder instance for constructing schema
 */
public static Builder newBuilder();

public static class Builder {
    /**
     * Add a physical column to the schema
     * @param name Column name
     * @param type Column data type
     * @return Builder for method chaining
     */
    public Builder column(String name, DataType type);
    
    /**
     * Add a computed column defined by an expression
     * @param name Column name
     * @param expression Expression for computing the column value
     * @return Builder for method chaining
     */
    public Builder columnByExpression(String name, Expression expression);
    
    /**
     * Add a metadata column for accessing connector metadata
     * @param name Column name
     * @param type Column data type
     * @return Builder for method chaining
     */
    public Builder columnByMetadata(String name, DataType type);
    
    /**
     * Add a metadata column with explicit metadata key
     * @param name Column name
     * @param type Column data type
     * @param key Metadata key from the connector
     * @return Builder for method chaining
     */
    public Builder columnByMetadata(String name, DataType type, String key);
    
    /**
     * Add a metadata column with virtual flag
     * @param name Column name
     * @param type Column data type
     * @param key Metadata key from the connector
     * @param isVirtual Whether the column is virtual (not persisted)
     * @return Builder for method chaining
     */
    public Builder columnByMetadata(String name, DataType type, String key, boolean isVirtual);
    
    /**
     * Define a watermark strategy for event time processing
     * @param columnName Name of the time column
     * @param watermarkExpression Expression for watermark generation
     * @return Builder for method chaining
     */
    public Builder watermark(String columnName, Expression watermarkExpression);
    
    /**
     * Define a primary key constraint
     * @param columnNames Names of columns forming the primary key
     * @return Builder for method chaining
     */
    public Builder primaryKey(String... columnNames);
    
    /**
     * Define a named primary key constraint
     * @param constraintName Name for the primary key constraint
     * @param columnNames Names of columns forming the primary key
     * @return Builder for method chaining
     */
    public Builder primaryKeyNamed(String constraintName, String... columnNames);
    
    /**
     * Build the final schema
     * @return Constructed Schema instance
     */
    public Schema build();
}

Schema Usage Examples:

// Basic schema with columns and primary key
Schema userSchema = Schema.newBuilder()
    .column("id", DataTypes.BIGINT())
    .column("name", DataTypes.STRING())
    .column("email", DataTypes.STRING())
    .column("created_at", DataTypes.TIMESTAMP(3))
    .primaryKey("id")
    .build();

// Schema with computed column and watermark for streaming
Schema eventSchema = Schema.newBuilder()
    .column("user_id", DataTypes.BIGINT())
    .column("event_type", DataTypes.STRING())
    .column("event_time", DataTypes.TIMESTAMP_LTZ(3))
    .column("payload", DataTypes.STRING())
    .columnByExpression("hour_of_day", $("event_time").extract(TimeIntervalUnit.HOUR))
    .watermark("event_time", $("event_time").minus(lit(5).seconds()))
    .build();

// Schema with metadata columns for Kafka connector
Schema kafkaSchema = Schema.newBuilder()
    .column("user_id", DataTypes.BIGINT())
    .column("message", DataTypes.STRING())
    .columnByMetadata("kafka_topic", DataTypes.STRING(), "topic")
    .columnByMetadata("kafka_partition", DataTypes.INT(), "partition")
    .columnByMetadata("kafka_offset", DataTypes.BIGINT(), "offset")
    .columnByMetadata("kafka_timestamp", DataTypes.TIMESTAMP_LTZ(3), "timestamp")
    .build();

Type Information Classes

Type information and utilities for working with data types at runtime.

/**
 * Abstract base class for all data types
 */
public abstract class DataType {
    /**
     * Get the logical type information
     * @return LogicalType instance
     */
    public LogicalType getLogicalType();
    
    /**
     * Get the Java class that represents this type
     * @return Java Class<?> for this data type
     */
    public Class<?> getConversionClass();
    
    /**
     * Create a nullable version of this type
     * @return DataType that accepts null values
     */
    public DataType nullable();
    
    /**
     * Create a non-nullable version of this type
     * @return DataType that does not accept null values
     */
    public DataType notNull();
    
    /**
     * Check if this type accepts null values
     * @return true if nullable, false otherwise
     */
    public boolean isNullable();
}

/**
 * Represents a field in a row type
 */
public final class Field {
    /**
     * Get the field name
     * @return Field name
     */
    public String getName();
    
    /**
     * Get the field data type
     * @return DataType of this field
     */
    public DataType getType();
    
    /**
     * Get the field description
     * @return Optional description of the field
     */
    public Optional<String> getDescription();
}

/**
 * Resolved schema containing all column and constraint information
 */
public interface ResolvedSchema {
    /**
     * Get the number of columns
     * @return Column count
     */
    public int getColumnCount();
    
    /**
     * Get column names in order
     * @return List of column names
     */
    public List<String> getColumnNames();
    
    /**
     * Get column data types in order
     * @return List of DataType instances
     */
    public List<DataType> getColumnDataTypes();
    
    /**
     * Get a specific column by index
     * @param index Column index
     * @return Column information
     */
    public Column getColumn(int index);
    
    /**
     * Get a specific column by name
     * @param name Column name
     * @return Optional Column information
     */
    public Optional<Column> getColumn(String name);
    
    /**
     * Get primary key constraint
     * @return Optional primary key constraint
     */
    public Optional<UniqueConstraint> getPrimaryKey();
    
    /**
     * Get all watermark specifications
     * @return List of watermark specifications
     */
    public List<WatermarkSpec> getWatermarkSpecs();
}

Built-in Type Conversions

Utilities for converting between different type representations.

/**
 * Type conversion utilities
 */
public class TypeConversions {
    /**
     * Convert from legacy TypeInformation to DataType
     * @param typeInfo Legacy TypeInformation
     * @return Equivalent DataType
     */
    public static DataType fromLegacyInfoToDataType(TypeInformation<?> typeInfo);
    
    /**
     * Convert DataType to legacy TypeInformation
     * @param dataType DataType to convert
     * @return Equivalent TypeInformation
     */
    public static TypeInformation<?> fromDataTypeToLegacyInfo(DataType dataType);
}

JSON Type Support

Special support for JSON data types and processing.

/**
 * JSON type enumeration for JSON processing functions
 */
public enum JsonType {
    VALUE,
    ARRAY,
    OBJECT
}

/**
 * JSON null handling behavior
 */
public enum JsonOnNull {
    NULL,
    ABSENT
}

/**
 * JSON value extraction behavior on empty or error
 */
public enum JsonValueOnEmptyOrError {
    NULL,
    ERROR,
    DEFAULT_VALUE
}

Common Type Patterns

Temporal Types with Precision:

// High precision timestamps for financial data
DataType orderTime = DataTypes.TIMESTAMP(9); // nanosecond precision
DataType tradeTime = DataTypes.TIMESTAMP_LTZ(6); // microsecond precision with local time zone semantics

// Date and time types
DataType birthDate = DataTypes.DATE();
DataType appointmentTime = DataTypes.TIME(3); // millisecond precision

Complex Nested Structures:

// E-commerce order structure
DataType orderItem = DataTypes.ROW(
    DataTypes.FIELD("product_id", DataTypes.BIGINT()),
    DataTypes.FIELD("quantity", DataTypes.INT()),
    DataTypes.FIELD("price", DataTypes.DECIMAL(10, 2))
);

DataType order = DataTypes.ROW(
    DataTypes.FIELD("order_id", DataTypes.BIGINT()),
    DataTypes.FIELD("customer_id", DataTypes.BIGINT()),
    DataTypes.FIELD("items", DataTypes.ARRAY(orderItem)),
    DataTypes.FIELD("total_amount", DataTypes.DECIMAL(12, 2)),
    DataTypes.FIELD("order_date", DataTypes.TIMESTAMP(3))
);

Map Types for Dynamic Data:

// Configuration or metadata as key-value pairs
DataType config = DataTypes.MAP(DataTypes.STRING(), DataTypes.STRING());
DataType metrics = DataTypes.MAP(DataTypes.STRING(), DataTypes.DOUBLE());

// Multi-language text support
DataType translations = DataTypes.MAP(
    DataTypes.STRING(), // language code
    DataTypes.STRING()  // translated text
);

Install with Tessl CLI

npx tessl i tessl/maven-org-apache-flink--flink-table-api-java-uber

docs

connectors.md

data-types.md

datastream-bridge.md

expressions.md

functions.md

index.md

sql-gateway.md

table-operations.md

tile.json