A bundled JDBC driver for Apache Flink SQL that packages the JDBC driver implementation along with its dependencies into a single JAR file
npx @tessl/cli install tessl/maven-org-apache-flink--flink-sql-jdbc-driver-bundle@2.1.0

A comprehensive JDBC driver bundle for Apache Flink SQL that enables Java applications to connect to Flink SQL Gateway and execute SQL queries against Flink's distributed stream and batch processing engine. The bundle packages all necessary dependencies using the Maven Shade plugin to create a standalone JAR that can be easily integrated into applications without dependency conflicts.
org.apache.flink:flink-sql-jdbc-driver-bundle:2.1.0

Standard JDBC imports:
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
import java.sql.ResultSet;
import java.sql.SQLException;

Flink-specific imports (optional):
import org.apache.flink.table.jdbc.FlinkDriver;
import org.apache.flink.table.jdbc.FlinkDataSource;

import java.sql.*;
/**
 * Example: connecting to Flink SQL Gateway through the standard JDBC API.
 *
 * <p>Fix over the original listing: the Connection, Statement, and ResultSet
 * were closed manually, so any SQLException thrown mid-query leaked all three
 * resources. try-with-resources guarantees they are closed (in reverse order
 * of creation) on every exit path.
 */
public class FlinkJdbcExample {
    public static void main(String[] args) throws SQLException {
        // Connection URL format: jdbc:flink://host:port[/catalog[/database]]
        String url = "jdbc:flink://localhost:8083";
        // Connect to Flink SQL Gateway; all three resources are AutoCloseable.
        try (Connection connection = DriverManager.getConnection(url);
                Statement statement = connection.createStatement();
                ResultSet results =
                        statement.executeQuery("SELECT * FROM my_table LIMIT 10")) {
            // Process results row by row; column indexes are 1-based in JDBC.
            while (results.next()) {
                System.out.println(results.getString(1));
            }
        }
    }
}
import org.apache.flink.table.jdbc.FlinkDataSource;
import java.sql.*;
import java.util.Properties;

/**
 * Example: obtaining connections through {@code FlinkDataSource} instead of
 * {@code DriverManager}, supplying connection properties such as the default
 * catalog.
 *
 * <p>Fix over the original listing: the Connection returned by
 * {@code getConnection()} was never closed. try-with-resources releases it
 * even when an exception is thrown.
 */
public class FlinkDataSourceExample {
    public static void main(String[] args) throws SQLException {
        Properties props = new Properties();
        props.setProperty("catalog", "my_catalog");
        FlinkDataSource dataSource = new FlinkDataSource(
                "jdbc:flink://localhost:8083",
                props);
        try (Connection connection = dataSource.getConnection()) {
            // Use connection as normal...
        }
    }
}
The Flink JDBC driver follows standard JDBC architecture patterns while providing specific integration with Flink SQL Gateway:
Important: Connection and Statement implementations are explicitly NOT thread-safe. Use separate connections for each thread.
Core JDBC driver functionality including automatic driver registration, connection establishment to Flink SQL Gateway, and connection management with catalog/schema support.
// Automatic driver registration via META-INF/services
public class FlinkDriver implements Driver {
public Connection connect(String url, Properties driverProperties) throws SQLException;
public boolean acceptsURL(String url) throws SQLException;
public int getMajorVersion();
public int getMinorVersion();
public boolean jdbcCompliant(); // Returns false
}
public class FlinkDataSource implements DataSource {
public FlinkDataSource(String url, Properties properties);
public Connection getConnection() throws SQLException;
}

Driver Registration and Connection Management
Statement execution capabilities supporting both DDL and DML operations, with special handling for INSERT statements that return job IDs as result sets.
public class FlinkStatement extends BaseStatement {
public ResultSet executeQuery(String sql) throws SQLException;
public boolean execute(String sql) throws SQLException;
public ResultSet getResultSet() throws SQLException;
public int getUpdateCount() throws SQLException;
public Connection getConnection() throws SQLException;
public void close() throws SQLException;
public void cancel() throws SQLException;
}

Comprehensive result set processing with support for all Java primitive types, temporal data, decimal precision, and complex data structures including Maps.
public class FlinkResultSet extends BaseResultSet {
// Navigation
public boolean next() throws SQLException;
public boolean wasNull() throws SQLException;
// Data retrieval by index
public String getString(int columnIndex) throws SQLException;
public boolean getBoolean(int columnIndex) throws SQLException;
public int getInt(int columnIndex) throws SQLException;
public long getLong(int columnIndex) throws SQLException;
public double getDouble(int columnIndex) throws SQLException;
public BigDecimal getBigDecimal(int columnIndex) throws SQLException;
public Date getDate(int columnIndex) throws SQLException;
public Time getTime(int columnIndex) throws SQLException;
public Timestamp getTimestamp(int columnIndex) throws SQLException;
public Object getObject(int columnIndex) throws SQLException;
// Data retrieval by label
public String getString(String columnLabel) throws SQLException;
// ... all types also available by column label
public ResultSetMetaData getMetaData() throws SQLException;
public int findColumn(String columnLabel) throws SQLException;
}

Database metadata functionality for discovering available catalogs, schemas, and database capabilities, with specific Flink SQL Gateway integration.
public class FlinkDatabaseMetaData extends BaseDatabaseMetaData {
public ResultSet getCatalogs() throws SQLException;
public ResultSet getSchemas() throws SQLException;
public String getDatabaseProductName() throws SQLException; // "Apache Flink"
public String getDatabaseProductVersion() throws SQLException;
public String getDriverName() throws SQLException;
public String getDriverVersion() throws SQLException;
public boolean isReadOnly() throws SQLException; // true
public String getIdentifierQuoteString() throws SQLException; // "`"
}

// URL Format: jdbc:flink://host:port[/catalog[/database]][?param=value&...]
public class DriverUri {
public static DriverUri create(String url, Properties properties) throws SQLException;
public static boolean acceptsURL(String url);
public InetSocketAddress getAddress();
public Optional<String> getCatalog();
public Optional<String> getDatabase();
public Properties getProperties();
}

public class FlinkResultSetMetaData implements ResultSetMetaData {
public int getColumnCount() throws SQLException;
public String getColumnName(int column) throws SQLException;
public String getColumnLabel(int column) throws SQLException;
public int getColumnType(int column) throws SQLException;
public String getColumnTypeName(int column) throws SQLException;
public int getPrecision(int column) throws SQLException;
public int getScale(int column) throws SQLException;
public int isNullable(int column) throws SQLException;
public String getColumnClassName(int column) throws SQLException;
}

public class ColumnInfo {
public static ColumnInfo fromLogicalType(String columnName, LogicalType type);
public int getColumnType();
public boolean isSigned();
public int getPrecision();
public int getScale();
public int getColumnDisplaySize();
public String getColumnName();
public boolean isNullable();
public String columnTypeName();
}

Primitive Types: boolean, byte, short, int, long, float, double
Text: String, binary data (byte[])
Numeric: BigDecimal with precision/scale support
Temporal: Date, Time, Timestamp
Complex: Maps (converted to Java Map objects)
Generic: Object (for any type)
Thread Safety: Connection and Statement implementations are NOT thread-safe
JDBC Compliance: Not fully JDBC compliant (returns false for jdbcCompliant())
Query Support: Batch mode queries only; streaming queries not supported via JDBC
Unsupported Features: Prepared statements, callable statements, transactions, savepoints, result set updates, stored procedures, generated keys, Array data type retrieval
The driver throws standard JDBC exceptions:
- SQLException for general database errors
- SQLFeatureNotSupportedException for unsupported JDBC features
- SQLClientInfoException for client info operations