SQL parser component for Apache Flink that provides Hive dialect support for parsing Hive-specific DDL and DML statements
npx @tessl/cli install tessl/maven-org-apache-flink--flink-sql-parser-hive@1.17.0

Flink SQL Parser Hive is a specialized SQL parser component for Apache Flink that extends the core SQL parser to support Hive dialect syntax. It provides comprehensive parsing capabilities for Hive-specific DDL (Data Definition Language) and DML (Data Manipulation Language) statements, enabling seamless integration with Hive metastores and compatibility with existing Hive data warehouses within Flink's distributed stream and batch processing engine.
org.apache.flink:flink-sql-parser-hive:1.17.2

import org.apache.flink.sql.parser.hive.impl.FlinkHiveSqlParserImpl;
import org.apache.flink.sql.parser.hive.ddl.*;
import org.apache.flink.sql.parser.hive.dml.*;
import org.apache.flink.sql.parser.hive.type.*;
import org.apache.calcite.sql.SqlParser;
import org.apache.flink.sql.parser.hive.impl.FlinkHiveSqlParserImpl;
// Create a Hive SQL parser
SqlParser parser = SqlParser.create(sqlStatement,
SqlParser.config()
.withParserFactory(FlinkHiveSqlParserImpl.FACTORY)
.withQuoting(Quoting.DOUBLE_QUOTE)
.withUnquotedCasing(Casing.TO_UPPER)
.withQuotedCasing(Casing.UNCHANGED));
// Parse Hive SQL statement
SqlNode sqlNode = parser.parseStmt();
// Example: Create Hive table with partitions
String createTableSql = """
CREATE TABLE IF NOT EXISTS sales_data (
id BIGINT,
customer_name STRING,
amount DECIMAL(10,2)
)
PARTITIONED BY (year INT, month INT)
STORED AS PARQUET
LOCATION '/data/sales'
TBLPROPERTIES ('transactional'='true')
""";The Flink SQL Parser Hive is built around several key components:
FlinkHiveSqlParserImpl provides the main parsing entry point using JavaCC and FMPP code generation.

Core parser factory and integration point for creating Hive SQL parsers within the Apache Calcite framework.
/**
* Main parser factory for creating Hive SQL parser instances
*/
public class FlinkHiveSqlParserImpl {
public static final SqlParserImplFactory FACTORY;
}

Complete database lifecycle management including creation, alteration, and property management for Hive databases.
/**
* CREATE DATABASE statement for Hive dialect
*/
public class SqlCreateHiveDatabase extends SqlCreateDatabase {
public SqlCreateHiveDatabase(SqlParserPos pos, SqlIdentifier databaseName,
SqlNodeList propertyList, SqlCharStringLiteral comment,
SqlCharStringLiteral location, boolean ifNotExists) throws ParseException;
}
/**
* Base class for ALTER DATABASE operations
*/
public abstract class SqlAlterHiveDatabase extends SqlAlterDatabase {
public enum AlterHiveDatabaseOp { CHANGE_PROPS, CHANGE_LOCATION, CHANGE_OWNER }
}

Comprehensive table management including creation with Hive-specific features, alteration, and column management.
/**
* CREATE TABLE statement for Hive dialect with full Hive table features
*/
public class SqlCreateHiveTable extends SqlCreateTable {
public SqlCreateHiveTable(SqlParserPos pos, SqlIdentifier tableName, SqlNodeList columnList,
HiveTableCreationContext creationContext, SqlNodeList propertyList,
SqlNodeList partColList, SqlCharStringLiteral comment, boolean isTemporary,
boolean isExternal, HiveTableRowFormat rowFormat,
HiveTableStoredAs storedAs, SqlCharStringLiteral location,
boolean ifNotExists) throws ParseException;
}
/**
* ROW FORMAT specification for Hive tables
*/
public static class HiveTableRowFormat {
public static HiveTableRowFormat withDelimited(...) throws ParseException;
public static HiveTableRowFormat withSerDe(...) throws ParseException;
}
/**
* STORED AS specification for Hive tables
*/
public static class HiveTableStoredAs {
public static HiveTableStoredAs ofFileFormat(...) throws ParseException;
public static HiveTableStoredAs ofInputOutputFormat(...) throws ParseException;
}

Partition operations for adding, renaming, and managing Hive table partitions.
/**
* ADD PARTITION statement for Hive tables
*/
public class SqlAddHivePartitions extends SqlCall {
public SqlAddHivePartitions(SqlParserPos pos, SqlIdentifier tableName, boolean ifNotExists,
List<SqlNodeList> partSpecs, List<SqlCharStringLiteral> partLocations);
}
/**
* PARTITION RENAME statement for Hive tables
*/
public class SqlAlterHivePartitionRename extends SqlAlterHiveTable {
public SqlNodeList getNewPartSpec();
}

View creation and management with Hive-specific properties and syntax.
/**
* CREATE VIEW statement for Hive dialect
*/
public class SqlCreateHiveView extends SqlCreateView {
public SqlCreateHiveView(SqlParserPos pos, SqlIdentifier viewName, SqlNodeList fieldList,
SqlNode query, boolean ifNotExists, SqlCharStringLiteral comment,
SqlNodeList properties);
}

Enhanced INSERT statements with comprehensive partition support for both static and dynamic partitioning.
/**
* Enhanced INSERT statement for Hive tables with partition support
*/
public class RichSqlHiveInsert extends RichSqlInsert {
public RichSqlHiveInsert(SqlParserPos pos, SqlNodeList keywords, SqlNodeList extendedKeywords,
SqlNode targetTable, SqlNode source, SqlNodeList columnList,
SqlNodeList staticPartitions, SqlNodeList allPartKeys);
}

Three-dimensional constraint system supporting ENABLE/DISABLE, VALIDATE/NOVALIDATE, and RELY/NORELY traits.
/**
* Complete constraint trait specification
*/
public class SqlHiveConstraintTrait {
public SqlHiveConstraintTrait(SqlLiteral enable, SqlLiteral validate, SqlLiteral rely);
public boolean isEnable();
public boolean isValidate();
public boolean isRely();
}
/**
* Constraint enable/disable options
*/
public enum SqlHiveConstraintEnable { ENABLE, DISABLE }
/**
* Constraint validate/no-validate options
*/
public enum SqlHiveConstraintValidate { VALIDATE, NOVALIDATE }
/**
* Constraint rely/no-rely options
*/
public enum SqlHiveConstraintRely { RELY, NORELY }

Extended type system supporting Hive-specific data types, including enhanced STRUCT types with field comments.
/**
* STRUCT type specification with field names, types, and comments
*/
public class ExtendedHiveStructTypeNameSpec extends ExtendedSqlRowTypeNameSpec {
public ExtendedHiveStructTypeNameSpec(SqlParserPos pos, List<SqlIdentifier> fieldNames,
List<SqlDataTypeSpec> fieldTypes,
List<SqlCharStringLiteral> comments) throws ParseException;
}

Utility classes for property validation, data type conversion, and constraint trait management.
/**
* Utility methods for Hive DDL operations
*/
public class HiveDDLUtils {
public static final String COL_DELIMITER;
public static SqlNodeList checkReservedTableProperties(SqlNodeList props) throws ParseException;
public static void convertDataTypes(SqlNodeList columns) throws ParseException;
public static byte encodeConstraintTrait(SqlHiveConstraintTrait trait);
public static SqlCharStringLiteral unescapeStringLiteral(SqlCharStringLiteral literal);
}

All parsing operations may throw ParseException when SQL syntax is invalid or when validation fails:
try {
SqlNode node = parser.parseStmt();
} catch (ParseException e) {
// Handle parsing errors
System.err.println("SQL parsing failed: " + e.getMessage());
}

This parser integrates seamlessly with Apache Flink's table ecosystem: