SQL parser component for Apache Flink that provides Hive dialect support for parsing Hive-specific DDL and DML statements
—
The constraint system provides a three-dimensional constraint framework supporting ENABLE/DISABLE, VALIDATE/NOVALIDATE, and RELY/NORELY traits for Hive table constraints.
Complete constraint trait specification with three dimensions of constraint behavior.
/**
 * Complete constraint trait specification for a Hive table constraint.
 *
 * <p>Represents three-dimensional constraint behavior: enable/disable,
 * validate/no-validate, and rely/no-rely.
 *
 * <p>NOTE(review): this is an API signature summary — method bodies are
 * elided here, so behavior described below is taken from the signatures
 * and should be confirmed against the implementation.
 */
public class SqlHiveConstraintTrait {
/**
 * Creates a constraint trait from the three dimension literals.
 *
 * @param enable enable/disable literal (symbol of ENABLE or DISABLE)
 * @param validate validate/no-validate literal (symbol of VALIDATE or NOVALIDATE)
 * @param rely rely/no-rely literal (symbol of RELY or NORELY)
 */
public SqlHiveConstraintTrait(SqlLiteral enable, SqlLiteral validate, SqlLiteral rely);
/**
 * Writes the SQL text of this constraint trait to the given writer.
 *
 * @param writer SQL writer receiving the output
 * @param leftPrec left precedence of the surrounding context
 * @param rightPrec right precedence of the surrounding context
 */
public void unparse(SqlWriter writer, int leftPrec, int rightPrec);
/**
 * Checks if the constraint is enabled (ENABLE dimension).
 *
 * @return true if the constraint is enabled, false otherwise
 */
public boolean isEnable();
/**
 * Checks if the constraint is validated (VALIDATE dimension).
 *
 * @return true if constraint validation is enabled, false otherwise
 */
public boolean isValidate();
/**
 * Checks if the optimizer may rely on the constraint (RELY dimension).
 *
 * @return true if constraint rely is enabled, false otherwise
 */
public boolean isRely();
}

Control whether constraints are enabled or disabled.
/**
 * Constraint enable/disable options.
 *
 * <p>Controls whether the constraint is active at all; the other two
 * dimensions (validate, rely) qualify an enabled constraint.
 */
public enum SqlHiveConstraintEnable {
/** Enable the constraint. */
ENABLE,
/** Disable the constraint. */
DISABLE;
/**
 * Creates a SQL literal symbol wrapping this enable state.
 *
 * @param pos parser position attached to the literal
 * @return SqlLiteral representing this enable state
 */
public SqlLiteral symbol(SqlParserPos pos);
}

Control whether constraints are validated during data operations.
/**
 * Constraint validate/no-validate options.
 *
 * <p>Controls whether constraint validation occurs during data operations
 * (e.g. whether writes are checked against the constraint).
 */
public enum SqlHiveConstraintValidate {
/** Validate the constraint during data operations. */
VALIDATE,
/** Do not validate the constraint during data operations. */
NOVALIDATE;
/**
 * Creates a SQL literal symbol wrapping this validation state.
 *
 * @param pos parser position attached to the literal
 * @return SqlLiteral representing this validation state
 */
public SqlLiteral symbol(SqlParserPos pos);
}

Control whether the query optimizer can rely on constraints for optimization.
/**
 * Constraint rely/no-rely options.
 *
 * <p>Controls whether the optimizer may assume the constraint holds when
 * planning queries, independently of whether it is validated.
 */
public enum SqlHiveConstraintRely {
/** Optimizer may rely on the constraint. */
RELY,
/** Optimizer must not rely on the constraint. */
NORELY;
/**
 * Creates a SQL literal symbol wrapping this rely state.
 *
 * @param pos parser position attached to the literal
 * @return SqlLiteral representing this rely state
 */
public SqlLiteral symbol(SqlParserPos pos);
}

// Create constraint traits programmatically
// ZERO is a synthetic parser position used when building AST nodes programmatically.
SqlParserPos pos = SqlParserPos.ZERO;
// Create ENABLE VALIDATE RELY trait (default/recommended)
SqlLiteral enable = SqlHiveConstraintEnable.ENABLE.symbol(pos);
SqlLiteral validate = SqlHiveConstraintValidate.VALIDATE.symbol(pos);
SqlLiteral rely = SqlHiveConstraintRely.RELY.symbol(pos);
SqlHiveConstraintTrait defaultTrait = new SqlHiveConstraintTrait(enable, validate, rely);
// Create DISABLE NOVALIDATE NORELY trait (minimal constraint)
SqlLiteral disable = SqlHiveConstraintEnable.DISABLE.symbol(pos);
SqlLiteral novalidate = SqlHiveConstraintValidate.NOVALIDATE.symbol(pos);
SqlLiteral norely = SqlHiveConstraintRely.NORELY.symbol(pos);
SqlHiveConstraintTrait minimalTrait = new SqlHiveConstraintTrait(disable, novalidate, norely);
// Check trait properties
boolean isEnabled = defaultTrait.isEnable(); // true
boolean isValidated = defaultTrait.isValidate(); // true
boolean isReliable = defaultTrait.isRely(); // true
// NOTE(review): the names below invert the getters — each variable stores
// the raw getter result, which is false for the minimal trait.
boolean isDisabled = minimalTrait.isEnable(); // false
boolean isNotValidated = minimalTrait.isValidate(); // false
boolean isNotReliable = minimalTrait.isRely(); // false

-- Table creation with constraints and traits
-- Example: CREATE TABLE with column-level and table-level constraints,
-- each carrying an explicit ENABLE/VALIDATE/RELY trait combination.
CREATE TABLE customer_data (
customer_id BIGINT,
email STRING,
phone STRING,
registration_date DATE,
-- Primary key with default trait (ENABLE VALIDATE RELY)
PRIMARY KEY (customer_id) ENABLE VALIDATE RELY,
-- NOT NULL constraint with custom trait
CONSTRAINT nn_email NOT NULL (email) ENABLE NOVALIDATE RELY,
-- Unique constraint with minimal enforcement
CONSTRAINT uk_email UNIQUE (email) DISABLE NOVALIDATE NORELY
);
-- Alter table to modify constraint traits: restate the constraint
-- with a new ENABLE/VALIDATE/RELY combination.
ALTER TABLE customer_data
CHANGE CONSTRAINT nn_email
NOT NULL (email) ENABLE VALIDATE RELY;

The HiveDDLUtils class provides utility methods for working with constraint traits:
// Default constraint trait (ENABLE NOVALIDATE RELY)
byte defaultTrait = HiveDDLUtils.defaultTrait();
// Modify traits.
// NOTE(review): each helper appears to return a new byte value with the
// relevant flag changed (a byte bitmask encoding) — confirm against the
// HiveDDLUtils implementation.
byte enabledTrait = HiveDDLUtils.enableConstraint(defaultTrait);
byte disabledTrait = HiveDDLUtils.disableConstraint(defaultTrait);
byte validatedTrait = HiveDDLUtils.validateConstraint(defaultTrait);
byte nonValidatedTrait = HiveDDLUtils.noValidateConstraint(defaultTrait);
byte reliableTrait = HiveDDLUtils.relyConstraint(defaultTrait);
byte nonReliableTrait = HiveDDLUtils.noRelyConstraint(defaultTrait);
// Check trait properties
boolean requiresEnable = HiveDDLUtils.requireEnableConstraint(defaultTrait);
boolean requiresValidate = HiveDDLUtils.requireValidateConstraint(defaultTrait);
boolean requiresRely = HiveDDLUtils.requireRelyConstraint(defaultTrait);
// Encode a constraint trait object to its byte representation
// (enable/validate/rely literals must be in scope, e.g. from the example above).
SqlHiveConstraintTrait traitObj = new SqlHiveConstraintTrait(enable, validate, rely);
byte encodedTrait = HiveDDLUtils.encodeConstraintTrait(traitObj);

Primary key constraints with various trait combinations:
// Create table with a primary key constraint declared in SQL (text block).
String createTableWithPKSql = """
CREATE TABLE orders (
order_id BIGINT,
customer_id BIGINT,
order_date DATE,
total_amount DECIMAL(10,2),
-- Primary key with full enforcement
CONSTRAINT pk_order PRIMARY KEY (order_id) ENABLE VALIDATE RELY
)
TBLPROPERTIES (
'hive.pk.constraint.trait' = 'ENABLE_VALIDATE_RELY'
)
""";
// Programmatic primary key creation.
// NOTE(review): pkTrait appears to be a public mutable field on the
// creation context — confirm against HiveTableCreationContext.
HiveTableCreationContext context = new HiveTableCreationContext();
context.pkTrait = new SqlHiveConstraintTrait(
SqlHiveConstraintEnable.ENABLE.symbol(pos),
SqlHiveConstraintValidate.VALIDATE.symbol(pos),
SqlHiveConstraintRely.RELY.symbol(pos)
);

NOT NULL constraints with trait specifications:
// Create table with per-column NOT NULL constraints, each with its own trait.
String createTableWithNotNullSql = """
CREATE TABLE customer_profile (
customer_id BIGINT NOT NULL ENABLE VALIDATE RELY,
first_name STRING NOT NULL ENABLE NOVALIDATE RELY,
last_name STRING NOT NULL ENABLE NOVALIDATE RELY,
email STRING NOT NULL DISABLE NOVALIDATE NORELY,
phone STRING,
registration_date DATE NOT NULL ENABLE VALIDATE RELY
)
""";
// Programmatic NOT NULL constraint creation
HiveTableCreationContext context = new HiveTableCreationContext();
// Create NOT NULL traits for multiple columns.
// NOTE(review): notNullTraits and notNullCols are parallel lists — index i
// of each must describe the same column; keep them aligned.
context.notNullTraits = List.of(
new SqlHiveConstraintTrait(
SqlHiveConstraintEnable.ENABLE.symbol(pos),
SqlHiveConstraintValidate.VALIDATE.symbol(pos),
SqlHiveConstraintRely.RELY.symbol(pos)
),
new SqlHiveConstraintTrait(
SqlHiveConstraintEnable.ENABLE.symbol(pos),
SqlHiveConstraintValidate.NOVALIDATE.symbol(pos),
SqlHiveConstraintRely.RELY.symbol(pos)
)
);
context.notNullCols = List.of(
new SqlIdentifier("customer_id", pos),
new SqlIdentifier("email", pos)
);

public class HiveConstraintManager {
/**
 * Creates a constraint trait for the given enforcement level.
 *
 * <p>Supported levels (case-insensitive): "STRICT", "OPTIMIZER_ONLY",
 * "INFORMATIONAL"; any other value — including null — falls back to the
 * default "STANDARD" behavior.
 *
 * @param enforcementLevel symbolic enforcement level; null is treated as "STANDARD"
 * @param pos parser position attached to the generated literals
 * @return constraint trait encoding the requested enforcement level
 */
public static SqlHiveConstraintTrait createConstraintTrait(String enforcementLevel, SqlParserPos pos) {
    // Locale.ROOT keeps the match locale-independent: under e.g. a Turkish
    // default locale, "strict".toUpperCase() yields "STRİCT" and would
    // silently fall through to the default branch.
    final String level =
            enforcementLevel == null ? "STANDARD" : enforcementLevel.toUpperCase(java.util.Locale.ROOT);
    final SqlLiteral enable;
    final SqlLiteral validate;
    final SqlLiteral rely;
    switch (level) {
        case "STRICT":
            // Full enforcement: validate data and enable optimizer reliance
            enable = SqlHiveConstraintEnable.ENABLE.symbol(pos);
            validate = SqlHiveConstraintValidate.VALIDATE.symbol(pos);
            rely = SqlHiveConstraintRely.RELY.symbol(pos);
            break;
        case "OPTIMIZER_ONLY":
            // Enable for the optimizer, but don't validate data
            enable = SqlHiveConstraintEnable.ENABLE.symbol(pos);
            validate = SqlHiveConstraintValidate.NOVALIDATE.symbol(pos);
            rely = SqlHiveConstraintRely.RELY.symbol(pos);
            break;
        case "INFORMATIONAL":
            // Metadata only: no enforcement and no optimizer reliance
            enable = SqlHiveConstraintEnable.DISABLE.symbol(pos);
            validate = SqlHiveConstraintValidate.NOVALIDATE.symbol(pos);
            rely = SqlHiveConstraintRely.NORELY.symbol(pos);
            break;
        default: // "STANDARD" and unrecognized levels
            // Default Hive behavior: enable but don't validate
            enable = SqlHiveConstraintEnable.ENABLE.symbol(pos);
            validate = SqlHiveConstraintValidate.NOVALIDATE.symbol(pos);
            rely = SqlHiveConstraintRely.RELY.symbol(pos);
            break;
    }
    return new SqlHiveConstraintTrait(enable, validate, rely);
}
/**
 * Checks whether a constraint trait combination is self-consistent.
 *
 * <p>Rules: a disabled constraint may neither be validated nor relied
 * upon. Relying on an enabled-but-unvalidated constraint is permitted
 * (it merits a warning, not a rejection).
 *
 * @param trait the trait combination to check
 * @return true when the combination is valid
 */
public static boolean isValidConstraintCombination(SqlHiveConstraintTrait trait) {
    // Enabled constraints accept any validate/rely combination.
    if (trait.isEnable()) {
        return true;
    }
    // Disabled constraints must be neither validated nor relied upon.
    return !trait.isValidate() && !trait.isRely();
}
/**
 * Recommends a constraint enforcement level for a table/workload pairing.
 *
 * <p>Mapping: TRANSACTIONAL+OLTP → STRICT; TRANSACTIONAL otherwise →
 * STANDARD; ANALYTICAL+ETL → OPTIMIZER_ONLY; ANALYTICAL otherwise →
 * INFORMATIONAL; any other table type → STANDARD.
 *
 * @param tableType table category, e.g. "TRANSACTIONAL" or "ANALYTICAL" (null-safe)
 * @param workloadType workload category, e.g. "OLTP" or "ETL" (null-safe)
 * @return the recommended enforcement level name
 */
public static String recommendConstraintTrait(String tableType, String workloadType) {
    // Constant-first equals keeps both arguments null-safe.
    if ("TRANSACTIONAL".equals(tableType)) {
        // Full validation for OLTP; analytical use skips the validation cost.
        return "OLTP".equals(workloadType) ? "STRICT" : "STANDARD";
    }
    if ("ANALYTICAL".equals(tableType)) {
        // ETL benefits from optimizer hints; ad-hoc queries get metadata only.
        return "ETL".equals(workloadType) ? "OPTIMIZER_ONLY" : "INFORMATIONAL";
    }
    // Unknown table types fall back to the default recommendation.
    return "STANDARD";
}
}
// Usage examples
SqlParserPos pos = SqlParserPos.ZERO;
// Create strict constraint for critical data
SqlHiveConstraintTrait strictTrait = HiveConstraintManager.createConstraintTrait("STRICT", pos);
System.out.println("Strict trait - Enable: " + strictTrait.isEnable() +
", Validate: " + strictTrait.isValidate() +
", Rely: " + strictTrait.isRely());
// Validate constraint combination — expected true: STRICT maps to
// ENABLE VALIDATE RELY, which passes every validation rule.
boolean isValid = HiveConstraintManager.isValidConstraintCombination(strictTrait);
System.out.println("Constraint combination is valid: " + isValid);
// Get recommendation — expected "STRICT" per the TRANSACTIONAL/OLTP rule.
String recommendation = HiveConstraintManager.recommendConstraintTrait("TRANSACTIONAL", "OLTP");
System.out.println("Recommended constraint level: " + recommendation);

// Evolution of constraint enforcement over time
public class ConstraintEvolution {
/**
 * Migrates a table's primary-key constraint from purely informational to
 * fully enforced, by issuing a three-step ALTER TABLE sequence:
 * (1) add as DISABLE NOVALIDATE NORELY, (2) switch to ENABLE NOVALIDATE
 * RELY for the optimizer, (3) switch to ENABLE VALIDATE RELY.
 *
 * <p>Failures are reported to stderr and abort the remaining steps,
 * leaving the table in its current intermediate state (best-effort).
 *
 * @param tableName name of the table to migrate
 * @param tableEnv table environment used to execute the DDL
 */
public void migrateConstraintsToEnforced(String tableName, TableEnvironment tableEnv) {
    // DDL for each migration step, executed strictly in order.
    final String[] steps = {
            // Step 1: informational constraint only (DISABLE NOVALIDATE NORELY)
            String.format("""
            ALTER TABLE %s
            ADD CONSTRAINT pk_temp PRIMARY KEY (id) DISABLE NOVALIDATE NORELY
            """, tableName),
            // Step 2: enable for the optimizer (ENABLE NOVALIDATE RELY);
            // a data quality assessment would precede this in practice.
            String.format("""
            ALTER TABLE %s
            CHANGE CONSTRAINT pk_temp PRIMARY KEY (id) ENABLE NOVALIDATE RELY
            """, tableName),
            // Step 3: enable validation after data cleanup (ENABLE VALIDATE RELY)
            String.format("""
            ALTER TABLE %s
            CHANGE CONSTRAINT pk_temp PRIMARY KEY (id) ENABLE VALIDATE RELY
            """, tableName),
    };
    // Progress message printed after each successful step.
    final String[] progress = {
            "Step 1: Added informational constraint",
            "Step 2: Enabled constraint for optimizer",
            "Step 3: Enabled full constraint validation",
    };
    try {
        for (int i = 0; i < steps.length; i++) {
            tableEnv.executeSql(steps[i]);
            System.out.println(progress[i]);
        }
    } catch (Exception e) {
        // Best-effort migration: report the failure and stop.
        System.err.println("Constraint migration failed: " + e.getMessage());
    }
}
}

Install with Tessl CLI
npx tessl i tessl/maven-org-apache-flink--flink-sql-parser-hive