or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

Files

docs

constraint-system.md · data-manipulation.md · database-operations.md · index.md · parser-integration.md · partition-management.md · table-operations.md · type-system.md · utilities.md · view-operations.md

docs/utilities.md

# Utilities and Helpers

Utility classes provide property validation, data type conversion, constraint trait management, and various helper functions for Hive DDL operations.

## Capabilities

### DDL Utilities

Comprehensive utility methods for Hive DDL operations including property validation and data type conversion.

```java { .api }
/**
 * Utility methods for Hive DDL SQL nodes
 * Provides property validation, data type conversion, and constraint handling
 */
public class HiveDDLUtils {
    /** Column delimiter for internal use */
    public static final String COL_DELIMITER = ";";

    /**
     * Validates database properties against reserved property names
     * @param props Database properties to validate
     * @return Validated property list
     * @throws ParseException if reserved properties are used
     */
    public static SqlNodeList checkReservedDBProperties(SqlNodeList props) throws ParseException;

    /**
     * Validates table properties against reserved property names and prefixes
     * @param props Table properties to validate
     * @return Validated property list
     * @throws ParseException if reserved properties are used
     */
    public static SqlNodeList checkReservedTableProperties(SqlNodeList props) throws ParseException;

    /**
     * Ensures table properties do not mark table as generic
     * @param props Table properties to check
     * @return Validated property list
     * @throws ParseException if generic table creation is attempted
     */
    public static SqlNodeList ensureNonGeneric(SqlNodeList props) throws ParseException;

    /**
     * Creates table option from key-value pair
     * @param key Property key
     * @param value Property value node
     * @param pos Parser position
     * @return SqlTableOption instance
     */
    public static SqlTableOption toTableOption(String key, SqlNode value, SqlParserPos pos);

    /**
     * Creates table option from key-value strings
     * @param key Property key
     * @param value Property value string
     * @param pos Parser position
     * @return SqlTableOption instance
     */
    public static SqlTableOption toTableOption(String key, String value, SqlParserPos pos);

    /**
     * Converts data types in column list for Hive compatibility
     * @param columns Column list to convert
     * @throws ParseException if conversion fails
     */
    public static void convertDataTypes(SqlNodeList columns) throws ParseException;

    /**
     * Converts data type in single column for Hive compatibility
     * @param column Column to convert
     * @throws ParseException if conversion fails
     */
    public static void convertDataTypes(SqlRegularColumn column) throws ParseException;
}
```

### Constraint Trait Management

Utility methods for working with Hive constraint traits.

```java { .api }
/**
 * Constraint trait utility methods
 * Provides encoding, decoding, and manipulation of constraint traits
 */
public class HiveDDLUtils {
    /**
     * Returns default constraint trait (ENABLE NOVALIDATE RELY)
     * @return Default trait as byte encoding
     */
    public static byte defaultTrait();

    /**
     * Enables constraint in trait
     * @param trait Current trait encoding
     * @return Modified trait with enable flag set
     */
    public static byte enableConstraint(byte trait);

    /**
     * Disables constraint in trait
     * @param trait Current trait encoding
     * @return Modified trait with enable flag cleared
     */
    public static byte disableConstraint(byte trait);

    /**
     * Sets validate flag in constraint trait
     * @param trait Current trait encoding
     * @return Modified trait with validate flag set
     */
    public static byte validateConstraint(byte trait);

    /**
     * Clears validate flag in constraint trait
     * @param trait Current trait encoding
     * @return Modified trait with validate flag cleared
     */
    public static byte noValidateConstraint(byte trait);

    /**
     * Sets rely flag in constraint trait
     * @param trait Current trait encoding
     * @return Modified trait with rely flag set
     */
    public static byte relyConstraint(byte trait);

    /**
     * Clears rely flag in constraint trait
     * @param trait Current trait encoding
     * @return Modified trait with rely flag cleared
     */
    public static byte noRelyConstraint(byte trait);

    /**
     * Checks if constraint is enabled
     * @param trait Trait encoding to check
     * @return true if constraint is enabled
     */
    public static boolean requireEnableConstraint(byte trait);

    /**
     * Checks if constraint validation is enabled
     * @param trait Trait encoding to check
     * @return true if validation is enabled
     */
    public static boolean requireValidateConstraint(byte trait);

    /**
     * Checks if constraint rely is enabled
     * @param trait Trait encoding to check
     * @return true if rely is enabled
     */
    public static boolean requireRelyConstraint(byte trait);

    /**
     * Encodes constraint trait object to byte representation
     * @param trait SqlHiveConstraintTrait object
     * @return Byte encoding of the trait
     */
    public static byte encodeConstraintTrait(SqlHiveConstraintTrait trait);
}
```

### Data Manipulation Utilities

Utility methods for data copying and manipulation.

```java { .api }
/**
 * Data manipulation utility methods
 * Provides deep copying and data transformation functions
 */
public class HiveDDLUtils {
    /**
     * Creates deep copy of column list
     * @param colList Original column list
     * @return Deep copy of the column list
     */
    public static SqlNodeList deepCopyColList(SqlNodeList colList);

    /**
     * Creates deep copy of table column
     * @param column Original column definition
     * @return Deep copy of the column
     */
    public static SqlRegularColumn deepCopyTableColumn(SqlRegularColumn column);
}
```

### String Processing Utilities

Utility methods for string literal processing and escaping.

```java { .api }
/**
 * String processing utility methods
 * Handles string literal escaping and unescaping for SQL compatibility
 */
public class HiveDDLUtils {
    /**
     * Unescapes string literals in property list
     * @param properties Property list containing string literals
     */
    public static void unescapeProperties(SqlNodeList properties);

    /**
     * Unescapes single string literal
     * @param literal String literal to unescape
     * @return Unescaped string literal
     */
    public static SqlCharStringLiteral unescapeStringLiteral(SqlCharStringLiteral literal);

    /**
     * Unescapes partition specification values
     * @param partSpec Partition specification to unescape
     */
    public static void unescapePartitionSpec(SqlNodeList partSpec);
}
```

## Usage Examples

### Property Validation

```java
// Validate table properties before table creation
SqlNodeList tableProps = new SqlNodeList(SqlParserPos.ZERO);
tableProps.add(new SqlTableOption("owner", "data_team", SqlParserPos.ZERO));
tableProps.add(new SqlTableOption("environment", "production", SqlParserPos.ZERO));

try {
    // Validate against reserved properties
    SqlNodeList validatedProps = HiveDDLUtils.checkReservedTableProperties(tableProps);

    // Ensure non-generic table creation
    SqlNodeList finalProps = HiveDDLUtils.ensureNonGeneric(validatedProps);

    System.out.println("Properties validated successfully");
} catch (ParseException e) {
    System.err.println("Property validation failed: " + e.getMessage());
}

// Example of reserved property that would fail validation
SqlNodeList invalidProps = new SqlNodeList(SqlParserPos.ZERO);
invalidProps.add(new SqlTableOption("hive.location-uri", "/data/test", SqlParserPos.ZERO)); // Reserved!

try {
    HiveDDLUtils.checkReservedTableProperties(invalidProps);
} catch (ParseException e) {
    System.err.println("Expected failure for reserved property: " + e.getMessage());
}
```

### Data Type Conversion

```java
// Create column list with data types that need conversion
SqlNodeList columns = new SqlNodeList(SqlParserPos.ZERO);

// TIMESTAMP column (will be converted for Hive compatibility)
SqlDataTypeSpec timestampType = new SqlDataTypeSpec(
    new SqlBasicTypeNameSpec(SqlTypeName.TIMESTAMP, SqlParserPos.ZERO),
    SqlParserPos.ZERO
);
SqlRegularColumn timestampCol = new SqlRegularColumn(
    SqlParserPos.ZERO,
    new SqlIdentifier("created_at", SqlParserPos.ZERO),
    timestampType,
    null, null
);
columns.add(timestampCol);

// BINARY column (will be converted for Hive compatibility)
SqlDataTypeSpec binaryType = new SqlDataTypeSpec(
    new SqlBasicTypeNameSpec(SqlTypeName.BINARY, SqlParserPos.ZERO),
    SqlParserPos.ZERO
);
SqlRegularColumn binaryCol = new SqlRegularColumn(
    SqlParserPos.ZERO,
    new SqlIdentifier("data_payload", SqlParserPos.ZERO),
    binaryType,
    null, null
);
columns.add(binaryCol);

try {
    // Convert data types for Hive compatibility
    HiveDDLUtils.convertDataTypes(columns);
    System.out.println("Data types converted successfully");
} catch (ParseException e) {
    System.err.println("Data type conversion failed: " + e.getMessage());
}
```

### Constraint Trait Management

```java
// Working with constraint traits
byte defaultTrait = HiveDDLUtils.defaultTrait();

System.out.println("Default trait - Enable: " + HiveDDLUtils.requireEnableConstraint(defaultTrait));
System.out.println("Default trait - Validate: " + HiveDDLUtils.requireValidateConstraint(defaultTrait));
System.out.println("Default trait - Rely: " + HiveDDLUtils.requireRelyConstraint(defaultTrait));

// Modify constraint traits
byte strictTrait = HiveDDLUtils.validateConstraint(defaultTrait);
byte lenientTrait = HiveDDLUtils.disableConstraint(defaultTrait);
byte optimizerOnlyTrait = HiveDDLUtils.noValidateConstraint(defaultTrait);

System.out.println("Strict trait validates: " + HiveDDLUtils.requireValidateConstraint(strictTrait));
System.out.println("Lenient trait enabled: " + HiveDDLUtils.requireEnableConstraint(lenientTrait));
System.out.println("Optimizer trait validates: " + HiveDDLUtils.requireValidateConstraint(optimizerOnlyTrait));

// Encode constraint trait object
SqlHiveConstraintTrait traitObj = new SqlHiveConstraintTrait(
    SqlHiveConstraintEnable.ENABLE.symbol(SqlParserPos.ZERO),
    SqlHiveConstraintValidate.VALIDATE.symbol(SqlParserPos.ZERO),
    SqlHiveConstraintRely.RELY.symbol(SqlParserPos.ZERO)
);

byte encodedTrait = HiveDDLUtils.encodeConstraintTrait(traitObj);
System.out.println("Encoded trait matches strict: " + (encodedTrait == strictTrait));
```

### String Processing

```java
// Process string literals with escaping
SqlNodeList properties = new SqlNodeList(SqlParserPos.ZERO);

// Add properties with escaped values
properties.add(new SqlTableOption("description", "Data contains\\ttabs and\\nnewlines", SqlParserPos.ZERO));
properties.add(new SqlTableOption("path", "/data/files with spaces/table", SqlParserPos.ZERO));
properties.add(new SqlTableOption("pattern", "*.txt", SqlParserPos.ZERO));

// Unescape all property values
HiveDDLUtils.unescapeProperties(properties);

// Unescape individual string literal
SqlCharStringLiteral escapedLiteral = SqlLiteral.createCharString("Value with\\tescapes\\n", SqlParserPos.ZERO);
SqlCharStringLiteral unescapedLiteral = HiveDDLUtils.unescapeStringLiteral(escapedLiteral);

System.out.println("Original: " + escapedLiteral.getValueAs(String.class));
System.out.println("Unescaped: " + unescapedLiteral.getValueAs(String.class));

// Process partition specifications
SqlNodeList partSpec = new SqlNodeList(SqlParserPos.ZERO);
partSpec.add(new SqlTableOption("date_str", "2023-12-01", SqlParserPos.ZERO));
partSpec.add(new SqlTableOption("path_segment", "/data/special\\tchars", SqlParserPos.ZERO));

HiveDDLUtils.unescapePartitionSpec(partSpec);
```

### Deep Copying

```java
// Deep copy column definitions for modification
SqlNodeList originalColumns = new SqlNodeList(SqlParserPos.ZERO);

SqlRegularColumn originalCol = new SqlRegularColumn(
    SqlParserPos.ZERO,
    new SqlIdentifier("user_id", SqlParserPos.ZERO),
    new SqlDataTypeSpec(new SqlBasicTypeNameSpec(SqlTypeName.BIGINT, SqlParserPos.ZERO), SqlParserPos.ZERO),
    null, null
);
originalColumns.add(originalCol);

// Create deep copy for modification
SqlNodeList copiedColumns = HiveDDLUtils.deepCopyColList(originalColumns);
SqlRegularColumn copiedCol = HiveDDLUtils.deepCopyTableColumn(originalCol);

// Modify copied version without affecting original
// (Modification logic would depend on specific use case)

System.out.println("Original and copied columns are independent");
System.out.println("Original column count: " + originalColumns.size());
System.out.println("Copied column count: " + copiedColumns.size());
```

## Advanced Utility Patterns

### Utility Service Class

```java
public class HiveUtilityService {

    /**
     * Validates and prepares table properties for Hive table creation
     */
    public SqlNodeList prepareTableProperties(Map<String, String> userProperties) throws ParseException {
        SqlNodeList properties = new SqlNodeList(SqlParserPos.ZERO);

        // Convert map to SqlNodeList
        for (Map.Entry<String, String> entry : userProperties.entrySet()) {
            SqlTableOption option = HiveDDLUtils.toTableOption(
                entry.getKey(),
                entry.getValue(),
                SqlParserPos.ZERO
            );
            properties.add(option);
        }

        // Validate properties
        properties = HiveDDLUtils.checkReservedTableProperties(properties);
        properties = HiveDDLUtils.ensureNonGeneric(properties);

        // Unescape string values
        HiveDDLUtils.unescapeProperties(properties);

        return properties;
    }

    /**
     * Prepares column definitions with proper data type conversion
     */
    public SqlNodeList prepareColumns(List<ColumnDefinition> columnDefs) throws ParseException {
        SqlNodeList columns = new SqlNodeList(SqlParserPos.ZERO);

        for (ColumnDefinition colDef : columnDefs) {
            SqlDataTypeSpec typeSpec = createDataTypeSpec(colDef.getType());
            SqlRegularColumn column = new SqlRegularColumn(
                SqlParserPos.ZERO,
                new SqlIdentifier(colDef.getName(), SqlParserPos.ZERO),
                typeSpec,
                null, // default value
                null // constraint
            );
            columns.add(column);
        }

        // Convert types for Hive compatibility
        HiveDDLUtils.convertDataTypes(columns);

        return columns;
    }

    /**
     * Creates constraint trait based on enforcement policy
     */
    public byte createConstraintTrait(String enforcementPolicy) {
        byte trait = HiveDDLUtils.defaultTrait();

        switch (enforcementPolicy.toUpperCase()) {
            case "STRICT":
                trait = HiveDDLUtils.enableConstraint(trait);
                trait = HiveDDLUtils.validateConstraint(trait);
                trait = HiveDDLUtils.relyConstraint(trait);
                break;
            case "OPTIMIZER_ONLY":
                trait = HiveDDLUtils.enableConstraint(trait);
                trait = HiveDDLUtils.noValidateConstraint(trait);
                trait = HiveDDLUtils.relyConstraint(trait);
                break;
            case "DISABLED":
                trait = HiveDDLUtils.disableConstraint(trait);
                trait = HiveDDLUtils.noValidateConstraint(trait);
                trait = HiveDDLUtils.noRelyConstraint(trait);
                break;
            default: // STANDARD
                // Use default trait (ENABLE NOVALIDATE RELY)
                break;
        }

        return trait;
    }

    private SqlDataTypeSpec createDataTypeSpec(String typeString) {
        // Implementation would parse type string and create appropriate SqlDataTypeSpec
        // This is a simplified example
        SqlTypeNameSpec typeNameSpec;

        switch (typeString.toUpperCase()) {
            case "STRING":
                typeNameSpec = new SqlBasicTypeNameSpec(SqlTypeName.VARCHAR, SqlParserPos.ZERO);
                break;
            case "BIGINT":
                typeNameSpec = new SqlBasicTypeNameSpec(SqlTypeName.BIGINT, SqlParserPos.ZERO);
                break;
            case "TIMESTAMP":
                typeNameSpec = new SqlBasicTypeNameSpec(SqlTypeName.TIMESTAMP, SqlParserPos.ZERO);
                break;
            default:
                throw new IllegalArgumentException("Unsupported type: " + typeString);
        }

        return new SqlDataTypeSpec(typeNameSpec, SqlParserPos.ZERO);
    }

    public static class ColumnDefinition {
        private final String name;
        private final String type;

        public ColumnDefinition(String name, String type) {
            this.name = name;
            this.type = type;
        }

        public String getName() { return name; }
        public String getType() { return type; }
    }
}

// Usage
HiveUtilityService service = new HiveUtilityService();

// Prepare table properties
Map<String, String> userProps = Map.of(
    "owner", "data_team",
    "environment", "production",
    "description", "Sales data with\\ttabs"
);

SqlNodeList tableProperties = service.prepareTableProperties(userProps);

// Prepare columns
List<HiveUtilityService.ColumnDefinition> columnDefs = List.of(
    new HiveUtilityService.ColumnDefinition("id", "BIGINT"),
    new HiveUtilityService.ColumnDefinition("name", "STRING"),
    new HiveUtilityService.ColumnDefinition("created_at", "TIMESTAMP")
);

SqlNodeList columns = service.prepareColumns(columnDefs);

// Create constraint trait
byte strictTrait = service.createConstraintTrait("STRICT");
byte optimizerTrait = service.createConstraintTrait("OPTIMIZER_ONLY");

System.out.println("Strict constraint validates: " + HiveDDLUtils.requireValidateConstraint(strictTrait));
System.out.println("Optimizer constraint validates: " + HiveDDLUtils.requireValidateConstraint(optimizerTrait));
```