or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

Files

docs

constraint-system.md · data-manipulation.md · database-operations.md · index.md · parser-integration.md · partition-management.md · table-operations.md · type-system.md · utilities.md · view-operations.md

docs/table-operations.md

0

# Table Operations

1

2

Table operations provide comprehensive table management including creation with Hive-specific features, alteration, and column management.

3

4

## Capabilities

5

6

### Table Creation

7

8

Create Hive tables with comprehensive support for partitioning, storage formats, and table properties.

9

10

```java { .api }

11

/**

12

* CREATE TABLE statement for Hive dialect with full Hive table features

13

* Supports partitioning, external tables, storage formats, and row formats

14

*/

15

public class SqlCreateHiveTable extends SqlCreateTable {

16

/**

17

* Creates a new Hive table creation statement

18

* @param pos Parser position information

19

* @param tableName Name of the table to create

20

* @param columnList List of table columns

21

* @param creationContext Context containing constraints and metadata

22

* @param propertyList Table properties (TBLPROPERTIES)

23

* @param partColList Partition columns

24

* @param comment Table comment

25

* @param isTemporary Whether table is temporary

26

* @param isExternal Whether table is external

27

* @param rowFormat Row format specification (SERDE or DELIMITED)

28

* @param storedAs Storage format specification

29

* @param location Table location URI

30

* @param ifNotExists Whether to use IF NOT EXISTS clause

31

* @throws ParseException if validation fails

32

*/

33

public SqlCreateHiveTable(SqlParserPos pos, SqlIdentifier tableName, SqlNodeList columnList,

34

HiveTableCreationContext creationContext, SqlNodeList propertyList,

35

SqlNodeList partColList, SqlCharStringLiteral comment, boolean isTemporary,

36

boolean isExternal, HiveTableRowFormat rowFormat,

37

HiveTableStoredAs storedAs, SqlCharStringLiteral location,

38

boolean ifNotExists) throws ParseException;

39

}

40

```

41

42

**Constants:**

43

```java { .api }

44

public static final String IDENTIFIER = "hive";

45

public static final String TABLE_LOCATION_URI = "hive.location-uri";

46

public static final String TABLE_IS_EXTERNAL = "hive.is-external";

47

public static final String PK_CONSTRAINT_TRAIT = "hive.pk.constraint.trait";

48

public static final String NOT_NULL_CONSTRAINT_TRAITS = "hive.not.null.constraint.traits";

49

public static final String NOT_NULL_COLS = "hive.not.null.cols";

50

```

51

52

### Table Creation Context

53

54

Context class for managing table constraints and metadata during creation.

55

56

```java { .api }

57

/**

58

* Context for Hive table creation containing constraint information

59

*/

60

public static class HiveTableCreationContext extends TableCreationContext {

61

/** Primary key constraint trait */

62

public SqlHiveConstraintTrait pkTrait = null;

63

64

/** List of NOT NULL constraint traits */

65

public List<SqlHiveConstraintTrait> notNullTraits = null;

66

67

/** List of NOT NULL column identifiers */

68

public List<SqlIdentifier> notNullCols = null;

69

}

70

```

71

72

### Row Format Specification

73

74

Specification for Hive table row format including SERDE and delimited formats.

75

76

```java { .api }

77

/**

78

* ROW FORMAT specification for Hive tables

79

* Supports both SERDE-based and delimited field formats

80

*/

81

public static class HiveTableRowFormat {

82

/**

83

* Creates delimited row format specification

84

* @param pos Parser position

85

* @param fieldsTerminator Field delimiter character

86

* @param escape Escape character

87

* @param collectionTerminator Collection item delimiter

88

* @param mapKeyTerminator Map key-value delimiter

89

* @param linesTerminator Line terminator character

90

* @param nullAs NULL value representation

91

* @return HiveTableRowFormat instance

92

* @throws ParseException if validation fails

93

*/

94

public static HiveTableRowFormat withDelimited(SqlParserPos pos,

95

SqlCharStringLiteral fieldsTerminator,

96

SqlCharStringLiteral escape,

97

SqlCharStringLiteral collectionTerminator,

98

SqlCharStringLiteral mapKeyTerminator,

99

SqlCharStringLiteral linesTerminator,

100

SqlCharStringLiteral nullAs) throws ParseException;

101

102

/**

103

* Creates SERDE-based row format specification

104

* @param pos Parser position

105

* @param serdeClass SerDe class name

106

* @param serdeProps SerDe properties

107

* @return HiveTableRowFormat instance

108

* @throws ParseException if validation fails

109

*/

110

public static HiveTableRowFormat withSerDe(SqlParserPos pos, SqlCharStringLiteral serdeClass,

111

SqlNodeList serdeProps) throws ParseException;

112

113

/**

114

* Converts row format to property list representation

115

* @return SqlNodeList containing row format properties

116

*/

117

public SqlNodeList toPropList();

118

}

119

```

120

121

**Constants:**

122

```java { .api }

123

public static final String SERDE_LIB_CLASS_NAME = "hive.serde.lib.class.name";

124

public static final String SERDE_INFO_PROP_PREFIX = "hive.serde.info.prop.";

125

public static final String FIELD_DELIM = SERDE_INFO_PROP_PREFIX + "field.delim";

126

public static final String COLLECTION_DELIM = SERDE_INFO_PROP_PREFIX + "collection.delim";

127

public static final String ESCAPE_CHAR = SERDE_INFO_PROP_PREFIX + "escape.delim";

128

public static final String MAPKEY_DELIM = SERDE_INFO_PROP_PREFIX + "mapkey.delim";

129

public static final String LINE_DELIM = SERDE_INFO_PROP_PREFIX + "line.delim";

130

public static final String SERIALIZATION_NULL_FORMAT = SERDE_INFO_PROP_PREFIX + "serialization.null.format";

131

```

132

133

### Storage Format Specification

134

135

Specification for Hive table storage format including file formats and input/output formats.

136

137

```java { .api }

138

/**

139

* STORED AS specification for Hive tables

140

* Supports file format and input/output format specifications

141

*/

142

public static class HiveTableStoredAs {

143

/**

144

* Creates storage format from file format

145

* @param pos Parser position

146

* @param fileFormat File format identifier (PARQUET, ORC, TEXTFILE, etc.)

147

* @return HiveTableStoredAs instance

148

* @throws ParseException if validation fails

149

*/

150

public static HiveTableStoredAs ofFileFormat(SqlParserPos pos, SqlIdentifier fileFormat) throws ParseException;

151

152

/**

153

* Creates storage format from input/output format classes

154

* @param pos Parser position

155

* @param inputFormat Input format class name

156

* @param outputFormat Output format class name

157

* @return HiveTableStoredAs instance

158

* @throws ParseException if validation fails

159

*/

160

public static HiveTableStoredAs ofInputOutputFormat(SqlParserPos pos,

161

SqlCharStringLiteral inputFormat,

162

SqlCharStringLiteral outputFormat) throws ParseException;

163

164

/**

165

* Converts storage format to property list representation

166

* @return SqlNodeList containing storage format properties

167

*/

168

public SqlNodeList toPropList();

169

}

170

```

171

172

**Constants:**

173

```java { .api }

174

public static final String STORED_AS_FILE_FORMAT = "hive.storage.file-format";

175

public static final String STORED_AS_INPUT_FORMAT = "hive.stored.as.input.format";

176

public static final String STORED_AS_OUTPUT_FORMAT = "hive.stored.as.output.format";

177

```

178

179

### Table Alteration Base

180

181

Abstract base class for all table alteration operations.

182

183

```java { .api }

184

/**

185

* Base class for ALTER TABLE operations

186

* Provides common functionality for all table alterations

187

*/

188

public abstract class SqlAlterHiveTable extends SqlAlterTable {

189

/**

190

* Creates base table alteration statement

191

* @param op Type of alteration operation

192

* @param pos Parser position information

193

* @param tableName Name of table to alter

194

* @param partSpec Partition specification (if applicable)

195

* @param propertyList Properties associated with the alteration

196

*/

197

public SqlAlterHiveTable(AlterTableOp op, SqlParserPos pos, SqlIdentifier tableName,

198

SqlNodeList partSpec, SqlNodeList propertyList);

199

200

/**

201

* Types of table alteration operations supported

202

*/

203

public enum AlterTableOp {

204

CHANGE_TBL_PROPS, // Change table properties

205

CHANGE_SERDE_PROPS, // Change SerDe properties

206

CHANGE_FILE_FORMAT, // Change file format

207

CHANGE_LOCATION, // Change table location

208

ALTER_COLUMNS // Alter table columns

209

}

210

}

211

```

212

213

**Constants:**

214

```java { .api }

215

public static final String ALTER_TABLE_OP = "alter.table.op";

216

public static final String ALTER_COL_CASCADE = "alter.column.cascade";

217

```

218

219

### Table Properties Alteration

220

221

Change table properties (TBLPROPERTIES) of an existing table.

222

223

```java { .api }

224

/**

225

* ALTER TABLE SET TBLPROPERTIES statement

226

* Changes the properties of an existing table

227

*/

228

public class SqlAlterHiveTableProps extends SqlAlterHiveTable {

229

/**

230

* Creates table properties alteration statement

231

* @param pos Parser position information

232

* @param tableName Name of table to alter

233

* @param propertyList New properties to set

234

* @throws ParseException if properties validation fails

235

*/

236

public SqlAlterHiveTableProps(SqlParserPos pos, SqlIdentifier tableName,

237

SqlNodeList propertyList) throws ParseException;

238

}

239

```

240

241

### Table Location Alteration

242

243

Change the location of an existing table or partition.

244

245

```java { .api }

246

/**

247

* ALTER TABLE SET LOCATION statement

248

* Changes the location URI of an existing table or partition

249

*/

250

public class SqlAlterHiveTableLocation extends SqlAlterHiveTable {

251

/**

252

* Creates table location alteration statement

253

* @param pos Parser position information

254

* @param tableName Name of table to alter

255

* @param partitionSpec Partition specification (null for entire table)

256

* @param location New location URI

257

*/

258

public SqlAlterHiveTableLocation(SqlParserPos pos, SqlIdentifier tableName,

259

SqlNodeList partitionSpec, SqlCharStringLiteral location);

260

}

261

```

262

263

### Table File Format Alteration

264

265

Change the file format of an existing table or partition.

266

267

```java { .api }

268

/**

269

* ALTER TABLE SET FILEFORMAT statement

270

* Changes the file format of an existing table or partition

271

*/

272

public class SqlAlterHiveTableFileFormat extends SqlAlterHiveTable {

273

/**

274

* Creates table file format alteration statement

275

* @param pos Parser position information

276

* @param tableName Name of table to alter

277

* @param partSpec Partition specification (null for entire table)

278

* @param format New file format

279

*/

280

public SqlAlterHiveTableFileFormat(SqlParserPos pos, SqlIdentifier tableName,

281

SqlNodeList partSpec, SqlIdentifier format);

282

}

283

```

284

285

### Table SerDe Alteration

286

287

Change the SerDe (Serializer/Deserializer) of an existing table or partition.

288

289

```java { .api }

290

/**

291

* ALTER TABLE SET SERDE statement

292

* Changes the SerDe class and properties of an existing table or partition

293

*/

294

public class SqlAlterHiveTableSerDe extends SqlAlterHiveTable {

295

/**

296

* Creates table SerDe alteration statement

297

* @param pos Parser position information

298

* @param tableName Name of table to alter

299

* @param partitionSpec Partition specification (null for entire table)

300

* @param propertyList SerDe properties

301

* @param serdeLib SerDe library class name

302

* @throws ParseException if validation fails

303

*/

304

public SqlAlterHiveTableSerDe(SqlParserPos pos, SqlIdentifier tableName,

305

SqlNodeList partitionSpec, SqlNodeList propertyList,

306

SqlCharStringLiteral serdeLib) throws ParseException;

307

}

308

```

309

310

### Column Management

311

312

Add, replace, or change table columns.

313

314

```java { .api }

315

/**

316

* ALTER TABLE ADD/REPLACE COLUMNS statement

317

* Adds new columns or replaces existing columns

318

*/

319

public class SqlAlterHiveTableAddReplaceColumn extends SqlAlterHiveTable {

320

/**

321

* Creates add/replace columns statement

322

* @param pos Parser position information

323

* @param tableName Name of table to alter

324

* @param cascade Whether to cascade to partitions

325

* @param columns List of columns to add/replace

326

* @param replace True for REPLACE COLUMNS, false for ADD COLUMNS

327

* @throws ParseException if validation fails

328

*/

329

public SqlAlterHiveTableAddReplaceColumn(SqlParserPos pos, SqlIdentifier tableName,

330

boolean cascade, SqlNodeList columns,

331

boolean replace) throws ParseException;

332

}

333

334

/**

335

* ALTER TABLE CHANGE COLUMN statement

336

* Changes an existing column definition

337

*/

338

public class SqlAlterHiveTableChangeColumn extends SqlAlterHiveTable {

339

/**

340

* Creates change column statement

341

* @param pos Parser position information

342

* @param tableName Name of table to alter

343

* @param cascade Whether to cascade to partitions

344

* @param oldName Current column name

345

* @param newColumn New column definition

346

* @param first Whether to place column first

347

* @param after Column to place new column after

348

* @throws ParseException if validation fails

349

*/

350

public SqlAlterHiveTableChangeColumn(SqlParserPos pos, SqlIdentifier tableName,

351

boolean cascade, SqlIdentifier oldName,

352

SqlRegularColumn newColumn, boolean first,

353

SqlIdentifier after) throws ParseException;

354

}

355

```

356

357

## Usage Examples

358

359

### Complete Table Creation

360

361

```java

362

// Create partitioned external table with custom storage

363

String createTableSql = """

364

CREATE EXTERNAL TABLE IF NOT EXISTS sales_data (

365

id BIGINT COMMENT 'Unique transaction ID',

366

customer_id STRING COMMENT 'Customer identifier',

367

product_name STRING COMMENT 'Product name',

368

amount DECIMAL(10,2) COMMENT 'Transaction amount',

369

transaction_date DATE COMMENT 'Date of transaction'

370

)

371

COMMENT 'Sales transaction data'

372

PARTITIONED BY (

373

year INT COMMENT 'Year of transaction',

374

month INT COMMENT 'Month of transaction'

375

)

376

ROW FORMAT DELIMITED

377

FIELDS TERMINATED BY '\\t'

378

COLLECTION ITEMS TERMINATED BY ','

379

MAP KEYS TERMINATED BY ':'

380

LINES TERMINATED BY '\\n'

381

NULL DEFINED AS '\\\\N'

382

STORED AS PARQUET

383

LOCATION '/data/sales'

384

TBLPROPERTIES (

385

'parquet.compression' = 'SNAPPY',

386

'transactional' = 'true',

387

'owner' = 'data_team'

388

)

389

""";

390

```

391

392

### Table with SerDe

393

394

```java

395

// Create table with custom SerDe

396

String customSerdeTableSql = """

397

CREATE TABLE json_data (

398

id BIGINT,

399

data STRING,

400

metadata MAP<STRING, STRING>

401

)

402

ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'

403

WITH SERDEPROPERTIES (

404

'serialization.format' = '1',

405

'ignore.malformed.json' = 'true'

406

)

407

STORED AS TEXTFILE

408

""";

409

```

410

411

### Table Alterations

412

413

```java

414

// Change table properties

415

String alterPropsSql = """

416

ALTER TABLE sales_data SET TBLPROPERTIES (

417

'last_modified' = '2023-12-01',

418

'retention_days' = '1095',

419

'backup_enabled' = 'true'

420

)

421

""";

422

423

// Change table location

424

String alterLocationSql = "ALTER TABLE sales_data SET LOCATION '/data/sales_v2'";

425

426

// Change partition location

427

String alterPartLocationSql = """

428

ALTER TABLE sales_data PARTITION (year=2023, month=12)

429

SET LOCATION '/data/sales/2023/12'

430

""";

431

432

// Add columns

433

String addColumnsSql = """

434

ALTER TABLE sales_data ADD COLUMNS (

435

discount_rate DECIMAL(5,4) COMMENT 'Applied discount rate',

436

payment_method STRING COMMENT 'Payment method used'

437

) CASCADE

438

""";

439

440

// Change column

441

String changeColumnSql = """

442

ALTER TABLE sales_data CHANGE COLUMN amount

443

total_amount DECIMAL(12,2) COMMENT 'Total transaction amount'

444

AFTER customer_id CASCADE

445

""";

446

```

447

448

### Programmatic Table Creation

449

450

```java

451

// Build table creation programmatically

452

SqlIdentifier tableName = new SqlIdentifier("analytics_data", SqlParserPos.ZERO);

453

454

// Define columns

455

SqlNodeList columns = new SqlNodeList(SqlParserPos.ZERO);

456

columns.add(new SqlRegularColumn(

457

SqlParserPos.ZERO,

458

new SqlIdentifier("id", SqlParserPos.ZERO),

459

new SqlDataTypeSpec(new SqlBasicTypeNameSpec(SqlTypeName.BIGINT, SqlParserPos.ZERO), SqlParserPos.ZERO),

460

null, // no default

461

null // no constraint

462

));

463

464

// Define partition columns

465

SqlNodeList partColumns = new SqlNodeList(SqlParserPos.ZERO);

466

partColumns.add(new SqlRegularColumn(

467

SqlParserPos.ZERO,

468

new SqlIdentifier("date_partition", SqlParserPos.ZERO),

469

new SqlDataTypeSpec(new SqlBasicTypeNameSpec(SqlTypeName.DATE, SqlParserPos.ZERO), SqlParserPos.ZERO),

470

null,

471

null

472

));

473

474

// Define storage format

475

HiveTableStoredAs storedAs = HiveTableStoredAs.ofFileFormat(

476

SqlParserPos.ZERO,

477

new SqlIdentifier("PARQUET", SqlParserPos.ZERO)

478

);

479

480

// Define row format

481

HiveTableRowFormat rowFormat = HiveTableRowFormat.withDelimited(

482

SqlParserPos.ZERO,

483

SqlLiteral.createCharString(",", SqlParserPos.ZERO), // field delimiter

484

SqlLiteral.createCharString("\\", SqlParserPos.ZERO), // escape char

485

SqlLiteral.createCharString("|", SqlParserPos.ZERO), // collection delimiter

486

SqlLiteral.createCharString(":", SqlParserPos.ZERO), // map key delimiter

487

SqlLiteral.createCharString("\\n", SqlParserPos.ZERO), // line delimiter

488

SqlLiteral.createCharString("NULL", SqlParserPos.ZERO) // null format

489

);

490

491

// Create table

492

SqlCreateHiveTable createTable = new SqlCreateHiveTable(

493

SqlParserPos.ZERO,

494

tableName,

495

columns,

496

new HiveTableCreationContext(),

497

new SqlNodeList(SqlParserPos.ZERO), // properties

498

partColumns,

499

SqlLiteral.createCharString("Analytics data table", SqlParserPos.ZERO),

500

false, // not temporary

501

true, // external table

502

rowFormat,

503

storedAs,

504

SqlLiteral.createCharString("/data/analytics", SqlParserPos.ZERO),

505

true // IF NOT EXISTS

506

);

507

```

508

509

### Table Description

510

511

Describe Hive tables to get detailed metadata information.

512

513

```java { .api }

514

/**

515

* DESCRIBE statement for Hive tables with extended and formatted options

516

* Provides detailed metadata about table structure, partitions, and properties

517

*/

518

public class SqlDescribeHiveTable extends SqlRichDescribeTable {

519

/**

520

* Creates a describe table statement

521

* @param pos Parser position information

522

* @param tableNameIdentifier Name of table to describe

523

* @param extended Whether to use EXTENDED keyword for detailed metadata

524

* @param formatted Whether to use FORMATTED keyword for formatted output

525

*/

526

public SqlDescribeHiveTable(SqlParserPos pos, SqlIdentifier tableNameIdentifier,

527

boolean extended, boolean formatted);

528

529

public boolean isExtended();

530

public boolean isFormatted();

531

}

532

```

533

534

**Usage Examples:**

535

536

```java

537

// Basic table description

538

String basicDescribeSql = "DESCRIBE sales_data";

539

540

// Extended description with detailed metadata

541

String extendedDescribeSql = "DESCRIBE EXTENDED sales_data";

542

543

// Formatted description with structured output

544

String formattedDescribeSql = "DESCRIBE FORMATTED sales_data";

545

546

// Programmatic table description

547

SqlIdentifier tableName = new SqlIdentifier("customer_profile", SqlParserPos.ZERO);

548

549

// Basic describe

550

SqlDescribeHiveTable basicDescribe = new SqlDescribeHiveTable(

551

SqlParserPos.ZERO,

552

tableName,

553

false, // not extended

554

false // not formatted

555

);

556

557

// Extended describe for detailed metadata

558

SqlDescribeHiveTable extendedDescribe = new SqlDescribeHiveTable(

559

SqlParserPos.ZERO,

560

tableName,

561

true, // extended

562

false // not formatted

563

);

564

565

// Formatted describe for structured output

566

SqlDescribeHiveTable formattedDescribe = new SqlDescribeHiveTable(

567

SqlParserPos.ZERO,

568

tableName,

569

false, // not extended

570

true // formatted

571

);

572

573

// Check describe options

574

if (extendedDescribe.isExtended()) {

575

System.out.println("Will show extended metadata");

576

}

577

578

if (formattedDescribe.isFormatted()) {

579

System.out.println("Will show formatted output");

580

}

581

```

582

583

**Describe Output Types:**

584

585

- **Basic DESCRIBE**: Shows column names, types, and comments

586

- **DESCRIBE EXTENDED**: Includes basic info plus detailed table properties, location, input/output formats, and storage descriptors

587

- **DESCRIBE FORMATTED**: Provides human-readable formatted output with sections for table info, storage info, and partition info

588

589

**Integration with Table Analysis:**

590

591

```java

592

public class HiveTableAnalyzer {

593

private SqlParser parser;

594

private TableEnvironment tableEnv;

595

596

public HiveTableAnalyzer(TableEnvironment tableEnv) {

597

this.tableEnv = tableEnv;

598

this.parser = SqlParser.create("",

599

SqlParser.config().withParserFactory(FlinkHiveSqlParserImpl.FACTORY));

600

}

601

602

/**

603

* Get comprehensive table information

604

*/

605

public void analyzeTable(String tableName) {

606

try {

607

// Get basic structure

608

String basicDescribe = "DESCRIBE " + tableName;

609

TableResult basicResult = tableEnv.executeSql(basicDescribe);

610

System.out.println("=== Table Structure ===");

611

basicResult.print();

612

613

// Get detailed metadata

614

String extendedDescribe = "DESCRIBE EXTENDED " + tableName;

615

TableResult extendedResult = tableEnv.executeSql(extendedDescribe);

616

System.out.println("=== Extended Metadata ===");

617

extendedResult.print();

618

619

// Get formatted output

620

String formattedDescribe = "DESCRIBE FORMATTED " + tableName;

621

TableResult formattedResult = tableEnv.executeSql(formattedDescribe);

622

System.out.println("=== Formatted Output ===");

623

formattedResult.print();

624

625

} catch (Exception e) {

626

System.err.println("Failed to analyze table " + tableName + ": " + e.getMessage());

627

}

628

}

629

630

/**

631

* Analyze partitioned table

632

*/

633

public void analyzePartitionedTable(String tableName) {

634

analyzeTable(tableName);

635

636

try {

637

// Also show partition information

638

String showPartitions = "SHOW PARTITIONS " + tableName;

639

TableResult partitionResult = tableEnv.executeSql(showPartitions);

640

System.out.println("=== Partition Information ===");

641

partitionResult.print();

642

643

} catch (Exception e) {

644

System.err.println("Failed to get partition info for " + tableName + ": " + e.getMessage());

645

}

646

}

647

}

648

649

// Usage

650

HiveTableAnalyzer analyzer = new HiveTableAnalyzer(tableEnv);

651

652

// Analyze basic table

653

analyzer.analyzeTable("customer_profile");

654

655

// Analyze partitioned table with full details

656

analyzer.analyzePartitionedTable("sales_data");

657

```