or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

Files

docs

cli-operations.md environment-management.md index.md operation-management.md server-management.md service-management.md session-management.md sql-execution.md ui-components.md

docs/operation-management.md

# Operation Management

Management of SQL operations including statement execution, result streaming, and operation lifecycle tracking.

## Capabilities

### SparkSQLOperationManager

Internal operation manager for executing SQL operations and maintaining handles to active queries. This class is used internally by the session manager.

```scala { .api }
/**
 * Executes queries using Spark SQL, and maintains a list of handles to active queries.
 */
private[thriftserver] class SparkSQLOperationManager() extends OperationManager with Logging {

  /** Map of operation handles to operation instances */
  val handleToOperation: JMap[OperationHandle, Operation]

  /** Map of session handles to active thread pool names */
  val sessionToActivePool: ConcurrentHashMap[SessionHandle, String]

  /** Map of session handles to SQL contexts */
  val sessionToContexts: ConcurrentHashMap[SessionHandle, SQLContext]

  /**
   * Create a new execute statement operation
   * @param parentSession The parent Hive session
   * @param statement SQL statement to execute
   * @param confOverlay Configuration overlay for this operation
   * @param async Whether to run the operation asynchronously
   * @return New ExecuteStatementOperation instance
   */
  override def newExecuteStatementOperation(
      parentSession: HiveSession,
      statement: String,
      confOverlay: JMap[String, String],
      async: Boolean
  ): ExecuteStatementOperation

  /**
   * Set configuration map on SQL configuration
   * @param conf SQL configuration object
   * @param confMap Configuration map to apply
   */
  def setConfMap(conf: SQLConf, confMap: java.util.Map[String, String]): Unit
}
```

### SparkExecuteStatementOperation

Implementation for executing SQL statements with result streaming and lifecycle management.

```scala { .api }
/**
 * Spark implementation of statement execution operation
 * @param parentSession Parent Hive session
 * @param statement SQL statement to execute
 * @param confOverlay Configuration overlay for this operation
 * @param runInBackground Whether to run in background (default: true)
 * @param sqlContext SQL context for execution
 * @param sessionToActivePool Session to pool mapping
 */
private[hive] class SparkExecuteStatementOperation(
    parentSession: HiveSession,
    statement: String,
    confOverlay: JMap[String, String],
    runInBackground: Boolean = true
  )(sqlContext: SQLContext, sessionToActivePool: JMap[SessionHandle, String])
  extends ExecuteStatementOperation(parentSession, statement, confOverlay, runInBackground)
  with Logging {

  /**
   * Close the operation and cleanup resources
   */
  def close(): Unit

  /**
   * Get the next set of result rows
   * @param order Fetch orientation (FETCH_NEXT, FETCH_FIRST, etc.)
   * @param maxRowsL Maximum number of rows to fetch
   * @return RowSet containing the results
   */
  def getNextRowSet(order: FetchOrientation, maxRowsL: Long): RowSet

  /**
   * Get the result set schema
   * @return TableSchema describing the result structure
   */
  def getResultSetSchema: TableSchema

  /**
   * Add non-null column value to result buffer (used for type conversion)
   * @param from Source Spark row
   * @param to Target buffer
   * @param ordinal Column ordinal
   */
  def addNonNullColumnValue(from: SparkRow, to: ArrayBuffer[Any], ordinal: Int): Unit
}
```

## Types

### Related Types for Operation Management

```scala { .api }
import java.util.{Map => JMap}
import java.util.concurrent.ConcurrentHashMap

import scala.collection.mutable.ArrayBuffer

import org.apache.hive.service.cli._
import org.apache.hive.service.cli.operation.{ExecuteStatementOperation, Operation, OperationManager}
import org.apache.hive.service.cli.session.HiveSession

import org.apache.spark.sql.{DataFrame, Row => SparkRow, SQLContext}
import org.apache.spark.sql.internal.SQLConf
```