or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

Files

docs

exception-handling.md · index.md · java-api-functions.md · logging.md · network-utilities.md · storage-configuration.md

docs/exception-handling.md

0

# Exception Handling and Error Management

1

2

Comprehensive exception handling system providing structured error reporting, error classes, and detailed context information for debugging and error recovery in Spark applications.

3

4

## Capabilities

5

6

### SparkException

7

8

Main exception class for Spark operations with support for error classes, message parameters, and query context.

9

10

```scala { .api }

11

/**

12

* Main Spark exception with structured error information

13

* @param message - Human-readable error message

14

* @param cause - Optional underlying cause

15

* @param errorClass - Optional error classification

16

* @param messageParameters - Key-value parameters for error message formatting

17

* @param context - Array of query context information

18

*/

19

class SparkException(

20

message: String,

21

cause: Throwable = null,

22

errorClass: Option[String] = None,

23

messageParameters: Map[String, String] = Map.empty,

24

context: Array[QueryContext] = Array.empty

25

) extends Exception(message, cause) with SparkThrowable {

26

27

/** Returns the error class identifier */

28

def getErrorClass(): String

29

30

/** Returns error message parameters as Java Map */

31

def getMessageParameters(): java.util.Map[String, String]

32

33

/** Returns query context information */

34

def getQueryContext(): Array[QueryContext]

35

}

36

37

object SparkException {

38

/** Creates an internal error exception */

39

def internalError(msg: String): SparkException

40

41

/** Creates a categorized internal error exception */

42

def internalError(msg: String, category: String): SparkException

43

44

/** Creates an internal error exception with cause */

45

def internalError(msg: String, cause: Throwable): SparkException

46

}

47

```

48

49

**Usage Examples:**

50

51

```scala

52

import org.apache.spark.SparkException

53

54

// Basic exception creation

55

val ex = new SparkException("Operation failed")

56

57

// Exception with error class and parameters

58

val structuredEx = new SparkException(
  message = "Invalid value '-1' for parameter 'timeout'",
  errorClass = Some("INVALID_PARAMETER_VALUE"),
  messageParameters = Map("parameter" -> "timeout", "value" -> "-1")
)

63

64

// Creating internal errors

65

val internalEx = SparkException.internalError("Unexpected state in executor")

66

67

// Exception handling

68

try {

69

// Some Spark operation

70

} catch {

71

case ex: SparkException =>

72

println(s"Error class: ${ex.getErrorClass}")

73

println(s"Parameters: ${ex.getMessageParameters}")

74

ex.getQueryContext.foreach { ctx =>

75

println(s"Error in ${ctx.objectType}: ${ctx.objectName}")

76

}

77

}

78

```

79

80

### SparkThrowable

81

82

Interface for standardized error handling in Spark exceptions, providing consistent error classification and context.

83

84

```java { .api }

85

/**

86

* Interface for Spark throwables with structured error information

87

*/

88

public interface SparkThrowable {

89

/** Returns the error class identifier */

90

String getErrorClass();

91

92

/** Returns SQL state for the error class */

93

default String getSqlState() {

94

return SparkThrowableHelper.getSqlState(this.getErrorClass());

95

}

96

97

/** Checks if this error is an internal error */

98

default boolean isInternalError() {

99

return SparkThrowableHelper.isInternalError(this.getErrorClass());

100

}

101

102

/** Returns error parameters for message formatting */

103

default Map<String, String> getMessageParameters() {

104

return new HashMap<>();

105

}

106

107

/** Returns query context information */

108

default QueryContext[] getQueryContext() {

109

return new QueryContext[0];

110

}

111

}

112

```

113

114

**Usage Examples:**

115

116

```java

117

import org.apache.spark.SparkThrowable;

118

import org.apache.spark.SparkException;

119

120

// Handling SparkThrowable in Java

121

// A catch parameter must be a Throwable subclass, so catch Exception
// and test for the SparkThrowable interface explicitly.
try {
  // Spark operation
} catch (Exception e) {
  if (e instanceof SparkThrowable) {
    SparkThrowable ex = (SparkThrowable) e;
    System.out.println("Error class: " + ex.getErrorClass());
    System.out.println("Is internal: " + ex.isInternalError());

    Map<String, String> params = ex.getMessageParameters();
    params.forEach((key, value) ->
        System.out.println(key + ": " + value));
  }
}

131

```

132

133

### QueryContext

134

135

Provides context information for SparkThrowable to help locate error sources in queries and operations.

136

137

```java { .api }

138

/**

139

* Query context information for error reporting

140

*/

141

public interface QueryContext {

142

/** Returns the type of object where error occurred */

143

String objectType();

144

145

/** Returns the name of object where error occurred */

146

String objectName();

147

148

/** Returns start index in the query fragment */

149

int startIndex();

150

151

/** Returns stop index in the query fragment */

152

int stopIndex();

153

154

/** Returns the relevant query fragment */

155

String fragment();

156

}

157

```

158

159

**Usage Examples:**

160

161

```java

162

import org.apache.spark.QueryContext;

163

164

// Processing query context from exception

165

public void handleSparkException(SparkException ex) {

166

QueryContext[] contexts = ex.getQueryContext();

167

168

for (QueryContext ctx : contexts) {

169

System.out.println("Object type: " + ctx.objectType());

170

System.out.println("Object name: " + ctx.objectName());

171

System.out.println("Fragment: " + ctx.fragment());

172

System.out.println("Position: " + ctx.startIndex() + "-" + ctx.stopIndex());

173

}

174

}

175

```

176

177

### ErrorClassesJsonReader

178

179

Reader for loading error information from JSON configuration files, enabling structured error message formatting.

180

181

```scala { .api }

182

/**

183

* Reader for error class definitions from JSON files

184

* @param jsonFileURLs - Sequence of URLs pointing to error definition JSON files

185

*/

186

class ErrorClassesJsonReader(jsonFileURLs: Seq[URL]) {

187

188

/**

189

* Gets formatted error message for the given error class

190

* @param errorClass - Error class identifier

191

* @param messageParameters - Parameters for message formatting

192

* @return Formatted error message

193

*/

194

def getErrorMessage(errorClass: String, messageParameters: Map[String, String]): String

195

196

/**

197

* Gets raw message template for the given error class

198

* @param errorClass - Error class identifier

199

* @return Message template with parameter placeholders

200

*/

201

def getMessageTemplate(errorClass: String): String

202

203

/**

204

* Gets SQL state for the given error class

205

* @param errorClass - Error class identifier

206

* @return SQL state code or null if not defined

207

*/

208

def getSqlState(errorClass: String): String

209

}

210

```

211

212

**Usage Examples:**

213

214

```scala

215

import org.apache.spark.ErrorClassesJsonReader

216

import java.net.URL

217

218

// Create reader with error definition files

219

val errorReader = new ErrorClassesJsonReader(Seq(

220

new URL("file:///path/to/error-classes.json")

221

))

222

223

// Get formatted error message

224

val message = errorReader.getErrorMessage(

225

"INVALID_PARAMETER_VALUE",

226

Map("parameter" -> "timeout", "value" -> "-1")

227

)

228

229

// Get message template

230

val template = errorReader.getMessageTemplate("INVALID_PARAMETER_VALUE")

231

// Returns: "Invalid value '<value>' for parameter '<parameter>'"

232

233

// Get SQL state

234

val sqlState = errorReader.getSqlState("INVALID_PARAMETER_VALUE")

235

```

236

237

### SparkArithmeticException

238

239

Arithmetic exception with Spark error class support for mathematical operation errors.

240

241

```scala { .api }

242

/**

243

* Arithmetic exception thrown from Spark with structured error information

244

* @param errorClass - Error class identifier

245

* @param messageParameters - Parameters for error message formatting

246

* @param context - Query context information

247

* @param summary - Error summary

248

*/

249

class SparkArithmeticException(

250

errorClass: String,

251

messageParameters: Map[String, String],

252

context: Array[QueryContext],

253

summary: String

254

) extends ArithmeticException with SparkThrowable {

255

256

def this(message: String) = // Constructor for simple messages

257

258

override def getErrorClass: String

259

override def getMessageParameters: java.util.Map[String, String]

260

override def getQueryContext: Array[QueryContext]

261

}

262

```

263

264

### SparkRuntimeException

265

266

Runtime exception with Spark error class support for general runtime errors.

267

268

```scala { .api }

269

/**

270

* Runtime exception thrown from Spark with structured error information

271

* @param errorClass - Error class identifier

272

* @param messageParameters - Parameters for error message formatting

273

* @param cause - Optional underlying cause

274

* @param context - Query context information

275

* @param summary - Error summary

276

*/

277

class SparkRuntimeException(

278

errorClass: String,

279

messageParameters: Map[String, String],

280

cause: Throwable = null,

281

context: Array[QueryContext] = Array.empty,

282

summary: String = ""

283

) extends RuntimeException with SparkThrowable {

284

285

def this(message: String, cause: Option[Throwable]) = // Constructor for simple messages

286

287

override def getErrorClass: String

288

override def getMessageParameters: java.util.Map[String, String]

289

override def getQueryContext: Array[QueryContext]

290

}

291

```

292

293

### SparkIllegalArgumentException

294

295

Illegal argument exception with Spark error class support for parameter validation errors.

296

297

```scala { .api }

298

/**

299

* Illegal argument exception thrown from Spark with structured error information

300

* @param errorClass - Error class identifier

301

* @param messageParameters - Parameters for error message formatting

302

* @param context - Query context information

303

* @param summary - Error summary

304

* @param cause - Optional underlying cause

305

*/

306

class SparkIllegalArgumentException(

307

errorClass: String,

308

messageParameters: Map[String, String],

309

context: Array[QueryContext] = Array.empty,

310

summary: String = "",

311

cause: Throwable = null

312

) extends IllegalArgumentException with SparkThrowable {

313

314

def this(message: String, cause: Option[Throwable]) = // Constructor for simple messages

315

316

override def getErrorClass: String

317

override def getMessageParameters: java.util.Map[String, String]

318

override def getQueryContext: Array[QueryContext]

319

}

320

```

321

322

### SparkDateTimeException

323

324

DateTime exception with Spark error class support for date/time operation errors.

325

326

```scala { .api }

327

/**

328

* DateTime exception thrown from Spark with structured error information

329

* @param errorClass - Error class identifier

330

* @param messageParameters - Parameters for error message formatting

331

* @param context - Query context information

332

* @param summary - Error summary

333

*/

334

class SparkDateTimeException(

335

errorClass: String,

336

messageParameters: Map[String, String],

337

context: Array[QueryContext],

338

summary: String

339

) extends DateTimeException with SparkThrowable {

340

341

def this(message: String) = // Constructor for simple messages

342

343

override def getErrorClass: String

344

override def getMessageParameters: java.util.Map[String, String]

345

override def getQueryContext: Array[QueryContext]

346

}

347

```

348

349

### SparkNumberFormatException

350

351

Number format exception with Spark error class support for numeric parsing errors.

352

353

```scala { .api }

354

/**

355

* Number format exception thrown from Spark with structured error information

356

* @param errorClass - Error class identifier

357

* @param messageParameters - Parameters for error message formatting

358

* @param context - Query context information

359

* @param summary - Error summary

360

*/

361

class SparkNumberFormatException(

362

errorClass: String,

363

messageParameters: Map[String, String],

364

context: Array[QueryContext],

365

summary: String

366

) extends NumberFormatException with SparkThrowable {

367

368

def this(message: String) = // Constructor for simple messages

369

370

override def getErrorClass: String

371

override def getMessageParameters: java.util.Map[String, String]

372

override def getQueryContext: Array[QueryContext]

373

}

374

```

375

376

### SparkUnsupportedOperationException

377

378

Unsupported operation exception with Spark error class support.

379

380

```scala { .api }

381

/**

382

* Unsupported operation exception thrown from Spark with structured error information

383

* @param errorClass - Error class identifier

384

* @param messageParameters - Parameters for error message formatting

385

*/

386

class SparkUnsupportedOperationException(

387

errorClass: String,

388

messageParameters: Map[String, String]

389

) extends UnsupportedOperationException with SparkThrowable {

390

391

def this(message: String) = // Constructor for simple messages

392

393

override def getErrorClass: String

394

override def getMessageParameters: java.util.Map[String, String]

395

}

396

```

397

398

## Error Handling Patterns

399

400

### Basic Exception Handling

401

402

```scala

403

import org.apache.spark.SparkException

404

405

try {

406

// Spark operation that might fail

407

someSparkOperation()

408

} catch {

409

case ex: SparkException if ex.getErrorClass == "RESOURCE_NOT_FOUND" =>

410

// Handle specific error class

411

logWarning(s"Resource not found: ${ex.getMessageParameters}")

412

413

case ex: SparkException if ex.isInternalError =>

414

// Handle internal errors differently

415

logError("Internal Spark error occurred", ex)

416

throw ex

417

418

case ex: SparkException =>

419

// Handle other Spark exceptions

420

logError(s"Spark operation failed: ${ex.getMessage}")

421

}

422

```

423

424

### Exception Creation Patterns

425

426

```scala

427

import org.apache.spark.SparkException

428

429

// Create exception with error class

430

def validateParameter(name: String, value: Any): Unit = {

431

if (value == null) {

432

throw new SparkException(
  message = s"Parameter '$name' must not be null",
  errorClass = Some("NULL_PARAMETER"),
  messageParameters = Map("parameter" -> name)
)

437

}

438

}

439

440

// Create internal error

441

def handleUnexpectedState(): Nothing = {

442

throw SparkException.internalError(

443

"Reached unexpected code path in partition processing"

444

)

445

}

446

```

447

448

## Type Definitions

449

450

```scala { .api }

451

// Exception with structured error information

452

class SparkException(

453

message: String,

454

cause: Throwable,

455

errorClass: Option[String],

456

messageParameters: Map[String, String],

457

context: Array[QueryContext]

458

) extends Exception(message, cause) with SparkThrowable

459

460

// Error classification interface

461

trait SparkThrowable {

462

def getErrorClass(): String

463

def getSqlState(): String

464

def isInternalError(): Boolean

465

def getMessageParameters(): java.util.Map[String, String]

466

def getQueryContext(): Array[QueryContext]

467

}

468

469

// Query context for error location

470

trait QueryContext {

471

def objectType(): String

472

def objectName(): String

473

def startIndex(): Int

474

def stopIndex(): Int

475

def fragment(): String

476

}

477

```