docs/build-info.md

# Build Information

Build metadata and version information system providing runtime introspection capabilities for Apache Spark deployments, including version details, build timestamps, and repository information.

## Capabilities

### SparkBuildInfo Object

Central repository for Spark build and version metadata loaded from embedded properties files.

```scala { .api }
/**
 * Provides Spark build and version information
 * Loads metadata from spark-version-info.properties at runtime
 */
private[spark] object SparkBuildInfo {

  /** Spark version string (e.g., "3.5.6") */
  val spark_version: String

  /** Git branch used for the build (e.g., "branch-3.5") */
  val spark_branch: String

  /** Git revision/commit hash (e.g., "abc123def456") */
  val spark_revision: String

  /** User who performed the build */
  val spark_build_user: String

  /** Repository URL where the source code is hosted */
  val spark_repo_url: String

  /** Build timestamp (e.g., "2024-02-15T10:30:00Z") */
  val spark_build_date: String

  /** Documentation root URL */
  val spark_doc_root: String
}
```

**Usage Examples:**

```scala
import org.apache.spark.SparkBuildInfo

// Access version information
val version = SparkBuildInfo.spark_version
println(s"Running Spark version: $version")

// Check build details
val buildInfo = s"""
  |Spark Build Information:
  |  Version: ${SparkBuildInfo.spark_version}
  |  Branch: ${SparkBuildInfo.spark_branch}
  |  Revision: ${SparkBuildInfo.spark_revision}
  |  Build Date: ${SparkBuildInfo.spark_build_date}
  |  Build User: ${SparkBuildInfo.spark_build_user}
  |  Repository: ${SparkBuildInfo.spark_repo_url}
  |""".stripMargin

println(buildInfo)

// Version compatibility checking
def checkSparkVersion(): Unit = {
  val currentVersion = SparkBuildInfo.spark_version
  val requiredMinVersion = "3.5.0"

  if (compareVersions(currentVersion, requiredMinVersion) >= 0) {
    println(s"Spark version $currentVersion meets requirements")
  } else {
    throw new IllegalStateException(
      s"Spark version $currentVersion is below required minimum $requiredMinVersion"
    )
  }
}

// Development vs production build detection
def isDevBuild(): Boolean = {
  SparkBuildInfo.spark_build_user match {
    case "jenkins" | "buildbot" => false // Production builds
    case _ => true // Developer builds
  }
}
```

### Build Information Access Patterns

Common patterns for accessing and using build metadata in Spark applications.

```scala { .api }
// Runtime version verification
object SparkVersionChecker {
  def verifyCompatibility(minVersion: String): Unit = {
    val current = SparkBuildInfo.spark_version
    // Version comparison logic
  }

  def getBuildSummary(): Map[String, String] = {
    Map(
      "version" -> SparkBuildInfo.spark_version,
      "branch" -> SparkBuildInfo.spark_branch,
      "revision" -> SparkBuildInfo.spark_revision,
      "buildDate" -> SparkBuildInfo.spark_build_date
    )
  }
}

// Application metadata integration
class SparkApplicationInfo {
  def getEnvironmentInfo(): String = {
    s"""
      |Application Environment:
      |  Spark Version: ${SparkBuildInfo.spark_version}
      |  Git Branch: ${SparkBuildInfo.spark_branch}
      |  Build Date: ${SparkBuildInfo.spark_build_date}
      |  JVM Version: ${System.getProperty("java.version")}
      |  Scala Version: ${scala.util.Properties.versionString}
      |""".stripMargin
  }
}
```

**Practical Usage Examples:**

```scala
import org.apache.spark.SparkBuildInfo

// Logging build information at application startup
class SparkApplication extends Logging {
  def logBuildInfo(): Unit = {
    logInfo(s"Starting Spark application")
    logInfo(s"Spark version: ${SparkBuildInfo.spark_version}")
    logInfo(s"Build from branch: ${SparkBuildInfo.spark_branch}")
    logInfo(s"Build date: ${SparkBuildInfo.spark_build_date}")

    if (SparkBuildInfo.spark_build_user != "jenkins") {
      logWarning("Running development build - not recommended for production")
    }
  }
}

// Feature availability checking based on version
object FeatureCompatibility {
  def supportsStructuredStreaming(): Boolean = {
    // Structured Streaming introduced in Spark 2.0
    val version = SparkBuildInfo.spark_version
    val majorVersion = version.split("\\.").head.toInt
    majorVersion >= 2
  }

  def supportsAdaptiveQueryExecution(): Boolean = {
    // AQE significantly improved in Spark 3.0
    val version = SparkBuildInfo.spark_version
    val majorVersion = version.split("\\.").head.toInt
    majorVersion >= 3
  }
}

// Build reproducibility verification
class BuildVerifier {
  def verifyBuildIntegrity(): Unit = {
    val expectedRepo = "https://github.com/apache/spark"

    if (!SparkBuildInfo.spark_repo_url.startsWith(expectedRepo)) {
      logWarning(s"Unexpected repository URL: ${SparkBuildInfo.spark_repo_url}")
    }

    // Verify revision format (Git SHA)
    val revisionPattern = """^[a-f0-9]{7,40}$""".r
    if (!revisionPattern.matches(SparkBuildInfo.spark_revision)) {
      logWarning(s"Invalid revision format: ${SparkBuildInfo.spark_revision}")
    }
  }
}

// Documentation URL generation
object DocumentationHelper {
  def getVersionSpecificDocsUrl(topic: String): String = {
    val baseUrl = SparkBuildInfo.spark_doc_root
    val version = SparkBuildInfo.spark_version
    s"$baseUrl/$version/$topic"
  }

  def getApiDocsUrl(): String = {
    getVersionSpecificDocsUrl("api/scala/index.html")
  }
}

// Release information extraction
case class ReleaseInfo(
  version: String,
  branch: String,
  revision: String,
  buildDate: String,
  isSnapshot: Boolean,
  isRelease: Boolean
)

object ReleaseInfoExtractor {
  def extractReleaseInfo(): ReleaseInfo = {
    val version = SparkBuildInfo.spark_version
    val isSnapshot = version.contains("SNAPSHOT")
    val isRelease = !isSnapshot && version.matches("""\d+\.\d+\.\d+""")

    ReleaseInfo(
      version = version,
      branch = SparkBuildInfo.spark_branch,
      revision = SparkBuildInfo.spark_revision,
      buildDate = SparkBuildInfo.spark_build_date,
      isSnapshot = isSnapshot,
      isRelease = isRelease
    )
  }
}
```

## Build Information Properties

The build information is loaded from a properties file embedded in the JAR:

### Properties File Structure

The `spark-version-info.properties` file contains:

```properties
version=3.5.6
branch=branch-3.5
revision=abc123def456789
user=jenkins
url=https://github.com/apache/spark
date=2024-02-15T10:30:00Z
docroot=https://spark.apache.org/docs
```

### Loading Mechanism

```scala { .api }
// Internal loading logic (simplified)
private def loadBuildInfo(): (String, String, String, String, String, String, String) = {
  val resourceStream = Thread.currentThread().getContextClassLoader
    .getResourceAsStream("spark-version-info.properties")

  if (resourceStream == null) {
    throw new SparkException("Could not find spark-version-info.properties")
  }

  try {
    val unknownProp = "<unknown>"
    val props = new Properties()
    props.load(resourceStream)
    (
      props.getProperty("version", unknownProp),
      props.getProperty("branch", unknownProp),
      props.getProperty("revision", unknownProp),
      props.getProperty("user", unknownProp),
      props.getProperty("url", unknownProp),
      props.getProperty("date", unknownProp),
      props.getProperty("docroot", unknownProp)
    )
  } catch {
    case e: Exception =>
      throw new SparkException("Error loading properties from spark-version-info.properties", e)
  } finally {
    if (resourceStream != null) {
      resourceStream.close()
    }
  }
}
```

## Integration Patterns

### Application Startup Logging

```scala
class SparkAppStarter extends Logging {
  def start(): Unit = {
    logBuildInformation()
    // Application initialization
  }

  private def logBuildInformation(): Unit = {
    logInfo("="*50)
    logInfo("Spark Application Starting")
    logInfo("="*50)
    logInfo(s"Spark Version: ${SparkBuildInfo.spark_version}")
    logInfo(s"Git Branch: ${SparkBuildInfo.spark_branch}")
    logInfo(s"Git Revision: ${SparkBuildInfo.spark_revision}")
    logInfo(s"Build Date: ${SparkBuildInfo.spark_build_date}")
    logInfo(s"Build User: ${SparkBuildInfo.spark_build_user}")
    logInfo("="*50)
  }
}
```

### Compatibility Matrices

```scala
object SparkCompatibility {
  val SUPPORTED_VERSIONS = Seq("3.5.0", "3.5.1", "3.5.2", "3.5.3", "3.5.4", "3.5.5", "3.5.6")

  def checkCompatibility(): Unit = {
    val currentVersion = SparkBuildInfo.spark_version

    if (!SUPPORTED_VERSIONS.contains(currentVersion)) {
      logWarning(s"Untested Spark version: $currentVersion")
      logWarning(s"Supported versions: ${SUPPORTED_VERSIONS.mkString(", ")}")
    }
  }
}
```

### Diagnostic Information

```scala
class DiagnosticCollector {
  def collectEnvironmentInfo(): Map[String, Any] = {
    Map(
      "spark" -> Map(
        "version" -> SparkBuildInfo.spark_version,
        "branch" -> SparkBuildInfo.spark_branch,
        "revision" -> SparkBuildInfo.spark_revision,
        "buildDate" -> SparkBuildInfo.spark_build_date,
        "buildUser" -> SparkBuildInfo.spark_build_user,
        "repoUrl" -> SparkBuildInfo.spark_repo_url
      ),
      "jvm" -> Map(
        "version" -> System.getProperty("java.version"),
        "vendor" -> System.getProperty("java.vendor")
      ),
      "scala" -> Map(
        "version" -> scala.util.Properties.versionString
      )
    )
  }
}
```