Code Interpreter

The Code Interpreter provides the core functionality for compiling, executing, and managing Scala code within the REPL environment, covering code compilation and execution, variable binding, and introspection of interpreter state.

Capabilities

SparkIMain Class

The core interpreter class that handles all aspects of code interpretation including compilation, execution, and state management.

/**
 * Core interpreter for Spark REPL handling code compilation and execution
 * @param initialSettings Scala compiler settings
 * @param out Output writer for results and errors
 * @param propagateExceptions Whether to propagate exceptions to caller
 */
@DeveloperApi
class SparkIMain(
  initialSettings: Settings,
  val out: JPrintWriter,
  propagateExceptions: Boolean = false
) extends SparkImports with Logging { imain =>

  /**
   * Constructor with default settings
   */
  def this() = this(new Settings())
  
  /**
   * Constructor with custom settings
   */
  def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
}
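
Construction can be sketched with either auxiliary constructor; the usejavacp flag below is a standard scala.tools.nsc.Settings option, shown here as one plausible configuration rather than a required one:

import scala.tools.nsc.Settings
import org.apache.spark.repl.SparkIMain

// Configure the compiler to resolve classes from the JVM classpath
val settings = new Settings()
settings.usejavacp.value = true

// The auxiliary constructor supplies the default console writer
val interpreter = new SparkIMain(settings)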

Core Properties:

/**
 * Output directory for compiled classes
 */
@DeveloperApi
lazy val getClassOutputDirectory: File

/**
 * The underlying Scala compiler instance
 */
@DeveloperApi  
lazy val global: Global

/**
 * Code wrapper used for execution context
 */
@DeveloperApi
def executionWrapper: String

/**
 * Whether compilation errors were reported
 */
@DeveloperApi
def isReportingErrors: Boolean
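
These properties can be read once the interpreter is initialized (initializeSynchronous is described in the next section); a brief sketch:

val interpreter = new SparkIMain()
interpreter.initializeSynchronous()

// Directory where compiled REPL classes are written
println(s"Class output directory: ${interpreter.getClassOutputDirectory}")

// Current execution wrapper (empty string unless one has been set)
println(s"Execution wrapper: '${interpreter.executionWrapper}'")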

Initialization

Methods for initializing the interpreter and preparing it for code execution.

/**
 * Initialize the interpreter synchronously
 * Must be called before using interpretation methods
 */
@DeveloperApi
def initializeSynchronous(): Unit

Usage Example:

import org.apache.spark.repl.SparkIMain

val interpreter = new SparkIMain()
interpreter.initializeSynchronous()
// Now ready for code interpretation

Code Execution

Core methods for interpreting and compiling Scala code.

/**
 * Interpret a line of Scala code
 * @param line Scala code to interpret
 * @return Result indicating success, error, or incomplete input
 */
@DeveloperApi
def interpret(line: String): IR.Result

/**
 * Interpret code, binding any result to a synthetic name rather than a user-visible resN variable
 * @param line Scala code to interpret
 * @return Result indicating success, error, or incomplete input
 */
@DeveloperApi
def interpretSynthetic(line: String): IR.Result

/**
 * Compile source files
 * @param sources Variable number of SourceFile instances
 * @return Boolean indicating compilation success
 */
@DeveloperApi
def compileSources(sources: SourceFile*): Boolean

/**
 * Compile a string of Scala code
 * @param code Scala code to compile
 * @return Boolean indicating compilation success
 */
@DeveloperApi
def compileString(code: String): Boolean

Usage Examples:

import org.apache.spark.repl.SparkIMain
import scala.tools.nsc.interpreter.{Results => IR}

val interpreter = new SparkIMain()
interpreter.initializeSynchronous()

// Interpret simple expressions
val result1 = interpreter.interpret("val x = 42")
result1 match {
  case IR.Success => println("Successfully defined x")
  case IR.Error => println("Error in code")
  case IR.Incomplete => println("Code is incomplete")
}

// Interpret complex code
val result2 = interpreter.interpret("""
  def fibonacci(n: Int): Int = {
    if (n <= 1) n else fibonacci(n-1) + fibonacci(n-2)
  }
""")

// Compile string directly
val compiled = interpreter.compileString("case class Person(name: String, age: Int)")
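
The compileSources method above takes compiler SourceFile instances; a sketch using BatchSourceFile from scala.reflect.internal.util:

import scala.reflect.internal.util.BatchSourceFile

// Wrap raw code in a named source file and compile it
val source = new BatchSourceFile("<snippet>", "object Greeter { def hi = \"hi\" }")
val ok = interpreter.compileSources(source)
println(s"Compilation succeeded: $ok")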

Code Analysis

Methods for parsing and analyzing code structure without execution.

/**
 * Parse Scala code into Abstract Syntax Tree
 * @param line Scala code to parse
 * @return Optional list of Tree nodes representing parsed code
 */
@DeveloperApi
def parse(line: String): Option[List[Tree]]

/**
 * Get symbol information for a line of code
 * @param code Scala code to analyze
 * @return Symbol representing the code
 */
@DeveloperApi
def symbolOfLine(code: String): Symbol

/**
 * Get type information for an expression
 * @param expr Scala expression to analyze
 * @param silent Whether to suppress error messages
 * @return Type information for the expression
 */
@DeveloperApi
def typeOfExpression(expr: String, silent: Boolean = true): Type

Usage Examples:

val interpreter = new SparkIMain()
interpreter.initializeSynchronous()

// Parse code structure
val parsed = interpreter.parse("val x: Int = 42")
parsed.foreach(trees => println(s"Parsed ${trees.length} tree nodes"))

// Get type information
val exprType = interpreter.typeOfExpression("List(1, 2, 3)")
println(s"Expression type: $exprType")

Variable Management

Methods for binding variables and managing the interpreter's variable namespace.

/**
 * Bind a variable to the interpreter namespace
 * @param name Variable name
 * @param boundType Type of the variable as string
 * @param value Value to bind
 * @param modifiers List of modifiers (e.g., "lazy", "implicit")
 * @return Result indicating binding success
 */
@DeveloperApi
def bind(name: String, boundType: String, value: Any, modifiers: List[String] = Nil): IR.Result

/**
 * Direct variable binding without wrapper
 * @param name Variable name
 * @param boundType Type of the variable as string  
 * @param value Value to bind
 * @return Result indicating binding success
 */
@DeveloperApi
def directBind(name: String, boundType: String, value: Any): IR.Result

/**
 * Rebind an existing variable with new value
 * @param p NamedParam containing name and value
 * @return Result indicating rebinding success
 */
@DeveloperApi
def rebind(p: NamedParam): IR.Result

/**
 * Add import statements to the interpreter
 * @param ids Variable number of import strings
 * @return Result indicating import success
 */
@DeveloperApi
def addImports(ids: String*): IR.Result

Usage Examples:

import scala.tools.nsc.interpreter.{Results => IR}

val interpreter = new SparkIMain()
interpreter.initializeSynchronous()

// Bind variables
val bindResult = interpreter.bind("myList", "List[Int]", List(1, 2, 3, 4, 5))
val directResult = interpreter.directBind("pi", "Double", 3.14159)

// Add imports
val importResult = interpreter.addImports("scala.util.Random", "java.time.LocalDateTime")

// Check results
if (bindResult == IR.Success) {
  println("Successfully bound myList")
}
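
Rebinding an existing name can be sketched with NamedParam; this assumes the TypeTag-based NamedParam factory from scala.tools.nsc.interpreter:

import scala.tools.nsc.interpreter.NamedParam

// Replace the value previously bound to pi
val rebindResult = interpreter.rebind(NamedParam("pi", 3.0))
if (rebindResult == IR.Success) {
  println("Successfully rebound pi")
}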

State Management

Methods for managing the interpreter's state and lifecycle.

/**
 * Reset the interpreter state, clearing all definitions
 */
@DeveloperApi
def reset(): Unit

/**
 * Close the interpreter and clean up resources
 */
@DeveloperApi
def close(): Unit
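
A minimal lifecycle sketch using the two methods above:

val interpreter = new SparkIMain()
interpreter.initializeSynchronous()

interpreter.interpret("val x = 42")

// Clear all definitions; x is no longer available afterwards
interpreter.reset()

// Release compiler and classloader resources when finished
interpreter.close()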

Introspection

Methods for inspecting the current state of defined names, symbols, and variables.

/**
 * Get all defined names in the interpreter
 * @return List of all defined names
 */
@DeveloperApi
def allDefinedNames: List[Name]

/**
 * Get all defined term names
 * @return List of defined term names
 */
@DeveloperApi
def definedTerms: List[TermName]

/**
 * Get all defined type names
 * @return List of defined type names
 */
@DeveloperApi
def definedTypes: List[TypeName]

/**
 * Get all defined symbols as a set
 * @return Set of defined symbols
 */
@DeveloperApi
def definedSymbols: Set[Symbol]

/**
 * Get all defined symbols as a list
 * @return List of defined symbols
 */
@DeveloperApi
def definedSymbolList: List[Symbol]

/**
 * Get user-defined term names (excluding res0, res1, etc.)
 * @return List of user-defined term names
 */
@DeveloperApi
def namedDefinedTerms: List[TermName]

/**
 * Get the name of the most recent result variable
 * @return Most recent result variable name
 */
@DeveloperApi
def mostRecentVar: String

/**
 * Get recent compiler warnings
 * @return List of warning tuples (position, message)
 */
@DeveloperApi
def lastWarnings: List[(Position, String)]

Usage Examples:

val interpreter = new SparkIMain()
interpreter.initializeSynchronous()

// Define some variables
interpreter.interpret("val x = 42")
interpreter.interpret("def square(n: Int) = n * n")
interpreter.interpret("case class Point(x: Int, y: Int)")

// Inspect defined names
val allNames = interpreter.allDefinedNames
val terms = interpreter.definedTerms
val types = interpreter.definedTypes
val userTerms = interpreter.namedDefinedTerms

println(s"All names: ${allNames.mkString(", ")}")
println(s"Terms: ${terms.mkString(", ")}")
println(s"Types: ${types.mkString(", ")}")
println(s"User terms: ${userTerms.mkString(", ")}")

// Get most recent result
val recentVar = interpreter.mostRecentVar
println(s"Most recent variable: $recentVar")

Value Access

Methods for accessing values, types, and runtime information of defined terms.

/**
 * Get the runtime value of a term
 * @param id Term identifier
 * @return Optional runtime value
 */
@DeveloperApi
def valueOfTerm(id: String): Option[AnyRef]

/**
 * Get the runtime class of a term
 * @param id Term identifier
 * @return Optional Java class
 */
@DeveloperApi
def classOfTerm(id: String): Option[JClass]

/**
 * Get the compile-time type of a term
 * @param id Term identifier
 * @return Type information
 */
@DeveloperApi
def typeOfTerm(id: String): Type

/**
 * Get the symbol of a term
 * @param id Term identifier
 * @return Symbol information
 */
@DeveloperApi
def symbolOfTerm(id: String): Symbol

/**
 * Get both runtime class and type information
 * @param id Term identifier
 * @return Optional tuple of (Java class, Type)
 */
@DeveloperApi
def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)]

/**
 * Get the runtime type of a term
 * @param id Term identifier
 * @return Runtime type information
 */
@DeveloperApi
def runtimeTypeOfTerm(id: String): Type

Usage Examples:

val interpreter = new SparkIMain()
interpreter.initializeSynchronous()

// Define a variable
interpreter.interpret("val numbers = List(1, 2, 3, 4, 5)")

// Access its value and type information
val value = interpreter.valueOfTerm("numbers")
val clazz = interpreter.classOfTerm("numbers")
val tpe = interpreter.typeOfTerm("numbers")

value.foreach(v => println(s"Value: $v"))
clazz.foreach(c => println(s"Class: ${c.getName}"))
println(s"Type: $tpe")

// Check runtime information
val runtimeInfo = interpreter.runtimeClassAndTypeOfTerm("numbers")
runtimeInfo.foreach { case (runtimeClass, runtimeType) =>
  println(s"Runtime class: ${runtimeClass.getName}")
  println(s"Runtime type: $runtimeType")
}
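
The difference between typeOfTerm and runtimeTypeOfTerm shows up when a declared type is wider than the runtime value; a sketch:

// Declared type is Any, but the runtime value is a List
interpreter.interpret("val anything: Any = List(1, 2, 3)")

val staticType = interpreter.typeOfTerm("anything")
val runtimeType = interpreter.runtimeTypeOfTerm("anything")
println(s"Static type: $staticType")   // expected: Any
println(s"Runtime type: $runtimeType") // expected: a List type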

Code Generation

Methods for working with generated names and code paths.

/**
 * Get the real generated name for a REPL-defined name
 * @param simpleName Simple name used in REPL
 * @return Optional real generated name
 */
@DeveloperApi
def generatedName(simpleName: String): Option[String]

/**
 * Get the full path to access a name
 * @param name Name to get path for
 * @return Full access path
 */
@DeveloperApi
def pathToName(name: Name): String
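
A sketch of mapping a REPL-defined name to its generated form and access path; the generated names are an implementation detail of the REPL's code wrapping, and the compiler Name here is built through the exposed global instance:

val interpreter = new SparkIMain()
interpreter.initializeSynchronous()

interpreter.interpret("val greeting = \"hello\"")

// The mangled name the compiler generated for this definition, if any
interpreter.generatedName("greeting").foreach(real => println(s"Generated name: $real"))

// The fully qualified path that reaches the value from generated code
val name = interpreter.global.newTermName("greeting")
println(s"Access path: ${interpreter.pathToName(name)}")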

Execution Control

Methods for controlling output and execution behavior.

/**
 * Execute code block while suppressing normal output
 * @param body Code block to execute
 * @return Result of code block execution
 */
@DeveloperApi
def beQuietDuring[T](body: => T): T

/**
 * Execute code block while masking all output
 * @param operation Code block to execute  
 * @return Result of code block execution
 */
@DeveloperApi
def beSilentDuring[T](operation: => T): T

/**
 * Set custom execution wrapper code
 * @param code Wrapper code string
 */
@DeveloperApi
def setExecutionWrapper(code: String): Unit

/**
 * Clear the execution wrapper
 */
@DeveloperApi
def clearExecutionWrapper(): Unit
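
beQuietDuring and beSilentDuring take by-name blocks; a sketch, leaving setExecutionWrapper aside since its wrapper format is tied to the REPL's code generation:

val interpreter = new SparkIMain()
interpreter.initializeSynchronous()

// Run setup code without echoing results to the output writer
interpreter.beQuietDuring {
  interpreter.interpret("val setupValue = 100")
}

// Evaluate while masking all output entirely
val exprType = interpreter.beSilentDuring {
  interpreter.typeOfExpression("1 + 1")
}

// Remove any custom wrapper previously installed with setExecutionWrapper
interpreter.clearExecutionWrapper()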

Classpath Management

Methods for managing the interpreter's classpath.

/**
 * Add URLs to the interpreter's classpath
 * @param urls Variable number of URL instances
 */
@DeveloperApi
def addUrlsToClassPath(urls: URL*): Unit

Usage Example:

import java.net.URL

val interpreter = new SparkIMain()
interpreter.initializeSynchronous()

// Add JARs to classpath
val jarUrl = new URL("file:///path/to/library.jar")
interpreter.addUrlsToClassPath(jarUrl)

// Now can use classes from the JAR
interpreter.interpret("import com.example.SomeClass")

Integration Patterns

Complete Interpreter Usage

import org.apache.spark.repl.SparkIMain
import scala.tools.nsc.interpreter.{Results => IR}
import java.net.URL

// Create and initialize interpreter
val interpreter = new SparkIMain()
interpreter.initializeSynchronous()

// Add external dependencies
val externalJar = new URL("file:///path/to/spark-sql.jar")  
interpreter.addUrlsToClassPath(externalJar)

// Execute code
val result = interpreter.interpret("""
  import org.apache.spark.sql.SparkSession
  val spark = SparkSession.builder()
    .appName("REPL Session")
    .master("local[*]")
    .getOrCreate()
""")

if (result == IR.Success) {
  // Access the created SparkSession
  val sparkSession = interpreter.valueOfTerm("spark")
  println(s"SparkSession created: $sparkSession")
  
  // Get information about defined variables
  val definedTerms = interpreter.namedDefinedTerms
  println(s"User-defined terms: ${definedTerms.mkString(", ")}")
}

// Clean up
interpreter.close()