Scala Extensions and Partial Functions

Scala-specific extensions that add partial function support to the DataStream API, so anonymous pattern-matching functions (case blocks) can be passed directly to the common stream operations for more idiomatic Scala code.

Extension Import

import org.apache.flink.streaming.api.scala.extensions._

Partial Function Support

These extension methods become available on DataStream and KeyedStream through the import above. Each mirrors a standard operation (map, flatMap, filter, keyBy, reduce) but takes a single, non-overloaded function argument, so anonymous pattern-matching functions (case blocks) can be passed directly:

class OnDataStream[T] {
  def mapWith[R: TypeInformation](fun: T => R): DataStream[R]
  def flatMapWith[R: TypeInformation](fun: T => TraversableOnce[R]): DataStream[R]
  def filterWith(fun: T => Boolean): DataStream[T]
  def keyingBy[K: TypeInformation](fun: T => K): KeyedStream[T, K]
}

class OnKeyedStream[T, K] {
  def mapWith[R: TypeInformation](fun: T => R): DataStream[R]
  def flatMapWith[R: TypeInformation](fun: T => TraversableOnce[R]): DataStream[R]
  def filterWith(fun: T => Boolean): DataStream[T]
  def reduceWith(fun: (T, T) => T): DataStream[T]
}
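
As a minimal sketch (the sample data and value names here are illustrative, not part of Flink's API), keyingBy and reduceWith chain into a small per-key sum; mapWith and filterWith are covered in the Usage Examples below.

import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.extensions._

val env = StreamExecutionEnvironment.getExecutionEnvironment

// Key a stream of (word, count) pairs by word, then sum the counts per key
val counts = env
  .fromElements(("alpha", 1), ("beta", 2), ("alpha", 3))
  .keyingBy { case (word, _) => word }
  .reduceWith { case ((word, left), (_, right)) => (word, left + right) }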

Usage Examples

import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.extensions._

val env = StreamExecutionEnvironment.getExecutionEnvironment
val events = env.fromElements("start", "data", "end", "error")

// Partial function mapping
val mapped = events.mapWith {
  case "start" => "BEGIN"
  case "end" => "FINISH"
  case data => s"PROCESS: $data"
}

// Partial function filtering
val filtered = events.filterWith {
  case "error" => false
  case _ => true
}
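
flatMapWith follows the same pattern and is convenient for deconstructing tuple elements in place. A brief, illustrative continuation of the example above (the pairs stream is a hypothetical sample, reusing the env defined earlier):

// Partial function flat-mapping with tuple deconstruction
val pairs = env.fromElements(("a", 2), ("b", 3))
val expanded = pairs.flatMapWith {
  case (token, n) => Seq.fill(n)(token)
}

The case blocks passed to these methods are compiled to ordinary total functions, so keep the patterns exhaustive (as the wildcard case in filterWith above does); an element that matches no case fails at runtime with a MatchError.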

These extensions provide idiomatic Scala programming patterns while preserving Flink's type safety and performance characteristics.