tessl/pypi-kedro

tessl install tessl/pypi-kedro@1.1.0

Kedro helps you build production-ready data and analytics pipelines

Agent Success: 98% (agent success rate when using this tile)
Improvement: 1.32x (improvement over the baseline success rate)
Baseline: 74% (agent success rate without this tile)

docs/api/framework/context.md

KedroContext API Reference

Base class holding project configuration and providing access to catalog and settings.

Module Import

from kedro.framework.context import KedroContext
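
The mixin and class-composition helper documented below are exported from the same module:

from kedro.framework.context import CatalogCommandsMixin, compose_classes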

KedroContext Class

class KedroContext:
    """Base class holding configuration and main Kedro functionality."""

    def __init__(
        self,
        project_path: Path | str,
        config_loader: AbstractConfigLoader,
        env: str | None = None,
        package_name: str | None = None,
        hook_manager: PluginManager | None = None,
        runtime_params: dict[str, Any] | None = None,
    ):
        """
        Initialize KedroContext.

        Parameters:
        - project_path: Path to the project root directory
        - config_loader: Instance of configuration loader to use
        - env: Environment name (e.g., 'local', 'base', 'prod'). Defaults to None
        - package_name: Python package name for the project. Defaults to None
        - hook_manager: Hook manager for plugin system. Defaults to None
        - runtime_params: Additional runtime parameters to merge with loaded params. Defaults to None
        """

    @property
    def catalog(self) -> DataCatalog:
        """
        Get the project's data catalog.

        Returns:
        DataCatalog instance containing all configured datasets
        """

    @property
    def config_loader(self) -> AbstractConfigLoader:
        """
        Get the configuration loader.

        Returns:
        Configuration loader instance used by this context
        """

    @property
    def params(self) -> dict[str, Any]:
        """
        Get project parameters.

        Returns:
        Dictionary containing all project parameters merged from configuration
        """

    @property
    def env(self) -> str:
        """
        Get the current environment name.

        Returns:
        Environment name (e.g., 'local', 'base', 'prod')
        """

    @property
    def project_path(self) -> Path:
        """
        Get the project root path.

        Returns:
        Path object pointing to the project root directory
        """

    def _get_catalog(
        self,
        catalog_class: type[DataCatalog] | None = None,
        save_version: str | None = None,
        load_versions: dict[str, str] | None = None,
    ) -> DataCatalog:
        """
        Get or create a data catalog instance.

        Parameters:
        - catalog_class: Custom DataCatalog class to use. Defaults to None (uses DataCatalog)
        - save_version: Version string to use when saving datasets. Defaults to None
        - load_versions: Mapping of dataset names to version strings for loading. Defaults to None

        Returns:
        DataCatalog instance configured with the specified parameters
        """

CatalogCommandsMixin

class CatalogCommandsMixin:
    """
    Mixin providing catalog-related command methods for context.
    Adds catalog inspection and pattern resolution capabilities.
    """

    def describe_datasets(
        self,
        pipelines: dict[str, Pipeline] | None = None,
    ) -> dict[str, dict[str, Any]]:
        """
        Generate detailed descriptions for all datasets in the catalog.

        Parameters:
        - pipelines: Optional dictionary of pipeline names to Pipeline objects.
                    Used to determine which datasets are used in pipelines.
                    Defaults to None

        Returns:
        Dictionary mapping dataset names to their descriptions, including:
        - type: Dataset class name
        - filepath: File path (if applicable)
        - layer: Data layer (if using DataCatalog layers)
        - used_in_pipelines: List of pipeline names using this dataset
        """

    def list_patterns(self) -> list[str]:
        """
        List all dataset factory patterns registered in the catalog.

        Returns:
        List of pattern strings (e.g., '{default}#csv', 'params:{name}')
        """

    def resolve_patterns(
        self,
        pipelines: dict[str, Pipeline] | None = None,
    ) -> dict[str, dict[str, Any]]:
        """
        Resolve dataset factory patterns and return information about resolved datasets.

        Parameters:
        - pipelines: Optional dictionary of pipeline names to Pipeline objects.
                    Used to determine which patterns are used in pipelines.
                    Defaults to None

        Returns:
        Dictionary mapping resolved dataset names to their configuration details
        """

compose_classes

def compose_classes(*classes: type) -> type:
    """
    Compose multiple classes into a single class using multiple inheritance.

    Useful for combining mixins with base context classes.

    Parameters:
    - classes: Variable number of classes to compose

    Returns:
    New composed class type combining all input classes

    Example:
    >>> from kedro.framework.context import compose_classes
    >>> CustomContext = compose_classes(KedroContext, CatalogCommandsMixin)
    """

Usage Examples

Basic KedroContext Usage

from kedro.framework.session import KedroSession

with KedroSession.create() as session:
    context = session.load_context()

    # Access catalog
    catalog = context.catalog
    data = catalog.load("input_data")

    # Access parameters
    params = context.params
    learning_rate = params["model"]["learning_rate"]

    # Access config loader
    config_loader = context.config_loader
    custom_config = config_loader["custom_config"]

    # Check environment
    print(f"Running in {context.env} environment")
    print(f"Project path: {context.project_path}")

Creating Custom Context with Mixins

from kedro.framework.context import KedroContext, compose_classes, CatalogCommandsMixin
from kedro.framework.session import KedroSession

# Compose context with catalog commands capability.
# For session.load_context() to return this composed class, it must be
# registered as the project's context class (see the settings.py sketch
# after this example).
CustomContext = compose_classes(KedroContext, CatalogCommandsMixin)

with KedroSession.create() as session:
    context = session.load_context()

    # List all dataset factory patterns
    patterns = context.list_patterns()
    print(f"Available patterns: {patterns}")

    # Describe all datasets
    dataset_descriptions = context.describe_datasets()
    for name, info in dataset_descriptions.items():
        print(f"{name}: {info['type']} at {info.get('filepath', 'N/A')}")

Using _get_catalog for Versioned Data

from kedro.framework.session import KedroSession

with KedroSession.create() as session:
    context = session.load_context()

    # Get catalog with specific version for saving
    versioned_catalog = context._get_catalog(
        save_version="2024-01-10-15.30.00"
    )

    # Get catalog with specific load versions
    load_catalog = context._get_catalog(
        load_versions={
            "input_data": "2024-01-09-12.00.00",
            "model_features": "2024-01-09-12.00.00"
        }
    )
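
The returned objects are ordinary DataCatalog instances, so loads and saves work as usual. A brief continuation inside the with block above ("model_output" is a hypothetical versioned dataset used only for illustration):

    # Loads through load_catalog resolve to the pinned versions above,
    # not to the latest available versions
    historical_input = load_catalog.load("input_data")
    historical_features = load_catalog.load("model_features")

    # Saves through versioned_catalog are written under the pinned save_version
    versioned_catalog.save("model_output", historical_features)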

Initializing Custom KedroContext

from pathlib import Path
from kedro.config import OmegaConfigLoader
from kedro.framework.context import KedroContext

# Initialize config loader
config_loader = OmegaConfigLoader(
    conf_source=str(Path.cwd() / "conf"),
    base_env="base",
    default_run_env="local"
)

# Create custom context instance
context = KedroContext(
    project_path=Path.cwd(),
    config_loader=config_loader,
    env="local",
    package_name="my_project",
    runtime_params={"extra_param": "value"}
)

# Use the context
catalog = context.catalog
params = context.params
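
Since runtime_params are merged into the loaded parameters (see the constructor documentation above), the value passed at construction shows up in context.params; a short continuation of the example:

# runtime_params are merged into the configured parameters
print(params.get("extra_param"))  # "value"
print(context.env)                # "local"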

See Also

  • KedroSession API - Session management
  • OmegaConfigLoader API - Configuration loading
  • Framework Utilities - Additional utility functions and mixins
  • DataCatalog API - Data catalog reference