Database for AI powered by a storage format optimized for deep-learning applications.
75
Evaluation — 75%
↑ 1.59x Agent success when using this tile
Deep Lake provides comprehensive exception handling for various failure scenarios including authentication, authorization, storage, dataset operations, and data validation. All exceptions are importable from the main deeplake module and provide detailed error information for debugging and recovery.
Errors related to user authentication and access permissions for datasets and cloud services.
class AuthenticationError:
    """Authentication failed or credentials invalid."""


class AuthorizationError:
    """User lacks permissions for requested operation."""


class AgreementError:
    """Dataset agreement acceptance required."""


class AgreementNotAcceptedError:
    """User has not accepted required agreements."""


class NotLoggedInAgreementError:
    """Agreement acceptance requires user login."""


class ExpiredTokenError:
    """Authentication token has expired."""


# Errors related to storage backend operations, network connectivity, and data transfer.
class StorageAccessDenied:
    """Access denied to storage location."""


class StorageKeyNotFound:
    """Requested storage key does not exist."""


class StorageKeyAlreadyExists:
    """Storage key already exists, cannot overwrite."""


class StorageInternalError:
    """Internal storage system error."""


class StorageNetworkConnectionError:
    """Network connection to storage failed."""


class GcsStorageProviderFailed:
    """Google Cloud Storage operation failed."""


# Errors related to dataset creation, access, modification, and lifecycle management.
class DatasetUnavailableError:
    """Dataset temporarily unavailable."""


class NotFoundError:
    """Requested dataset or resource not found."""


class ReadOnlyDatasetModificationError:
    """Attempted modification of read-only dataset."""


class IncorrectDeeplakePathError:
    """Invalid Deep Lake dataset path format."""


class InvalidURIError:
    """Invalid URI format provided."""


class PermissionDeniedError:
    """Insufficient permissions for operation."""


class BadRequestError:
    """Invalid request parameters or format."""


# Errors related to branching, tagging, merging, and version management operations.
class BranchExistsError:
    """Branch with given name already exists."""


class BranchNotFoundError:
    """Requested branch does not exist."""


class CannotDeleteMainBranchError:
    """Main branch cannot be deleted."""


class CannotRenameMainBranchError:
    """Main branch cannot be renamed."""


class TagExistsError:
    """Tag with given name already exists."""


class TagNotFoundError:
    """Requested tag does not exist."""


class VersionNotFoundError:
    """Requested version does not exist."""


class PushError:
    """Error occurred during push operation."""


# Errors related to column operations, schema validation, and data structure modifications.
class ColumnAlreadyExistsError:
    """Column with given name already exists."""


class ColumnDoesNotExistError:
    """Requested column does not exist."""


class EmptyColumnNameError:
    """Column name cannot be empty."""


class ColumnMissingAppendValueError:
    """Value required for column during append operation."""


class InvalidColumnValueError:
    """Invalid value provided for column."""


class UnevenColumnsError:
    """Columns have mismatched lengths."""


class UnevenUpdateError:
    """Update data has inconsistent dimensions."""


# Errors related to data types, dimensions, format validation, and compression settings.
class DimensionsMismatch:
    """Data dimensions do not match expected schema."""


class DimensionsMismatchError:
    """Dimension validation failed."""


class DtypeMismatch:
    """Data type does not match column type."""


class InvalidType:
    """Invalid or unsupported data type."""


class InvalidTypeDimensions:
    """Invalid dimensions for given type."""


class UnknownType:
    """Unknown or unrecognized data type."""


class UnspecifiedDtype:
    """Data type must be specified."""


class UnsupportedPythonType:
    """Python type not supported for conversion."""


class InvalidTypeAndFormatPair:
    """Type and format combination not supported."""


# Errors related to data compression, format validation, and encoding issues.
class FormatNotSupportedError:
    """Data format not supported."""


class UnknownFormat:
    """Unrecognized data format."""


class InvalidImageCompression:
    """Invalid compression settings for image data."""


class InvalidBinaryMaskCompression:
    """Invalid compression for binary mask data."""


class InvalidSegmentMaskCompression:
    """Invalid compression for segmentation mask data."""


class InvalidMedicalCompression:
    """Invalid compression for medical imaging data."""


class UnsupportedChunkCompression:
    """Chunk compression type not supported."""


class UnsupportedSampleCompression:
    """Sample compression type not supported."""


class WrongChunkCompression:
    """Incorrect chunk compression applied."""


class WrongSampleCompression:
    """Incorrect sample compression applied."""


# Errors related to indexing operations, query execution, and search functionality.
class IndexAlreadyExistsError:
    """Index with given configuration already exists."""


class InvalidIndexCreationError:
    """Index creation parameters invalid."""


class JSONIndexNotFound:
    """JSON index not found for query."""


class JSONKeyNotFound:
    """Requested JSON key not found."""


# Errors related to data processing, ingestion pipelines, and specialized data formats.
class InvalidLinkDataError:
    """Linked data validation failed."""


class InvalidLinkType:
    """Invalid link type specified."""


class InvalidSequenceOfSequence:
    """Invalid nested sequence structure."""


class InvalidPolygonShapeError:
    """Polygon data has invalid shape."""


class UnexpectedInputDataForDicomColumn:
    """DICOM column received unexpected data type."""


class UnexpectedMedicalTypeInputData:
    """Medical imaging data validation failed."""


class UnknownBoundingBoxCoordinateFormat:
    """Bounding box coordinate format not recognized."""


class UnknownBoundingBoxPixelFormat:
    """Bounding box pixel format not recognized."""


class UnknownStringType:
    """String type specification not recognized."""


class InvalidTextType:
    """Text data type validation failed."""


# Errors related to tensor operations, array indexing, and chunk management.
class TensorAlreadyExists:
    """Tensor with given name already exists."""


class BytePositionIndexOutOfChunk:
    """Byte position index exceeds chunk boundaries."""


class ShapeIndexOutOfChunk:
    """Shape index exceeds chunk boundaries."""


class CanNotCreateTensorWithProvidedCompressions:
    """Cannot create tensor with specified compression settings."""


# Errors related to HTTP requests, API communication, and network operations.
class HTTPRequestFailedError:
    """HTTP request failed with error response."""


class HTTPBodyIsMissingError:
    """Required HTTP body is missing."""


class HTTPBodyIsNotJSONError:
    """HTTP body is not valid JSON format."""


# Errors related to configuration management and credential handling.
class CredsKeyAlreadyAssignedError:
    """Credentials key already assigned to dataset."""


class InvalidCredsKeyAssignmentError:
    """Invalid credentials key assignment."""


# Errors related to specialized data formats and processing operations.
class EmbeddingSizeMismatch:
"""Embedding vector size does not match expected dimensions."""
class InvalidChunkStrategyType:
"""Invalid chunk strategy specified."""
class WriteFailedError:
"""Data write operation failed."""
class LogExistsError:
"""Log file already exists."""
class LogNotexistsError:
"""Required log file does not exist."""import deeplake
# Example: open an existing dataset, creating it when it does not exist yet.
try:
    dataset = deeplake.open("./nonexistent_dataset")
except deeplake.NotFoundError:
    print("Dataset not found, creating new one...")
    dataset = deeplake.create("./new_dataset")

# Example: adding a column that may already be present.
try:
    dataset.add_column("images", deeplake.types.Image())
except deeplake.ColumnAlreadyExistsError:
    print("Column 'images' already exists")

# Example: distinguish authentication, authorization, and network failures
# when creating a dataset on cloud storage.
try:
    dataset = deeplake.create(
        "s3://my-bucket/dataset",
        creds={"aws_access_key_id": "invalid", "aws_secret_access_key": "invalid"}
    )
except deeplake.AuthenticationError:
    print("AWS credentials are invalid")
except deeplake.StorageAccessDenied:
    print("Access denied to S3 bucket")
except deeplake.StorageNetworkConnectionError:
    print("Network connection to S3 failed")

# Example: version-control errors (branches and tags).
try:
    branch = dataset.branch("feature-branch")
except deeplake.BranchExistsError:
    print("Branch already exists, switching to it...")
    branch = dataset.branches["feature-branch"]

try:
    dataset.tag("v1.0", "Release version 1.0")
except deeplake.TagExistsError:
    print("Tag v1.0 already exists")

# Example: data-validation errors raised during append.
try:
    dataset.append({
        "images": "invalid_image_path.txt",  # Wrong file type
        "embeddings": [1, 2, 3]  # Wrong dimensions
    })
except deeplake.FormatNotSupportedError:
    print("Image format not supported")
except deeplake.DimensionsMismatch:
    print("Embedding dimensions don't match schema")
except deeplake.InvalidColumnValueError as e:
    print(f"Invalid column value: {e}")


def safe_dataset_operation(url, operation_func):
    """Safely execute dataset operations with comprehensive error handling.

    Calls operation_func(url) and reports each known Deep Lake failure mode.
    Unexpected exceptions are logged and re-raised so they are not swallowed.
    """
    try:
        return operation_func(url)
    except deeplake.NotFoundError:
        print(f"Dataset not found: {url}")
    except deeplake.AuthenticationError:
        print("Authentication failed - check your token")
    except deeplake.AuthorizationError:
        print("Access denied - insufficient permissions")
    except deeplake.StorageNetworkConnectionError:
        print("Network connection failed - check connectivity")
    except deeplake.ReadOnlyDatasetModificationError:
        print("Cannot modify read-only dataset")
    except deeplake.DatasetUnavailableError:
        print("Dataset temporarily unavailable - try again later")
    except Exception as e:
        print(f"Unexpected error: {type(e).__name__}: {e}")
        raise


# Usage
safe_dataset_operation("./my_dataset", lambda url: deeplake.open(url))

# Example: query errors.
try:
    results = deeplake.query("SELECT * FROM invalid_dataset WHERE column = 'value'")
except deeplake.NotFoundError:
    print("Referenced dataset not found in query")
except deeplake.JSONKeyNotFound:
    print("Query references non-existent column")
except deeplake.BadRequestError:
    print("Invalid query syntax")

import time
import random


def retry_with_backoff(func, max_retries=3, base_delay=1):
    """Retry function with exponential backoff for transient errors.

    Only the transient error classes listed below trigger a retry; the jitter
    from random.uniform(0, 1) spreads out concurrent retries.
    """
    for attempt in range(max_retries):
        try:
            return func()
        except (deeplake.StorageNetworkConnectionError,
                deeplake.DatasetUnavailableError,
                deeplake.StorageInternalError) as e:
            if attempt == max_retries - 1:
                raise  # Out of retries: surface the last transient error.
            delay = base_delay * (2 ** attempt) + random.uniform(0, 1)
            print(f"Transient error {type(e).__name__}, retrying in {delay:.2f}s...")
            time.sleep(delay)


# Usage
dataset = retry_with_backoff(lambda: deeplake.open("s3://my-bucket/dataset"))


def open_dataset_with_fallback(primary_url, fallback_url):
    """Open dataset with fallback location."""
    try:
        return deeplake.open(primary_url)
    except (deeplake.NotFoundError, deeplake.StorageAccessDenied):
        print(f"Primary dataset unavailable, using fallback: {fallback_url}")
        return deeplake.open(fallback_url)


dataset = open_dataset_with_fallback(
    "s3://primary-bucket/dataset",
    "./local_backup/dataset"
)
Install with Tessl CLI
npx tessl i tessl/pypi-deeplakedocs
evals
scenario-1
scenario-2
scenario-3
scenario-4
scenario-5
scenario-6
scenario-7
scenario-8
scenario-9
scenario-10