# Temporalio

A comprehensive Python SDK for building distributed, scalable, durable, and highly available workflows and activities using the Temporal orchestration engine. The SDK transforms async Python functions into distributed workflows backed by a fault-tolerant event loop, providing seamless integration with asyncio concepts while maintaining durability and reliability in distributed environments.
3
4
## Package Information
5
6
- **Package Name**: temporalio
7
- **Language**: Python
8
- **Installation**: `pip install temporalio`
9
10
## Core Imports
11
12
```python
13
from temporalio.client import Client
14
from temporalio.worker import Worker
15
from temporalio import workflow, activity
16
```
17
18
For advanced usage:
19
20
```python
21
import temporalio
22
from temporalio import common, converter, exceptions, runtime, testing
23
from temporalio.common import RetryPolicy, SearchAttributeKey, MetricMeter
24
from temporalio.exceptions import ApplicationError, TemporalError, is_cancelled_exception
25
from temporalio.runtime import Runtime, TelemetryConfig
26
```
27
28
For integrations:
29
30
```python
31
# Pydantic integration
32
from temporalio.contrib.pydantic import pydantic_data_converter
33
```
34
35
## Basic Usage
36
37
```python
38
from datetime import timedelta
39
from temporalio import workflow, activity
40
from temporalio.client import Client
41
from temporalio.worker import Worker
42
import asyncio
43
import concurrent.futures
44
45
# Define an activity
46
@activity.defn
47
async def say_hello(name: str) -> str:
48
return f"Hello, {name}!"
49
50
# Define a workflow
51
@workflow.defn
52
class SayHello:
53
@workflow.run
54
async def run(self, name: str) -> str:
55
return await workflow.execute_activity(
56
say_hello,
57
name,
58
schedule_to_close_timeout=timedelta(seconds=5)
59
)
60
61
# Create and run worker
62
async def run_worker():
63
client = await Client.connect("localhost:7233")
64
65
with concurrent.futures.ThreadPoolExecutor(max_workers=100) as executor:
66
worker = Worker(
67
client,
68
task_queue="my-task-queue",
69
workflows=[SayHello],
70
activities=[say_hello],
71
activity_executor=executor,
72
)
73
await worker.run()
74
75
# Execute workflow
76
async def run_workflow():
77
client = await Client.connect("localhost:7233")
78
79
result = await client.execute_workflow(
80
SayHello.run,
81
"World",
82
id="my-workflow-id",
83
task_queue="my-task-queue"
84
)
85
print(f"Result: {result}")
86
87
if __name__ == "__main__":
88
# Run worker in production or run_workflow() for testing
89
asyncio.run(run_worker())
90
```
91
92
## Architecture
93
94
Temporal's architecture centers around several key concepts:
95
96
- **Workflows**: Long-running, fault-tolerant business logic that coordinates activities and maintains state
97
- **Activities**: Individual units of work that can fail and be retried, typically I/O operations or external service calls
98
- **Workers**: Processes that execute workflows and activities by polling task queues
99
- **Client**: Interface for starting workflows, sending signals, and querying workflow state
100
- **Task Queues**: Named queues that route work between clients and workers
101
102
The Python SDK implements workflows using a custom asyncio event loop that provides distributed fault tolerance, deterministic execution, and seamless integration with Python's async/await patterns.
103
104
## Capabilities
105
106
### Client Operations
107
108
Connect to Temporal server, start workflows, manage executions, and handle schedules. The client provides the primary interface for interacting with the Temporal server.
109
110
```python { .api }
111
class Client:
112
@classmethod
113
async def connect(
114
cls,
115
target_host: str = "localhost:7233",
116
*,
117
namespace: str = "default",
118
data_converter: DataConverter = None,
119
interceptors: Sequence[Interceptor] = [],
120
tls: TLSConfig | bool = False,
121
retry_config: RetryConfig = None,
122
keep_alive_config: KeepAliveConfig = None,
123
rpc_metadata: Mapping[str, str] = {},
124
identity: str = None,
125
lazy: bool = False,
126
runtime: Runtime = None,
127
http_connect_proxy_config: HttpConnectProxyConfig = None,
128
) -> Client: ...
129
130
async def execute_workflow(
131
self,
132
workflow: MethodAsyncNoParam[WorkflowReturnType] | str,
133
arg: Any = temporalio.common._arg_unset,
134
*,
135
id: str,
136
task_queue: str,
137
execution_timeout: timedelta = None,
138
run_timeout: timedelta = None,
139
task_timeout: timedelta = None,
140
id_reuse_policy: WorkflowIDReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE_FAILED_ONLY,
141
id_conflict_policy: WorkflowIDConflictPolicy = WorkflowIDConflictPolicy.UNSPECIFIED,
142
retry_policy: RetryPolicy = None,
143
cron_schedule: str = None,
144
memo: Mapping[str, Any] = None,
145
search_attributes: SearchAttributes = None,
146
start_signal: str = None,
147
start_signal_args: Sequence[Any] = [],
148
request_eager_start: bool = True,
149
) -> WorkflowReturnType: ...
150
151
async def start_workflow(
152
self,
153
workflow: MethodAsyncNoParam[WorkflowReturnType] | str,
154
arg: Any = temporalio.common._arg_unset,
155
*,
156
id: str,
157
task_queue: str,
158
execution_timeout: timedelta = None,
159
run_timeout: timedelta = None,
160
task_timeout: timedelta = None,
161
id_reuse_policy: WorkflowIDReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE_FAILED_ONLY,
162
id_conflict_policy: WorkflowIDConflictPolicy = WorkflowIDConflictPolicy.UNSPECIFIED,
163
retry_policy: RetryPolicy = None,
164
cron_schedule: str = None,
165
memo: Mapping[str, Any] = None,
166
search_attributes: SearchAttributes = None,
167
start_signal: str = None,
168
start_signal_args: Sequence[Any] = [],
169
request_eager_start: bool = True,
170
) -> WorkflowHandle[WorkflowReturnType, WorkflowReturnType]: ...
171
172
class WorkflowHandle(Generic[SelfType, ReturnType]):
173
@property
174
def id(self) -> str: ...
175
176
@property
177
def result_run_id(self) -> str | None: ...
178
179
async def result(self) -> ReturnType: ...
180
181
async def cancel(self) -> None: ...
182
183
async def terminate(self, reason: str = None) -> None: ...
184
185
async def signal(
186
self,
187
signal: MethodSyncOrAsyncSingleParam[SelfType, Any] | str,
188
arg: Any = temporalio.common._arg_unset,
189
) -> None: ...
190
191
async def query(
192
self,
193
query: MethodSyncOrAsyncNoParam[SelfType, ReturnType] | str,
194
arg: Any = temporalio.common._arg_unset,
195
) -> ReturnType: ...
196
197
async def start_update(
198
self,
199
update: MethodSyncOrAsyncSingleParam[SelfType, UpdateReturnType] | str,
200
arg: Any = temporalio.common._arg_unset,
201
*,
202
id: str = None,
203
wait_for_stage: WorkflowUpdateStage = WorkflowUpdateStage.COMPLETED,
204
) -> WorkflowUpdateHandle[UpdateReturnType]: ...
205
```
206
207
[Client Operations](./client.md)
208
209
### Workflow Development
210
211
Define workflows with signals, queries, updates, and child workflow management. Workflows contain the core business logic and coordinate activities and other workflows.
212
213
```python { .api }
214
def defn(
215
cls: Type[MultiParamSpec],
216
*,
217
name: str = None,
218
dynamic: bool = False,
219
failure_exception_types: Sequence[type[BaseException]] = [],
220
versioning_intent: VersioningIntent = VersioningIntent.COMPATIBLE,
221
) -> Type[MultiParamSpec]: ...
222
223
def run(fn: MethodAsyncNoParam[SelfType, ReturnType]) -> MethodAsyncNoParam[SelfType, ReturnType]: ...
224
225
def signal(fn: MethodSyncOrAsyncSingleParam[SelfType, None] | None = None, *, name: str = None, dynamic: bool = False) -> Any: ...
226
227
def query(fn: MethodSyncOrAsyncNoParam[SelfType, ReturnType] | None = None, *, name: str = None, dynamic: bool = False) -> Any: ...
228
229
def update(fn: MethodSyncOrAsyncSingleParam[SelfType, ReturnType] | None = None, *, name: str = None, dynamic: bool = False) -> Any: ...
230
231
async def execute_activity(
232
activity: MethodSyncOrAsyncSingleParam[Any, ActivityReturnType] | str,
233
arg: Any = temporalio.common._arg_unset,
234
*,
235
activity_id: str = None,
236
task_queue: str = None,
237
schedule_to_close_timeout: timedelta = None,
238
schedule_to_start_timeout: timedelta = None,
239
start_to_close_timeout: timedelta = None,
240
heartbeat_timeout: timedelta = None,
241
retry_policy: RetryPolicy = None,
242
cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
243
versioning_intent: VersioningIntent = VersioningIntent.UNSPECIFIED,
244
) -> ActivityReturnType: ...
245
246
async def execute_child_workflow(
247
workflow: MethodAsyncNoParam[Any, ChildWorkflowReturnType] | str,
248
arg: Any = temporalio.common._arg_unset,
249
*,
250
id: str = None,
251
task_queue: str = None,
252
execution_timeout: timedelta = None,
253
run_timeout: timedelta = None,
254
task_timeout: timedelta = None,
255
id_reuse_policy: WorkflowIDReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE_FAILED_ONLY,
256
retry_policy: RetryPolicy = None,
257
cron_schedule: str = None,
258
parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE,
259
cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED,
260
versioning_intent: VersioningIntent = VersioningIntent.COMPATIBLE,
261
) -> ChildWorkflowReturnType: ...
262
263
def info() -> Info: ...
264
265
async def sleep(seconds: float) -> None: ...
266
267
def now() -> datetime: ...
268
269
def uuid4() -> uuid.UUID: ...
270
271
class Info:
272
attempt: int
273
continued_run_id: str | None
274
cron_schedule: str | None
275
execution_timeout: timedelta | None
276
namespace: str
277
parent: ParentInfo | None
278
retry_policy: RetryPolicy | None
279
root: RootInfo | None
280
run_id: str
281
run_timeout: timedelta | None
282
search_attributes: SearchAttributes
283
start_time: datetime
284
task_queue: str
285
task_timeout: timedelta | None
286
workflow_id: str
287
workflow_type: str
288
```
289
290
[Workflow Development](./workflow.md)
291
292
### Activity Development
293
294
Create activities with context access, heartbeating, cancellation handling, and different execution models (async, threaded, multiprocess).
295
296
```python { .api }
297
def defn(
298
fn: CallableAsyncSingleParam[ActivityParam, ActivityReturnType] | None = None,
299
*,
300
name: str = None,
301
dynamic: bool = False,
302
) -> Any: ...
303
304
def info() -> Info: ...
305
306
def in_activity() -> bool: ...
307
308
def heartbeat(*details: Any) -> None: ...
309
310
def is_cancelled() -> bool: ...
311
312
def is_worker_shutdown() -> bool: ...
313
314
def cancellation_details() -> ActivityCancellationDetails | None: ...
315
316
async def wait_for_cancelled() -> None: ...
317
318
def wait_for_cancelled_sync(timeout: timedelta = None) -> bool: ...
319
320
def wait_for_worker_shutdown_sync(timeout: timedelta = None) -> bool: ...
321
322
def raise_complete_async() -> NoReturn: ...
323
324
class Info:
325
activity_id: str
326
activity_type: str
327
attempt: int
328
current_attempt_scheduled_time: datetime
329
heartbeat_details: Sequence[Any]
330
heartbeat_timeout: timedelta | None
331
is_local: bool
332
local_retry_threshold: timedelta | None
333
schedule_to_close_timeout: timedelta | None
334
schedule_to_start_timeout: timedelta | None
335
scheduled_time: datetime
336
start_to_close_timeout: timedelta | None
337
started_time: datetime
338
task_queue: str
339
task_token: bytes
340
workflow_id: str
341
workflow_namespace: str
342
workflow_run_id: str
343
workflow_type: str
344
345
class ActivityCancellationDetails:
346
reason: str
347
details: Sequence[Any]
348
```
349
350
[Activity Development](./activity.md)
351
352
### Worker Management
353
354
Configure and run workers that execute workflows and activities, including thread pool management, task queue polling, and interceptor configuration.
355
356
```python { .api }
357
class Worker:
358
def __init__(
359
self,
360
client: Client,
361
*,
362
task_queue: str,
363
activities: Sequence[Callable] = [],
364
workflows: Sequence[type] = [],
365
interceptors: Sequence[Interceptor] = [],
366
activity_executor: Executor = None,
367
workflow_task_executor: Executor = None,
368
workflow_runner: WorkflowRunner = None,
369
shared_state_manager: SharedStateManager = None,
370
debug_mode: bool = False,
371
on_fatal_error: Callable[[Exception], None] = None,
372
max_cached_workflows: int = 1000,
373
max_concurrent_workflow_task_polls: int = 100,
374
nonsticky_to_sticky_poll_ratio: float = 0.2,
375
max_concurrent_activity_task_polls: int = 100,
376
no_remote_activities: bool = False,
377
sticky_queue_schedule_to_start_timeout: timedelta = timedelta(seconds=10),
378
max_heartbeat_throttle_interval: timedelta = timedelta(seconds=60),
379
default_heartbeat_throttle_interval: timedelta = timedelta(seconds=1),
380
max_activities_per_second: float = None,
381
max_task_queue_activities_per_second: float = None,
382
graceful_shutdown_timeout: timedelta = timedelta(0),
383
use_worker_versioning: bool = False,
384
build_id: str = None,
385
identity: str = None,
386
tuner: WorkerTuner = None,
387
workflow_failure_exception_types: Sequence[type[BaseException]] = [],
388
): ...
389
390
async def run(self) -> None: ...
391
392
async def shutdown(self) -> None: ...
393
394
class WorkerConfig:
395
client: Client
396
task_queue: str
397
activities: Sequence[Callable]
398
workflows: Sequence[type]
399
interceptors: Sequence[Interceptor]
400
activity_executor: Executor | None
401
workflow_task_executor: Executor | None
402
debug_mode: bool
403
max_cached_workflows: int
404
max_concurrent_workflow_task_polls: int
405
max_concurrent_activity_task_polls: int
406
graceful_shutdown_timeout: timedelta
407
```
408
409
[Worker Management](./worker.md)
410
411
### Data Conversion
412
413
Handle serialization and deserialization of workflow and activity data, including custom payload converters, failure converters, and search attribute encoding.
414
415
```python { .api }
416
def default() -> DataConverter: ...
417
418
class DataConverter:
419
def __init__(
420
self,
421
*,
422
payload_converter: PayloadConverter = None,
423
failure_converter: FailureConverter = None,
424
payload_codec: PayloadCodec = None,
425
): ...
426
427
async def encode(
428
self,
429
values: Sequence[Any]
430
) -> temporalio.api.common.v1.Payloads: ...
431
432
async def decode(
433
self,
434
payloads: temporalio.api.common.v1.Payloads,
435
type_hints: Sequence[type] = None
436
) -> list[Any]: ...
437
438
class PayloadConverter(ABC):
439
@abstractmethod
440
def to_payloads(self, values: Sequence[Any]) -> Sequence[temporalio.api.common.v1.Payload] | None: ...
441
442
@abstractmethod
443
def from_payloads(
444
self,
445
payloads: Sequence[temporalio.api.common.v1.Payload],
446
type_hints: Sequence[type] = None
447
) -> list[Any]: ...
448
449
class DefaultPayloadConverter(PayloadConverter):
450
def __init__(
451
self,
452
*,
453
encoding_payload_converters: Sequence[EncodingPayloadConverter] = None,
454
): ...
455
456
class JSONPlainPayloadConverter(EncodingPayloadConverter):
457
def __init__(
458
self,
459
*,
460
encoder: type[json.JSONEncoder] = AdvancedJSONEncoder,
461
decoder: Callable[[str], Any] = json.loads,
462
encoding: str = "utf-8",
463
): ...
464
465
def encode_search_attributes(search_attributes: SearchAttributes) -> temporalio.api.common.v1.SearchAttributes: ...
466
467
def decode_search_attributes(proto: temporalio.api.common.v1.SearchAttributes) -> SearchAttributes: ...
468
```
469
470
[Data Conversion](./data-conversion.md)
471
472
### Testing
473
474
Comprehensive testing utilities including workflow environments, activity environments, time skipping, and mocking capabilities.
475
476
```python { .api }
477
class WorkflowEnvironment:
478
@classmethod
479
async def start_time_skipping(
480
cls,
481
*,
482
client: Client = None,
483
download_dest_dir: Path | str | None = Path("."),
484
dev_server_exe: str | None = None,
485
dev_server_existing_path: str | None = None,
486
dev_server_extra_args: Sequence[str] = [],
487
namespace: str = "default",
488
data_converter: DataConverter = None,
489
interceptors: Sequence[Interceptor] = [],
490
tls: TLSConfig | bool = False,
491
port: int | None = None,
492
ui: bool = True,
493
log_level: str = "warn",
494
log_format: str = "pretty",
495
database_filename: str | None = None,
496
sqlite_pragma: Mapping[str, str] = {},
497
) -> WorkflowEnvironment: ...
498
499
async def shutdown(self) -> None: ...
500
501
@property
502
def client(self) -> Client: ...
503
504
async def sleep(self, duration: timedelta) -> None: ...
505
506
class ActivityEnvironment:
507
def __init__(
508
self,
509
*,
510
info: activity.Info,
511
on_heartbeat: Callable[..., None] = None,
512
on_complete_async: Callable[[], None] = None,
513
cancelled: bool = False,
514
worker_shutdown: bool = False,
515
): ...
516
517
@contextmanager
518
def as_current(self) -> Iterator[None]: ...
519
520
async def new_random_task_queue() -> str: ...
521
```
522
523
[Testing](./testing.md)
524
525
### Common Utilities
526
527
Core utilities, configuration objects, and types used throughout the SDK including retry policies, search attributes, metrics, and workflow configuration.
528
529
```python { .api }
530
@dataclass
531
class RetryPolicy:
532
initial_interval: timedelta = timedelta(seconds=1)
533
backoff_coefficient: float = 2.0
534
maximum_interval: Optional[timedelta] = None
535
maximum_attempts: int = 0
536
non_retryable_error_types: Optional[Sequence[str]] = None
537
538
class WorkflowIDReusePolicy(IntEnum):
539
ALLOW_DUPLICATE = 0
540
ALLOW_DUPLICATE_FAILED_ONLY = 1
541
REJECT_DUPLICATE = 2
542
TERMINATE_IF_RUNNING = 3
543
544
class SearchAttributeKey(ABC, Generic[SearchAttributeValueType]):
545
@staticmethod
546
def for_text(name: str) -> SearchAttributeKey[str]: ...
547
548
@staticmethod
549
def for_keyword(name: str) -> SearchAttributeKey[str]: ...
550
551
@staticmethod
552
def for_int(name: str) -> SearchAttributeKey[int]: ...
553
554
class MetricMeter(ABC):
555
@abstractmethod
556
def create_counter(
557
self, name: str, description: Optional[str] = None, unit: Optional[str] = None
558
) -> MetricCounter: ...
559
```
560
561
[Common Utilities](./common.md)
562
563
### Exception Handling
564
565
Comprehensive exception hierarchy for handling errors in workflows, activities, and client operations with detailed error information and utility functions.
566
567
```python { .api }
568
class TemporalError(Exception):
569
@property
570
def cause(self) -> Optional[BaseException]: ...
571
572
class ApplicationError(FailureError):
573
def __init__(
574
self,
575
message: str,
576
*details: Any,
577
type: Optional[str] = None,
578
non_retryable: bool = False,
579
next_retry_delay: Optional[timedelta] = None,
580
category: ApplicationErrorCategory = ApplicationErrorCategory.UNSPECIFIED,
581
): ...
582
583
class ActivityError(FailureError):
584
@property
585
def activity_type(self) -> str: ...
586
587
@property
588
def retry_state(self) -> Optional[RetryState]: ...
589
590
def is_cancelled_exception(exception: BaseException) -> bool: ...
591
```
592
593
[Exception Handling](./exceptions.md)
594
595
### Runtime Configuration
596
597
Runtime and telemetry configuration for managing SDK resources, logging, metrics collection, and observability features.
598
599
```python { .api }
600
class Runtime:
601
@staticmethod
602
def default() -> Runtime: ...
603
604
@staticmethod
605
def set_default(runtime: Runtime, *, error_if_already_set: bool = True) -> None: ...
606
607
def __init__(self, *, telemetry: TelemetryConfig) -> None: ...
608
609
@dataclass(frozen=True)
610
class TelemetryConfig:
611
logging: Optional[LoggingConfig] = LoggingConfig.default
612
metrics: Optional[Union[OpenTelemetryConfig, PrometheusConfig, MetricBuffer]] = None
613
global_tags: Mapping[str, str] = field(default_factory=dict)
614
615
@dataclass(frozen=True)
616
class PrometheusConfig:
617
bind_address: str
618
counters_total_suffix: bool = False
619
durations_as_seconds: bool = False
620
```
621
622
[Runtime Configuration](./runtime.md)
623
624
### Pydantic Integration
625
626
Pydantic v2 data converter for automatic serialization and validation of Pydantic models with type safety and custom JSON options.
627
628
```python { .api }
629
pydantic_data_converter = DataConverter(
630
payload_converter_class=PydanticPayloadConverter
631
)
632
633
class PydanticJSONPlainPayloadConverter(EncodingPayloadConverter):
634
def __init__(self, to_json_options: Optional[ToJsonOptions] = None): ...
635
636
@dataclass
637
class ToJsonOptions:
638
exclude_unset: bool = False
639
```
640
641
[Pydantic Integration](./contrib-pydantic.md)
642
643
## Types
644
645
```python { .api }
646
import concurrent.futures
from typing import TypeVar, Generic, Protocol, Callable, Any, Sequence, Mapping, Iterator
from datetime import datetime, timedelta
from enum import IntEnum, Enum
649
650
# Generic type variables
WorkflowReturnType = TypeVar("WorkflowReturnType")
ChildWorkflowReturnType = TypeVar("ChildWorkflowReturnType")
ActivityReturnType = TypeVar("ActivityReturnType")
ActivityParam = TypeVar("ActivityParam")
UpdateReturnType = TypeVar("UpdateReturnType")
SelfType = TypeVar("SelfType")
ReturnType = TypeVar("ReturnType")
MultiParamSpec = TypeVar("MultiParamSpec")
656
657
# Protocol types for callables
658
class MethodAsyncNoParam(Protocol[SelfType, ReturnType]):
659
async def __call__(self: SelfType) -> ReturnType: ...
660
661
class MethodSyncOrAsyncSingleParam(Protocol[SelfType, ReturnType]):
662
def __call__(self: SelfType, arg: Any) -> ReturnType: ...
663
664
class CallableAsyncSingleParam(Protocol[ActivityParam, ActivityReturnType]):
665
async def __call__(self, arg: ActivityParam) -> ActivityReturnType: ...
666
667
# Configuration types
668
class RetryPolicy:
669
initial_interval: timedelta
670
backoff_coefficient: float
671
maximum_interval: timedelta
672
maximum_attempts: int
673
non_retryable_error_types: Sequence[str]
674
675
class TLSConfig:
676
server_root_ca_cert: bytes | None
677
domain: str | None
678
client_cert: bytes | None
679
client_private_key: bytes | None
680
681
# Enum types
682
class WorkflowIDReusePolicy(IntEnum):
683
ALLOW_DUPLICATE = 0
684
ALLOW_DUPLICATE_FAILED_ONLY = 1
685
REJECT_DUPLICATE = 2
686
TERMINATE_IF_RUNNING = 3
687
688
class WorkflowIDConflictPolicy(IntEnum):
689
UNSPECIFIED = 0
690
FAIL = 1
691
USE_EXISTING = 2
692
693
class ActivityCancellationType(IntEnum):
694
TRY_CANCEL = 0
695
WAIT_CANCELLATION_COMPLETED = 1
696
ABANDON = 2
697
698
class ChildWorkflowCancellationType(IntEnum):
699
WAIT_CANCELLATION_COMPLETED = 0
700
REQUEST_CANCEL = 1
701
ABANDON = 2
702
703
class ParentClosePolicy(IntEnum):
704
TERMINATE = 0
705
ABANDON = 1
706
REQUEST_CANCEL = 2
707
708
class VersioningIntent(Enum):
709
UNSPECIFIED = "Unspecified"
710
COMPATIBLE = "Compatible"
711
DEFAULT = "Default"
712
713
class WorkflowUpdateStage(IntEnum):
714
ADMITTED = 0
715
ACCEPTED = 1
716
COMPLETED = 2
717
718
# Search attributes (SearchAttributeKey itself is the class shown under Common Utilities)
SearchAttributeValueType = TypeVar("SearchAttributeValueType")
SearchAttributes = Mapping[SearchAttributeKey[Any], Any]
721
722
# Common aliases
723
Executor = concurrent.futures.Executor
724
Interceptor = Any # Base type for interceptor implementations
725
```