diff --git a/pyproject.toml b/pyproject.toml index 9e0987b6b..f21923d87 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -238,6 +238,9 @@ exclude = [ [tool.ruff] target-version = "py39" +[tool.ruff.lint.per-file-ignores] +"temporalio/workflow/__init__.py" = ["F403"] + [build-system] requires = ["maturin>=1.0,<2.0"] build-backend = "maturin" diff --git a/temporalio/workflow.py b/temporalio/workflow.py deleted file mode 100644 index 59a353286..000000000 --- a/temporalio/workflow.py +++ /dev/null @@ -1,5888 +0,0 @@ -"""Utilities that can decorate or be called inside workflows.""" - -from __future__ import annotations - -import asyncio -import contextvars -import inspect -import logging -import sys -import threading -import typing -import uuid -import warnings -from abc import ABC, abstractmethod -from collections.abc import ( - Awaitable, - Callable, - Generator, - Iterable, - Iterator, - Mapping, - MutableMapping, - Sequence, -) -from contextlib import contextmanager -from dataclasses import dataclass -from datetime import datetime, timedelta, timezone -from enum import Enum, Flag, IntEnum, auto -from functools import partial -from random import Random -from typing import ( - TYPE_CHECKING, - Any, - Concatenate, - Generic, - Literal, - NoReturn, - TypeVar, - cast, - overload, -) - -import nexusrpc -import nexusrpc.handler -from nexusrpc import InputT, OutputT -from typing_extensions import ( - Protocol, - TypedDict, - runtime_checkable, -) - -import temporalio.api.common.v1 -import temporalio.api.enums -import temporalio.api.enums.v1 -import temporalio.bridge.proto.child_workflow -import temporalio.bridge.proto.common -import temporalio.bridge.proto.nexus -import temporalio.bridge.proto.workflow_commands -import temporalio.common -import temporalio.converter -import temporalio.exceptions -import temporalio.nexus -import temporalio.workflow -from temporalio.nexus._util import ServiceHandlerT - -from .types import ( - AnyType, - CallableAsyncNoParam, - 
CallableAsyncSingleParam, - CallableAsyncType, - CallableSyncNoParam, - CallableSyncOrAsyncReturnNoneType, - CallableSyncOrAsyncType, - CallableSyncSingleParam, - CallableType, - ClassType, - MethodAsyncNoParam, - MethodAsyncSingleParam, - MethodSyncNoParam, - MethodSyncOrAsyncNoParam, - MethodSyncOrAsyncSingleParam, - MethodSyncSingleParam, - MultiParamSpec, - ParamType, - ProtocolReturnType, - ReturnType, - SelfType, -) - - -@overload -def defn(cls: ClassType) -> ClassType: ... - - -@overload -def defn( - *, - name: str | None = None, - sandboxed: bool = True, - failure_exception_types: Sequence[type[BaseException]] = [], - versioning_behavior: temporalio.common.VersioningBehavior = temporalio.common.VersioningBehavior.UNSPECIFIED, -) -> Callable[[ClassType], ClassType]: ... - - -@overload -def defn( - *, - sandboxed: bool = True, - dynamic: bool = False, - versioning_behavior: temporalio.common.VersioningBehavior = temporalio.common.VersioningBehavior.UNSPECIFIED, -) -> Callable[[ClassType], ClassType]: ... - - -def defn( - cls: ClassType | None = None, - *, - name: str | None = None, - sandboxed: bool = True, - dynamic: bool = False, - failure_exception_types: Sequence[type[BaseException]] = [], - versioning_behavior: temporalio.common.VersioningBehavior = temporalio.common.VersioningBehavior.UNSPECIFIED, -) -> Callable[[ClassType], ClassType]: - """Decorator for workflow classes. - - This must be set on any registered workflow class (it is ignored if on a - base class). - - Args: - cls: The class to decorate. - name: Name to use for the workflow. Defaults to class ``__name__``. This - cannot be set if dynamic is set. - sandboxed: Whether the workflow should run in a sandbox. Default is - true. - dynamic: If true, this activity will be dynamic. Dynamic workflows have - to accept a single 'Sequence[RawValue]' parameter. This cannot be - set to true if name is present. 
- failure_exception_types: The types of exceptions that, if a - workflow-thrown exception extends, will cause the workflow/update to - fail instead of suspending the workflow via task failure. These are - applied in addition to ones set on the worker constructor. If - ``Exception`` is set, it effectively will fail a workflow/update in - all user exception cases. WARNING: This setting is experimental. - versioning_behavior: Specifies the versioning behavior to use for this workflow. - """ - - def decorator(cls: ClassType) -> ClassType: - # This performs validation - _Definition._apply_to_class( - cls, - workflow_name=name or cls.__name__ if not dynamic else None, - sandboxed=sandboxed, - failure_exception_types=failure_exception_types, - versioning_behavior=versioning_behavior, - ) - return cls - - if cls is not None: - return decorator(cls) - return decorator - - -def init( - init_fn: CallableType, -) -> CallableType: - """Decorator for the workflow init method. - - This may be used on the __init__ method of the workflow class to specify - that it accepts the same workflow input arguments as the ``@workflow.run`` - method. If used, the parameters of your __init__ and ``@workflow.run`` - methods must be identical. - - Args: - init_fn: The __init__ method to decorate. - """ - if init_fn.__name__ != "__init__": - raise ValueError("@workflow.init may only be used on the __init__ method") - - setattr(init_fn, "__temporal_workflow_init", True) - return init_fn - - -def run(fn: CallableAsyncType) -> CallableAsyncType: - """Decorator for the workflow run method. - - This must be used on one and only one async method defined on the same class - as ``@workflow.defn``. This can be defined on a base class method but must - then be explicitly overridden and defined on the workflow class. - - Run methods can only have positional parameters. Best practice is to only - take a single object/dataclass argument that can accept more fields later if - needed. 
- - Args: - fn: The function to decorate. - """ - if not inspect.iscoroutinefunction(fn): - raise ValueError("Workflow run method must be an async function") - # Disallow local classes because we need to have the class globally - # referenceable by name - if "" in fn.__qualname__: - raise ValueError( - "Local classes unsupported, @workflow.run cannot be on a local class" - ) - setattr(fn, "__temporal_workflow_run", True) - # TODO(cretz): Why is MyPy unhappy with this return? - return fn # type: ignore[return-value] - - -class HandlerUnfinishedPolicy(Enum): - """Actions taken if a workflow terminates with running handlers. - - Policy defining actions taken when a workflow exits while update or signal handlers are running. - The workflow exit may be due to successful return, failure, cancellation, or continue-as-new. - """ - - WARN_AND_ABANDON = 1 - """Issue a warning in addition to abandoning.""" - ABANDON = 2 - """Abandon the handler. - - In the case of an update handler this means that the client will receive an error rather than - the update result.""" - - -class UnfinishedUpdateHandlersWarning(RuntimeWarning): - """The workflow exited before all update handlers had finished executing.""" - - -class UnfinishedSignalHandlersWarning(RuntimeWarning): - """The workflow exited before all signal handlers had finished executing.""" - - -@overload -def signal( - fn: CallableSyncOrAsyncReturnNoneType, -) -> CallableSyncOrAsyncReturnNoneType: ... - - -@overload -def signal( - *, - unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, - description: str | None = None, -) -> Callable[ - [CallableSyncOrAsyncReturnNoneType], CallableSyncOrAsyncReturnNoneType -]: ... - - -@overload -def signal( - *, - name: str, - unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, - description: str | None = None, -) -> Callable[ - [CallableSyncOrAsyncReturnNoneType], CallableSyncOrAsyncReturnNoneType -]: ... 
- - -@overload -def signal( - *, - dynamic: Literal[True], - unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, - description: str | None = None, -) -> Callable[ - [CallableSyncOrAsyncReturnNoneType], CallableSyncOrAsyncReturnNoneType -]: ... - - -def signal( - fn: CallableSyncOrAsyncReturnNoneType | None = None, - *, - name: str | None = None, - dynamic: bool | None = False, - unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, - description: str | None = None, -) -> ( - Callable[[CallableSyncOrAsyncReturnNoneType], CallableSyncOrAsyncReturnNoneType] - | CallableSyncOrAsyncReturnNoneType -): - """Decorator for a workflow signal method. - - This is used on any async or non-async method that you wish to be called upon - receiving a signal. If a function overrides one with this decorator, it too - must be decorated. - - Signal methods can only have positional parameters. Best practice for - non-dynamic signal methods is to only take a single object/dataclass - argument that can accept more fields later if needed. Return values from - signal methods are ignored. - - Args: - fn: The function to decorate. - name: Signal name. Defaults to method ``__name__``. Cannot be present - when ``dynamic`` is present. - dynamic: If true, this handles all signals not otherwise handled. The - parameters of the method must be self, a string name, and a - ``*args`` positional varargs. Cannot be present when ``name`` is - present. - unfinished_policy: Actions taken if a workflow terminates with - a running instance of this handler. - description: A short description of the signal that may appear in the UI/CLI. 
- """ - - def decorator( - name: str | None, - unfinished_policy: HandlerUnfinishedPolicy, - fn: CallableSyncOrAsyncReturnNoneType, - ) -> CallableSyncOrAsyncReturnNoneType: - if not name and not dynamic: - name = fn.__name__ - defn = _SignalDefinition( - name=name, - fn=fn, - is_method=True, - unfinished_policy=unfinished_policy, - description=description, - ) - setattr(fn, "__temporal_signal_definition", defn) - if defn.dynamic_vararg: - warnings.warn( - "Dynamic signals with vararg third param is deprecated, use Sequence[RawValue]", - DeprecationWarning, - stacklevel=2, - ) - return fn - - if not fn: - if name is not None and dynamic: - raise RuntimeError("Cannot provide name and dynamic boolean") - return partial(decorator, name, unfinished_policy) - else: - return decorator(fn.__name__, unfinished_policy, fn) - - -@overload -def query(fn: CallableType) -> CallableType: ... - - -@overload -def query( - *, name: str, description: str | None = None -) -> Callable[[CallableType], CallableType]: ... - - -@overload -def query( - *, dynamic: Literal[True], description: str | None = None -) -> Callable[[CallableType], CallableType]: ... - - -@overload -def query(*, description: str) -> Callable[[CallableType], CallableType]: ... - - -def query( - fn: CallableType | None = None, # type: ignore[reportInvalidTypeVarUse] - *, - name: str | None = None, - dynamic: bool | None = False, - description: str | None = None, -): - """Decorator for a workflow query method. - - This is used on any non-async method that expects to handle a query. If a - function overrides one with this decorator, it too must be decorated. - - Query methods can only have positional parameters. Best practice for - non-dynamic query methods is to only take a single object/dataclass - argument that can accept more fields later if needed. The return value is - the resulting query value. Query methods must not mutate any workflow state. - - Args: - fn: The function to decorate. - name: Query name. 
Defaults to method ``__name__``. Cannot be present - when ``dynamic`` is present. - dynamic: If true, this handles all queries not otherwise handled. The - parameters of the method should be self, a string name, and a - ``Sequence[RawValue]``. An older form of this accepted vararg - parameters which will now warn. Cannot be present when ``name`` is - present. - description: A short description of the query that may appear in the UI/CLI. - """ - - def decorator( - name: str | None, - description: str | None, - fn: CallableType, - *, - bypass_async_check: bool = False, - ) -> CallableType: - if not name and not dynamic: - name = fn.__name__ - if not bypass_async_check and inspect.iscoroutinefunction(fn): - warnings.warn( - "Queries as async def functions are deprecated", - DeprecationWarning, - stacklevel=2, - ) - defn = _QueryDefinition( - name=name, fn=fn, is_method=True, description=description - ) - setattr(fn, "__temporal_query_definition", defn) - if defn.dynamic_vararg: - warnings.warn( - "Dynamic queries with vararg third param is deprecated, use Sequence[RawValue]", - DeprecationWarning, - stacklevel=2, - ) - return fn - - if name is not None or dynamic or description: - if name is not None and dynamic: - raise RuntimeError("Cannot provide name and dynamic boolean") - return partial(decorator, name, description) - if fn is None: - raise RuntimeError("Cannot create query without function or name or dynamic") - if inspect.iscoroutinefunction(fn): - warnings.warn( - "Queries as async def functions are deprecated", - DeprecationWarning, - stacklevel=2, - ) - return decorator(fn.__name__, description, fn, bypass_async_check=True) - - -@dataclass(frozen=True) -class DynamicWorkflowConfig: - """Returned by functions using the :py:func:`dynamic_config` decorator, see it for more.""" - - failure_exception_types: Sequence[type[BaseException]] | None = None - """The types of exceptions that, if a workflow-thrown exception extends, will cause the - workflow/update to 
fail instead of suspending the workflow via task failure. These are applied - in addition to ones set on the worker constructor. If ``Exception`` is set, it effectively will - fail a workflow/update in all user exception cases. - - Always overrides the equivalent parameter on :py:func:`defn` if set not-None. - - WARNING: This setting is experimental. - """ - versioning_behavior: temporalio.common.VersioningBehavior = ( - temporalio.common.VersioningBehavior.UNSPECIFIED - ) - """Specifies the versioning behavior to use for this workflow. - - Always overrides the equivalent parameter on :py:func:`defn`. - """ - - -def dynamic_config( - fn: MethodSyncNoParam[SelfType, DynamicWorkflowConfig], -) -> MethodSyncNoParam[SelfType, DynamicWorkflowConfig]: - """Decorator to allow configuring a dynamic workflow's behavior. - - Because dynamic workflows may conceptually represent more than one workflow type, it may be - desirable to have different settings for fields that would normally be passed to - :py:func:`defn`, but vary based on the workflow type name or other information available in - the workflow's context. This function will be called after the workflow's :py:func:`init`, - if it has one, but before the workflow's :py:func:`run` method. - - The method must only take self as a parameter, and any values set in the class it returns will - override those provided to :py:func:`defn`. - - Cannot be specified on non-dynamic workflows. - - Args: - fn: The function to decorate. - """ - if inspect.iscoroutinefunction(fn): - raise ValueError("Workflow dynamic_config method must be synchronous") - params = list(inspect.signature(fn).parameters.values()) - if len(params) != 1: - raise ValueError("Workflow dynamic_config method must only take self parameter") - - # Add marker attribute - setattr(fn, "__temporal_workflow_dynamic_config", True) - return fn - - -@dataclass(frozen=True) -class Info: - """Information about the running workflow. 
- - Retrieved inside a workflow via :py:func:`info`. This object is immutable - with the exception of the :py:attr:`search_attributes` and - :py:attr:`typed_search_attributes` which is updated on - :py:func:`upsert_search_attributes`. - - Note, required fields may be added here in future versions. This class - should never be constructed by users. - """ - - attempt: int - continued_run_id: str | None - cron_schedule: str | None - execution_timeout: timedelta | None - first_execution_run_id: str - headers: Mapping[str, temporalio.api.common.v1.Payload] - namespace: str - parent: ParentInfo | None - root: RootInfo | None - priority: temporalio.common.Priority - """The priority of this workflow execution. If not set, or this server predates priorities, - then returns a default instance.""" - raw_memo: Mapping[str, temporalio.api.common.v1.Payload] - retry_policy: temporalio.common.RetryPolicy | None - run_id: str - run_timeout: timedelta | None - - search_attributes: temporalio.common.SearchAttributes - """Search attributes for the workflow. - - .. deprecated:: - Use :py:attr:`typed_search_attributes` instead. - """ - - start_time: datetime - """The start time of the first task executed by the workflow.""" - - task_queue: str - task_timeout: timedelta - - typed_search_attributes: temporalio.common.TypedSearchAttributes - """Search attributes for the workflow. - - Note, this may have invalid values or be missing values if passing the - deprecated form of dictionary attributes to - :py:meth:`upsert_search_attributes`. - """ - - workflow_id: str - - workflow_start_time: datetime - """The start time of the workflow based on the workflow initialization.""" - - workflow_type: str - - def _logger_details(self) -> Mapping[str, Any]: - return { - # TODO(cretz): worker ID? 
- "attempt": self.attempt, - "namespace": self.namespace, - "run_id": self.run_id, - "task_queue": self.task_queue, - "workflow_id": self.workflow_id, - "workflow_type": self.workflow_type, - } - - def get_current_build_id(self) -> str: - """Get the Build ID of the worker which executed the current Workflow Task. - - May be undefined if the task was completed by a worker without a Build ID. If this worker is - the one executing this task for the first time and has a Build ID set, then its ID will be - used. This value may change over the lifetime of the workflow run, but is deterministic and - safe to use for branching. - - .. deprecated:: - Use get_current_deployment_version instead. - """ - return _Runtime.current().workflow_get_current_build_id() - - def get_current_deployment_version( - self, - ) -> temporalio.common.WorkerDeploymentVersion | None: - """Get the deployment version of the worker which executed the current Workflow Task. - - May be None if the task was completed by a worker without a deployment version or build - id. If this worker is the one executing this task for the first time and has a deployment - version set, then its ID will be used. This value may change over the lifetime of the - workflow run, but is deterministic and safe to use for branching. - """ - return _Runtime.current().workflow_get_current_deployment_version() - - def get_current_history_length(self) -> int: - """Get the current number of events in history. - - Note, this value may not be up to date if accessed inside a query. - - Returns: - Current number of events in history (up until the current task). - """ - return _Runtime.current().workflow_get_current_history_length() - - def get_current_history_size(self) -> int: - """Get the current byte size of history. - - Note, this value may not be up to date if accessed inside a query. - - Returns: - Current byte-size of history (up until the current task). 
- """ - return _Runtime.current().workflow_get_current_history_size() - - def is_continue_as_new_suggested(self) -> bool: - """Get whether or not continue as new is suggested. - - Note, this value may not be up to date if accessed inside a query. - - Returns: - True if the server is configured to suggest continue as new and it - is suggested. - """ - return _Runtime.current().workflow_is_continue_as_new_suggested() - - def is_target_worker_deployment_version_changed(self) -> bool: - """Check whether the target worker deployment version has changed. - - Note: Upgrade-on-Continue-as-New is currently experimental. - - Returns: - True if the target worker deployment version has changed. - """ - return _Runtime.current().workflow_is_target_worker_deployment_version_changed() - - -@dataclass(frozen=True) -class ParentInfo: - """Information about the parent workflow.""" - - namespace: str - run_id: str - workflow_id: str - - -@dataclass(frozen=True) -class RootInfo: - """Information about the root workflow.""" - - run_id: str - workflow_id: str - - -@dataclass(frozen=True) -class UpdateInfo: - """Information about a workflow update.""" - - id: str - """Update ID.""" - - name: str - """Update type name.""" - - @property - def _logger_details(self) -> Mapping[str, Any]: - """Data to be included in string appended to default logging output.""" - return { - "update_id": self.id, - "update_name": self.name, - } - - -class _Runtime(ABC): - @staticmethod - def current() -> _Runtime: - loop = _Runtime.maybe_current() - if not loop: - raise _NotInWorkflowEventLoopError("Not in workflow event loop") - return loop - - @staticmethod - def maybe_current() -> _Runtime | None: - try: - return getattr( - asyncio.get_running_loop(), "__temporal_workflow_runtime", None - ) - except RuntimeError: - return None - - @staticmethod - def set_on_loop(loop: asyncio.AbstractEventLoop, runtime: _Runtime | None) -> None: - if runtime: - setattr(loop, "__temporal_workflow_runtime", runtime) - elif 
hasattr(loop, "__temporal_workflow_runtime"): - delattr(loop, "__temporal_workflow_runtime") - - def __init__(self) -> None: - super().__init__() - self._logger_details: Mapping[str, Any] | None = None - - @property - def logger_details(self) -> Mapping[str, Any]: - if self._logger_details is None: - self._logger_details = self.workflow_info()._logger_details() - return self._logger_details - - @abstractmethod - def workflow_all_handlers_finished(self) -> bool: ... - - @abstractmethod - def workflow_continue_as_new( - self, - *args: Any, - workflow: None | Callable | str, - task_queue: str | None, - run_timeout: timedelta | None, - task_timeout: timedelta | None, - retry_policy: temporalio.common.RetryPolicy | None, - memo: Mapping[str, Any] | None, - search_attributes: None - | ( - temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes - ), - versioning_intent: VersioningIntent | None, - initial_versioning_behavior: ContinueAsNewVersioningBehavior | None, - ) -> NoReturn: ... - - @abstractmethod - def workflow_extern_functions(self) -> Mapping[str, Callable]: ... - - @abstractmethod - def workflow_get_current_build_id(self) -> str: ... - - @abstractmethod - def workflow_get_current_deployment_version( - self, - ) -> temporalio.common.WorkerDeploymentVersion | None: ... - - @abstractmethod - def workflow_get_current_history_length(self) -> int: ... - - @abstractmethod - def workflow_get_current_history_size(self) -> int: ... - - @abstractmethod - def workflow_get_external_workflow_handle( - self, id: str, *, run_id: str | None - ) -> ExternalWorkflowHandle[Any]: ... - - @abstractmethod - def workflow_get_query_handler(self, name: str | None) -> Callable | None: ... - - @abstractmethod - def workflow_get_signal_handler(self, name: str | None) -> Callable | None: ... - - @abstractmethod - def workflow_get_update_handler(self, name: str | None) -> Callable | None: ... 
- - @abstractmethod - def workflow_get_update_validator(self, name: str | None) -> Callable | None: ... - - @abstractmethod - def workflow_info(self) -> Info: ... - - @abstractmethod - def workflow_instance(self) -> Any: ... - - @abstractmethod - def workflow_is_continue_as_new_suggested(self) -> bool: ... - - @abstractmethod - def workflow_is_target_worker_deployment_version_changed(self) -> bool: ... - - @abstractmethod - def workflow_is_replaying(self) -> bool: ... - - @abstractmethod - def workflow_is_replaying_history_events(self) -> bool: ... - - @abstractmethod - def workflow_is_read_only(self) -> bool: ... - - @abstractmethod - def workflow_memo(self) -> Mapping[str, Any]: ... - - @abstractmethod - def workflow_memo_value( - self, key: str, default: Any, *, type_hint: type | None - ) -> Any: ... - - @abstractmethod - def workflow_upsert_memo(self, updates: Mapping[str, Any]) -> None: ... - - @abstractmethod - def workflow_metric_meter(self) -> temporalio.common.MetricMeter: ... - - @abstractmethod - def workflow_patch(self, id: str, *, deprecated: bool) -> bool: ... - - @abstractmethod - def workflow_payload_converter(self) -> temporalio.converter.PayloadConverter: ... - - @abstractmethod - def workflow_random(self) -> Random: ... - - @abstractmethod - def workflow_set_query_handler( - self, name: str | None, handler: Callable | None - ) -> None: ... - - @abstractmethod - def workflow_set_signal_handler( - self, name: str | None, handler: Callable | None - ) -> None: ... - - @abstractmethod - def workflow_set_update_handler( - self, - name: str | None, - handler: Callable | None, - validator: Callable | None, - ) -> None: ... 
- - @abstractmethod - def workflow_start_activity( - self, - activity: Any, - *args: Any, - task_queue: str | None, - result_type: type | None, - schedule_to_close_timeout: timedelta | None, - schedule_to_start_timeout: timedelta | None, - start_to_close_timeout: timedelta | None, - heartbeat_timeout: timedelta | None, - retry_policy: temporalio.common.RetryPolicy | None, - cancellation_type: ActivityCancellationType, - activity_id: str | None, - versioning_intent: VersioningIntent | None, - summary: str | None = None, - priority: temporalio.common.Priority = temporalio.common.Priority.default, - ) -> ActivityHandle[Any]: ... - - @abstractmethod - async def workflow_start_child_workflow( - self, - workflow: Any, - *args: Any, - id: str, - task_queue: str | None, - result_type: type | None, - cancellation_type: ChildWorkflowCancellationType, - parent_close_policy: ParentClosePolicy, - execution_timeout: timedelta | None, - run_timeout: timedelta | None, - task_timeout: timedelta | None, - id_reuse_policy: temporalio.common.WorkflowIDReusePolicy, - retry_policy: temporalio.common.RetryPolicy | None, - cron_schedule: str, - memo: Mapping[str, Any] | None, - search_attributes: None - | ( - temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes - ), - versioning_intent: VersioningIntent | None, - static_summary: str | None = None, - static_details: str | None = None, - priority: temporalio.common.Priority = temporalio.common.Priority.default, - ) -> ChildWorkflowHandle[Any, Any]: ... 
- - @abstractmethod - def workflow_start_local_activity( - self, - activity: Any, - *args: Any, - result_type: type | None, - schedule_to_close_timeout: timedelta | None, - schedule_to_start_timeout: timedelta | None, - start_to_close_timeout: timedelta | None, - retry_policy: temporalio.common.RetryPolicy | None, - local_retry_threshold: timedelta | None, - cancellation_type: ActivityCancellationType, - activity_id: str | None, - summary: str | None, - ) -> ActivityHandle[Any]: ... - - @abstractmethod - async def workflow_start_nexus_operation( - self, - endpoint: str, - service: str, - operation: nexusrpc.Operation[InputT, OutputT] | str | Callable[..., Any], - input: Any, - output_type: type[OutputT] | None, - schedule_to_close_timeout: timedelta | None, - schedule_to_start_timeout: timedelta | None, - start_to_close_timeout: timedelta | None, - cancellation_type: temporalio.workflow.NexusOperationCancellationType, - headers: Mapping[str, str] | None, - summary: str | None, - ) -> NexusOperationHandle[OutputT]: ... - - @abstractmethod - def workflow_time_ns(self) -> int: ... - - @abstractmethod - def workflow_upsert_search_attributes( - self, - attributes: ( - temporalio.common.SearchAttributes - | Sequence[temporalio.common.SearchAttributeUpdate] - ), - ) -> None: ... - - @abstractmethod - async def workflow_sleep( - self, duration: float, *, summary: str | None = None - ) -> None: ... - - @abstractmethod - async def workflow_wait_condition( - self, - fn: Callable[[], bool], - *, - timeout: float | None = None, - timeout_summary: str | None = None, - ) -> None: ... - - @abstractmethod - def workflow_get_current_details(self) -> str: ... - - @abstractmethod - def workflow_set_current_details(self, details: str): ... - - @abstractmethod - def workflow_is_failure_exception(self, err: BaseException) -> bool: ... - - @abstractmethod - def workflow_has_last_completion_result(self) -> bool: ... 
- - @abstractmethod - def workflow_last_completion_result(self, type_hint: type | None) -> Any | None: ... - - @abstractmethod - def workflow_last_failure(self) -> BaseException | None: ... - - @abstractmethod - def workflow_random_seed(self) -> int: ... - - @abstractmethod - def workflow_register_random_seed_callback( - self, callback: Callable[[int], None] - ) -> None: ... - - -_current_update_info: contextvars.ContextVar[UpdateInfo] = contextvars.ContextVar( - "__temporal_current_update_info" -) - - -def _set_current_update_info(info: UpdateInfo) -> None: # type: ignore[reportUnusedFunction] - _current_update_info.set(info) - - -def current_update_info() -> UpdateInfo | None: - """Info for the current update if any. - - This is powered by :py:mod:`contextvars` so it is only valid within the - update handler and coroutines/tasks it has started. - - Returns: - Info for the current update handler the code calling this is executing - within if any. - """ - return _current_update_info.get(None) - - -def deprecate_patch(id: str) -> None: - """Mark a patch as deprecated. - - This marks a workflow that had :py:func:`patched` in a previous version of - the code as no longer applicable because all workflows that use the old code - path are done and will never be queried again. Therefore the old code path - is removed as well. - - Args: - id: The identifier originally used with :py:func:`patched`. - """ - _Runtime.current().workflow_patch(id, deprecated=True) - - -def extern_functions() -> Mapping[str, Callable]: - """External functions available in the workflow sandbox. - - Returns: - Mapping of external functions that can be called from inside a workflow - sandbox. - """ - return _Runtime.current().workflow_extern_functions() - - -def info() -> Info: - """Current workflow's info. - - Returns: - Info for the currently running workflow. - """ - return _Runtime.current().workflow_info() - - -def instance() -> Any: - """Current workflow's instance. 
- - Returns: - The currently running workflow instance. - """ - return _Runtime.current().workflow_instance() - - -def in_workflow() -> bool: - """Whether the code is currently running in a workflow.""" - return _Runtime.maybe_current() is not None - - -def memo() -> Mapping[str, Any]: - """Current workflow's memo values, converted without type hints. - - Since type hints are not used, the default converted values will come back. - For example, if the memo was originally created with a dataclass, the value - will be a dict. To convert using proper type hints, use - :py:func:`memo_value`. - - Returns: - Mapping of all memo keys and they values without type hints. - """ - return _Runtime.current().workflow_memo() - - -def is_failure_exception(err: BaseException) -> bool: - """Checks if the given exception is a workflow failure in the current workflow. - - Returns: - True if the given exception is a workflow failure in the current workflow. - """ - return _Runtime.current().workflow_is_failure_exception(err) - - -@overload -def memo_value(key: str, default: Any = temporalio.common._arg_unset) -> Any: ... - - -@overload -def memo_value(key: str, *, type_hint: type[ParamType]) -> ParamType: ... - - -@overload -def memo_value( - key: str, default: AnyType, *, type_hint: type[ParamType] -) -> AnyType | ParamType: ... - - -def memo_value( - key: str, - default: Any = temporalio.common._arg_unset, - *, - type_hint: type | None = None, -) -> Any: - """Memo value for the given key, optional default, and optional type - hint. - - Args: - key: Key to get memo value for. - default: Default to use if key is not present. If unset, a - :py:class:`KeyError` is raised when the key does not exist. - type_hint: Type hint to use when converting. - - Returns: - Memo value, converted with the type hint if present. - - Raises: - KeyError: Key not present and default not set. 
- """ - return _Runtime.current().workflow_memo_value(key, default, type_hint=type_hint) - - -def upsert_memo(updates: Mapping[str, Any]) -> None: - """Adds, modifies, and/or removes memos, with upsert semantics. - - Every memo that has a matching key has its value replaced with the one specified in ``updates``. - If the value is set to ``None``, the memo is removed instead. - For every key with no existing memo, a new memo is added with specified value (unless the value is ``None``). - Memos with keys not included in ``updates`` remain unchanged. - """ - return _Runtime.current().workflow_upsert_memo(updates) - - -def get_current_details() -> str: - """Get the current details of the workflow which may appear in the UI/CLI. - Unlike static details set at start, this value can be updated throughout - the life of the workflow and is independent of the static details. - This can be in Temporal markdown format and can span multiple lines. - """ - return _Runtime.current().workflow_get_current_details() - - -def has_last_completion_result() -> bool: - """Gets whether there is a last completion result of the workflow.""" - return _Runtime.current().workflow_has_last_completion_result() - - -@overload -def get_last_completion_result() -> Any | None: ... - - -@overload -def get_last_completion_result(type_hint: type[ParamType]) -> ParamType | None: ... - - -def get_last_completion_result(type_hint: type | None = None) -> Any | None: - """Get the result of the last run of the workflow. This will be None if there was - no previous completion or the result was None. has_last_completion_result() - can be used to differentiate. 
- """ - return _Runtime.current().workflow_last_completion_result(type_hint) - - -def get_last_failure() -> BaseException | None: - """Get the last failure of the workflow if it has run previously.""" - return _Runtime.current().workflow_last_failure() - - -def set_current_details(description: str) -> None: - """Set the current details of the workflow which may appear in the UI/CLI. - Unlike static details set at start, this value can be updated throughout - the life of the workflow and is independent of the static details. - This can be in Temporal markdown format and can span multiple lines. - """ - _Runtime.current().workflow_set_current_details(description) - - -def metric_meter() -> temporalio.common.MetricMeter: - """Get the metric meter for the current workflow. - - This meter is replay safe which means that metrics will not be recorded - during replay. - - Returns: - Current metric meter for this workflow for recording metrics. - """ - return _Runtime.current().workflow_metric_meter() - - -def now() -> datetime: - """Current time from the workflow perspective. - - This is the workflow equivalent of :py:func:`datetime.now` with the - :py:attr:`timezone.utc` parameter. - - Returns: - UTC datetime for the current workflow time. The datetime does have UTC - set as the time zone. - """ - return datetime.fromtimestamp(time(), timezone.utc) - - -def patched(id: str) -> bool: - """Patch a workflow. - - When called, this will only return true if code should take the newer path - which means this is either not replaying or is replaying and has seen this - patch before. - - Use :py:func:`deprecate_patch` when all workflows are done and will never be - queried again. The old code path can be used at that time too. - - Args: - id: The identifier for this patch. This identifier may be used - repeatedly in the same workflow to represent the same patch - - Returns: - True if this should take the newer path, false if it should take the - older path. 
- """ - return _Runtime.current().workflow_patch(id, deprecated=False) - - -def payload_converter() -> temporalio.converter.PayloadConverter: - """Get the payload converter for the current workflow. - - The returned converter has :py:class:`temporalio.converter.WorkflowSerializationContext` set. - This is often used for dynamic workflows/signals/queries to convert - payloads. - """ - return _Runtime.current().workflow_payload_converter() - - -def random() -> Random: - """Get a deterministic pseudo-random number generator. - - Note, this random number generator is not cryptographically safe and should - not be used for security purposes. - - Returns: - The deterministically-seeded pseudo-random number generator. - """ - return _Runtime.current().workflow_random() - - -def random_seed() -> int: - """Get the current random seed value from core. - - This returns the seed value currently being used by the workflow's - deterministic random number generator. - - Returns: - The current random seed as an integer. - """ - return _Runtime.current().workflow_random_seed() - - -def register_random_seed_callback(callback: Callable[[int], None]) -> None: - """Register a callback to be notified when the random seed changes. - - The callback will be invoked whenever the workflow receives a new random - seed from the core. This is useful for maintaining external random number - generators that need to stay in sync with the workflow's randomness. - - Args: - callback: Function to be called with the new seed value when it changes. - """ - return _Runtime.current().workflow_register_random_seed_callback(callback) - - -def new_random() -> Random: - """Create a Random instance that automatically reseeds when the workflow seed changes. - - This creates a new Random instance that is initially seeded with the current - workflow seed, and automatically registers a callback to reseed itself - whenever the workflow receives a new seed from core. 
- - Returns: - A Random instance that stays synchronized with the workflow's randomness. - """ - current_seed = random_seed() - auto_random = Random(current_seed) - - def reseed_callback(new_seed: int) -> None: - auto_random.seed(new_seed) - - register_random_seed_callback(reseed_callback) - return auto_random - - -def time() -> float: - """Current seconds since the epoch from the workflow perspective. - - This is the workflow equivalent of :py:func:`time.time`. - - Returns: - Seconds since the epoch as a float. - """ - return time_ns() / 1e9 - - -def time_ns() -> int: - """Current nanoseconds since the epoch from the workflow perspective. - - This is the workflow equivalent of :py:func:`time.time_ns`. - - Returns: - Nanoseconds since the epoch - """ - return _Runtime.current().workflow_time_ns() - - -def upsert_search_attributes( - attributes: ( - temporalio.common.SearchAttributes - | Sequence[temporalio.common.SearchAttributeUpdate] - ), -) -> None: - """Upsert search attributes for this workflow. - - Args: - attributes: The attributes to set. This should be a sequence of - updates (i.e. values created via value_set and value_unset calls on - search attribute keys). The dictionary form of attributes is - DEPRECATED and if used, result in invalid key types on the - typed_search_attributes property in the info. - """ - if not attributes: - return - temporalio.common._warn_on_deprecated_search_attributes(attributes) - _Runtime.current().workflow_upsert_search_attributes(attributes) - - -# Needs to be defined here to avoid a circular import -@runtime_checkable -class UpdateMethodMultiParam(Protocol[MultiParamSpec, ProtocolReturnType]): - """Decorated workflow update functions implement this.""" - - _defn: temporalio.workflow._UpdateDefinition - - def __call__( - self, *args: MultiParamSpec.args, **kwargs: MultiParamSpec.kwargs - ) -> ProtocolReturnType | Awaitable[ProtocolReturnType]: - """Generic callable type callback.""" - ... 
- - def validator( - self, vfunc: Callable[MultiParamSpec, None] - ) -> Callable[MultiParamSpec, None]: - """Use to decorate a function to validate the arguments passed to the update handler.""" - ... - - -@overload -def update( - fn: Callable[MultiParamSpec, Awaitable[ReturnType]], -) -> UpdateMethodMultiParam[MultiParamSpec, ReturnType]: ... - - -@overload -def update( - fn: Callable[MultiParamSpec, ReturnType], -) -> UpdateMethodMultiParam[MultiParamSpec, ReturnType]: ... - - -@overload -def update( - *, - unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, - description: str | None = None, -) -> Callable[ - [Callable[MultiParamSpec, ReturnType]], - UpdateMethodMultiParam[MultiParamSpec, ReturnType], -]: ... - - -@overload -def update( - *, - name: str, - unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, - description: str | None = None, -) -> Callable[ - [Callable[MultiParamSpec, ReturnType]], - UpdateMethodMultiParam[MultiParamSpec, ReturnType], -]: ... - - -@overload -def update( - *, - dynamic: Literal[True], - unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, - description: str | None = None, -) -> Callable[ - [Callable[MultiParamSpec, ReturnType]], - UpdateMethodMultiParam[MultiParamSpec, ReturnType], -]: ... - - -def update( - fn: CallableSyncOrAsyncType | None = None, # type: ignore[reportInvalidTypeVarUse] - *, - name: str | None = None, - dynamic: bool | None = False, - unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, - description: str | None = None, -) -> ( - UpdateMethodMultiParam[MultiParamSpec, ReturnType] - | Callable[ - [Callable[MultiParamSpec, ReturnType]], - UpdateMethodMultiParam[MultiParamSpec, ReturnType], - ] -): - """Decorator for a workflow update handler method. - - This is used on any async or non-async method that you wish to be called upon - receiving an update. 
If a function overrides one with this decorator, it too - must be decorated. - - You may also optionally define a validator method that will be called before - this handler you have applied this decorator to. You can specify the validator - with ``@update_handler_function_name.validator``. - - Update methods can only have positional parameters. Best practice for - non-dynamic update methods is to only take a single object/dataclass - argument that can accept more fields later if needed. The handler may return - a serializable value which will be sent back to the caller of the update. - - Args: - fn: The function to decorate. - name: Update name. Defaults to method ``__name__``. Cannot be present - when ``dynamic`` is present. - dynamic: If true, this handles all updates not otherwise handled. The - parameters of the method must be self, a string name, and a - ``*args`` positional varargs. Cannot be present when ``name`` is - present. - unfinished_policy: Actions taken if a workflow terminates with - a running instance of this handler. - description: A short description of the update that may appear in the UI/CLI. 
- """ - - def decorator( - name: str | None, - unfinished_policy: HandlerUnfinishedPolicy, - fn: CallableSyncOrAsyncType, - ) -> CallableSyncOrAsyncType: - if not name and not dynamic: - name = fn.__name__ - defn = _UpdateDefinition( - name=name, - fn=fn, - is_method=True, - unfinished_policy=unfinished_policy, - description=description, - ) - if defn.dynamic_vararg: - raise RuntimeError( - "Dynamic updates do not support a vararg third param, use Sequence[RawValue]", - ) - setattr(fn, "_defn", defn) - setattr(fn, "validator", partial(_update_validator, defn)) - return fn - - if not fn: - if name is not None and dynamic: - raise RuntimeError("Cannot provide name and dynamic boolean") - return partial(decorator, name, unfinished_policy) # type: ignore[reportReturnType, return-value] - else: - return decorator(fn.__name__, unfinished_policy, fn) # type: ignore[reportReturnType, return-value] - - -def _update_validator( - update_def: _UpdateDefinition, fn: Callable[..., None] | None = None -) -> Callable[..., None] | None: - """Decorator for a workflow update validator method.""" - if fn is not None: - update_def.set_validator(fn) - return fn - - -def uuid4() -> uuid.UUID: - """Get a new, determinism-safe v4 UUID based on :py:func:`random`. - - Note, this UUID is not cryptographically safe and should not be used for - security purposes. - - Returns: - A deterministically-seeded v4 UUID. - """ - return uuid.UUID(bytes=random().getrandbits(16 * 8).to_bytes(16, "big"), version=4) - - -async def sleep(duration: float | timedelta, *, summary: str | None = None) -> None: - """Sleep for the given duration. - - Args: - duration: Duration to sleep in seconds or as a timedelta. - summary: A single-line fixed summary for this timer that may appear in UI/CLI. - This can be in single-line Temporal markdown format. 
- """ - await _Runtime.current().workflow_sleep( - duration=( - duration.total_seconds() if isinstance(duration, timedelta) else duration - ), - summary=summary, - ) - - -async def wait_condition( - fn: Callable[[], bool], - *, - timeout: timedelta | float | None = None, - timeout_summary: str | None = None, -) -> None: - """Wait on a callback to become true. - - This function returns when the callback returns true (invoked each loop - iteration) or the timeout has been reached. - - Args: - fn: Non-async callback that accepts no parameters and returns a boolean. - timeout: Optional number of seconds to wait until throwing - :py:class:`asyncio.TimeoutError`. - timeout_summary: Optional simple string identifying the timer (created if ``timeout`` is - present) that may be visible in UI/CLI. While it can be normal text, it is best to treat - as a timer ID. - """ - await _Runtime.current().workflow_wait_condition( - fn, - timeout=timeout.total_seconds() if isinstance(timeout, timedelta) else timeout, - timeout_summary=timeout_summary, - ) - - -_sandbox_unrestricted = threading.local() -_in_sandbox = threading.local() -_imports_passed_through = threading.local() -_sandbox_import_notification_policy_override = threading.local() - - -class SandboxImportNotificationPolicy(Flag): - """Defines the behavior taken when modules are imported into the sandbox after the workflow is initially loaded or unintentionally missing from the passthrough list.""" - - SILENT = auto() - """Allow imports that do not violate sandbox restrictions and no warnings are generated.""" - WARN_ON_DYNAMIC_IMPORT = auto() - """Allows dynamic imports that do not violate sandbox restrictions but issues a warning when an import is triggered in the sandbox after initial workflow load.""" - WARN_ON_UNINTENTIONAL_PASSTHROUGH = auto() - """Allows imports that do not violate sandbox restrictions but issues a warning when an import is triggered in the sandbox that was unintentionally passed through.""" - 
RAISE_ON_UNINTENTIONAL_PASSTHROUGH = auto() - """Raise an error when an import is triggered in the sandbox that was unintentionally passed through.""" - - -class unsafe: - """Contains static methods that should not normally be called during - workflow execution except in advanced cases. - """ - - def __init__(self) -> None: # noqa: D107 - raise NotImplementedError - - @staticmethod - def in_sandbox() -> bool: - """Whether the code is executing on a sandboxed thread. - - Returns: - True if the code is executing in the sandbox thread. - """ - return getattr(_in_sandbox, "value", False) - - @staticmethod - def _set_in_sandbox(v: bool) -> None: - _in_sandbox.value = v - - @staticmethod - def is_replaying() -> bool: - """Whether the workflow is currently replaying. - - This includes queries and update validators that occur during replay. - - Returns: - True if the workflow is currently replaying - """ - return _Runtime.current().workflow_is_replaying() - - @staticmethod - def is_replaying_history_events() -> bool: - """Whether the workflow is replaying history events. - - This excludes queries and update validators, which are live operations. - - Returns: - True if replaying history events, False otherwise. - """ - return _Runtime.current().workflow_is_replaying_history_events() - - @staticmethod - def is_read_only() -> bool: - """Whether the workflow is currently in read-only mode. - - Read-only mode occurs during queries and update validators where - side effects are not allowed. - - Returns: - True if the workflow is in read-only mode, False otherwise. - """ - return _Runtime.current().workflow_is_read_only() - - @staticmethod - def is_sandbox_unrestricted() -> bool: - """Whether the current block of code is not restricted via sandbox. - - Returns: - True if the current code is not restricted in the sandbox. 
- """ - # Activations happen in different threads than init and possibly the - # local hasn't been initialized in _that_ thread, so we allow unset here - # instead of just setting value = False globally. - return getattr(_sandbox_unrestricted, "value", False) - - @staticmethod - @contextmanager - def sandbox_unrestricted() -> Iterator[None]: - """A context manager to run code without sandbox restrictions.""" - # Only apply if not already applied. Nested calls just continue - # unrestricted. - if unsafe.is_sandbox_unrestricted(): - yield None - return - _sandbox_unrestricted.value = True - try: - yield None - finally: - _sandbox_unrestricted.value = False - - @staticmethod - def is_imports_passed_through() -> bool: - """Whether the current block of code is in - :py:meth:imports_passed_through. - - Returns: - True if the current code's imports will be passed through - """ - # See comment in is_sandbox_unrestricted for why we allow unset instead - # of just global false. - return getattr(_imports_passed_through, "value", False) - - @staticmethod - @contextmanager - def imports_passed_through() -> Iterator[None]: - """Context manager to mark all imports that occur within it as passed - through (meaning not reloaded by the sandbox). - """ - # Only apply if not already applied. Nested calls just continue - # passed through. 
- if unsafe.is_imports_passed_through(): - yield None - return - _imports_passed_through.value = True - try: - yield None - finally: - _imports_passed_through.value = False - - @staticmethod - def current_import_notification_policy_override() -> ( - SandboxImportNotificationPolicy | None - ): - """Gets the current import notification policy override if one is set.""" - applied_policy = getattr( - _sandbox_import_notification_policy_override, - "value", - None, - ) - return applied_policy - - @staticmethod - @contextmanager - def sandbox_import_notification_policy( - policy: SandboxImportNotificationPolicy, - ) -> Iterator[None]: - """Context manager to apply the given import notification policy.""" - original_policy = _sandbox_import_notification_policy_override.value = getattr( - _sandbox_import_notification_policy_override, - "value", - None, - ) - _sandbox_import_notification_policy_override.value = policy - try: - yield None - finally: - _sandbox_import_notification_policy_override.value = original_policy - - -def _build_log_context( - workflow_details: Mapping[str, Any] | None, - update_details: Mapping[str, Any] | None = None, - *, - workflow_info_on_message: bool = True, - workflow_info_on_extra: bool = True, - full_workflow_info: Info | None = None, -) -> tuple[dict[str, Any], dict[str, Any]]: - """Build the msg_extra suffix and extra dict entries for a temporal log record. - - Returns: - (msg_extra, extra) where msg_extra should be appended to the log message - and extra should be merged into the log record's extra dict. 
- """ - msg_extra: dict[str, Any] = {} - extra: dict[str, Any] = {} - - if workflow_details is not None: - if workflow_info_on_message: - msg_extra.update(workflow_details) - if workflow_info_on_extra: - extra["temporal_workflow"] = dict(workflow_details) - - if update_details is not None: - if workflow_info_on_message: - msg_extra.update(update_details) - if workflow_info_on_extra: - extra.setdefault("temporal_workflow", {}).update(update_details) - - if full_workflow_info is not None: - extra["workflow_info"] = full_workflow_info - - return msg_extra, extra - - -class LoggerAdapter(logging.LoggerAdapter): - """Adapter that adds details to the log about the running workflow. - - Attributes: - workflow_info_on_message: Boolean for whether a string representation of - a dict of some workflow info will be appended to each message. - Default is True. - workflow_info_on_extra: Boolean for whether a ``temporal_workflow`` - dictionary value will be added to the ``extra`` dictionary with some - workflow info, making it present on the ``LogRecord.__dict__`` for - use by others. Default is True. - full_workflow_info_on_extra: Boolean for whether a ``workflow_info`` - value will be added to the ``extra`` dictionary with the entire - workflow info, making it present on the ``LogRecord.__dict__`` for - use by others. Default is False. - log_during_replay: Boolean for whether logs should occur during replay. - Default is False. - - Values added to ``extra`` are merged with the ``extra`` dictionary from a - logging call, with values from the logging call taking precedence. I.e. the - behavior is that of ``merge_extra=True`` in Python >= 3.13. 
- """ - - def __init__(self, logger: logging.Logger, extra: Mapping[str, Any] | None) -> None: - """Create the logger adapter.""" - super().__init__(logger, extra or {}) - self.workflow_info_on_message = True - self.workflow_info_on_extra = True - self.full_workflow_info_on_extra = False - self.log_during_replay = False - self.disable_sandbox = False - - def process( - self, msg: Any, kwargs: MutableMapping[str, Any] - ) -> tuple[Any, MutableMapping[str, Any]]: - """Override to add workflow details.""" - msg_extra: dict[str, Any] = {} - extra: dict[str, Any] = {} - - if ( - self.workflow_info_on_message - or self.workflow_info_on_extra - or self.full_workflow_info_on_extra - ): - runtime = _Runtime.maybe_current() - update_info = current_update_info() - msg_extra, extra = _build_log_context( - runtime.logger_details if runtime else None, - update_info._logger_details if update_info else None, - workflow_info_on_message=self.workflow_info_on_message, - workflow_info_on_extra=self.workflow_info_on_extra, - full_workflow_info=runtime.workflow_info() - if runtime and self.full_workflow_info_on_extra - else None, - ) - - kwargs["extra"] = {**extra, **(kwargs.get("extra") or {})} - if msg_extra: - msg = f"{msg} ({msg_extra})" - return msg, kwargs - - def log( - self, - level: int, - msg: object, - *args: Any, - stacklevel: int = 1, - **kwargs: Any, - ): - """Override to potentially disable the sandbox.""" - if sys.version_info < (3, 11) and stacklevel == 1: - # An additional stacklevel is needed on 3.10 because it doesn't skip internal frames until after stacklevel - # is decremented, so it needs an additional stacklevel to skip the internal frame. 
- stacklevel += 1 # type: ignore[reportUnreachable] - stacklevel += 1 - if self.disable_sandbox: - with unsafe.sandbox_unrestricted(): - with unsafe.imports_passed_through(): - super().log(level, msg, *args, stacklevel=stacklevel, **kwargs) - else: - super().log(level, msg, *args, stacklevel=stacklevel, **kwargs) - - def isEnabledFor(self, level: int) -> bool: - """Override to ignore replay logs.""" - if not self.log_during_replay and unsafe.is_replaying_history_events(): - return False - return super().isEnabledFor(level) - - @property - def base_logger(self) -> logging.Logger: - """Underlying logger usable for actions such as adding - handlers/formatters. - """ - return self.logger - - def unsafe_disable_sandbox(self, value: bool = True): - """Disable the sandbox during log processing. - Can be turned back on with unsafe_disable_sandbox(False). - """ - self.disable_sandbox = value - - -logger = LoggerAdapter(logging.getLogger(__name__), None) -"""Logger that will have contextual workflow details embedded. - -Logs are skipped during replay by default. 
-""" - - -@dataclass(frozen=True) -class _Definition: - name: str | None - cls: type - run_fn: Callable[..., Awaitable] - signals: Mapping[str | None, _SignalDefinition] - queries: Mapping[str | None, _QueryDefinition] - updates: Mapping[str | None, _UpdateDefinition] - sandboxed: bool - failure_exception_types: Sequence[type[BaseException]] - # Types loaded on post init if both are None - arg_types: list[type] | None = None - ret_type: type | None = None - versioning_behavior: temporalio.common.VersioningBehavior | None = None - dynamic_config_fn: Callable[..., DynamicWorkflowConfig] | None = None - - @staticmethod - def from_class(cls: type) -> _Definition | None: # type: ignore[reportSelfClsParameterName] - # We make sure to only return it if it's on _this_ class - defn = getattr(cls, "__temporal_workflow_definition", None) - if defn and defn.cls == cls: - return defn - return None - - @staticmethod - def must_from_class(cls: type) -> _Definition: # type: ignore[reportSelfClsParameterName] - ret = _Definition.from_class(cls) - if ret: - return ret - cls_name = getattr(cls, "__name__", "") - raise ValueError( - f"Workflow {cls_name} missing attributes, was it decorated with @workflow.defn?" - ) - - @staticmethod - def from_run_fn(fn: Callable[..., Awaitable[Any]]) -> _Definition | None: - return getattr(fn, "__temporal_workflow_definition", None) - - @staticmethod - def must_from_run_fn(fn: Callable[..., Awaitable[Any]]) -> _Definition: - ret = _Definition.from_run_fn(fn) - if ret: - return ret - fn_name = getattr(fn, "__qualname__", "") - raise ValueError( - f"Function {fn_name} missing attributes, was it decorated with @workflow.run and was its class decorated with @workflow.defn?" 
- ) - - @classmethod - def get_name_and_result_type( - cls, name_or_run_fn: str | Callable[..., Awaitable[Any]] - ) -> tuple[str, type | None]: - if isinstance(name_or_run_fn, str): - return name_or_run_fn, None - elif callable(name_or_run_fn): - defn = cls.must_from_run_fn(name_or_run_fn) - if not defn.name: - raise ValueError("Cannot invoke dynamic workflow explicitly") - return defn.name, defn.ret_type - else: - raise TypeError("Workflow must be a string or callable") # type: ignore[reportUnreachable] - - @staticmethod - def _apply_to_class( - cls: type, # type: ignore[reportSelfClsParameterName] - *, - workflow_name: str | None, - sandboxed: bool, - failure_exception_types: Sequence[type[BaseException]], - versioning_behavior: temporalio.common.VersioningBehavior, - ) -> None: - # Check it's not being doubly applied - if _Definition.from_class(cls): - raise ValueError("Class already contains workflow definition") - issues: list[str] = [] - - # Collect run fn and all signal/query/update fns - init_fn: Callable[..., None] | None = None - run_fn: Callable[..., Awaitable[Any]] | None = None - dynamic_config_fn: Callable[..., DynamicWorkflowConfig] | None = None - seen_run_attr = False - signals: dict[str | None, _SignalDefinition] = {} - queries: dict[str | None, _QueryDefinition] = {} - updates: dict[str | None, _UpdateDefinition] = {} - for name, member in inspect.getmembers(cls): - if hasattr(member, "__temporal_workflow_run"): - seen_run_attr = True - if not _is_unbound_method_on_cls(member, cls): - issues.append( - f"@workflow.run method {name} must be defined on {cls.__qualname__}" - ) - elif run_fn is not None: - issues.append( - f"Multiple @workflow.run methods found (at least on {name} and {run_fn.__name__})" - ) - else: - # We can guarantee the @workflow.run decorator did - # validation of the function itself - run_fn = member - elif hasattr(member, "__temporal_signal_definition"): - signal_defn = cast( - _SignalDefinition, getattr(member, 
"__temporal_signal_definition") - ) - if signal_defn.name in signals: - defn_name = signal_defn.name or "" - # TODO(cretz): Remove cast when https://github.com/python/mypy/issues/5485 fixed - other_fn = cast(Callable, signals[signal_defn.name].fn) - issues.append( - f"Multiple signal methods found for {defn_name} " - f"(at least on {name} and {other_fn.__name__})" - ) - else: - signals[signal_defn.name] = signal_defn - elif hasattr(member, "__temporal_query_definition"): - query_defn = cast( - _QueryDefinition, getattr(member, "__temporal_query_definition") - ) - if query_defn.name in queries: - defn_name = query_defn.name or "" - issues.append( - f"Multiple query methods found for {defn_name} " - f"(at least on {name} and {queries[query_defn.name].fn.__name__})" - ) - else: - queries[query_defn.name] = query_defn - elif name == "__init__" and hasattr(member, "__temporal_workflow_init"): - init_fn = member - elif hasattr(member, "__temporal_workflow_dynamic_config"): - if workflow_name: - issues.append( - "@workflow.dynamic_config can only be used in dynamic workflows, but " - f"workflow class {workflow_name} ({cls.__name__}) is not dynamic" - ) - if dynamic_config_fn: - issues.append( - "@workflow.dynamic_config can only be defined once per workflow" - ) - dynamic_config_fn = member - elif isinstance(member, UpdateMethodMultiParam): - update_defn = member._defn - if update_defn.name in updates: - defn_name = update_defn.name or "" - issues.append( - f"Multiple update methods found for {defn_name} " - f"(at least on {name} and {updates[update_defn.name].fn.__name__})" - ) - elif update_defn.validator and not _parameters_identical_up_to_naming( - update_defn.fn, update_defn.validator - ): - issues.append( - f"Update validator method {update_defn.validator.__name__} parameters " - f"do not match update method {update_defn.fn.__name__} parameters" - ) - else: - updates[update_defn.name] = update_defn - - # Check base classes haven't defined things with different 
decorators - for base_cls in inspect.getmro(cls)[1:]: - for _, base_member in inspect.getmembers(base_cls): - # We only care about methods defined on this class - if not inspect.isfunction(base_member) or not _is_unbound_method_on_cls( - base_member, base_cls - ): - continue - if hasattr(base_member, "__temporal_workflow_run"): - seen_run_attr = True - if not run_fn or base_member.__name__ != run_fn.__name__: - issues.append( - f"@workflow.run defined on {base_member.__qualname__} but not on the override" - ) - elif hasattr(base_member, "__temporal_signal_definition"): - signal_defn = cast( - _SignalDefinition, - getattr(base_member, "__temporal_signal_definition"), - ) - if signal_defn.name not in signals: - issues.append( - f"@workflow.signal defined on {base_member.__qualname__} but not on the override" - ) - elif hasattr(base_member, "__temporal_query_definition"): - query_defn = cast( - _QueryDefinition, - getattr(base_member, "__temporal_query_definition"), - ) - if query_defn.name not in queries: - issues.append( - f"@workflow.query defined on {base_member.__qualname__} but not on the override" - ) - elif isinstance(base_member, UpdateMethodMultiParam): - update_defn = base_member._defn - if update_defn.name not in updates: - issues.append( - f"@workflow.update defined on {base_member.__qualname__} but not on the override" - ) - - if not seen_run_attr: - issues.append("Missing @workflow.run method") - if init_fn and run_fn: - if not _parameters_identical_up_to_naming(init_fn, run_fn): - issues.append( - "@workflow.init and @workflow.run method parameters do not match" - ) - if issues: - if len(issues) == 1: - raise ValueError(f"Invalid workflow class: {issues[0]}") - raise ValueError( - f"Invalid workflow class for {len(issues)} reasons: {', '.join(issues)}" - ) - - assert run_fn - assert seen_run_attr - defn = _Definition( - name=workflow_name, - cls=cls, - run_fn=run_fn, - signals=signals, - queries=queries, - updates=updates, - sandboxed=sandboxed, - 
failure_exception_types=failure_exception_types, - versioning_behavior=versioning_behavior, - dynamic_config_fn=dynamic_config_fn, - ) - setattr(cls, "__temporal_workflow_definition", defn) - setattr(run_fn, "__temporal_workflow_definition", defn) - - def __post_init__(self) -> None: - if self.arg_types is None and self.ret_type is None: - dynamic = self.name is None - arg_types, ret_type = temporalio.common._type_hints_from_func(self.run_fn) - # If dynamic, must be a sequence of raw values - if dynamic and ( - not arg_types - or len(arg_types) != 1 - or arg_types[0] != Sequence[temporalio.common.RawValue] - ): - raise TypeError( - "Dynamic workflow must accept a single Sequence[temporalio.common.RawValue]" - ) - object.__setattr__(self, "arg_types", arg_types) - object.__setattr__(self, "ret_type", ret_type) - - -def _parameters_identical_up_to_naming(fn1: Callable, fn2: Callable) -> bool: - """Return True if the functions have identical parameter lists, ignoring parameter names.""" - - def params(fn: Callable) -> list[inspect.Parameter]: - # Ignore name when comparing parameters (remaining fields are kind, - # default, and annotation). - return [p.replace(name="x") for p in inspect.signature(fn).parameters.values()] - - # We require that any type annotations present match exactly; i.e. we do - # not support any notion of subtype compatibility. 
- return params(fn1) == params(fn2) - - -# Async safe version of partial -def _bind_method(obj: Any, fn: Callable[..., Any]) -> Callable[..., Any]: - # Curry instance on the definition function since that represents an - # unbound method - if inspect.iscoroutinefunction(fn): - # We cannot use functools.partial here because in <= 3.7 that isn't - # considered an inspect.iscoroutinefunction - fn = cast(Callable[..., Awaitable[Any]], fn) - - async def with_object(*args: Any, **kwargs: Any) -> Any: - return await fn(obj, *args, **kwargs) - - return with_object - return partial(fn, obj) - - -# Returns true if normal form, false if vararg form -def _assert_dynamic_handler_args( - fn: Callable, arg_types: list[type] | None, is_method: bool -) -> bool: - # Dynamic query/signal/update must have three args: self, name, and - # Sequence[RawValue]. An older form accepted varargs for the third param for signals/queries so - # we will too (but will warn in the signal/query code). - params = list(inspect.signature(fn).parameters.values()) - total_expected_params = 3 if is_method else 2 - if ( - len(params) == total_expected_params - and params[-2].kind is inspect.Parameter.POSITIONAL_OR_KEYWORD - and params[-1].kind is inspect.Parameter.VAR_POSITIONAL - ): - # Old var-arg form - return False - if ( - not arg_types - or len(arg_types) != 2 - or arg_types[0] != str - or ( - arg_types[1] != Sequence[temporalio.common.RawValue] - and arg_types[1] != typing.Sequence[temporalio.common.RawValue] # type: ignore[reportDeprecated] - ) - ): - raise RuntimeError( - "Dynamic handler must have 3 arguments: self, str, and Sequence[temporalio.common.RawValue]" - ) - return True - - -@dataclass(frozen=True) -class _SignalDefinition: - # None if dynamic - name: str | None - fn: Callable[..., None | Awaitable[None]] - is_method: bool - unfinished_policy: HandlerUnfinishedPolicy = ( - HandlerUnfinishedPolicy.WARN_AND_ABANDON - ) - description: str | None = None - # Types loaded on post init if None 
- arg_types: list[type] | None = None - dynamic_vararg: bool = False - - @staticmethod - def from_fn(fn: Callable) -> _SignalDefinition | None: - return getattr(fn, "__temporal_signal_definition", None) - - @staticmethod - def must_name_from_fn_or_str(signal: str | Callable) -> str: - if callable(signal): - defn = _SignalDefinition.from_fn(signal) - if not defn: - raise RuntimeError( - f"Signal definition not found on {signal.__qualname__}, " - "is it decorated with @workflow.signal?" - ) - elif not defn.name: - raise RuntimeError("Cannot invoke dynamic signal definition") - # TODO(cretz): Check count/type of args at runtime? - return defn.name - return str(signal) - - def __post_init__(self) -> None: - if self.arg_types is None: - arg_types, _ = temporalio.common._type_hints_from_func(self.fn) - # If dynamic, assert it - if not self.name: - object.__setattr__( - self, - "dynamic_vararg", - not _assert_dynamic_handler_args( - self.fn, arg_types, self.is_method - ), - ) - object.__setattr__(self, "arg_types", arg_types) - - def bind_fn(self, obj: Any) -> Callable[..., Any]: - return _bind_method(obj, self.fn) - - -@dataclass(frozen=True) -class _QueryDefinition: - # None if dynamic - name: str | None - fn: Callable[..., Any] - is_method: bool - description: str | None = None - # Types loaded on post init if both are None - arg_types: list[type] | None = None - ret_type: type | None = None - dynamic_vararg: bool = False - - @staticmethod - def from_fn(fn: Callable) -> _QueryDefinition | None: - return getattr(fn, "__temporal_query_definition", None) - - def __post_init__(self) -> None: - if self.arg_types is None and self.ret_type is None: - arg_types, ret_type = temporalio.common._type_hints_from_func(self.fn) - # If dynamic, assert it - if not self.name: - object.__setattr__( - self, - "dynamic_vararg", - not _assert_dynamic_handler_args( - self.fn, arg_types, self.is_method - ), - ) - object.__setattr__(self, "arg_types", arg_types) - object.__setattr__(self, 
"ret_type", ret_type) - - def bind_fn(self, obj: Any) -> Callable[..., Any]: - return _bind_method(obj, self.fn) - - -@dataclass(frozen=True) -class _UpdateDefinition: - # None if dynamic - name: str | None - fn: Callable[..., Any | Awaitable[Any]] - is_method: bool - unfinished_policy: HandlerUnfinishedPolicy = ( - HandlerUnfinishedPolicy.WARN_AND_ABANDON - ) - description: str | None = None - # Types loaded on post init if None - arg_types: list[type] | None = None - ret_type: type | None = None - validator: Callable[..., None] | None = None - dynamic_vararg: bool = False - - def __post_init__(self) -> None: - if self.arg_types is None: - arg_types, ret_type = temporalio.common._type_hints_from_func(self.fn) - # Disallow dynamic varargs - if not self.name and not _assert_dynamic_handler_args( - self.fn, arg_types, self.is_method - ): - raise RuntimeError( - "Dynamic updates do not support a vararg third param, use Sequence[RawValue]", - ) - object.__setattr__(self, "arg_types", arg_types) - object.__setattr__(self, "ret_type", ret_type) - - def bind_fn(self, obj: Any) -> Callable[..., Any]: - return _bind_method(obj, self.fn) - - def bind_validator(self, obj: Any) -> Callable[..., Any]: - if self.validator is not None: - return _bind_method(obj, self.validator) - return lambda *args, **kwargs: None - - def set_validator(self, validator: Callable[..., None]) -> None: - if self.validator: - raise RuntimeError(f"Validator already set for update {self.name}") - object.__setattr__(self, "validator", validator) - - @classmethod - def get_name_and_result_type( - cls, - name_or_update_fn: str | Callable[..., Any], - ) -> tuple[str, type | None]: - if isinstance(name_or_update_fn, temporalio.workflow.UpdateMethodMultiParam): - defn = name_or_update_fn._defn - if not defn.name: - raise RuntimeError("Cannot invoke dynamic update definition") - # TODO(cretz): Check count/type of args at runtime? 
- return defn.name, defn.ret_type - else: - return str(name_or_update_fn), None - - -# See https://mypy.readthedocs.io/en/latest/runtime_troubles.html#using-classes-that-are-generic-in-stubs-but-not-at-runtime -if TYPE_CHECKING: - - class _AsyncioTask(asyncio.Task[AnyType]): - pass - -else: - # TODO: inherited classes should be other way around? - class _AsyncioTask(Generic[AnyType], asyncio.Task): - pass - - -class ActivityHandle(_AsyncioTask[ReturnType]): # type: ignore[type-var] - """Handle returned from :py:func:`start_activity` and - :py:func:`start_local_activity`. - - This extends :py:class:`asyncio.Task` and supports all task features. - """ - - pass - - -class ActivityCancellationType(IntEnum): - """How an activity cancellation should be handled.""" - - TRY_CANCEL = int( - temporalio.bridge.proto.workflow_commands.ActivityCancellationType.TRY_CANCEL - ) - WAIT_CANCELLATION_COMPLETED = int( - temporalio.bridge.proto.workflow_commands.ActivityCancellationType.WAIT_CANCELLATION_COMPLETED - ) - ABANDON = int( - temporalio.bridge.proto.workflow_commands.ActivityCancellationType.ABANDON - ) - - -class ActivityConfig(TypedDict, total=False): - """TypedDict of config that can be used for :py:func:`start_activity` and - :py:func:`execute_activity`. 
- """ - - task_queue: str | None - schedule_to_close_timeout: timedelta | None - schedule_to_start_timeout: timedelta | None - start_to_close_timeout: timedelta | None - heartbeat_timeout: timedelta | None - retry_policy: temporalio.common.RetryPolicy | None - cancellation_type: ActivityCancellationType - activity_id: str | None - versioning_intent: VersioningIntent | None - summary: str | None - priority: temporalio.common.Priority - - -# Overload for async no-param activity -@overload -def start_activity( - activity: CallableAsyncNoParam[ReturnType], - *, - task_queue: str | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - heartbeat_timeout: timedelta | None = None, - retry_policy: temporalio.common.RetryPolicy | None = None, - cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, - activity_id: str | None = None, - versioning_intent: VersioningIntent | None = None, - summary: str | None = None, - priority: temporalio.common.Priority = temporalio.common.Priority.default, -) -> ActivityHandle[ReturnType]: ... - - -# Overload for sync no-param activity -@overload -def start_activity( - activity: CallableSyncNoParam[ReturnType], - *, - task_queue: str | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - heartbeat_timeout: timedelta | None = None, - retry_policy: temporalio.common.RetryPolicy | None = None, - cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, - activity_id: str | None = None, - versioning_intent: VersioningIntent | None = None, - summary: str | None = None, - priority: temporalio.common.Priority = temporalio.common.Priority.default, -) -> ActivityHandle[ReturnType]: ... 
# Overload for async single-param activity.
# NOTE: this and the sibling @overload stubs exist only for the type checker;
# the runtime behavior lives in the single non-overload start_activity
# definition that follows the overload group.
@overload
def start_activity(
    activity: CallableAsyncSingleParam[ParamType, ReturnType],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync single-param activity
@overload
def start_activity(
    activity: CallableSyncSingleParam[ParamType, ReturnType],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...
# Overload for async multi-param activity.
# Multi-param forms take the arguments as a required keyword-only ``args``
# sequence rather than positionally.
@overload
def start_activity(
    activity: Callable[..., Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync multi-param activity
@overload
def start_activity(
    activity: Callable[..., ReturnType],
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...
# Overload for string-name activity.
# Only the string form accepts ``result_type``, since the result type cannot
# be inferred from a name alone.
@overload
def start_activity(
    activity: str,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    task_queue: str | None = None,
    result_type: type | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[Any]: ...


def start_activity(
    activity: Any,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    task_queue: str | None = None,
    result_type: type | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[Any]:
    """Start an activity and return its handle.

    At least one of ``schedule_to_close_timeout`` or ``start_to_close_timeout``
    must be present.

    Args:
        activity: Activity name or function reference.
        arg: Single argument to the activity.
        args: Multiple arguments to the activity. Cannot be set if arg is.
        task_queue: Task queue to run the activity on. Defaults to the current
            workflow's task queue.
        result_type: For string activities, this can set the specific result
            type hint to deserialize into.
        schedule_to_close_timeout: Max amount of time the activity can take from
            first being scheduled to being completed before it times out. This
            is inclusive of all retries.
        schedule_to_start_timeout: Max amount of time the activity can take to
            be started from first being scheduled.
        start_to_close_timeout: Max amount of time a single activity run can
            take from when it starts to when it completes. This is per retry.
        heartbeat_timeout: How frequently an activity must invoke heartbeat
            while running before it is considered timed out.
        retry_policy: How an activity is retried on failure. If unset, a
            server-defined default is used. Set maximum attempts to 1 to disable
            retries.
        cancellation_type: How the activity is treated when it is cancelled from
            the workflow.
        activity_id: Optional unique identifier for the activity. This is an
            advanced setting that should not be set unless users are sure they
            need to. Contact Temporal before setting this value.
        versioning_intent: When using the Worker Versioning feature, specifies whether this Activity
            should run on a worker with a compatible Build Id or not.
            Deprecated: Use Worker Deployment versioning instead.
        summary: A single-line fixed summary for this activity that may appear in UI/CLI.
            This can be in single-line Temporal markdown format.
        priority: Priority of the activity.

    Returns:
        An activity handle to the activity which is an async task.
    """
    # Delegate to the current in-workflow runtime; ``_arg_or_args`` merges the
    # mutually-exclusive ``arg``/``args`` forms into a single positional tuple.
    return _Runtime.current().workflow_start_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        task_queue=task_queue,
        result_type=result_type,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        heartbeat_timeout=heartbeat_timeout,
        retry_policy=retry_policy,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        versioning_intent=versioning_intent,
        summary=summary,
        priority=priority,
    )


# Overload for async no-param activity.
# The execute_activity overload group mirrors start_activity but resolves to
# the activity result instead of a handle.
@overload
async def execute_activity(
    activity: CallableAsyncNoParam[ReturnType],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...
# Overload for sync no-param activity
@overload
async def execute_activity(
    activity: CallableSyncNoParam[ReturnType],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for async single-param activity
@overload
async def execute_activity(
    activity: CallableAsyncSingleParam[ParamType, ReturnType],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...
# Overload for sync single-param activity
@overload
async def execute_activity(
    activity: CallableSyncSingleParam[ParamType, ReturnType],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for async multi-param activity (arguments via keyword-only ``args``)
@overload
async def execute_activity(
    activity: Callable[..., Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...
# Overload for sync multi-param activity
@overload
async def execute_activity(
    activity: Callable[..., ReturnType],
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> Any: ...


# Overload for string-name activity (untyped result, hence ``Any`` plus
# optional ``result_type`` to guide deserialization)
@overload
async def execute_activity(
    activity: str,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    task_queue: str | None = None,
    result_type: type | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> Any: ...
async def execute_activity(
    activity: Any,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    task_queue: str | None = None,
    result_type: type | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> Any:
    """Start an activity and wait for completion.

    This is a shortcut for ``await`` :py:meth:`start_activity`.
    """
    # We call the runtime directly instead of top-level start_activity to ensure
    # we don't miss new parameters
    return await _Runtime.current().workflow_start_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        task_queue=task_queue,
        result_type=result_type,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        heartbeat_timeout=heartbeat_timeout,
        retry_policy=retry_policy,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        versioning_intent=versioning_intent,
        summary=summary,
        priority=priority,
    )


# Overload for async no-param activity.
# The *_class variants mirror start_activity/execute_activity but take a
# callable class type rather than a function reference.
@overload
def start_activity_class(
    activity: type[CallableAsyncNoParam[ReturnType]],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync no-param activity
@overload
def start_activity_class(
    activity: type[CallableSyncNoParam[ReturnType]],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for async single-param activity
@overload
def start_activity_class(
    activity: type[CallableAsyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...
# Overload for sync single-param activity
@overload
def start_activity_class(
    activity: type[CallableSyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for async multi-param activity
@overload
def start_activity_class(
    activity: type[Callable[..., Awaitable[ReturnType]]],  # type: ignore[reportOverlappingOverload]
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...
# Overload for sync multi-param activity
@overload
def start_activity_class(  # type: ignore[reportOverlappingOverload]
    activity: type[Callable[..., ReturnType]],  # type: ignore[reportOverlappingOverload]
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


def start_activity_class(
    activity: type[Callable],  # type: ignore[reportOverlappingOverload]
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[Any]:
    """Start an activity from a callable class.

    See :py:meth:`start_activity` for parameter and return details.
    """
    # Same delegation as start_activity; ``result_type=None`` because there is
    # no string-name form here to need an explicit result hint.
    return _Runtime.current().workflow_start_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        task_queue=task_queue,
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        heartbeat_timeout=heartbeat_timeout,
        retry_policy=retry_policy,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        versioning_intent=versioning_intent,
        summary=summary,
        priority=priority,
    )


# Overload for async no-param activity
@overload
async def execute_activity_class(
    activity: type[CallableAsyncNoParam[ReturnType]],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...
# Overload for sync no-param activity
@overload
async def execute_activity_class(
    activity: type[CallableSyncNoParam[ReturnType]],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for async single-param activity
@overload
async def execute_activity_class(
    activity: type[CallableAsyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...
# Overload for sync single-param activity
@overload
async def execute_activity_class(
    activity: type[CallableSyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for async multi-param activity
@overload
async def execute_activity_class(
    activity: type[Callable[..., Awaitable[ReturnType]]],  # type: ignore[reportOverlappingOverload]
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for sync multi-param activity (multiple args passed via ``args=``)
@overload
async def execute_activity_class(
    activity: type[Callable[..., ReturnType]],  # type: ignore[reportOverlappingOverload]
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


async def execute_activity_class(
    activity: type[Callable],  # type: ignore[reportOverlappingOverload]
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> Any:
    """Start an activity from a callable class and wait for completion.

    This is a shortcut for ``await`` :py:meth:`start_activity_class`.
    """
    # Delegate to the runtime directly (rather than a top-level helper) and
    # forward every keyword so newly-added parameters are not dropped.
    return await _Runtime.current().workflow_start_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        task_queue=task_queue,
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        heartbeat_timeout=heartbeat_timeout,
        retry_policy=retry_policy,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        versioning_intent=versioning_intent,
        summary=summary,
        priority=priority,
    )


# Overload for async no-param activity
@overload
def start_activity_method(
    activity: MethodAsyncNoParam[SelfType, ReturnType],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync no-param activity
@overload
def start_activity_method(
    activity: MethodSyncNoParam[SelfType, ReturnType],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for async single-param activity
@overload
def start_activity_method(
    activity: MethodAsyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync single-param activity
@overload
def start_activity_method(
    activity: MethodSyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for async multi-param activity (multiple args passed via ``args=``)
@overload
def start_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync multi-param activity (multiple args passed via ``args=``)
@overload
def start_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], ReturnType],
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


def start_activity_method(
    activity: Callable,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[Any]:
    """Start an activity from a method.

    See :py:meth:`start_activity` for parameter and return details.
    """
    # Delegate to the current workflow runtime, forwarding all keywords.
    return _Runtime.current().workflow_start_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        task_queue=task_queue,
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        heartbeat_timeout=heartbeat_timeout,
        retry_policy=retry_policy,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        versioning_intent=versioning_intent,
        summary=summary,
        priority=priority,
    )


# Overload for async no-param activity
@overload
async def execute_activity_method(
    activity: MethodAsyncNoParam[SelfType, ReturnType],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for sync no-param activity
@overload
async def execute_activity_method(
    activity: MethodSyncNoParam[SelfType, ReturnType],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for async single-param activity
@overload
async def execute_activity_method(
    activity: MethodAsyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for sync single-param activity
@overload
async def execute_activity_method(
    activity: MethodSyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for async multi-param activity (multiple args passed via ``args=``)
@overload
async def execute_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for sync multi-param activity (multiple args passed via ``args=``)
@overload
async def execute_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], ReturnType],
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


async def execute_activity_method(
    activity: Callable,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> Any:
    """Start an activity from a method and wait for completion.

    This is a shortcut for ``await`` :py:meth:`start_activity_method`.
    """
    # We call the runtime directly instead of top-level start_activity to ensure
    # we don't miss new parameters
    return await _Runtime.current().workflow_start_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        task_queue=task_queue,
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        heartbeat_timeout=heartbeat_timeout,
        retry_policy=retry_policy,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        versioning_intent=versioning_intent,
        summary=summary,
        priority=priority,
    )


class LocalActivityConfig(TypedDict, total=False):
    """TypedDict of config that can be used for :py:func:`start_local_activity`
    and :py:func:`execute_local_activity`.

    All keys are optional (``total=False``) and mirror the keyword parameters
    of those functions.
    """

    schedule_to_close_timeout: timedelta | None
    schedule_to_start_timeout: timedelta | None
    start_to_close_timeout: timedelta | None
    retry_policy: temporalio.common.RetryPolicy | None
    local_retry_threshold: timedelta | None
    cancellation_type: ActivityCancellationType
    activity_id: str | None
    summary: str | None


# Overload for async no-param activity
@overload
def start_local_activity(
    activity: CallableAsyncNoParam[ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync no-param activity.
# NOTE: keyword-only parameter order normalized to match the sibling
# overloads (``activity_id`` was previously listed first); keyword-only
# reordering is invisible to callers.
@overload
def start_local_activity(
    activity: CallableSyncNoParam[ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for async single-param activity
@overload
def start_local_activity(
    activity: CallableAsyncSingleParam[ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync single-param activity
@overload
def start_local_activity(
    activity: CallableSyncSingleParam[ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for async multi-param activity (multiple args passed via ``args=``)
@overload
def start_local_activity(
    activity: Callable[..., Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync multi-param activity (multiple args passed via ``args=``)
@overload
def start_local_activity(
    activity: Callable[..., ReturnType],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for string-name activity (``result_type`` may refine deserialization)
@overload
def start_local_activity(
    activity: str,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    result_type: type | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[Any]: ...


def start_local_activity(
    activity: Any,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    result_type: type | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[Any]:
    """Start a local activity and return its handle.

    At least one of ``schedule_to_close_timeout`` or ``start_to_close_timeout``
    must be present.

    Args:
        activity: Activity name or function reference.
        arg: Single argument to the activity.
        args: Multiple arguments to the activity. Cannot be set if arg is.
        result_type: For string activities, this can set the specific result
            type hint to deserialize into.
        schedule_to_close_timeout: Max amount of time the activity can take from
            first being scheduled to being completed before it times out. This
            is inclusive of all retries.
        schedule_to_start_timeout: Max amount of time the activity can take to
            be started from first being scheduled.
        start_to_close_timeout: Max amount of time a single activity run can
            take from when it starts to when it completes. This is per retry.
        retry_policy: How an activity is retried on failure. If unset, an
            SDK-defined default is used. Set maximum attempts to 1 to disable
            retries.
        cancellation_type: How the activity is treated when it is cancelled from
            the workflow.
        activity_id: Optional unique identifier for the activity. This is an
            advanced setting that should not be set unless users are sure they
            need to. Contact Temporal before setting this value.
        summary: Optional summary for the activity.

    Returns:
        An activity handle to the activity which is an async task.
    """
    # Delegate to the current workflow runtime, forwarding all keywords.
    return _Runtime.current().workflow_start_local_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        result_type=result_type,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        retry_policy=retry_policy,
        local_retry_threshold=local_retry_threshold,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        summary=summary,
    )


# Overload for async no-param activity
@overload
async def execute_local_activity(
    activity: CallableAsyncNoParam[ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync no-param activity
@overload
async def execute_local_activity(
    activity: CallableSyncNoParam[ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for async single-param activity
@overload
async def execute_local_activity(
    activity: CallableAsyncSingleParam[ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync single-param activity
@overload
async def execute_local_activity(
    activity: CallableSyncSingleParam[ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for async multi-param activity (multiple args passed via ``args=``)
@overload
async def execute_local_activity(
    activity: Callable[..., Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync multi-param activity (multiple args passed via ``args=``)
@overload
async def execute_local_activity(
    activity: Callable[..., ReturnType],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for string-name activity (``result_type`` may refine deserialization)
@overload
async def execute_local_activity(
    activity: str,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    result_type: type | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> Any: ...


async def execute_local_activity(
    activity: Any,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    result_type: type | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> Any:
    """Start a local activity and wait for completion.

    This is a shortcut for ``await`` :py:meth:`start_local_activity`.
    """
    # We call the runtime directly instead of top-level start_local_activity to
    # ensure we don't miss new parameters
    return await _Runtime.current().workflow_start_local_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        result_type=result_type,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        retry_policy=retry_policy,
        local_retry_threshold=local_retry_threshold,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        summary=summary,
    )


# Overload for async no-param activity.
# FIX: ``summary`` added to the start_local_activity_class overloads — the
# implementation accepts and forwards it, but the overloads omitted it, so
# type checkers rejected valid ``summary=`` calls.
@overload
def start_local_activity_class(
    activity: type[CallableAsyncNoParam[ReturnType]],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync no-param activity
@overload
def start_local_activity_class(
    activity: type[CallableSyncNoParam[ReturnType]],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for async single-param activity.
# FIX: ``summary`` added — the start_local_activity_class implementation
# accepts and forwards it, but these overloads omitted it, so type checkers
# rejected valid ``summary=`` calls.
@overload
def start_local_activity_class(
    activity: type[CallableAsyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync single-param activity
@overload
def start_local_activity_class(
    activity: type[CallableSyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for async multi-param activity (multiple args passed via ``args=``)
@overload
def start_local_activity_class(
    activity: type[Callable[..., Awaitable[ReturnType]]],  # type: ignore[reportInvalidTypeForm]
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync multi-param activity (multiple args passed via ``args=``).
# FIX: ``summary`` added — the implementation below accepts and forwards it,
# but this overload omitted it, so type checkers rejected valid
# ``summary=`` calls.
@overload
def start_local_activity_class(  # type: ignore[reportOverlappingOverload]
    activity: type[Callable[..., ReturnType]],  # type: ignore[reportInvalidTypeForm]
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


def start_local_activity_class(
    activity: type[Callable],  # type: ignore[reportInvalidTypeForm]
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[Any]:
    """Start a local activity from a callable class.

    See :py:meth:`start_local_activity` for parameter and return details.
    """
    # Delegate to the current workflow runtime, forwarding all keywords.
    return _Runtime.current().workflow_start_local_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        retry_policy=retry_policy,
        local_retry_threshold=local_retry_threshold,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        summary=summary,
    )


# Overload for async no-param activity
@overload
async def execute_local_activity_class(
    activity: type[CallableAsyncNoParam[ReturnType]],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync no-param activity
@overload
async def execute_local_activity_class(
    activity: type[CallableSyncNoParam[ReturnType]],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...
# NOTE: the following are typing-only ``@overload`` stubs for
# execute_local_activity_class; the runtime implementation follows the final
# overload and simply delegates to the workflow runtime.

# Overload for async single-param activity
@overload
async def execute_local_activity_class(
    activity: type[CallableAsyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync single-param activity
@overload
async def execute_local_activity_class(
    activity: type[CallableSyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for async multi-param activity
@overload
async def execute_local_activity_class(  # type: ignore[reportOverlappingOverload]
    activity: type[Callable[..., Awaitable[ReturnType]]],  # type: ignore[reportInvalidTypeForm]
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...
# Overload for sync multi-param activity
@overload
async def execute_local_activity_class(
    activity: type[Callable[..., ReturnType]],  # type: ignore[reportInvalidTypeForm]
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


async def execute_local_activity_class(
    activity: type[Callable],  # type: ignore[reportInvalidTypeForm]
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> Any:
    """Start a local activity from a callable class and wait for completion.

    This is a shortcut for ``await`` :py:meth:`start_local_activity_class`.
    """
    # Go through the runtime directly (not the top-level start_local_activity
    # helper) so newly introduced parameters are never silently dropped.
    runtime = _Runtime.current()
    call_args = temporalio.common._arg_or_args(arg, args)
    handle = runtime.workflow_start_local_activity(
        activity,
        *call_args,
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        retry_policy=retry_policy,
        local_retry_threshold=local_retry_threshold,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        summary=summary,
    )
    return await handle


# Overload for async no-param activity
@overload
def start_local_activity_method(
    activity: MethodAsyncNoParam[SelfType, ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync no-param activity
@overload
def start_local_activity_method(
    activity: MethodSyncNoParam[SelfType, ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...
# NOTE: typing-only ``@overload`` stubs for start_local_activity_method; the
# runtime implementation follows the final overload.

# Overload for async single-param activity
@overload
def start_local_activity_method(
    activity: MethodAsyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync single-param activity
@overload
def start_local_activity_method(
    activity: MethodSyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for async multi-param activity
@overload
def start_local_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...
# Overload for sync multi-param activity
@overload
def start_local_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], ReturnType],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


def start_local_activity_method(
    activity: Callable,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[Any]:
    """Start a local activity from a method.

    See :py:meth:`start_local_activity` for parameter and return details.
    """
    # Resolve the current workflow runtime once, then hand it the request.
    runtime = _Runtime.current()
    call_args = temporalio.common._arg_or_args(arg, args)
    return runtime.workflow_start_local_activity(
        activity,
        *call_args,
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        retry_policy=retry_policy,
        local_retry_threshold=local_retry_threshold,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        summary=summary,
    )


# Overload for async no-param activity
@overload
async def execute_local_activity_method(
    activity: MethodAsyncNoParam[SelfType, ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync no-param activity
@overload
async def execute_local_activity_method(
    activity: MethodSyncNoParam[SelfType, ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...
# NOTE: typing-only ``@overload`` stubs for execute_local_activity_method; the
# runtime implementation follows the final overload.

# Overload for async single-param activity
@overload
async def execute_local_activity_method(
    activity: MethodAsyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync single-param activity
@overload
async def execute_local_activity_method(
    activity: MethodSyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for async multi-param activity
@overload
async def execute_local_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...
# Overload for sync multi-param activity
@overload
async def execute_local_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], ReturnType],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


async def execute_local_activity_method(
    activity: Callable,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> Any:
    """Start a local activity from a method and wait for completion.

    This is a shortcut for ``await`` :py:meth:`start_local_activity_method`.
    """
    # We call the runtime directly instead of top-level start_local_activity to
    # ensure we don't miss new parameters
    return await _Runtime.current().workflow_start_local_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        retry_policy=retry_policy,
        local_retry_threshold=local_retry_threshold,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        summary=summary,
    )


class ChildWorkflowHandle(_AsyncioTask[ReturnType], Generic[SelfType, ReturnType]):  # type: ignore[type-var]
    """Handle for interacting with a child workflow.

    This is created via :py:func:`start_child_workflow`.

    This extends :py:class:`asyncio.Task` and supports all task features.
    """

    # NOTE: the methods below are interface stubs; the concrete handle is
    # provided by the workflow runtime implementation elsewhere.

    @property
    def id(self) -> str:
        """ID for the workflow."""
        raise NotImplementedError

    @property
    def first_execution_run_id(self) -> str | None:
        """Run ID for the workflow."""
        raise NotImplementedError

    @overload
    async def signal(
        self,
        signal: MethodSyncOrAsyncNoParam[SelfType, None],
    ) -> None: ...

    @overload
    async def signal(
        self,
        signal: MethodSyncOrAsyncSingleParam[SelfType, ParamType, None],
        arg: ParamType,
    ) -> None: ...

    @overload
    async def signal(
        self,
        signal: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[None] | None],
        *,
        args: Sequence[Any],
    ) -> None: ...

    @overload
    async def signal(
        self,
        signal: str,
        arg: Any = temporalio.common._arg_unset,
        *,
        args: Sequence[Any] = [],
    ) -> None: ...

    async def signal(
        self,
        signal: str | Callable,  # type: ignore[reportUnusedParameter]
        arg: Any = temporalio.common._arg_unset,  # type: ignore[reportUnusedParameter]
        *,
        args: Sequence[Any] = [],  # type: ignore[reportUnusedParameter]
    ) -> None:
        """Signal this child workflow.

        Args:
            signal: Name or method reference for the signal.
            arg: Single argument to the signal.
            args: Multiple arguments to the signal. Cannot be set if arg is.

        """
        raise NotImplementedError


class ChildWorkflowCancellationType(IntEnum):
    """How a child workflow cancellation should be handled."""

    # Each member mirrors the corresponding bridge proto enum value so it can
    # cross the FFI boundary as a plain int.
    ABANDON = int(
        temporalio.bridge.proto.child_workflow.ChildWorkflowCancellationType.ABANDON
    )
    TRY_CANCEL = int(
        temporalio.bridge.proto.child_workflow.ChildWorkflowCancellationType.TRY_CANCEL
    )
    WAIT_CANCELLATION_COMPLETED = int(
        temporalio.bridge.proto.child_workflow.ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED
    )
    WAIT_CANCELLATION_REQUESTED = int(
        temporalio.bridge.proto.child_workflow.ChildWorkflowCancellationType.WAIT_CANCELLATION_REQUESTED
    )


class ParentClosePolicy(IntEnum):
    """How a child workflow should be handled when the parent closes."""

    # Values mirror the bridge proto ParentClosePolicy enum.
    UNSPECIFIED = int(
        temporalio.bridge.proto.child_workflow.ParentClosePolicy.PARENT_CLOSE_POLICY_UNSPECIFIED
    )
    TERMINATE = int(
        temporalio.bridge.proto.child_workflow.ParentClosePolicy.PARENT_CLOSE_POLICY_TERMINATE
    )
    ABANDON = int(
        temporalio.bridge.proto.child_workflow.ParentClosePolicy.PARENT_CLOSE_POLICY_ABANDON
    )
    REQUEST_CANCEL = int(
        temporalio.bridge.proto.child_workflow.ParentClosePolicy.PARENT_CLOSE_POLICY_REQUEST_CANCEL
    )


class ChildWorkflowConfig(TypedDict, total=False):
    """TypedDict of config that can be used for :py:func:`start_child_workflow`
    and :py:func:`execute_child_workflow`.
    """

    # All keys are optional (total=False); each corresponds to the
    # keyword parameter of the same name on start_child_workflow.
    id: str | None
    task_queue: str | None
    cancellation_type: ChildWorkflowCancellationType
    parent_close_policy: ParentClosePolicy
    execution_timeout: timedelta | None
    run_timeout: timedelta | None
    task_timeout: timedelta | None
    id_reuse_policy: temporalio.common.WorkflowIDReusePolicy
    retry_policy: temporalio.common.RetryPolicy | None
    cron_schedule: str
    memo: Mapping[str, Any] | None
    search_attributes: None | (
        temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes
    )
    versioning_intent: VersioningIntent | None
    static_summary: str | None
    static_details: str | None
    priority: temporalio.common.Priority


# Overload for no-param workflow
@overload
async def start_child_workflow(
    workflow: MethodAsyncNoParam[SelfType, ReturnType],
    *,
    id: str | None = None,
    task_queue: str | None = None,
    cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED,
    parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE,
    execution_timeout: timedelta | None = None,
    run_timeout: timedelta | None = None,
    task_timeout: timedelta | None = None,
    id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cron_schedule: str = "",
    memo: Mapping[str, Any] | None = None,
    search_attributes: None
    | (
        temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes
    ) = None,
    versioning_intent: VersioningIntent | None = None,
    static_summary: str | None = None,
    static_details: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ChildWorkflowHandle[SelfType, ReturnType]: ...
# Overload for single-param workflow
# (typing-only stub; the runtime implementation follows the final overload)
@overload
async def start_child_workflow(
    workflow: MethodAsyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    id: str | None = None,
    task_queue: str | None = None,
    cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED,
    parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE,
    execution_timeout: timedelta | None = None,
    run_timeout: timedelta | None = None,
    task_timeout: timedelta | None = None,
    id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cron_schedule: str = "",
    memo: Mapping[str, Any] | None = None,
    search_attributes: None
    | (
        temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes
    ) = None,
    versioning_intent: VersioningIntent | None = None,
    static_summary: str | None = None,
    static_details: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ChildWorkflowHandle[SelfType, ReturnType]: ...
# Overload for multi-param workflow
# (typing-only stub; the runtime implementation follows the final overload)
@overload
async def start_child_workflow(
    workflow: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    id: str | None = None,
    task_queue: str | None = None,
    cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED,
    parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE,
    execution_timeout: timedelta | None = None,
    run_timeout: timedelta | None = None,
    task_timeout: timedelta | None = None,
    id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cron_schedule: str = "",
    memo: Mapping[str, Any] | None = None,
    search_attributes: None
    | (
        temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes
    ) = None,
    versioning_intent: VersioningIntent | None = None,
    static_summary: str | None = None,
    static_details: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ChildWorkflowHandle[SelfType, ReturnType]: ...
# Overload for string-name workflow
# (typing-only stub; only this variant exposes ``result_type`` since a string
# name carries no return-type information)
@overload
async def start_child_workflow(
    workflow: str,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    id: str | None = None,
    task_queue: str | None = None,
    result_type: type | None = None,
    cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED,
    parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE,
    execution_timeout: timedelta | None = None,
    run_timeout: timedelta | None = None,
    task_timeout: timedelta | None = None,
    id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cron_schedule: str = "",
    memo: Mapping[str, Any] | None = None,
    search_attributes: None
    | (
        temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes
    ) = None,
    versioning_intent: VersioningIntent | None = None,
    static_summary: str | None = None,
    static_details: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ChildWorkflowHandle[Any, Any]: ...
async def start_child_workflow(
    workflow: Any,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    id: str | None = None,
    task_queue: str | None = None,
    result_type: type | None = None,
    cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED,
    parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE,
    execution_timeout: timedelta | None = None,
    run_timeout: timedelta | None = None,
    task_timeout: timedelta | None = None,
    id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cron_schedule: str = "",
    memo: Mapping[str, Any] | None = None,
    search_attributes: None
    | (
        temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes
    ) = None,
    versioning_intent: VersioningIntent | None = None,
    static_summary: str | None = None,
    static_details: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ChildWorkflowHandle[Any, Any]:
    """Start a child workflow and return its handle.

    Args:
        workflow: String name or class method decorated with ``@workflow.run``
            for the workflow to start.
        arg: Single argument to the child workflow.
        args: Multiple arguments to the child workflow. Cannot be set if arg is.
        id: Optional unique identifier for the workflow execution. If not set,
            defaults to :py:func:`uuid4`.
        task_queue: Task queue to run the workflow on. Defaults to the current
            workflow's task queue.
        result_type: For string workflows, this can set the specific result type
            hint to deserialize into.
        cancellation_type: How the child workflow will react to cancellation.
        parent_close_policy: How to handle the child workflow when the parent
            workflow closes.
        execution_timeout: Total workflow execution timeout including
            retries and continue as new.
        run_timeout: Timeout of a single workflow run.
        task_timeout: Timeout of a single workflow task.
        id_reuse_policy: How already-existing IDs are treated.
        retry_policy: Retry policy for the workflow.
        cron_schedule: See https://docs.temporal.io/docs/content/what-is-a-temporal-cron-job/
        memo: Memo for the workflow.
        search_attributes: Search attributes for the workflow. The dictionary
            form of this is DEPRECATED.
        versioning_intent: When using the Worker Versioning feature, specifies whether this Child
            Workflow should run on a worker with a compatible Build Id or not.
            Deprecated: Use Worker Deployment versioning instead.
        static_summary: A single-line fixed summary for this child workflow execution that may appear
            in the UI/CLI. This can be in single-line Temporal markdown format.
        static_details: General fixed details for this child workflow execution that may appear in
            UI/CLI. This can be in Temporal markdown format and can span multiple lines. This is
            a fixed value on the workflow that cannot be updated. For details that can be
            updated, use :py:meth:`get_current_details` within the workflow.
        priority: Priority to use for this workflow.

    Returns:
        A workflow handle to the started/existing workflow.
    """
    # Emit the deprecation warning for dictionary-form search attributes before
    # handing the request to the runtime.
    temporalio.common._warn_on_deprecated_search_attributes(search_attributes)
    runtime = _Runtime.current()
    call_args = temporalio.common._arg_or_args(arg, args)
    # An empty/absent ID means "generate one" via the workflow-deterministic
    # uuid4 helper in this module.
    return await runtime.workflow_start_child_workflow(
        workflow,
        *call_args,
        id=id or str(uuid4()),
        task_queue=task_queue,
        result_type=result_type,
        cancellation_type=cancellation_type,
        parent_close_policy=parent_close_policy,
        execution_timeout=execution_timeout,
        run_timeout=run_timeout,
        task_timeout=task_timeout,
        id_reuse_policy=id_reuse_policy,
        retry_policy=retry_policy,
        cron_schedule=cron_schedule,
        memo=memo,
        search_attributes=search_attributes,
        versioning_intent=versioning_intent,
        static_summary=static_summary,
        static_details=static_details,
        priority=priority,
    )


# Overload for no-param workflow
@overload
async def execute_child_workflow(
    workflow: MethodAsyncNoParam[SelfType, ReturnType],
    *,
    id: str | None = None,
    task_queue: str | None = None,
    cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED,
    parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE,
    execution_timeout: timedelta | None = None,
    run_timeout: timedelta | None = None,
    task_timeout: timedelta | None = None,
    id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cron_schedule: str = "",
    memo: Mapping[str, Any] | None = None,
    search_attributes: None
    | (
        temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes
    ) = None,
    versioning_intent: VersioningIntent | None = None,
    static_summary: str | None = None,
    static_details: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...
# Overload for single-param workflow
# (typing-only stub; the runtime implementation follows the final overload)
@overload
async def execute_child_workflow(
    workflow: MethodAsyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    id: str | None = None,
    task_queue: str | None = None,
    cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED,
    parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE,
    execution_timeout: timedelta | None = None,
    run_timeout: timedelta | None = None,
    task_timeout: timedelta | None = None,
    id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cron_schedule: str = "",
    memo: Mapping[str, Any] | None = None,
    search_attributes: None
    | (
        temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes
    ) = None,
    versioning_intent: VersioningIntent | None = None,
    static_summary: str | None = None,
    static_details: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...
# Overload for multi-param workflow
# (typing-only stub; the runtime implementation follows the final overload)
@overload
async def execute_child_workflow(
    workflow: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    id: str | None = None,
    task_queue: str | None = None,
    cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED,
    parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE,
    execution_timeout: timedelta | None = None,
    run_timeout: timedelta | None = None,
    task_timeout: timedelta | None = None,
    id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cron_schedule: str = "",
    memo: Mapping[str, Any] | None = None,
    search_attributes: None
    | (
        temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes
    ) = None,
    versioning_intent: VersioningIntent | None = None,
    static_summary: str | None = None,
    static_details: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...
# Overload for string-name workflow
# (typing-only stub; only this variant exposes ``result_type`` since a string
# name carries no return-type information)
@overload
async def execute_child_workflow(
    workflow: str,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    id: str | None = None,
    task_queue: str | None = None,
    result_type: type | None = None,
    cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED,
    parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE,
    execution_timeout: timedelta | None = None,
    run_timeout: timedelta | None = None,
    task_timeout: timedelta | None = None,
    id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cron_schedule: str = "",
    memo: Mapping[str, Any] | None = None,
    search_attributes: None
    | (
        temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes
    ) = None,
    versioning_intent: VersioningIntent | None = None,
    static_summary: str | None = None,
    static_details: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> Any: ...
async def execute_child_workflow(
    workflow: Any,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    id: str | None = None,
    task_queue: str | None = None,
    result_type: type | None = None,
    cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED,
    parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE,
    execution_timeout: timedelta | None = None,
    run_timeout: timedelta | None = None,
    task_timeout: timedelta | None = None,
    id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cron_schedule: str = "",
    memo: Mapping[str, Any] | None = None,
    search_attributes: None
    | (
        temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes
    ) = None,
    versioning_intent: VersioningIntent | None = None,
    static_summary: str | None = None,
    static_details: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> Any:
    """Start a child workflow and wait for completion.

    This is a shortcut for ``await (await`` :py:meth:`start_child_workflow` ``)``.
    """
    temporalio.common._warn_on_deprecated_search_attributes(search_attributes)
    # We call the runtime directly instead of top-level start_child_workflow to
    # ensure we don't miss new parameters
    handle = await _Runtime.current().workflow_start_child_workflow(
        workflow,
        *temporalio.common._arg_or_args(arg, args),
        id=id or str(uuid4()),
        task_queue=task_queue,
        result_type=result_type,
        cancellation_type=cancellation_type,
        parent_close_policy=parent_close_policy,
        execution_timeout=execution_timeout,
        run_timeout=run_timeout,
        task_timeout=task_timeout,
        id_reuse_policy=id_reuse_policy,
        retry_policy=retry_policy,
        cron_schedule=cron_schedule,
        memo=memo,
        search_attributes=search_attributes,
        versioning_intent=versioning_intent,
        static_summary=static_summary,
        static_details=static_details,
        priority=priority,
    )
    # First await started the child; this await waits for its result.
    return await handle


class NexusOperationHandle(Generic[OutputT]):
    """Handle for interacting with a Nexus operation."""

    # TODO(nexus-preview): should attempts to instantiate directly throw?

    def cancel(self) -> bool:
        """Request cancellation of the operation."""
        raise NotImplementedError

    def __await__(self) -> Generator[Any, Any, OutputT]:
        """Support await."""
        raise NotImplementedError

    @property
    def operation_token(self) -> str | None:
        """The operation token for this handle."""
        raise NotImplementedError


class ExternalWorkflowHandle(Generic[SelfType]):
    """Handle for interacting with an external workflow.

    This is created via :py:func:`get_external_workflow_handle` or
    :py:func:`get_external_workflow_handle_for`.
    """

    # NOTE: interface stubs; the concrete handle is supplied by the workflow
    # runtime implementation elsewhere.

    @property
    def id(self) -> str:
        """ID for the workflow."""
        raise NotImplementedError

    @property
    def run_id(self) -> str | None:
        """Run ID for the workflow if any."""
        raise NotImplementedError

    @overload
    async def signal(
        self,
        signal: MethodSyncOrAsyncNoParam[SelfType, None],
    ) -> None: ...

    @overload
    async def signal(
        self,
        signal: MethodSyncOrAsyncSingleParam[SelfType, ParamType, None],
        arg: ParamType,
    ) -> None: ...

    @overload
    async def signal(
        self,
        signal: str,
        arg: Any = temporalio.common._arg_unset,
        *,
        args: Sequence[Any] = [],
    ) -> None: ...

    async def signal(
        self,
        signal: str | Callable,  # type: ignore[reportUnusedParameter]
        arg: Any = temporalio.common._arg_unset,  # type: ignore[reportUnusedParameter]
        *,
        args: Sequence[Any] = [],  # type: ignore[reportUnusedParameter]
    ) -> None:
        """Signal this external workflow.

        Args:
            signal: Name or method reference for the signal.
            arg: Single argument to the signal.
            args: Multiple arguments to the signal. Cannot be set if arg is.

        """
        raise NotImplementedError

    async def cancel(self) -> None:
        """Send a cancellation request to this external workflow.

        This will fail if the workflow cannot accept the request (e.g. if the
        workflow is not found).
        """
        raise NotImplementedError


def get_external_workflow_handle(
    workflow_id: str,
    *,
    run_id: str | None = None,
) -> ExternalWorkflowHandle[Any]:
    """Get a workflow handle to an existing workflow by its ID.

    Args:
        workflow_id: Workflow ID to get a handle to.
        run_id: Optional run ID for the workflow.

    Returns:
        The external workflow handle.
    """
    return _Runtime.current().workflow_get_external_workflow_handle(
        workflow_id, run_id=run_id
    )


def get_external_workflow_handle_for(
    workflow: MethodAsyncNoParam[SelfType, Any]  # type: ignore[reportUnusedParameter]
    | MethodAsyncSingleParam[SelfType, Any, Any],
    workflow_id: str,
    *,
    run_id: str | None = None,
) -> ExternalWorkflowHandle[SelfType]:
    """Get a typed workflow handle to an existing workflow by its ID.

    This is the same as :py:func:`get_external_workflow_handle` but typed. Note,
    the workflow type given is not validated, it is only for typing.
- - Args: - workflow: The workflow run method to use for typing the handle. - workflow_id: Workflow ID to get a handle to. - run_id: Optional run ID for the workflow. - - Returns: - The external workflow handle. - """ - return get_external_workflow_handle(workflow_id, run_id=run_id) - - -class ContinueAsNewError(BaseException): - """Error thrown by :py:func:`continue_as_new`. - - This should not be caught, but instead be allowed to throw out of the - workflow which then triggers the continue as new. This should never be - instantiated directly. - """ - - def __init__(self, *args: object) -> None: - """Direct instantiation is disabled. Use :py:func:`continue_as_new`.""" - if type(self) == ContinueAsNewError: - raise RuntimeError("Cannot instantiate ContinueAsNewError directly") - super().__init__(*args) - - -# Overload for self (unfortunately, cannot type args) -@overload -def continue_as_new( - arg: Any = temporalio.common._arg_unset, - *, - args: Sequence[Any] = [], - task_queue: str | None = None, - run_timeout: timedelta | None = None, - task_timeout: timedelta | None = None, - retry_policy: temporalio.common.RetryPolicy | None = None, - memo: Mapping[str, Any] | None = None, - search_attributes: None - | ( - temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes - ) = None, - versioning_intent: VersioningIntent | None = None, - initial_versioning_behavior: ContinueAsNewVersioningBehavior | None = None, -) -> NoReturn: ... 
- - -# Overload for no-param workflow -@overload -def continue_as_new( - *, - workflow: MethodAsyncNoParam[SelfType, Any], - task_queue: str | None = None, - run_timeout: timedelta | None = None, - task_timeout: timedelta | None = None, - retry_policy: temporalio.common.RetryPolicy | None = None, - memo: Mapping[str, Any] | None = None, - search_attributes: None - | ( - temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes - ) = None, - versioning_intent: VersioningIntent | None = None, - initial_versioning_behavior: ContinueAsNewVersioningBehavior | None = None, -) -> NoReturn: ... - - -# Overload for single-param workflow -@overload -def continue_as_new( - arg: ParamType, - *, - workflow: MethodAsyncSingleParam[SelfType, ParamType, Any], - task_queue: str | None = None, - run_timeout: timedelta | None = None, - task_timeout: timedelta | None = None, - retry_policy: temporalio.common.RetryPolicy | None = None, - memo: Mapping[str, Any] | None = None, - search_attributes: None - | ( - temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes - ) = None, - versioning_intent: VersioningIntent | None = None, - initial_versioning_behavior: ContinueAsNewVersioningBehavior | None = None, -) -> NoReturn: ... - - -# Overload for multi-param workflow -@overload -def continue_as_new( - *, - workflow: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[Any]], - args: Sequence[Any], - task_queue: str | None = None, - run_timeout: timedelta | None = None, - task_timeout: timedelta | None = None, - retry_policy: temporalio.common.RetryPolicy | None = None, - memo: Mapping[str, Any] | None = None, - search_attributes: None - | ( - temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes - ) = None, - versioning_intent: VersioningIntent | None = None, - initial_versioning_behavior: ContinueAsNewVersioningBehavior | None = None, -) -> NoReturn: ... 
- - -# Overload for string-name workflow -@overload -def continue_as_new( - *, - workflow: str, - args: Sequence[Any] = [], - task_queue: str | None = None, - run_timeout: timedelta | None = None, - task_timeout: timedelta | None = None, - retry_policy: temporalio.common.RetryPolicy | None = None, - memo: Mapping[str, Any] | None = None, - search_attributes: None - | ( - temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes - ) = None, - versioning_intent: VersioningIntent | None = None, - initial_versioning_behavior: ContinueAsNewVersioningBehavior | None = None, -) -> NoReturn: ... - - -def continue_as_new( - arg: Any = temporalio.common._arg_unset, - *, - args: Sequence[Any] = [], - workflow: None | Callable | str = None, - task_queue: str | None = None, - run_timeout: timedelta | None = None, - task_timeout: timedelta | None = None, - retry_policy: temporalio.common.RetryPolicy | None = None, - memo: Mapping[str, Any] | None = None, - search_attributes: None - | ( - temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes - ) = None, - versioning_intent: VersioningIntent | None = None, - initial_versioning_behavior: ContinueAsNewVersioningBehavior | None = None, -) -> NoReturn: - """Stop the workflow immediately and continue as new. - - Args: - arg: Single argument to the continued workflow. - args: Multiple arguments to the continued workflow. Cannot be set if arg - is. - workflow: Specific workflow to continue to. Defaults to the current - workflow. - task_queue: Task queue to run the workflow on. Defaults to the current - workflow's task queue. - run_timeout: Timeout of a single workflow run. Defaults to the current - workflow's run timeout. - task_timeout: Timeout of a single workflow task. Defaults to the current - workflow's task timeout. - memo: Memo for the workflow. Defaults to the current workflow's memo. - search_attributes: Search attributes for the workflow. 
Defaults to the - current workflow's search attributes. The dictionary form of this is - DEPRECATED. - versioning_intent: When using the Worker Versioning feature, specifies whether this Workflow - should Continue-as-New onto a worker with a compatible Build Id or not. - Deprecated: Use Worker Deployment versioning instead. - - Returns: - Never returns, always raises a :py:class:`ContinueAsNewError`. - - Raises: - ContinueAsNewError: Always raised by this function. Should not be caught - but instead be allowed to - """ - temporalio.common._warn_on_deprecated_search_attributes(search_attributes) - _Runtime.current().workflow_continue_as_new( - *temporalio.common._arg_or_args(arg, args), - workflow=workflow, - task_queue=task_queue, - run_timeout=run_timeout, - task_timeout=task_timeout, - retry_policy=retry_policy, - memo=memo, - search_attributes=search_attributes, - versioning_intent=versioning_intent, - initial_versioning_behavior=initial_versioning_behavior, - ) - - -def get_signal_handler(name: str) -> Callable | None: - """Get the signal handler for the given name if any. - - This includes handlers created via the ``@workflow.signal`` decorator. - - Args: - name: Name of the signal. - - Returns: - Callable for the signal if any. If a handler is not found for the name, - this will not return the dynamic handler even if there is one. - """ - return _Runtime.current().workflow_get_signal_handler(name) - - -def set_signal_handler(name: str, handler: Callable | None) -> None: - """Set or unset the signal handler for the given name. - - This overrides any existing handlers for the given name, including handlers - created via the ``@workflow.signal`` decorator. - - When set, all unhandled past signals for the given name are immediately sent - to the handler. - - Args: - name: Name of the signal. - handler: Callable to set or None to unset. 
- """ - _Runtime.current().workflow_set_signal_handler(name, handler) - - -def get_dynamic_signal_handler() -> Callable | None: - """Get the dynamic signal handler if any. - - This includes dynamic handlers created via the ``@workflow.signal`` - decorator. - - Returns: - Callable for the dynamic signal handler if any. - """ - return _Runtime.current().workflow_get_signal_handler(None) - - -def set_dynamic_signal_handler(handler: Callable | None) -> None: - """Set or unset the dynamic signal handler. - - This overrides the existing dynamic handler even if it was created via the - ``@workflow.signal`` decorator. - - When set, all unhandled past signals are immediately sent to the handler. - - Args: - handler: Callable to set or None to unset. - """ - _Runtime.current().workflow_set_signal_handler(None, handler) - - -def get_query_handler(name: str) -> Callable | None: - """Get the query handler for the given name if any. - - This includes handlers created via the ``@workflow.query`` decorator. - - Args: - name: Name of the query. - - Returns: - Callable for the query if any. If a handler is not found for the name, - this will not return the dynamic handler even if there is one. - """ - return _Runtime.current().workflow_get_query_handler(name) - - -def set_query_handler(name: str, handler: Callable | None) -> None: - """Set or unset the query handler for the given name. - - This overrides any existing handlers for the given name, including handlers - created via the ``@workflow.query`` decorator. - - Args: - name: Name of the query. - handler: Callable to set or None to unset. - """ - _Runtime.current().workflow_set_query_handler(name, handler) - - -def get_dynamic_query_handler() -> Callable | None: - """Get the dynamic query handler if any. - - This includes dynamic handlers created via the ``@workflow.query`` - decorator. - - Returns: - Callable for the dynamic query handler if any. 
- """ - return _Runtime.current().workflow_get_query_handler(None) - - -def set_dynamic_query_handler(handler: Callable | None) -> None: - """Set or unset the dynamic query handler. - - This overrides the existing dynamic handler even if it was created via the - ``@workflow.query`` decorator. - - Args: - handler: Callable to set or None to unset. - """ - _Runtime.current().workflow_set_query_handler(None, handler) - - -def get_update_handler(name: str) -> Callable | None: - """Get the update handler for the given name if any. - - This includes handlers created via the ``@workflow.update`` decorator. - - Args: - name: Name of the update. - - Returns: - Callable for the update if any. If a handler is not found for the name, - this will not return the dynamic handler even if there is one. - """ - return _Runtime.current().workflow_get_update_handler(name) - - -def set_update_handler( - name: str, handler: Callable | None, *, validator: Callable | None = None -) -> None: - """Set or unset the update handler for the given name. - - This overrides any existing handlers for the given name, including handlers - created via the ``@workflow.update`` decorator. - - Args: - name: Name of the update. - handler: Callable to set or None to unset. - validator: Callable to set or None to unset as the update validator. - """ - _Runtime.current().workflow_set_update_handler(name, handler, validator) - - -def get_dynamic_update_handler() -> Callable | None: - """Get the dynamic update handler if any. - - This includes dynamic handlers created via the ``@workflow.update`` - decorator. - - Returns: - Callable for the dynamic update handler if any. - """ - return _Runtime.current().workflow_get_update_handler(None) - - -def set_dynamic_update_handler( - handler: Callable | None, *, validator: Callable | None = None -) -> None: - """Set or unset the dynamic update handler. - - This overrides the existing dynamic handler even if it was created via the - ``@workflow.update`` decorator. 
- - Args: - handler: Callable to set or None to unset. - validator: Callable to set or None to unset as the update validator. - """ - _Runtime.current().workflow_set_update_handler(None, handler, validator) - - -def all_handlers_finished() -> bool: - """Whether update and signal handlers have finished executing. - - Consider waiting on this condition before workflow return or continue-as-new, to prevent - interruption of in-progress handlers by workflow exit: - ``await workflow.wait_condition(lambda: workflow.all_handlers_finished())`` - - Returns: - True if there are no in-progress update or signal handler executions. - """ - return _Runtime.current().workflow_all_handlers_finished() - - -def as_completed( - fs: Iterable[Awaitable[AnyType]], *, timeout: float | None = None -) -> Iterator[Awaitable[AnyType]]: - """Return an iterator whose values are coroutines. - - This is a deterministic version of :py:func:`asyncio.as_completed`. This - function should be used instead of that one in workflows. - """ - # Taken almost verbatim from - # https://github.com/python/cpython/blob/v3.12.3/Lib/asyncio/tasks.py#L584 - # but the "set" is changed out for a "list" and fixed up some typing/format - - if asyncio.isfuture(fs) or asyncio.iscoroutine(fs): - raise TypeError(f"expect an iterable of futures, not {type(fs).__name__}") - - done: asyncio.Queue[asyncio.Future | None] = asyncio.Queue() - - loop = asyncio.get_event_loop() - todo: list[asyncio.Future] = [asyncio.ensure_future(f, loop=loop) for f in list(fs)] - timeout_handle = None - - def _on_timeout(): - for f in todo: - f.remove_done_callback(_on_completion) - done.put_nowait(None) # Queue a dummy value for _wait_for_one(). - todo.clear() # Can't do todo.remove(f) in the loop. - - def _on_completion(f): # type:ignore[reportMissingParameterType] - if not todo: - return # _on_timeout() was here first. 
- todo.remove(f) - done.put_nowait(f) - if not todo and timeout_handle is not None: - timeout_handle.cancel() - - async def _wait_for_one(): - f = await done.get() - if f is None: - # Dummy value from _on_timeout(). - raise asyncio.TimeoutError - return f.result() # May raise f.exception(). - - for f in todo: - f.add_done_callback(_on_completion) - if todo and timeout is not None: - timeout_handle = loop.call_later(timeout, _on_timeout) - for _ in range(len(todo)): - yield _wait_for_one() - - -if TYPE_CHECKING: - _FT = TypeVar("_FT", bound=asyncio.Future[Any]) -else: - _FT = TypeVar("_FT", bound=asyncio.Future) - - -@overload -async def wait( # type: ignore[misc] - fs: Iterable[_FT], - *, - timeout: float | None = None, - return_when: str = asyncio.ALL_COMPLETED, -) -> tuple[list[_FT], list[_FT]]: ... - - -@overload -async def wait( - fs: Iterable[asyncio.Task[AnyType]], - *, - timeout: float | None = None, - return_when: str = asyncio.ALL_COMPLETED, -) -> tuple[list[asyncio.Task[AnyType]], list[asyncio.Task[AnyType]]]: ... - - -async def wait( - fs: Iterable, - *, - timeout: float | None = None, - return_when: str = asyncio.ALL_COMPLETED, -) -> tuple: - """Wait for the Futures or Tasks given by fs to complete. - - This is a deterministic version of :py:func:`asyncio.wait`. This function - should be used instead of that one in workflows. 
- """ - # Taken almost verbatim from - # https://github.com/python/cpython/blob/v3.12.3/Lib/asyncio/tasks.py#L435 - # but the "set" is changed out for a "list" and fixed up some typing/format - - if asyncio.isfuture(fs) or asyncio.iscoroutine(fs): - raise TypeError(f"Expect an iterable of Tasks/Futures, not {type(fs).__name__}") - if not fs: - raise ValueError("Sequence of Tasks/Futures must not be empty.") - if return_when not in ( - asyncio.FIRST_COMPLETED, - asyncio.FIRST_EXCEPTION, - asyncio.ALL_COMPLETED, - ): - raise ValueError(f"Invalid return_when value: {return_when}") - - fs = list(fs) - - if any(asyncio.iscoroutine(f) for f in fs): - raise TypeError("Passing coroutines is forbidden, use tasks explicitly.") - - loop = asyncio.get_running_loop() - return await _wait(fs, timeout, return_when, loop) - - -async def _wait( - fs: Iterable[asyncio.Future | asyncio.Task], - timeout: float | None, - return_when: str, - loop: asyncio.AbstractEventLoop, -) -> tuple[list, list]: - # Taken almost verbatim from - # https://github.com/python/cpython/blob/v3.12.3/Lib/asyncio/tasks.py#L522 - # but the "set" is changed out for a "list" and fixed up some typing/format - - assert fs, "Sequence of Tasks/Futures must not be empty." 
- waiter = loop.create_future() - timeout_handle = None - if timeout is not None: - timeout_handle = loop.call_later(timeout, _release_waiter, waiter) - counter = len(fs) # type: ignore[arg-type] - - def _on_completion(f): # type:ignore[reportMissingParameterType] - nonlocal counter - counter -= 1 - if ( - counter <= 0 - or return_when == asyncio.FIRST_COMPLETED - or return_when == asyncio.FIRST_EXCEPTION - and (not f.cancelled() and f.exception() is not None) - ): - if timeout_handle is not None: - timeout_handle.cancel() - if not waiter.done(): - waiter.set_result(None) - - for f in fs: - f.add_done_callback(_on_completion) - - try: - await waiter - finally: - if timeout_handle is not None: - timeout_handle.cancel() - for f in fs: - f.remove_done_callback(_on_completion) - - done, pending = [], [] - for f in fs: - if f.done(): - done.append(f) - else: - pending.append(f) - return done, pending - - -def _release_waiter(waiter: asyncio.Future[Any], *_args: Any) -> None: - # Taken almost verbatim from - # https://github.com/python/cpython/blob/v3.12.3/Lib/asyncio/tasks.py#L467 - - if not waiter.done(): - waiter.set_result(None) - - -def _is_unbound_method_on_cls(fn: Callable[..., Any], cls: type) -> bool: - # Python 3 does not make this easy, ref https://stackoverflow.com/questions/3589311 - return ( - inspect.isfunction(fn) - and inspect.getmodule(fn) is inspect.getmodule(cls) - and fn.__qualname__.rsplit(".", 1)[0] == cls.__name__ - ) - - -class NondeterminismError(temporalio.exceptions.TemporalError): - """Error that can be thrown during replay for non-deterministic workflow.""" - - def __init__(self, message: str) -> None: - """Initialize a nondeterminism error.""" - super().__init__(message) - self.message = message - - -class ReadOnlyContextError(temporalio.exceptions.TemporalError): - """Error thrown when trying to do mutable workflow calls in a read-only - context like a query or update validator. 
- """ - - def __init__(self, message: str) -> None: - """Initialize a read-only context error.""" - super().__init__(message) - self.message = message - - -class _NotInWorkflowEventLoopError(temporalio.exceptions.TemporalError): - def __init__(self, *args: object) -> None: - super().__init__("Not in workflow event loop") - self.message = "Not in workflow event loop" - - -class VersioningIntent(Enum): - """Indicates whether the user intends certain commands to be run on a compatible worker Build - Id version or not. - - `COMPATIBLE` indicates that the command should run on a worker with compatible version if - possible. It may not be possible if the target task queue does not also have knowledge of the - current worker's Build Id. - - `DEFAULT` indicates that the command should run on the target task queue's current - overall-default Build Id. - - Where this type is accepted optionally, an unset value indicates that the SDK should choose the - most sensible default behavior for the type of command, accounting for whether the command will - be run on the same task queue as the current worker. - - .. deprecated:: - Use Worker Deployment versioning instead. - """ - - COMPATIBLE = 1 - DEFAULT = 2 - - def _to_proto(self) -> temporalio.bridge.proto.common.VersioningIntent.ValueType: - if self == VersioningIntent.COMPATIBLE: - return temporalio.bridge.proto.common.VersioningIntent.COMPATIBLE - elif self == VersioningIntent.DEFAULT: - return temporalio.bridge.proto.common.VersioningIntent.DEFAULT - return temporalio.bridge.proto.common.VersioningIntent.UNSPECIFIED - - -class ContinueAsNewVersioningBehavior(IntEnum): - """Experimental. Optionally decide the versioning behavior that the first task of the new run should use. - For example, choose to AutoUpgrade on continue-as-new instead of inheriting the pinned version - of the previous run. 
- """ - - UNSPECIFIED = int( - temporalio.api.enums.v1.ContinueAsNewVersioningBehavior.CONTINUE_AS_NEW_VERSIONING_BEHAVIOR_UNSPECIFIED - ) - """An initial versioning behavior is not set, follow the existing continue-as-new inheritance semantics. - See https://docs.temporal.io/worker-versioning#inheritance-semantics for more detail. - """ - - AUTO_UPGRADE = int( - temporalio.api.enums.v1.ContinueAsNewVersioningBehavior.CONTINUE_AS_NEW_VERSIONING_BEHAVIOR_AUTO_UPGRADE - ) - """Start the new run with AutoUpgrade behavior. Use the Target Version of the workflow's task queue at - start-time, as AutoUpgrade workflows do. After the first workflow task completes, use whatever - Versioning Behavior the workflow is annotated with in the workflow code. - - Note that if the previous workflow had a Pinned override, that override will be inherited by the - new workflow run regardless of the ContinueAsNewVersioningBehavior specified in the continue-as-new - command. If a Pinned override is inherited by the new run, and the new run starts with AutoUpgrade - behavior, the base version of the new run will be the Target Version as described above, but the - effective version will be whatever is specified by the Versioning Override until the override is removed. - """ - - -ServiceT = TypeVar("ServiceT") - - -class NexusOperationCancellationType(IntEnum): - """Defines behavior of a Nexus operation when the caller workflow initiates cancellation. - - Pass one of these values to :py:meth:`NexusClient.start_operation` to define cancellation - behavior. - - To initiate cancellation, use :py:meth:`NexusOperationHandle.cancel` and then ``await`` the - operation handle. This will result in a :py:class:`exceptions.NexusOperationError`. The values - of this enum define what is guaranteed to have happened by that point. 
- """ - - ABANDON = int(temporalio.bridge.proto.nexus.NexusOperationCancellationType.ABANDON) - """Do not send any cancellation request to the operation handler; just report cancellation to the caller""" - - TRY_CANCEL = int( - temporalio.bridge.proto.nexus.NexusOperationCancellationType.TRY_CANCEL - ) - """Send a cancellation request but immediately report cancellation to the caller. Note that this - does not guarantee that cancellation is delivered to the operation handler if the caller exits - before the delivery is done. - """ - - WAIT_REQUESTED = int( - temporalio.bridge.proto.nexus.NexusOperationCancellationType.WAIT_CANCELLATION_REQUESTED - ) - """Send a cancellation request and wait for confirmation that the request was received. - Does not wait for the operation to complete. - """ - - WAIT_COMPLETED = int( - temporalio.bridge.proto.nexus.NexusOperationCancellationType.WAIT_CANCELLATION_COMPLETED - ) - """Send a cancellation request and wait for the operation to complete. - Note that the operation may not complete as cancelled (for example, if it catches the - :py:exc:`asyncio.CancelledError` resulting from the cancellation request).""" - - -class NexusClient(ABC, Generic[ServiceT]): - """A client for invoking Nexus operations. 
- - Example:: - - nexus_client = workflow.create_nexus_client( - endpoint=my_nexus_endpoint, - service=MyService, - ) - handle = await nexus_client.start_operation( - operation=MyService.my_operation, - input=MyOperationInput(value="hello"), - schedule_to_close_timeout=timedelta(seconds=10), - ) - result = await handle.result() - """ - - # Overload for nexusrpc.Operation - @overload - @abstractmethod - async def start_operation( - self, - operation: nexusrpc.Operation[InputT, OutputT], - input: InputT, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> NexusOperationHandle[OutputT]: ... - - # Overload for string operation name - @overload - @abstractmethod - async def start_operation( - self, - operation: str, - input: Any, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> NexusOperationHandle[OutputT]: ... 
- - # Overload for workflow_run_operation methods - @overload - @abstractmethod - async def start_operation( - self, - operation: Callable[ - [ServiceHandlerT, temporalio.nexus.WorkflowRunOperationContext, InputT], - Awaitable[temporalio.nexus.WorkflowHandle[OutputT]], - ], - input: InputT, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> NexusOperationHandle[OutputT]: ... - - # Overload for sync_operation methods (async def) - @overload - @abstractmethod - async def start_operation( - self, - operation: Callable[ - [ServiceHandlerT, nexusrpc.handler.StartOperationContext, InputT], - Awaitable[OutputT], - ], - input: InputT, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> NexusOperationHandle[OutputT]: ... 
- - # Overload for sync_operation methods (def) - @overload - @abstractmethod - async def start_operation( - self, - operation: Callable[ - [ServiceHandlerT, nexusrpc.handler.StartOperationContext, InputT], - OutputT, - ], - input: InputT, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> NexusOperationHandle[OutputT]: ... - - # Overload for operation_handler - @overload - @abstractmethod - async def start_operation( - self, - operation: Callable[ - [ServiceHandlerT], nexusrpc.handler.OperationHandler[InputT, OutputT] - ], - input: InputT, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> NexusOperationHandle[OutputT]: ... - - @abstractmethod - async def start_operation( - self, - operation: Any, - input: Any, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> Any: - """Start a Nexus operation and return its handle. - - Args: - operation: The Nexus operation. - input: The Nexus operation input. - output_type: The Nexus operation output type. 
- schedule_to_close_timeout: Timeout for the entire operation attempt. - schedule_to_start_timeout: Timeout for the operation to be started. - start_to_close_timeout: Timeout for async operations to complete after starting. - headers: Headers to send with the Nexus HTTP request. - - Returns: - A handle to the Nexus operation. The result can be obtained as - ```python - await handle.result() - ``` - """ - ... - - # Overload for nexusrpc.Operation - @overload - @abstractmethod - async def execute_operation( - self, - operation: nexusrpc.Operation[InputT, OutputT], - input: InputT, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> OutputT: ... - - # Overload for string operation name - @overload - @abstractmethod - async def execute_operation( - self, - operation: str, - input: Any, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> OutputT: ... 
- - # Overload for workflow_run_operation methods - @overload - @abstractmethod - async def execute_operation( - self, - operation: Callable[ - [ServiceHandlerT, temporalio.nexus.WorkflowRunOperationContext, InputT], - Awaitable[temporalio.nexus.WorkflowHandle[OutputT]], - ], - input: InputT, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> OutputT: ... - - # Overload for sync_operation methods (async def) - @overload - @abstractmethod - async def execute_operation( - self, - operation: Callable[ - [ServiceT, nexusrpc.handler.StartOperationContext, InputT], - Awaitable[OutputT], - ], - input: InputT, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> OutputT: ... - - # Overload for sync_operation methods (def) - @overload - @abstractmethod - async def execute_operation( - self, - operation: Callable[ - [ServiceT, nexusrpc.handler.StartOperationContext, InputT], - OutputT, - ], - input: InputT, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> OutputT: ... 
- - # Overload for operation_handler - @overload - @abstractmethod - async def execute_operation( - self, - operation: Callable[ - [ServiceT], - nexusrpc.handler.OperationHandler[InputT, OutputT], - ], - input: InputT, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> OutputT: ... - - @abstractmethod - async def execute_operation( - self, - operation: Any, - input: Any, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> Any: - """Execute a Nexus operation and return its result. - - Args: - operation: The Nexus operation. - input: The Nexus operation input. - output_type: The Nexus operation output type. - schedule_to_close_timeout: Timeout for the entire operation attempt. - schedule_to_start_timeout: Timeout for the operation to be started. - start_to_close_timeout: Timeout for async operations to complete after starting. - headers: Headers to send with the Nexus HTTP request. - - Returns: - The operation result. - """ - ... - - -class _NexusClient(NexusClient[ServiceT]): - def __init__( - self, - *, - endpoint: str, - service: type[ServiceT] | str, - ) -> None: - """Create a Nexus client. - - Args: - service: The Nexus service. - endpoint: The Nexus endpoint. - """ - # If service is not a str, then it must be a service interface or implementation - # class. 
- if isinstance(service, str): - self.service_name = service - elif service_defn := nexusrpc.get_service_definition(service): - self.service_name = service_defn.name - else: - raise ValueError( - f"`service` may be a name (str), or a class decorated with either " - f"@nexusrpc.handler.service_handler or @nexusrpc.service. " - f"Invalid service type: {type(service)}" - ) - self.endpoint = endpoint - - async def start_operation( - self, - operation: Any, - input: Any, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> Any: - return ( - await temporalio.workflow._Runtime.current().workflow_start_nexus_operation( - endpoint=self.endpoint, - service=self.service_name, - operation=operation, - input=input, - output_type=output_type, - schedule_to_close_timeout=schedule_to_close_timeout, - schedule_to_start_timeout=schedule_to_start_timeout, - start_to_close_timeout=start_to_close_timeout, - cancellation_type=cancellation_type, - headers=headers, - summary=summary, - ) - ) - - async def execute_operation( - self, - operation: Any, - input: Any, - *, - output_type: type[OutputT] | None = None, - schedule_to_close_timeout: timedelta | None = None, - schedule_to_start_timeout: timedelta | None = None, - start_to_close_timeout: timedelta | None = None, - cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, - headers: Mapping[str, str] | None = None, - summary: str | None = None, - ) -> Any: - handle = await self.start_operation( - operation, - input, - output_type=output_type, - schedule_to_close_timeout=schedule_to_close_timeout, - schedule_to_start_timeout=schedule_to_start_timeout, - 
start_to_close_timeout=start_to_close_timeout, - cancellation_type=cancellation_type, - headers=headers, - summary=summary, - ) - return await handle - - -@overload -def create_nexus_client( - *, - service: type[ServiceT], - endpoint: str, -) -> NexusClient[ServiceT]: ... - - -@overload -def create_nexus_client( - *, - service: str, - endpoint: str, -) -> NexusClient[Any]: ... - - -def create_nexus_client( - *, - service: type[ServiceT] | str, - endpoint: str, -) -> NexusClient[ServiceT]: - """Create a Nexus client. - - Args: - service: The Nexus service. - endpoint: The Nexus endpoint. - """ - return _NexusClient(endpoint=endpoint, service=service) diff --git a/temporalio/workflow/__init__.py b/temporalio/workflow/__init__.py new file mode 100644 index 000000000..0f47161a5 --- /dev/null +++ b/temporalio/workflow/__init__.py @@ -0,0 +1,87 @@ +"""Utilities that can decorate or be called inside workflows.""" + +from __future__ import annotations + +from . import ( + _activities, + _asyncio, + _context, + _definition, + _exceptions, + _handlers, + _nexus, + _sandbox, + _workflow_ops, +) +from ._activities import * +from ._activities import _AsyncioTask as _AsyncioTask +from ._asyncio import * +from ._asyncio import _FT as _FT +from ._asyncio import _release_waiter as _release_waiter +from ._asyncio import _wait as _wait +from ._context import * +from ._context import _current_update_info as _current_update_info +from ._context import _Runtime as _Runtime +from ._context import _set_current_update_info as _set_current_update_info +from ._definition import * +from ._definition import _Definition as _Definition +from ._definition import _is_unbound_method_on_cls as _is_unbound_method_on_cls +from ._definition import ( + _parameters_identical_up_to_naming as _parameters_identical_up_to_naming, +) +from ._exceptions import * +from ._exceptions import _NotInWorkflowEventLoopError as _NotInWorkflowEventLoopError +from ._handlers import * +from ._handlers import 
_assert_dynamic_handler_args as _assert_dynamic_handler_args +from ._handlers import _bind_method as _bind_method +from ._handlers import _QueryDefinition as _QueryDefinition +from ._handlers import _SignalDefinition as _SignalDefinition +from ._handlers import _update_validator as _update_validator +from ._handlers import _UpdateDefinition as _UpdateDefinition +from ._nexus import * +from ._nexus import _NexusClient as _NexusClient +from ._sandbox import * +from ._sandbox import _build_log_context as _build_log_context +from ._sandbox import _imports_passed_through as _imports_passed_through +from ._sandbox import _in_sandbox as _in_sandbox +from ._sandbox import ( + _sandbox_import_notification_policy_override as _sandbox_import_notification_policy_override, +) +from ._sandbox import _sandbox_unrestricted as _sandbox_unrestricted +from ._workflow_ops import * + +__all__: list[str] = [] +__all__ += _activities.__all__ +__all__ += _asyncio.__all__ +__all__ += _context.__all__ +__all__ += _definition.__all__ +__all__ += _exceptions.__all__ +__all__ += _handlers.__all__ +__all__ += _nexus.__all__ +__all__ += _sandbox.__all__ +__all__ += _workflow_ops.__all__ +__all__ += [ + "_AsyncioTask", + "_FT", + "_release_waiter", + "_wait", + "_current_update_info", + "_Runtime", + "_set_current_update_info", + "_Definition", + "_is_unbound_method_on_cls", + "_parameters_identical_up_to_naming", + "_NotInWorkflowEventLoopError", + "_assert_dynamic_handler_args", + "_bind_method", + "_QueryDefinition", + "_SignalDefinition", + "_update_validator", + "_UpdateDefinition", + "_NexusClient", + "_build_log_context", + "_imports_passed_through", + "_in_sandbox", + "_sandbox_import_notification_policy_override", + "_sandbox_unrestricted", +] diff --git a/temporalio/workflow/_activities.py b/temporalio/workflow/_activities.py new file mode 100644 index 000000000..ef883c016 --- /dev/null +++ b/temporalio/workflow/_activities.py @@ -0,0 +1,2008 @@ +from __future__ import annotations + 
+import asyncio +from collections.abc import Awaitable, Callable, Sequence +from datetime import timedelta +from enum import IntEnum +from typing import TYPE_CHECKING, Any, Concatenate, Generic, TypedDict, overload + +import temporalio.bridge.proto.workflow_commands +import temporalio.common + +from ..types import ( + AnyType, + CallableAsyncNoParam, + CallableAsyncSingleParam, + CallableSyncNoParam, + CallableSyncSingleParam, + MethodAsyncNoParam, + MethodAsyncSingleParam, + MethodSyncNoParam, + MethodSyncSingleParam, + MultiParamSpec, + ParamType, + ReturnType, + SelfType, +) +from ._context import _Runtime +from ._exceptions import VersioningIntent + +__all__ = [ + "ActivityCancellationType", + "ActivityConfig", + "ActivityHandle", + "LocalActivityConfig", + "execute_activity", + "execute_activity_class", + "execute_activity_method", + "execute_local_activity", + "execute_local_activity_class", + "execute_local_activity_method", + "start_activity", + "start_activity_class", + "start_activity_method", + "start_local_activity", + "start_local_activity_class", + "start_local_activity_method", +] + +# See https://mypy.readthedocs.io/en/latest/runtime_troubles.html#using-classes-that-are-generic-in-stubs-but-not-at-runtime +if TYPE_CHECKING: + + class _AsyncioTask(asyncio.Task[AnyType]): + pass + +else: + # TODO: inherited classes should be other way around? + class _AsyncioTask(Generic[AnyType], asyncio.Task): + pass + + +class ActivityHandle(_AsyncioTask[ReturnType]): # type: ignore[type-var] + """Handle returned from :py:func:`start_activity` and + :py:func:`start_local_activity`. + + This extends :py:class:`asyncio.Task` and supports all task features. 
+ """ + + pass + + +class ActivityCancellationType(IntEnum): + """How an activity cancellation should be handled.""" + + TRY_CANCEL = int( + temporalio.bridge.proto.workflow_commands.ActivityCancellationType.TRY_CANCEL + ) + WAIT_CANCELLATION_COMPLETED = int( + temporalio.bridge.proto.workflow_commands.ActivityCancellationType.WAIT_CANCELLATION_COMPLETED + ) + ABANDON = int( + temporalio.bridge.proto.workflow_commands.ActivityCancellationType.ABANDON + ) + + +class ActivityConfig(TypedDict, total=False): + """TypedDict of config that can be used for :py:func:`start_activity` and + :py:func:`execute_activity`. + """ + + task_queue: str | None + schedule_to_close_timeout: timedelta | None + schedule_to_start_timeout: timedelta | None + start_to_close_timeout: timedelta | None + heartbeat_timeout: timedelta | None + retry_policy: temporalio.common.RetryPolicy | None + cancellation_type: ActivityCancellationType + activity_id: str | None + versioning_intent: VersioningIntent | None + summary: str | None + priority: temporalio.common.Priority + + +# Overload for async no-param activity +@overload +def start_activity( + activity: CallableAsyncNoParam[ReturnType], + *, + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ActivityHandle[ReturnType]: ... 
+ + +# Overload for sync no-param activity +@overload +def start_activity( + activity: CallableSyncNoParam[ReturnType], + *, + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ActivityHandle[ReturnType]: ... + + +# Overload for async single-param activity +@overload +def start_activity( + activity: CallableAsyncSingleParam[ParamType, ReturnType], + arg: ParamType, + *, + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ActivityHandle[ReturnType]: ... 
+ + +# Overload for sync single-param activity +@overload +def start_activity( + activity: CallableSyncSingleParam[ParamType, ReturnType], + arg: ParamType, + *, + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ActivityHandle[ReturnType]: ... + + +# Overload for async multi-param activity +@overload +def start_activity( + activity: Callable[..., Awaitable[ReturnType]], + *, + args: Sequence[Any], + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ActivityHandle[ReturnType]: ... 
+ + +# Overload for sync multi-param activity +@overload +def start_activity( + activity: Callable[..., ReturnType], + *, + args: Sequence[Any], + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ActivityHandle[ReturnType]: ... + + +# Overload for string-name activity +@overload +def start_activity( + activity: str, + arg: Any = temporalio.common._arg_unset, + *, + args: Sequence[Any] = [], + task_queue: str | None = None, + result_type: type | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ActivityHandle[Any]: ... 
+ + +def start_activity( + activity: Any, + arg: Any = temporalio.common._arg_unset, + *, + args: Sequence[Any] = [], + task_queue: str | None = None, + result_type: type | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ActivityHandle[Any]: + """Start an activity and return its handle. + + At least one of ``schedule_to_close_timeout`` or ``start_to_close_timeout`` + must be present. + + Args: + activity: Activity name or function reference. + arg: Single argument to the activity. + args: Multiple arguments to the activity. Cannot be set if arg is. + task_queue: Task queue to run the activity on. Defaults to the current + workflow's task queue. + result_type: For string activities, this can set the specific result + type hint to deserialize into. + schedule_to_close_timeout: Max amount of time the activity can take from + first being scheduled to being completed before it times out. This + is inclusive of all retries. + schedule_to_start_timeout: Max amount of time the activity can take to + be started from first being scheduled. + start_to_close_timeout: Max amount of time a single activity run can + take from when it starts to when it completes. This is per retry. + heartbeat_timeout: How frequently an activity must invoke heartbeat + while running before it is considered timed out. + retry_policy: How an activity is retried on failure. If unset, a + server-defined default is used. Set maximum attempts to 1 to disable + retries. 
+ cancellation_type: How the activity is treated when it is cancelled from + the workflow. + activity_id: Optional unique identifier for the activity. This is an + advanced setting that should not be set unless users are sure they + need to. Contact Temporal before setting this value. + versioning_intent: When using the Worker Versioning feature, specifies whether this Activity + should run on a worker with a compatible Build Id or not. + Deprecated: Use Worker Deployment versioning instead. + summary: A single-line fixed summary for this activity that may appear in UI/CLI. + This can be in single-line Temporal markdown format. + priority: Priority of the activity. + + Returns: + An activity handle to the activity which is an async task. + """ + return _Runtime.current().workflow_start_activity( + activity, + *temporalio.common._arg_or_args(arg, args), + task_queue=task_queue, + result_type=result_type, + schedule_to_close_timeout=schedule_to_close_timeout, + schedule_to_start_timeout=schedule_to_start_timeout, + start_to_close_timeout=start_to_close_timeout, + heartbeat_timeout=heartbeat_timeout, + retry_policy=retry_policy, + cancellation_type=cancellation_type, + activity_id=activity_id, + versioning_intent=versioning_intent, + summary=summary, + priority=priority, + ) + + +# Overload for async no-param activity +@overload +async def execute_activity( + activity: CallableAsyncNoParam[ReturnType], + *, + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = 
temporalio.common.Priority.default, +) -> ReturnType: ... + + +# Overload for sync no-param activity +@overload +async def execute_activity( + activity: CallableSyncNoParam[ReturnType], + *, + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ReturnType: ... + + +# Overload for async single-param activity +@overload +async def execute_activity( + activity: CallableAsyncSingleParam[ParamType, ReturnType], + arg: ParamType, + *, + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ReturnType: ... 
+ + +# Overload for sync single-param activity +@overload +async def execute_activity( + activity: CallableSyncSingleParam[ParamType, ReturnType], + arg: ParamType, + *, + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ReturnType: ... + + +# Overload for async multi-param activity +@overload +async def execute_activity( + activity: Callable[..., Awaitable[ReturnType]], + *, + args: Sequence[Any], + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ReturnType: ... 
+ + +# Overload for sync multi-param activity +@overload +async def execute_activity( + activity: Callable[..., ReturnType], + *, + args: Sequence[Any], + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ReturnType: ... + + +# Overload for string-name activity +@overload +async def execute_activity( + activity: str, + arg: Any = temporalio.common._arg_unset, + *, + args: Sequence[Any] = [], + task_queue: str | None = None, + result_type: type | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> Any: ... 
+ + +async def execute_activity( + activity: Any, + arg: Any = temporalio.common._arg_unset, + *, + args: Sequence[Any] = [], + task_queue: str | None = None, + result_type: type | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> Any: + """Start an activity and wait for completion. + + This is a shortcut for ``await`` :py:meth:`start_activity`. + """ + # We call the runtime directly instead of top-level start_activity to ensure + # we don't miss new parameters + return await _Runtime.current().workflow_start_activity( + activity, + *temporalio.common._arg_or_args(arg, args), + task_queue=task_queue, + result_type=result_type, + schedule_to_close_timeout=schedule_to_close_timeout, + schedule_to_start_timeout=schedule_to_start_timeout, + start_to_close_timeout=start_to_close_timeout, + heartbeat_timeout=heartbeat_timeout, + retry_policy=retry_policy, + cancellation_type=cancellation_type, + activity_id=activity_id, + versioning_intent=versioning_intent, + summary=summary, + priority=priority, + ) + + +# Overload for async no-param activity +@overload +def start_activity_class( + activity: type[CallableAsyncNoParam[ReturnType]], + *, + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: 
ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ActivityHandle[ReturnType]: ... + + +# Overload for sync no-param activity +@overload +def start_activity_class( + activity: type[CallableSyncNoParam[ReturnType]], + *, + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ActivityHandle[ReturnType]: ... + + +# Overload for async single-param activity +@overload +def start_activity_class( + activity: type[CallableAsyncSingleParam[ParamType, ReturnType]], + arg: ParamType, + *, + task_queue: str | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + heartbeat_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL, + activity_id: str | None = None, + versioning_intent: VersioningIntent | None = None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ActivityHandle[ReturnType]: ... 
# Overload for sync single-param activity
@overload
def start_activity_class(
    activity: type[CallableSyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for async multi-param activity
@overload
def start_activity_class(
    activity: type[Callable[..., Awaitable[ReturnType]]],  # type: ignore[reportOverlappingOverload]
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync multi-param activity
@overload
def start_activity_class(  # type: ignore[reportOverlappingOverload]
    activity: type[Callable[..., ReturnType]],  # type: ignore[reportOverlappingOverload]
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


def start_activity_class(
    activity: type[Callable],  # type: ignore[reportOverlappingOverload]
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[Any]:
    """Start an activity from a callable class.

    See :py:meth:`start_activity` for parameter and return details.
    """
    # Call the workflow runtime directly rather than delegating to the
    # top-level start_activity so every keyword is forwarded explicitly and
    # newly added parameters cannot be silently dropped.
    return _Runtime.current().workflow_start_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        task_queue=task_queue,
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        heartbeat_timeout=heartbeat_timeout,
        retry_policy=retry_policy,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        versioning_intent=versioning_intent,
        summary=summary,
        priority=priority,
    )


# Overload for async no-param activity
@overload
async def execute_activity_class(
    activity: type[CallableAsyncNoParam[ReturnType]],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...
# Overload for sync no-param activity
@overload
async def execute_activity_class(
    activity: type[CallableSyncNoParam[ReturnType]],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for async single-param activity
@overload
async def execute_activity_class(
    activity: type[CallableAsyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for sync single-param activity
@overload
async def execute_activity_class(
    activity: type[CallableSyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for async multi-param activity
@overload
async def execute_activity_class(
    activity: type[Callable[..., Awaitable[ReturnType]]],  # type: ignore[reportOverlappingOverload]
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for sync multi-param activity
@overload
async def execute_activity_class(
    activity: type[Callable[..., ReturnType]],  # type: ignore[reportOverlappingOverload]
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


async def execute_activity_class(
    activity: type[Callable],  # type: ignore[reportOverlappingOverload]
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> Any:
    """Start an activity from a callable class and wait for completion.

    This is a shortcut for ``await`` :py:meth:`start_activity_class`.
    """
    # Call the runtime directly (instead of start_activity_class) so every
    # keyword is forwarded explicitly; awaiting the returned handle yields the
    # activity result.
    return await _Runtime.current().workflow_start_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        task_queue=task_queue,
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        heartbeat_timeout=heartbeat_timeout,
        retry_policy=retry_policy,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        versioning_intent=versioning_intent,
        summary=summary,
        priority=priority,
    )


# Overload for async no-param activity
@overload
def start_activity_method(
    activity: MethodAsyncNoParam[SelfType, ReturnType],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...
# Overload for sync no-param activity
@overload
def start_activity_method(
    activity: MethodSyncNoParam[SelfType, ReturnType],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for async single-param activity
@overload
def start_activity_method(
    activity: MethodAsyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync single-param activity
@overload
def start_activity_method(
    activity: MethodSyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for async multi-param activity
@overload
def start_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync multi-param activity
@overload
def start_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], ReturnType],
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[ReturnType]: ...


def start_activity_method(
    activity: Callable,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ActivityHandle[Any]:
    """Start an activity from a method.

    See :py:meth:`start_activity` for parameter and return details.
    """
    # Call the workflow runtime directly so every keyword is forwarded
    # explicitly and newly added parameters cannot be silently dropped.
    return _Runtime.current().workflow_start_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        task_queue=task_queue,
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        heartbeat_timeout=heartbeat_timeout,
        retry_policy=retry_policy,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        versioning_intent=versioning_intent,
        summary=summary,
        priority=priority,
    )


# Overload for async no-param activity
@overload
async def execute_activity_method(
    activity: MethodAsyncNoParam[SelfType, ReturnType],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...
# Overload for sync no-param activity
@overload
async def execute_activity_method(
    activity: MethodSyncNoParam[SelfType, ReturnType],
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for async single-param activity
@overload
async def execute_activity_method(
    activity: MethodAsyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for sync single-param activity
@overload
async def execute_activity_method(
    activity: MethodSyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for async multi-param activity
@overload
async def execute_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


# Overload for sync multi-param activity
@overload
async def execute_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], ReturnType],
    *,
    args: Sequence[Any],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...


async def execute_activity_method(
    activity: Callable,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    task_queue: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    heartbeat_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    versioning_intent: VersioningIntent | None = None,
    summary: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> Any:
    """Start an activity from a method and wait for completion.

    This is a shortcut for ``await`` :py:meth:`start_activity_method`.
    """
    # We call the runtime directly instead of top-level start_activity to
    # ensure we don't miss new parameters
    return await _Runtime.current().workflow_start_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        task_queue=task_queue,
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        heartbeat_timeout=heartbeat_timeout,
        retry_policy=retry_policy,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        versioning_intent=versioning_intent,
        summary=summary,
        priority=priority,
    )


class LocalActivityConfig(TypedDict, total=False):
    """TypedDict of config that can be used for :py:func:`start_local_activity`
    and :py:func:`execute_local_activity`.

    Each key mirrors the same-named keyword argument of those functions; with
    ``total=False`` every key is optional, so a partial config can be splatted
    as ``**config``. Note there is deliberately no ``task_queue``,
    ``versioning_intent``, or ``priority`` here — local activities run on the
    workflow's own worker.
    """

    schedule_to_close_timeout: timedelta | None
    schedule_to_start_timeout: timedelta | None
    start_to_close_timeout: timedelta | None
    retry_policy: temporalio.common.RetryPolicy | None
    local_retry_threshold: timedelta | None
    cancellation_type: ActivityCancellationType
    activity_id: str | None
    summary: str | None


# Overload for async no-param activity
@overload
def start_local_activity(
    activity: CallableAsyncNoParam[ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...
# Overload for sync no-param activity
@overload
def start_local_activity(
    activity: CallableSyncNoParam[ReturnType],
    *,
    # NOTE(review): activity_id is listed first here, unlike the sibling
    # overloads where it follows cancellation_type. Harmless (all are
    # keyword-only) but inconsistent — confirm before normalizing.
    activity_id: str | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for async single-param activity
@overload
def start_local_activity(
    activity: CallableAsyncSingleParam[ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync single-param activity
@overload
def start_local_activity(
    activity: CallableSyncSingleParam[ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for async multi-param activity
@overload
def start_local_activity(
    activity: Callable[..., Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync multi-param activity
@overload
def start_local_activity(
    activity: Callable[..., ReturnType],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for string-name activity
@overload
def start_local_activity(
    activity: str,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    result_type: type | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[Any]: ...


def start_local_activity(
    activity: Any,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    result_type: type | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[Any]:
    """Start a local activity and return its handle.

    At least one of ``schedule_to_close_timeout`` or ``start_to_close_timeout``
    must be present.

    Args:
        activity: Activity name or function reference.
        arg: Single argument to the activity.
        args: Multiple arguments to the activity. Cannot be set if arg is.
        result_type: For string activities, this can set the specific result
            type hint to deserialize into.
        schedule_to_close_timeout: Max amount of time the activity can take from
            first being scheduled to being completed before it times out. This
            is inclusive of all retries.
        schedule_to_start_timeout: Max amount of time the activity can take to
            be started from first being scheduled.
        start_to_close_timeout: Max amount of time a single activity run can
            take from when it starts to when it completes. This is per retry.
        retry_policy: How an activity is retried on failure. If unset, an
            SDK-defined default is used. Set maximum attempts to 1 to disable
            retries.
        local_retry_threshold: Backoff threshold beyond which retries are
            presumably deferred to the server as a timer rather than retried
            locally — confirm exact semantics against the runtime.
        cancellation_type: How the activity is treated when it is cancelled from
            the workflow.
        activity_id: Optional unique identifier for the activity. This is an
            advanced setting that should not be set unless users are sure they
            need to. Contact Temporal before setting this value.
        summary: Optional summary for the activity.

    Returns:
        An activity handle to the activity which is an async task.
    """
    return _Runtime.current().workflow_start_local_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        result_type=result_type,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        retry_policy=retry_policy,
        local_retry_threshold=local_retry_threshold,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        summary=summary,
    )


# Overload for async no-param activity
@overload
async def execute_local_activity(
    activity: CallableAsyncNoParam[ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync no-param activity
@overload
async def execute_local_activity(
    activity: CallableSyncNoParam[ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...
# Overload for async single-param activity
@overload
async def execute_local_activity(
    activity: CallableAsyncSingleParam[ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync single-param activity
@overload
async def execute_local_activity(
    activity: CallableSyncSingleParam[ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for async multi-param activity
@overload
async def execute_local_activity(
    activity: Callable[..., Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync multi-param activity
@overload
async def execute_local_activity(
    activity: Callable[..., ReturnType],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for string-name activity
@overload
async def execute_local_activity(
    activity: str,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    result_type: type | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> Any: ...


async def execute_local_activity(
    activity: Any,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    result_type: type | None = None,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> Any:
    """Start a local activity and wait for completion.

    This is a shortcut for ``await`` :py:meth:`start_local_activity`.
    """
    # We call the runtime directly instead of top-level start_local_activity to
    # ensure we don't miss new parameters
    return await _Runtime.current().workflow_start_local_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        result_type=result_type,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        retry_policy=retry_policy,
        local_retry_threshold=local_retry_threshold,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        summary=summary,
    )


# Overload for async no-param activity
# NOTE(review): unlike start_local_activity, the start_local_activity_class
# overloads below do not expose ``summary`` — confirm whether intentional.
@overload
def start_local_activity_class(
    activity: type[CallableAsyncNoParam[ReturnType]],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync no-param activity
@overload
def start_local_activity_class(
    activity: type[CallableSyncNoParam[ReturnType]],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for async single-param activity
@overload
def start_local_activity_class(
    activity: type[CallableAsyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    # Added: the start_local_activity_class implementation accepts and
    # forwards ``summary``; the overloads previously omitted it, so typed
    # callers could not pass it (sibling overload sets all expose it).
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync single-param activity
@overload
def start_local_activity_class(
    activity: type[CallableSyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,  # added to match the implementation signature
) -> ActivityHandle[ReturnType]: ...


# Overload for async multi-param activity
@overload
def start_local_activity_class(
    activity: type[Callable[..., Awaitable[ReturnType]]],  # type: ignore[reportInvalidTypeForm]
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,  # added to match the implementation signature
) -> ActivityHandle[ReturnType]: ...


# Overload for sync multi-param activity
@overload
def start_local_activity_class(  # type: ignore[reportOverlappingOverload]
    activity: type[Callable[..., ReturnType]],  # type: ignore[reportInvalidTypeForm]
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    # Added: the implementation directly below accepts and forwards
    # ``summary``; omitting it from this overload hid the parameter from
    # typed callers.
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


def start_local_activity_class(
    activity: type[Callable],  # type: ignore[reportInvalidTypeForm]
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[Any]:
    """Start a local activity from a callable class.

    See :py:meth:`start_local_activity` for parameter and return details.

    Returns:
        A handle that can be awaited for the activity result.
    """
    return _Runtime.current().workflow_start_local_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        retry_policy=retry_policy,
        local_retry_threshold=local_retry_threshold,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        summary=summary,
    )


# Overload for async no-param activity
@overload
async def execute_local_activity_class(
    activity: type[CallableAsyncNoParam[ReturnType]],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync no-param activity
@overload
async def execute_local_activity_class(
    activity: type[CallableSyncNoParam[ReturnType]],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for async single-param activity.
# Typed overload of execute_local_activity_class: preserves the activity's
# parameter and return types for static checkers.
@overload
async def execute_local_activity_class(
    activity: type[CallableAsyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync single-param activity
@overload
async def execute_local_activity_class(
    activity: type[CallableSyncSingleParam[ParamType, ReturnType]],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for async multi-param activity (arguments passed via ``args``)
@overload
async def execute_local_activity_class(  # type: ignore[reportOverlappingOverload]
    activity: type[Callable[..., Awaitable[ReturnType]]],  # type: ignore[reportInvalidTypeForm]
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync multi-param activity
@overload
async def execute_local_activity_class(
    activity: type[Callable[..., ReturnType]],  # type: ignore[reportInvalidTypeForm]
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


async def execute_local_activity_class(
    activity: type[Callable],  # type: ignore[reportInvalidTypeForm]
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> Any:
    """Start a local activity from a callable class and wait for completion.

    This is a shortcut for ``await`` :py:meth:`start_local_activity_class`.
    """
    # Go through the runtime rather than the top-level start_local_activity
    # helper so that newly added parameters cannot be silently dropped here.
    runtime = _Runtime.current()
    call_args = temporalio.common._arg_or_args(arg, args)
    handle = runtime.workflow_start_local_activity(
        activity,
        *call_args,
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        retry_policy=retry_policy,
        local_retry_threshold=local_retry_threshold,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        summary=summary,
    )
    return await handle


# Overload for async no-param activity
@overload
def start_local_activity_method(
    activity: MethodAsyncNoParam[SelfType, ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync no-param activity
@overload
def start_local_activity_method(
    activity: MethodSyncNoParam[SelfType, ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for async single-param activity.
# Typed overload of start_local_activity_method: preserves the method's
# parameter and return types for static checkers.
@overload
def start_local_activity_method(
    activity: MethodAsyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync single-param activity
@overload
def start_local_activity_method(
    activity: MethodSyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for async multi-param activity (arguments passed via ``args``)
@overload
def start_local_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


# Overload for sync multi-param activity
@overload
def start_local_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], ReturnType],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[ReturnType]: ...


def start_local_activity_method(
    activity: Callable,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ActivityHandle[Any]:
    """Start a local activity from a method.

    See :py:meth:`start_local_activity` for parameter and return details.
    """
    # Delegate straight to the runtime so every parameter is forwarded
    # explicitly; nothing is routed through the top-level helper.
    runtime = _Runtime.current()
    call_args = temporalio.common._arg_or_args(arg, args)
    return runtime.workflow_start_local_activity(
        activity,
        *call_args,
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        retry_policy=retry_policy,
        local_retry_threshold=local_retry_threshold,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        summary=summary,
    )


# Overload for async no-param activity
@overload
async def execute_local_activity_method(
    activity: MethodAsyncNoParam[SelfType, ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync no-param activity
@overload
async def execute_local_activity_method(
    activity: MethodSyncNoParam[SelfType, ReturnType],
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for async single-param activity.
# Typed overload of execute_local_activity_method: preserves the method's
# parameter and return types for static checkers.
@overload
async def execute_local_activity_method(
    activity: MethodAsyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync single-param activity
@overload
async def execute_local_activity_method(
    activity: MethodSyncSingleParam[SelfType, ParamType, ReturnType],
    arg: ParamType,
    *,
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for async multi-param activity (arguments passed via ``args``)
@overload
async def execute_local_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[ReturnType]],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


# Overload for sync multi-param activity
@overload
async def execute_local_activity_method(
    activity: Callable[Concatenate[SelfType, MultiParamSpec], ReturnType],
    *,
    args: Sequence[Any],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> ReturnType: ...


async def execute_local_activity_method(
    activity: Callable,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    schedule_to_close_timeout: timedelta | None = None,
    schedule_to_start_timeout: timedelta | None = None,
    start_to_close_timeout: timedelta | None = None,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    local_retry_threshold: timedelta | None = None,
    cancellation_type: ActivityCancellationType = ActivityCancellationType.TRY_CANCEL,
    activity_id: str | None = None,
    summary: str | None = None,
) -> Any:
    """Start a local activity from a method and wait for completion.

    This is a shortcut for ``await`` :py:meth:`start_local_activity_method`.
    """
    # We call the runtime directly instead of top-level start_local_activity to
    # ensure we don't miss new parameters
    return await _Runtime.current().workflow_start_local_activity(
        activity,
        *temporalio.common._arg_or_args(arg, args),
        result_type=None,
        schedule_to_close_timeout=schedule_to_close_timeout,
        schedule_to_start_timeout=schedule_to_start_timeout,
        start_to_close_timeout=start_to_close_timeout,
        retry_policy=retry_policy,
        local_retry_threshold=local_retry_threshold,
        cancellation_type=cancellation_type,
        activity_id=activity_id,
        summary=summary,
    )
diff --git a/temporalio/workflow/_asyncio.py b/temporalio/workflow/_asyncio.py
new file mode 100644
index 000000000..5ca7e66d8
--- /dev/null
+++ b/temporalio/workflow/_asyncio.py
@@ -0,0 +1,180 @@
from __future__ import annotations

import asyncio
from collections.abc import Awaitable, Iterable, Iterator
from typing import TYPE_CHECKING, Any, TypeVar, overload

from ..types import AnyType

__all__ = [
    "as_completed",
    "wait",
]


def as_completed(
    fs: Iterable[Awaitable[AnyType]], *, timeout: float | None = None
) -> Iterator[Awaitable[AnyType]]:
    """Return an iterator whose values are coroutines.

    This is a deterministic version of :py:func:`asyncio.as_completed`. This
    function should be used instead of that one in workflows.
    """
    # Taken almost verbatim from
    # https://github.com/python/cpython/blob/v3.12.3/Lib/asyncio/tasks.py#L584
    # but the "set" is changed out for a "list" and fixed up some typing/format

    if asyncio.isfuture(fs) or asyncio.iscoroutine(fs):
        raise TypeError(f"expect an iterable of futures, not {type(fs).__name__}")

    done: asyncio.Queue[asyncio.Future | None] = asyncio.Queue()

    loop = asyncio.get_event_loop()
    # A list (rather than CPython's set) keeps callback registration and
    # completion handling in a deterministic order across replays.
    todo: list[asyncio.Future] = [asyncio.ensure_future(f, loop=loop) for f in list(fs)]
    timeout_handle = None

    def _on_timeout():
        for f in todo:
            f.remove_done_callback(_on_completion)
            done.put_nowait(None)  # Queue a dummy value for _wait_for_one().
        todo.clear()  # Can't do todo.remove(f) in the loop.

    def _on_completion(f):  # type:ignore[reportMissingParameterType]
        if not todo:
            return  # _on_timeout() was here first.
        todo.remove(f)
        done.put_nowait(f)
        if not todo and timeout_handle is not None:
            timeout_handle.cancel()

    async def _wait_for_one():
        f = await done.get()
        if f is None:
            # Dummy value from _on_timeout().
            raise asyncio.TimeoutError
        return f.result()  # May raise f.exception().

    for f in todo:
        f.add_done_callback(_on_completion)
    if todo and timeout is not None:
        timeout_handle = loop.call_later(timeout, _on_timeout)
    for _ in range(len(todo)):
        yield _wait_for_one()


# NOTE(review): the runtime branch avoids subscripting asyncio.Future at
# runtime — presumably for compatibility with older Python versions; confirm.
if TYPE_CHECKING:
    _FT = TypeVar("_FT", bound=asyncio.Future[Any])
else:
    _FT = TypeVar("_FT", bound=asyncio.Future)


@overload
async def wait(  # type: ignore[misc]
    fs: Iterable[_FT],
    *,
    timeout: float | None = None,
    return_when: str = asyncio.ALL_COMPLETED,
) -> tuple[list[_FT], list[_FT]]: ...


@overload
async def wait(
    fs: Iterable[asyncio.Task[AnyType]],
    *,
    timeout: float | None = None,
    return_when: str = asyncio.ALL_COMPLETED,
) -> tuple[list[asyncio.Task[AnyType]], list[asyncio.Task[AnyType]]]: ...


async def wait(
    fs: Iterable,
    *,
    timeout: float | None = None,
    return_when: str = asyncio.ALL_COMPLETED,
) -> tuple:
    """Wait for the Futures or Tasks given by fs to complete.

    This is a deterministic version of :py:func:`asyncio.wait`. This function
    should be used instead of that one in workflows.
    """
    # Taken almost verbatim from
    # https://github.com/python/cpython/blob/v3.12.3/Lib/asyncio/tasks.py#L435
    # but the "set" is changed out for a "list" and fixed up some typing/format

    if asyncio.isfuture(fs) or asyncio.iscoroutine(fs):
        raise TypeError(f"Expect an iterable of Tasks/Futures, not {type(fs).__name__}")
    if not fs:
        raise ValueError("Sequence of Tasks/Futures must not be empty.")
    if return_when not in (
        asyncio.FIRST_COMPLETED,
        asyncio.FIRST_EXCEPTION,
        asyncio.ALL_COMPLETED,
    ):
        raise ValueError(f"Invalid return_when value: {return_when}")

    fs = list(fs)

    if any(asyncio.iscoroutine(f) for f in fs):
        raise TypeError("Passing coroutines is forbidden, use tasks explicitly.")

    loop = asyncio.get_running_loop()
    return await _wait(fs, timeout, return_when, loop)


async def _wait(
    fs: Iterable[asyncio.Future | asyncio.Task],
    timeout: float | None,
    return_when: str,
    loop: asyncio.AbstractEventLoop,
) -> tuple[list, list]:
    # Taken almost verbatim from
    # https://github.com/python/cpython/blob/v3.12.3/Lib/asyncio/tasks.py#L522
    # but the "set" is changed out for a "list" and fixed up some typing/format

    assert fs, "Sequence of Tasks/Futures must not be empty."
    waiter = loop.create_future()
    timeout_handle = None
    if timeout is not None:
        timeout_handle = loop.call_later(timeout, _release_waiter, waiter)
    counter = len(fs)  # type: ignore[arg-type]

    def _on_completion(f):  # type:ignore[reportMissingParameterType]
        nonlocal counter
        counter -= 1
        # Note: ``and`` binds tighter than ``or`` here, so the last clause
        # only applies in FIRST_EXCEPTION mode (matches the CPython original).
        if (
            counter <= 0
            or return_when == asyncio.FIRST_COMPLETED
            or return_when == asyncio.FIRST_EXCEPTION
            and (not f.cancelled() and f.exception() is not None)
        ):
            if timeout_handle is not None:
                timeout_handle.cancel()
            if not waiter.done():
                waiter.set_result(None)

    for f in fs:
        f.add_done_callback(_on_completion)

    try:
        await waiter
    finally:
        if timeout_handle is not None:
            timeout_handle.cancel()
        for f in fs:
            f.remove_done_callback(_on_completion)

    # Lists (rather than CPython's sets) keep the done/pending ordering
    # deterministic, matching the input order.
    done, pending = [], []
    for f in fs:
        if f.done():
            done.append(f)
        else:
            pending.append(f)
    return done, pending


def _release_waiter(waiter: asyncio.Future[Any], *_args: Any) -> None:
    # Taken almost verbatim from
    # https://github.com/python/cpython/blob/v3.12.3/Lib/asyncio/tasks.py#L467

    if not waiter.done():
        waiter.set_result(None)
diff --git a/temporalio/workflow/_context.py b/temporalio/workflow/_context.py
new file mode 100644
index 000000000..5c3f22cc9
--- /dev/null
+++ b/temporalio/workflow/_context.py
@@ -0,0 +1,917 @@
from __future__ import annotations

import asyncio
import contextvars
import uuid
from abc import ABC, abstractmethod
from collections.abc import Callable, Mapping, Sequence
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from random import Random
from typing import TYPE_CHECKING, Any, NoReturn, overload

import nexusrpc
from nexusrpc import InputT, OutputT

import temporalio.api.common.v1
import temporalio.common
import temporalio.converter

from ..types import AnyType, ParamType
from ._exceptions import _NotInWorkflowEventLoopError

if TYPE_CHECKING:
    from ._activities import ActivityCancellationType, ActivityHandle
    from ._exceptions import ContinueAsNewVersioningBehavior, VersioningIntent
    from ._nexus import NexusOperationCancellationType, NexusOperationHandle
    from ._workflow_ops import (
        ChildWorkflowCancellationType,
        ChildWorkflowHandle,
        ExternalWorkflowHandle,
        ParentClosePolicy,
    )

__all__ = [
    "Info",
    "ParentInfo",
    "RootInfo",
    "UpdateInfo",
    "current_update_info",
    "deprecate_patch",
    "extern_functions",
    "get_current_details",
    "get_last_completion_result",
    "get_last_failure",
    "has_last_completion_result",
    "in_workflow",
    "info",
    "instance",
    "is_failure_exception",
    "memo",
    "memo_value",
    "metric_meter",
    "new_random",
    "now",
    "patched",
    "payload_converter",
    "random",
    "random_seed",
    "register_random_seed_callback",
    "set_current_details",
    "sleep",
    "time",
    "time_ns",
    "upsert_memo",
    "upsert_search_attributes",
    "uuid4",
    "wait_condition",
]


@dataclass(frozen=True)
class Info:
    """Information about the running workflow.

    Retrieved inside a workflow via :py:func:`info`. This object is immutable
    with the exception of the :py:attr:`search_attributes` and
    :py:attr:`typed_search_attributes` which is updated on
    :py:func:`upsert_search_attributes`.

    Note, required fields may be added here in future versions. This class
    should never be constructed by users.
    """

    attempt: int
    continued_run_id: str | None
    cron_schedule: str | None
    execution_timeout: timedelta | None
    first_execution_run_id: str
    headers: Mapping[str, temporalio.api.common.v1.Payload]
    namespace: str
    parent: ParentInfo | None
    root: RootInfo | None
    priority: temporalio.common.Priority
    """The priority of this workflow execution.

    If not set, or this server predates priorities,
    then returns a default instance."""
    raw_memo: Mapping[str, temporalio.api.common.v1.Payload]
    retry_policy: temporalio.common.RetryPolicy | None
    run_id: str
    run_timeout: timedelta | None

    search_attributes: temporalio.common.SearchAttributes
    """Search attributes for the workflow.

    .. deprecated::
        Use :py:attr:`typed_search_attributes` instead.
    """

    start_time: datetime
    """The start time of the first task executed by the workflow."""

    task_queue: str
    task_timeout: timedelta

    typed_search_attributes: temporalio.common.TypedSearchAttributes
    """Search attributes for the workflow.

    Note, this may have invalid values or be missing values if passing the
    deprecated form of dictionary attributes to
    :py:meth:`upsert_search_attributes`.
    """

    workflow_id: str

    workflow_start_time: datetime
    """The start time of the workflow based on the workflow initialization."""

    workflow_type: str

    def _logger_details(self) -> Mapping[str, Any]:
        # Fields appended to every log record emitted by the workflow logger.
        return {
            # TODO(cretz): worker ID?
            "attempt": self.attempt,
            "namespace": self.namespace,
            "run_id": self.run_id,
            "task_queue": self.task_queue,
            "workflow_id": self.workflow_id,
            "workflow_type": self.workflow_type,
        }

    def get_current_build_id(self) -> str:
        """Get the Build ID of the worker which executed the current Workflow Task.

        May be undefined if the task was completed by a worker without a Build ID. If this worker is
        the one executing this task for the first time and has a Build ID set, then its ID will be
        used. This value may change over the lifetime of the workflow run, but is deterministic and
        safe to use for branching.

        .. deprecated::
            Use get_current_deployment_version instead.
        """
        return _Runtime.current().workflow_get_current_build_id()

    def get_current_deployment_version(
        self,
    ) -> temporalio.common.WorkerDeploymentVersion | None:
        """Get the deployment version of the worker which executed the current Workflow Task.

        May be None if the task was completed by a worker without a deployment version or build
        id. If this worker is the one executing this task for the first time and has a deployment
        version set, then its ID will be used. This value may change over the lifetime of the
        workflow run, but is deterministic and safe to use for branching.
        """
        return _Runtime.current().workflow_get_current_deployment_version()

    def get_current_history_length(self) -> int:
        """Get the current number of events in history.

        Note, this value may not be up to date if accessed inside a query.

        Returns:
            Current number of events in history (up until the current task).
        """
        return _Runtime.current().workflow_get_current_history_length()

    def get_current_history_size(self) -> int:
        """Get the current byte size of history.

        Note, this value may not be up to date if accessed inside a query.

        Returns:
            Current byte-size of history (up until the current task).
        """
        return _Runtime.current().workflow_get_current_history_size()

    def is_continue_as_new_suggested(self) -> bool:
        """Get whether or not continue as new is suggested.

        Note, this value may not be up to date if accessed inside a query.

        Returns:
            True if the server is configured to suggest continue as new and it
            is suggested.
        """
        return _Runtime.current().workflow_is_continue_as_new_suggested()

    def is_target_worker_deployment_version_changed(self) -> bool:
        """Check whether the target worker deployment version has changed.

        Note: Upgrade-on-Continue-as-New is currently experimental.

        Returns:
            True if the target worker deployment version has changed.
        """
        return _Runtime.current().workflow_is_target_worker_deployment_version_changed()


@dataclass(frozen=True)
class ParentInfo:
    """Information about the parent workflow."""

    namespace: str
    run_id: str
    workflow_id: str


@dataclass(frozen=True)
class RootInfo:
    """Information about the root workflow."""

    run_id: str
    workflow_id: str


@dataclass(frozen=True)
class UpdateInfo:
    """Information about a workflow update."""

    id: str
    """Update ID."""

    name: str
    """Update type name."""

    @property
    def _logger_details(self) -> Mapping[str, Any]:
        """Data to be included in string appended to default logging output."""
        return {
            "update_id": self.id,
            "update_name": self.name,
        }


# Abstract interface between the top-level workflow functions in this package
# and the worker's sandboxed/unsandboxed workflow runner. The active instance
# is stashed on the running asyncio event loop (see set_on_loop below).
class _Runtime(ABC):
    @staticmethod
    def current() -> _Runtime:
        # Raises if called from outside a workflow event loop.
        loop = _Runtime.maybe_current()
        if not loop:
            raise _NotInWorkflowEventLoopError("Not in workflow event loop")
        return loop

    @staticmethod
    def maybe_current() -> _Runtime | None:
        # Returns None both when no loop is running and when the running loop
        # has no workflow runtime attached.
        try:
            return getattr(
                asyncio.get_running_loop(), "__temporal_workflow_runtime", None
            )
        except RuntimeError:
            return None

    @staticmethod
    def set_on_loop(loop: asyncio.AbstractEventLoop, runtime: _Runtime | None) -> None:
        # Passing runtime=None detaches any previously attached runtime.
        if runtime:
            setattr(loop, "__temporal_workflow_runtime", runtime)
        elif hasattr(loop, "__temporal_workflow_runtime"):
            delattr(loop, "__temporal_workflow_runtime")

    def __init__(self) -> None:
        super().__init__()
        self._logger_details: Mapping[str, Any] | None = None

    @property
    def logger_details(self) -> Mapping[str, Any]:
        # Lazily computed once from workflow_info() and cached.
        if self._logger_details is None:
            self._logger_details = self.workflow_info()._logger_details()
        return self._logger_details

    @abstractmethod
    def workflow_all_handlers_finished(self) -> bool: ...

    @abstractmethod
    def workflow_continue_as_new(
        self,
        *args: Any,
        workflow: None | Callable | str,
        task_queue: str | None,
        run_timeout: timedelta | None,
        task_timeout: timedelta | None,
        retry_policy: temporalio.common.RetryPolicy | None,
        memo: Mapping[str, Any] | None,
        search_attributes: None
        | (
            temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes
        ),
        versioning_intent: VersioningIntent | None,
        initial_versioning_behavior: ContinueAsNewVersioningBehavior | None,
    ) -> NoReturn: ...

    @abstractmethod
    def workflow_extern_functions(self) -> Mapping[str, Callable]: ...

    @abstractmethod
    def workflow_get_current_build_id(self) -> str: ...

    @abstractmethod
    def workflow_get_current_deployment_version(
        self,
    ) -> temporalio.common.WorkerDeploymentVersion | None: ...

    @abstractmethod
    def workflow_get_current_history_length(self) -> int: ...

    @abstractmethod
    def workflow_get_current_history_size(self) -> int: ...

    @abstractmethod
    def workflow_get_external_workflow_handle(
        self, id: str, *, run_id: str | None
    ) -> ExternalWorkflowHandle[Any]: ...

    @abstractmethod
    def workflow_get_query_handler(self, name: str | None) -> Callable | None: ...

    @abstractmethod
    def workflow_get_signal_handler(self, name: str | None) -> Callable | None: ...

    @abstractmethod
    def workflow_get_update_handler(self, name: str | None) -> Callable | None: ...

    @abstractmethod
    def workflow_get_update_validator(self, name: str | None) -> Callable | None: ...

    @abstractmethod
    def workflow_info(self) -> Info: ...

    @abstractmethod
    def workflow_instance(self) -> Any: ...

    @abstractmethod
    def workflow_is_continue_as_new_suggested(self) -> bool: ...

    @abstractmethod
    def workflow_is_target_worker_deployment_version_changed(self) -> bool: ...

    @abstractmethod
    def workflow_is_replaying(self) -> bool: ...
+ + @abstractmethod + def workflow_is_replaying_history_events(self) -> bool: ... + + @abstractmethod + def workflow_is_read_only(self) -> bool: ... + + @abstractmethod + def workflow_memo(self) -> Mapping[str, Any]: ... + + @abstractmethod + def workflow_memo_value( + self, key: str, default: Any, *, type_hint: type | None + ) -> Any: ... + + @abstractmethod + def workflow_upsert_memo(self, updates: Mapping[str, Any]) -> None: ... + + @abstractmethod + def workflow_metric_meter(self) -> temporalio.common.MetricMeter: ... + + @abstractmethod + def workflow_patch(self, id: str, *, deprecated: bool) -> bool: ... + + @abstractmethod + def workflow_payload_converter(self) -> temporalio.converter.PayloadConverter: ... + + @abstractmethod + def workflow_random(self) -> Random: ... + + @abstractmethod + def workflow_set_query_handler( + self, name: str | None, handler: Callable | None + ) -> None: ... + + @abstractmethod + def workflow_set_signal_handler( + self, name: str | None, handler: Callable | None + ) -> None: ... + + @abstractmethod + def workflow_set_update_handler( + self, + name: str | None, + handler: Callable | None, + validator: Callable | None, + ) -> None: ... + + @abstractmethod + def workflow_start_activity( + self, + activity: Any, + *args: Any, + task_queue: str | None, + result_type: type | None, + schedule_to_close_timeout: timedelta | None, + schedule_to_start_timeout: timedelta | None, + start_to_close_timeout: timedelta | None, + heartbeat_timeout: timedelta | None, + retry_policy: temporalio.common.RetryPolicy | None, + cancellation_type: ActivityCancellationType, + activity_id: str | None, + versioning_intent: VersioningIntent | None, + summary: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, + ) -> ActivityHandle[Any]: ... 
+ + @abstractmethod + async def workflow_start_child_workflow( + self, + workflow: Any, + *args: Any, + id: str, + task_queue: str | None, + result_type: type | None, + cancellation_type: ChildWorkflowCancellationType, + parent_close_policy: ParentClosePolicy, + execution_timeout: timedelta | None, + run_timeout: timedelta | None, + task_timeout: timedelta | None, + id_reuse_policy: temporalio.common.WorkflowIDReusePolicy, + retry_policy: temporalio.common.RetryPolicy | None, + cron_schedule: str, + memo: Mapping[str, Any] | None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ), + versioning_intent: VersioningIntent | None, + static_summary: str | None = None, + static_details: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, + ) -> ChildWorkflowHandle[Any, Any]: ... + + @abstractmethod + def workflow_start_local_activity( + self, + activity: Any, + *args: Any, + result_type: type | None, + schedule_to_close_timeout: timedelta | None, + schedule_to_start_timeout: timedelta | None, + start_to_close_timeout: timedelta | None, + retry_policy: temporalio.common.RetryPolicy | None, + local_retry_threshold: timedelta | None, + cancellation_type: ActivityCancellationType, + activity_id: str | None, + summary: str | None, + ) -> ActivityHandle[Any]: ... + + @abstractmethod + async def workflow_start_nexus_operation( + self, + endpoint: str, + service: str, + operation: nexusrpc.Operation[InputT, OutputT] | str | Callable[..., Any], + input: Any, + output_type: type[OutputT] | None, + schedule_to_close_timeout: timedelta | None, + schedule_to_start_timeout: timedelta | None, + start_to_close_timeout: timedelta | None, + cancellation_type: NexusOperationCancellationType, + headers: Mapping[str, str] | None, + summary: str | None, + ) -> NexusOperationHandle[OutputT]: ... + + @abstractmethod + def workflow_time_ns(self) -> int: ... 
+ + @abstractmethod + def workflow_upsert_search_attributes( + self, + attributes: ( + temporalio.common.SearchAttributes + | Sequence[temporalio.common.SearchAttributeUpdate] + ), + ) -> None: ... + + @abstractmethod + async def workflow_sleep( + self, duration: float, *, summary: str | None = None + ) -> None: ... + + @abstractmethod + async def workflow_wait_condition( + self, + fn: Callable[[], bool], + *, + timeout: float | None = None, + timeout_summary: str | None = None, + ) -> None: ... + + @abstractmethod + def workflow_get_current_details(self) -> str: ... + + @abstractmethod + def workflow_set_current_details(self, details: str): ... + + @abstractmethod + def workflow_is_failure_exception(self, err: BaseException) -> bool: ... + + @abstractmethod + def workflow_has_last_completion_result(self) -> bool: ... + + @abstractmethod + def workflow_last_completion_result(self, type_hint: type | None) -> Any | None: ... + + @abstractmethod + def workflow_last_failure(self) -> BaseException | None: ... + + @abstractmethod + def workflow_random_seed(self) -> int: ... + + @abstractmethod + def workflow_register_random_seed_callback( + self, callback: Callable[[int], None] + ) -> None: ... + + +_current_update_info: contextvars.ContextVar[UpdateInfo] = contextvars.ContextVar( + "__temporal_current_update_info" +) + + +def _set_current_update_info(info: UpdateInfo) -> None: # type: ignore[reportUnusedFunction] + _current_update_info.set(info) + + +def current_update_info() -> UpdateInfo | None: + """Info for the current update if any. + + This is powered by :py:mod:`contextvars` so it is only valid within the + update handler and coroutines/tasks it has started. + + Returns: + Info for the current update handler the code calling this is executing + within if any. + """ + return _current_update_info.get(None) + + +def deprecate_patch(id: str) -> None: + """Mark a patch as deprecated. 
+ + This marks a workflow that had :py:func:`patched` in a previous version of + the code as no longer applicable because all workflows that use the old code + path are done and will never be queried again. Therefore the old code path + is removed as well. + + Args: + id: The identifier originally used with :py:func:`patched`. + """ + _Runtime.current().workflow_patch(id, deprecated=True) + + +def extern_functions() -> Mapping[str, Callable]: + """External functions available in the workflow sandbox. + + Returns: + Mapping of external functions that can be called from inside a workflow + sandbox. + """ + return _Runtime.current().workflow_extern_functions() + + +def info() -> Info: + """Current workflow's info. + + Returns: + Info for the currently running workflow. + """ + return _Runtime.current().workflow_info() + + +def instance() -> Any: + """Current workflow's instance. + + Returns: + The currently running workflow instance. + """ + return _Runtime.current().workflow_instance() + + +def in_workflow() -> bool: + """Whether the code is currently running in a workflow.""" + return _Runtime.maybe_current() is not None + + +def memo() -> Mapping[str, Any]: + """Current workflow's memo values, converted without type hints. + + Since type hints are not used, the default converted values will come back. + For example, if the memo was originally created with a dataclass, the value + will be a dict. To convert using proper type hints, use + :py:func:`memo_value`. + + Returns: + Mapping of all memo keys and they values without type hints. + """ + return _Runtime.current().workflow_memo() + + +def is_failure_exception(err: BaseException) -> bool: + """Checks if the given exception is a workflow failure in the current workflow. + + Returns: + True if the given exception is a workflow failure in the current workflow. 
+ """ + return _Runtime.current().workflow_is_failure_exception(err) + + +@overload +def memo_value(key: str, default: Any = temporalio.common._arg_unset) -> Any: ... + + +@overload +def memo_value(key: str, *, type_hint: type[ParamType]) -> ParamType: ... + + +@overload +def memo_value( + key: str, default: AnyType, *, type_hint: type[ParamType] +) -> AnyType | ParamType: ... + + +def memo_value( + key: str, + default: Any = temporalio.common._arg_unset, + *, + type_hint: type | None = None, +) -> Any: + """Memo value for the given key, optional default, and optional type + hint. + + Args: + key: Key to get memo value for. + default: Default to use if key is not present. If unset, a + :py:class:`KeyError` is raised when the key does not exist. + type_hint: Type hint to use when converting. + + Returns: + Memo value, converted with the type hint if present. + + Raises: + KeyError: Key not present and default not set. + """ + return _Runtime.current().workflow_memo_value(key, default, type_hint=type_hint) + + +def upsert_memo(updates: Mapping[str, Any]) -> None: + """Adds, modifies, and/or removes memos, with upsert semantics. + + Every memo that has a matching key has its value replaced with the one specified in ``updates``. + If the value is set to ``None``, the memo is removed instead. + For every key with no existing memo, a new memo is added with specified value (unless the value is ``None``). + Memos with keys not included in ``updates`` remain unchanged. + """ + return _Runtime.current().workflow_upsert_memo(updates) + + +def get_current_details() -> str: + """Get the current details of the workflow which may appear in the UI/CLI. + Unlike static details set at start, this value can be updated throughout + the life of the workflow and is independent of the static details. + This can be in Temporal markdown format and can span multiple lines. 
+ """ + return _Runtime.current().workflow_get_current_details() + + +def has_last_completion_result() -> bool: + """Gets whether there is a last completion result of the workflow.""" + return _Runtime.current().workflow_has_last_completion_result() + + +@overload +def get_last_completion_result() -> Any | None: ... + + +@overload +def get_last_completion_result(type_hint: type[ParamType]) -> ParamType | None: ... + + +def get_last_completion_result(type_hint: type | None = None) -> Any | None: + """Get the result of the last run of the workflow. This will be None if there was + no previous completion or the result was None. has_last_completion_result() + can be used to differentiate. + """ + return _Runtime.current().workflow_last_completion_result(type_hint) + + +def get_last_failure() -> BaseException | None: + """Get the last failure of the workflow if it has run previously.""" + return _Runtime.current().workflow_last_failure() + + +def set_current_details(description: str) -> None: + """Set the current details of the workflow which may appear in the UI/CLI. + Unlike static details set at start, this value can be updated throughout + the life of the workflow and is independent of the static details. + This can be in Temporal markdown format and can span multiple lines. + """ + _Runtime.current().workflow_set_current_details(description) + + +def metric_meter() -> temporalio.common.MetricMeter: + """Get the metric meter for the current workflow. + + This meter is replay safe which means that metrics will not be recorded + during replay. + + Returns: + Current metric meter for this workflow for recording metrics. + """ + return _Runtime.current().workflow_metric_meter() + + +def now() -> datetime: + """Current time from the workflow perspective. + + This is the workflow equivalent of :py:func:`datetime.now` with the + :py:attr:`timezone.utc` parameter. + + Returns: + UTC datetime for the current workflow time. The datetime does have UTC + set as the time zone. 
+ """ + return datetime.fromtimestamp(time(), timezone.utc) + + +def patched(id: str) -> bool: + """Patch a workflow. + + When called, this will only return true if code should take the newer path + which means this is either not replaying or is replaying and has seen this + patch before. + + Use :py:func:`deprecate_patch` when all workflows are done and will never be + queried again. The old code path can be used at that time too. + + Args: + id: The identifier for this patch. This identifier may be used + repeatedly in the same workflow to represent the same patch + + Returns: + True if this should take the newer path, false if it should take the + older path. + """ + return _Runtime.current().workflow_patch(id, deprecated=False) + + +def payload_converter() -> temporalio.converter.PayloadConverter: + """Get the payload converter for the current workflow. + + The returned converter has :py:class:`temporalio.converter.WorkflowSerializationContext` set. + This is often used for dynamic workflows/signals/queries to convert + payloads. + """ + return _Runtime.current().workflow_payload_converter() + + +def random() -> Random: + """Get a deterministic pseudo-random number generator. + + Note, this random number generator is not cryptographically safe and should + not be used for security purposes. + + Returns: + The deterministically-seeded pseudo-random number generator. + """ + return _Runtime.current().workflow_random() + + +def random_seed() -> int: + """Get the current random seed value from core. + + This returns the seed value currently being used by the workflow's + deterministic random number generator. + + Returns: + The current random seed as an integer. + """ + return _Runtime.current().workflow_random_seed() + + +def register_random_seed_callback(callback: Callable[[int], None]) -> None: + """Register a callback to be notified when the random seed changes. + + The callback will be invoked whenever the workflow receives a new random + seed from the core. 
This is useful for maintaining external random number + generators that need to stay in sync with the workflow's randomness. + + Args: + callback: Function to be called with the new seed value when it changes. + """ + return _Runtime.current().workflow_register_random_seed_callback(callback) + + +def new_random() -> Random: + """Create a Random instance that automatically reseeds when the workflow seed changes. + + This creates a new Random instance that is initially seeded with the current + workflow seed, and automatically registers a callback to reseed itself + whenever the workflow receives a new seed from core. + + Returns: + A Random instance that stays synchronized with the workflow's randomness. + """ + current_seed = random_seed() + auto_random = Random(current_seed) + + def reseed_callback(new_seed: int) -> None: + auto_random.seed(new_seed) + + register_random_seed_callback(reseed_callback) + return auto_random + + +def time() -> float: + """Current seconds since the epoch from the workflow perspective. + + This is the workflow equivalent of :py:func:`time.time`. + + Returns: + Seconds since the epoch as a float. + """ + return time_ns() / 1e9 + + +def time_ns() -> int: + """Current nanoseconds since the epoch from the workflow perspective. + + This is the workflow equivalent of :py:func:`time.time_ns`. + + Returns: + Nanoseconds since the epoch + """ + return _Runtime.current().workflow_time_ns() + + +def upsert_search_attributes( + attributes: ( + temporalio.common.SearchAttributes + | Sequence[temporalio.common.SearchAttributeUpdate] + ), +) -> None: + """Upsert search attributes for this workflow. + + Args: + attributes: The attributes to set. This should be a sequence of + updates (i.e. values created via value_set and value_unset calls on + search attribute keys). The dictionary form of attributes is + DEPRECATED and if used, result in invalid key types on the + typed_search_attributes property in the info. 
+ """ + if not attributes: + return + temporalio.common._warn_on_deprecated_search_attributes(attributes) + _Runtime.current().workflow_upsert_search_attributes(attributes) + + +def uuid4() -> uuid.UUID: + """Get a new, determinism-safe v4 UUID based on :py:func:`random`. + + Note, this UUID is not cryptographically safe and should not be used for + security purposes. + + Returns: + A deterministically-seeded v4 UUID. + """ + return uuid.UUID(bytes=random().getrandbits(16 * 8).to_bytes(16, "big"), version=4) + + +async def sleep(duration: float | timedelta, *, summary: str | None = None) -> None: + """Sleep for the given duration. + + Args: + duration: Duration to sleep in seconds or as a timedelta. + summary: A single-line fixed summary for this timer that may appear in UI/CLI. + This can be in single-line Temporal markdown format. + """ + await _Runtime.current().workflow_sleep( + duration=( + duration.total_seconds() if isinstance(duration, timedelta) else duration + ), + summary=summary, + ) + + +async def wait_condition( + fn: Callable[[], bool], + *, + timeout: timedelta | float | None = None, + timeout_summary: str | None = None, +) -> None: + """Wait on a callback to become true. + + This function returns when the callback returns true (invoked each loop + iteration) or the timeout has been reached. + + Args: + fn: Non-async callback that accepts no parameters and returns a boolean. + timeout: Optional number of seconds to wait until throwing + :py:class:`asyncio.TimeoutError`. + timeout_summary: Optional simple string identifying the timer (created if ``timeout`` is + present) that may be visible in UI/CLI. While it can be normal text, it is best to treat + as a timer ID. 
+ """ + await _Runtime.current().workflow_wait_condition( + fn, + timeout=timeout.total_seconds() if isinstance(timeout, timedelta) else timeout, + timeout_summary=timeout_summary, + ) diff --git a/temporalio/workflow/_definition.py b/temporalio/workflow/_definition.py new file mode 100644 index 000000000..c1ce21169 --- /dev/null +++ b/temporalio/workflow/_definition.py @@ -0,0 +1,466 @@ +from __future__ import annotations + +import inspect +from collections.abc import Awaitable, Callable, Mapping, Sequence +from dataclasses import dataclass +from typing import Any, cast, overload + +import temporalio.common + +from ..types import ( + CallableAsyncType, + CallableType, + ClassType, + MethodSyncNoParam, + SelfType, +) +from ._handlers import ( + UpdateMethodMultiParam, + _QueryDefinition, + _SignalDefinition, + _UpdateDefinition, +) + +__all__ = [ + "DynamicWorkflowConfig", + "defn", + "dynamic_config", + "init", + "run", +] + + +@overload +def defn(cls: ClassType) -> ClassType: ... + + +@overload +def defn( + *, + name: str | None = None, + sandboxed: bool = True, + failure_exception_types: Sequence[type[BaseException]] = [], + versioning_behavior: temporalio.common.VersioningBehavior = temporalio.common.VersioningBehavior.UNSPECIFIED, +) -> Callable[[ClassType], ClassType]: ... + + +@overload +def defn( + *, + sandboxed: bool = True, + dynamic: bool = False, + versioning_behavior: temporalio.common.VersioningBehavior = temporalio.common.VersioningBehavior.UNSPECIFIED, +) -> Callable[[ClassType], ClassType]: ... + + +def defn( + cls: ClassType | None = None, + *, + name: str | None = None, + sandboxed: bool = True, + dynamic: bool = False, + failure_exception_types: Sequence[type[BaseException]] = [], + versioning_behavior: temporalio.common.VersioningBehavior = temporalio.common.VersioningBehavior.UNSPECIFIED, +) -> Callable[[ClassType], ClassType]: + """Decorator for workflow classes. 
+ + This must be set on any registered workflow class (it is ignored if on a + base class). + + Args: + cls: The class to decorate. + name: Name to use for the workflow. Defaults to class ``__name__``. This + cannot be set if dynamic is set. + sandboxed: Whether the workflow should run in a sandbox. Default is + true. + dynamic: If true, this activity will be dynamic. Dynamic workflows have + to accept a single 'Sequence[RawValue]' parameter. This cannot be + set to true if name is present. + failure_exception_types: The types of exceptions that, if a + workflow-thrown exception extends, will cause the workflow/update to + fail instead of suspending the workflow via task failure. These are + applied in addition to ones set on the worker constructor. If + ``Exception`` is set, it effectively will fail a workflow/update in + all user exception cases. WARNING: This setting is experimental. + versioning_behavior: Specifies the versioning behavior to use for this workflow. + """ + + def decorator(cls: ClassType) -> ClassType: + # This performs validation + _Definition._apply_to_class( + cls, + workflow_name=name or cls.__name__ if not dynamic else None, + sandboxed=sandboxed, + failure_exception_types=failure_exception_types, + versioning_behavior=versioning_behavior, + ) + return cls + + if cls is not None: + return decorator(cls) + return decorator + + +def init( + init_fn: CallableType, +) -> CallableType: + """Decorator for the workflow init method. + + This may be used on the __init__ method of the workflow class to specify + that it accepts the same workflow input arguments as the ``@workflow.run`` + method. If used, the parameters of your __init__ and ``@workflow.run`` + methods must be identical. + + Args: + init_fn: The __init__ method to decorate. 
+ """ + if init_fn.__name__ != "__init__": + raise ValueError("@workflow.init may only be used on the __init__ method") + + setattr(init_fn, "__temporal_workflow_init", True) + return init_fn + + +def run(fn: CallableAsyncType) -> CallableAsyncType: + """Decorator for the workflow run method. + + This must be used on one and only one async method defined on the same class + as ``@workflow.defn``. This can be defined on a base class method but must + then be explicitly overridden and defined on the workflow class. + + Run methods can only have positional parameters. Best practice is to only + take a single object/dataclass argument that can accept more fields later if + needed. + + Args: + fn: The function to decorate. + """ + if not inspect.iscoroutinefunction(fn): + raise ValueError("Workflow run method must be an async function") + # Disallow local classes because we need to have the class globally + # referenceable by name + if "" in fn.__qualname__: + raise ValueError( + "Local classes unsupported, @workflow.run cannot be on a local class" + ) + setattr(fn, "__temporal_workflow_run", True) + # TODO(cretz): Why is MyPy unhappy with this return? + return fn # type: ignore[return-value] + + +@dataclass(frozen=True) +class DynamicWorkflowConfig: + """Returned by functions using the :py:func:`dynamic_config` decorator, see it for more.""" + + failure_exception_types: Sequence[type[BaseException]] | None = None + """The types of exceptions that, if a workflow-thrown exception extends, will cause the + workflow/update to fail instead of suspending the workflow via task failure. These are applied + in addition to ones set on the worker constructor. If ``Exception`` is set, it effectively will + fail a workflow/update in all user exception cases. + + Always overrides the equivalent parameter on :py:func:`defn` if set not-None. + + WARNING: This setting is experimental. 
+ """ + versioning_behavior: temporalio.common.VersioningBehavior = ( + temporalio.common.VersioningBehavior.UNSPECIFIED + ) + """Specifies the versioning behavior to use for this workflow. + + Always overrides the equivalent parameter on :py:func:`defn`. + """ + + +def dynamic_config( + fn: MethodSyncNoParam[SelfType, DynamicWorkflowConfig], +) -> MethodSyncNoParam[SelfType, DynamicWorkflowConfig]: + """Decorator to allow configuring a dynamic workflow's behavior. + + Because dynamic workflows may conceptually represent more than one workflow type, it may be + desirable to have different settings for fields that would normally be passed to + :py:func:`defn`, but vary based on the workflow type name or other information available in + the workflow's context. This function will be called after the workflow's :py:func:`init`, + if it has one, but before the workflow's :py:func:`run` method. + + The method must only take self as a parameter, and any values set in the class it returns will + override those provided to :py:func:`defn`. + + Cannot be specified on non-dynamic workflows. + + Args: + fn: The function to decorate. 
+ """ + if inspect.iscoroutinefunction(fn): + raise ValueError("Workflow dynamic_config method must be synchronous") + params = list(inspect.signature(fn).parameters.values()) + if len(params) != 1: + raise ValueError("Workflow dynamic_config method must only take self parameter") + + # Add marker attribute + setattr(fn, "__temporal_workflow_dynamic_config", True) + return fn + + +@dataclass(frozen=True) +class _Definition: + name: str | None + cls: type + run_fn: Callable[..., Awaitable] + signals: Mapping[str | None, _SignalDefinition] + queries: Mapping[str | None, _QueryDefinition] + updates: Mapping[str | None, _UpdateDefinition] + sandboxed: bool + failure_exception_types: Sequence[type[BaseException]] + # Types loaded on post init if both are None + arg_types: list[type] | None = None + ret_type: type | None = None + versioning_behavior: temporalio.common.VersioningBehavior | None = None + dynamic_config_fn: Callable[..., DynamicWorkflowConfig] | None = None + + @staticmethod + def from_class(cls: type) -> _Definition | None: # type: ignore[reportSelfClsParameterName] + # We make sure to only return it if it's on _this_ class + defn = getattr(cls, "__temporal_workflow_definition", None) + if defn and defn.cls == cls: + return defn + return None + + @staticmethod + def must_from_class(cls: type) -> _Definition: # type: ignore[reportSelfClsParameterName] + ret = _Definition.from_class(cls) + if ret: + return ret + cls_name = getattr(cls, "__name__", "") + raise ValueError( + f"Workflow {cls_name} missing attributes, was it decorated with @workflow.defn?" 
+ ) + + @staticmethod + def from_run_fn(fn: Callable[..., Awaitable[Any]]) -> _Definition | None: + return getattr(fn, "__temporal_workflow_definition", None) + + @staticmethod + def must_from_run_fn(fn: Callable[..., Awaitable[Any]]) -> _Definition: + ret = _Definition.from_run_fn(fn) + if ret: + return ret + fn_name = getattr(fn, "__qualname__", "") + raise ValueError( + f"Function {fn_name} missing attributes, was it decorated with @workflow.run and was its class decorated with @workflow.defn?" + ) + + @classmethod + def get_name_and_result_type( + cls, name_or_run_fn: str | Callable[..., Awaitable[Any]] + ) -> tuple[str, type | None]: + if isinstance(name_or_run_fn, str): + return name_or_run_fn, None + elif callable(name_or_run_fn): + defn = cls.must_from_run_fn(name_or_run_fn) + if not defn.name: + raise ValueError("Cannot invoke dynamic workflow explicitly") + return defn.name, defn.ret_type + else: + raise TypeError("Workflow must be a string or callable") # type: ignore[reportUnreachable] + + @staticmethod + def _apply_to_class( + cls: type, # type: ignore[reportSelfClsParameterName] + *, + workflow_name: str | None, + sandboxed: bool, + failure_exception_types: Sequence[type[BaseException]], + versioning_behavior: temporalio.common.VersioningBehavior, + ) -> None: + # Check it's not being doubly applied + if _Definition.from_class(cls): + raise ValueError("Class already contains workflow definition") + issues: list[str] = [] + + # Collect run fn and all signal/query/update fns + init_fn: Callable[..., None] | None = None + run_fn: Callable[..., Awaitable[Any]] | None = None + dynamic_config_fn: Callable[..., DynamicWorkflowConfig] | None = None + seen_run_attr = False + signals: dict[str | None, _SignalDefinition] = {} + queries: dict[str | None, _QueryDefinition] = {} + updates: dict[str | None, _UpdateDefinition] = {} + for name, member in inspect.getmembers(cls): + if hasattr(member, "__temporal_workflow_run"): + seen_run_attr = True + if not 
_is_unbound_method_on_cls(member, cls): + issues.append( + f"@workflow.run method {name} must be defined on {cls.__qualname__}" + ) + elif run_fn is not None: + issues.append( + f"Multiple @workflow.run methods found (at least on {name} and {run_fn.__name__})" + ) + else: + # We can guarantee the @workflow.run decorator did + # validation of the function itself + run_fn = member + elif hasattr(member, "__temporal_signal_definition"): + signal_defn = cast( + _SignalDefinition, getattr(member, "__temporal_signal_definition") + ) + if signal_defn.name in signals: + defn_name = signal_defn.name or "" + # TODO(cretz): Remove cast when https://github.com/python/mypy/issues/5485 fixed + other_fn = cast(Callable, signals[signal_defn.name].fn) + issues.append( + f"Multiple signal methods found for {defn_name} " + f"(at least on {name} and {other_fn.__name__})" + ) + else: + signals[signal_defn.name] = signal_defn + elif hasattr(member, "__temporal_query_definition"): + query_defn = cast( + _QueryDefinition, getattr(member, "__temporal_query_definition") + ) + if query_defn.name in queries: + defn_name = query_defn.name or "" + issues.append( + f"Multiple query methods found for {defn_name} " + f"(at least on {name} and {queries[query_defn.name].fn.__name__})" + ) + else: + queries[query_defn.name] = query_defn + elif name == "__init__" and hasattr(member, "__temporal_workflow_init"): + init_fn = member + elif hasattr(member, "__temporal_workflow_dynamic_config"): + if workflow_name: + issues.append( + "@workflow.dynamic_config can only be used in dynamic workflows, but " + f"workflow class {workflow_name} ({cls.__name__}) is not dynamic" + ) + if dynamic_config_fn: + issues.append( + "@workflow.dynamic_config can only be defined once per workflow" + ) + dynamic_config_fn = member + elif isinstance(member, UpdateMethodMultiParam): + update_defn = member._defn + if update_defn.name in updates: + defn_name = update_defn.name or "" + issues.append( + f"Multiple update methods 
found for {defn_name} " + f"(at least on {name} and {updates[update_defn.name].fn.__name__})" + ) + elif update_defn.validator and not _parameters_identical_up_to_naming( + update_defn.fn, update_defn.validator + ): + issues.append( + f"Update validator method {update_defn.validator.__name__} parameters " + f"do not match update method {update_defn.fn.__name__} parameters" + ) + else: + updates[update_defn.name] = update_defn + + # Check base classes haven't defined things with different decorators + for base_cls in inspect.getmro(cls)[1:]: + for _, base_member in inspect.getmembers(base_cls): + # We only care about methods defined on this class + if not inspect.isfunction(base_member) or not _is_unbound_method_on_cls( + base_member, base_cls + ): + continue + if hasattr(base_member, "__temporal_workflow_run"): + seen_run_attr = True + if not run_fn or base_member.__name__ != run_fn.__name__: + issues.append( + f"@workflow.run defined on {base_member.__qualname__} but not on the override" + ) + elif hasattr(base_member, "__temporal_signal_definition"): + signal_defn = cast( + _SignalDefinition, + getattr(base_member, "__temporal_signal_definition"), + ) + if signal_defn.name not in signals: + issues.append( + f"@workflow.signal defined on {base_member.__qualname__} but not on the override" + ) + elif hasattr(base_member, "__temporal_query_definition"): + query_defn = cast( + _QueryDefinition, + getattr(base_member, "__temporal_query_definition"), + ) + if query_defn.name not in queries: + issues.append( + f"@workflow.query defined on {base_member.__qualname__} but not on the override" + ) + elif isinstance(base_member, UpdateMethodMultiParam): + update_defn = base_member._defn + if update_defn.name not in updates: + issues.append( + f"@workflow.update defined on {base_member.__qualname__} but not on the override" + ) + + if not seen_run_attr: + issues.append("Missing @workflow.run method") + if init_fn and run_fn: + if not 
_parameters_identical_up_to_naming(init_fn, run_fn): + issues.append( + "@workflow.init and @workflow.run method parameters do not match" + ) + if issues: + if len(issues) == 1: + raise ValueError(f"Invalid workflow class: {issues[0]}") + raise ValueError( + f"Invalid workflow class for {len(issues)} reasons: {', '.join(issues)}" + ) + + assert run_fn + assert seen_run_attr + defn = _Definition( + name=workflow_name, + cls=cls, + run_fn=run_fn, + signals=signals, + queries=queries, + updates=updates, + sandboxed=sandboxed, + failure_exception_types=failure_exception_types, + versioning_behavior=versioning_behavior, + dynamic_config_fn=dynamic_config_fn, + ) + setattr(cls, "__temporal_workflow_definition", defn) + setattr(run_fn, "__temporal_workflow_definition", defn) + + def __post_init__(self) -> None: + if self.arg_types is None and self.ret_type is None: + dynamic = self.name is None + arg_types, ret_type = temporalio.common._type_hints_from_func(self.run_fn) + # If dynamic, must be a sequence of raw values + if dynamic and ( + not arg_types + or len(arg_types) != 1 + or arg_types[0] != Sequence[temporalio.common.RawValue] + ): + raise TypeError( + "Dynamic workflow must accept a single Sequence[temporalio.common.RawValue]" + ) + object.__setattr__(self, "arg_types", arg_types) + object.__setattr__(self, "ret_type", ret_type) + + +def _parameters_identical_up_to_naming(fn1: Callable, fn2: Callable) -> bool: + """Return True if the functions have identical parameter lists, ignoring parameter names.""" + + def params(fn: Callable) -> list[inspect.Parameter]: + # Ignore name when comparing parameters (remaining fields are kind, + # default, and annotation). + return [p.replace(name="x") for p in inspect.signature(fn).parameters.values()] + + # We require that any type annotations present match exactly; i.e. we do + # not support any notion of subtype compatibility. 
+ return params(fn1) == params(fn2) + + +def _is_unbound_method_on_cls(fn: Callable[..., Any], cls: type) -> bool: + # Python 3 does not make this easy, ref https://stackoverflow.com/questions/3589311 + return ( + inspect.isfunction(fn) + and inspect.getmodule(fn) is inspect.getmodule(cls) + and fn.__qualname__.rsplit(".", 1)[0] == cls.__name__ + ) diff --git a/temporalio/workflow/_exceptions.py b/temporalio/workflow/_exceptions.py new file mode 100644 index 000000000..e61e71829 --- /dev/null +++ b/temporalio/workflow/_exceptions.py @@ -0,0 +1,100 @@ +from __future__ import annotations + +from enum import Enum, IntEnum + +import temporalio.api.enums.v1 +import temporalio.bridge.proto.common +import temporalio.exceptions + +__all__ = [ + "NondeterminismError", + "ReadOnlyContextError", + "VersioningIntent", + "ContinueAsNewVersioningBehavior", +] + + +class NondeterminismError(temporalio.exceptions.TemporalError): + """Error that can be thrown during replay for non-deterministic workflow.""" + + def __init__(self, message: str) -> None: + """Initialize a nondeterminism error.""" + super().__init__(message) + self.message = message + + +class ReadOnlyContextError(temporalio.exceptions.TemporalError): + """Error thrown when trying to do mutable workflow calls in a read-only + context like a query or update validator. + """ + + def __init__(self, message: str) -> None: + """Initialize a read-only context error.""" + super().__init__(message) + self.message = message + + +class _NotInWorkflowEventLoopError( # pyright: ignore[reportUnusedClass] + temporalio.exceptions.TemporalError +): + def __init__(self, *args: object) -> None: + super().__init__("Not in workflow event loop") + self.message = "Not in workflow event loop" + + +class VersioningIntent(Enum): + """Indicates whether the user intends certain commands to be run on a compatible worker Build + Id version or not. 
+ + `COMPATIBLE` indicates that the command should run on a worker with compatible version if + possible. It may not be possible if the target task queue does not also have knowledge of the + current worker's Build Id. + + `DEFAULT` indicates that the command should run on the target task queue's current + overall-default Build Id. + + Where this type is accepted optionally, an unset value indicates that the SDK should choose the + most sensible default behavior for the type of command, accounting for whether the command will + be run on the same task queue as the current worker. + + .. deprecated:: + Use Worker Deployment versioning instead. + """ + + COMPATIBLE = 1 + DEFAULT = 2 + + def _to_proto(self) -> temporalio.bridge.proto.common.VersioningIntent.ValueType: + if self == VersioningIntent.COMPATIBLE: + return temporalio.bridge.proto.common.VersioningIntent.COMPATIBLE + elif self == VersioningIntent.DEFAULT: + return temporalio.bridge.proto.common.VersioningIntent.DEFAULT + return temporalio.bridge.proto.common.VersioningIntent.UNSPECIFIED + + +class ContinueAsNewVersioningBehavior(IntEnum): + """Experimental. Optionally decide the versioning behavior that the first task of the new run should use. + For example, choose to AutoUpgrade on continue-as-new instead of inheriting the pinned version + of the previous run. + """ + + UNSPECIFIED = int( + temporalio.api.enums.v1.ContinueAsNewVersioningBehavior.CONTINUE_AS_NEW_VERSIONING_BEHAVIOR_UNSPECIFIED + ) + """An initial versioning behavior is not set, follow the existing continue-as-new inheritance semantics. + See https://docs.temporal.io/worker-versioning#inheritance-semantics for more detail. + """ + + AUTO_UPGRADE = int( + temporalio.api.enums.v1.ContinueAsNewVersioningBehavior.CONTINUE_AS_NEW_VERSIONING_BEHAVIOR_AUTO_UPGRADE + ) + """Start the new run with AutoUpgrade behavior. Use the Target Version of the workflow's task queue at + start-time, as AutoUpgrade workflows do. 
After the first workflow task completes, use whatever + Versioning Behavior the workflow is annotated with in the workflow code. + + Note that if the previous workflow had a Pinned override, that override will be inherited by the + new workflow run regardless of the ContinueAsNewVersioningBehavior specified in the continue-as-new + command. If a Pinned override is inherited by the new run, and the new run starts with AutoUpgrade + behavior, the base version of the new run will be the Target Version as described above, but the + effective version will be whatever is specified by the Versioning Override until the override is removed. + """ diff --git a/temporalio/workflow/_handlers.py b/temporalio/workflow/_handlers.py new file mode 100644 index 000000000..afa0bb6e4 --- /dev/null +++ b/temporalio/workflow/_handlers.py @@ -0,0 +1,587 @@ +from __future__ import annotations + +import inspect +import typing +import warnings +from collections.abc import Awaitable, Callable, Sequence +from dataclasses import dataclass +from enum import Enum +from functools import partial +from typing import Any, Literal, cast, overload + +from typing_extensions import Protocol, runtime_checkable + +import temporalio.common + +from ..types import ( + CallableSyncOrAsyncReturnNoneType, + CallableSyncOrAsyncType, + CallableType, + MultiParamSpec, + ProtocolReturnType, + ReturnType, +) + +__all__ = [ + "HandlerUnfinishedPolicy", + "UnfinishedSignalHandlersWarning", + "UnfinishedUpdateHandlersWarning", + "UpdateMethodMultiParam", + "query", + "signal", + "update", +] + + +class HandlerUnfinishedPolicy(Enum): + """Actions taken if a workflow terminates with running handlers. + + Policy defining actions taken when a workflow exits while update or signal handlers are running. + The workflow exit may be due to successful return, failure, cancellation, or continue-as-new. + """ + + WARN_AND_ABANDON = 1 + """Issue a warning in addition to abandoning.""" + ABANDON = 2 + """Abandon the handler. 
+ + In the case of an update handler this means that the client will receive an error rather than + the update result.""" + + +class UnfinishedUpdateHandlersWarning(RuntimeWarning): + """The workflow exited before all update handlers had finished executing.""" + + +class UnfinishedSignalHandlersWarning(RuntimeWarning): + """The workflow exited before all signal handlers had finished executing.""" + + +@overload +def signal( + fn: CallableSyncOrAsyncReturnNoneType, +) -> CallableSyncOrAsyncReturnNoneType: ... + + +@overload +def signal( + *, + unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, + description: str | None = None, +) -> Callable[ + [CallableSyncOrAsyncReturnNoneType], CallableSyncOrAsyncReturnNoneType +]: ... + + +@overload +def signal( + *, + name: str, + unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, + description: str | None = None, +) -> Callable[ + [CallableSyncOrAsyncReturnNoneType], CallableSyncOrAsyncReturnNoneType +]: ... + + +@overload +def signal( + *, + dynamic: Literal[True], + unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, + description: str | None = None, +) -> Callable[ + [CallableSyncOrAsyncReturnNoneType], CallableSyncOrAsyncReturnNoneType +]: ... + + +def signal( + fn: CallableSyncOrAsyncReturnNoneType | None = None, + *, + name: str | None = None, + dynamic: bool | None = False, + unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, + description: str | None = None, +) -> ( + Callable[[CallableSyncOrAsyncReturnNoneType], CallableSyncOrAsyncReturnNoneType] + | CallableSyncOrAsyncReturnNoneType +): + """Decorator for a workflow signal method. + + This is used on any async or non-async method that you wish to be called upon + receiving a signal. If a function overrides one with this decorator, it too + must be decorated. + + Signal methods can only have positional parameters. 
Best practice for + non-dynamic signal methods is to only take a single object/dataclass + argument that can accept more fields later if needed. Return values from + signal methods are ignored. + + Args: + fn: The function to decorate. + name: Signal name. Defaults to method ``__name__``. Cannot be present + when ``dynamic`` is present. + dynamic: If true, this handles all signals not otherwise handled. The + parameters of the method must be self, a string name, and a + ``*args`` positional varargs. Cannot be present when ``name`` is + present. + unfinished_policy: Actions taken if a workflow terminates with + a running instance of this handler. + description: A short description of the signal that may appear in the UI/CLI. + """ + + def decorator( + name: str | None, + unfinished_policy: HandlerUnfinishedPolicy, + fn: CallableSyncOrAsyncReturnNoneType, + ) -> CallableSyncOrAsyncReturnNoneType: + if not name and not dynamic: + name = fn.__name__ + defn = _SignalDefinition( + name=name, + fn=fn, + is_method=True, + unfinished_policy=unfinished_policy, + description=description, + ) + setattr(fn, "__temporal_signal_definition", defn) + if defn.dynamic_vararg: + warnings.warn( + "Dynamic signals with vararg third param is deprecated, use Sequence[RawValue]", + DeprecationWarning, + stacklevel=2, + ) + return fn + + if not fn: + if name is not None and dynamic: + raise RuntimeError("Cannot provide name and dynamic boolean") + return partial(decorator, name, unfinished_policy) + else: + return decorator(fn.__name__, unfinished_policy, fn) + + +@overload +def query(fn: CallableType) -> CallableType: ... + + +@overload +def query( + *, name: str, description: str | None = None +) -> Callable[[CallableType], CallableType]: ... + + +@overload +def query( + *, dynamic: Literal[True], description: str | None = None +) -> Callable[[CallableType], CallableType]: ... + + +@overload +def query(*, description: str) -> Callable[[CallableType], CallableType]: ... 
+ + +def query( + fn: CallableType | None = None, # type: ignore[reportInvalidTypeVarUse] + *, + name: str | None = None, + dynamic: bool | None = False, + description: str | None = None, +): + """Decorator for a workflow query method. + + This is used on any non-async method that expects to handle a query. If a + function overrides one with this decorator, it too must be decorated. + + Query methods can only have positional parameters. Best practice for + non-dynamic query methods is to only take a single object/dataclass + argument that can accept more fields later if needed. The return value is + the resulting query value. Query methods must not mutate any workflow state. + + Args: + fn: The function to decorate. + name: Query name. Defaults to method ``__name__``. Cannot be present + when ``dynamic`` is present. + dynamic: If true, this handles all queries not otherwise handled. The + parameters of the method should be self, a string name, and a + ``Sequence[RawValue]``. An older form of this accepted vararg + parameters which will now warn. Cannot be present when ``name`` is + present. + description: A short description of the query that may appear in the UI/CLI. 
+ """ + + def decorator( + name: str | None, + description: str | None, + fn: CallableType, + *, + bypass_async_check: bool = False, + ) -> CallableType: + if not name and not dynamic: + name = fn.__name__ + if not bypass_async_check and inspect.iscoroutinefunction(fn): + warnings.warn( + "Queries as async def functions are deprecated", + DeprecationWarning, + stacklevel=2, + ) + defn = _QueryDefinition( + name=name, fn=fn, is_method=True, description=description + ) + setattr(fn, "__temporal_query_definition", defn) + if defn.dynamic_vararg: + warnings.warn( + "Dynamic queries with vararg third param is deprecated, use Sequence[RawValue]", + DeprecationWarning, + stacklevel=2, + ) + return fn + + if name is not None or dynamic or description: + if name is not None and dynamic: + raise RuntimeError("Cannot provide name and dynamic boolean") + return partial(decorator, name, description) + if fn is None: + raise RuntimeError("Cannot create query without function or name or dynamic") + if inspect.iscoroutinefunction(fn): + warnings.warn( + "Queries as async def functions are deprecated", + DeprecationWarning, + stacklevel=2, + ) + return decorator(fn.__name__, description, fn, bypass_async_check=True) + + +@runtime_checkable +class UpdateMethodMultiParam(Protocol[MultiParamSpec, ProtocolReturnType]): + """Decorated workflow update functions implement this.""" + + _defn: _UpdateDefinition + + def __call__( + self, *args: MultiParamSpec.args, **kwargs: MultiParamSpec.kwargs + ) -> ProtocolReturnType | Awaitable[ProtocolReturnType]: + """Generic callable type callback.""" + ... + + def validator( + self, vfunc: Callable[MultiParamSpec, None] + ) -> Callable[MultiParamSpec, None]: + """Use to decorate a function to validate the arguments passed to the update handler.""" + ... + + +@overload +def update( + fn: Callable[MultiParamSpec, Awaitable[ReturnType]], +) -> UpdateMethodMultiParam[MultiParamSpec, ReturnType]: ... 
+ + +@overload +def update( + fn: Callable[MultiParamSpec, ReturnType], +) -> UpdateMethodMultiParam[MultiParamSpec, ReturnType]: ... + + +@overload +def update( + *, + unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, + description: str | None = None, +) -> Callable[ + [Callable[MultiParamSpec, ReturnType]], + UpdateMethodMultiParam[MultiParamSpec, ReturnType], +]: ... + + +@overload +def update( + *, + name: str, + unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, + description: str | None = None, +) -> Callable[ + [Callable[MultiParamSpec, ReturnType]], + UpdateMethodMultiParam[MultiParamSpec, ReturnType], +]: ... + + +@overload +def update( + *, + dynamic: Literal[True], + unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, + description: str | None = None, +) -> Callable[ + [Callable[MultiParamSpec, ReturnType]], + UpdateMethodMultiParam[MultiParamSpec, ReturnType], +]: ... + + +def update( + fn: CallableSyncOrAsyncType | None = None, # type: ignore[reportInvalidTypeVarUse] + *, + name: str | None = None, + dynamic: bool | None = False, + unfinished_policy: HandlerUnfinishedPolicy = HandlerUnfinishedPolicy.WARN_AND_ABANDON, + description: str | None = None, +) -> ( + UpdateMethodMultiParam[MultiParamSpec, ReturnType] + | Callable[ + [Callable[MultiParamSpec, ReturnType]], + UpdateMethodMultiParam[MultiParamSpec, ReturnType], + ] +): + """Decorator for a workflow update handler method. + + This is used on any async or non-async method that you wish to be called upon + receiving an update. If a function overrides one with this decorator, it too + must be decorated. + + You may also optionally define a validator method that will be called before + this handler you have applied this decorator to. You can specify the validator + with ``@update_handler_function_name.validator``. + + Update methods can only have positional parameters. 
Best practice for + non-dynamic update methods is to only take a single object/dataclass + argument that can accept more fields later if needed. The handler may return + a serializable value which will be sent back to the caller of the update. + + Args: + fn: The function to decorate. + name: Update name. Defaults to method ``__name__``. Cannot be present + when ``dynamic`` is present. + dynamic: If true, this handles all updates not otherwise handled. The + parameters of the method must be self, a string name, and a + ``*args`` positional varargs. Cannot be present when ``name`` is + present. + unfinished_policy: Actions taken if a workflow terminates with + a running instance of this handler. + description: A short description of the update that may appear in the UI/CLI. + """ + + def decorator( + name: str | None, + unfinished_policy: HandlerUnfinishedPolicy, + fn: CallableSyncOrAsyncType, + ) -> CallableSyncOrAsyncType: + if not name and not dynamic: + name = fn.__name__ + defn = _UpdateDefinition( + name=name, + fn=fn, + is_method=True, + unfinished_policy=unfinished_policy, + description=description, + ) + if defn.dynamic_vararg: + raise RuntimeError( + "Dynamic updates do not support a vararg third param, use Sequence[RawValue]", + ) + setattr(fn, "_defn", defn) + setattr(fn, "validator", partial(_update_validator, defn)) + return fn + + if not fn: + if name is not None and dynamic: + raise RuntimeError("Cannot provide name and dynamic boolean") + return partial(decorator, name, unfinished_policy) # type: ignore[reportReturnType, return-value] + else: + return decorator(fn.__name__, unfinished_policy, fn) # type: ignore[reportReturnType, return-value] + + +def _update_validator( + update_def: _UpdateDefinition, fn: Callable[..., None] | None = None +) -> Callable[..., None] | None: + """Decorator for a workflow update validator method.""" + if fn is not None: + update_def.set_validator(fn) + return fn + + +def _bind_method(obj: Any, fn: Callable[..., Any]) 
-> Callable[..., Any]: + # Curry instance on the definition function since that represents an + # unbound method + if inspect.iscoroutinefunction(fn): + # We cannot use functools.partial here because in <= 3.7 that isn't + # considered an inspect.iscoroutinefunction + fn = cast(Callable[..., Awaitable[Any]], fn) + + async def with_object(*args: Any, **kwargs: Any) -> Any: + return await fn(obj, *args, **kwargs) + + return with_object + return partial(fn, obj) + + +def _assert_dynamic_handler_args( + fn: Callable, arg_types: list[type] | None, is_method: bool +) -> bool: + # Dynamic query/signal/update must have three args: self, name, and + # Sequence[RawValue]. An older form accepted varargs for the third param for signals/queries so + # we will too (but will warn in the signal/query code). + params = list(inspect.signature(fn).parameters.values()) + total_expected_params = 3 if is_method else 2 + if ( + len(params) == total_expected_params + and params[-2].kind is inspect.Parameter.POSITIONAL_OR_KEYWORD + and params[-1].kind is inspect.Parameter.VAR_POSITIONAL + ): + # Old var-arg form + return False + if ( + not arg_types + or len(arg_types) != 2 + or arg_types[0] is not str + or ( + arg_types[1] != Sequence[temporalio.common.RawValue] + and arg_types[1] != typing.Sequence[temporalio.common.RawValue] # type: ignore[reportDeprecated] + ) + ): + raise RuntimeError( + "Dynamic handler must have 3 arguments: self, str, and Sequence[RawValue]" + ) + return True + + +@dataclass(frozen=True) +class _SignalDefinition: + # None if dynamic + name: str | None + fn: Callable[..., None | Awaitable[None]] + is_method: bool + unfinished_policy: HandlerUnfinishedPolicy = ( + HandlerUnfinishedPolicy.WARN_AND_ABANDON + ) + description: str | None = None + # Types loaded on post init if None + arg_types: list[type] | None = None + dynamic_vararg: bool = False + + @staticmethod + def from_fn(fn: Callable) -> _SignalDefinition | None: + return getattr(fn, 
"__temporal_signal_definition", None) + + @staticmethod + def must_name_from_fn_or_str(signal: str | Callable) -> str: + if callable(signal): + defn = _SignalDefinition.from_fn(signal) + if not defn: + raise RuntimeError( + f"Signal definition not found on {signal.__qualname__}, " + "is it decorated with @workflow.signal?" + ) + elif not defn.name: + raise RuntimeError("Cannot invoke dynamic signal definition") + # TODO(cretz): Check count/type of args at runtime? + return defn.name + return str(signal) + + def __post_init__(self) -> None: + if self.arg_types is None: + arg_types, _ = temporalio.common._type_hints_from_func(self.fn) + # If dynamic, assert it + if not self.name: + object.__setattr__( + self, + "dynamic_vararg", + not _assert_dynamic_handler_args( + self.fn, arg_types, self.is_method + ), + ) + object.__setattr__(self, "arg_types", arg_types) + + def bind_fn(self, obj: Any) -> Callable[..., Any]: + return _bind_method(obj, self.fn) + + +@dataclass(frozen=True) +class _QueryDefinition: + # None if dynamic + name: str | None + fn: Callable[..., Any] + is_method: bool + description: str | None = None + # Types loaded on post init if both are None + arg_types: list[type] | None = None + ret_type: type | None = None + dynamic_vararg: bool = False + + @staticmethod + def from_fn(fn: Callable) -> _QueryDefinition | None: + return getattr(fn, "__temporal_query_definition", None) + + def __post_init__(self) -> None: + if self.arg_types is None and self.ret_type is None: + arg_types, ret_type = temporalio.common._type_hints_from_func(self.fn) + # If dynamic, assert it + if not self.name: + object.__setattr__( + self, + "dynamic_vararg", + not _assert_dynamic_handler_args( + self.fn, arg_types, self.is_method + ), + ) + object.__setattr__(self, "arg_types", arg_types) + object.__setattr__(self, "ret_type", ret_type) + + def bind_fn(self, obj: Any) -> Callable[..., Any]: + return _bind_method(obj, self.fn) + + +@dataclass(frozen=True) +class _UpdateDefinition: + 
# None if dynamic + name: str | None + fn: Callable[..., Any | Awaitable[Any]] + is_method: bool + unfinished_policy: HandlerUnfinishedPolicy = ( + HandlerUnfinishedPolicy.WARN_AND_ABANDON + ) + description: str | None = None + # Types loaded on post init if None + arg_types: list[type] | None = None + ret_type: type | None = None + validator: Callable[..., None] | None = None + dynamic_vararg: bool = False + + def __post_init__(self) -> None: + if self.arg_types is None: + arg_types, ret_type = temporalio.common._type_hints_from_func(self.fn) + # Disallow dynamic varargs + if not self.name and not _assert_dynamic_handler_args( + self.fn, arg_types, self.is_method + ): + raise RuntimeError( + "Dynamic updates do not support a vararg third param, use Sequence[RawValue]", + ) + object.__setattr__(self, "arg_types", arg_types) + object.__setattr__(self, "ret_type", ret_type) + + def bind_fn(self, obj: Any) -> Callable[..., Any]: + return _bind_method(obj, self.fn) + + def bind_validator(self, obj: Any) -> Callable[..., Any]: + if self.validator is not None: + return _bind_method(obj, self.validator) + return lambda *args, **kwargs: None + + def set_validator(self, validator: Callable[..., None]) -> None: + if self.validator: + raise RuntimeError(f"Validator already set for update {self.name}") + object.__setattr__(self, "validator", validator) + + @classmethod + def get_name_and_result_type( + cls, + name_or_update_fn: str | Callable[..., Any], + ) -> tuple[str, type | None]: + if isinstance(name_or_update_fn, UpdateMethodMultiParam): + defn = name_or_update_fn._defn + if not defn.name: + raise RuntimeError("Cannot invoke dynamic update definition") + # TODO(cretz): Check count/type of args at runtime? 
+ return defn.name, defn.ret_type + else: + return str(name_or_update_fn), None diff --git a/temporalio/workflow/_nexus.py b/temporalio/workflow/_nexus.py new file mode 100644 index 000000000..7b3b842fe --- /dev/null +++ b/temporalio/workflow/_nexus.py @@ -0,0 +1,503 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from collections.abc import Awaitable, Callable, Generator, Mapping +from datetime import timedelta +from enum import IntEnum +from typing import Any, Generic, TypeVar, overload + +import nexusrpc +import nexusrpc.handler +from nexusrpc import InputT, OutputT + +import temporalio.bridge.proto.nexus +import temporalio.nexus +from temporalio.nexus._util import ServiceHandlerT + +from ._context import _Runtime + +__all__ = [ + "NexusClient", + "NexusOperationCancellationType", + "NexusOperationHandle", + "ServiceT", + "create_nexus_client", +] + + +class NexusOperationHandle(Generic[OutputT]): + """Handle for interacting with a Nexus operation.""" + + # TODO(nexus-preview): should attempts to instantiate directly throw? + + def cancel(self) -> bool: + """Request cancellation of the operation.""" + raise NotImplementedError + + def __await__(self) -> Generator[Any, Any, OutputT]: + """Support await.""" + raise NotImplementedError + + @property + def operation_token(self) -> str | None: + """The operation token for this handle.""" + raise NotImplementedError + + +ServiceT = TypeVar("ServiceT") + + +class NexusOperationCancellationType(IntEnum): + """Defines behavior of a Nexus operation when the caller workflow initiates cancellation. + + Pass one of these values to :py:meth:`NexusClient.start_operation` to define cancellation + behavior. + + To initiate cancellation, use :py:meth:`NexusOperationHandle.cancel` and then ``await`` the + operation handle. This will result in a :py:class:`exceptions.NexusOperationError`. The values + of this enum define what is guaranteed to have happened by that point. 
+ """ + + ABANDON = int(temporalio.bridge.proto.nexus.NexusOperationCancellationType.ABANDON) + """Do not send any cancellation request to the operation handler; just report cancellation to the caller""" + + TRY_CANCEL = int( + temporalio.bridge.proto.nexus.NexusOperationCancellationType.TRY_CANCEL + ) + """Send a cancellation request but immediately report cancellation to the caller. Note that this + does not guarantee that cancellation is delivered to the operation handler if the caller exits + before the delivery is done. + """ + + WAIT_REQUESTED = int( + temporalio.bridge.proto.nexus.NexusOperationCancellationType.WAIT_CANCELLATION_REQUESTED + ) + """Send a cancellation request and wait for confirmation that the request was received. + Does not wait for the operation to complete. + """ + + WAIT_COMPLETED = int( + temporalio.bridge.proto.nexus.NexusOperationCancellationType.WAIT_CANCELLATION_COMPLETED + ) + """Send a cancellation request and wait for the operation to complete. + Note that the operation may not complete as cancelled (for example, if it catches the + :py:exc:`asyncio.CancelledError` resulting from the cancellation request).""" + + +class NexusClient(ABC, Generic[ServiceT]): + """A client for invoking Nexus operations. 
+ + Example:: + + nexus_client = workflow.create_nexus_client( + endpoint=my_nexus_endpoint, + service=MyService, + ) + handle = await nexus_client.start_operation( + operation=MyService.my_operation, + input=MyOperationInput(value="hello"), + schedule_to_close_timeout=timedelta(seconds=10), + ) + result = await handle.result() + """ + + # Overload for nexusrpc.Operation + @overload + @abstractmethod + async def start_operation( + self, + operation: nexusrpc.Operation[InputT, OutputT], + input: InputT, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> NexusOperationHandle[OutputT]: ... + + # Overload for string operation name + @overload + @abstractmethod + async def start_operation( + self, + operation: str, + input: Any, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> NexusOperationHandle[OutputT]: ... 
+ + # Overload for workflow_run_operation methods + @overload + @abstractmethod + async def start_operation( + self, + operation: Callable[ + [ServiceHandlerT, temporalio.nexus.WorkflowRunOperationContext, InputT], + Awaitable[temporalio.nexus.WorkflowHandle[OutputT]], + ], + input: InputT, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> NexusOperationHandle[OutputT]: ... + + # Overload for sync_operation methods (async def) + @overload + @abstractmethod + async def start_operation( + self, + operation: Callable[ + [ServiceHandlerT, nexusrpc.handler.StartOperationContext, InputT], + Awaitable[OutputT], + ], + input: InputT, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> NexusOperationHandle[OutputT]: ... 
+ + # Overload for sync_operation methods (def) + @overload + @abstractmethod + async def start_operation( + self, + operation: Callable[ + [ServiceHandlerT, nexusrpc.handler.StartOperationContext, InputT], + OutputT, + ], + input: InputT, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> NexusOperationHandle[OutputT]: ... + + # Overload for operation_handler + @overload + @abstractmethod + async def start_operation( + self, + operation: Callable[ + [ServiceHandlerT], nexusrpc.handler.OperationHandler[InputT, OutputT] + ], + input: InputT, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> NexusOperationHandle[OutputT]: ... + + @abstractmethod + async def start_operation( + self, + operation: Any, + input: Any, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> Any: + """Start a Nexus operation and return its handle. + + Args: + operation: The Nexus operation. + input: The Nexus operation input. + output_type: The Nexus operation output type. 
+ schedule_to_close_timeout: Timeout for the entire operation attempt. + schedule_to_start_timeout: Timeout for the operation to be started. + start_to_close_timeout: Timeout for async operations to complete after starting. + headers: Headers to send with the Nexus HTTP request. + + Returns: + A handle to the Nexus operation. The result can be obtained as + ```python + await handle.result() + ``` + """ + ... + + # Overload for nexusrpc.Operation + @overload + @abstractmethod + async def execute_operation( + self, + operation: nexusrpc.Operation[InputT, OutputT], + input: InputT, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> OutputT: ... + + # Overload for string operation name + @overload + @abstractmethod + async def execute_operation( + self, + operation: str, + input: Any, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> OutputT: ... 
+ + # Overload for workflow_run_operation methods + @overload + @abstractmethod + async def execute_operation( + self, + operation: Callable[ + [ServiceHandlerT, temporalio.nexus.WorkflowRunOperationContext, InputT], + Awaitable[temporalio.nexus.WorkflowHandle[OutputT]], + ], + input: InputT, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> OutputT: ... + + # Overload for sync_operation methods (async def) + @overload + @abstractmethod + async def execute_operation( + self, + operation: Callable[ + [ServiceT, nexusrpc.handler.StartOperationContext, InputT], + Awaitable[OutputT], + ], + input: InputT, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> OutputT: ... + + # Overload for sync_operation methods (def) + @overload + @abstractmethod + async def execute_operation( + self, + operation: Callable[ + [ServiceT, nexusrpc.handler.StartOperationContext, InputT], + OutputT, + ], + input: InputT, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> OutputT: ... 
+ + # Overload for operation_handler + @overload + @abstractmethod + async def execute_operation( + self, + operation: Callable[ + [ServiceT], + nexusrpc.handler.OperationHandler[InputT, OutputT], + ], + input: InputT, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> OutputT: ... + + @abstractmethod + async def execute_operation( + self, + operation: Any, + input: Any, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> Any: + """Execute a Nexus operation and return its result. + + Args: + operation: The Nexus operation. + input: The Nexus operation input. + output_type: The Nexus operation output type. + schedule_to_close_timeout: Timeout for the entire operation attempt. + schedule_to_start_timeout: Timeout for the operation to be started. + start_to_close_timeout: Timeout for async operations to complete after starting. + headers: Headers to send with the Nexus HTTP request. + + Returns: + The operation result. + """ + ... + + +class _NexusClient(NexusClient[ServiceT]): + def __init__( + self, + *, + endpoint: str, + service: type[ServiceT] | str, + ) -> None: + """Create a Nexus client. + + Args: + service: The Nexus service. + endpoint: The Nexus endpoint. + """ + # If service is not a str, then it must be a service interface or implementation + # class. 
+ if isinstance(service, str): + self.service_name = service + elif service_defn := nexusrpc.get_service_definition(service): + self.service_name = service_defn.name + else: + raise ValueError( + f"`service` may be a name (str), or a class decorated with either " + f"@nexusrpc.handler.service_handler or @nexusrpc.service. " + f"Invalid service type: {type(service)}" + ) + self.endpoint = endpoint + + async def start_operation( + self, + operation: Any, + input: Any, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> Any: + return await _Runtime.current().workflow_start_nexus_operation( + endpoint=self.endpoint, + service=self.service_name, + operation=operation, + input=input, + output_type=output_type, + schedule_to_close_timeout=schedule_to_close_timeout, + schedule_to_start_timeout=schedule_to_start_timeout, + start_to_close_timeout=start_to_close_timeout, + cancellation_type=cancellation_type, + headers=headers, + summary=summary, + ) + + async def execute_operation( + self, + operation: Any, + input: Any, + *, + output_type: type[OutputT] | None = None, + schedule_to_close_timeout: timedelta | None = None, + schedule_to_start_timeout: timedelta | None = None, + start_to_close_timeout: timedelta | None = None, + cancellation_type: NexusOperationCancellationType = NexusOperationCancellationType.WAIT_COMPLETED, + headers: Mapping[str, str] | None = None, + summary: str | None = None, + ) -> Any: + handle = await self.start_operation( + operation, + input, + output_type=output_type, + schedule_to_close_timeout=schedule_to_close_timeout, + schedule_to_start_timeout=schedule_to_start_timeout, + 
start_to_close_timeout=start_to_close_timeout, + cancellation_type=cancellation_type, + headers=headers, + summary=summary, + ) + return await handle + + +@overload +def create_nexus_client( + *, + service: type[ServiceT], + endpoint: str, +) -> NexusClient[ServiceT]: ... + + +@overload +def create_nexus_client( + *, + service: str, + endpoint: str, +) -> NexusClient[Any]: ... + + +def create_nexus_client( + *, + service: type[ServiceT] | str, + endpoint: str, +) -> NexusClient[ServiceT]: + """Create a Nexus client. + + Args: + service: The Nexus service. + endpoint: The Nexus endpoint. + """ + return _NexusClient(endpoint=endpoint, service=service) diff --git a/temporalio/workflow/_sandbox.py b/temporalio/workflow/_sandbox.py new file mode 100644 index 000000000..6f1d4569a --- /dev/null +++ b/temporalio/workflow/_sandbox.py @@ -0,0 +1,321 @@ +from __future__ import annotations + +import logging +import sys +import threading +from collections.abc import Iterator, Mapping, MutableMapping +from contextlib import contextmanager +from enum import Flag, auto +from typing import Any + +from ._context import Info, _Runtime, current_update_info + +__all__ = [ + "LoggerAdapter", + "SandboxImportNotificationPolicy", + "logger", + "unsafe", +] + +_sandbox_unrestricted = threading.local() +_in_sandbox = threading.local() +_imports_passed_through = threading.local() +_sandbox_import_notification_policy_override = threading.local() + + +class SandboxImportNotificationPolicy(Flag): + """Defines the behavior taken when modules are imported into the sandbox after the workflow is initially loaded or unintentionally missing from the passthrough list.""" + + SILENT = auto() + """Allow imports that do not violate sandbox restrictions and no warnings are generated.""" + WARN_ON_DYNAMIC_IMPORT = auto() + """Allows dynamic imports that do not violate sandbox restrictions but issues a warning when an import is triggered in the sandbox after initial workflow load.""" + 
WARN_ON_UNINTENTIONAL_PASSTHROUGH = auto() + """Allows imports that do not violate sandbox restrictions but issues a warning when an import is triggered in the sandbox that was unintentionally passed through.""" + RAISE_ON_UNINTENTIONAL_PASSTHROUGH = auto() + """Raise an error when an import is triggered in the sandbox that was unintentionally passed through.""" + + +class unsafe: + """Contains static methods that should not normally be called during + workflow execution except in advanced cases. + """ + + def __init__(self) -> None: # noqa: D107 + raise NotImplementedError + + @staticmethod + def in_sandbox() -> bool: + """Whether the code is executing on a sandboxed thread. + + Returns: + True if the code is executing in the sandbox thread. + """ + return getattr(_in_sandbox, "value", False) + + @staticmethod + def _set_in_sandbox(v: bool) -> None: + _in_sandbox.value = v + + @staticmethod + def is_replaying() -> bool: + """Whether the workflow is currently replaying. + + This includes queries and update validators that occur during replay. + + Returns: + True if the workflow is currently replaying + """ + return _Runtime.current().workflow_is_replaying() + + @staticmethod + def is_replaying_history_events() -> bool: + """Whether the workflow is replaying history events. + + This excludes queries and update validators, which are live operations. + + Returns: + True if replaying history events, False otherwise. + """ + return _Runtime.current().workflow_is_replaying_history_events() + + @staticmethod + def is_read_only() -> bool: + """Whether the workflow is currently in read-only mode. + + Read-only mode occurs during queries and update validators where + side effects are not allowed. + + Returns: + True if the workflow is in read-only mode, False otherwise. + """ + return _Runtime.current().workflow_is_read_only() + + @staticmethod + def is_sandbox_unrestricted() -> bool: + """Whether the current block of code is not restricted via sandbox. 
+ + Returns: + True if the current code is not restricted in the sandbox. + """ + # Activations happen in different threads than init and possibly the + # local hasn't been initialized in _that_ thread, so we allow unset here + # instead of just setting value = False globally. + return getattr(_sandbox_unrestricted, "value", False) + + @staticmethod + @contextmanager + def sandbox_unrestricted() -> Iterator[None]: + """A context manager to run code without sandbox restrictions.""" + # Only apply if not already applied. Nested calls just continue + # unrestricted. + if unsafe.is_sandbox_unrestricted(): + yield None + return + _sandbox_unrestricted.value = True + try: + yield None + finally: + _sandbox_unrestricted.value = False + + @staticmethod + def is_imports_passed_through() -> bool: + """Whether the current block of code is in + :py:meth:imports_passed_through. + + Returns: + True if the current code's imports will be passed through + """ + # See comment in is_sandbox_unrestricted for why we allow unset instead + # of just global false. + return getattr(_imports_passed_through, "value", False) + + @staticmethod + @contextmanager + def imports_passed_through() -> Iterator[None]: + """Context manager to mark all imports that occur within it as passed + through (meaning not reloaded by the sandbox). + """ + # Only apply if not already applied. Nested calls just continue + # passed through. 
+ if unsafe.is_imports_passed_through(): + yield None + return + _imports_passed_through.value = True + try: + yield None + finally: + _imports_passed_through.value = False + + @staticmethod + def current_import_notification_policy_override() -> ( + SandboxImportNotificationPolicy | None + ): + """Gets the current import notification policy override if one is set.""" + applied_policy = getattr( + _sandbox_import_notification_policy_override, + "value", + None, + ) + return applied_policy + + @staticmethod + @contextmanager + def sandbox_import_notification_policy( + policy: SandboxImportNotificationPolicy, + ) -> Iterator[None]: + """Context manager to apply the given import notification policy.""" + original_policy = _sandbox_import_notification_policy_override.value = getattr( + _sandbox_import_notification_policy_override, + "value", + None, + ) + _sandbox_import_notification_policy_override.value = policy + try: + yield None + finally: + _sandbox_import_notification_policy_override.value = original_policy + + +def _build_log_context( + workflow_details: Mapping[str, Any] | None, + update_details: Mapping[str, Any] | None = None, + *, + workflow_info_on_message: bool = True, + workflow_info_on_extra: bool = True, + full_workflow_info: Info | None = None, +) -> tuple[dict[str, Any], dict[str, Any]]: + """Build the msg_extra suffix and extra dict entries for a temporal log record. + + Returns: + (msg_extra, extra) where msg_extra should be appended to the log message + and extra should be merged into the log record's extra dict. 
+ """ + msg_extra: dict[str, Any] = {} + extra: dict[str, Any] = {} + + if workflow_details is not None: + if workflow_info_on_message: + msg_extra.update(workflow_details) + if workflow_info_on_extra: + extra["temporal_workflow"] = dict(workflow_details) + + if update_details is not None: + if workflow_info_on_message: + msg_extra.update(update_details) + if workflow_info_on_extra: + extra.setdefault("temporal_workflow", {}).update(update_details) + + if full_workflow_info is not None: + extra["workflow_info"] = full_workflow_info + + return msg_extra, extra + + +class LoggerAdapter(logging.LoggerAdapter): + """Adapter that adds details to the log about the running workflow. + + Attributes: + workflow_info_on_message: Boolean for whether a string representation of + a dict of some workflow info will be appended to each message. + Default is True. + workflow_info_on_extra: Boolean for whether a ``temporal_workflow`` + dictionary value will be added to the ``extra`` dictionary with some + workflow info, making it present on the ``LogRecord.__dict__`` for + use by others. Default is True. + full_workflow_info_on_extra: Boolean for whether a ``workflow_info`` + value will be added to the ``extra`` dictionary with the entire + workflow info, making it present on the ``LogRecord.__dict__`` for + use by others. Default is False. + log_during_replay: Boolean for whether logs should occur during replay. + Default is False. + + Values added to ``extra`` are merged with the ``extra`` dictionary from a + logging call, with values from the logging call taking precedence. I.e. the + behavior is that of ``merge_extra=True`` in Python >= 3.13. 
+ """ + + def __init__(self, logger: logging.Logger, extra: Mapping[str, Any] | None) -> None: + """Create the logger adapter.""" + super().__init__(logger, extra or {}) + self.workflow_info_on_message = True + self.workflow_info_on_extra = True + self.full_workflow_info_on_extra = False + self.log_during_replay = False + self.disable_sandbox = False + + def process( + self, msg: Any, kwargs: MutableMapping[str, Any] + ) -> tuple[Any, MutableMapping[str, Any]]: + """Override to add workflow details.""" + msg_extra: dict[str, Any] = {} + extra: dict[str, Any] = {} + + if ( + self.workflow_info_on_message + or self.workflow_info_on_extra + or self.full_workflow_info_on_extra + ): + runtime = _Runtime.maybe_current() + update_info = current_update_info() + msg_extra, extra = _build_log_context( + runtime.logger_details if runtime else None, + update_info._logger_details if update_info else None, + workflow_info_on_message=self.workflow_info_on_message, + workflow_info_on_extra=self.workflow_info_on_extra, + full_workflow_info=runtime.workflow_info() + if runtime and self.full_workflow_info_on_extra + else None, + ) + + kwargs["extra"] = {**extra, **(kwargs.get("extra") or {})} + if msg_extra: + msg = f"{msg} ({msg_extra})" + return msg, kwargs + + def log( + self, + level: int, + msg: object, + *args: Any, + stacklevel: int = 1, + **kwargs: Any, + ): + """Override to potentially disable the sandbox.""" + if sys.version_info < (3, 11) and stacklevel == 1: + # An additional stacklevel is needed on 3.10 because it doesn't skip internal frames until after stacklevel + # is decremented, so it needs an additional stacklevel to skip the internal frame. 
+ stacklevel += 1 # type: ignore[reportUnreachable] + stacklevel += 1 + if self.disable_sandbox: + with unsafe.sandbox_unrestricted(): + with unsafe.imports_passed_through(): + super().log(level, msg, *args, stacklevel=stacklevel, **kwargs) + else: + super().log(level, msg, *args, stacklevel=stacklevel, **kwargs) + + def isEnabledFor(self, level: int) -> bool: + """Override to ignore replay logs.""" + if not self.log_during_replay and unsafe.is_replaying_history_events(): + return False + return super().isEnabledFor(level) + + @property + def base_logger(self) -> logging.Logger: + """Underlying logger usable for actions such as adding + handlers/formatters. + """ + return self.logger + + def unsafe_disable_sandbox(self, value: bool = True): + """Disable the sandbox during log processing. + Can be turned back on with unsafe_disable_sandbox(False). + """ + self.disable_sandbox = value + + +logger = LoggerAdapter(logging.getLogger("temporalio.workflow"), None) +"""Logger that will have contextual workflow details embedded. + +Logs are skipped during replay by default. 
+""" diff --git a/temporalio/workflow/_workflow_ops.py b/temporalio/workflow/_workflow_ops.py new file mode 100644 index 000000000..0cd22cb17 --- /dev/null +++ b/temporalio/workflow/_workflow_ops.py @@ -0,0 +1,1010 @@ +from __future__ import annotations + +from collections.abc import Awaitable, Callable, Mapping, Sequence +from datetime import timedelta +from enum import IntEnum +from typing import Any, Concatenate, Generic, NoReturn, TypedDict, overload + +import temporalio.bridge.proto.child_workflow +import temporalio.common + +from ..types import ( + MethodAsyncNoParam, + MethodAsyncSingleParam, + MethodSyncOrAsyncNoParam, + MethodSyncOrAsyncSingleParam, + MultiParamSpec, + ParamType, + ReturnType, + SelfType, +) +from ._activities import _AsyncioTask +from ._context import _Runtime, uuid4 +from ._exceptions import ContinueAsNewVersioningBehavior, VersioningIntent + +__all__ = [ + "ChildWorkflowCancellationType", + "ChildWorkflowConfig", + "ChildWorkflowHandle", + "ContinueAsNewError", + "ExternalWorkflowHandle", + "ParentClosePolicy", + "all_handlers_finished", + "continue_as_new", + "execute_child_workflow", + "get_dynamic_query_handler", + "get_dynamic_signal_handler", + "get_dynamic_update_handler", + "get_external_workflow_handle", + "get_external_workflow_handle_for", + "get_query_handler", + "get_signal_handler", + "get_update_handler", + "set_dynamic_query_handler", + "set_dynamic_signal_handler", + "set_dynamic_update_handler", + "set_query_handler", + "set_signal_handler", + "set_update_handler", + "start_child_workflow", +] + + +class ChildWorkflowHandle(_AsyncioTask[ReturnType], Generic[SelfType, ReturnType]): # type: ignore[type-var] + """Handle for interacting with a child workflow. + + This is created via :py:func:`start_child_workflow`. + + This extends :py:class:`asyncio.Task` and supports all task features. 
+ """ + + @property + def id(self) -> str: + """ID for the workflow.""" + raise NotImplementedError + + @property + def first_execution_run_id(self) -> str | None: + """Run ID for the workflow.""" + raise NotImplementedError + + @overload + async def signal( + self, + signal: MethodSyncOrAsyncNoParam[SelfType, None], + ) -> None: ... + + @overload + async def signal( + self, + signal: MethodSyncOrAsyncSingleParam[SelfType, ParamType, None], + arg: ParamType, + ) -> None: ... + + @overload + async def signal( + self, + signal: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[None] | None], + *, + args: Sequence[Any], + ) -> None: ... + + @overload + async def signal( + self, + signal: str, + arg: Any = temporalio.common._arg_unset, + *, + args: Sequence[Any] = [], + ) -> None: ... + + async def signal( + self, + signal: str | Callable, # type: ignore[reportUnusedParameter] + arg: Any = temporalio.common._arg_unset, # type: ignore[reportUnusedParameter] + *, + args: Sequence[Any] = [], # type: ignore[reportUnusedParameter] + ) -> None: + """Signal this child workflow. + + Args: + signal: Name or method reference for the signal. + arg: Single argument to the signal. + args: Multiple arguments to the signal. Cannot be set if arg is. 
+ + """ + raise NotImplementedError + + +class ChildWorkflowCancellationType(IntEnum): + """How a child workflow cancellation should be handled.""" + + ABANDON = int( + temporalio.bridge.proto.child_workflow.ChildWorkflowCancellationType.ABANDON + ) + TRY_CANCEL = int( + temporalio.bridge.proto.child_workflow.ChildWorkflowCancellationType.TRY_CANCEL + ) + WAIT_CANCELLATION_COMPLETED = int( + temporalio.bridge.proto.child_workflow.ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED + ) + WAIT_CANCELLATION_REQUESTED = int( + temporalio.bridge.proto.child_workflow.ChildWorkflowCancellationType.WAIT_CANCELLATION_REQUESTED + ) + + +class ParentClosePolicy(IntEnum): + """How a child workflow should be handled when the parent closes.""" + + UNSPECIFIED = int( + temporalio.bridge.proto.child_workflow.ParentClosePolicy.PARENT_CLOSE_POLICY_UNSPECIFIED + ) + TERMINATE = int( + temporalio.bridge.proto.child_workflow.ParentClosePolicy.PARENT_CLOSE_POLICY_TERMINATE + ) + ABANDON = int( + temporalio.bridge.proto.child_workflow.ParentClosePolicy.PARENT_CLOSE_POLICY_ABANDON + ) + REQUEST_CANCEL = int( + temporalio.bridge.proto.child_workflow.ParentClosePolicy.PARENT_CLOSE_POLICY_REQUEST_CANCEL + ) + + +class ChildWorkflowConfig(TypedDict, total=False): + """TypedDict of config that can be used for :py:func:`start_child_workflow` + and :py:func:`execute_child_workflow`. 
+ """ + + id: str | None + task_queue: str | None + cancellation_type: ChildWorkflowCancellationType + parent_close_policy: ParentClosePolicy + execution_timeout: timedelta | None + run_timeout: timedelta | None + task_timeout: timedelta | None + id_reuse_policy: temporalio.common.WorkflowIDReusePolicy + retry_policy: temporalio.common.RetryPolicy | None + cron_schedule: str + memo: Mapping[str, Any] | None + search_attributes: None | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) + versioning_intent: VersioningIntent | None + static_summary: str | None + static_details: str | None + priority: temporalio.common.Priority + + +# Overload for no-param workflow +@overload +async def start_child_workflow( + workflow: MethodAsyncNoParam[SelfType, ReturnType], + *, + id: str | None = None, + task_queue: str | None = None, + cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED, + parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE, + execution_timeout: timedelta | None = None, + run_timeout: timedelta | None = None, + task_timeout: timedelta | None = None, + id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE, + retry_policy: temporalio.common.RetryPolicy | None = None, + cron_schedule: str = "", + memo: Mapping[str, Any] | None = None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) = None, + versioning_intent: VersioningIntent | None = None, + static_summary: str | None = None, + static_details: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ChildWorkflowHandle[SelfType, ReturnType]: ... 
+ + +# Overload for single-param workflow +@overload +async def start_child_workflow( + workflow: MethodAsyncSingleParam[SelfType, ParamType, ReturnType], + arg: ParamType, + *, + id: str | None = None, + task_queue: str | None = None, + cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED, + parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE, + execution_timeout: timedelta | None = None, + run_timeout: timedelta | None = None, + task_timeout: timedelta | None = None, + id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE, + retry_policy: temporalio.common.RetryPolicy | None = None, + cron_schedule: str = "", + memo: Mapping[str, Any] | None = None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) = None, + versioning_intent: VersioningIntent | None = None, + static_summary: str | None = None, + static_details: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ChildWorkflowHandle[SelfType, ReturnType]: ... 
+ + +# Overload for multi-param workflow +@overload +async def start_child_workflow( + workflow: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[ReturnType]], + *, + args: Sequence[Any], + id: str | None = None, + task_queue: str | None = None, + cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED, + parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE, + execution_timeout: timedelta | None = None, + run_timeout: timedelta | None = None, + task_timeout: timedelta | None = None, + id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE, + retry_policy: temporalio.common.RetryPolicy | None = None, + cron_schedule: str = "", + memo: Mapping[str, Any] | None = None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) = None, + versioning_intent: VersioningIntent | None = None, + static_summary: str | None = None, + static_details: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ChildWorkflowHandle[SelfType, ReturnType]: ... 
+ + +# Overload for string-name workflow +@overload +async def start_child_workflow( + workflow: str, + arg: Any = temporalio.common._arg_unset, + *, + args: Sequence[Any] = [], + id: str | None = None, + task_queue: str | None = None, + result_type: type | None = None, + cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED, + parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE, + execution_timeout: timedelta | None = None, + run_timeout: timedelta | None = None, + task_timeout: timedelta | None = None, + id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE, + retry_policy: temporalio.common.RetryPolicy | None = None, + cron_schedule: str = "", + memo: Mapping[str, Any] | None = None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) = None, + versioning_intent: VersioningIntent | None = None, + static_summary: str | None = None, + static_details: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ChildWorkflowHandle[Any, Any]: ... 
async def start_child_workflow(
    workflow: Any,
    arg: Any = temporalio.common._arg_unset,
    *,
    args: Sequence[Any] = [],
    id: str | None = None,
    task_queue: str | None = None,
    result_type: type | None = None,
    cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED,
    parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE,
    execution_timeout: timedelta | None = None,
    run_timeout: timedelta | None = None,
    task_timeout: timedelta | None = None,
    id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cron_schedule: str = "",
    memo: Mapping[str, Any] | None = None,
    search_attributes: None
    | (
        temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes
    ) = None,
    versioning_intent: VersioningIntent | None = None,
    static_summary: str | None = None,
    static_details: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ChildWorkflowHandle[Any, Any]:
    """Start a child workflow and return its handle.

    Args:
        workflow: String name or class method decorated with ``@workflow.run``
            for the workflow to start.
        arg: Single argument to the child workflow.
        args: Multiple arguments to the child workflow. Cannot be set if arg is.
        id: Optional unique identifier for the workflow execution. If not set,
            defaults to :py:func:`uuid4`.
        task_queue: Task queue to run the workflow on. Defaults to the current
            workflow's task queue.
        result_type: For string workflows, this can set the specific result type
            hint to deserialize into.
        cancellation_type: How the child workflow will react to cancellation.
        parent_close_policy: How to handle the child workflow when the parent
            workflow closes.
        execution_timeout: Total workflow execution timeout including
            retries and continue as new.
        run_timeout: Timeout of a single workflow run.
        task_timeout: Timeout of a single workflow task.
        id_reuse_policy: How already-existing IDs are treated.
        retry_policy: Retry policy for the workflow.
        cron_schedule: See https://docs.temporal.io/docs/content/what-is-a-temporal-cron-job/
        memo: Memo for the workflow.
        search_attributes: Search attributes for the workflow. The dictionary
            form of this is DEPRECATED.
        versioning_intent: When using the Worker Versioning feature, specifies whether this Child
            Workflow should run on a worker with a compatible Build Id or not.
            Deprecated: Use Worker Deployment versioning instead.
        static_summary: A single-line fixed summary for this child workflow execution that may appear
            in the UI/CLI. This can be in single-line Temporal markdown format.
        static_details: General fixed details for this child workflow execution that may appear in
            UI/CLI. This can be in Temporal markdown format and can span multiple lines. This is
            a fixed value on the workflow that cannot be updated. For details that can be
            updated, use :py:meth:`get_current_details` within the workflow.
        priority: Priority to use for this workflow.

    Returns:
        A workflow handle to the started/existing workflow.
    """
    # Warn (once) if the deprecated dictionary form of search attributes is used.
    temporalio.common._warn_on_deprecated_search_attributes(search_attributes)
    # Delegate to the workflow runtime; a random workflow ID is generated via
    # the deterministic workflow uuid4 when none is supplied.
    return await _Runtime.current().workflow_start_child_workflow(
        workflow,
        *temporalio.common._arg_or_args(arg, args),
        id=id or str(uuid4()),
        task_queue=task_queue,
        result_type=result_type,
        cancellation_type=cancellation_type,
        parent_close_policy=parent_close_policy,
        execution_timeout=execution_timeout,
        run_timeout=run_timeout,
        task_timeout=task_timeout,
        id_reuse_policy=id_reuse_policy,
        retry_policy=retry_policy,
        cron_schedule=cron_schedule,
        memo=memo,
        search_attributes=search_attributes,
        versioning_intent=versioning_intent,
        static_summary=static_summary,
        static_details=static_details,
        priority=priority,
    )


# Overload for no-param workflow
@overload
async def execute_child_workflow(
    workflow: MethodAsyncNoParam[SelfType, ReturnType],
    *,
    id: str | None = None,
    task_queue: str | None = None,
    cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED,
    parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE,
    execution_timeout: timedelta | None = None,
    run_timeout: timedelta | None = None,
    task_timeout: timedelta | None = None,
    id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE,
    retry_policy: temporalio.common.RetryPolicy | None = None,
    cron_schedule: str = "",
    memo: Mapping[str, Any] | None = None,
    search_attributes: None
    | (
        temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes
    ) = None,
    versioning_intent: VersioningIntent | None = None,
    static_summary: str | None = None,
    static_details: str | None = None,
    priority: temporalio.common.Priority = temporalio.common.Priority.default,
) -> ReturnType: ...
+ + +# Overload for single-param workflow +@overload +async def execute_child_workflow( + workflow: MethodAsyncSingleParam[SelfType, ParamType, ReturnType], + arg: ParamType, + *, + id: str | None = None, + task_queue: str | None = None, + cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED, + parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE, + execution_timeout: timedelta | None = None, + run_timeout: timedelta | None = None, + task_timeout: timedelta | None = None, + id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE, + retry_policy: temporalio.common.RetryPolicy | None = None, + cron_schedule: str = "", + memo: Mapping[str, Any] | None = None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) = None, + versioning_intent: VersioningIntent | None = None, + static_summary: str | None = None, + static_details: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ReturnType: ... 
+ + +# Overload for multi-param workflow +@overload +async def execute_child_workflow( + workflow: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[ReturnType]], + *, + args: Sequence[Any], + id: str | None = None, + task_queue: str | None = None, + cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED, + parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE, + execution_timeout: timedelta | None = None, + run_timeout: timedelta | None = None, + task_timeout: timedelta | None = None, + id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE, + retry_policy: temporalio.common.RetryPolicy | None = None, + cron_schedule: str = "", + memo: Mapping[str, Any] | None = None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) = None, + versioning_intent: VersioningIntent | None = None, + static_summary: str | None = None, + static_details: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> ReturnType: ... 
+ + +# Overload for string-name workflow +@overload +async def execute_child_workflow( + workflow: str, + arg: Any = temporalio.common._arg_unset, + *, + args: Sequence[Any] = [], + id: str | None = None, + task_queue: str | None = None, + result_type: type | None = None, + cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED, + parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE, + execution_timeout: timedelta | None = None, + run_timeout: timedelta | None = None, + task_timeout: timedelta | None = None, + id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE, + retry_policy: temporalio.common.RetryPolicy | None = None, + cron_schedule: str = "", + memo: Mapping[str, Any] | None = None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) = None, + versioning_intent: VersioningIntent | None = None, + static_summary: str | None = None, + static_details: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> Any: ... 
+ + +async def execute_child_workflow( + workflow: Any, + arg: Any = temporalio.common._arg_unset, + *, + args: Sequence[Any] = [], + id: str | None = None, + task_queue: str | None = None, + result_type: type | None = None, + cancellation_type: ChildWorkflowCancellationType = ChildWorkflowCancellationType.WAIT_CANCELLATION_COMPLETED, + parent_close_policy: ParentClosePolicy = ParentClosePolicy.TERMINATE, + execution_timeout: timedelta | None = None, + run_timeout: timedelta | None = None, + task_timeout: timedelta | None = None, + id_reuse_policy: temporalio.common.WorkflowIDReusePolicy = temporalio.common.WorkflowIDReusePolicy.ALLOW_DUPLICATE, + retry_policy: temporalio.common.RetryPolicy | None = None, + cron_schedule: str = "", + memo: Mapping[str, Any] | None = None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) = None, + versioning_intent: VersioningIntent | None = None, + static_summary: str | None = None, + static_details: str | None = None, + priority: temporalio.common.Priority = temporalio.common.Priority.default, +) -> Any: + """Start a child workflow and wait for completion. + + This is a shortcut for ``await (await`` :py:meth:`start_child_workflow` ``)``. 
+ """ + temporalio.common._warn_on_deprecated_search_attributes(search_attributes) + # We call the runtime directly instead of top-level start_child_workflow to + # ensure we don't miss new parameters + handle = await _Runtime.current().workflow_start_child_workflow( + workflow, + *temporalio.common._arg_or_args(arg, args), + id=id or str(uuid4()), + task_queue=task_queue, + result_type=result_type, + cancellation_type=cancellation_type, + parent_close_policy=parent_close_policy, + execution_timeout=execution_timeout, + run_timeout=run_timeout, + task_timeout=task_timeout, + id_reuse_policy=id_reuse_policy, + retry_policy=retry_policy, + cron_schedule=cron_schedule, + memo=memo, + search_attributes=search_attributes, + versioning_intent=versioning_intent, + static_summary=static_summary, + static_details=static_details, + priority=priority, + ) + return await handle + + +class ExternalWorkflowHandle(Generic[SelfType]): + """Handle for interacting with an external workflow. + + This is created via :py:func:`get_external_workflow_handle` or + :py:func:`get_external_workflow_handle_for`. + """ + + @property + def id(self) -> str: + """ID for the workflow.""" + raise NotImplementedError + + @property + def run_id(self) -> str | None: + """Run ID for the workflow if any.""" + raise NotImplementedError + + @overload + async def signal( + self, + signal: MethodSyncOrAsyncNoParam[SelfType, None], + ) -> None: ... + + @overload + async def signal( + self, + signal: MethodSyncOrAsyncSingleParam[SelfType, ParamType, None], + arg: ParamType, + ) -> None: ... + + @overload + async def signal( + self, + signal: str, + arg: Any = temporalio.common._arg_unset, + *, + args: Sequence[Any] = [], + ) -> None: ... 
+ + async def signal( + self, + signal: str | Callable, # type: ignore[reportUnusedParameter] + arg: Any = temporalio.common._arg_unset, # type: ignore[reportUnusedParameter] + *, + args: Sequence[Any] = [], # type: ignore[reportUnusedParameter] + ) -> None: + """Signal this external workflow. + + Args: + signal: Name or method reference for the signal. + arg: Single argument to the signal. + args: Multiple arguments to the signal. Cannot be set if arg is. + + """ + raise NotImplementedError + + async def cancel(self) -> None: + """Send a cancellation request to this external workflow. + + This will fail if the workflow cannot accept the request (e.g. if the + workflow is not found). + """ + raise NotImplementedError + + +def get_external_workflow_handle( + workflow_id: str, + *, + run_id: str | None = None, +) -> ExternalWorkflowHandle[Any]: + """Get a workflow handle to an existing workflow by its ID. + + Args: + workflow_id: Workflow ID to get a handle to. + run_id: Optional run ID for the workflow. + + Returns: + The external workflow handle. + """ + return _Runtime.current().workflow_get_external_workflow_handle( + workflow_id, run_id=run_id + ) + + +def get_external_workflow_handle_for( + workflow: MethodAsyncNoParam[SelfType, Any] # type: ignore[reportUnusedParameter] + | MethodAsyncSingleParam[SelfType, Any, Any], + workflow_id: str, + *, + run_id: str | None = None, +) -> ExternalWorkflowHandle[SelfType]: + """Get a typed workflow handle to an existing workflow by its ID. + + This is the same as :py:func:`get_external_workflow_handle` but typed. Note, + the workflow type given is not validated, it is only for typing. + + Args: + workflow: The workflow run method to use for typing the handle. + workflow_id: Workflow ID to get a handle to. + run_id: Optional run ID for the workflow. + + Returns: + The external workflow handle. 
+ """ + return get_external_workflow_handle(workflow_id, run_id=run_id) + + +class ContinueAsNewError(BaseException): + """Error thrown by :py:func:`continue_as_new`. + + This should not be caught, but instead be allowed to throw out of the + workflow which then triggers the continue as new. This should never be + instantiated directly. + """ + + def __init__(self, *args: object) -> None: + """Direct instantiation is disabled. Use :py:func:`continue_as_new`.""" + if type(self) is ContinueAsNewError: + raise RuntimeError("Cannot instantiate ContinueAsNewError directly") + super().__init__(*args) + + +# Overload for self (unfortunately, cannot type args) +@overload +def continue_as_new( + arg: Any = temporalio.common._arg_unset, + *, + args: Sequence[Any] = [], + task_queue: str | None = None, + run_timeout: timedelta | None = None, + task_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + memo: Mapping[str, Any] | None = None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) = None, + versioning_intent: VersioningIntent | None = None, + initial_versioning_behavior: ContinueAsNewVersioningBehavior | None = None, +) -> NoReturn: ... + + +# Overload for no-param workflow +@overload +def continue_as_new( + *, + workflow: MethodAsyncNoParam[SelfType, Any], + task_queue: str | None = None, + run_timeout: timedelta | None = None, + task_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + memo: Mapping[str, Any] | None = None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) = None, + versioning_intent: VersioningIntent | None = None, + initial_versioning_behavior: ContinueAsNewVersioningBehavior | None = None, +) -> NoReturn: ... 
+ + +# Overload for single-param workflow +@overload +def continue_as_new( + arg: ParamType, + *, + workflow: MethodAsyncSingleParam[SelfType, ParamType, Any], + task_queue: str | None = None, + run_timeout: timedelta | None = None, + task_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + memo: Mapping[str, Any] | None = None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) = None, + versioning_intent: VersioningIntent | None = None, + initial_versioning_behavior: ContinueAsNewVersioningBehavior | None = None, +) -> NoReturn: ... + + +# Overload for multi-param workflow +@overload +def continue_as_new( + *, + workflow: Callable[Concatenate[SelfType, MultiParamSpec], Awaitable[Any]], + args: Sequence[Any], + task_queue: str | None = None, + run_timeout: timedelta | None = None, + task_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + memo: Mapping[str, Any] | None = None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) = None, + versioning_intent: VersioningIntent | None = None, + initial_versioning_behavior: ContinueAsNewVersioningBehavior | None = None, +) -> NoReturn: ... + + +# Overload for string-name workflow +@overload +def continue_as_new( + *, + workflow: str, + args: Sequence[Any] = [], + task_queue: str | None = None, + run_timeout: timedelta | None = None, + task_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + memo: Mapping[str, Any] | None = None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) = None, + versioning_intent: VersioningIntent | None = None, + initial_versioning_behavior: ContinueAsNewVersioningBehavior | None = None, +) -> NoReturn: ... 
+ + +def continue_as_new( + arg: Any = temporalio.common._arg_unset, + *, + args: Sequence[Any] = [], + workflow: None | Callable | str = None, + task_queue: str | None = None, + run_timeout: timedelta | None = None, + task_timeout: timedelta | None = None, + retry_policy: temporalio.common.RetryPolicy | None = None, + memo: Mapping[str, Any] | None = None, + search_attributes: None + | ( + temporalio.common.SearchAttributes | temporalio.common.TypedSearchAttributes + ) = None, + versioning_intent: VersioningIntent | None = None, + initial_versioning_behavior: ContinueAsNewVersioningBehavior | None = None, +) -> NoReturn: + """Stop the workflow immediately and continue as new. + + Args: + arg: Single argument to the continued workflow. + args: Multiple arguments to the continued workflow. Cannot be set if arg + is. + workflow: Specific workflow to continue to. Defaults to the current + workflow. + task_queue: Task queue to run the workflow on. Defaults to the current + workflow's task queue. + run_timeout: Timeout of a single workflow run. Defaults to the current + workflow's run timeout. + task_timeout: Timeout of a single workflow task. Defaults to the current + workflow's task timeout. + memo: Memo for the workflow. Defaults to the current workflow's memo. + search_attributes: Search attributes for the workflow. Defaults to the + current workflow's search attributes. The dictionary form of this is + DEPRECATED. + versioning_intent: When using the Worker Versioning feature, specifies whether this Workflow + should Continue-as-New onto a worker with a compatible Build Id or not. + Deprecated: Use Worker Deployment versioning instead. + + Returns: + Never returns, always raises a :py:class:`ContinueAsNewError`. + + Raises: + ContinueAsNewError: Always raised by this function. 
Should not be caught
+            but instead be allowed to propagate out of the workflow.
+    """
+    temporalio.common._warn_on_deprecated_search_attributes(search_attributes)
+    _Runtime.current().workflow_continue_as_new(
+        *temporalio.common._arg_or_args(arg, args),
+        workflow=workflow,
+        task_queue=task_queue,
+        run_timeout=run_timeout,
+        task_timeout=task_timeout,
+        retry_policy=retry_policy,
+        memo=memo,
+        search_attributes=search_attributes,
+        versioning_intent=versioning_intent,
+        initial_versioning_behavior=initial_versioning_behavior,
+    )
+
+
+def get_signal_handler(name: str) -> Callable | None:
+    """Get the signal handler for the given name if any.
+
+    This includes handlers created via the ``@workflow.signal`` decorator.
+
+    Args:
+        name: Name of the signal.
+
+    Returns:
+        Callable for the signal if any. If a handler is not found for the name,
+        this will not return the dynamic handler even if there is one.
+    """
+    return _Runtime.current().workflow_get_signal_handler(name)
+
+
+def set_signal_handler(name: str, handler: Callable | None) -> None:
+    """Set or unset the signal handler for the given name.
+
+    This overrides any existing handlers for the given name, including handlers
+    created via the ``@workflow.signal`` decorator.
+
+    When set, all unhandled past signals for the given name are immediately sent
+    to the handler.
+
+    Args:
+        name: Name of the signal.
+        handler: Callable to set or None to unset.
+    """
+    _Runtime.current().workflow_set_signal_handler(name, handler)
+
+
+def get_dynamic_signal_handler() -> Callable | None:
+    """Get the dynamic signal handler if any.
+
+    This includes dynamic handlers created via the ``@workflow.signal``
+    decorator.
+
+    Returns:
+        Callable for the dynamic signal handler if any.
+    """
+    return _Runtime.current().workflow_get_signal_handler(None)
+
+
+def set_dynamic_signal_handler(handler: Callable | None) -> None:
+    """Set or unset the dynamic signal handler. 
+ + This overrides the existing dynamic handler even if it was created via the + ``@workflow.signal`` decorator. + + When set, all unhandled past signals are immediately sent to the handler. + + Args: + handler: Callable to set or None to unset. + """ + _Runtime.current().workflow_set_signal_handler(None, handler) + + +def get_query_handler(name: str) -> Callable | None: + """Get the query handler for the given name if any. + + This includes handlers created via the ``@workflow.query`` decorator. + + Args: + name: Name of the query. + + Returns: + Callable for the query if any. If a handler is not found for the name, + this will not return the dynamic handler even if there is one. + """ + return _Runtime.current().workflow_get_query_handler(name) + + +def set_query_handler(name: str, handler: Callable | None) -> None: + """Set or unset the query handler for the given name. + + This overrides any existing handlers for the given name, including handlers + created via the ``@workflow.query`` decorator. + + Args: + name: Name of the query. + handler: Callable to set or None to unset. + """ + _Runtime.current().workflow_set_query_handler(name, handler) + + +def get_dynamic_query_handler() -> Callable | None: + """Get the dynamic query handler if any. + + This includes dynamic handlers created via the ``@workflow.query`` + decorator. + + Returns: + Callable for the dynamic query handler if any. + """ + return _Runtime.current().workflow_get_query_handler(None) + + +def set_dynamic_query_handler(handler: Callable | None) -> None: + """Set or unset the dynamic query handler. + + This overrides the existing dynamic handler even if it was created via the + ``@workflow.query`` decorator. + + Args: + handler: Callable to set or None to unset. + """ + _Runtime.current().workflow_set_query_handler(None, handler) + + +def get_update_handler(name: str) -> Callable | None: + """Get the update handler for the given name if any. 
+ + This includes handlers created via the ``@workflow.update`` decorator. + + Args: + name: Name of the update. + + Returns: + Callable for the update if any. If a handler is not found for the name, + this will not return the dynamic handler even if there is one. + """ + return _Runtime.current().workflow_get_update_handler(name) + + +def set_update_handler( + name: str, handler: Callable | None, *, validator: Callable | None = None +) -> None: + """Set or unset the update handler for the given name. + + This overrides any existing handlers for the given name, including handlers + created via the ``@workflow.update`` decorator. + + Args: + name: Name of the update. + handler: Callable to set or None to unset. + validator: Callable to set or None to unset as the update validator. + """ + _Runtime.current().workflow_set_update_handler(name, handler, validator) + + +def get_dynamic_update_handler() -> Callable | None: + """Get the dynamic update handler if any. + + This includes dynamic handlers created via the ``@workflow.update`` + decorator. + + Returns: + Callable for the dynamic update handler if any. + """ + return _Runtime.current().workflow_get_update_handler(None) + + +def set_dynamic_update_handler( + handler: Callable | None, *, validator: Callable | None = None +) -> None: + """Set or unset the dynamic update handler. + + This overrides the existing dynamic handler even if it was created via the + ``@workflow.update`` decorator. + + Args: + handler: Callable to set or None to unset. + validator: Callable to set or None to unset as the update validator. + """ + _Runtime.current().workflow_set_update_handler(None, handler, validator) + + +def all_handlers_finished() -> bool: + """Whether update and signal handlers have finished executing. 
+ + Consider waiting on this condition before workflow return or continue-as-new, to prevent + interruption of in-progress handlers by workflow exit: + ``await workflow.wait_condition(lambda: workflow.all_handlers_finished())`` + + Returns: + True if there are no in-progress update or signal handler executions. + """ + return _Runtime.current().workflow_all_handlers_finished()