diff --git a/pyproject.toml b/pyproject.toml index 80146f1e8..319835d70 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,7 @@ dev = [ "psutil>=5.9.3,<6", "pydocstyle>=6.3.0,<7", "pydoctor>=24.11.1,<25", - "pyright==1.1.402", + "pyright==1.1.403", "pytest~=7.4", "pytest-asyncio>=0.21,<0.22", "pytest-timeout~=2.2", @@ -57,6 +57,7 @@ dev = [ "pytest-cov>=6.1.1", "httpx>=0.28.1", "pytest-pretty>=1.3.0", + "basedpyright>=1.30.1", ] [tool.poe.tasks] @@ -69,14 +70,16 @@ lint = [ {cmd = "uv run ruff check --select I"}, {cmd = "uv run ruff format --check"}, {ref = "lint-types"}, - {cmd = "uv run pyright"}, {ref = "lint-docs"}, ] bridge-lint = { cmd = "cargo clippy -- -D warnings", cwd = "temporalio/bridge" } # TODO(cretz): Why does pydocstyle complain about @overload missing docs after # https://github.com/PyCQA/pydocstyle/pull/511? lint-docs = "uv run pydocstyle --ignore-decorators=overload" -lint-types = "uv run mypy --namespace-packages --check-untyped-defs ." +lint-types = [ + { cmd = "uv run pyright"}, + { cmd = "uv run mypy --namespace-packages --check-untyped-defs ."}, +] run-bench = "uv run python scripts/run_bench.py" test = "uv run pytest" @@ -100,7 +103,7 @@ filterwarnings = [ [tool.cibuildwheel] before-all = "pip install protoc-wheel-0" build = "cp39-win_amd64 cp39-manylinux_x86_64 cp39-manylinux_aarch64 cp39-macosx_x86_64 cp39-macosx_arm64" -build-verbosity = "1" +build-verbosity = 1 [tool.cibuildwheel.macos] environment = { MACOSX_DEPLOYMENT_TARGET = "10.12" } @@ -158,16 +161,24 @@ project-name = "Temporal Python" sidebar-expand-depth = 2 [tool.pyright] -reportUnknownVariableType = "none" -reportUnknownParameterType = "none" -reportUnusedCallResult = "none" -reportImplicitStringConcatenation = "none" -reportPrivateUsage = "none" +enableTypeIgnoreComments = true +reportAny = "none" +reportCallInDefaultInitializer = "none" reportExplicitAny = "none" +reportIgnoreCommentWithoutRule = "none" +reportImplicitOverride = "none" 
+reportImplicitStringConcatenation = "none" +reportImportCycles = "none" reportMissingTypeArgument = "none" -reportAny = "none" -enableTypeIgnoreComments = true - +reportPrivateUsage = "none" +reportUnannotatedClassAttribute = "none" +reportUnknownArgumentType = "none" +reportUnknownMemberType = "none" +reportUnknownParameterType = "none" +reportUnknownVariableType = "none" +reportUnnecessaryIsInstance = "none" +reportUnnecessaryTypeIgnoreComment = "none" +reportUnusedCallResult = "none" include = ["temporalio", "tests"] exclude = [ "temporalio/api", @@ -226,3 +237,6 @@ exclude = [ [tool.uv] # Prevent uv commands from building the package by default package = false + +[tool.uv.sources] +nexus-rpc = { path = "../nexus-sdk-python", editable = true } diff --git a/temporalio/client.py b/temporalio/client.py index 0aab85465..b69feff3b 100644 --- a/temporalio/client.py +++ b/temporalio/client.py @@ -55,6 +55,7 @@ import temporalio.common import temporalio.converter import temporalio.exceptions +import temporalio.nexus import temporalio.runtime import temporalio.service import temporalio.workflow @@ -1542,6 +1543,12 @@ def __init__( result_run_id: Optional[str] = None, first_execution_run_id: Optional[str] = None, result_type: Optional[Type] = None, + start_workflow_response: Optional[ + Union[ + temporalio.api.workflowservice.v1.StartWorkflowExecutionResponse, + temporalio.api.workflowservice.v1.SignalWithStartWorkflowExecutionResponse, + ] + ] = None, ) -> None: """Create workflow handle.""" self._client = client @@ -1550,6 +1557,7 @@ def __init__( self._result_run_id = result_run_id self._first_execution_run_id = first_execution_run_id self._result_type = result_type + self._start_workflow_response = start_workflow_response self.__temporal_eagerly_started = False @property @@ -5772,7 +5780,7 @@ async def get_worker_task_reachability( class _ClientImpl(OutboundInterceptor): - def __init__(self, client: Client) -> None: + def __init__(self, client: Client) -> None: # 
type: ignore # We are intentionally not calling the base class's __init__ here self._client = client @@ -5832,6 +5840,7 @@ async def start_workflow( result_run_id=resp.run_id, first_execution_run_id=first_execution_run_id, result_type=input.ret_type, + start_workflow_response=resp, ) setattr(handle, "__temporal_eagerly_started", eagerly_started) return handle @@ -7311,23 +7320,8 @@ def api_key(self, value: Optional[str]) -> None: self.service_client.update_api_key(value) -@dataclass(frozen=True) -class NexusCallback: - """Nexus callback to attach to events such as workflow completion. - - .. warning:: - This API is experimental and unstable. - """ - - url: str - """Callback URL.""" - - headers: Mapping[str, str] - """Header to attach to callback request.""" - - # Intended to become a union of callback types -Callback = NexusCallback +Callback = temporalio.nexus.NexusCallback async def _encode_user_metadata( diff --git a/temporalio/contrib/openai_agents/_invoke_model_activity.py b/temporalio/contrib/openai_agents/_invoke_model_activity.py index 10b7370cb..069ea001e 100644 --- a/temporalio/contrib/openai_agents/_invoke_model_activity.py +++ b/temporalio/contrib/openai_agents/_invoke_model_activity.py @@ -26,7 +26,7 @@ from agents.models.multi_provider import MultiProvider from typing_extensions import Required, TypedDict -from temporalio import activity, workflow +from temporalio import activity from temporalio.contrib.openai_agents._heartbeat_decorator import _auto_heartbeater @@ -106,7 +106,7 @@ class ActivityModelInput(TypedDict, total=False): model_name: Optional[str] system_instructions: Optional[str] - input: Required[Union[str, list[TResponseInputItem]]] # type: ignore + input: Required[Union[str, list[TResponseInputItem]]] model_settings: Required[ModelSettings] tools: list[ToolInput] output_schema: Optional[AgentOutputSchemaInput] diff --git a/temporalio/contrib/openai_agents/_temporal_model_stub.py b/temporalio/contrib/openai_agents/_temporal_model_stub.py 
index 1092c9ada..c620a95ec 100644 --- a/temporalio/contrib/openai_agents/_temporal_model_stub.py +++ b/temporalio/contrib/openai_agents/_temporal_model_stub.py @@ -1,16 +1,6 @@ from __future__ import annotations import logging -from datetime import timedelta -from typing import Optional - -from temporalio import workflow -from temporalio.common import Priority, RetryPolicy -from temporalio.contrib.openai_agents._model_parameters import ModelActivityParameters -from temporalio.workflow import ActivityCancellationType, VersioningIntent - -logger = logging.getLogger(__name__) - from typing import Any, AsyncIterator, Optional, Sequence, Union, cast from agents import ( @@ -31,6 +21,7 @@ from agents.items import TResponseStreamEvent from openai.types.responses.response_prompt_param import ResponsePromptParam +from temporalio import workflow from temporalio.contrib.openai_agents._invoke_model_activity import ( ActivityModelInput, AgentOutputSchemaInput, @@ -40,6 +31,9 @@ ModelTracingInput, ToolInput, ) +from temporalio.contrib.openai_agents._model_parameters import ModelActivityParameters + +logger = logging.getLogger(__name__) class _TemporalModelStub(Model): @@ -57,7 +51,7 @@ def __init__( async def get_response( self, system_instructions: Optional[str], - input: Union[str, list[TResponseInputItem]], + input: Union[str, list[TResponseInputItem], dict[str, str]], model_settings: ModelSettings, tools: list[Tool], output_schema: Optional[AgentOutputSchemaBase], @@ -67,7 +61,9 @@ async def get_response( previous_response_id: Optional[str], prompt: Optional[ResponsePromptParam], ) -> ModelResponse: - def get_summary(input: Union[str, list[TResponseInputItem]]) -> str: + def get_summary( + input: Union[str, list[TResponseInputItem], dict[str, str]], + ) -> str: ### Activity summary shown in the UI try: max_size = 100 @@ -88,21 +84,18 @@ def get_summary(input: Union[str, list[TResponseInputItem]]) -> str: return "" def make_tool_info(tool: Tool) -> ToolInput: - if 
isinstance(tool, FileSearchTool): - return cast(FileSearchTool, tool) - elif isinstance(tool, WebSearchTool): - return cast(WebSearchTool, tool) + if isinstance(tool, (FileSearchTool, WebSearchTool)): + return tool elif isinstance(tool, ComputerTool): raise NotImplementedError( "Computer search preview is not supported in Temporal model" ) elif isinstance(tool, FunctionTool): - t = cast(FunctionToolInput, tool) return FunctionToolInput( - name=t.name, - description=t.description, - params_json_schema=t.params_json_schema, - strict_json_schema=t.strict_json_schema, + name=tool.name, + description=tool.description, + params_json_schema=tool.params_json_schema, + strict_json_schema=tool.strict_json_schema, ) else: raise ValueError(f"Unknown tool type: {tool.name}") @@ -141,7 +134,7 @@ def make_tool_info(tool: Tool) -> ToolInput: activity_input = ActivityModelInput( model_name=self.model_name, system_instructions=system_instructions, - input=input, + input=cast(Union[str, list[TResponseInputItem]], input), model_settings=model_settings, tools=tool_infos, output_schema=output_schema_input, @@ -169,7 +162,7 @@ def make_tool_info(tool: Tool) -> ToolInput: def stream_response( self, system_instructions: Optional[str], - input: Union[str, list][TResponseInputItem], # type: ignore + input: Union[str, list[TResponseInputItem]], model_settings: ModelSettings, tools: list[Tool], output_schema: Optional[AgentOutputSchemaBase], diff --git a/temporalio/contrib/openai_agents/_trace_interceptor.py b/temporalio/contrib/openai_agents/_trace_interceptor.py index 8a791b73c..483e01147 100644 --- a/temporalio/contrib/openai_agents/_trace_interceptor.py +++ b/temporalio/contrib/openai_agents/_trace_interceptor.py @@ -3,14 +3,13 @@ from __future__ import annotations from contextlib import contextmanager -from typing import Any, Mapping, Protocol, Type, cast +from typing import Any, Mapping, Protocol, Type -from agents import CustomSpanData, custom_span, get_current_span, trace +from agents 
import custom_span, get_current_span, trace from agents.tracing import ( get_trace_provider, ) -from agents.tracing.provider import DefaultTraceProvider -from agents.tracing.spans import NoOpSpan, SpanImpl +from agents.tracing.spans import NoOpSpan import temporalio.activity import temporalio.api.common.v1 @@ -116,7 +115,7 @@ class OpenAIAgentsTracingInterceptor( worker = Worker(client, task_queue="my-task-queue", interceptors=[interceptor]) """ - def __init__( + def __init__( # type: ignore[reportMissingSuperCall] self, payload_converter: temporalio.converter.PayloadConverter = temporalio.converter.default().payload_converter, ) -> None: @@ -189,7 +188,7 @@ async def start_workflow( **({"temporal:workflowId": input.id} if input.id else {}), } data = {"workflowId": input.id} if input.id else None - span_name = f"temporal:startWorkflow" + span_name = "temporal:startWorkflow" if get_trace_provider().get_current_trace() is None: with trace( span_name + ":" + input.workflow, metadata=metadata, group_id=input.id @@ -208,7 +207,7 @@ async def query_workflow(self, input: temporalio.client.QueryWorkflowInput) -> A **({"temporal:workflowId": input.id} if input.id else {}), } data = {"workflowId": input.id, "query": input.query} - span_name = f"temporal:queryWorkflow" + span_name = "temporal:queryWorkflow" if get_trace_provider().get_current_trace() is None: with trace(span_name, metadata=metadata, group_id=input.id): with custom_span(name=span_name, data=data): @@ -227,7 +226,7 @@ async def signal_workflow( **({"temporal:workflowId": input.id} if input.id else {}), } data = {"workflowId": input.id, "signal": input.signal} - span_name = f"temporal:signalWorkflow" + span_name = "temporal:signalWorkflow" if get_trace_provider().get_current_trace() is None: with trace(span_name, metadata=metadata, group_id=input.id): with custom_span(name=span_name, data=data): diff --git a/temporalio/nexus/__init__.py b/temporalio/nexus/__init__.py index c8bd1e40d..de9164716 100644 --- 
a/temporalio/nexus/__init__.py +++ b/temporalio/nexus/__init__.py @@ -9,6 +9,7 @@ from ._decorators import workflow_run_operation as workflow_run_operation from ._operation_context import Info as Info from ._operation_context import LoggerAdapter as LoggerAdapter +from ._operation_context import NexusCallback as NexusCallback from ._operation_context import ( WorkflowRunOperationContext as WorkflowRunOperationContext, ) diff --git a/temporalio/nexus/_decorators.py b/temporalio/nexus/_decorators.py index 1266fd29e..3ea05f716 100644 --- a/temporalio/nexus/_decorators.py +++ b/temporalio/nexus/_decorators.py @@ -16,16 +16,10 @@ StartOperationContext, ) -from temporalio.nexus._operation_context import ( - WorkflowRunOperationContext, -) -from temporalio.nexus._operation_handlers import ( - WorkflowRunOperationHandler, -) -from temporalio.nexus._token import ( - WorkflowHandle, -) -from temporalio.nexus._util import ( +from ._operation_context import WorkflowRunOperationContext +from ._operation_handlers import WorkflowRunOperationHandler +from ._token import WorkflowHandle +from ._util import ( get_callable_name, get_workflow_run_start_method_input_and_output_type_annotations, set_operation_factory, @@ -123,7 +117,7 @@ async def _start( return WorkflowRunOperationHandler(_start, input_type, output_type) method_name = get_callable_name(start) - nexusrpc.set_operation_definition( + nexusrpc.set_operation( operation_handler_factory, nexusrpc.Operation( name=name or method_name, diff --git a/temporalio/nexus/_link_conversion.py b/temporalio/nexus/_link_conversion.py index 87027333b..a13c2d149 100644 --- a/temporalio/nexus/_link_conversion.py +++ b/temporalio/nexus/_link_conversion.py @@ -4,6 +4,7 @@ import re import urllib.parse from typing import ( + TYPE_CHECKING, Any, Optional, ) @@ -12,7 +13,9 @@ import temporalio.api.common.v1 import temporalio.api.enums.v1 -import temporalio.client + +if TYPE_CHECKING: + import temporalio.client logger = logging.getLogger(__name__) 
@@ -23,7 +26,7 @@ LINK_EVENT_TYPE_PARAM_NAME = "eventType" -def workflow_handle_to_workflow_execution_started_event_link( +def workflow_execution_started_event_link_from_workflow_handle( handle: temporalio.client.WorkflowHandle[Any, Any], ) -> temporalio.api.common.v1.Link.WorkflowEvent: """Create a WorkflowEvent link corresponding to a started workflow""" diff --git a/temporalio/nexus/_operation_context.py b/temporalio/nexus/_operation_context.py index 52e6f7b1d..f411a0a06 100644 --- a/temporalio/nexus/_operation_context.py +++ b/temporalio/nexus/_operation_context.py @@ -2,18 +2,15 @@ import dataclasses import logging +from collections.abc import Awaitable, Mapping, MutableMapping, Sequence from contextvars import ContextVar from dataclasses import dataclass from datetime import timedelta from typing import ( + TYPE_CHECKING, Any, - Awaitable, Callable, - Mapping, - MutableMapping, Optional, - Sequence, - Type, Union, overload, ) @@ -22,7 +19,7 @@ from typing_extensions import Concatenate import temporalio.api.common.v1 -import temporalio.client +import temporalio.api.workflowservice.v1 import temporalio.common from temporalio.nexus import _link_conversion from temporalio.nexus._token import WorkflowHandle @@ -35,6 +32,9 @@ SelfType, ) +if TYPE_CHECKING: + import temporalio.client + # The Temporal Nexus worker always builds a nexusrpc StartOperationContext or # CancelOperationContext and passes it as the first parameter to the nexusrpc operation # handler. In addition, it sets one of the following context vars. @@ -125,7 +125,7 @@ def _get_callbacks( ctx = self.nexus_context return ( [ - temporalio.client.NexusCallback( + NexusCallback( url=ctx.callback_url, headers=ctx.callback_headers, ) @@ -146,25 +146,30 @@ def _get_workflow_event_links( def _add_outbound_links( self, workflow_handle: temporalio.client.WorkflowHandle[Any, Any] ): + # If links were not sent in StartWorkflowExecutionResponse then construct them. 
+ wf_event_links: list[temporalio.api.common.v1.Link.WorkflowEvent] = [] try: - link = _link_conversion.workflow_event_to_nexus_link( - _link_conversion.workflow_handle_to_workflow_execution_started_event_link( - workflow_handle - ) + if isinstance( + workflow_handle._start_workflow_response, + temporalio.api.workflowservice.v1.StartWorkflowExecutionResponse, + ): + if workflow_handle._start_workflow_response.HasField("link"): + if link := workflow_handle._start_workflow_response.link: + if link.HasField("workflow_event"): + wf_event_links.append(link.workflow_event) + if not wf_event_links: + wf_event_links = [ + _link_conversion.workflow_execution_started_event_link_from_workflow_handle( + workflow_handle + ) + ] + self.nexus_context.outbound_links.extend( + _link_conversion.workflow_event_to_nexus_link(link) + for link in wf_event_links ) except Exception as e: logger.warning( - f"Failed to create WorkflowExecutionStarted event link for workflow {id}: {e}" - ) - else: - self.nexus_context.outbound_links.append( - # TODO(nexus-prerelease): Before, WorkflowRunOperation was generating an EventReference - # link to send back to the caller. Now, it checks if the server returned - # the link in the StartWorkflowExecutionResponse, and if so, send the link - # from the response to the caller. Fallback to generating the link for - # backwards compatibility. 
PR reference in Go SDK: - # https://github.com/temporalio/sdk-go/pull/1934 - link + f"Failed to create WorkflowExecutionStarted event links for workflow {workflow_handle}: {e}" ) return workflow_handle @@ -305,7 +310,7 @@ async def start_workflow( args: Sequence[Any] = [], id: str, task_queue: Optional[str] = None, - result_type: Optional[Type[ReturnType]] = None, + result_type: Optional[type[ReturnType]] = None, execution_timeout: Optional[timedelta] = None, run_timeout: Optional[timedelta] = None, task_timeout: Optional[timedelta] = None, @@ -340,7 +345,7 @@ async def start_workflow( args: Sequence[Any] = [], id: str, task_queue: Optional[str] = None, - result_type: Optional[Type] = None, + result_type: Optional[type] = None, execution_timeout: Optional[timedelta] = None, run_timeout: Optional[timedelta] = None, task_timeout: Optional[timedelta] = None, @@ -447,6 +452,21 @@ async def start_workflow( return WorkflowHandle[ReturnType]._unsafe_from_client_workflow_handle(wf_handle) +@dataclass(frozen=True) +class NexusCallback: + """Nexus callback to attach to events such as workflow completion. + + .. warning:: + This API is experimental and unstable. 
+ """ + + url: str + """Callback URL.""" + + headers: Mapping[str, str] + """Header to attach to callback request.""" + + @dataclass(frozen=True) class _TemporalCancelOperationContext: """Context for a Nexus cancel operation being handled by a Temporal Nexus Worker.""" diff --git a/temporalio/nexus/_operation_handlers.py b/temporalio/nexus/_operation_handlers.py index 99b9ed101..cdfb81dd5 100644 --- a/temporalio/nexus/_operation_handlers.py +++ b/temporalio/nexus/_operation_handlers.py @@ -24,7 +24,6 @@ StartOperationResultAsync, ) -from temporalio import client from temporalio.nexus._operation_context import ( _temporal_cancel_operation_context, ) @@ -73,15 +72,14 @@ async def start( """Start the operation, by starting a workflow and completing asynchronously.""" handle = await self._start(ctx, input) if not isinstance(handle, WorkflowHandle): - if isinstance(handle, client.WorkflowHandle): - raise RuntimeError( - f"Expected {handle} to be a nexus.WorkflowHandle, but got a client.WorkflowHandle. " - f"You must use WorkflowRunOperationContext.start_workflow " - "to start a workflow that will deliver the result of the Nexus operation, " - "not client.Client.start_workflow." - ) raise RuntimeError( f"Expected {handle} to be a nexus.WorkflowHandle, but got {type(handle)}. " + f"When using @workflow_run_operation you must use " + "WorkflowRunOperationContext.start_workflow() " + "to start a workflow that will deliver the result of the Nexus operation, " + "and you must return the nexus.WorkflowHandle that it returns. " + "It is not possible to use client.Client.start_workflow() and client.WorkflowHandle " + "for this purpose." 
) return StartOperationResultAsync(handle.to_token()) diff --git a/temporalio/nexus/_token.py b/temporalio/nexus/_token.py index 9793583a3..999e33767 100644 --- a/temporalio/nexus/_token.py +++ b/temporalio/nexus/_token.py @@ -3,15 +3,16 @@ import base64 import json from dataclasses import dataclass -from typing import Any, Generic, Literal, Optional, Type +from typing import TYPE_CHECKING, Any, Generic, Literal, Optional from nexusrpc import OutputT -from temporalio import client - OperationTokenType = Literal[1] OPERATION_TOKEN_TYPE_WORKFLOW: OperationTokenType = 1 +if TYPE_CHECKING: + import temporalio.client + @dataclass(frozen=True) class WorkflowHandle(Generic[OutputT]): @@ -32,8 +33,10 @@ class WorkflowHandle(Generic[OutputT]): version: Optional[int] = None def _to_client_workflow_handle( - self, client: client.Client, result_type: Optional[Type[OutputT]] = None - ) -> client.WorkflowHandle[Any, OutputT]: + self, + client: temporalio.client.Client, + result_type: Optional[type[OutputT]] = None, + ) -> temporalio.client.WorkflowHandle[Any, OutputT]: """Create a :py:class:`temporalio.client.WorkflowHandle` from the token.""" if client.namespace != self.namespace: raise ValueError( @@ -46,7 +49,7 @@ def _to_client_workflow_handle( # handle type. @classmethod def _unsafe_from_client_workflow_handle( - cls, workflow_handle: client.WorkflowHandle[Any, OutputT] + cls, workflow_handle: temporalio.client.WorkflowHandle[Any, OutputT] ) -> WorkflowHandle[OutputT]: """Create a :py:class:`WorkflowHandle` from a :py:class:`temporalio.client.WorkflowHandle`. 
diff --git a/temporalio/nexus/_util.py b/temporalio/nexus/_util.py index ef005d0c4..4c9d5997b 100644 --- a/temporalio/nexus/_util.py +++ b/temporalio/nexus/_util.py @@ -13,7 +13,6 @@ TypeVar, ) -import nexusrpc from nexusrpc import ( InputT, OutputT, @@ -118,28 +117,6 @@ def get_callable_name(fn: Callable[..., Any]) -> str: return method_name -# TODO(nexus-preview) Copied from nexusrpc -def get_operation_factory( - obj: Any, -) -> tuple[ - Optional[Callable[[Any], Any]], - Optional[nexusrpc.Operation[Any, Any]], -]: - """Return the :py:class:`Operation` for the object along with the factory function. - - ``obj`` should be a decorated operation start method. - """ - op_defn = nexusrpc.get_operation_definition(obj) - if op_defn: - factory = obj - else: - if factory := getattr(obj, "__nexus_operation_factory__", None): - op_defn = nexusrpc.get_operation_definition(factory) - if not isinstance(op_defn, nexusrpc.Operation): - return None, None - return factory, op_defn - - # TODO(nexus-preview) Copied from nexusrpc def set_operation_factory( obj: Any, diff --git a/temporalio/runtime.py b/temporalio/runtime.py index 809c06346..84b683941 100644 --- a/temporalio/runtime.py +++ b/temporalio/runtime.py @@ -227,7 +227,7 @@ def _on_logs( # We can't access logging module's start time and it's not worth # doing difference math to get relative time right here, so # we'll make time relative to _our_ module's start time - self.relativeCreated = (record.created - _module_start_time) * 1000 + self.relativeCreated = (record.created - _module_start_time) * 1000 # type: ignore[reportUninitializedInstanceVariable] # Log the record self.logger.handle(record) diff --git a/temporalio/worker/_interceptor.py b/temporalio/worker/_interceptor.py index 1b412cb7f..32ce66e0b 100644 --- a/temporalio/worker/_interceptor.py +++ b/temporalio/worker/_interceptor.py @@ -299,15 +299,14 @@ class StartNexusOperationInput(Generic[InputT, OutputT]): input: InputT schedule_to_close_timeout: 
Optional[timedelta] headers: Optional[Mapping[str, str]] - output_type: Optional[Type[OutputT]] = None + output_type: Optional[type[OutputT]] = None def __post_init__(self) -> None: """Initialize operation-specific attributes after dataclass creation.""" if isinstance(self.operation, nexusrpc.Operation): self.output_type = self.operation.output_type elif callable(self.operation): - _, op = temporalio.nexus._util.get_operation_factory(self.operation) - if isinstance(op, nexusrpc.Operation): + if op := nexusrpc.get_operation(self.operation): self.output_type = op.output_type else: raise ValueError( @@ -326,8 +325,7 @@ def operation_name(self) -> str: elif isinstance(self.operation, str): return self.operation elif callable(self.operation): - _, op = temporalio.nexus._util.get_operation_factory(self.operation) - if isinstance(op, nexusrpc.Operation): + if op := nexusrpc.get_operation(self.operation): return op.name else: raise ValueError( diff --git a/temporalio/worker/_worker.py b/temporalio/worker/_worker.py index 4d77e111e..ca35b9a88 100644 --- a/temporalio/worker/_worker.py +++ b/temporalio/worker/_worker.py @@ -24,17 +24,9 @@ from typing_extensions import TypeAlias, TypedDict -import temporalio.activity -import temporalio.api.common.v1 -import temporalio.bridge.client -import temporalio.bridge.proto -import temporalio.bridge.proto.activity_result -import temporalio.bridge.proto.activity_task -import temporalio.bridge.proto.common import temporalio.bridge.worker import temporalio.client -import temporalio.converter -import temporalio.exceptions +import temporalio.common import temporalio.runtime import temporalio.service from temporalio.common import ( @@ -578,8 +570,8 @@ def config(self) -> WorkerConfig: Configuration, shallow-copied. 
""" config = self._config.copy() - config["activities"] = list(config["activities"]) - config["workflows"] = list(config["workflows"]) + config["activities"] = list(config.get("activities", [])) + config["workflows"] = list(config.get("workflows", [])) return config @property diff --git a/temporalio/worker/_workflow_instance.py b/temporalio/worker/_workflow_instance.py index 8a9532b61..75e80b3e1 100644 --- a/temporalio/worker/_workflow_instance.py +++ b/temporalio/worker/_workflow_instance.py @@ -2514,7 +2514,7 @@ def get_debug(self) -> bool: class _WorkflowInboundImpl(WorkflowInboundInterceptor): - def __init__( + def __init__( # type: ignore self, instance: _WorkflowInstanceImpl, ) -> None: @@ -2522,7 +2522,7 @@ def __init__( self._instance = instance def init(self, outbound: WorkflowOutboundInterceptor) -> None: - self._outbound = outbound + self._outbound = outbound # type: ignore async def execute_workflow(self, input: ExecuteWorkflowInput) -> Any: args = [self._instance._object] + list(input.args) @@ -2572,7 +2572,7 @@ async def handle_update_handler(self, input: HandleUpdateInput) -> Any: class _WorkflowOutboundImpl(WorkflowOutboundInterceptor): - def __init__(self, instance: _WorkflowInstanceImpl) -> None: + def __init__(self, instance: _WorkflowInstanceImpl) -> None: # type: ignore # We are intentionally not calling the base class's __init__ here self._instance = instance diff --git a/temporalio/workflow.py b/temporalio/workflow.py index df78664ca..75cedd18c 100644 --- a/temporalio/workflow.py +++ b/temporalio/workflow.py @@ -5145,7 +5145,7 @@ async def start_operation( operation: nexusrpc.Operation[InputT, OutputT], input: InputT, *, - output_type: Optional[Type[OutputT]] = None, + output_type: Optional[type[OutputT]] = None, schedule_to_close_timeout: Optional[timedelta] = None, headers: Optional[Mapping[str, str]] = None, ) -> NexusOperationHandle[OutputT]: ... 
@@ -5158,7 +5158,7 @@ async def start_operation( operation: str, input: Any, *, - output_type: Optional[Type[OutputT]] = None, + output_type: Optional[type[OutputT]] = None, schedule_to_close_timeout: Optional[timedelta] = None, headers: Optional[Mapping[str, str]] = None, ) -> NexusOperationHandle[OutputT]: ... @@ -5174,7 +5174,7 @@ async def start_operation( ], input: InputT, *, - output_type: Optional[Type[OutputT]] = None, + output_type: Optional[type[OutputT]] = None, schedule_to_close_timeout: Optional[timedelta] = None, headers: Optional[Mapping[str, str]] = None, ) -> NexusOperationHandle[OutputT]: ... @@ -5190,7 +5190,7 @@ async def start_operation( ], input: InputT, *, - output_type: Optional[Type[OutputT]] = None, + output_type: Optional[type[OutputT]] = None, schedule_to_close_timeout: Optional[timedelta] = None, headers: Optional[Mapping[str, str]] = None, ) -> NexusOperationHandle[OutputT]: ... @@ -5206,7 +5206,7 @@ async def start_operation( ], input: InputT, *, - output_type: Optional[Type[OutputT]] = None, + output_type: Optional[type[OutputT]] = None, schedule_to_close_timeout: Optional[timedelta] = None, headers: Optional[Mapping[str, str]] = None, ) -> NexusOperationHandle[OutputT]: ... @@ -5217,7 +5217,7 @@ async def start_operation( operation: Any, input: Any, *, - output_type: Optional[Type[OutputT]] = None, + output_type: Optional[type[OutputT]] = None, schedule_to_close_timeout: Optional[timedelta] = None, headers: Optional[Mapping[str, str]] = None, ) -> Any: @@ -5246,7 +5246,7 @@ async def execute_operation( operation: nexusrpc.Operation[InputT, OutputT], input: InputT, *, - output_type: Optional[Type[OutputT]] = None, + output_type: Optional[type[OutputT]] = None, schedule_to_close_timeout: Optional[timedelta] = None, headers: Optional[Mapping[str, str]] = None, ) -> OutputT: ... 
@@ -5259,7 +5259,7 @@ async def execute_operation( operation: str, input: Any, *, - output_type: Optional[Type[OutputT]] = None, + output_type: Optional[type[OutputT]] = None, schedule_to_close_timeout: Optional[timedelta] = None, headers: Optional[Mapping[str, str]] = None, ) -> OutputT: ... @@ -5275,23 +5275,26 @@ async def execute_operation( ], input: InputT, *, - output_type: Optional[Type[OutputT]] = None, + output_type: Optional[type[OutputT]] = None, schedule_to_close_timeout: Optional[timedelta] = None, headers: Optional[Mapping[str, str]] = None, ) -> OutputT: ... + # TODO(nexus-preview): in practice, both these overloads match an async def sync + # operation (i.e. either can be deleted without causing a type error). + # Overload for sync_operation methods (async def) @overload @abstractmethod async def execute_operation( self, operation: Callable[ - [ServiceHandlerT, nexusrpc.handler.StartOperationContext, InputT], + [ServiceT, nexusrpc.handler.StartOperationContext, InputT], Awaitable[OutputT], ], input: InputT, *, - output_type: Optional[Type[OutputT]] = None, + output_type: Optional[type[OutputT]] = None, schedule_to_close_timeout: Optional[timedelta] = None, headers: Optional[Mapping[str, str]] = None, ) -> OutputT: ... @@ -5302,12 +5305,12 @@ async def execute_operation( async def execute_operation( self, operation: Callable[ - [ServiceHandlerT, nexusrpc.handler.StartOperationContext, InputT], + [ServiceT, nexusrpc.handler.StartOperationContext, InputT], OutputT, ], input: InputT, *, - output_type: Optional[Type[OutputT]] = None, + output_type: Optional[type[OutputT]] = None, schedule_to_close_timeout: Optional[timedelta] = None, headers: Optional[Mapping[str, str]] = None, ) -> OutputT: ... 
@@ -5318,7 +5321,7 @@ async def execute_operation( operation: Any, input: Any, *, - output_type: Optional[Type[OutputT]] = None, + output_type: Optional[type[OutputT]] = None, schedule_to_close_timeout: Optional[timedelta] = None, headers: Optional[Mapping[str, str]] = None, ) -> Any: @@ -5342,7 +5345,7 @@ def __init__( self, *, endpoint: str, - service: Union[Type[ServiceT], str], + service: Union[type[ServiceT], str], ) -> None: """Create a Nexus client. @@ -5369,7 +5372,7 @@ async def start_operation( operation: Any, input: Any, *, - output_type: Optional[Type] = None, + output_type: Optional[type] = None, schedule_to_close_timeout: Optional[timedelta] = None, headers: Optional[Mapping[str, str]] = None, ) -> Any: @@ -5390,7 +5393,7 @@ async def execute_operation( operation: Any, input: Any, *, - output_type: Optional[Type] = None, + output_type: Optional[type] = None, schedule_to_close_timeout: Optional[timedelta] = None, headers: Optional[Mapping[str, str]] = None, ) -> Any: @@ -5407,7 +5410,7 @@ async def execute_operation( @overload def create_nexus_client( *, - service: Type[ServiceT], + service: type[ServiceT], endpoint: str, ) -> NexusClient[ServiceT]: ... @@ -5422,9 +5425,9 @@ def create_nexus_client( def create_nexus_client( *, - service: Union[Type[ServiceT], str], + service: Union[type[ServiceT], str], endpoint: str, -) -> NexusClient[ServiceT]: +) -> NexusClient[Any]: """Create a Nexus client. .. 
warning:: diff --git a/tests/nexus/test_dynamic_creation_of_user_handler_classes.py b/tests/nexus/test_dynamic_creation_of_user_handler_classes.py index 0eef14b84..3df085d01 100644 --- a/tests/nexus/test_dynamic_creation_of_user_handler_classes.py +++ b/tests/nexus/test_dynamic_creation_of_user_handler_classes.py @@ -3,11 +3,9 @@ import httpx import nexusrpc.handler import pytest -from nexusrpc.handler import sync_operation from temporalio import nexus, workflow from temporalio.client import Client -from temporalio.nexus._util import get_operation_factory from temporalio.testing import WorkflowEnvironment from temporalio.worker import Worker from tests.helpers.nexus import ServiceClient, create_nexus_endpoint @@ -78,8 +76,8 @@ async def test_run_nexus_service_from_programmatically_created_service_handler( service_handler = nexusrpc.handler._core.ServiceHandler( service=nexusrpc.ServiceDefinition( name="MyService", - operations={ - "increment": nexusrpc.Operation[int, int]( + operation_definitions={ + "increment": nexusrpc.OperationDefinition[int, int]( name="increment", method_name="increment", input_type=int, @@ -107,70 +105,3 @@ async def test_run_nexus_service_from_programmatically_created_service_handler( json=1, ) assert response.status_code == 201 - - -def make_incrementer_user_service_definition_and_service_handler_classes( - op_names: list[str], -) -> tuple[type, type]: - # - # service contract - # - - ops = {name: nexusrpc.Operation[int, int] for name in op_names} - service_cls: type = nexusrpc.service(type("ServiceContract", (), ops)) - - # - # service handler - # - @sync_operation - async def _increment_op( - self, - ctx: nexusrpc.handler.StartOperationContext, - input: int, - ) -> int: - return input + 1 - - op_handler_factories = {} - for name in op_names: - op_handler_factory, _ = get_operation_factory(_increment_op) - assert op_handler_factory - op_handler_factories[name] = op_handler_factory - - handler_cls: type = 
nexusrpc.handler.service_handler(service=service_cls)( - type("ServiceImpl", (), op_handler_factories) - ) - - return service_cls, handler_cls - - -@pytest.mark.skip( - reason="Dynamic creation of service contract using type() is not supported" -) -async def test_dynamic_creation_of_user_handler_classes( - client: Client, env: WorkflowEnvironment -): - task_queue = str(uuid.uuid4()) - - service_cls, handler_cls = ( - make_incrementer_user_service_definition_and_service_handler_classes( - ["increment"] - ) - ) - - assert (service_defn := nexusrpc.get_service_definition(service_cls)) - service_name = service_defn.name - - endpoint = (await create_nexus_endpoint(task_queue, client)).endpoint.id - async with Worker( - client, - task_queue=task_queue, - nexus_service_handlers=[handler_cls()], - ): - server_address = ServiceClient.default_server_address(env) - async with httpx.AsyncClient() as http_client: - response = await http_client.post( - f"http://{server_address}/nexus/endpoints/{endpoint}/services/{service_name}/increment", - json=1, - ) - assert response.status_code == 200 - assert response.json() == 2 diff --git a/tests/nexus/test_handler.py b/tests/nexus/test_handler.py index 2d0e59908..d95db5731 100644 --- a/tests/nexus/test_handler.py +++ b/tests/nexus/test_handler.py @@ -18,10 +18,11 @@ import logging import pprint import uuid +from collections.abc import Mapping from concurrent.futures.thread import ThreadPoolExecutor from dataclasses import dataclass from types import MappingProxyType -from typing import Any, Callable, Mapping, Optional, Type, Union +from typing import Any, Callable, Optional, Union import httpx import nexusrpc @@ -44,6 +45,7 @@ sync_operation, ) from nexusrpc.handler._decorators import operation_handler +from typing_extensions import dataclass_transform from temporalio import nexus, workflow from temporalio.client import Client @@ -327,12 +329,17 @@ class UnsuccessfulResponse: headers: Mapping[str, str] = UNSUCCESSFUL_RESPONSE_HEADERS 
-class _TestCase: +@dataclass_transform() +class _BaseTestCase: + pass + + +class _TestCase(_BaseTestCase): operation: str + expected: SuccessfulResponse service_defn: str = "MyService" input: Input = Input("") headers: dict[str, str] = {} - expected: SuccessfulResponse expected_without_service_definition: Optional[SuccessfulResponse] = None skip = "" @@ -556,7 +563,7 @@ class NonSerializableOutputFailure(_FailureTestCase): ) @pytest.mark.parametrize("with_service_definition", [True, False]) async def test_start_operation_happy_path( - test_case: Type[_TestCase], + test_case: type[_TestCase], with_service_definition: bool, env: WorkflowEnvironment, ): @@ -581,7 +588,7 @@ async def test_start_operation_happy_path( ], ) async def test_start_operation_protocol_level_failures( - test_case: Type[_TestCase], env: WorkflowEnvironment + test_case: type[_TestCase], env: WorkflowEnvironment ): if test_case == UpstreamTimeoutViaRequestTimeout: pytest.skip( @@ -603,7 +610,7 @@ async def test_start_operation_protocol_level_failures( ], ) async def test_start_operation_operation_failures( - test_case: Type[_TestCase], env: WorkflowEnvironment + test_case: type[_TestCase], env: WorkflowEnvironment ): if env.supports_time_skipping: pytest.skip("Nexus tests don't work with time-skipping server") @@ -612,7 +619,7 @@ async def test_start_operation_operation_failures( async def _test_start_operation_with_service_definition( - test_case: Type[_TestCase], + test_case: type[_TestCase], env: WorkflowEnvironment, ): if test_case.skip: @@ -646,7 +653,7 @@ async def _test_start_operation_with_service_definition( async def _test_start_operation_without_service_definition( - test_case: Type[_TestCase], + test_case: type[_TestCase], env: WorkflowEnvironment, ): if test_case.skip: @@ -732,7 +739,7 @@ class AsyncHandlerHappyPathWithoutTypeAnnotations(_TestCase): ], ) async def test_start_operation_without_type_annotations( - test_case: Type[_TestCase], env: WorkflowEnvironment + test_case: 
type[_TestCase], env: WorkflowEnvironment ): if env.supports_time_skipping: pytest.skip("Nexus tests don't work with time-skipping server") @@ -772,10 +779,7 @@ async def test_start_operation_without_type_annotations( def test_operation_without_type_annotations_without_service_definition_raises_validation_error(): - with pytest.raises( - ValueError, - match=r"has no input type.+has no output type", - ): + with pytest.raises(ValueError, match=r"has no input type"): service_handler(MyServiceHandlerWithOperationsWithoutTypeAnnotations) @@ -830,10 +834,11 @@ async def test_logger_uses_operation_context(env: WorkflowEnvironment, caplog: A assert getattr(record, "operation", None) == operation_name +@dataclass class _InstantiationCase: executor: bool handler: Callable[..., Any] - exception: Optional[Type[Exception]] + exception: Optional[type[Exception]] match: Optional[str] @@ -917,7 +922,7 @@ class SyncCancel(_InstantiationCase): [SyncHandlerNoExecutor, DefaultCancel, SyncCancel], ) async def test_handler_instantiation( - test_case: Type[_InstantiationCase], client: Client + test_case: type[_InstantiationCase], client: Client ): task_queue = str(uuid.uuid4()) diff --git a/tests/nexus/test_handler_interface_implementation.py b/tests/nexus/test_handler_interface_implementation.py index 8db3c7ddc..d51d58dca 100644 --- a/tests/nexus/test_handler_interface_implementation.py +++ b/tests/nexus/test_handler_interface_implementation.py @@ -1,4 +1,5 @@ -from typing import Any, Optional, Type +from dataclasses import dataclass +from typing import Any, Optional import nexusrpc import nexusrpc.handler @@ -9,9 +10,10 @@ from temporalio.nexus import WorkflowRunOperationContext, workflow_run_operation +@dataclass class _InterfaceImplementationTestCase: - Interface: Type[Any] - Impl: Type[Any] + Interface: type[Any] + Impl: type[Any] error_message: Optional[str] @@ -22,7 +24,7 @@ class Interface: class Impl: @sync_operation - async def op(self, ctx: StartOperationContext, input: None) 
-> None: ... + async def op(self, _ctx: StartOperationContext, _input: None) -> None: ... error_message = None @@ -35,7 +37,7 @@ class Interface: class Impl: @workflow_run_operation async def op( - self, ctx: WorkflowRunOperationContext, input: str + self, _ctx: WorkflowRunOperationContext, _input: str ) -> nexus.WorkflowHandle[int]: raise NotImplementedError @@ -50,7 +52,7 @@ async def op( ], ) def test_service_decorator_enforces_interface_conformance( - test_case: Type[_InterfaceImplementationTestCase], + test_case: type[_InterfaceImplementationTestCase], ): if test_case.error_message: with pytest.raises(Exception) as ei: diff --git a/tests/nexus/test_handler_operation_definitions.py b/tests/nexus/test_handler_operation_definitions.py index 8e41c1efa..82a0682fb 100644 --- a/tests/nexus/test_handler_operation_definitions.py +++ b/tests/nexus/test_handler_operation_definitions.py @@ -11,7 +11,6 @@ from temporalio import nexus from temporalio.nexus import WorkflowRunOperationContext, workflow_run_operation -from temporalio.nexus._util import get_operation_factory @dataclass @@ -96,7 +95,7 @@ async def test_collected_operation_names( assert isinstance(service_defn, nexusrpc.ServiceDefinition) assert service_defn.name == "Service" for method_name, expected_op in test_case.expected_operations.items(): - _, actual_op = get_operation_factory(getattr(test_case.Service, method_name)) + actual_op = nexusrpc.get_operation(getattr(test_case.Service, method_name)) assert isinstance(actual_op, nexusrpc.Operation) assert actual_op.name == expected_op.name assert actual_op.input_type == expected_op.input_type diff --git a/tests/nexus/test_type_checking.py b/tests/nexus/test_type_checking.py index e9210e954..228e6b3f8 100644 --- a/tests/nexus/test_type_checking.py +++ b/tests/nexus/test_type_checking.py @@ -1,33 +1,229 @@ +""" +This file exists to test for type-checker false positives and false negatives. +It doesn't contain any test functions. 
+""" + +from dataclasses import dataclass + import nexusrpc import temporalio.nexus from temporalio import workflow -def _(): - @nexusrpc.handler.service_handler - class MyService: - @nexusrpc.handler.sync_operation - async def my_sync_operation( - self, ctx: nexusrpc.handler.StartOperationContext, input: int - ) -> str: - raise NotImplementedError - - @temporalio.nexus.workflow_run_operation - async def my_workflow_run_operation( - self, ctx: temporalio.nexus.WorkflowRunOperationContext, input: int - ) -> temporalio.nexus.WorkflowHandle[str]: - raise NotImplementedError - - @workflow.defn(sandboxed=False) - class MyWorkflow: - @workflow.run - async def invoke_nexus_op_and_assert_error(self) -> None: - self.nexus_client = workflow.create_nexus_client( - service=MyService, - endpoint="fake-endpoint", - ) - await self.nexus_client.execute_operation(MyService.my_sync_operation, 1) - await self.nexus_client.execute_operation( - MyService.my_workflow_run_operation, 1 - ) +@dataclass +class MyInput: + pass + + +@dataclass +class MyOutput: + pass + + +@nexusrpc.service +class MyService: + my_sync_operation: nexusrpc.Operation[MyInput, MyOutput] + my_workflow_run_operation: nexusrpc.Operation[MyInput, MyOutput] + + +@nexusrpc.handler.service_handler(service=MyService) +class MyServiceHandler: + @nexusrpc.handler.sync_operation + async def my_sync_operation( + self, _ctx: nexusrpc.handler.StartOperationContext, _input: MyInput + ) -> MyOutput: + raise NotImplementedError + + @temporalio.nexus.workflow_run_operation + async def my_workflow_run_operation( + self, _ctx: temporalio.nexus.WorkflowRunOperationContext, _input: MyInput + ) -> temporalio.nexus.WorkflowHandle[MyOutput]: + raise NotImplementedError + + +@nexusrpc.handler.service_handler(service=MyService) +class MyServiceHandler2: + @nexusrpc.handler.sync_operation + async def my_sync_operation( + self, _ctx: nexusrpc.handler.StartOperationContext, _input: MyInput + ) -> MyOutput: + raise NotImplementedError + + 
@temporalio.nexus.workflow_run_operation + async def my_workflow_run_operation( + self, _ctx: temporalio.nexus.WorkflowRunOperationContext, _input: MyInput + ) -> temporalio.nexus.WorkflowHandle[MyOutput]: + raise NotImplementedError + + +@nexusrpc.handler.service_handler +class MyServiceHandlerWithoutServiceDefinition: + @nexusrpc.handler.sync_operation + async def my_sync_operation( + self, _ctx: nexusrpc.handler.StartOperationContext, _input: MyInput + ) -> MyOutput: + raise NotImplementedError + + @temporalio.nexus.workflow_run_operation + async def my_workflow_run_operation( + self, _ctx: temporalio.nexus.WorkflowRunOperationContext, _input: MyInput + ) -> temporalio.nexus.WorkflowHandle[MyOutput]: + raise NotImplementedError + + +@workflow.defn +class MyWorkflow1: + @workflow.run + async def test_invoke_by_operation_definition_happy_path(self) -> None: + """ + When a nexus client calls an operation by referencing an operation definition on + a service definition, the output type is inferred correctly. 
+ """ + nexus_client = workflow.create_nexus_client( + service=MyService, + endpoint="fake-endpoint", + ) + input = MyInput() + + # sync operation + _output_1: MyOutput = await nexus_client.execute_operation( + MyService.my_sync_operation, input + ) + _handle_1: workflow.NexusOperationHandle[ + MyOutput + ] = await nexus_client.start_operation(MyService.my_sync_operation, input) + _output_1_1: MyOutput = await _handle_1 + + # workflow run operation + _output_2: MyOutput = await nexus_client.execute_operation( + MyService.my_workflow_run_operation, input + ) + _handle_2: workflow.NexusOperationHandle[ + MyOutput + ] = await nexus_client.start_operation( + MyService.my_workflow_run_operation, input + ) + _output_2_1: MyOutput = await _handle_2 + + +@workflow.defn +class MyWorkflow2: + @workflow.run + async def test_invoke_by_operation_handler_happy_path(self) -> None: + """ + When a nexus client calls an operation by referencing an operation handler on a + service handler, the output type is inferred correctly. 
+ """ + nexus_client = workflow.create_nexus_client( + service=MyServiceHandler, # MyService would also work + endpoint="fake-endpoint", + ) + input = MyInput() + + # sync operation + _output_1: MyOutput = await nexus_client.execute_operation( + MyServiceHandler.my_sync_operation, input + ) + _handle_1: workflow.NexusOperationHandle[ + MyOutput + ] = await nexus_client.start_operation( + MyServiceHandler.my_sync_operation, input + ) + _output_1_1: MyOutput = await _handle_1 + + # workflow run operation + _output_2: MyOutput = await nexus_client.execute_operation( + MyServiceHandler.my_workflow_run_operation, input + ) + _handle_2: workflow.NexusOperationHandle[ + MyOutput + ] = await nexus_client.start_operation( + MyServiceHandler.my_workflow_run_operation, input + ) + _output_2_1: MyOutput = await _handle_2 + + +@workflow.defn +class MyWorkflow3: + @workflow.run + async def test_invoke_by_operation_name_happy_path(self) -> None: + """ + When a nexus client calls an operation by referencing an operation name, the + output type is inferred as Unknown. + """ + nexus_client = workflow.create_nexus_client( + service=MyServiceHandler, + endpoint="fake-endpoint", + ) + input = MyInput() + # TODO: mypy fails these since no type is inferred, so we're forced to add a + # `type: ignore`. As a result this function doesn't currently prove anything, but + # one can confirm the inferred type is Unknown in an IDE. + _output_1 = await nexus_client.execute_operation("my_sync_operation", input) # type: ignore[var-annotated] + _output_2 = await nexus_client.execute_operation( # type: ignore[var-annotated] + "my_workflow_run_operation", input + ) + + +@workflow.defn +class MyWorkflow4: + @workflow.run + async def test_invoke_by_operation_definition_wrong_input_type(self) -> None: + """ + When a nexus client calls an operation by referencing an operation definition on + a service definition, there is a type error if the input type is wrong. 
+ """ + nexus_client = workflow.create_nexus_client( + service=MyService, + endpoint="fake-endpoint", + ) + # assert-type-error-pyright: 'No overloads for "execute_operation" match' + await nexus_client.execute_operation( # type: ignore + MyService.my_sync_operation, + # assert-type-error-pyright: 'Argument of type .+ cannot be assigned to parameter "input"' + "wrong-input-type", # type: ignore + ) + + +@workflow.defn +class MyWorkflow5: + @workflow.run + async def test_invoke_by_operation_handler_wrong_input_type(self) -> None: + """ + When a nexus client calls an operation by referencing an operation handler on a + service handler, there is a type error if the input type is wrong. + """ + nexus_client = workflow.create_nexus_client( + service=MyServiceHandler, + endpoint="fake-endpoint", + ) + # assert-type-error-pyright: 'No overloads for "execute_operation" match' + await nexus_client.execute_operation( # type: ignore + MyServiceHandler.my_sync_operation, + # assert-type-error-pyright: 'Argument of type .+ cannot be assigned to parameter "input"' + "wrong-input-type", # type: ignore + ) + + +@workflow.defn +class MyWorkflow6: + @workflow.run + async def test_invoke_by_operation_handler_method_on_wrong_service(self) -> None: + """ + When a nexus client calls an operation by referencing an operation handler method + on a service handler, there is a type error if the method does not belong to the + service for which the client was created. 
+ + (This form of type safety is not available when referencing an operation definition) + """ + nexus_client = workflow.create_nexus_client( + service=MyServiceHandler, + endpoint="fake-endpoint", + ) + # assert-type-error-pyright: 'No overloads for "execute_operation" match' + await nexus_client.execute_operation( # type: ignore + # assert-type-error-pyright: 'Argument of type .+ cannot be assigned to parameter "operation"' + MyServiceHandler2.my_sync_operation, # type: ignore + MyInput(), + ) diff --git a/tests/nexus/test_workflow_caller.py b/tests/nexus/test_workflow_caller.py index c9417ef58..085febb78 100644 --- a/tests/nexus/test_workflow_caller.py +++ b/tests/nexus/test_workflow_caller.py @@ -253,11 +253,12 @@ def __init__( request_cancel: bool, task_queue: str, ) -> None: + service: type[Any] = { + CallerReference.IMPL_WITH_INTERFACE: ServiceImpl, + CallerReference.INTERFACE: ServiceInterface, + }[input.op_input.caller_reference] self.nexus_client = workflow.create_nexus_client( - service={ - CallerReference.IMPL_WITH_INTERFACE: ServiceImpl, - CallerReference.INTERFACE: ServiceInterface, - }[input.op_input.caller_reference], + service=service, endpoint=make_nexus_endpoint_name(task_queue), ) self._nexus_operation_started = False @@ -883,7 +884,7 @@ async def run( task_queue: str, ) -> ServiceClassNameOutput: C, N = CallerReference, NameOverride - service_cls: type + service_cls: type[Any] if (caller_reference, name_override) == (C.INTERFACE, N.YES): service_cls = ServiceInterfaceWithNameOverride elif (caller_reference, name_override) == (C.INTERFACE, N.NO): diff --git a/tests/nexus/test_workflow_caller_error_chains.py b/tests/nexus/test_workflow_caller_error_chains.py index f982f942e..3e3c05b0b 100644 --- a/tests/nexus/test_workflow_caller_error_chains.py +++ b/tests/nexus/test_workflow_caller_error_chains.py @@ -27,6 +27,7 @@ error_conversion_test_cases: dict[str, type[ErrorConversionTestCase]] = {} +@dataclass class ErrorConversionTestCase: 
action_in_nexus_operation: Callable[..., Any] expected_exception_chain_in_workflow: list[tuple[type[Exception], dict[str, Any]]] diff --git a/tests/nexus/test_workflow_run_operation.py b/tests/nexus/test_workflow_run_operation.py index 0869a1d00..01850379a 100644 --- a/tests/nexus/test_workflow_run_operation.py +++ b/tests/nexus/test_workflow_run_operation.py @@ -40,7 +40,9 @@ async def run(self, input: str) -> str: class MyOperation(WorkflowRunOperationHandler): - def __init__(self): + # TODO(nexus-preview) WorkflowRunOperationHandler is not currently implemented to + # support subclassing as this test does. + def __init__(self): # type: ignore[reportMissingSuperCall] pass async def start( diff --git a/tests/test_client.py b/tests/test_client.py index 418d9ff53..ce6d96ef3 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -3,7 +3,7 @@ import os import uuid from datetime import datetime, timedelta, timezone -from typing import Any, List, Mapping, Optional, Tuple, cast +from typing import Any, List, Mapping, Optional, cast from unittest import mock import google.protobuf.any_pb2 @@ -312,7 +312,7 @@ class start_workflow_execution( ], ) - def __init__(self) -> None: + def __init__(self) -> None: # type: ignore[reportMissingSuperCall] pass async def __call__( @@ -481,7 +481,7 @@ async def test_single_client_config_change(client: Client, worker: ExternalWorke class TracingClientInterceptor(Interceptor): def intercept_client(self, next: OutboundInterceptor) -> OutboundInterceptor: - self.traces: List[Tuple[str, Any]] = [] + self.traces: list[tuple[str, Any]] = [] # type: ignore[reportUninitializedInstanceVariable] return TracingClientOutboundInterceptor(self, next) diff --git a/tests/test_type_errors.py b/tests/test_type_errors.py new file mode 100644 index 000000000..7c1623116 --- /dev/null +++ b/tests/test_type_errors.py @@ -0,0 +1,192 @@ +""" +This file contains a test allowing assertions to be made that an expected type error is in +fact produced by the 
type-checker. I.e. that the type checker is not delivering a false +negative. + +To use the test, add a comment of the following form to your test code: + + # assert-type-error-pyright: 'No overloads for "execute_operation" match' + await nexus_client.execute_operation( # type: ignore + +The `type: ignore` is only necessary if your test code is being type-checked. + +This is a copy of https://github.com/nexus-rpc/sdk-python/blob/main/tests/test_type_errors.py + +Until a shared library is created, please keep the two in sync. +""" + +import itertools +import json +import os +import platform +import re +import subprocess +import tempfile +from pathlib import Path + +import pytest + + +def pytest_generate_tests(metafunc: pytest.Metafunc) -> None: + """Dynamically generate test cases for files with type error assertions.""" + if metafunc.function.__name__ in [ + "test_type_errors_pyright", + "test_type_errors_mypy", + ]: + tests_dir = Path(__file__).parent + files_with_assertions = [] + + for test_file in tests_dir.rglob("test_*.py"): + if test_file.name == "test_type_errors.py": + continue + + if _has_type_error_assertions(test_file): + files_with_assertions.append(test_file) + + metafunc.parametrize("test_file", files_with_assertions, ids=lambda f: f.name) + + +@pytest.mark.skipif(platform.system() == "Windows", reason="TODO: broken on Windows") +def test_type_errors_pyright(test_file: Path): + """ + Validate type error assertions in a single test file using pyright. + + For each line with a comment of the form `# assert-type-error-pyright: "regex"`, + verify that pyright reports an error on the next non-comment line matching the regex. + Also verify that there are no unexpected type errors. + """ + _test_type_errors( + test_file, + _get_expected_errors(test_file, "pyright"), + _get_pyright_errors(test_file), + ) + + +# This test is disabled since we currently have no way to be able to +# assert-type-error-mypy on a line with a `type: ignore`.
+def _test_type_errors_mypy(test_file: Path): # pyright: ignore + """ + Validate type error assertions in a single test file using mypy. + + For each line with a comment of the form `# assert-type-error-mypy: "regex"`, + verify that mypy reports an error on the next non-comment line matching the regex. + Also verify that there are no unexpected type errors. + """ + _test_type_errors( + test_file, + _get_expected_errors(test_file, "mypy"), + _get_mypy_errors(test_file), + ) + + +def _test_type_errors( + test_file: Path, + expected_errors: dict[int, str], + actual_errors: dict[int, str], +) -> None: + for line_num, expected_pattern in expected_errors.items(): + if line_num not in actual_errors: + pytest.fail( + f"{test_file}:{line_num}: Expected type error matching '{expected_pattern}' but no error found" + ) + + actual_msg = actual_errors[line_num] + if not re.search(expected_pattern, actual_msg): + pytest.fail( + f"{test_file}:{line_num}: Expected error matching '{expected_pattern}' but got '{actual_msg}'" + ) + + for line_num, actual_msg in actual_errors.items(): + if line_num not in expected_errors: + pytest.fail(f"{test_file}:{line_num}: Unexpected type error: {actual_msg}") + + +def _has_type_error_assertions(test_file: Path) -> bool: + """Check if a file contains any type error assertions.""" + with open(test_file) as f: + return any( + re.search(r"# assert-type-error-\w+:", line) for line in f.readlines() + ) + + +def _get_expected_errors(test_file: Path, type_checker: str) -> dict[int, str]: + """Parse expected type errors from comments in a file for the specified type checker.""" + expected_errors = {} + + with open(test_file) as f: + lines = zip(itertools.count(1), f) + for line_num, line in lines: + if match := re.search( + rf'# assert-type-error-{re.escape(type_checker)}:\s*["\'](.+)["\']', + line, + ): + pattern = match.group(1) + for line_num, line in lines: + if line.strip() and not line.strip().startswith("#"): + expected_errors[line_num] = pattern + 
break + + return expected_errors + + +def _get_pyright_errors(test_file: Path) -> dict[int, str]: + """Run pyright on a file and parse the actual type errors.""" + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f: + # Create a temporary config file to disable type ignore comments + config_data = {"enableTypeIgnoreComments": False} + json.dump(config_data, f) + config_path = f.name + + try: + result = subprocess.run( + ["uv", "run", "pyright", "--project", config_path, str(test_file)], + capture_output=True, + text=True, + ) + + actual_errors = {} + abs_path = test_file.resolve() + + for line in result.stdout.splitlines(): + # pyright output format: /full/path/to/file.py:line:column - error: message (error_code) + if match := re.match( + rf"\s*{re.escape(str(abs_path))}:(\d+):\d+\s*-\s*error:\s*(.+)", line + ): + line_num = int(match.group(1)) + error_msg = match.group(2).strip() + # Remove error code in parentheses if present + error_msg = re.sub(r"\s*\([^)]+\)$", "", error_msg) + actual_errors[line_num] = error_msg + + return actual_errors + finally: + if os.path.exists(config_path): + os.unlink(config_path) + + +def _get_mypy_errors(test_file: Path) -> dict[int, str]: + """Run mypy on a file and parse the actual type errors. + + Note: mypy does not have a direct equivalent to pyright's enableTypeIgnoreComments=false, + so type ignore comments will still be respected by mypy. Users should avoid placing + # type: ignore comments on lines they want to test, or manually remove them for testing. 
+ """ + result = subprocess.run( + ["uv", "run", "mypy", str(test_file)], + capture_output=True, + text=True, + ) + + actual_errors = {} + abs_path = test_file.resolve() + + for line in result.stdout.splitlines(): + # mypy output format: file.py:line: error: message + if match := re.match( + rf"{re.escape(str(abs_path))}:(\d+):\s*error:\s*(.+)", line + ): + line_num = int(match.group(1)) + error_msg = match.group(2).strip() + actual_errors[line_num] = error_msg + + return actual_errors diff --git a/tests/testing/test_workflow.py b/tests/testing/test_workflow.py index 014f58d62..7915da8f6 100644 --- a/tests/testing/test_workflow.py +++ b/tests/testing/test_workflow.py @@ -4,7 +4,7 @@ import uuid from datetime import datetime, timedelta, timezone from time import monotonic -from typing import Any, List, Optional, Union +from typing import Any, Optional, Union from temporalio import activity, workflow from temporalio.client import ( @@ -194,8 +194,8 @@ def some_signal(self) -> None: class SimpleClientInterceptor(Interceptor): - def __init__(self) -> None: - self.events: List[str] = [] + def __init__(self) -> None: # type: ignore[reportMissingSuperCall] + self.events: list[str] = [] def intercept_client(self, next: OutboundInterceptor) -> OutboundInterceptor: return SimpleClientOutboundInterceptor(self, super().intercept_client(next)) diff --git a/tests/worker/test_update_with_start.py b/tests/worker/test_update_with_start.py index a8131cd05..7bf48fc8c 100644 --- a/tests/worker/test_update_with_start.py +++ b/tests/worker/test_update_with_start.py @@ -109,9 +109,9 @@ class UpdateHandlerType(Enum): class TestUpdateWithStart: - client: Client - workflow_id: str - task_queue: str + client: Client # type: ignore[reportUninitializedInstanceVariable] + workflow_id: str # type: ignore[reportUninitializedInstanceVariable] + task_queue: str # type: ignore[reportUninitializedInstanceVariable] update_id = "test-uws-up-id" @pytest.mark.parametrize( @@ -825,7 +825,7 @@ class 
execute_multi_operation( # Set grpc_status with empty details empty_details_err._grpc_status = temporalio.api.common.v1.GrpcStatus(details=[]) - def __init__(self) -> None: + def __init__(self) -> None: # type: ignore[reportMissingSuperCall] pass async def __call__( diff --git a/uv.lock b/uv.lock index cd3f4eff4..876193f13 100644 --- a/uv.lock +++ b/uv.lock @@ -60,6 +60,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b9/fa/123043af240e49752f1c4bd24da5053b6bd00cad78c2be53c0d1e8b975bc/backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34", size = 30181, upload-time = "2024-05-28T17:01:53.112Z" }, ] +[[package]] +name = "basedpyright" +version = "1.30.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodejs-wheel-binaries" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/15/aa/7b15082f749a13e6c8494bab9413d589b849d52957da1e898409b5259712/basedpyright-1.30.1.tar.gz", hash = "sha256:fe33afd16200202fff4aa3f91ac2c2958af35461c801cb8bbff60ca29093a2cb", size = 22059762, upload-time = "2025-07-10T08:47:16.787Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/90/95b04aee796f471ac8cb5f8794936065490e3c04944423db2f9ec0a973b8/basedpyright-1.30.1-py3-none-any.whl", hash = "sha256:5afdf6e2ab94f098cd0004a4e3b198a5e176b8eb8e08049542729a7f5beb0dcd", size = 11537793, upload-time = "2025-07-10T08:47:13.767Z" }, +] + [[package]] name = "bashlex" version = "0.18" @@ -1044,13 +1056,26 @@ wheels = [ [[package]] name = "nexus-rpc" version = "1.1.0" -source = { registry = "https://pypi.org/simple" } +source = { editable = "../nexus-sdk-python" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ef/66/540687556bd28cf1ec370cc6881456203dfddb9dab047b8979c6865b5984/nexus_rpc-1.1.0.tar.gz", hash = "sha256:d65ad6a2f54f14e53ebe39ee30555eaeb894102437125733fb13034a04a44553", size = 77383, 
upload-time = "2025-07-07T19:03:58.368Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/2f/9e9d0dcaa4c6ffa22b7aa31069a8a264c753ff8027b36af602cce038c92f/nexus_rpc-1.1.0-py3-none-any.whl", hash = "sha256:d1b007af2aba186a27e736f8eaae39c03aed05b488084ff6c3d1785c9ba2ad38", size = 27743, upload-time = "2025-07-07T19:03:57.556Z" }, + +[package.metadata] +requires-dist = [{ name = "typing-extensions", specifier = ">=4.12.2" }] + +[package.metadata.requires-dev] +dev = [ + { name = "basedpyright", specifier = ">=1.30.1" }, + { name = "mypy", specifier = ">=1.15.0" }, + { name = "poethepoet", specifier = ">=0.35.0" }, + { name = "pydoctor", specifier = ">=25.4.0" }, + { name = "pyright", specifier = "==1.1.403" }, + { name = "pytest", specifier = ">=8.3.5" }, + { name = "pytest-asyncio", specifier = ">=0.26.0" }, + { name = "pytest-cov", specifier = ">=6.1.1" }, + { name = "pytest-pretty", specifier = ">=1.3.0" }, + { name = "ruff", specifier = ">=0.12.0" }, ] [[package]] @@ -1093,6 +1118,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, ] +[[package]] +name = "nodejs-wheel-binaries" +version = "22.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/86/8962d1d24ff480f4dd31871f42c8e0d8e2c851cd558a07ee689261d310ab/nodejs_wheel_binaries-22.17.0.tar.gz", hash = "sha256:529142012fb8fd20817ef70e2ef456274df4f49933292e312c8bbc7285af6408", size = 8068, upload-time = "2025-06-29T20:24:25.002Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/53/b942c6da4ff6f87a315033f6ff6fed8fd3c22047d7ff5802badaa5dfc2c2/nodejs_wheel_binaries-22.17.0-py2.py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:6545a6f6d2f736d9c9e2eaad7e599b6b5b2d8fd4cbd2a1df0807cbcf51b9d39b", size = 51003554, upload-time = "2025-06-29T20:23:47.042Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b7/7184a9ad2364912da22f2fe021dc4a3301721131ef7759aeb4a1f19db0b4/nodejs_wheel_binaries-22.17.0-py2.py3-none-macosx_11_0_x86_64.whl", hash = "sha256:4bea5b994dd87c20f8260031ea69a97c3d282e2d4472cc8908636a313a830d00", size = 51936848, upload-time = "2025-06-29T20:23:52.064Z" }, + { url = "https://files.pythonhosted.org/packages/e9/7a/0ea425147b8110b8fd65a6c21cfd3bd130cdec7766604361429ef870d799/nodejs_wheel_binaries-22.17.0-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:885508615274a22499dd5314759c1cf96ba72de03e6485d73b3e5475e7f12662", size = 57925230, upload-time = "2025-06-29T20:23:56.81Z" }, + { url = "https://files.pythonhosted.org/packages/23/5f/10a3f2ac08a839d065d9ccfd6d9df66bc46e100eaf87a8a5cf149eb3fb8e/nodejs_wheel_binaries-22.17.0-py2.py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90f38ce034a602bcab534d55cbe0390521e73e5dcffdd1c4b34354b932172af2", size = 58457829, upload-time = "2025-06-29T20:24:01.945Z" }, + { url = "https://files.pythonhosted.org/packages/ed/a4/d2ca331e16eef0974eb53702df603c54f77b2a7e2007523ecdbf6cf61162/nodejs_wheel_binaries-22.17.0-py2.py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5eed087855b644c87001fe04036213193963ccd65e7f89949e9dbe28e7743d9b", size = 59778054, upload-time = "2025-06-29T20:24:07.14Z" }, + { url = "https://files.pythonhosted.org/packages/be/2b/04e0e7f7305fe2ba30fd4610bfb432516e0f65379fe6c2902f4b7b1ad436/nodejs_wheel_binaries-22.17.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:715f413c81500f0770ea8936ef1fc2529b900da8054cbf6da67cec3ee308dc76", size = 60830079, upload-time = "2025-06-29T20:24:12.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/67/12070b24b88040c2d694883f3dcb067052f748798f4c63f7c865769a5747/nodejs_wheel_binaries-22.17.0-py2.py3-none-win_amd64.whl", hash = "sha256:51165630493c8dd4acfe1cae1684b76940c9b03f7f355597d55e2d056a572ddd", size = 40117877, upload-time = "2025-06-29T20:24:17.51Z" }, + { url = "https://files.pythonhosted.org/packages/2e/ec/53ac46af423527c23e40c7343189f2bce08a8337efedef4d8a33392cee23/nodejs_wheel_binaries-22.17.0-py2.py3-none-win_arm64.whl", hash = "sha256:fae56d172227671fccb04461d3cd2b26a945c6c7c7fc29edb8618876a39d8b4a", size = 38865278, upload-time = "2025-06-29T20:24:21.065Z" }, +] + [[package]] name = "openai" version = "1.92.3" @@ -1427,15 +1468,15 @@ wheels = [ [[package]] name = "pyright" -version = "1.1.402" +version = "1.1.403" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nodeenv" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/aa/04/ce0c132d00e20f2d2fb3b3e7c125264ca8b909e693841210534b1ea1752f/pyright-1.1.402.tar.gz", hash = "sha256:85a33c2d40cd4439c66aa946fd4ce71ab2f3f5b8c22ce36a623f59ac22937683", size = 3888207, upload-time = "2025-06-11T08:48:35.759Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/f6/35f885264ff08c960b23d1542038d8da86971c5d8c955cfab195a4f672d7/pyright-1.1.403.tar.gz", hash = "sha256:3ab69b9f41c67fb5bbb4d7a36243256f0d549ed3608678d381d5f51863921104", size = 3913526, upload-time = "2025-07-09T07:15:52.882Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/37/1a1c62d955e82adae588be8e374c7f77b165b6cb4203f7d581269959abbc/pyright-1.1.402-py3-none-any.whl", hash = "sha256:2c721f11869baac1884e846232800fe021c33f1b4acb3929cff321f7ea4e2982", size = 5624004, upload-time = "2025-06-11T08:48:33.998Z" }, + { url = "https://files.pythonhosted.org/packages/49/b6/b04e5c2f41a5ccad74a1a4759da41adb20b4bc9d59a5e08d29ba60084d07/pyright-1.1.403-py3-none-any.whl", hash = 
"sha256:c0eeca5aa76cbef3fcc271259bbd785753c7ad7bcac99a9162b4c4c7daed23b3", size = 5684504, upload-time = "2025-07-09T07:15:50.958Z" }, ] [[package]] @@ -1737,6 +1778,7 @@ pydantic = [ [package.dev-dependencies] dev = [ + { name = "basedpyright" }, { name = "cibuildwheel" }, { name = "grpcio-tools" }, { name = "httpx" }, @@ -1761,7 +1803,7 @@ dev = [ requires-dist = [ { name = "eval-type-backport", marker = "python_full_version < '3.10' and extra == 'openai-agents'", specifier = ">=0.2.2" }, { name = "grpcio", marker = "extra == 'grpc'", specifier = ">=1.48.2,<2" }, - { name = "nexus-rpc", specifier = ">=1.1.0" }, + { name = "nexus-rpc", editable = "../nexus-sdk-python" }, { name = "openai-agents", marker = "extra == 'openai-agents'", specifier = ">=0.1,<0.2" }, { name = "opentelemetry-api", marker = "extra == 'opentelemetry'", specifier = ">=1.11.1,<2" }, { name = "opentelemetry-sdk", marker = "extra == 'opentelemetry'", specifier = ">=1.11.1,<2" }, @@ -1775,6 +1817,7 @@ provides-extras = ["grpc", "opentelemetry", "pydantic", "openai-agents"] [package.metadata.requires-dev] dev = [ + { name = "basedpyright", specifier = ">=1.30.1" }, { name = "cibuildwheel", specifier = ">=2.22.0,<3" }, { name = "grpcio-tools", specifier = ">=1.48.2,<2" }, { name = "httpx", specifier = ">=0.28.1" }, @@ -1784,7 +1827,7 @@ dev = [ { name = "psutil", specifier = ">=5.9.3,<6" }, { name = "pydocstyle", specifier = ">=6.3.0,<7" }, { name = "pydoctor", specifier = ">=24.11.1,<25" }, - { name = "pyright", specifier = "==1.1.402" }, + { name = "pyright", specifier = "==1.1.403" }, { name = "pytest", specifier = "~=7.4" }, { name = "pytest-asyncio", specifier = ">=0.21,<0.22" }, { name = "pytest-cov", specifier = ">=6.1.1" },
Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.
Alternative Proxies: