diff --git a/pyproject.toml b/pyproject.toml index 77f31e2ad..0f3446668 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ opentelemetry = [ ] pydantic = ["pydantic>=2.0.0,<3"] openai-agents = [ - "openai-agents>=0.2.3,<=0.2.9", # 0.2.10 doesn't work: https://github.com/openai/openai-agents-python/issues/1639 + "openai-agents>=0.2.11,<0.3", "eval-type-backport>=0.2.2; python_version < '3.10'" ] @@ -57,7 +57,7 @@ dev = [ "pytest-cov>=6.1.1", "httpx>=0.28.1", "pytest-pretty>=1.3.0", - "openai-agents[litellm]>=0.2.3,<=0.2.9", # 0.2.10 doesn't work: https://github.com/openai/openai-agents-python/issues/1639 + "openai-agents[litellm]>=0.2.11,<0.3" ] [tool.poe.tasks] diff --git a/temporalio/contrib/openai_agents/_invoke_model_activity.py b/temporalio/contrib/openai_agents/_invoke_model_activity.py index ed4313537..b0ba8ef7d 100644 --- a/temporalio/contrib/openai_agents/_invoke_model_activity.py +++ b/temporalio/contrib/openai_agents/_invoke_model_activity.py @@ -144,6 +144,7 @@ class ActivityModelInput(TypedDict, total=False): handoffs: list[HandoffInput] tracing: Required[ModelTracingInput] previous_response_id: Optional[str] + conversation_id: Optional[str] prompt: Optional[Any] @@ -226,6 +227,7 @@ def make_tool(tool: ToolInput) -> Tool: handoffs=handoffs, tracing=ModelTracing(input["tracing"]), previous_response_id=input.get("previous_response_id"), + conversation_id=input.get("conversation_id"), prompt=input.get("prompt"), ) except APIStatusError as e: diff --git a/temporalio/contrib/openai_agents/_temporal_model_stub.py b/temporalio/contrib/openai_agents/_temporal_model_stub.py index 11f1ddc5e..3815904e2 100644 --- a/temporalio/contrib/openai_agents/_temporal_model_stub.py +++ b/temporalio/contrib/openai_agents/_temporal_model_stub.py @@ -68,6 +68,7 @@ async def get_response( tracing: ModelTracing, *, previous_response_id: Optional[str], + conversation_id: Optional[str], prompt: Optional[ResponsePromptParam], ) -> ModelResponse: def 
make_tool_info(tool: Tool) -> ToolInput: @@ -134,6 +135,7 @@ def make_tool_info(tool: Tool) -> ToolInput: handoffs=handoff_infos, tracing=ModelTracingInput(tracing.value), previous_response_id=previous_response_id, + conversation_id=conversation_id, prompt=prompt, ) @@ -178,6 +180,7 @@ def stream_response( tracing: ModelTracing, *, previous_response_id: Optional[str], + conversation_id: Optional[str], prompt: ResponsePromptParam | None, ) -> AsyncIterator[TResponseStreamEvent]: raise NotImplementedError("Temporal model doesn't support streams yet") diff --git a/temporalio/contrib/openai_agents/_temporal_openai_agents.py b/temporalio/contrib/openai_agents/_temporal_openai_agents.py index f8b8999a2..ea2e1ebc0 100644 --- a/temporalio/contrib/openai_agents/_temporal_openai_agents.py +++ b/temporalio/contrib/openai_agents/_temporal_openai_agents.py @@ -126,9 +126,7 @@ async def get_response( output_schema: Union[AgentOutputSchemaBase, None], handoffs: list[Handoff], tracing: ModelTracing, - *, - previous_response_id: Union[str, None], - prompt: Union[ResponsePromptParam, None] = None, + **kwargs, ) -> ModelResponse: """Get a response from the model.""" return self.fn() @@ -142,9 +140,7 @@ def stream_response( output_schema: Optional[AgentOutputSchemaBase], handoffs: list[Handoff], tracing: ModelTracing, - *, - previous_response_id: Optional[str], - prompt: Optional[ResponsePromptParam], + **kwargs, ) -> AsyncIterator[TResponseStreamEvent]: """Get a streamed response from the model. 
Unimplemented.""" raise NotImplementedError() diff --git a/tests/contrib/openai_agents/test_openai.py b/tests/contrib/openai_agents/test_openai.py index 741c33353..ca45a54b8 100644 --- a/tests/contrib/openai_agents/test_openai.py +++ b/tests/contrib/openai_agents/test_openai.py @@ -1164,8 +1164,9 @@ async def get_response( output_schema: Union[AgentOutputSchemaBase, None], handoffs: list[Handoff], tracing: ModelTracing, - previous_response_id: Union[str, None], - prompt: Union[ResponsePromptParam, None] = None, + previous_response_id: Optional[str] = None, + conversation_id: Optional[str] = None, + prompt: Optional[ResponsePromptParam] = None, ) -> ModelResponse: if ( system_instructions @@ -1553,9 +1554,7 @@ async def get_response( output_schema: Union[AgentOutputSchemaBase, None], handoffs: list[Handoff], tracing: ModelTracing, - *, - previous_response_id: Union[str, None], - prompt: Union[ResponsePromptParam, None] = None, + **kwargs, ) -> ModelResponse: activity.logger.info("Waiting") await asyncio.sleep(1.0) @@ -1571,9 +1570,7 @@ def stream_response( output_schema: Optional[AgentOutputSchemaBase], handoffs: list[Handoff], tracing: ModelTracing, - *, - previous_response_id: Optional[str], - prompt: Optional[ResponsePromptParam], + **kwargs, ) -> AsyncIterator[TResponseStreamEvent]: raise NotImplementedError() diff --git a/uv.lock b/uv.lock index c5fb60fcd..63a083a0c 100644 --- a/uv.lock +++ b/uv.lock @@ -1720,7 +1720,7 @@ wheels = [ [[package]] name = "openai" -version = "1.103.0" +version = "1.107.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -1732,14 +1732,14 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cb/0b/4cacc14f976601edf35b74f7c5c2d6305f7402257cb13b9956b4eaabf94d/openai-1.103.0.tar.gz", hash = "sha256:f84f8741536f01adfdae1acfe31ec1874fc0985d33f53344f9edca773f150a36", size = 556049, upload-time = "2025-09-02T14:03:11.533Z" 
} +sdist = { url = "https://files.pythonhosted.org/packages/88/67/d6498de300f83ff57a79cb7aa96ef3bef8d6f070c3ded0f1b5b45442a6bc/openai-1.107.0.tar.gz", hash = "sha256:43e04927584e57d0e9e640ee0077c78baf8150098be96ebd5c512539b6c4e9a4", size = 566056, upload-time = "2025-09-08T19:25:47.604Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/c0/f5d440153d96c6be42e5d05c39adf33e0c324c9f035daf0e537a71fe2d11/openai-1.103.0-py3-none-any.whl", hash = "sha256:60a69224f0d210a720e7364947d3b712fe0036373f25dc1cb801fc25abb3f864", size = 926169, upload-time = "2025-09-02T14:03:09.666Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/e8a4fd20390f2858b95227c288df8fe0c835f7c77625f7583609161684ba/openai-1.107.0-py3-none-any.whl", hash = "sha256:3dcfa3cbb116bd6924b27913b8da28c4a787379ff60049588547a1013e6d6438", size = 950968, upload-time = "2025-09-08T19:25:45.552Z" }, ] [[package]] name = "openai-agents" -version = "0.2.9" +version = "0.2.11" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "griffe" }, @@ -1750,9 +1750,9 @@ dependencies = [ { name = "types-requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/03/45a1022ae6daa7f0223a3cae8975c5a524858edcad2355f8e1adcd4f000e/openai_agents-0.2.9.tar.gz", hash = "sha256:619c51c8ce49f841474a9e619573d6f7f96a46432900752145ad04309ab70ae9", size = 1664869, upload-time = "2025-08-22T02:03:39.196Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/c7/e8b588851bdbb33f16397b45d182998a01e6e57ff028a143788036a89d53/openai_agents-0.2.11.tar.gz", hash = "sha256:1a2e3fade02b3d8571560dbd121bfe0d84c80f48da04c838d9d5195966714abc", size = 1677542, upload-time = "2025-09-03T22:16:05.856Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/bf/8a8dd24206763214f364b272371486247744a64ef554e952d92444e6ce14/openai_agents-0.2.9-py3-none-any.whl", hash = "sha256:cca016c28e39b24b17cae232c2bc16769e48dbfc7cbe006775d10822c441f6e4", size 
= 175106, upload-time = "2025-08-22T02:03:37.738Z" }, + { url = "https://files.pythonhosted.org/packages/6a/71/a8712a89502b95da64db6b0b31c12ac5039542ae8e31caddba369b6bd324/openai_agents-0.2.11-py3-none-any.whl", hash = "sha256:ed26f7bb2b08bd7607ae87eb7bcfcee8c8f4431da134252757b31120a68b9086", size = 179141, upload-time = "2025-09-03T22:16:03.823Z" }, ] [package.optional-dependencies] @@ -2847,7 +2847,7 @@ requires-dist = [ { name = "eval-type-backport", marker = "python_full_version < '3.10' and extra == 'openai-agents'", specifier = ">=0.2.2" }, { name = "grpcio", marker = "extra == 'grpc'", specifier = ">=1.48.2,<2" }, { name = "nexus-rpc", specifier = "==1.1.0" }, - { name = "openai-agents", marker = "extra == 'openai-agents'", specifier = ">=0.2.3,<=0.2.9" }, + { name = "openai-agents", marker = "extra == 'openai-agents'", specifier = ">=0.2.11,<0.3" }, { name = "opentelemetry-api", marker = "extra == 'opentelemetry'", specifier = ">=1.11.1,<2" }, { name = "opentelemetry-sdk", marker = "extra == 'opentelemetry'", specifier = ">=1.11.1,<2" }, { name = "protobuf", specifier = ">=3.20,<6" }, @@ -2866,7 +2866,7 @@ dev = [ { name = "maturin", specifier = ">=1.8.2" }, { name = "mypy", specifier = "==1.4.1" }, { name = "mypy-protobuf", specifier = ">=3.3.0,<4" }, - { name = "openai-agents", extras = ["litellm"], specifier = ">=0.2.3,<=0.2.9" }, + { name = "openai-agents", extras = ["litellm"], specifier = ">=0.2.11,<0.3" }, { name = "psutil", specifier = ">=5.9.3,<6" }, { name = "pydocstyle", specifier = ">=6.3.0,<7" }, { name = "pydoctor", specifier = ">=24.11.1,<25" },