from __future__ import annotations

from openai import AsyncOpenAI

from agents.models._openai_shared import get_default_openai_client

from ..items import TResponseInputItem
from .session import SessionABC


async def start_openai_conversations_session(openai_client: AsyncOpenAI | None = None) -> str:
    _maybe_openai_client = openai_client
    if openai_client is None:
        _maybe_openai_client = get_default_openai_client() or AsyncOpenAI()
    # _maybe_openai_client can no longer be None at this point
    _openai_client: AsyncOpenAI = _maybe_openai_client  # type: ignore [assignment]

    response = await _openai_client.conversations.create(items=[])
    return response.id

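# Illustrative sketch (not part of the original module): assuming default OpenAI
# credentials are configured, the helper above can pre-create a server-side
# conversation whose id is then handed to OpenAIConversationsSession, e.g.:
#
#     conversation_id = await start_openai_conversations_session()
#     session = OpenAIConversationsSession(conversation_id=conversation_id)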

_EMPTY_SESSION_ID = ""


class OpenAIConversationsSession(SessionABC):
    def __init__(
        self,
        *,
        conversation_id: str | None = None,
        openai_client: AsyncOpenAI | None = None,
    ):
        self._session_id: str | None = conversation_id
        _openai_client = openai_client
        if _openai_client is None:
            _openai_client = get_default_openai_client() or AsyncOpenAI()
        # _openai_client can no longer be None at this point
        self._openai_client: AsyncOpenAI = _openai_client

    async def _get_session_id(self) -> str:
        if self._session_id is None:
            self._session_id = await start_openai_conversations_session(self._openai_client)
        return self._session_id

    async def _clear_session_id(self) -> None:
        self._session_id = None

    async def get_items(self, limit: int | None = None) -> list[TResponseInputItem]:
        session_id = await self._get_session_id()
        all_items = []
        if limit is None:
            async for item in self._openai_client.conversations.items.list(
                conversation_id=session_id,
                order="asc",
            ):
                # Call model_dump() so the returned items are plain, serializable dicts.
                all_items.append(item.model_dump())
        else:
            # With a limit, fetch the newest items first, then restore chronological
            # order below so callers always see items oldest-to-newest.
            async for item in self._openai_client.conversations.items.list(
                conversation_id=session_id,
                limit=limit,
                order="desc",
            ):
                # Call model_dump() so the returned items are plain, serializable dicts.
                all_items.append(item.model_dump())
                if len(all_items) >= limit:
                    break
            all_items.reverse()

        return all_items  # type: ignore

    async def add_items(self, items: list[TResponseInputItem]) -> None:
        session_id = await self._get_session_id()
        await self._openai_client.conversations.items.create(
            conversation_id=session_id,
            items=items,
        )

    async def pop_item(self) -> TResponseInputItem | None:
        session_id = await self._get_session_id()
        # get_items(limit=1) returns the most recent item, so pop removes the newest entry.
        items = await self.get_items(limit=1)
        if not items:
            return None
        item_id: str = str(items[0]["id"])  # type: ignore [typeddict-item]
        await self._openai_client.conversations.items.delete(
            conversation_id=session_id, item_id=item_id
        )
        return items[0]

    async def clear_session(self) -> None:
        session_id = await self._get_session_id()
        await self._openai_client.conversations.delete(
            conversation_id=session_id,
        )
        await self._clear_session_id()
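

# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of exercising the session API defined above
# (add_items / get_items / pop_item / clear_session). The message dict below is
# assumed to be an acceptable TResponseInputItem shape; agent/runner integration
# is intentionally omitted, and OpenAI credentials are assumed to be configured.
async def _example_usage() -> None:
    session = OpenAIConversationsSession()
    await session.add_items([{"role": "user", "content": "Hello!"}])
    items = await session.get_items(limit=10)
    print(items)
    # Delete the server-side conversation and reset the cached id.
    await session.clear_session()


if __name__ == "__main__":
    import asyncio

    asyncio.run(_example_usage())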