From f8e5e5d268f38c5572493fc54f90e4d58b72dd27 Mon Sep 17 00:00:00 2001 From: Ian Saultz <52051793+atierian@users.noreply.github.com> Date: Thu, 21 Nov 2024 13:27:27 -0500 Subject: [PATCH 1/4] add conversations > connect your frontend page with client examples --- .../connect-your-frontend/index.mdx | 451 ++++++++++++++++++ 1 file changed, 451 insertions(+) create mode 100644 src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx diff --git a/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx b/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx new file mode 100644 index 00000000000..cc52846b1a8 --- /dev/null +++ b/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx @@ -0,0 +1,451 @@ +import { getCustomStaticPath } from "@/utils/getCustomStaticPath"; + +export const meta = { + title: "Connect your frontend", + description: "Learn how to use AI conversations in your app", + platforms: [ + "javascript", + "react-native", + "angular", + "nextjs", + "react", + "vue", + ], +}; + +export const getStaticPaths = async () => { + return getCustomStaticPath(meta.platforms); +}; + +export function getStaticProps(context) { + return { + props: { + platform: context.params.platform, + meta, + showBreadcrumbs: false, + }, + }; +} + +In this guide, you will learn how to create, update, and delete conversations, as well as send messages and subscribe to assistant responses. + +Conversations and their associated messages are persisted in Amazon DynamoDB. This means the previous messages for a conversation are automatically included in the history sent to the LLM. Access to conversations and messages are scoped to individual users through the [owner based authorization strategy](/react/build-a-backend/data/customize-authz/per-user-per-owner-data-access/). + +## Conversation and Message types + +### Conversation + +There are two main types within the conversation flow, `Conversation` and `Message`. + +A `Conversation` is an instance of a chat session between an application user and an LLM. It contains data and methods for interacting with the conversation. A conversation has a one-to-many relationship with its messages. + +The `Conversation` type is accessible via `Schema['myChat']['type']` type definition. + +| Property/Method | Type | Description | +|------------------|:-------|:-------------| +| `id` | `string` | The unique identifier for the conversation | +| `name` | `string \| undefined` | The name of the conversation.
You can specify a name when creating or updating a conversation. |
+| `metadata` | `Record<string, string> \| undefined` | Metadata associated with the conversation.<br/>
You can specify arbitrary metadata when creating or updating a conversation. |
+| `createdAt` | `string` | The date and time when the conversation was created |
+| `updatedAt` | `string` | The date and time when the last user message was sent |
+| `sendMessage()` | `(content: MessageContent) => {`<br/>
  `data: Message;`
  `errors: Error[]`
`}` | Send a message to the AI assistant | +| `listMessages()` | `() => {`
  `data: Message[];`
  `errors: Error[]`
`}` | List all messages for the conversation | +| `onStreamEvent()`| `(options: {`
  `next: (event: StreamEvent) => void;`
  `error: (error: Error) => void;`
`}) => void` | Subscribe to assistant responses | + +### Message + +A `Message` is a single chat message between an application user and an LLM. Each message has a `role` property that indicates whether the message is from the user or the assistant. User and assistant messages have a one-to-one relationship. Assistant messages contain an `associatedUserMessageId` property that points to the `id` of the user message that triggered the assistant response. + +The `Message` type is accessible via `Schema['myChat']['messageType']`. + +| Property | Type | Description | +|-----------|:-------|:-------------| +| `id` | `string` | The unique identifier for the message | +| `conversationId` | `string` | The ID of the conversation this message belongs to | +| `associatedUserMessageId` | `string \| undefined` | For assistant messages, the ID of the user message that triggered the response | +| `content` | `MessageContent[]` | The content of the message | +| `role` | `'user' \| 'assistant'` | Whether the message is from the user or assistant | +| `createdAt` | `string` | The date and time when the message was created | + +## Data flow + +1. Create a new conversation with `.create()` or get an existing one with `.get()`. +2. Subscribe to assistant responses for a conversation with `.onStreamEvent()`. +3. Send messages to the conversation with `.sendMessage()`. + +```ts +// 1. Create a conversation +const { data: chat, errors } = await client.conversations.chat.create(); + +// 2. Subscribe to assistant responses +const subscription = chat.onStreamEvent({ + next: (event) => { + console.log(event); + }, + error: (error) => { + console.error(error); + }, +}); + +// 3. Send a message to the conversation +const { data: message, errors } = await chat.sendMessage('Hello, world!'); +``` + +## Managing conversations + +### Create a conversation + +Conversation routes defined in your schema are accessible via the `.conversations` namespace. First create a client instance with the `generateClient()` function. + +```ts +import { generateClient } from 'aws-amplify/data'; +import { type Schema } from '../amplify/data/resource' + +const client = generateClient(); +``` + +Then create a new conversation by calling the `.create()` method on your conversation route. In the examples below, we're using a conversation route named `chat`. + +```ts +const { data: chat, errors } = await client.conversations.chat.create(); + +/** +Example conversation data +{ + id: '123e4567-e89b-12d3-a456-426614174000', + createdAt: '2024-01-01T00:00:00.000Z', + updatedAt: '2024-01-01T00:00:00.000Z', +} +*/ +``` + +You can optionally attach a `name` and `metadata` to a conversation by passing them as arguments to the `.create()` method. There are no uniqueness constraints on conversation `name` or `metadata` values. + +```ts +const { data: chat, errors } = await client.conversations.chat.create({ + name: 'My conversation', + metadata: { + value: '1234567890', + }, +}); + +/** +Example conversation data +{ + id: '123e4567-e89b-12d3-a456-426614174000', + name: 'My conversation', + metadata: { + value: '1234567890', + }, + createdAt: '2024-01-01T00:00:00.000Z', + updatedAt: '2024-01-01T00:00:00.000Z', +} +*/ +``` + +### Get an existing conversation + +You can get an existing conversation by calling the `.get()` method on your conversation route with the conversation's `id`. 
+ +```ts +const id = '123e4567-e89b-12d3-a456-426614174000'; +const { data: chat, errors } = await client.conversations.chat.get({ id }); + +/** +Example conversation data +{ + id: '123e4567-e89b-12d3-a456-426614174000', + createdAt: '2024-01-01T00:00:00.000Z', + updatedAt: '2024-01-01T00:00:00.000Z', +} +*/ +``` + +### List conversations + +You can list all conversations for a user with the `.list()` method. Retrieved conversations are sorted by `updatedAt` in descending order. This means the most recently used conversations are returned first. + +```ts +const { data: chat, errors } = await client.conversations.chat.list(); + +/** +Example conversations data +{ + items: [ + { + id: '123e4567-e89b-12d3-a456-426614174000', + createdAt: '2024-01-01T00:00:00.000Z', + updatedAt: '2024-01-01T00:00:00.000Z', + }, + ... + ], + nextToken: '...', +} +*/ +``` + +Use the `nextToken` value to paginate through conversations and optionally specify a `limit` to limit the number of conversations returned. + +```ts +const { data: chat, errors } = await client.conversations.chat.list({ + limit: 10, + nextToken: '...', +}); +``` + +### Update a conversation + +You can update a conversation's `name` and `metadata` with the `.update()` method. + +This is useful if you want to update the conversation name based on the messages sent or attach arbitrary metadata at a later time. + +```ts +const id = '123e4567-e89b-12d3-a456-426614174000'; +const { data: chat, errors } = await client.conversations.chat.update({ + id, + name: 'My updated conversation', +}); + +/** +Example conversation data +{ + id: '123e4567-e89b-12d3-a456-426614174000', + name: 'My updated conversation', + createdAt: '2024-01-01T00:00:00.000Z', + updatedAt: '2024-01-02T00:00:00.000Z', +} +*/ +``` + +### Delete a conversation + + + Deleting a conversation makes it unusable in the future. However it does not delete its associated messages. + + +```ts +const id = '123e4567-e89b-12d3-a456-426614174000'; +const { data: chat, errors } = await client.conversations.chat.delete({ id }); +``` + +## Using a conversation instance + +Once you have a conversation instance, you can interact with it by calling methods on the instance. These methods are documented in the [Conversation and Message types](#conversation-and-message-types) section. + +### Send a message + +Once you have a conversation instance, you can send a message to the AI assistant by calling the `.sendMessage()` method. In its simplest form you just pass the message content as text. + + + The message returned is the user message sent. Assistant messages are streamed back to the client and can be subscribed to with the `.onStreamEvent()` method. See [Subscribe to assistant responses](#subscribe-to-assistant-responses) for more information. + + +```ts +const { data: message, errors } = await chat.sendMessage('Hello, world!'); + +/** +Example message data +{ + id: '98765432-dcba-4321-9876-543210987654', + conversationId: '123e4567-e89b-12d3-a456-426614174000', + content: 'Hello, world!', + role: 'user', + createdAt: '2024-01-01T00:00:00.000Z', +} +*/ +``` + +There are other arguments you can pass to `.sendMessage()` to customize the message according to your application's needs. + +#### Customizing the message content + +`sendMessage()` accepts a object type with a `content` property that provides a flexible way to send different types of content to the AI assistant. + +##### Image Content +Use `image` to send an image to the AI assistant. 
+Supported image formats are `png`, `gif`, `jpeg`, and `webp`. + +```ts +const { data: message, errors } = await chat.sendMessage({ + content: [ + { + image: { + format: 'png', + source: { + bytes: new Uint8Array([1, 2, 3]), + }, + }, + }, + ], +}); +``` + +Mixing `text` and `image` in a single message is supported. + +```ts +const { data: message, errors } = await chat.sendMessage({ + content: [ + { + text: 'describe the image in detail', + }, + { + image: { + format: 'png', + source: { + bytes: new Uint8Array([1, 2, 3]), + }, + }, + }, + ], +}); +``` + +#### AI context + +The `aiContext` argument allows you to optionally attach arbitrary data to the message. This is useful for passing additional information, like user information or current state of your application, in a user message to the AI assistant. + +```ts +const { data: message, errors } = await chat.sendMessage('Hello, world!', { + aiContext: { + user: { + name: 'Ian', + }, + }, +}); +``` + +#### Tool Configuration + +The `toolConfiguration` argument allows you to optionally pass a client tool configuration to the AI assistant with a user message. See the [Tools concept page](/ai/concepts/tools) and [Tools guide](/ai/conversation/tools/) for more information on how tools works. + + + Client tools are conceptually the same as data tools and lambda executable tools. They are API definitions provided to an LLM alongside a user message. The LLM can use the provided tool configuration to decide which tool (if any) to call in order to better respond to the user. However, there's an important distinction with client tools -- you are responsible for implementing the tool execution logic and responding to the AI assistant with the tool's response. + + +The `json` property is simply a JSON Schema definition of the tool's input. The AI assistant will use this schema to provide the expected input to your tool. +```ts +const { data: message, errors } = await conversation.sendMessage({ + content: [ + { + text: "I'd like to make a chocolate cake for my friend with a gluten intolerance. What ingredients do I need?", + }, + ], + toolConfiguration: { + tools: { + generateRecipe: { + description: "List ingredients needed for a recipe", + inputSchema: { + json: { + type: "object", + properties: { + ingredients: { + type: "array", + items: { type: "string" }, + }, + }, + }, + }, + }, + }, + }, +}); +``` + +The response from the AI assistant will be a JSON object that matches the `inputSchema` definition. See [Subscribe to assistant responses](#subscribe-to-assistant-responses) for more information on how to handle the response. + +### Subscribe to assistant responses + +Assistant responses are streamed back to the client as they are generated. This allows for a more natural conversation flow where the user doesn't have to wait for a complete response from the AI assistant to see progress and begin reading the response. To subscribe to assistant responses, call the `.onStreamEvent()` method on your conversation instance. + +```ts +const subscription = conversation.onStreamEvent({ + next: (event) => { + console.log(event); + }, + error: (error) => { + console.error(error); + }, +}); + +// later... +subscription.unsubscribe(); +``` + +`onStreamEvent()` takes two callback functions as arguments: `next` and `error`. The `next` callback is invoked with each assistant response. + +The `error` callback is invoked if there's an error while processing messages. + +The `next` callback is invoked with a `ConversationStreamEvent` object. 
This type is accessible via `Schema['myChat']['streamEventType']` and is a union of the following types: + +#### ConversationStreamTextEvent + +As text is streamed back to the client, the `next` callback is invoked with a `ConversationStreamTextEvent` object. + +| Property | Type | Description | +|----------|:-----|:------------| +| `id` | `string` | The unique identifier for the stream event | +| `conversationId` | `string` | The ID of the conversation this event belongs to | +| `associatedUserMessageId` | `string` | The ID of the user message that triggered this response | +| `contentBlockIndex` | `number` | The index of the content block being streamed | +| `contentBlockDeltaIndex` | `number` | The index of the delta within the content block | +| `text` | `string` | The text content being streamed | + +#### ConversationStreamDoneAtIndexEvent + +When the AI assistant completes a content block, the `next` callback is invoked with a `ConversationStreamDoneAtIndexEvent` object. + +| Property | Type | Description | +|----------|:-----|:------------| +| `id` | `string` | The unique identifier for the stream event | +| `conversationId` | `string` | The ID of the conversation this event belongs to | +| `associatedUserMessageId` | `string` | The ID of the user message that triggered this response | +| `contentBlockIndex` | `number` | The index of the content block that is complete | +| `contentBlockDoneAtIndex` | `number` | The index at which the content block is complete | + + +#### ConversationStreamTurnDoneEvent + +When the AI assistant completes a turn, the `next` callback is invoked with a `ConversationStreamTurnDoneEvent` object. This event indicates that the assistant has completed a turn and is waiting for the next user message. + +| Property | Type | Description | +|----------|:-----|:------------| +| `id` | `string` | The unique identifier for the stream event | +| `conversationId` | `string` | The ID of the conversation this event belongs to | +| `associatedUserMessageId` | `string` | The ID of the user message that triggered this response | +| `contentBlockIndex` | `number` | The index of the final content block for the turn | +| `stopReason` | `string` | The reason why the assistant stopped generating the response | + +#### ConversationStreamToolUseEvent + +When the AI assistant uses a client tool, the `next` callback is invoked with a `ConversationStreamToolUseEvent` object. Tool use events are accumulated in your cloud resources and sent to the client as a single event. + +| Property | Type | Description | +|----------|:-----|:------------| +| `id` | `string` | The unique identifier for the stream event | +| `conversationId` | `string` | The ID of the conversation this event belongs to | +| `associatedUserMessageId` | `string` | The ID of the user message that triggered this response | +| `contentBlockIndex` | `number` | The index of the content block being streamed | +| `toolUse` | `ToolUseBlock` | The tool use block containing function call information | + + + **A note on ordering** + + There are no guarantees that events will be received by the client in order. For example, a `ConversationStreamTextEvent` with `contentBlockDeltaIndex` of `1` may be received before the preceding text with `contentBlockDeltaIndex` of `0`. Assume that events may be received out of order and use the `contentBlockIndex` and `contentBlockDeltaIndex` properties to order the events as needed. 
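+
+If your application needs to display the streamed text strictly in order, a small client-side buffer keyed by these indexes is usually enough. The following is a minimal sketch of that approach, assuming the event shapes described in the tables above; the `render` helper and `blocks` buffer are illustrative placeholders, not part of the Amplify API.
+
+```ts
+// Buffer text deltas by content block and delta index, then join them in order.
+const blocks: string[][] = [];
+
+// Placeholder for however your UI displays the in-progress assistant message.
+const render = (text: string) => console.log(text);
+
+const subscription = conversation.onStreamEvent({
+  next: (event) => {
+    if ('text' in event && typeof event.text === 'string') {
+      // Slot each delta at its index so late-arriving chunks land in the right place.
+      const block = (blocks[event.contentBlockIndex] ??= []);
+      block[event.contentBlockDeltaIndex] = event.text;
+      render(blocks.map((deltas) => deltas.join('')).join('\n'));
+    } else if ('stopReason' in event) {
+      // The assistant finished its turn; the accumulated text is the complete response.
+      console.log('assistant turn complete:', event.stopReason);
+    }
+  },
+  error: (error) => console.error(error),
+});
+```
+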
+ + +### List messages for a conversation + +Retrieve all messages for a conversation by calling the `.listMessages()` method on your conversation instance. Recall that messages are automatically persisted, so you can retrieve them at any time to display the conversation history. + +```ts +const { data: messages, errors } = await conversation.listMessages(); +``` + +Similar to the `client.conversations.chat.list()` method, retrieved messages are paginated. Use the `nextToken` value to paginate through messages and optionally specify a `limit` to limit the number of messages returned. + +```ts +const { data: messages, errors } = await conversation.listMessages({ + limit: 10, + nextToken: '...', +}); +``` From 6ce2cf8b7be66485d9c78f2228a36c941a2d6249 Mon Sep 17 00:00:00 2001 From: Ian Saultz <52051793+atierian@users.noreply.github.com> Date: Fri, 22 Nov 2024 16:14:59 -0500 Subject: [PATCH 2/4] Apply suggestions from code review Co-authored-by: Danny Banks --- .../[platform]/ai/conversation/connect-your-frontend/index.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx b/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx index cc52846b1a8..33f9dcbb15f 100644 --- a/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx +++ b/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx @@ -39,7 +39,7 @@ There are two main types within the conversation flow, `Conversation` and `Messa A `Conversation` is an instance of a chat session between an application user and an LLM. It contains data and methods for interacting with the conversation. A conversation has a one-to-many relationship with its messages. -The `Conversation` type is accessible via `Schema['myChat']['type']` type definition. +The `Conversation` type is accessible via `Schema['myChat']['type']` type definition, where `'myChat'` is the name of the conversation route in your data schema. | Property/Method | Type | Description | |------------------|:-------|:-------------| From 07a979bafd94df9afad0276dcba533b748e1aa82 Mon Sep 17 00:00:00 2001 From: Ian Saultz <52051793+atierian@users.noreply.github.com> Date: Fri, 22 Nov 2024 17:01:33 -0500 Subject: [PATCH 3/4] address pr feedback --- src/directory/directory.mjs | 3 +++ .../connect-your-frontend/index.mdx | 24 +++++++++---------- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/src/directory/directory.mjs b/src/directory/directory.mjs index 8ca06f860b7..39a36c3dd32 100644 --- a/src/directory/directory.mjs +++ b/src/directory/directory.mjs @@ -756,6 +756,9 @@ export const directory = { { path: 'src/pages/[platform]/ai/conversation/ai-conversation/index.mdx' }, + { + path: 'src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx' + }, { path: 'src/pages/[platform]/ai/conversation/history/index.mdx' }, diff --git a/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx b/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx index 33f9dcbb15f..e646c3bfd18 100644 --- a/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx +++ b/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx @@ -39,7 +39,7 @@ There are two main types within the conversation flow, `Conversation` and `Messa A `Conversation` is an instance of a chat session between an application user and an LLM. It contains data and methods for interacting with the conversation. 
A conversation has a one-to-many relationship with its messages. -The `Conversation` type is accessible via `Schema['myChat']['type']` type definition, where `'myChat'` is the name of the conversation route in your data schema. +The `Conversation` type is accessible via `Schema['myChat']['type']` type definition, where `'myChat'` is the name of the conversation route in your data schema. | Property/Method | Type | Description | |------------------|:-------|:-------------| @@ -56,7 +56,7 @@ The `Conversation` type is accessible via `Schema['myChat']['type']` type defini A `Message` is a single chat message between an application user and an LLM. Each message has a `role` property that indicates whether the message is from the user or the assistant. User and assistant messages have a one-to-one relationship. Assistant messages contain an `associatedUserMessageId` property that points to the `id` of the user message that triggered the assistant response. -The `Message` type is accessible via `Schema['myChat']['messageType']`. +The `Message` type is accessible via `Schema['myChat']['messageType']`, where `'myChat'` is the name of the conversation route in your data schema. | Property | Type | Description | |-----------|:-------|:-------------| @@ -67,22 +67,29 @@ The `Message` type is accessible via `Schema['myChat']['messageType']`. | `role` | `'user' \| 'assistant'` | Whether the message is from the user or assistant | | `createdAt` | `string` | The date and time when the message was created | -## Data flow +## Request response flow 1. Create a new conversation with `.create()` or get an existing one with `.get()`. 2. Subscribe to assistant responses for a conversation with `.onStreamEvent()`. 3. Send messages to the conversation with `.sendMessage()`. ```ts +import { generateClient } from 'aws-amplify/data'; +import { type Schema } from '../amplify/data/resource' + +const client = generateClient(); + // 1. Create a conversation const { data: chat, errors } = await client.conversations.chat.create(); // 2. Subscribe to assistant responses const subscription = chat.onStreamEvent({ next: (event) => { + // handle assistant response stream events console.log(event); }, error: (error) => { + // handle errors console.error(error); }, }); @@ -95,16 +102,7 @@ const { data: message, errors } = await chat.sendMessage('Hello, world!'); ### Create a conversation -Conversation routes defined in your schema are accessible via the `.conversations` namespace. First create a client instance with the `generateClient()` function. - -```ts -import { generateClient } from 'aws-amplify/data'; -import { type Schema } from '../amplify/data/resource' - -const client = generateClient(); -``` - -Then create a new conversation by calling the `.create()` method on your conversation route. In the examples below, we're using a conversation route named `chat`. +Create a new conversation by calling the `.create()` method on your conversation route. In the examples below, we're using a conversation route named `chat`. 
```ts const { data: chat, errors } = await client.conversations.chat.create(); From 64d0bf5082071083415f75afaa58d90bf3bf63c0 Mon Sep 17 00:00:00 2001 From: Ian Saultz <52051793+atierian@users.noreply.github.com> Date: Fri, 22 Nov 2024 18:29:38 -0500 Subject: [PATCH 4/4] Update index.mdx Co-authored-by: Danny Banks --- .../[platform]/ai/conversation/connect-your-frontend/index.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx b/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx index e646c3bfd18..911f0c77afb 100644 --- a/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx +++ b/src/pages/[platform]/ai/conversation/connect-your-frontend/index.mdx @@ -314,7 +314,7 @@ const { data: message, errors } = await chat.sendMessage('Hello, world!', { #### Tool Configuration -The `toolConfiguration` argument allows you to optionally pass a client tool configuration to the AI assistant with a user message. See the [Tools concept page](/ai/concepts/tools) and [Tools guide](/ai/conversation/tools/) for more information on how tools works. +The `toolConfiguration` argument allows you to optionally pass a client tool configuration to the AI assistant with a user message. See the [Tools concept page](/[platform]/ai/concepts/tools) and [Tools guide](/[platform]/ai/conversation/tools/) for more information on how tools works. Client tools are conceptually the same as data tools and lambda executable tools. They are API definitions provided to an LLM alongside a user message. The LLM can use the provided tool configuration to decide which tool (if any) to call in order to better respond to the user. However, there's an important distinction with client tools -- you are responsible for implementing the tool execution logic and responding to the AI assistant with the tool's response.