diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIChatExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIChatExtensions.cs
index 13242a9b32f..0385d318842 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIChatExtensions.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIChatExtensions.cs
@@ -6,13 +6,14 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
+using System.Runtime.CompilerServices;
using System.Text.Encodings.Web;
using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
using Microsoft.Extensions.AI;
using Microsoft.Shared.Diagnostics;
-#pragma warning disable S103 // Lines should not be too long
-
namespace OpenAI.Chat;
/// Provides extension methods for working with content associated with OpenAI.Chat.
@@ -27,10 +28,10 @@ public static ChatTool AsOpenAIChatTool(this AIFunction function) =>
/// Creates a sequence of OpenAI instances from the specified input messages.
/// The input messages to convert.
+    /// <param name="options">The options employed while processing <paramref name="messages"/>.</param>
/// A sequence of OpenAI chat messages.
- /// is .
- public static IEnumerable AsOpenAIChatMessages(this IEnumerable messages) =>
- OpenAIChatClient.ToOpenAIChatMessages(Throw.IfNull(messages), chatOptions: null);
+ public static IEnumerable AsOpenAIChatMessages(this IEnumerable messages, ChatOptions? options = null) =>
+ OpenAIChatClient.ToOpenAIChatMessages(Throw.IfNull(messages), options);
/// Creates an OpenAI from a .
/// The to convert to a .
@@ -47,24 +48,9 @@ public static ChatCompletion AsOpenAIChatCompletion(this ChatResponse response)
var lastMessage = response.Messages.LastOrDefault();
- ChatMessageRole role = lastMessage?.Role.Value switch
- {
- "user" => ChatMessageRole.User,
- "function" => ChatMessageRole.Function,
- "tool" => ChatMessageRole.Tool,
- "developer" => ChatMessageRole.Developer,
- "system" => ChatMessageRole.System,
- _ => ChatMessageRole.Assistant,
- };
+ ChatMessageRole role = ToChatMessageRole(lastMessage?.Role);
- ChatFinishReason finishReason = response.FinishReason?.Value switch
- {
- "length" => ChatFinishReason.Length,
- "content_filter" => ChatFinishReason.ContentFilter,
- "tool_calls" => ChatFinishReason.ToolCalls,
- "function_call" => ChatFinishReason.FunctionCall,
- _ => ChatFinishReason.Stop,
- };
+ ChatFinishReason finishReason = ToChatFinishReason(response.FinishReason);
ChatTokenUsage usage = OpenAIChatModelFactory.ChatTokenUsage(
(int?)response.Usage?.OutputTokenCount ?? 0,
@@ -124,6 +110,52 @@ static IEnumerable ConvertAnnotations(IEnumerable
+ /// Creates a sequence of OpenAI instances from the specified
+ /// sequence of instances.
+ ///
+    /// <param name="responseUpdates">The update instances.</param>
+    /// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
+    /// <returns>A sequence of converted <see cref="StreamingChatCompletionUpdate"/> instances.</returns>
+    /// <exception cref="ArgumentNullException"><paramref name="responseUpdates"/> is <see langword="null"/>.</exception>
+ public static async IAsyncEnumerable AsOpenAIStreamingChatCompletionUpdatesAsync(
+ this IAsyncEnumerable responseUpdates, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ _ = Throw.IfNull(responseUpdates);
+
+ await foreach (var update in responseUpdates.WithCancellation(cancellationToken).ConfigureAwait(false))
+ {
+ if (update.RawRepresentation is StreamingChatCompletionUpdate streamingUpdate)
+ {
+ yield return streamingUpdate;
+ continue;
+ }
+
+ var usage = update.Contents.FirstOrDefault(c => c is UsageContent) is UsageContent usageContent ?
+ OpenAIChatModelFactory.ChatTokenUsage(
+ (int?)usageContent.Details.OutputTokenCount ?? 0,
+ (int?)usageContent.Details.InputTokenCount ?? 0,
+ (int?)usageContent.Details.TotalTokenCount ?? 0) :
+ null;
+
+ var toolCallUpdates = update.Contents.OfType().Select((fcc, index) =>
+ OpenAIChatModelFactory.StreamingChatToolCallUpdate(
+ index, fcc.CallId, ChatToolCallKind.Function, fcc.Name,
+ new(JsonSerializer.SerializeToUtf8Bytes(fcc.Arguments, AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(IDictionary))))))
+ .ToList();
+
+ yield return OpenAIChatModelFactory.StreamingChatCompletionUpdate(
+ update.ResponseId,
+ new(OpenAIChatClient.ToOpenAIChatContent(update.Contents)),
+ toolCallUpdates: toolCallUpdates,
+ role: ToChatMessageRole(update.Role),
+ finishReason: ToChatFinishReason(update.FinishReason),
+ createdAt: update.CreatedAt ?? default,
+ model: update.ModelId,
+ usage: usage);
+ }
+ }
+
/// Creates a sequence of instances from the specified input messages.
/// The input messages to convert.
/// A sequence of Microsoft.Extensions.AI chat messages.
@@ -205,4 +237,40 @@ static object ToToolResult(ChatMessageContent content)
/// is .
public static ChatResponse AsChatResponse(this ChatCompletion chatCompletion, ChatCompletionOptions? options = null) =>
OpenAIChatClient.FromOpenAIChatCompletion(Throw.IfNull(chatCompletion), options);
+
+ ///
+ /// Creates a sequence of Microsoft.Extensions.AI instances from the specified
+ /// sequence of OpenAI instances.
+ ///
+    /// <param name="chatCompletionUpdates">The update instances.</param>
+    /// <param name="options">The <see cref="ChatCompletionOptions"/> employed in the creation of the response.</param>
+    /// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
+    /// <returns>A sequence of converted <see cref="ChatResponseUpdate"/> instances.</returns>
+    /// <exception cref="ArgumentNullException"><paramref name="chatCompletionUpdates"/> is <see langword="null"/>.</exception>
+ public static IAsyncEnumerable AsChatResponseUpdatesAsync(
+ this IAsyncEnumerable chatCompletionUpdates, ChatCompletionOptions? options = null, CancellationToken cancellationToken = default) =>
+ OpenAIChatClient.FromOpenAIStreamingChatCompletionAsync(Throw.IfNull(chatCompletionUpdates), options, cancellationToken);
+
+ /// Converts the to a .
+ private static ChatMessageRole ToChatMessageRole(ChatRole? role) =>
+ role?.Value switch
+ {
+ "user" => ChatMessageRole.User,
+ "function" => ChatMessageRole.Function,
+ "tool" => ChatMessageRole.Tool,
+ "developer" => ChatMessageRole.Developer,
+ "system" => ChatMessageRole.System,
+ _ => ChatMessageRole.Assistant,
+ };
+
+ /// Converts the to a .
+ private static ChatFinishReason ToChatFinishReason(Microsoft.Extensions.AI.ChatFinishReason? finishReason) =>
+ finishReason?.Value switch
+ {
+ "length" => ChatFinishReason.Length,
+ "content_filter" => ChatFinishReason.ContentFilter,
+ "tool_calls" => ChatFinishReason.ToolCalls,
+ "function_call" => ChatFinishReason.FunctionCall,
+ _ => ChatFinishReason.Stop,
+ };
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
index 8f39ad7852e..083b4057d7d 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
@@ -3,6 +3,7 @@
using System;
using System.Collections.Generic;
+using System.Threading;
using Microsoft.Extensions.AI;
using Microsoft.Shared.Diagnostics;
@@ -20,10 +21,11 @@ public static ResponseTool AsOpenAIResponseTool(this AIFunction function) =>
/// Creates a sequence of OpenAI instances from the specified input messages.
/// The input messages to convert.
+    /// <param name="options">The options employed while processing <paramref name="messages"/>.</param>
/// A sequence of OpenAI response items.
/// is .
- public static IEnumerable AsOpenAIResponseItems(this IEnumerable messages) =>
- OpenAIResponsesChatClient.ToOpenAIResponseItems(Throw.IfNull(messages));
+ public static IEnumerable AsOpenAIResponseItems(this IEnumerable messages, ChatOptions? options = null) =>
+ OpenAIResponsesChatClient.ToOpenAIResponseItems(Throw.IfNull(messages), options);
/// Creates a sequence of instances from the specified input items.
/// The input messages to convert.
@@ -40,6 +42,19 @@ public static IEnumerable AsChatMessages(this IEnumerable
OpenAIResponsesChatClient.FromOpenAIResponse(Throw.IfNull(response), options);
+ ///
+ /// Creates a sequence of Microsoft.Extensions.AI instances from the specified
+ /// sequence of OpenAI instances.
+ ///
+    /// <param name="responseUpdates">The update instances.</param>
+    /// <param name="options">The <see cref="ResponseCreationOptions"/> employed in the creation of the response.</param>
+    /// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
+    /// <returns>A sequence of converted <see cref="ChatResponseUpdate"/> instances.</returns>
+    /// <exception cref="ArgumentNullException"><paramref name="responseUpdates"/> is <see langword="null"/>.</exception>
+ public static IAsyncEnumerable AsChatResponseUpdatesAsync(
+ this IAsyncEnumerable responseUpdates, ResponseCreationOptions? options = null, CancellationToken cancellationToken = default) =>
+ OpenAIResponsesChatClient.FromOpenAIStreamingResponseUpdatesAsync(Throw.IfNull(responseUpdates), options, cancellationToken);
+
/// Creates an OpenAI from a .
/// The response to convert.
/// The created .
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
index 7173046ccac..1a56452e332 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
@@ -303,7 +303,7 @@ internal static List ToOpenAIChatContent(IEnumerable FromOpenAIStreamingChatCompletionAsync(
+ internal static async IAsyncEnumerable FromOpenAIStreamingChatCompletionAsync(
IAsyncEnumerable updates,
ChatCompletionOptions? options,
[EnumeratorCancellation] CancellationToken cancellationToken)
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
index 40e2ee048fd..b1d4b010b99 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
@@ -72,7 +72,7 @@ public async Task GetResponseAsync(
_ = Throw.IfNull(messages);
// Convert the inputs into what OpenAIResponseClient expects.
- var openAIResponseItems = ToOpenAIResponseItems(messages);
+ var openAIResponseItems = ToOpenAIResponseItems(messages, options);
var openAIOptions = ToOpenAIResponseCreationOptions(options);
// Make the call to the OpenAIResponseClient.
@@ -174,16 +174,22 @@ internal static IEnumerable ToChatMessages(IEnumerable
- public async IAsyncEnumerable GetStreamingResponseAsync(
- IEnumerable messages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ public IAsyncEnumerable GetStreamingResponseAsync(
+ IEnumerable messages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(messages);
- // Convert the inputs into what OpenAIResponseClient expects.
- var openAIResponseItems = ToOpenAIResponseItems(messages);
+ var openAIResponseItems = ToOpenAIResponseItems(messages, options);
var openAIOptions = ToOpenAIResponseCreationOptions(options);
- // Make the call to the OpenAIResponseClient and process the streaming results.
+ var streamingUpdates = _responseClient.CreateResponseStreamingAsync(openAIResponseItems, openAIOptions, cancellationToken);
+
+ return FromOpenAIStreamingResponseUpdatesAsync(streamingUpdates, openAIOptions, cancellationToken);
+ }
+
+ internal static async IAsyncEnumerable FromOpenAIStreamingResponseUpdatesAsync(
+ IAsyncEnumerable streamingResponseUpdates, ResponseCreationOptions? options, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
DateTimeOffset? createdAt = null;
string? responseId = null;
string? conversationId = null;
@@ -192,14 +198,15 @@ public async IAsyncEnumerable GetStreamingResponseAsync(
ChatRole? lastRole = null;
Dictionary outputIndexToMessages = [];
Dictionary? functionCallInfos = null;
- await foreach (var streamingUpdate in _responseClient.CreateResponseStreamingAsync(openAIResponseItems, openAIOptions, cancellationToken).ConfigureAwait(false))
+
+ await foreach (var streamingUpdate in streamingResponseUpdates.WithCancellation(cancellationToken).ConfigureAwait(false))
{
switch (streamingUpdate)
{
case StreamingResponseCreatedUpdate createdUpdate:
createdAt = createdUpdate.Response.CreatedAt;
responseId = createdUpdate.Response.Id;
- conversationId = openAIOptions.StoredOutputEnabled is false ? null : responseId;
+ conversationId = options?.StoredOutputEnabled is false ? null : responseId;
modelId = createdUpdate.Response.Model;
goto default;
@@ -485,8 +492,10 @@ private ResponseCreationOptions ToOpenAIResponseCreationOptions(ChatOptions? opt
}
/// Convert a sequence of s to s.
- internal static IEnumerable ToOpenAIResponseItems(IEnumerable inputs)
+ internal static IEnumerable ToOpenAIResponseItems(IEnumerable inputs, ChatOptions? options)
{
+        _ = options; // Unused today; accepted so options can influence conversion later without a breaking signature change.
+
foreach (ChatMessage input in inputs)
{
if (input.Role == ChatRole.System ||
diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIConversionTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIConversionTests.cs
index 46a3c8ee8a0..79b8148a040 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIConversionTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIConversionTests.cs
@@ -6,6 +6,7 @@
using System.ComponentModel;
using System.Linq;
using System.Text.Json;
+using System.Threading.Tasks;
using OpenAI.Assistants;
using OpenAI.Chat;
using OpenAI.Realtime;
@@ -77,8 +78,10 @@ private static void ValidateSchemaParameters(BinaryData parameters)
Assert.Equal("The name parameter", nameProperty.GetProperty("description").GetString());
}
- [Fact]
- public void AsOpenAIChatMessages_ProducesExpectedOutput()
+ [Theory]
+ [InlineData(false)]
+ [InlineData(true)]
+ public void AsOpenAIChatMessages_ProducesExpectedOutput(bool withOptions)
{
Assert.Throws("messages", () => ((IEnumerable)null!).AsOpenAIChatMessages());
@@ -99,17 +102,31 @@ public void AsOpenAIChatMessages_ProducesExpectedOutput()
new(ChatRole.Assistant, "The answer is 42."),
];
- var convertedMessages = messages.AsOpenAIChatMessages().ToArray();
+ ChatOptions? options = withOptions ? new ChatOptions { Instructions = "You talk like a parrot." } : null;
+
+ var convertedMessages = messages.AsOpenAIChatMessages(options).ToArray();
+
+ int index = 0;
+ if (withOptions)
+ {
+ Assert.Equal(6, convertedMessages.Length);
- Assert.Equal(5, convertedMessages.Length);
+ index = 1;
+ SystemChatMessage instructionsMessage = Assert.IsType(convertedMessages[0]);
+ Assert.Equal("You talk like a parrot.", Assert.Single(instructionsMessage.Content).Text);
+ }
+ else
+ {
+ Assert.Equal(5, convertedMessages.Length);
+ }
- SystemChatMessage m0 = Assert.IsType(convertedMessages[0]);
+ SystemChatMessage m0 = Assert.IsType(convertedMessages[index]);
Assert.Equal("You are a helpful assistant.", Assert.Single(m0.Content).Text);
- UserChatMessage m1 = Assert.IsType(convertedMessages[1]);
+ UserChatMessage m1 = Assert.IsType(convertedMessages[index + 1]);
Assert.Equal("Hello", Assert.Single(m1.Content).Text);
- AssistantChatMessage m2 = Assert.IsType(convertedMessages[2]);
+ AssistantChatMessage m2 = Assert.IsType(convertedMessages[index + 2]);
Assert.Single(m2.Content);
Assert.Equal("Hi there!", m2.Content[0].Text);
var tc = Assert.Single(m2.ToolCalls);
@@ -121,11 +138,11 @@ public void AsOpenAIChatMessages_ProducesExpectedOutput()
["param2"] = 42
}), JsonSerializer.Deserialize(tc.FunctionArguments.ToMemory().Span)));
- ToolChatMessage m3 = Assert.IsType(convertedMessages[3]);
+ ToolChatMessage m3 = Assert.IsType(convertedMessages[index + 3]);
Assert.Equal("callid123", m3.ToolCallId);
Assert.Equal("theresult", Assert.Single(m3.Content).Text);
- AssistantChatMessage m4 = Assert.IsType(convertedMessages[4]);
+ AssistantChatMessage m4 = Assert.IsType(convertedMessages[index + 4]);
Assert.Equal("The answer is 42.", Assert.Single(m4.Content).Text);
}
@@ -217,6 +234,70 @@ public void AsChatResponse_ConvertsOpenAIChatCompletion()
Assert.Equal("functionName", Assert.IsType(message.Contents[2]).Name);
}
+ [Fact]
+ public async Task AsChatResponse_ConvertsOpenAIStreamingChatCompletionUpdates()
+ {
+ Assert.Throws("chatCompletionUpdates", () => ((IAsyncEnumerable)null!).AsChatResponseUpdatesAsync());
+
+ List updates = [];
+ await foreach (var update in CreateUpdates().AsChatResponseUpdatesAsync())
+ {
+ updates.Add(update);
+ }
+
+ ChatResponse response = updates.ToChatResponse();
+
+ Assert.Equal("id", response.ResponseId);
+ Assert.Equal(ChatFinishReason.ToolCalls, response.FinishReason);
+ Assert.Equal("model123", response.ModelId);
+ Assert.Equal(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero), response.CreatedAt);
+ Assert.NotNull(response.Usage);
+ Assert.Equal(1, response.Usage.InputTokenCount);
+ Assert.Equal(2, response.Usage.OutputTokenCount);
+ Assert.Equal(3, response.Usage.TotalTokenCount);
+
+ ChatMessage message = Assert.Single(response.Messages);
+ Assert.Equal(ChatRole.Assistant, message.Role);
+
+ Assert.Equal(3, message.Contents.Count);
+ Assert.Equal("Hello, world!", Assert.IsType(message.Contents[0]).Text);
+ Assert.Equal("http://example.com/image.png", Assert.IsType(message.Contents[1]).Uri.ToString());
+ Assert.Equal("functionName", Assert.IsType(message.Contents[2]).Name);
+
+ static async IAsyncEnumerable CreateUpdates()
+ {
+ await Task.Yield();
+ yield return OpenAIChatModelFactory.StreamingChatCompletionUpdate(
+ "id",
+ new ChatMessageContent(
+ ChatMessageContentPart.CreateTextPart("Hello, world!"),
+ ChatMessageContentPart.CreateImagePart(new Uri("http://example.com/image.png"))),
+ null,
+ [OpenAIChatModelFactory.StreamingChatToolCallUpdate(0, "id", ChatToolCallKind.Function, "functionName", BinaryData.FromString("test"))],
+ ChatMessageRole.Assistant,
+ null, null, null, OpenAI.Chat.ChatFinishReason.ToolCalls, new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero),
+ "model123", null, OpenAIChatModelFactory.ChatTokenUsage(2, 1, 3));
+ }
+ }
+
+ [Fact]
+ public void AsChatResponse_ConvertsOpenAIResponse()
+ {
+ Assert.Throws("response", () => ((OpenAIResponse)null!).AsChatResponse());
+
+ // The OpenAI library currently doesn't provide any way to create an OpenAIResponse instance,
+ // as all constructors/factory methods currently are internal. Update this test when such functionality is available.
+ }
+
+ [Fact]
+ public void AsChatResponseUpdatesAsync_ConvertsOpenAIStreamingResponseUpdates()
+ {
+ Assert.Throws("responseUpdates", () => ((IAsyncEnumerable)null!).AsChatResponseUpdatesAsync());
+
+ // The OpenAI library currently doesn't provide any way to create a StreamingResponseUpdate instance,
+ // as all constructors/factory methods currently are internal. Update this test when such functionality is available.
+ }
+
[Fact]
public void AsChatMessages_FromOpenAIChatMessages_ProducesExpectedOutput()
{
@@ -455,4 +536,323 @@ public void AsOpenAIChatCompletion_WithDifferentRoles_MapsCorrectly()
Assert.Equal(expectedOpenAIRole, completion.Role);
}
}
+
+ [Fact]
+ public async Task AsOpenAIStreamingChatCompletionUpdatesAsync_WithNullArgument_ThrowsArgumentNullException()
+ {
+ var asyncEnumerable = ((IAsyncEnumerable)null!).AsOpenAIStreamingChatCompletionUpdatesAsync();
+ await Assert.ThrowsAsync(async () => await asyncEnumerable.GetAsyncEnumerator().MoveNextAsync());
+ }
+
+ [Fact]
+ public async Task AsOpenAIStreamingChatCompletionUpdatesAsync_WithEmptyCollection_ReturnsEmptySequence()
+ {
+ var updates = new List();
+ var result = new List();
+
+ await foreach (var update in CreateAsyncEnumerable(updates).AsOpenAIStreamingChatCompletionUpdatesAsync())
+ {
+ result.Add(update);
+ }
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public async Task AsOpenAIStreamingChatCompletionUpdatesAsync_WithRawRepresentation_ReturnsOriginal()
+ {
+ var originalUpdate = OpenAIChatModelFactory.StreamingChatCompletionUpdate(
+ "test-id",
+ new ChatMessageContent(ChatMessageContentPart.CreateTextPart("Hello")),
+ role: ChatMessageRole.Assistant,
+ finishReason: OpenAI.Chat.ChatFinishReason.Stop,
+ createdAt: new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero),
+ model: "gpt-3.5-turbo");
+
+ var responseUpdate = new ChatResponseUpdate(ChatRole.Assistant, "Hello")
+ {
+ RawRepresentation = originalUpdate
+ };
+
+ var result = new List();
+ await foreach (var update in CreateAsyncEnumerable(new[] { responseUpdate }).AsOpenAIStreamingChatCompletionUpdatesAsync())
+ {
+ result.Add(update);
+ }
+
+ Assert.Single(result);
+ Assert.Same(originalUpdate, result[0]);
+ }
+
+ [Fact]
+ public async Task AsOpenAIStreamingChatCompletionUpdatesAsync_WithTextContent_CreatesValidUpdate()
+ {
+ var responseUpdate = new ChatResponseUpdate(ChatRole.Assistant, "Hello, world!")
+ {
+ ResponseId = "response-123",
+ MessageId = "message-456",
+ ModelId = "gpt-4",
+ FinishReason = ChatFinishReason.Stop,
+ CreatedAt = new DateTimeOffset(2025, 1, 1, 12, 0, 0, TimeSpan.Zero)
+ };
+
+ var result = new List();
+ await foreach (var update in CreateAsyncEnumerable(new[] { responseUpdate }).AsOpenAIStreamingChatCompletionUpdatesAsync())
+ {
+ result.Add(update);
+ }
+
+ Assert.Single(result);
+ var streamingUpdate = result[0];
+
+ Assert.Equal("response-123", streamingUpdate.CompletionId);
+ Assert.Equal("gpt-4", streamingUpdate.Model);
+ Assert.Equal(OpenAI.Chat.ChatFinishReason.Stop, streamingUpdate.FinishReason);
+ Assert.Equal(new DateTimeOffset(2025, 1, 1, 12, 0, 0, TimeSpan.Zero), streamingUpdate.CreatedAt);
+ Assert.Equal(ChatMessageRole.Assistant, streamingUpdate.Role);
+ Assert.Equal("Hello, world!", Assert.Single(streamingUpdate.ContentUpdate).Text);
+ }
+
+ [Fact]
+ public async Task AsOpenAIStreamingChatCompletionUpdatesAsync_WithUsageContent_CreatesUpdateWithUsage()
+ {
+ var responseUpdate = new ChatResponseUpdate
+ {
+ ResponseId = "response-123",
+ Contents =
+ [
+ new UsageContent(new UsageDetails
+ {
+ InputTokenCount = 10,
+ OutputTokenCount = 20,
+ TotalTokenCount = 30
+ })
+ ]
+ };
+
+ var result = new List();
+ await foreach (var update in CreateAsyncEnumerable(new[] { responseUpdate }).AsOpenAIStreamingChatCompletionUpdatesAsync())
+ {
+ result.Add(update);
+ }
+
+ Assert.Single(result);
+ var streamingUpdate = result[0];
+
+ Assert.Equal("response-123", streamingUpdate.CompletionId);
+ Assert.NotNull(streamingUpdate.Usage);
+ Assert.Equal(20, streamingUpdate.Usage.OutputTokenCount);
+ Assert.Equal(10, streamingUpdate.Usage.InputTokenCount);
+ Assert.Equal(30, streamingUpdate.Usage.TotalTokenCount);
+ }
+
+ [Fact]
+ public async Task AsOpenAIStreamingChatCompletionUpdatesAsync_WithFunctionCallContent_CreatesUpdateWithToolCalls()
+ {
+ var functionCallContent = new FunctionCallContent("call-123", "GetWeather", new Dictionary
+ {
+ ["location"] = "Seattle",
+ ["units"] = "celsius"
+ });
+
+ var responseUpdate = new ChatResponseUpdate(ChatRole.Assistant, [functionCallContent])
+ {
+ ResponseId = "response-123"
+ };
+
+ var result = new List();
+ await foreach (var update in CreateAsyncEnumerable(new[] { responseUpdate }).AsOpenAIStreamingChatCompletionUpdatesAsync())
+ {
+ result.Add(update);
+ }
+
+ Assert.Single(result);
+ var streamingUpdate = result[0];
+
+ Assert.Equal("response-123", streamingUpdate.CompletionId);
+ Assert.Single(streamingUpdate.ToolCallUpdates);
+
+ var toolCallUpdate = streamingUpdate.ToolCallUpdates[0];
+ Assert.Equal(0, toolCallUpdate.Index);
+ Assert.Equal("call-123", toolCallUpdate.ToolCallId);
+ Assert.Equal(ChatToolCallKind.Function, toolCallUpdate.Kind);
+ Assert.Equal("GetWeather", toolCallUpdate.FunctionName);
+
+ var deserializedArgs = JsonSerializer.Deserialize>(
+ toolCallUpdate.FunctionArgumentsUpdate.ToMemory().Span);
+ Assert.Equal("Seattle", deserializedArgs?["location"]?.ToString());
+ Assert.Equal("celsius", deserializedArgs?["units"]?.ToString());
+ }
+
+ [Fact]
+ public async Task AsOpenAIStreamingChatCompletionUpdatesAsync_WithMultipleFunctionCalls_CreatesCorrectIndexes()
+ {
+ var functionCall1 = new FunctionCallContent("call-1", "Function1", new Dictionary { ["param1"] = "value1" });
+ var functionCall2 = new FunctionCallContent("call-2", "Function2", new Dictionary { ["param2"] = "value2" });
+
+ var responseUpdate = new ChatResponseUpdate(ChatRole.Assistant, [functionCall1, functionCall2])
+ {
+ ResponseId = "response-123"
+ };
+
+ var result = new List();
+ await foreach (var update in CreateAsyncEnumerable(new[] { responseUpdate }).AsOpenAIStreamingChatCompletionUpdatesAsync())
+ {
+ result.Add(update);
+ }
+
+ Assert.Single(result);
+ var streamingUpdate = result[0];
+
+ Assert.Equal(2, streamingUpdate.ToolCallUpdates.Count);
+
+ Assert.Equal(0, streamingUpdate.ToolCallUpdates[0].Index);
+ Assert.Equal("call-1", streamingUpdate.ToolCallUpdates[0].ToolCallId);
+ Assert.Equal("Function1", streamingUpdate.ToolCallUpdates[0].FunctionName);
+
+ Assert.Equal(1, streamingUpdate.ToolCallUpdates[1].Index);
+ Assert.Equal("call-2", streamingUpdate.ToolCallUpdates[1].ToolCallId);
+ Assert.Equal("Function2", streamingUpdate.ToolCallUpdates[1].FunctionName);
+ }
+
+ [Fact]
+ public async Task AsOpenAIStreamingChatCompletionUpdatesAsync_WithMixedContent_IncludesAllContent()
+ {
+ var responseUpdate = new ChatResponseUpdate(ChatRole.Assistant,
+ [
+ new TextContent("Processing your request..."),
+ new FunctionCallContent("call-123", "GetWeather", new Dictionary { ["location"] = "Seattle" }),
+ new UsageContent(new UsageDetails { TotalTokenCount = 50 })
+ ])
+ {
+ ResponseId = "response-123",
+ ModelId = "gpt-4"
+ };
+
+ var result = new List();
+ await foreach (var update in CreateAsyncEnumerable(new[] { responseUpdate }).AsOpenAIStreamingChatCompletionUpdatesAsync())
+ {
+ result.Add(update);
+ }
+
+ Assert.Single(result);
+ var streamingUpdate = result[0];
+
+ Assert.Equal("response-123", streamingUpdate.CompletionId);
+ Assert.Equal("gpt-4", streamingUpdate.Model);
+
+ // Should have text content
+ Assert.Contains(streamingUpdate.ContentUpdate, c => c.Text == "Processing your request...");
+
+ // Should have tool call
+ Assert.Single(streamingUpdate.ToolCallUpdates);
+ Assert.Equal("call-123", streamingUpdate.ToolCallUpdates[0].ToolCallId);
+
+ // Should have usage
+ Assert.NotNull(streamingUpdate.Usage);
+ Assert.Equal(50, streamingUpdate.Usage.TotalTokenCount);
+ }
+
+ [Fact]
+ public async Task AsOpenAIStreamingChatCompletionUpdatesAsync_WithDifferentRoles_MapsCorrectly()
+ {
+ var testCases = new[]
+ {
+ (ChatRole.Assistant, ChatMessageRole.Assistant),
+ (ChatRole.User, ChatMessageRole.User),
+ (ChatRole.System, ChatMessageRole.System),
+ (ChatRole.Tool, ChatMessageRole.Tool)
+ };
+
+ foreach (var (inputRole, expectedOpenAIRole) in testCases)
+ {
+ var responseUpdate = new ChatResponseUpdate(inputRole, "Test message");
+
+ var result = new List();
+ await foreach (var update in CreateAsyncEnumerable(new[] { responseUpdate }).AsOpenAIStreamingChatCompletionUpdatesAsync())
+ {
+ result.Add(update);
+ }
+
+ Assert.Single(result);
+ Assert.Equal(expectedOpenAIRole, result[0].Role);
+ }
+ }
+
+ [Fact]
+ public async Task AsOpenAIStreamingChatCompletionUpdatesAsync_WithDifferentFinishReasons_MapsCorrectly()
+ {
+ var testCases = new[]
+ {
+ (ChatFinishReason.Stop, OpenAI.Chat.ChatFinishReason.Stop),
+ (ChatFinishReason.Length, OpenAI.Chat.ChatFinishReason.Length),
+ (ChatFinishReason.ContentFilter, OpenAI.Chat.ChatFinishReason.ContentFilter),
+ (ChatFinishReason.ToolCalls, OpenAI.Chat.ChatFinishReason.ToolCalls)
+ };
+
+ foreach (var (inputFinishReason, expectedOpenAIFinishReason) in testCases)
+ {
+ var responseUpdate = new ChatResponseUpdate(ChatRole.Assistant, "Test")
+ {
+ FinishReason = inputFinishReason
+ };
+
+ var result = new List();
+ await foreach (var update in CreateAsyncEnumerable(new[] { responseUpdate }).AsOpenAIStreamingChatCompletionUpdatesAsync())
+ {
+ result.Add(update);
+ }
+
+ Assert.Single(result);
+ Assert.Equal(expectedOpenAIFinishReason, result[0].FinishReason);
+ }
+ }
+
+ [Fact]
+ public async Task AsOpenAIStreamingChatCompletionUpdatesAsync_WithMultipleUpdates_ProcessesAllCorrectly()
+ {
+ var updates = new[]
+ {
+ new ChatResponseUpdate(ChatRole.Assistant, "Hello, ")
+ {
+ ResponseId = "response-123",
+ MessageId = "message-1"
+
+ // No FinishReason set - null
+ },
+ new ChatResponseUpdate(ChatRole.Assistant, "world!")
+ {
+ ResponseId = "response-123",
+ MessageId = "message-1",
+ FinishReason = ChatFinishReason.Stop
+ }
+ };
+
+ var result = new List();
+ await foreach (var update in CreateAsyncEnumerable(updates).AsOpenAIStreamingChatCompletionUpdatesAsync())
+ {
+ result.Add(update);
+ }
+
+ Assert.Equal(2, result.Count);
+
+ Assert.Equal("response-123", result[0].CompletionId);
+ Assert.Equal("Hello, ", Assert.Single(result[0].ContentUpdate).Text);
+
+ // The ToChatFinishReason method defaults null to Stop
+ Assert.Equal(OpenAI.Chat.ChatFinishReason.Stop, result[0].FinishReason);
+
+ Assert.Equal("response-123", result[1].CompletionId);
+ Assert.Equal("world!", Assert.Single(result[1].ContentUpdate).Text);
+ Assert.Equal(OpenAI.Chat.ChatFinishReason.Stop, result[1].FinishReason);
+ }
+
+ private static async IAsyncEnumerable CreateAsyncEnumerable(IEnumerable source)
+ {
+ foreach (var item in source)
+ {
+ await Task.Yield();
+ yield return item;
+ }
+ }
}