diff --git a/eng/packages/General.props b/eng/packages/General.props
index 51e3b1bbf76..bd1948f3d26 100644
--- a/eng/packages/General.props
+++ b/eng/packages/General.props
@@ -13,7 +13,7 @@
-
+
diff --git a/eng/packages/TestOnly.props b/eng/packages/TestOnly.props
index c4678a31b30..e9fa63dc4b6 100644
--- a/eng/packages/TestOnly.props
+++ b/eng/packages/TestOnly.props
@@ -2,7 +2,7 @@
-
+
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/CodeInterpreterTool.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/CodeInterpreterTool.cs
deleted file mode 100644
index 408810ca6f7..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/CodeInterpreterTool.cs
+++ /dev/null
@@ -1,17 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-namespace Microsoft.Extensions.AI;
-
-/// Represents a tool that can be specified to an AI service to enable it to execute code it generates.
-///
-/// This tool does not itself implement code interpretation. It is a marker that can be used to inform a service
-/// that the service is allowed to execute its generated code if the service is capable of doing so.
-///
-public class CodeInterpreterTool : AITool
-{
- /// Initializes a new instance of the class.
- public CodeInterpreterTool()
- {
- }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/HostedCodeInterpreterTool.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/HostedCodeInterpreterTool.cs
new file mode 100644
index 00000000000..6662fc420e3
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/HostedCodeInterpreterTool.cs
@@ -0,0 +1,17 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+namespace Microsoft.Extensions.AI;
+
+/// Represents a hosted tool that can be specified to an AI service to enable it to execute code it generates.
+///
+/// This tool does not itself implement code interpretation. It is a marker that can be used to inform a service
+/// that the service is allowed to execute its generated code if the service is capable of doing so.
+///
+public class HostedCodeInterpreterTool : AITool
+{
+ /// Initializes a new instance of the class.
+ public HostedCodeInterpreterTool()
+ {
+ }
+}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/HostedWebSearchTool.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/HostedWebSearchTool.cs
new file mode 100644
index 00000000000..06d11bf40ed
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/HostedWebSearchTool.cs
@@ -0,0 +1,17 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+namespace Microsoft.Extensions.AI;
+
+/// Represents a hosted tool that can be specified to an AI service to enable it to perform web searches.
+///
+/// This tool does not itself implement web searches. It is a marker that can be used to inform a service
+/// that the service is allowed to perform web searches if the service is capable of doing so.
+///
+public class HostedWebSearchTool : AITool
+{
+ /// Initializes a new instance of the class.
+ public HostedWebSearchTool()
+ {
+ }
+}
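
For context, a minimal usage sketch of these marker tools (not part of the diff): the helper and prompt below are illustrative, and any IChatClient adapter that recognizes hosted tools could be substituted.

using System;
using System.Threading.Tasks;
using Microsoft.Extensions.AI;

internal static class HostedToolsUsageSketch
{
    // The IChatClient instance is assumed to come from a provider adapter that understands
    // these marker tools, such as the OpenAI-based clients elsewhere in this PR.
    public static async Task RunAsync(IChatClient client)
    {
        ChatOptions options = new()
        {
            Tools =
            [
                new HostedCodeInterpreterTool(), // marker: the service may execute code it generates
                new HostedWebSearchTool(),       // marker: the service may perform web searches
            ],
        };

        ChatResponse response = await client.GetResponseAsync("Compute 2^20 and cite a source.", options);
        Console.WriteLine(response.Text);
    }
}
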
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantClient.cs
index 9aaad72ec3b..3dec5920e22 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantClient.cs
@@ -230,7 +230,7 @@ strictObj is bool strictValue ?
runOptions.ToolsOverride.Add(ToolDefinition.CreateFunction(aiFunction.Name, aiFunction.Description, functionParameters, strict));
break;
- case CodeInterpreterTool:
+ case HostedCodeInterpreterTool:
runOptions.ToolsOverride.Add(ToolDefinition.CreateCodeInterpreter());
break;
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs
index 49f78518015..483786a3174 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs
@@ -5,6 +5,7 @@
using OpenAI.Assistants;
using OpenAI.Chat;
using OpenAI.Embeddings;
+using OpenAI.Responses;
namespace Microsoft.Extensions.AI;
@@ -24,6 +25,12 @@ public static IChatClient AsChatClient(this OpenAIClient openAIClient, string mo
public static IChatClient AsChatClient(this ChatClient chatClient) =>
new OpenAIChatClient(chatClient);
+ /// <summary>Gets an <see cref="IChatClient"/> for use with this <see cref="OpenAIResponseClient"/>.</summary>
+ /// <param name="responseClient">The client.</param>
+ /// <returns>An <see cref="IChatClient"/> that can be used to converse via the <see cref="OpenAIResponseClient"/>.</returns>
+ public static IChatClient AsChatClient(this OpenAIResponseClient responseClient) =>
+ new OpenAIResponseChatClient(responseClient);
+
#pragma warning disable OPENAI001 // Type is for evaluation purposes only
/// <summary>Gets an <see cref="IChatClient"/> for use with this <see cref="AssistantClient"/>.</summary>
/// <param name="assistantClient">The client.</param>
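
A hedged consumption sketch for the new Responses-based adapter; the model name and API-key lookup are placeholders, and any OpenAIResponseClient can be wrapped the same way.

using System;
using System.Threading.Tasks;
using Microsoft.Extensions.AI;
using OpenAI;

internal static class ResponsesAsChatClientSketch
{
    public static async Task RunAsync()
    {
        // Placeholder credential and model id.
        OpenAIClient openAIClient = new(Environment.GetEnvironmentVariable("OPENAI_API_KEY")!);
        using IChatClient chatClient = openAIClient.GetOpenAIResponseClient("gpt-4o-mini").AsChatClient();

        ChatResponse response = await chatClient.GetResponseAsync("hello");
        Console.WriteLine(response.Text);
    }
}
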
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs
index fdee45ea96d..c16ad7fe543 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs
@@ -4,7 +4,6 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
-using System.Globalization;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
@@ -620,7 +619,7 @@ private static FunctionCallContent ParseCallContentFromBinaryData(BinaryData ut8
private static T? GetValueOrDefault<T>(this AdditionalPropertiesDictionary? dict, string key) =>
dict?.TryGetValue(key, out T? value) is true ? value : default;
- private static string CreateCompletionId() => $"chatcmpl-{Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture)}";
+ private static string CreateCompletionId() => $"chatcmpl-{Guid.NewGuid():N}";
/// Used to create the JSON payload for an OpenAI chat tool description.
public sealed class OpenAIChatToolJson
@@ -633,6 +632,9 @@ public sealed class OpenAIChatToolJson
[JsonPropertyName("properties")]
public Dictionary<string, JsonElement> Properties { get; set; } = [];
+
+ [JsonPropertyName("additionalProperties")]
+ public bool AdditionalProperties { get; set; }
}
/// POCO representing function calling info. Used to concatenate information for a single function call from across multiple streaming updates.
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponseChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponseChatClient.cs
new file mode 100644
index 00000000000..d54440902aa
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponseChatClient.cs
@@ -0,0 +1,585 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Text;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Shared.Diagnostics;
+using OpenAI.Responses;
+
+#pragma warning disable S1067 // Expressions should not be too complex
+#pragma warning disable S3011 // Reflection should not be used to increase accessibility of classes, methods, or fields
+#pragma warning disable SA1204 // Static elements should appear before instance elements
+#pragma warning disable SA1108 // Block statements should not contain embedded comments
+
+namespace Microsoft.Extensions.AI;
+
+/// <summary>Represents an <see cref="IChatClient"/> for an <see cref="OpenAIResponseClient"/>.</summary>
+internal sealed class OpenAIResponseChatClient : IChatClient
+{
+ /// Gets the default OpenAI endpoint.
+ internal static Uri DefaultOpenAIEndpoint { get; } = new("https://api.openai.com/v1");
+
+ /// A for "developer".
+ private static readonly ChatRole _chatRoleDeveloper = new("developer");
+
+ /// Cached for the string "none".
+ private static readonly BinaryData _none = BinaryData.FromBytes("\"none\""u8.ToArray());
+
+ /// Cached for the string "auto".
+ private static readonly BinaryData _auto = BinaryData.FromBytes("\"auto\""u8.ToArray());
+
+ /// Cached for the string "required".
+ private static readonly BinaryData _required = BinaryData.FromBytes("\"required\""u8.ToArray());
+
+ /// Metadata about the client.
+ private readonly ChatClientMetadata _metadata;
+
+ /// <summary>The underlying <see cref="OpenAIResponseClient"/>.</summary>
+ private readonly OpenAIResponseClient _responseClient;
+
+ /// <summary>The <see cref="JsonSerializerOptions"/> to use for any serialization activities related to tool call arguments and results.</summary>
+ private JsonSerializerOptions _toolCallJsonSerializerOptions = AIJsonUtilities.DefaultOptions;
+
+ /// <summary>Initializes a new instance of the <see cref="OpenAIResponseChatClient"/> class for the specified <see cref="OpenAIResponseClient"/>.</summary>
+ /// <param name="responseClient">The underlying client.</param>
+ /// <exception cref="ArgumentNullException"><paramref name="responseClient"/> is <see langword="null"/>.</exception>
+ public OpenAIResponseChatClient(OpenAIResponseClient responseClient)
+ {
+ _ = Throw.IfNull(responseClient);
+
+ _responseClient = responseClient;
+
+ // https://github.com/openai/openai-dotnet/issues/215
+ // The endpoint and model aren't currently exposed, so use reflection to get at them, temporarily. Once packages
+ // implement the abstractions directly rather than providing adapters on top of the public APIs,
+ // the package can provide such implementations separate from what's exposed in the public API.
+ Uri providerUrl = typeof(OpenAIResponseClient).GetField("_endpoint", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance)
+ ?.GetValue(responseClient) as Uri ?? DefaultOpenAIEndpoint;
+ string? model = typeof(OpenAIResponseClient).GetField("_model", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance)
+ ?.GetValue(responseClient) as string;
+
+ _metadata = new("openai", providerUrl, model);
+ }
+
+ /// <summary>Gets or sets <see cref="JsonSerializerOptions"/> to use for any serialization activities related to tool call arguments and results.</summary>
+ public JsonSerializerOptions ToolCallJsonSerializerOptions
+ {
+ get => _toolCallJsonSerializerOptions;
+ set => _toolCallJsonSerializerOptions = Throw.IfNull(value);
+ }
+
+ /// <inheritdoc />
+ object? IChatClient.GetService(Type serviceType, object? serviceKey)
+ {
+ _ = Throw.IfNull(serviceType);
+
+ return
+ serviceKey is not null ? null :
+ serviceType == typeof(ChatClientMetadata) ? _metadata :
+ serviceType == typeof(OpenAIResponseClient) ? _responseClient :
+ serviceType.IsInstanceOfType(this) ? this :
+ null;
+ }
+
+ /// <inheritdoc />
+ public async Task<ChatResponse> GetResponseAsync(
+ IEnumerable<ChatMessage> messages, ChatOptions? options = null, CancellationToken cancellationToken = default)
+ {
+ _ = Throw.IfNull(messages);
+
+ // Convert the inputs into what OpenAIResponseClient expects.
+ var openAIResponseItems = ToOpenAIResponseItems(messages, ToolCallJsonSerializerOptions);
+ var openAIOptions = ToOpenAIResponseCreationOptions(options);
+
+ // Make the call to the OpenAIResponseClient.
+ var openAIResponse = (await _responseClient.CreateResponseAsync(openAIResponseItems, openAIOptions, cancellationToken).ConfigureAwait(false)).Value;
+
+ // Convert and return the results.
+ ChatResponse response = new()
+ {
+ ResponseId = openAIResponse.Id,
+ CreatedAt = openAIResponse.CreatedAt,
+ FinishReason = ToFinishReason(openAIResponse.IncompleteStatusDetails?.Reason),
+ Messages = [new(ChatRole.Assistant, [])],
+ ModelId = openAIResponse.Model,
+ Usage = ToUsageDetails(openAIResponse),
+ };
+
+ if (!string.IsNullOrEmpty(openAIResponse.EndUserId))
+ {
+ (response.AdditionalProperties ??= [])[nameof(openAIResponse.EndUserId)] = openAIResponse.EndUserId;
+ }
+
+ if (openAIResponse.Error is not null)
+ {
+ (response.AdditionalProperties ??= [])[nameof(openAIResponse.Error)] = openAIResponse.Error;
+ }
+
+ if (openAIResponse.OutputItems is not null)
+ {
+ ChatMessage message = response.Messages[0];
+ Debug.Assert(message.Contents is List<AIContent>, "Expected a List<AIContent> for message contents.");
+
+ foreach (ResponseItem outputItem in openAIResponse.OutputItems)
+ {
+ switch (outputItem)
+ {
+ case MessageResponseItem messageItem:
+ message.RawRepresentation = messageItem;
+ message.Role = ToChatRole(messageItem.Role);
+ (message.AdditionalProperties ??= []).Add(nameof(messageItem.Id), messageItem.Id);
+ ((List<AIContent>)message.Contents).AddRange(ToAIContents(messageItem.Content));
+ break;
+
+ case FunctionCallResponseItem functionCall:
+ response.FinishReason ??= ChatFinishReason.ToolCalls;
+ message.Contents.Add(
+ FunctionCallContent.CreateFromParsedArguments(
+ functionCall.FunctionArguments.ToMemory(),
+ functionCall.CallId,
+ functionCall.FunctionName,
+ static json => JsonSerializer.Deserialize(json.Span, OpenAIJsonContext.Default.IDictionaryStringObject)!));
+ break;
+ }
+ }
+ }
+
+ return response;
+ }
+
+ /// <inheritdoc />
+ public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
+ IEnumerable<ChatMessage> messages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ _ = Throw.IfNull(messages);
+
+ // Convert the inputs into what OpenAIResponseClient expects.
+ var openAIResponseItems = ToOpenAIResponseItems(messages, ToolCallJsonSerializerOptions);
+ var openAIOptions = ToOpenAIResponseCreationOptions(options);
+
+ // Make the call to the OpenAIResponseClient and process the streaming results.
+ Dictionary<int, FunctionCallInfo>? functionCallInfos = null;
+ DateTimeOffset? createdAt = null;
+ string? responseId = null;
+ string? modelId = null;
+ ChatRole? role = null;
+ ChatFinishReason? finishReason = null;
+ UsageDetails? usage = null;
+ await foreach (var streamingUpdate in _responseClient.CreateResponseStreamingAsync(openAIResponseItems, openAIOptions, cancellationToken).ConfigureAwait(false))
+ {
+ // Handle metadata updates about the overall response.
+ if (streamingUpdate is StreamingResponseStatusUpdate statusUpdate)
+ {
+ createdAt ??= statusUpdate.Response.CreatedAt;
+ responseId ??= statusUpdate.Response.Id;
+ modelId ??= statusUpdate.Response.Model;
+ finishReason ??= ToFinishReason(statusUpdate.Response?.IncompleteStatusDetails?.Reason);
+ usage ??= ToUsageDetails(statusUpdate.Response);
+ continue;
+ }
+
+ if (streamingUpdate is StreamingResponseItemUpdate itemUpdate)
+ {
+ // Handle metadata updates about the message.
+ if (itemUpdate.Item is MessageResponseItem messageItem)
+ {
+ role ??= ToChatRole(messageItem.Role);
+ continue;
+ }
+
+ // Handle function call updates (name/id). Arguments come as part of content.
+ if (itemUpdate.Item is FunctionCallResponseItem functionCallItem)
+ {
+ functionCallInfos ??= [];
+ if (!functionCallInfos.TryGetValue(itemUpdate.ItemIndex, out FunctionCallInfo? callInfo))
+ {
+ functionCallInfos[itemUpdate.ItemIndex] = callInfo = new();
+ }
+
+ callInfo.CallId = functionCallItem.CallId;
+ callInfo.Name = functionCallItem.FunctionName;
+ continue;
+ }
+ }
+
+ // Handle content updates.
+ if (streamingUpdate is StreamingResponseContentPartDeltaUpdate contentUpdate)
+ {
+ // Update our knowledge of function call requests.
+ if (contentUpdate.FunctionArguments is string argsUpdate)
+ {
+ functionCallInfos ??= [];
+ if (!functionCallInfos.TryGetValue(contentUpdate.ItemIndex, out FunctionCallInfo? callInfo))
+ {
+ functionCallInfos[contentUpdate.ItemIndex] = callInfo = new();
+ }
+
+ _ = (callInfo.Arguments ??= new()).Append(argsUpdate);
+ }
+
+ // If there's any text content, return it.
+ if (!string.IsNullOrEmpty(contentUpdate.Text))
+ {
+ yield return new(role, contentUpdate.Text)
+ {
+ CreatedAt = createdAt,
+ ModelId = modelId,
+ RawRepresentation = streamingUpdate,
+ ResponseId = responseId,
+ };
+ }
+
+ continue;
+ }
+ }
+
+ // Now that we've received all updates and yielded all content,
+ // yield a final update with any remaining information.
+ ChatResponseUpdate update = new()
+ {
+ ResponseId = responseId,
+ CreatedAt = createdAt,
+ FinishReason = finishReason ?? (functionCallInfos is not null ? ChatFinishReason.ToolCalls : ChatFinishReason.Stop),
+ ModelId = modelId,
+ Role = role,
+ };
+
+ if (usage is not null)
+ {
+ update.Contents.Add(new UsageContent(usage));
+ }
+
+ if (functionCallInfos is not null)
+ {
+ foreach (var entry in functionCallInfos)
+ {
+ FunctionCallInfo fci = entry.Value;
+ if (!string.IsNullOrWhiteSpace(fci.Name))
+ {
+ update.Contents.Add(
+ FunctionCallContent.CreateFromParsedArguments(
+ fci.Arguments?.ToString() ?? string.Empty,
+ fci.CallId ?? string.Empty,
+ fci.Name!,
+ static json => JsonSerializer.Deserialize(json, OpenAIJsonContext.Default.IDictionaryStringObject)!));
+ }
+ }
+ }
+
+ yield return update;
+ }
+
+ /// <inheritdoc />
+ void IDisposable.Dispose()
+ {
+ // Nothing to dispose. Implementation required for the IChatClient interface.
+ }
+
+ /// <summary>Creates a <see cref="ChatRole"/> from a <see cref="MessageRole"/>.</summary>
+ private static ChatRole ToChatRole(MessageRole? role) =>
+ role == MessageRole.System ? ChatRole.System :
+ role == MessageRole.Developer ? _chatRoleDeveloper :
+ role == MessageRole.User ? ChatRole.User :
+ ChatRole.Assistant;
+
+ /// <summary>Creates a <see cref="ChatFinishReason"/> from a <see cref="ResponseIncompleteStatusReason"/>.</summary>
+ private static ChatFinishReason? ToFinishReason(ResponseIncompleteStatusReason? statusReason) =>
+ statusReason == ResponseIncompleteStatusReason.ContentFilter ? ChatFinishReason.ContentFilter :
+ statusReason == ResponseIncompleteStatusReason.MaxOutputTokens ? ChatFinishReason.Length :
+ null;
+
+ /// <summary>Converts a <see cref="ChatOptions"/> to a <see cref="ResponseCreationOptions"/>.</summary>
+ private static ResponseCreationOptions ToOpenAIResponseCreationOptions(ChatOptions? options)
+ {
+ ResponseCreationOptions result = new();
+
+ if (options is not null)
+ {
+ // Handle strongly-typed properties.
+ result.MaxOutputTokenCount = options.MaxOutputTokens;
+ result.PreviousResponseId = options.ChatThreadId;
+ result.TopP = options.TopP;
+ result.Temperature = options.Temperature;
+
+ // Handle loosely-typed properties from AdditionalProperties.
+ if (options.AdditionalProperties is { Count: > 0 } additionalProperties)
+ {
+ if (additionalProperties.TryGetValue(nameof(result.AllowParallelToolCalls), out bool allowParallelToolCalls))
+ {
+ result.AllowParallelToolCalls = allowParallelToolCalls;
+ }
+
+ if (additionalProperties.TryGetValue(nameof(result.EndUserId), out string? endUserId))
+ {
+ result.EndUserId = endUserId;
+ }
+
+ if (additionalProperties.TryGetValue(nameof(result.Instructions), out string? instructions))
+ {
+ result.Instructions = instructions;
+ }
+
+ if (additionalProperties.TryGetValue(nameof(result.Metadata), out IDictionary<string, string>? metadata))
+ {
+ foreach (KeyValuePair<string, string> kvp in metadata)
+ {
+ result.Metadata[kvp.Key] = kvp.Value;
+ }
+ }
+
+ if (additionalProperties.TryGetValue(nameof(result.ReasoningOptions), out ResponseReasoningOptions? reasoningOptions))
+ {
+ result.ReasoningOptions = reasoningOptions;
+ }
+
+ if (additionalProperties.TryGetValue(nameof(result.StoredOutputEnabled), out bool storeOutputEnabled))
+ {
+ result.StoredOutputEnabled = storeOutputEnabled;
+ }
+
+ if (additionalProperties.TryGetValue(nameof(result.TruncationMode), out ResponseTruncationMode truncationMode))
+ {
+ result.TruncationMode = truncationMode;
+ }
+ }
+
+ // Populate tools if there are any.
+ if (options.Tools is { Count: > 0 } tools)
+ {
+ foreach (AITool tool in tools)
+ {
+ switch (tool)
+ {
+ case AIFunction af:
+ var oaitool = JsonSerializer.Deserialize(af.JsonSchema, OpenAIJsonContext.Default.OpenAIChatToolJson)!;
+ var functionParameters = BinaryData.FromBytes(JsonSerializer.SerializeToUtf8Bytes(oaitool, OpenAIJsonContext.Default.OpenAIChatToolJson));
+ result.Tools.Add(ResponseTool.CreateFunctionTool(af.Name, af.Description, functionParameters));
+ break;
+
+ case HostedWebSearchTool:
+ WebSearchToolLocation? location = null;
+ if (tool.AdditionalProperties.TryGetValue(nameof(WebSearchToolLocation), out object? objLocation))
+ {
+ location = objLocation as WebSearchToolLocation;
+ }
+
+ WebSearchToolContextSize? size = null;
+ if (tool.AdditionalProperties.TryGetValue(nameof(WebSearchToolContextSize), out object? objSize) &&
+ objSize is WebSearchToolContextSize)
+ {
+ size = (WebSearchToolContextSize)objSize;
+ }
+
+ result.Tools.Add(ResponseTool.CreateWebSearchTool(location, size));
+ break;
+ }
+ }
+
+ switch (options.ToolMode)
+ {
+ case NoneChatToolMode:
+ result.ToolChoice = _none;
+ break;
+
+ case AutoChatToolMode:
+ case null:
+ result.ToolChoice = _auto;
+ break;
+
+ case RequiredChatToolMode required:
+ result.ToolChoice = required.RequiredFunctionName is not null ?
+ BinaryData.FromString($$"""{"type":"function","name":"{{required.RequiredFunctionName}}"}""") :
+ _required;
+ break;
+ }
+ }
+
+ // Handle response format.
+ if (options.ResponseFormat is ChatResponseFormatText)
+ {
+ result.TextOptions.ResponseFormat = ResponseTextFormat.CreateTextFormat();
+ }
+ else if (options.ResponseFormat is ChatResponseFormatJson jsonFormat)
+ {
+ result.TextOptions.ResponseFormat = jsonFormat.Schema is { } jsonSchema ?
+ ResponseTextFormat.CreateJsonSchemaFormat(
+ jsonFormat.SchemaName ?? "json_schema",
+ BinaryData.FromBytes(JsonSerializer.SerializeToUtf8Bytes(jsonSchema, OpenAIJsonContext.Default.JsonElement)),
+ jsonFormat.SchemaDescription) :
+ ResponseTextFormat.CreateJsonObjectFormat();
+ }
+ }
+
+ return result;
+ }
+
+ /// <summary>Convert a sequence of <see cref="ChatMessage"/>s to <see cref="ResponseItem"/>s.</summary>
+ private static IEnumerable<ResponseItem> ToOpenAIResponseItems(
+ IEnumerable<ChatMessage> inputs, JsonSerializerOptions options)
+ {
+ foreach (ChatMessage input in inputs)
+ {
+ if (input.Role == ChatRole.System ||
+ input.Role == _chatRoleDeveloper)
+ {
+ string text = input.Text;
+ if (!string.IsNullOrWhiteSpace(text))
+ {
+ yield return input.Role == ChatRole.System ?
+ ResponseItem.CreateSystemMessageItem(text) :
+ ResponseItem.CreateDeveloperMessageItem(text);
+ }
+
+ continue;
+ }
+
+ if (input.Role == ChatRole.User)
+ {
+ yield return ResponseItem.CreateUserMessageItem(ToOpenAIResponsesContent(input.Contents));
+ continue;
+ }
+
+ if (input.Role == ChatRole.Tool)
+ {
+ foreach (AIContent item in input.Contents)
+ {
+ switch (item)
+ {
+ case FunctionResultContent resultContent:
+ string? result = resultContent.Result as string;
+ if (result is null && resultContent.Result is not null)
+ {
+ try
+ {
+ result = JsonSerializer.Serialize(resultContent.Result, options.GetTypeInfo(typeof(object)));
+ }
+ catch (NotSupportedException)
+ {
+ // If the type can't be serialized, skip it.
+ }
+ }
+
+ yield return ResponseItem.CreateFunctionCallOutputItem(resultContent.CallId, result ?? string.Empty);
+ break;
+ }
+ }
+
+ continue;
+ }
+
+ if (input.Role == ChatRole.Assistant)
+ {
+ foreach (AIContent item in input.Contents)
+ {
+ switch (item)
+ {
+ case TextContent textContent:
+ yield return ResponseItem.CreateAssistantMessageItem(
+ "msg_ignored",
+ textContent.Text);
+ break;
+
+ case FunctionCallContent callContent:
+ yield return ResponseItem.CreateFunctionCall(
+ "msg_ignored",
+ callContent.CallId,
+ callContent.Name,
+ BinaryData.FromBytes(JsonSerializer.SerializeToUtf8Bytes(
+ callContent.Arguments,
+ options.GetTypeInfo(typeof(IDictionary<string, object?>)))));
+ break;
+ }
+ }
+
+ continue;
+ }
+ }
+ }
+
+ /// <summary>Extract usage details from an <see cref="OpenAIResponse"/>.</summary>
+ private static UsageDetails? ToUsageDetails(OpenAIResponse? openAIResponse)
+ {
+ UsageDetails? ud = null;
+ if (openAIResponse?.Usage is { } usage)
+ {
+ ud = new()
+ {
+ InputTokenCount = usage.InputTokenCount,
+ OutputTokenCount = usage.OutputTokenCount,
+ TotalTokenCount = usage.TotalTokenCount,
+ };
+
+ if (usage.OutputTokenDetails is { } outputDetails)
+ {
+ ud.AdditionalCounts ??= [];
+
+ const string OutputDetails = nameof(usage.OutputTokenDetails);
+ ud.AdditionalCounts.Add($"{OutputDetails}.{nameof(outputDetails.ReasoningTokenCount)}", outputDetails.ReasoningTokenCount);
+ }
+ }
+
+ return ud;
+ }
+
+ /// <summary>Convert a sequence of <see cref="ResponseContentPart"/>s to a list of <see cref="AIContent"/>.</summary>
+ private static List<AIContent> ToAIContents(IEnumerable<ResponseContentPart> contents)
+ {
+ List<AIContent> results = [];
+
+ foreach (ResponseContentPart part in contents)
+ {
+ if (part.Kind == ResponseContentPartKind.OutputText)
+ {
+ results.Add(new TextContent(part.Text));
+ }
+ }
+
+ return results;
+ }
+
+ /// <summary>Convert a list of <see cref="AIContent"/>s to a list of <see cref="ResponseContentPart"/>.</summary>
+ private static List<ResponseContentPart> ToOpenAIResponsesContent(IList<AIContent> contents)
+ {
+ List<ResponseContentPart> parts = [];
+ foreach (var content in contents)
+ {
+ switch (content)
+ {
+ case TextContent textContent:
+ parts.Add(ResponseContentPart.CreateInputTextPart(textContent.Text));
+ break;
+
+ case UriContent uriContent when uriContent.HasTopLevelMediaType("image"):
+ parts.Add(ResponseContentPart.CreateInputImagePart(uriContent.Uri));
+ break;
+
+ case DataContent dataContent when dataContent.HasTopLevelMediaType("image"):
+ parts.Add(ResponseContentPart.CreateInputImagePart(BinaryData.FromBytes(dataContent.Data), dataContent.MediaType));
+ break;
+ }
+ }
+
+ if (parts.Count == 0)
+ {
+ parts.Add(ResponseContentPart.CreateInputTextPart(string.Empty));
+ }
+
+ return parts;
+ }
+
+ /// POCO representing function calling info.
+ /// Used to concatenate information for a single function call from across multiple streaming updates.
+ private sealed class FunctionCallInfo
+ {
+ public string? CallId;
+ public string? Name;
+ public StringBuilder? Arguments;
+ }
+}
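
As the ToOpenAIResponseCreationOptions mapping above implies, Responses-specific settings flow in through ChatOptions; the sketch below is illustrative only (the thread id and property values are placeholders), with AdditionalProperties keys matching the nameof-based lookups in that method.

using Microsoft.Extensions.AI;

internal static class ResponseOptionsMappingSketch
{
    public static ChatOptions Create() => new()
    {
        MaxOutputTokens = 200,
        Temperature = 0.5f,
        ChatThreadId = "resp_123", // forwarded as ResponseCreationOptions.PreviousResponseId
        AdditionalProperties = new()
        {
            // Keys are matched via nameof(...) against ResponseCreationOptions properties.
            ["Instructions"] = "Answer briefly.",
            ["StoredOutputEnabled"] = false,
        },
    };
}
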
diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/HostedCodeInterpreterToolTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/HostedCodeInterpreterToolTests.cs
new file mode 100644
index 00000000000..f69ffc5b399
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/HostedCodeInterpreterToolTests.cs
@@ -0,0 +1,19 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using Xunit;
+
+namespace Microsoft.Extensions.AI;
+
+public class HostedCodeInterpreterToolTests
+{
+ [Fact]
+ public void Constructor_Roundtrips()
+ {
+ var tool = new HostedCodeInterpreterTool();
+ Assert.Equal(nameof(HostedCodeInterpreterTool), tool.Name);
+ Assert.Empty(tool.Description);
+ Assert.Empty(tool.AdditionalProperties);
+ Assert.Equal(nameof(HostedCodeInterpreterTool), tool.ToString());
+ }
+}
diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/CodeInterpreterToolTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/HostedWebSearchToolTests.cs
similarity index 62%
rename from test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/CodeInterpreterToolTests.cs
rename to test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/HostedWebSearchToolTests.cs
index 3bf9f568e96..4b03cbb0031 100644
--- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/CodeInterpreterToolTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/HostedWebSearchToolTests.cs
@@ -5,15 +5,15 @@
namespace Microsoft.Extensions.AI;
-public class CodeInterpreterToolTests
+public class HostedWebSearchToolTests
{
[Fact]
public void Constructor_Roundtrips()
{
- var tool = new CodeInterpreterTool();
- Assert.Equal(nameof(CodeInterpreterTool), tool.Name);
+ var tool = new HostedWebSearchTool();
+ Assert.Equal(nameof(HostedWebSearchTool), tool.Name);
Assert.Empty(tool.Description);
Assert.Empty(tool.AdditionalProperties);
- Assert.Equal(nameof(CodeInterpreterTool), tool.ToString());
+ Assert.Equal(nameof(HostedWebSearchTool), tool.ToString());
}
}
diff --git a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/VerbatimHttpHandler.cs b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/VerbatimHttpHandler.cs
index 14ba68feb7a..8b5f1973348 100644
--- a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/VerbatimHttpHandler.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/VerbatimHttpHandler.cs
@@ -1,38 +1,97 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
+using System;
using System.Net.Http;
+using System.Text;
+using System.Text.Json.Nodes;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
+#pragma warning disable CA2000 // Dispose objects before losing scope
+#pragma warning disable CA2016 // Forward the 'CancellationToken' parameter to methods
+#pragma warning disable CA1031 // Do not catch general exception types
+#pragma warning disable S108 // Nested blocks of code should not be left empty
+
namespace Microsoft.Extensions.AI;
/// <summary>
/// An <see cref="HttpMessageHandler"/> that checks the request body against an expected one
/// and sends back an expected response.
/// </summary>
-public sealed class VerbatimHttpHandler(string expectedInput, string sentOutput) : HttpMessageHandler
+public sealed class VerbatimHttpHandler(string expectedInput, string expectedOutput, bool validateExpectedResponse = false) :
+ DelegatingHandler(new HttpClientHandler())
{
protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
Assert.NotNull(request.Content);
- string? input = await request.Content
-#if NET
- .ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
-#else
- .ReadAsStringAsync().ConfigureAwait(false);
-#endif
+ string? actualInput = await request.Content.ReadAsStringAsync().ConfigureAwait(false);
+
+ Assert.NotNull(actualInput);
+ AssertEqualNormalized(expectedInput, actualInput);
+
+ if (validateExpectedResponse)
+ {
+ ByteArrayContent newContent = new(Encoding.UTF8.GetBytes(actualInput));
+ foreach (var header in request.Content.Headers)
+ {
+ newContent.Headers.TryAddWithoutValidation(header.Key, header.Value);
+ }
- Assert.NotNull(input);
- Assert.Equal(RemoveWhiteSpace(expectedInput), RemoveWhiteSpace(input));
+ request.Content = newContent;
- return new() { Content = new StringContent(sentOutput) };
+ using var response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false);
+ string? actualOutput = await response.Content.ReadAsStringAsync().ConfigureAwait(false);
+
+ Assert.NotNull(actualOutput);
+ AssertEqualNormalized(expectedOutput, actualOutput);
+ }
+
+ return new() { Content = new StringContent(expectedOutput) };
}
public static string? RemoveWhiteSpace(string? text) =>
text is null ? null :
Regex.Replace(text, @"\s*", string.Empty);
+
+ private static void AssertEqualNormalized(string expected, string actual)
+ {
+ // First try to compare as JSON.
+ JsonNode? expectedNode = null;
+ JsonNode? actualNode = null;
+ try
+ {
+ expectedNode = JsonNode.Parse(expected);
+ actualNode = JsonNode.Parse(actual);
+ }
+ catch
+ {
+ }
+
+ if (expectedNode is not null && actualNode is not null)
+ {
+ if (!JsonNode.DeepEquals(expectedNode, actualNode))
+ {
+ FailNotEqual(expected, actual);
+ }
+
+ return;
+ }
+
+ // Legitimately may not have been JSON. Fall back to whitespace normalization.
+ if (RemoveWhiteSpace(expected) != RemoveWhiteSpace(actual))
+ {
+ FailNotEqual(expected, actual);
+ }
+ }
+
+ private static void FailNotEqual(string expected, string actual) =>
+ Assert.Fail(
+ $"Expected:{Environment.NewLine}" +
+ $"{expected}{Environment.NewLine}" +
+ $"Actual:{Environment.NewLine}" +
+ $"{actual}");
}
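
The new validateExpectedResponse flag replays the captured request through the inner HttpClientHandler and asserts that the live response body also matches the expected output, while the canned output is still what's returned to the caller. A rough opt-in sketch follows; the URL and payloads are placeholders, and the test is marked skipped since it would require network access.

using System.Net.Http;
using System.Threading.Tasks;
using Xunit;

namespace Microsoft.Extensions.AI;

public class VerbatimHttpHandlerUsageSketch
{
    [Fact(Skip = "Illustrative only; requires a reachable endpoint")]
    public async Task ForwardsAndValidatesWhenRequested()
    {
        const string ExpectedRequest = """{"ping":true}""";
        const string ExpectedResponse = """{"pong":true}""";

        // With validateExpectedResponse: true, the handler re-sends the request and compares the
        // live body against ExpectedResponse in addition to validating the outgoing request.
        using VerbatimHttpHandler handler = new(ExpectedRequest, ExpectedResponse, validateExpectedResponse: true);
        using HttpClient httpClient = new(handler);

        using var response = await httpClient.PostAsync("https://example.com/echo", new StringContent(ExpectedRequest));
        Assert.Equal(ExpectedResponse, await response.Content.ReadAsStringAsync());
    }
}
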
diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs
index 8cd53c55766..8a3e158041e 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs
@@ -388,7 +388,7 @@ public async Task MultipleMessages_NonStreaming()
},
{
"role": "user",
- "content": "i\u0027m good. how are you?"
+ "content": "i'm good. how are you?"
}
],
"model": "gpt-4o-mini",
@@ -701,7 +701,8 @@ public async Task FunctionCallContent_NonStreaming()
"description": "The person whose age is being requested",
"type": "string"
}
- }
+ },
+ "additionalProperties": false
}
},
"type": "function"
@@ -820,7 +821,8 @@ public async Task FunctionCallContent_Streaming()
"description": "The person whose age is being requested",
"type": "string"
}
- }
+ },
+ "additionalProperties": false
}
},
"type": "function"
diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs
new file mode 100644
index 00000000000..c114dc77555
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs
@@ -0,0 +1,296 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.ClientModel;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Linq;
+using System.Net.Http;
+using System.Threading.Tasks;
+using Azure.AI.OpenAI;
+using Microsoft.Extensions.Caching.Distributed;
+using Microsoft.Extensions.Caching.Memory;
+using OpenAI;
+using OpenAI.Responses;
+using Xunit;
+
+#pragma warning disable S103 // Lines should not be too long
+
+namespace Microsoft.Extensions.AI;
+
+public class OpenAIResponseClientTests
+{
+ [Fact]
+ public void AsChatClient_InvalidArgs_Throws()
+ {
+ Assert.Throws("openAIClient", () => ((OpenAIClient)null!).AsChatClient("model"));
+ Assert.Throws("responseClient", () => ((OpenAIResponseClient)null!).AsChatClient());
+
+ OpenAIClient client = new("key");
+ Assert.Throws("modelId", () => client.AsChatClient(null!));
+ Assert.Throws("modelId", () => client.AsChatClient(" "));
+ }
+
+ [Theory]
+ [InlineData(false)]
+ [InlineData(true)]
+ public void AsChatClient_ProducesExpectedMetadata(bool useAzureOpenAI)
+ {
+ Uri endpoint = new("http://localhost/some/endpoint");
+ string model = "amazingModel";
+
+ var client = useAzureOpenAI ?
+ new AzureOpenAIClient(endpoint, new ApiKeyCredential("key")) :
+ new OpenAIClient(new ApiKeyCredential("key"), new OpenAIClientOptions { Endpoint = endpoint });
+
+ IChatClient chatClient = client.GetOpenAIResponseClient(model).AsChatClient();
+ var metadata = chatClient.GetService<ChatClientMetadata>();
+ Assert.Equal("openai", metadata?.ProviderName);
+ Assert.Equal(endpoint, metadata?.ProviderUri);
+ Assert.Equal(model, metadata?.ModelId);
+ }
+
+ [Fact]
+ public void GetService_SuccessfullyReturnsUnderlyingClient()
+ {
+ OpenAIResponseClient openAIClient = new OpenAIClient(new ApiKeyCredential("key")).GetOpenAIResponseClient("model");
+ IChatClient chatClient = openAIClient.AsChatClient();
+
+ Assert.Same(chatClient, chatClient.GetService<IChatClient>());
+ Assert.Same(openAIClient, chatClient.GetService<OpenAIResponseClient>());
+
+ using IChatClient pipeline = chatClient
+ .AsBuilder()
+ .UseFunctionInvocation()
+ .UseOpenTelemetry()
+ .UseDistributedCache(new MemoryDistributedCache(Options.Options.Create(new MemoryDistributedCacheOptions())))
+ .Build();
+
+ Assert.NotNull(pipeline.GetService<FunctionInvokingChatClient>());
+ Assert.NotNull(pipeline.GetService<DistributedCachingChatClient>());
+ Assert.NotNull(pipeline.GetService<CachingChatClient>());
+ Assert.NotNull(pipeline.GetService<OpenTelemetryChatClient>());
+
+ Assert.Same(openAIClient, pipeline.GetService<OpenAIResponseClient>());
+ Assert.IsType<FunctionInvokingChatClient>(pipeline.GetService<IChatClient>());
+ }
+
+ [Fact]
+ public async Task BasicRequestResponse_NonStreaming()
+ {
+ const string Input = """
+ {
+ "temperature":0.5,
+ "model":"gpt-4o-mini",
+ "input": [{
+ "type":"message",
+ "role":"user",
+ "content":[{"type":"input_text","text":"hello"}]
+ }],
+ "max_output_tokens":20,
+ "truncation":"auto"
+ }
+ """;
+
+ const string Output = """
+ {
+ "id": "resp_67d327649b288191aeb46a824e49dc40058a5e08c46a181d",
+ "object": "response",
+ "created_at": 1741891428,
+ "status": "completed",
+ "error": null,
+ "incomplete_details": null,
+ "instructions": null,
+ "max_output_tokens": 20,
+ "model": "gpt-4o-mini-2024-07-18",
+ "output": [
+ {
+ "type": "message",
+ "id": "msg_67d32764fcdc8191bcf2e444d4088804058a5e08c46a181d",
+ "status": "completed",
+ "role": "assistant",
+ "content": [
+ {
+ "type": "output_text",
+ "text": "Hello! How can I assist you today?",
+ "annotations": []
+ }
+ ]
+ }
+ ],
+ "parallel_tool_calls": true,
+ "previous_response_id": null,
+ "reasoning": {
+ "effort": null,
+ "generate_summary": null
+ },
+ "store": true,
+ "temperature": 0.5,
+ "text": {
+ "format": {
+ "type": "text"
+ }
+ },
+ "tool_choice": "auto",
+ "tools": [],
+ "top_p": 1.0,
+ "truncation": "auto",
+ "usage": {
+ "input_tokens": 26,
+ "input_tokens_details": {
+ "cached_tokens": 0
+ },
+ "output_tokens": 10,
+ "output_tokens_details": {
+ "reasoning_tokens": 0
+ },
+ "total_tokens": 36
+ },
+ "user": null,
+ "metadata": {}
+ }
+ """;
+
+ using VerbatimHttpHandler handler = new(Input, Output);
+ using HttpClient httpClient = new(handler);
+ using IChatClient client = CreateResponseClient(httpClient, "gpt-4o-mini");
+
+ var response = await client.GetResponseAsync("hello", new()
+ {
+ MaxOutputTokens = 20,
+ Temperature = 0.5f,
+ });
+ Assert.NotNull(response);
+
+ Assert.Equal("resp_67d327649b288191aeb46a824e49dc40058a5e08c46a181d", response.ResponseId);
+ Assert.Equal("Hello! How can I assist you today?", response.Text);
+ Assert.Single(response.Messages.Single().Contents);
+ Assert.Equal(ChatRole.Assistant, response.Messages.Single().Role);
+ Assert.Equal("gpt-4o-mini-2024-07-18", response.ModelId);
+ Assert.Equal(DateTimeOffset.FromUnixTimeSeconds(1_741_891_428), response.CreatedAt);
+ Assert.Null(response.FinishReason);
+
+ Assert.NotNull(response.Usage);
+ Assert.Equal(26, response.Usage.InputTokenCount);
+ Assert.Equal(10, response.Usage.OutputTokenCount);
+ Assert.Equal(36, response.Usage.TotalTokenCount);
+ }
+
+ [Fact]
+ public async Task BasicRequestResponse_Streaming()
+ {
+ const string Input = """
+ {
+ "temperature":0.5,
+ "model":"gpt-4o-mini",
+ "input":[
+ {
+ "type":"message",
+ "role":"user",
+ "content":[{"type":"input_text","text":"hello"}]
+ }
+ ],
+ "stream":true,
+ "max_output_tokens":20,
+ "truncation":"auto"
+ }
+ """;
+
+ const string Output = """
+ event: response.created
+ data: {"type":"response.created","response":{"id":"resp_67d329fbc87c81919f8952fe71dafc96029dabe3ee19bb77","object":"response","created_at":1741892091,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":20,"model":"gpt-4o-mini-2024-07-18","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":0.5,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"truncation":"auto","usage":null,"user":null,"metadata":{}}}
+
+ event: response.in_progress
+ data: {"type":"response.in_progress","response":{"id":"resp_67d329fbc87c81919f8952fe71dafc96029dabe3ee19bb77","object":"response","created_at":1741892091,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":20,"model":"gpt-4o-mini-2024-07-18","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":0.5,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"truncation":"auto","usage":null,"user":null,"metadata":{}}}
+
+ event: response.output_item.added
+ data: {"type":"response.output_item.added","output_index":0,"item":{"type":"message","id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","status":"in_progress","role":"assistant","content":[]}}
+
+ event: response.content_part.added
+ data: {"type":"response.content_part.added","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"part":{"type":"output_text","text":"","annotations":[]}}
+
+ event: response.output_text.delta
+ data: {"type":"response.output_text.delta","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"delta":"Hello"}
+
+ event: response.output_text.delta
+ data: {"type":"response.output_text.delta","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"delta":"!"}
+
+ event: response.output_text.delta
+ data: {"type":"response.output_text.delta","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"delta":" How"}
+
+ event: response.output_text.delta
+ data: {"type":"response.output_text.delta","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"delta":" can"}
+
+ event: response.output_text.delta
+ data: {"type":"response.output_text.delta","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"delta":" I"}
+
+ event: response.output_text.delta
+ data: {"type":"response.output_text.delta","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"delta":" assist"}
+
+ event: response.output_text.delta
+ data: {"type":"response.output_text.delta","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"delta":" you"}
+
+ event: response.output_text.delta
+ data: {"type":"response.output_text.delta","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"delta":" today"}
+
+ event: response.output_text.delta
+ data: {"type":"response.output_text.delta","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"delta":"?"}
+
+ event: response.output_text.done
+ data: {"type":"response.output_text.done","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"text":"Hello! How can I assist you today?"}
+
+ event: response.content_part.done
+ data: {"type":"response.content_part.done","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"part":{"type":"output_text","text":"Hello! How can I assist you today?","annotations":[]}}
+
+ event: response.output_item.done
+ data: {"type":"response.output_item.done","output_index":0,"item":{"type":"message","id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","status":"completed","role":"assistant","content":[{"type":"output_text","text":"Hello! How can I assist you today?","annotations":[]}]}}
+
+ event: response.completed
+ data: {"type":"response.completed","response":{"id":"resp_67d329fbc87c81919f8952fe71dafc96029dabe3ee19bb77","object":"response","created_at":1741892091,"status":"completed","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":20,"model":"gpt-4o-mini-2024-07-18","output":[{"type":"message","id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","status":"completed","role":"assistant","content":[{"type":"output_text","text":"Hello! How can I assist you today?","annotations":[]}]}],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":0.5,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"truncation":"auto","usage":{"input_tokens":26,"input_tokens_details":{"cached_tokens":0},"output_tokens":10,"output_tokens_details":{"reasoning_tokens":0},"total_tokens":36},"user":null,"metadata":{}}}
+
+
+ """;
+
+ using VerbatimHttpHandler handler = new(Input, Output);
+ using HttpClient httpClient = new(handler);
+ using IChatClient client = CreateResponseClient(httpClient, "gpt-4o-mini");
+
+ List<ChatResponseUpdate> updates = [];
+ await foreach (var update in client.GetStreamingResponseAsync("hello", new()
+ {
+ MaxOutputTokens = 20,
+ Temperature = 0.5f,
+ }))
+ {
+ updates.Add(update);
+ }
+
+ Assert.Equal("Hello! How can I assist you today?", string.Concat(updates.Select(u => u.Text)));
+
+ var createdAt = DateTimeOffset.FromUnixTimeSeconds(1_741_892_091);
+ Assert.Equal(10, updates.Count);
+ for (int i = 0; i < updates.Count; i++)
+ {
+ Assert.Equal("resp_67d329fbc87c81919f8952fe71dafc96029dabe3ee19bb77", updates[i].ResponseId);
+ Assert.Equal(createdAt, updates[i].CreatedAt);
+ Assert.Equal("gpt-4o-mini-2024-07-18", updates[i].ModelId);
+ Assert.Equal(ChatRole.Assistant, updates[i].Role);
+ Assert.Null(updates[i].AdditionalProperties);
+ Assert.Equal(i == 10 ? 0 : 1, updates[i].Contents.Count);
+ Assert.Equal(i < updates.Count - 1 ? null : ChatFinishReason.Stop, updates[i].FinishReason);
+ }
+
+ UsageContent usage = updates.SelectMany(u => u.Contents).OfType<UsageContent>().Single();
+ Assert.Equal(26, usage.Details.InputTokenCount);
+ Assert.Equal(10, usage.Details.OutputTokenCount);
+ Assert.Equal(36, usage.Details.TotalTokenCount);
+ }
+
+ private static IChatClient CreateResponseClient(HttpClient httpClient, string modelId) =>
+ new OpenAIClient(
+ new ApiKeyCredential("apikey"),
+ new OpenAIClientOptions { Transport = new HttpClientPipelineTransport(httpClient) })
+ .GetOpenAIResponseClient(modelId)
+ .AsChatClient();
+}
diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAISerializationTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAISerializationTests.cs
index 752e44dc388..3cc42ff0473 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAISerializationTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAISerializationTests.cs
@@ -544,6 +544,7 @@ public static async Task SerializeResponse()
"type": "function"
}
],
+ "annotations":[],
"role": "assistant",
"content": "Hello! How can I assist you today?"
},
@@ -726,6 +727,9 @@ private partial class JsonContextWithoutFunctionArgument : JsonSerializerContext
private static void AssertJsonEqual(string expected, string actual)
{
+ expected = NormalizeNewLines(expected);
+ actual = NormalizeNewLines(actual);
+
JsonNode? expectedNode = JsonNode.Parse(expected);
JsonNode? actualNode = JsonNode.Parse(actual);
@@ -735,7 +739,7 @@ private static void AssertJsonEqual(string expected, string actual)
// normal form strings for better reporting.
expected = expectedNode?.ToJsonString() ?? "null";
actual = actualNode?.ToJsonString() ?? "null";
- Assert.Equal(expected.NormalizeNewLines(), actual.NormalizeNewLines());
+ Assert.Fail($"Expected:{Environment.NewLine}{expected}{Environment.NewLine}Actual:{Environment.NewLine}{actual}");
}
}