@@ -34,7 +34,7 @@ public ChatClientMetadata(string? providerName = null, Uri? providerUri = null,

/// <summary>Gets the ID of the default model used by this chat client.</summary>
/// <remarks>
/// This value can be null if no default model is set on the corresponding <see cref="IChatClient"/>.
/// This value can be <see langword="null"/> if no default model is set on the corresponding <see cref="IChatClient"/>.
/// An individual request may override this value via <see cref="ChatOptions.ModelId"/>.
/// </remarks>
public string? DefaultModelId { get; }
@@ -14,12 +14,12 @@ namespace Microsoft.Extensions.AI;
[JsonConverter(typeof(Converter))]
public readonly struct ChatFinishReason : IEquatable<ChatFinishReason>
{
/// <summary>The finish reason value. If null because `default(ChatFinishReason)` was used, the instance will behave like <see cref="Stop"/>.</summary>
/// <summary>The finish reason value. If <see langword="null"/> because `default(ChatFinishReason)` was used, the instance will behave like <see cref="Stop"/>.</summary>
private readonly string? _value;

/// <summary>Initializes a new instance of the <see cref="ChatFinishReason"/> struct with a string that describes the reason.</summary>
/// <param name="value">The reason value.</param>
/// <exception cref="ArgumentNullException"><paramref name="value"/> is null.</exception>
/// <exception cref="ArgumentNullException"><paramref name="value"/> is <see langword="null"/>.</exception>
/// <exception cref="ArgumentException"><paramref name="value"/> is empty or composed entirely of whitespace.</exception>
[JsonConstructor]
public ChatFinishReason(string value)
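The `default(ChatFinishReason)` note above is easiest to see in code. A minimal sketch of the documented behavior, relying only on the `Stop` singleton and the `IEquatable<ChatFinishReason>` implementation this file describes:

```csharp
using Microsoft.Extensions.AI;

// default(ChatFinishReason) leaves _value null; per the doc comment above,
// such an instance behaves like Stop, so equality with Stop is expected to hold.
ChatFinishReason reason = default;
Console.WriteLine(reason.Equals(ChatFinishReason.Stop)); // True

// The [JsonConstructor] path validates its input:
// new ChatFinishReason(null!)  -> ArgumentNullException
// new ChatFinishReason("   ")  -> ArgumentException
var length = new ChatFinishReason("length");
```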
@@ -62,7 +62,7 @@ public class ChatOptions
/// Gets or sets the response format for the chat request.
/// </summary>
/// <remarks>
/// If null, no response format is specified and the client will use its default.
/// If <see langword="null"/>, no response format is specified and the client will use its default.
/// This property can be set to <see cref="ChatResponseFormat.Text"/> to specify that the response should be unstructured text,
/// to <see cref="ChatResponseFormat.Json"/> to specify that the response should be structured JSON data, or
/// an instance of <see cref="ChatResponseFormatJson"/> constructed with a specific JSON schema to request that the
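The remarks above enumerate the three ways to populate `ResponseFormat`. An illustrative sketch: the `Text`/`Json` singletons are named in the remarks, while the schema text and the `ChatResponseFormatJson` parameter order are assumptions based on the constructor shown in the next hunk.

```csharp
using System.Text.Json;
using Microsoft.Extensions.AI;

var options = new ChatOptions();

options.ResponseFormat = ChatResponseFormat.Text; // unstructured text
options.ResponseFormat = ChatResponseFormat.Json; // structured JSON, no schema

// Structured JSON constrained by a schema (schema content is made up for illustration).
JsonElement schema = JsonDocument.Parse(
    """{"type":"object","properties":{"answer":{"type":"string"}},"required":["answer"]}""").RootElement;
options.ResponseFormat = new ChatResponseFormatJson(schema, "Answer", "A single answer string.");
```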
@@ -32,7 +32,7 @@ public ChatResponseFormatJson(
SchemaDescription = schemaDescription;
}

/// <summary>Gets the JSON schema associated with the response, or null if there is none.</summary>
/// <summary>Gets the JSON schema associated with the response, or <see langword="null"/> if there is none.</summary>
public JsonElement? Schema { get; }

/// <summary>Gets a name for the schema.</summary>
@@ -54,7 +54,7 @@ public ChatRole(string value)
/// </summary>
/// <param name="left">The first <see cref="ChatRole"/> instance to compare.</param>
/// <param name="right">The second <see cref="ChatRole"/> instance to compare.</param>
/// <returns><see langword="true"/> if left and right are both null or have equivalent values; otherwise, <see langword="false"/>.</returns>
/// <returns><see langword="true"/> if left and right are both <see langword="null"/> or have equivalent values; otherwise, <see langword="false"/>.</returns>
public static bool operator ==(ChatRole left, ChatRole right)
{
return left.Equals(right);
@@ -66,7 +66,7 @@ public ChatRole(string value)
/// </summary>
/// <param name="left">The first <see cref="ChatRole"/> instance to compare. </param>
/// <param name="right">The second <see cref="ChatRole"/> instance to compare. </param>
/// <returns><see langword="true"/> if left and right have different values; <see langword="false"/> if they have equivalent values or are both null.</returns>
/// <returns><see langword="true"/> if left and right have different values; <see langword="false"/> if they have equivalent values or are both <see langword="null"/>.</returns>
public static bool operator !=(ChatRole left, ChatRole right)
{
return !(left == right);
@@ -110,7 +110,7 @@ public DataContent([StringSyntax(StringSyntaxAttribute.Uri)] string uri, string?
/// </summary>
/// <param name="data">The byte contents.</param>
/// <param name="mediaType">The media type (also known as MIME type) represented by the content.</param>
/// <exception cref="ArgumentNullException"><paramref name="mediaType"/> is null.</exception>
/// <exception cref="ArgumentNullException"><paramref name="mediaType"/> is <see langword="null"/>.</exception>
/// <exception cref="ArgumentException"><paramref name="mediaType"/> is empty or composed entirely of whitespace.</exception>
public DataContent(ReadOnlyMemory<byte> data, string mediaType)
{
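For reference, a minimal sketch of the byte-based constructor documented above; the file path and media type are placeholders.

```csharp
using Microsoft.Extensions.AI;

byte[] bytes = File.ReadAllBytes("diagram.png");  // placeholder path
var image = new DataContent(bytes, "image/png");  // byte[] converts implicitly to ReadOnlyMemory<byte>

// A null mediaType throws ArgumentNullException; an empty or whitespace-only one throws ArgumentException.
```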
@@ -36,14 +36,14 @@ public EmbeddingGeneratorMetadata(string? providerName = null, Uri? providerUri

/// <summary>Gets the ID of the default model used by this embedding generator.</summary>
/// <remarks>
/// This value can be null if no default model is set on the corresponding embedding generator.
/// This value can be <see langword="null"/> if no default model is set on the corresponding embedding generator.
/// An individual request may override this value via <see cref="EmbeddingGenerationOptions.ModelId"/>.
/// </remarks>
public string? DefaultModelId { get; }

/// <summary>Gets the number of dimensions in the embeddings produced by the default model.</summary>
/// <remarks>
/// This value can be null if either the number of dimensions is unknown or there are multiple possible lengths associated with this model.
/// This value can be <see langword="null"/> if either the number of dimensions is unknown or there are multiple possible lengths associated with this model.
/// An individual request may override this value via <see cref="EmbeddingGenerationOptions.Dimensions"/>.
/// </remarks>
public int? DefaultModelDimensions { get; }
@@ -22,32 +22,29 @@
namespace Microsoft.Extensions.AI;

/// <summary>Represents an <see cref="IChatClient"/> for an Azure AI Inference <see cref="ChatCompletionsClient"/>.</summary>
public sealed class AzureAIInferenceChatClient : IChatClient
internal sealed class AzureAIInferenceChatClient : IChatClient
{
/// <summary>Metadata about the client.</summary>
private readonly ChatClientMetadata _metadata;

/// <summary>The underlying <see cref="ChatCompletionsClient" />.</summary>
private readonly ChatCompletionsClient _chatCompletionsClient;

/// <summary>The <see cref="JsonSerializerOptions"/> use for any serialization activities related to tool call arguments and results.</summary>
private JsonSerializerOptions _toolCallJsonSerializerOptions = AIJsonUtilities.DefaultOptions;

/// <summary>Gets a ChatRole.Developer value.</summary>
private static ChatRole ChatRoleDeveloper { get; } = new("developer");

/// <summary>Initializes a new instance of the <see cref="AzureAIInferenceChatClient"/> class for the specified <see cref="ChatCompletionsClient"/>.</summary>
/// <param name="chatCompletionsClient">The underlying client.</param>
/// <param name="modelId">The ID of the model to use. If null, it can be provided per request via <see cref="ChatOptions.ModelId"/>.</param>
/// <param name="defaultModelId">The ID of the model to use. If <see langword="null"/>, it can be provided per request via <see cref="ChatOptions.ModelId"/>.</param>
/// <exception cref="ArgumentNullException"><paramref name="chatCompletionsClient"/> is <see langword="null"/>.</exception>
/// <exception cref="ArgumentNullException"><paramref name="modelId"/> is empty or composed entirely of whitespace.</exception>
public AzureAIInferenceChatClient(ChatCompletionsClient chatCompletionsClient, string? modelId = null)
/// <exception cref="ArgumentNullException"><paramref name="defaultModelId"/> is empty or composed entirely of whitespace.</exception>
public AzureAIInferenceChatClient(ChatCompletionsClient chatCompletionsClient, string? defaultModelId = null)
{
_ = Throw.IfNull(chatCompletionsClient);

if (modelId is not null)
if (defaultModelId is not null)
{
_ = Throw.IfNullOrWhitespace(modelId);
_ = Throw.IfNullOrWhitespace(defaultModelId);
}

_chatCompletionsClient = chatCompletionsClient;
@@ -59,14 +56,7 @@ public AzureAIInferenceChatClient(ChatCompletionsClient chatCompletionsClient, s
var providerUrl = typeof(ChatCompletionsClient).GetField("_endpoint", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance)
?.GetValue(chatCompletionsClient) as Uri;

_metadata = new ChatClientMetadata("az.ai.inference", providerUrl, modelId);
}

/// <summary>Gets or sets <see cref="JsonSerializerOptions"/> to use for any serialization activities related to tool call arguments and results.</summary>
public JsonSerializerOptions ToolCallJsonSerializerOptions
{
get => _toolCallJsonSerializerOptions;
set => _toolCallJsonSerializerOptions = Throw.IfNull(value);
_metadata = new ChatClientMetadata("az.ai.inference", providerUrl, defaultModelId);
}

/// <inheritdoc />
@@ -324,7 +314,7 @@ private ChatCompletionsOptions ToAzureAIOptions(IEnumerable<ChatMessage> chatCon
default:
if (prop.Value is not null)
{
byte[] data = JsonSerializer.SerializeToUtf8Bytes(prop.Value, ToolCallJsonSerializerOptions.GetTypeInfo(typeof(object)));
byte[] data = JsonSerializer.SerializeToUtf8Bytes(prop.Value, AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(object)));
result.AdditionalProperties[prop.Key] = new BinaryData(data);
}

@@ -413,7 +403,7 @@ private static ChatCompletionsToolDefinition ToAzureAIChatTool(AIFunction aiFunc
}

/// <summary>Converts an Extensions chat message enumerable to an AzureAI chat message enumerable.</summary>
private IEnumerable<ChatRequestMessage> ToAzureAIInferenceChatMessages(IEnumerable<ChatMessage> inputs)
private static IEnumerable<ChatRequestMessage> ToAzureAIInferenceChatMessages(IEnumerable<ChatMessage> inputs)
{
// Maps all of the M.E.AI types to the corresponding AzureAI types.
// Unrecognized or non-processable content is ignored.
@@ -439,7 +429,7 @@ private IEnumerable<ChatRequestMessage> ToAzureAIInferenceChatMessages(IEnumerab
{
try
{
result = JsonSerializer.Serialize(resultContent.Result, ToolCallJsonSerializerOptions.GetTypeInfo(typeof(object)));
result = JsonSerializer.Serialize(resultContent.Result, AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(object)));
}
catch (NotSupportedException)
{
@@ -482,7 +472,7 @@ private IEnumerable<ChatRequestMessage> ToAzureAIInferenceChatMessages(IEnumerab
callRequest.CallId,
new FunctionCall(
callRequest.Name,
JsonSerializer.Serialize(callRequest.Arguments, ToolCallJsonSerializerOptions.GetTypeInfo(typeof(IDictionary<string, object>))))));
JsonSerializer.Serialize(callRequest.Arguments, AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(IDictionary<string, object>))))));
}
}

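With `ToolCallJsonSerializerOptions` removed, tool-call arguments and results now flow through the shared `AIJsonUtilities.DefaultOptions` contract, as the updated `Serialize` calls above show. A minimal sketch of that serialization path; the argument dictionary is illustrative.

```csharp
using System.Text.Json;
using Microsoft.Extensions.AI;

// Mirrors the updated calls above: serialize an arbitrary argument dictionary
// using the shared default options' contract for IDictionary<string, object>.
var arguments = new Dictionary<string, object?> { ["city"] = "Paris", ["units"] = "metric" };

string json = JsonSerializer.Serialize(
    arguments,
    AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(IDictionary<string, object>)));
// json: {"city":"Paris","units":"metric"}
```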
@@ -22,7 +22,7 @@
namespace Microsoft.Extensions.AI;

/// <summary>Represents an <see cref="IEmbeddingGenerator{String, Embedding}"/> for an Azure.AI.Inference <see cref="EmbeddingsClient"/>.</summary>
public sealed class AzureAIInferenceEmbeddingGenerator :
internal sealed class AzureAIInferenceEmbeddingGenerator :
IEmbeddingGenerator<string, Embedding<float>>
{
/// <summary>Metadata about the embedding generator.</summary>
@@ -36,31 +36,31 @@ public sealed class AzureAIInferenceEmbeddingGenerator :

/// <summary>Initializes a new instance of the <see cref="AzureAIInferenceEmbeddingGenerator"/> class.</summary>
/// <param name="embeddingsClient">The underlying client.</param>
/// <param name="modelId">
/// <param name="defaultModelId">
/// The ID of the model to use. This can also be overridden per request via <see cref="EmbeddingGenerationOptions.ModelId"/>.
/// Either this parameter or <see cref="EmbeddingGenerationOptions.ModelId"/> must provide a valid model ID.
/// </param>
/// <param name="dimensions">The number of dimensions to generate in each embedding.</param>
/// <param name="defaultModelDimensions">The number of dimensions to generate in each embedding.</param>
/// <exception cref="ArgumentNullException"><paramref name="embeddingsClient"/> is <see langword="null"/>.</exception>
/// <exception cref="ArgumentException"><paramref name="modelId"/> is empty or composed entirely of whitespace.</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="dimensions"/> is not positive.</exception>
/// <exception cref="ArgumentException"><paramref name="defaultModelId"/> is empty or composed entirely of whitespace.</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="defaultModelDimensions"/> is not positive.</exception>
public AzureAIInferenceEmbeddingGenerator(
EmbeddingsClient embeddingsClient, string? modelId = null, int? dimensions = null)
EmbeddingsClient embeddingsClient, string? defaultModelId = null, int? defaultModelDimensions = null)
{
_ = Throw.IfNull(embeddingsClient);

if (modelId is not null)
if (defaultModelId is not null)
{
_ = Throw.IfNullOrWhitespace(modelId);
_ = Throw.IfNullOrWhitespace(defaultModelId);
}

if (dimensions is < 1)
if (defaultModelDimensions is < 1)
{
Throw.ArgumentOutOfRangeException(nameof(dimensions), "Value must be greater than 0.");
Throw.ArgumentOutOfRangeException(nameof(defaultModelDimensions), "Value must be greater than 0.");
}

_embeddingsClient = embeddingsClient;
_dimensions = dimensions;
_dimensions = defaultModelDimensions;

// https://github.com/Azure/azure-sdk-for-net/issues/46278
// The endpoint isn't currently exposed, so use reflection to get at it, temporarily. Once packages
@@ -69,7 +69,7 @@ public AzureAIInferenceEmbeddingGenerator(
var providerUrl = typeof(EmbeddingsClient).GetField("_endpoint", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance)
?.GetValue(embeddingsClient) as Uri;

_metadata = new EmbeddingGeneratorMetadata("az.ai.inference", providerUrl, modelId, dimensions);
_metadata = new EmbeddingGeneratorMetadata("az.ai.inference", providerUrl, defaultModelId, defaultModelDimensions);
}

/// <inheritdoc />
@@ -10,18 +10,18 @@ public static class AzureAIInferenceExtensions
{
/// <summary>Gets an <see cref="IChatClient"/> for use with this <see cref="ChatCompletionsClient"/>.</summary>
/// <param name="chatCompletionsClient">The client.</param>
/// <param name="modelId">The ID of the model to use. If null, it can be provided per request via <see cref="ChatOptions.ModelId"/>.</param>
/// <param name="modelId">The ID of the model to use. If <see langword="null"/>, it can be provided per request via <see cref="ChatOptions.ModelId"/>.</param>
/// <returns>An <see cref="IChatClient"/> that can be used to converse via the <see cref="ChatCompletionsClient"/>.</returns>
public static IChatClient AsChatClient(
public static IChatClient AsIChatClient(
this ChatCompletionsClient chatCompletionsClient, string? modelId = null) =>
new AzureAIInferenceChatClient(chatCompletionsClient, modelId);

/// <summary>Gets an <see cref="IEmbeddingGenerator{String, Single}"/> for use with this <see cref="EmbeddingsClient"/>.</summary>
/// <param name="embeddingsClient">The client.</param>
/// <param name="modelId">The ID of the model to use. If null, it can be provided per request via <see cref="ChatOptions.ModelId"/>.</param>
/// <param name="dimensions">The number of dimensions to generate in each embedding.</param>
/// <param name="defaultModelId">The ID of the model to use. If <see langword="null"/>, it can be provided per request via <see cref="ChatOptions.ModelId"/>.</param>
/// <param name="defaultModelDimensions">The number of dimensions generated in each embedding.</param>
/// <returns>An <see cref="IEmbeddingGenerator{String, Embedding}"/> that can be used to generate embeddings via the <see cref="EmbeddingsClient"/>.</returns>
public static IEmbeddingGenerator<string, Embedding<float>> AsEmbeddingGenerator(
this EmbeddingsClient embeddingsClient, string? modelId = null, int? dimensions = null) =>
new AzureAIInferenceEmbeddingGenerator(embeddingsClient, modelId, dimensions);
public static IEmbeddingGenerator<string, Embedding<float>> AsIEmbeddingGenerator(
this EmbeddingsClient embeddingsClient, string? defaultModelId = null, int? defaultModelDimensions = null) =>
new AzureAIInferenceEmbeddingGenerator(embeddingsClient, defaultModelId, defaultModelDimensions);
}
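Taken together, these changes alter how callers obtain the Azure AI Inference adapters: the concrete classes are now internal, so the `AsIChatClient`/`AsIEmbeddingGenerator` extension methods are the entry points. A hedged usage sketch follows; the endpoint, key, and model IDs are placeholders, and the `GetResponseAsync` call assumes the `IChatClient` surface from the same release wave (earlier previews named it `CompleteAsync`).

```csharp
using Azure;
using Azure.AI.Inference;
using Microsoft.Extensions.AI;

var endpoint = new Uri("https://example.services.ai.azure.com/models"); // placeholder
var credential = new AzureKeyCredential("<api-key>");                   // placeholder

// Chat: wrap the Azure SDK client as an IChatClient, with an optional default model ID.
IChatClient chatClient =
    new ChatCompletionsClient(endpoint, credential).AsIChatClient("my-chat-model");
ChatResponse response = await chatClient.GetResponseAsync("What is an embedding?");

// Embeddings: wrap EmbeddingsClient as an IEmbeddingGenerator with a default model ID.
IEmbeddingGenerator<string, Embedding<float>> generator =
    new EmbeddingsClient(endpoint, credential).AsIEmbeddingGenerator("my-embedding-model");
var embeddings = await generator.GenerateAsync(new[] { "What is an embedding?" });
```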