From f4a001f327ddac9c919b8fef8a0c5f90f09fa5fe Mon Sep 17 00:00:00 2001 From: Azure SDK Bot <53356347+azure-sdk@users.noreply.github.com> Date: Tue, 17 Dec 2024 06:58:25 -0800 Subject: [PATCH 1/6] Update package index with latest published versions (#43982) --- docs/azure/includes/dotnet-all.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/azure/includes/dotnet-all.md b/docs/azure/includes/dotnet-all.md index 82267109c47ed..2d5a6150a9ba1 100644 --- a/docs/azure/includes/dotnet-all.md +++ b/docs/azure/includes/dotnet-all.md @@ -366,8 +366,8 @@ | Microsoft.Azure.DataFactoryTestingFramework.Expressions | NuGet [0.2.7](https://www.nuget.org/packages/Microsoft.Azure.DataFactoryTestingFramework.Expressions/0.2.7) | | | | Microsoft.Azure.Functions.Worker.OpenTelemetry | NuGet [1.1.0-preview6](https://www.nuget.org/packages/Microsoft.Azure.Functions.Worker.OpenTelemetry/1.1.0-preview6) | | | | System Net Client Model | NuGet [1.0.0-beta.1](https://www.nuget.org/packages/System.Net.ClientModel/1.0.0-beta.1) | | | -| Unknown Display Name | NuGet [1.0.1-beta.3](https://www.nuget.org/packages/Azure.AI.ContentSafety.Extension.Embedded.Image/1.0.1-beta.3) | | | -| Unknown Display Name | NuGet [1.0.0](https://www.nuget.org/packages/Azure.AI.ContentSafety.Extension.Embedded.Text/1.0.0)
NuGet [1.0.1-beta.3](https://www.nuget.org/packages/Azure.AI.ContentSafety.Extension.Embedded.Text/1.0.1-beta.3) | | | +| Unknown Display Name | NuGet [1.0.1-beta.4](https://www.nuget.org/packages/Azure.AI.ContentSafety.Extension.Embedded.Image/1.0.1-beta.4) | | | +| Unknown Display Name | NuGet [1.0.0](https://www.nuget.org/packages/Azure.AI.ContentSafety.Extension.Embedded.Text/1.0.0)
NuGet [1.0.1-beta.4](https://www.nuget.org/packages/Azure.AI.ContentSafety.Extension.Embedded.Text/1.0.1-beta.4) | | | | Unknown Display Name | NuGet [1.0.0-beta.1](https://www.nuget.org/packages/Azure.Maps.TimeZones/1.0.0-beta.1) | | | | Unknown Display Name | NuGet [1.0.0-beta.0](https://www.nuget.org/packages/Microsoft.Azure.Cosmos.FaultInjection/1.0.0-beta.0) | | | | Unknown Display Name | NuGet [1.0.4-preview](https://www.nuget.org/packages/Microsoft.Azure.Functions.Worker.Extensions.MySql/1.0.4-preview) | | | From ab89a0ec842367025801935406eed89f6c043ea5 Mon Sep 17 00:00:00 2001 From: Bill Wagner Date: Tue, 17 Dec 2024 12:14:10 -0500 Subject: [PATCH 2/6] Remove the nested stackalloc proposal (#43986) * Remove the nested stackalloc proposal The official standard language for this feature was merged in the December ECMA committee meeting. It's time to drop the proposal speclet. * remove outdated link * test build --- .openpublishing.redirection.csharp.json | 4 ++++ docfx.json | 3 +-- docs/csharp/language-reference/operators/stackalloc.md | 2 +- docs/csharp/specification/toc.yml | 2 -- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.openpublishing.redirection.csharp.json b/.openpublishing.redirection.csharp.json index b03dbc61de154..db99fd814d2ae 100644 --- a/.openpublishing.redirection.csharp.json +++ b/.openpublishing.redirection.csharp.json @@ -112,6 +112,10 @@ "source_path_from_root": "/redirections/proposals/csharp-8.0/nullable-reference-types.md", "redirect_url": "/dotnet/csharp/language-reference/language-specification/types.md#893-nullable-reference-types" }, + { + "source_path_from_root": "/_csharplang/proposals/csharp-8.0/nested-stackalloc.md", + "redirect_url": "/dotnet/csharp/language-reference/language-specification/structs#164127-stackalloc" + }, { "source_path_from_root": "/redirections/proposals/csharp-9.0/nullable-reference-types-specification.md", "redirect_url": 
"/dotnet/csharp/language-reference/language-specification/types.md#893-nullable-reference-types" diff --git a/docfx.json b/docfx.json index 4535375787c4e..d2568775f7205 100644 --- a/docfx.json +++ b/docfx.json @@ -73,6 +73,7 @@ "csharp-8.0/unconstrained-null-coalescing.md", "csharp-8.0/nullable-reference-types.md", "csharp-8.0/nullable-reference-types-specification.md", + "csharp-8.0/nested-stackalloc.md", "csharp-9.0/nullable-reference-types-specification.md", "csharp-9.0/nullable-constructor-analysis.md", "csharp-9.0/nullable-parameter-default-value-analysis.md" @@ -613,7 +614,6 @@ "_csharplang/proposals/csharp-8.0/using.md": "Pattern based using and using declarations", "_csharplang/proposals/csharp-8.0/null-coalescing-assignment.md": "Null coalescing assignment", "_csharplang/proposals/csharp-8.0/readonly-instance-members.md": "Readonly instance members", - "_csharplang/proposals/csharp-8.0/nested-stackalloc.md": "Nested stackalloc expressions", "_csharplang/proposals/csharp-9.0/covariant-returns.md": "Covariant return types", "_csharplang/proposals/csharp-9.0/extending-partial-methods.md": "Extending partial methods", "_csharplang/proposals/csharp-9.0/extension-getenumerator.md": "Extension GetEnumerator support in foreach", @@ -734,7 +734,6 @@ "_csharplang/proposals/csharp-8.0/using.md": "This feature specification supports pattern based using and using declarations to simplify resource cleanup.", "_csharplang/proposals/csharp-8.0/null-coalescing-assignment.md": "This feature specification describes the syntax to support null coalescing assignment expressions using the '??=' operator.", "_csharplang/proposals/csharp-8.0/readonly-instance-members.md": "This feature specification describes the syntax for declaring and using readonly instance members.", - "_csharplang/proposals/csharp-8.0/nested-stackalloc.md": "This feature specification describes nested stackalloc expressions, which provides nested arrays of stackalloc storage.", 
"_csharplang/proposals/csharp-9.0/covariant-returns.md": "This feature specification describes covariant return types, where overriding member declarations can return a type derived from the overridden member declaration.", "_csharplang/proposals/csharp-9.0/extending-partial-methods.md": "This feature specification describes extensions to partial methods. These extensions enable source generators to create or call partial methods.", "_csharplang/proposals/csharp-9.0/extension-getenumerator.md": "This feature specification describes the necessary pattern for an extension method to provide the GetEnumerator support in a foreach loop.", diff --git a/docs/csharp/language-reference/operators/stackalloc.md b/docs/csharp/language-reference/operators/stackalloc.md index 90aadd7b05185..edec7417fa8e9 100644 --- a/docs/csharp/language-reference/operators/stackalloc.md +++ b/docs/csharp/language-reference/operators/stackalloc.md @@ -66,7 +66,7 @@ The use of `stackalloc` automatically enables buffer overrun detection features ## C# language specification -For more information, see the [Stack allocation](~/_csharpstandard/standard/unsafe-code.md#239-stack-allocation) section of the [C# language specification](~/_csharpstandard/standard/README.md) and the [Permit `stackalloc` in nested contexts](~/_csharplang/proposals/csharp-8.0/nested-stackalloc.md) feature proposal note. +For more information, see the [Stack allocation](~/_csharpstandard/standard/unsafe-code.md#239-stack-allocation) section of the [C# language specification](~/_csharpstandard/standard/README.md). 
## See also diff --git a/docs/csharp/specification/toc.yml b/docs/csharp/specification/toc.yml index 62b4710ca2b98..01f909e9489cf 100644 --- a/docs/csharp/specification/toc.yml +++ b/docs/csharp/specification/toc.yml @@ -181,8 +181,6 @@ items: href: ../../../_csharplang/proposals/csharp-8.0/using.md - name: Async streams href: ../../../_csharplang/proposals/csharp-8.0/async-streams.md - - name: Nested stackalloc - href: ../../../_csharplang/proposals/csharp-8.0/nested-stackalloc.md - name: Lock object semantics href: ../../../_csharplang/proposals/csharp-13.0/lock-object.md - name: Allow `ref` and `unsafe` From 469b13a7561c900a4f92c8badf96c0f4a41b7766 Mon Sep 17 00:00:00 2001 From: David Pine Date: Tue, 17 Dec 2024 12:03:06 -0600 Subject: [PATCH 3/6] Add the runtime libraries bits for MEAI (#43985) * Add the runtime libraries bits for MEAI * Apply suggestions from code review Co-authored-by: Genevieve Warren <24882762+gewarren@users.noreply.github.com> * Added a mock TOC (or in article nav) * Add all the snippets * Apply suggestions from code review Co-authored-by: alexwolfmsft <93200798+alexwolfmsft@users.noreply.github.com> * Fix code includes --------- Co-authored-by: Genevieve Warren <24882762+gewarren@users.noreply.github.com> Co-authored-by: alexwolfmsft <93200798+alexwolfmsft@users.noreply.github.com> --- docs/ai/ai-extensions.md | 6 +- .../extensions/artificial-intelligence.md | 282 ++++++++++++++++++ docs/core/extensions/http-ratelimiter.md | 12 +- .../snippets/ai/AI.Shared/AI.Shared.csproj | 14 + .../ai/AI.Shared/RateLimitingChatClient.cs | 55 ++++ ...ngChatClientExtensions.OptionalOverload.cs | 17 ++ .../RateLimitingChatClientExtensions.cs | 13 + .../RateLimitingEmbeddingGenerator.cs | 33 ++ .../snippets/ai/AI.Shared/SampleChatClient.cs | 58 ++++ .../ai/AI.Shared/SampleEmbeddingGenerator.cs | 35 +++ .../ConsoleAI.CacheResponses.csproj | 18 ++ .../ai/ConsoleAI.CacheResponses/Program.cs | 24 ++ .../ConsoleAI.CompleteAsyncArgs.csproj | 14 + 
.../ai/ConsoleAI.CompleteAsyncArgs/Program.cs | 10 + .../ConsoleAI.CompleteStreamingAsync.csproj | 14 + .../Program.cs | 9 + .../ConsoleAI.ConsumeClientMiddleware.csproj | 15 + .../Program.cs | 26 ++ ...soleAI.ConsumeRateLimitingEmbedding.csproj | 14 + .../Program.cs | 16 + .../ConsoleAI.CreateEmbeddings.csproj | 14 + .../ai/ConsoleAI.CreateEmbeddings/Program.cs | 10 + .../ConsoleAI.CustomClientMiddle.csproj | 18 ++ .../ConsoleAI.CustomClientMiddle/Program.cs | 12 + .../ConsoleAI.CustomEmbeddingsMiddle.csproj | 16 + .../Program.cs | 34 +++ .../ConsoleAI.DependencyInjection.csproj | 16 + .../ConsoleAI.DependencyInjection/Program.cs | 20 ++ .../ConsoleAI.FunctionalityPipelines.csproj | 20 ++ .../Program.cs | 46 +++ .../ConsoleAI.ProvideOptions.csproj | 18 ++ .../ai/ConsoleAI.ProvideOptions/Program.cs | 13 + .../ConsoleAI.ToolCalling.csproj | 18 ++ .../ai/ConsoleAI.ToolCalling/Program.cs | 21 ++ .../ConsoleAI.UseExample.csproj | 14 + .../ai/ConsoleAI.UseExample/Program.cs | 28 ++ .../ConsoleAI.UseExampleAlt.csproj | 14 + .../ai/ConsoleAI.UseExampleAlt/Program.cs | 27 ++ .../ConsoleAI.UseTelemetry.csproj | 18 ++ .../ai/ConsoleAI.UseTelemetry/Program.cs | 20 ++ .../snippets/ai/ConsoleAI/ConsoleAI.csproj | 18 ++ .../snippets/ai/ConsoleAI/Program.cs | 8 + docs/fundamentals/toc.yml | 3 + 43 files changed, 1103 insertions(+), 8 deletions(-) create mode 100644 docs/core/extensions/artificial-intelligence.md create mode 100644 docs/core/extensions/snippets/ai/AI.Shared/AI.Shared.csproj create mode 100644 docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClient.cs create mode 100644 docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClientExtensions.OptionalOverload.cs create mode 100644 docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClientExtensions.cs create mode 100644 docs/core/extensions/snippets/ai/AI.Shared/RateLimitingEmbeddingGenerator.cs create mode 100644 docs/core/extensions/snippets/ai/AI.Shared/SampleChatClient.cs create mode 100644 
docs/core/extensions/snippets/ai/AI.Shared/SampleEmbeddingGenerator.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.CacheResponses/ConsoleAI.CacheResponses.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.CacheResponses/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.CompleteAsyncArgs/ConsoleAI.CompleteAsyncArgs.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.CompleteAsyncArgs/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.CompleteStreamingAsync/ConsoleAI.CompleteStreamingAsync.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.CompleteStreamingAsync/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.ConsumeClientMiddleware/ConsoleAI.ConsumeClientMiddleware.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.ConsumeClientMiddleware/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.ConsumeRateLimitingEmbedding/ConsoleAI.ConsumeRateLimitingEmbedding.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.ConsumeRateLimitingEmbedding/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.CreateEmbeddings/ConsoleAI.CreateEmbeddings.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.CreateEmbeddings/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/ConsoleAI.CustomClientMiddle.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.CustomEmbeddingsMiddle/ConsoleAI.CustomEmbeddingsMiddle.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.CustomEmbeddingsMiddle/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.DependencyInjection/ConsoleAI.DependencyInjection.csproj create mode 100644 
docs/core/extensions/snippets/ai/ConsoleAI.DependencyInjection/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/ConsoleAI.FunctionalityPipelines.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/ConsoleAI.ProvideOptions.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/ConsoleAI.ToolCalling.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.UseExample/ConsoleAI.UseExample.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.UseExample/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.UseExampleAlt/ConsoleAI.UseExampleAlt.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.UseExampleAlt/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.UseTelemetry/ConsoleAI.UseTelemetry.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI.UseTelemetry/Program.cs create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI/ConsoleAI.csproj create mode 100644 docs/core/extensions/snippets/ai/ConsoleAI/Program.cs diff --git a/docs/ai/ai-extensions.md b/docs/ai/ai-extensions.md index 226e4ec178e82..b9a539f310948 100644 --- a/docs/ai/ai-extensions.md +++ b/docs/ai/ai-extensions.md @@ -1,7 +1,7 @@ --- title: Unified AI building blocks for .NET description: Learn how to develop with unified AI building blocks for .NET using Microsoft.Extensions.AI and Microsoft.Extensions.AI.Abstractions libraries -ms.date: 11/04/2024 +ms.date: 12/16/2024 ms.topic: quickstart ms.custom: devx-track-dotnet, devx-track-dotnet-ai author: alexwolfmsft @@ -16,11 +16,13 @@ The .NET ecosystem provides abstractions for 
integrating AI services into .NET a - How to work with AI abstractions in your apps and the benefits they offer. - Essential AI middleware concepts. +For more information, see [Introduction to Microsoft.Extensions.AI](../core/extensions/artificial-intelligence.md). + ## What is the Microsoft.Extensions.AI library? `Microsoft.Extensions.AI` is a set of core .NET libraries created in collaboration with developers across the .NET ecosystem, including Semantic Kernel. These libraries provide a unified layer of C# abstractions for interacting with AI services, such as small and large language models (SLMs and LLMs), embeddings, and middleware. -:::image type="content" source="media/ai-extensions/meai-architecture-diagram.png" alt-text="An architectural diagram of the AI extensions libraries."::: +:::image type="content" source="media/ai-extensions/meai-architecture-diagram.png" lightbox="media/ai-extensions/meai-architecture-diagram.png" alt-text="An architectural diagram of the AI extensions libraries."::: `Microsoft.Extensions.AI` provides abstractions that can be implemented by various services, all adhering to the same core concepts. This library is not intended to provide APIs tailored to any specific provider's services. The goal of `Microsoft.Extensions.AI` is to act as a unifying layer within the .NET ecosystem, enabling developers to choose their preferred frameworks and libraries while ensuring seamless integration and collaboration across the ecosystem. diff --git a/docs/core/extensions/artificial-intelligence.md b/docs/core/extensions/artificial-intelligence.md new file mode 100644 index 0000000000000..d872a9b14f55e --- /dev/null +++ b/docs/core/extensions/artificial-intelligence.md @@ -0,0 +1,282 @@ +--- +title: Artificial Intelligence in .NET (Preview) +description: Learn how to use the Microsoft.Extensions.AI library to integrate and interact with various AI services in your .NET applications. 
+author: IEvangelist +ms.author: dapine +ms.date: 12/17/2024 +ms.collection: ce-skilling-ai-copilot +--- + +# Artificial intelligence in .NET (Preview) + +With a growing variety of artificial intelligence (AI) services available, developers need a way to integrate and interact with these services in their .NET applications. The `Microsoft.Extensions.AI` library provides a unified approach for representing generative AI components, which enables seamless integration and interoperability with various AI services. This article introduces the library and provides installation instructions and usage examples to help you get started. + +## Install the package + +To install the [📦 Microsoft.Extensions.AI](https://www.nuget.org/packages/Microsoft.Extensions.AI) NuGet package, use the .NET CLI or add a package reference directly to your C# project file: + +### [.NET CLI](#tab/dotnet-cli) + +```dotnetcli +dotnet add package Microsoft.Extensions.AI --prerelease +``` + +### [PackageReference](#tab/package-reference) + +```xml + +``` + +--- + +For more information, see [dotnet add package](../tools/dotnet-add-package.md) or [Manage package dependencies in .NET applications](../tools/dependencies.md). + +## Usage examples + +The interface defines a client abstraction responsible for interacting with AI services that provide chat capabilities. It includes methods for sending and receiving messages with multi-modal content (such as text, images, and audio), either as a complete set or streamed incrementally. Additionally, it provides metadata information about the client and allows retrieving strongly typed services. + +> [!IMPORTANT] +> For more usage examples and real-world scenarios, see [AI for .NET developers](../../ai/index.yml). 
+ +**In this section** + +- [The `IChatClient` interface](#the-ichatclient-interface) + - [Request chat completion](#request-chat-completion) + - [Request chat completion with streaming](#request-chat-completion-with-streaming) + - [Tool calling](#tool-calling) + - [Cache responses](#cache-responses) + - [Use telemetry](#use-telemetry) + - [Provide options](#provide-options) + - [Functionality pipelines](#functionality-pipelines) + - [Custom `IChatClient` middleware](#custom-ichatclient-middleware) + - [Dependency injection](#dependency-injection) +- [The `IEmbeddingGenerator` interface](#the-iembeddinggenerator-interface) + - [Sample implementation](#sample-implementation) + - [Create embeddings](#create-embeddings) + - [Custom `IEmbeddingGenerator` middleware](#custom-iembeddinggenerator-middleware) + +### The `IChatClient` interface + +The following sample implements `IChatClient` to show the general structure. + +:::code language="csharp" source="snippets/ai/AI.Shared/SampleChatClient.cs"::: + +You can find other concrete implementations of `IChatClient` in the following NuGet packages: + +- [📦 Microsoft.Extensions.AI.AzureAIInference](https://www.nuget.org/packages/Microsoft.Extensions.AI.AzureAIInference): Implementation backed by [Azure AI Model Inference API](/azure/ai-studio/reference/reference-model-inference-api). +- [📦 Microsoft.Extensions.AI.Ollama](https://www.nuget.org/packages/Microsoft.Extensions.AI.Ollama): Implementation backed by [Ollama](https://ollama.com/). +- [📦 Microsoft.Extensions.AI.OpenAI](https://www.nuget.org/packages/Microsoft.Extensions.AI.OpenAI): Implementation backed by either [OpenAI](https://openai.com/) or OpenAI-compatible endpoints (such as [Azure OpenAI](https://azure.microsoft.com/products/ai-services/openai-service)). + +#### Request chat completion + +To request a completion, call the method. The request is composed of one or more messages, each of which is composed of one or more pieces of content. 
Accelerator methods exist to simplify common cases, such as constructing a request for a single piece of text content. + +:::code language="csharp" source="snippets/ai/ConsoleAI/Program.cs"::: + +The core `IChatClient.CompleteAsync` method accepts a list of messages. This list represents the history of all messages that are part of the conversation. + +:::code language="csharp" source="snippets/ai/ConsoleAI.CompleteAsyncArgs/Program.cs"::: + +Each message in the history is represented by a object. The `ChatMessage` class provides a property that indicates the role of the message. By default, the is used. The following roles are available: + +- : Instructs or sets the behavior of the assistant. +- : Provides responses to system-instructed, user-prompted input. +- : Provides additional information and references for chat completions. +- : Provides input for chat completions. + +Each chat message is instantiated, assigning to its property a new . There are various [types of content](xref:Microsoft.Extensions.AI.AIContent) that can be represented, such as a simple string or a more complex object that represents a multi-modal message with text, images, and audio: + +- +- +- +- +- +- +- + +#### Request chat completion with streaming + +The inputs to are identical to those of `CompleteAsync`. However, rather than returning the complete response as part of a object, the method returns an where `T` is , providing a stream of updates that collectively form the single response. + +:::code language="csharp" source="snippets/ai/ConsoleAI.CompleteStreamingAsync/Program.cs"::: + +> [!TIP] +> Streaming APIs are nearly synonymous with AI user experiences. C# enables compelling scenarios with its `IAsyncEnumerable` support, allowing for a natural and efficient way to stream data. + +#### Tool calling + +Some models and services support _tool calling_, where requests can include tools for the model to invoke functions to gather additional information. 
Instead of sending a final response, the model requests a function invocation with specific arguments. The client then invokes the function and sends the results back to the model along with the conversation history. The `Microsoft.Extensions.AI` library includes abstractions for various message content types, including function call requests and results. While consumers can interact with this content directly, `Microsoft.Extensions.AI` automates these interactions and provides: + +- : Represents a function that can be described to an AI service and invoked. +- : Provides factory methods for creating commonly used implementations of `AIFunction`. +- : Wraps an `IChatClient` to add automatic function invocation capabilities. + +Consider the following example that demonstrates a random function invocation: + +:::code language="csharp" source="snippets/ai/ConsoleAI.ToolCalling/Program.cs"::: + +The preceding example depends on the [📦 Microsoft.Extensions.AI.Ollama](https://www.nuget.org/packages/Microsoft.Extensions.AI.Ollama) NuGet package. + +The preceding code: + +- Defines a function named `GetCurrentWeather` that returns a random weather forecast. + - This function is decorated with a , which is used to provide a description of the function to the AI service. +- Instantiates a with an and configures it to use function invocation. +- Calls `CompleteStreamingAsync` on the client, passing a prompt and a list of tools that includes a function created with . +- Iterates over the response, printing each update to the console. + +#### Cache responses + +If you're familiar with [Caching in .NET](caching.md), it's good to know that provides other such delegating `IChatClient` implementations. The is an `IChatClient` that layers caching around another arbitrary `IChatClient` instance. When a unique chat history is submitted to the `DistributedCachingChatClient`, it forwards it to the underlying client and then caches the response before sending it back to the consumer. 
The next time the same prompt is submitted, such that a cached response can be found in the cache, the `DistributedCachingChatClient` returns the cached response rather than needing to forward the request along the pipeline. + +:::code language="csharp" source="snippets/ai/ConsoleAI.CacheResponses/Program.cs"::: + +The preceding example depends on the [📦 Microsoft.Extensions.Caching.Memory](https://www.nuget.org/packages/Microsoft.Extensions.Caching.Memory) NuGet package. For more information, see [Caching in .NET](caching.md). + +#### Use telemetry + +Another example of a delegating chat client is the . This implementation adheres to the [OpenTelemetry Semantic Conventions for Generative AI systems](https://opentelemetry.io/docs/specs/semconv/gen-ai/). Similar to other `IChatClient` delegators, it layers metrics and spans around any underlying `IChatClient` implementation, providing enhanced observability. + +:::code language="csharp" source="snippets/ai/ConsoleAI.UseTelemetry/Program.cs"::: + +The preceding example depends on the [📦 OpenTelemetry.Exporter.Console](https://www.nuget.org/packages/OpenTelemetry.Exporter.Console) NuGet package. + +#### Provide options + +Every call to or can optionally supply a instance containing additional parameters for the operation. The most common parameters among AI models and services show up as strongly typed properties on the type, such as . Other parameters can be supplied by name in a weakly typed manner via the dictionary. + +You can also specify options when building an `IChatClient` with the fluent API and chaining a call to the `ConfigureOptions` extension method. This delegating client wraps another client and invokes the supplied delegate to populate a `ChatOptions` instance for every call. 
For example, to ensure that the property defaults to a particular model name, you can use code like the following: + +:::code language="csharp" source="snippets/ai/ConsoleAI.ProvideOptions/Program.cs"::: + +The preceding example depends on the [📦 Microsoft.Extensions.AI.Ollama](https://www.nuget.org/packages/Microsoft.Extensions.AI.Ollama) NuGet package. + +#### Functionality pipelines + +`IChatClient` instances can be layered to create a pipeline of components, each adding specific functionality. These components can come from `Microsoft.Extensions.AI`, other NuGet packages, or custom implementations. This approach allows you to augment the behavior of the `IChatClient` in various ways to meet your specific needs. Consider the following example code that layers a distributed cache, function invocation, and OpenTelemetry tracing around a sample chat client: + +:::code language="csharp" source="snippets/ai/ConsoleAI.FunctionalityPipelines/Program.cs"::: + +The preceding example depends on the following NuGet packages: + +- [📦 Microsoft.Extensions.Caching.Memory](https://www.nuget.org/packages/Microsoft.Extensions.Caching.Memory) +- [📦 Microsoft.Extensions.AI.Ollama](https://www.nuget.org/packages/Microsoft.Extensions.AI.Ollama) +- [📦 OpenTelemetry.Exporter.Console](https://www.nuget.org/packages/OpenTelemetry.Exporter.Console) + +#### Custom `IChatClient` middleware + +To add additional functionality, you can implement `IChatClient` directly or use the class. This class serves as a base for creating chat clients that delegate operations to another `IChatClient` instance. It simplifies chaining multiple clients, allowing calls to pass through to an underlying client. + +The `DelegatingChatClient` class provides default implementations for methods like `CompleteAsync`, `CompleteStreamingAsync`, and `Dispose`, which forward calls to the inner client. 
You can derive from this class and override only the methods you need to enhance behavior, while delegating other calls to the base implementation. This approach helps create flexible and modular chat clients that are easy to extend and compose. + +The following is an example class derived from `DelegatingChatClient` to provide rate limiting functionality, utilizing the : + +:::code language="csharp" source="snippets/ai/AI.Shared/RateLimitingChatClient.cs"::: + +The preceding example depends on the [📦 System.Threading.RateLimiting](https://www.nuget.org/packages/System.Threading.RateLimiting) NuGet package. Composition of the `RateLimitingChatClient` with another client is straightforward: + +:::code language="csharp" source="snippets/ai/ConsoleAI.CustomClientMiddle/Program.cs"::: + +To simplify the composition of such components with others, component authors should create a `Use*` extension method for registering the component into a pipeline. For example, consider the following extension method: + +:::code language="csharp" source="snippets/ai/AI.Shared/RateLimitingChatClientExtensions.cs"::: + +Such extensions can also query for relevant services from the DI container; the used by the pipeline is passed in as an optional parameter: + +:::code language="csharp" source="snippets/ai/AI.Shared/RateLimitingChatClientExtensions.OptionalOverload.cs"::: + +The consumer can then easily use this in their pipeline, for example: + +:::code language="csharp" source="snippets/ai/ConsoleAI.ConsumeClientMiddleware/Program.cs" id="program"::: + +This example demonstrates [hosted scenario](generic-host.md), where the consumer relies on [dependency injection](dependency-injection.md) to provide the `RateLimiter` instance. The preceding extension methods demonstrate using a `Use` method on . The `ChatClientBuilder` also provides overloads that make it easier to write such delegating handlers. 
+ +- +- +- + +For example, in the earlier `RateLimitingChatClient` example, the overrides of `CompleteAsync` and `CompleteStreamingAsync` only need to do work before and after delegating to the next client in the pipeline. To achieve the same thing without writing a custom class, you can use an overload of `Use` that accepts a delegate that's used for both `CompleteAsync` and `CompleteStreamingAsync`, reducing the boilerplate required: + +:::code language="csharp" source="snippets/ai/ConsoleAI.UseExample/Program.cs"::: + +The preceding overload internally uses an `AnonymousDelegatingChatClient`, which enables more complicated patterns with only a little additional code. For example, to achieve the same result but with the retrieved from DI: + +:::code language="csharp" source="snippets/ai/ConsoleAI.UseExampleAlt/Program.cs"::: + +For scenarios where the developer would like to specify delegating implementations of `CompleteAsync` and `CompleteStreamingAsync` inline, and where it's important to be able to write a different implementation for each in order to handle their unique return types specially, another overload of `Use` exists that accepts a delegate for each. + +#### Dependency injection + + implementations will typically be provided to an application via [dependency injection (DI)](dependency-injection.md). In this example, an is added into the DI container, as is an `IChatClient`. The registration for the `IChatClient` employs a builder that creates a pipeline containing a caching client (which will then use an `IDistributedCache` retrieved from DI) and the sample client. The injected `IChatClient` can be retrieved and used elsewhere in the app. 
+ +:::code language="csharp" source="snippets/ai/ConsoleAI.DependencyInjection/Program.cs"::: + +The preceding example depends on the following NuGet packages: + +- [📦 Microsoft.Extensions.Hosting](https://www.nuget.org/packages/Microsoft.Extensions.Hosting) +- [📦 Microsoft.Extensions.Caching.Memory](https://www.nuget.org/packages/Microsoft.Extensions.Caching.Memory) + +What instance and configuration is injected can differ based on the current needs of the application, and multiple pipelines can be injected with different keys. + +### The `IEmbeddingGenerator` interface + +The interface represents a generic generator of embeddings. Here, `TInput` is the type of input values being embedded, and `TEmbedding` is the type of generated embedding, which inherits from the class. + +The `Embedding` class serves as a base class for embeddings generated by an `IEmbeddingGenerator`. It's designed to store and manage the metadata and data associated with embeddings. Derived types like `Embedding` provide the concrete embedding vector data. For instance, an embedding exposes a property to access its embedding data. + +The `IEmbeddingGenerator` interface defines a method to asynchronously generate embeddings for a collection of input values, with optional configuration and cancellation support. It also provides metadata describing the generator and allows for the retrieval of strongly typed services that can be provided by the generator or its underlying services. + +#### Sample implementation + +Consider the following sample implementation of an `IEmbeddingGenerator` to show the general structure but that just generates random embedding vectors. + +:::code language="csharp" source="snippets/ai/AI.Shared/SampleEmbeddingGenerator.cs"::: + +The preceding code: + +- Defines a class named `SampleEmbeddingGenerator` that implements the `IEmbeddingGenerator>` interface. +- Has a primary constructor that accepts an endpoint and model ID, which are used to identify the generator. 
+- Exposes a `Metadata` property that provides metadata about the generator. +- Implements the `GenerateAsync` method to generate embeddings for a collection of input values: + - Simulates an asynchronous operation by delaying for 100 milliseconds. + - Returns random embeddings for each input value. + +You can find actual concrete implementations in the following packages: + +- [📦 Microsoft.Extensions.AI.OpenAI](https://www.nuget.org/packages/Microsoft.Extensions.AI.OpenAI) +- [📦 Microsoft.Extensions.AI.Ollama](https://www.nuget.org/packages/Microsoft.Extensions.AI.Ollama) + +#### Create embeddings + +The primary operation performed with an is embedding generation, which is accomplished with its method. + +::code language="csharp" source="snippets/ai/ConsoleAI.CreateEmbeddings/Program.cs"::: + +#### Custom `IEmbeddingGenerator` middleware + +As with `IChatClient`, `IEmbeddingGenerator` implementations can be layered. Just as `Microsoft.Extensions.AI` provides delegating implementations of `IChatClient` for caching and telemetry, it provides an implementation for `IEmbeddingGenerator` as well. + +:::code language="csharp" source="snippets/ai/ConsoleAI.CustomEmbeddingsMiddle/Program.cs"::: + +The `IEmbeddingGenerator` enables building custom middleware that extends the functionality of an `IEmbeddingGenerator`. The class is an implementation of the `IEmbeddingGenerator` interface that serves as a base class for creating embedding generators that delegate their operations to another `IEmbeddingGenerator` instance. It allows for chaining multiple generators in any order, passing calls through to an underlying generator. The class provides default implementations for methods such as and `Dispose`, which forward the calls to the inner generator instance, enabling flexible and modular embedding generation. 
+ +The following is an example implementation of such a delegating embedding generator that rate limits embedding generation requests: + +:::code language="csharp" source="snippets/ai/AI.Shared/RateLimitingEmbeddingGenerator.cs"::: + +This can then be layered around an arbitrary `IEmbeddingGenerator>` to rate limit all embedding generation operations performed. + +:::code language="csharp" source="snippets/ai/ConsoleAI.ConsumeRateLimitingEmbedding/Program.cs"::: + +In this way, the `RateLimitingEmbeddingGenerator` can be composed with other `IEmbeddingGenerator>` instances to provide rate limiting functionality. + +## See also + +- [Develop .NET applications with AI features](../../ai/get-started/dotnet-ai-overview.md) +- [Unified AI building blocks for .NET using Microsoft.Extensions.AI](../../ai/ai-extensions.md) +- [Build an AI chat app with .NET](../../ai/quickstarts/get-started-openai.md) +- [.NET dependency injection](dependency-injection.md) +- [Rate limit an HTTP handler in .NET](http-ratelimiter.md) +- [.NET Generic Host](generic-host.md) +- [Caching in .NET](caching.md) diff --git a/docs/core/extensions/http-ratelimiter.md b/docs/core/extensions/http-ratelimiter.md index d8634be350c84..04ebe462f9695 100644 --- a/docs/core/extensions/http-ratelimiter.md +++ b/docs/core/extensions/http-ratelimiter.md @@ -3,7 +3,7 @@ title: Rate limiting an HTTP handler in .NET description: Learn how to create a client-side HTTP handler that limits the number of requests, with the inbuilt rate limiter API from .NET. author: IEvangelist ms.author: dapine -ms.date: 03/13/2023 +ms.date: 12/16/2024 --- # Rate limit an HTTP handler in .NET @@ -161,12 +161,12 @@ You'll notice that the first logged entries are always the immediately returned Note also that each URL's query string is unique: examine the `iteration` parameter to see that it's incremented by one for each request. 
This parameter helps to illustrate that the 429 responses aren't from the first requests, but rather from the requests that are made after the rate limit is reached. The 200 responses arrive later but these requests were made earlier—before the limit was reached. -To have a better understanding of the various rate-limiting algorithms, try rewriting this code to accept a different `RateLimiter` implementation. In addition to the `TokenBucketRateLimiter` you could try: +To have a better understanding of the various rate-limiting algorithms, try rewriting this code to accept a different implementation. In addition to the you could try: -- `ConcurrencyLimiter` -- `FixedWindowRateLimiter` -- `PartitionedRateLimiter` -- `SlidingWindowRateLimiter` +- +- +- +- ## Summary diff --git a/docs/core/extensions/snippets/ai/AI.Shared/AI.Shared.csproj b/docs/core/extensions/snippets/ai/AI.Shared/AI.Shared.csproj new file mode 100644 index 0000000000000..f7e01ba0c9f9c --- /dev/null +++ b/docs/core/extensions/snippets/ai/AI.Shared/AI.Shared.csproj @@ -0,0 +1,14 @@ + + + + net9.0 + enable + enable + + + + + + + + diff --git a/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClient.cs b/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClient.cs new file mode 100644 index 0000000000000..e5d3ada7f1f60 --- /dev/null +++ b/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClient.cs @@ -0,0 +1,55 @@ +using Microsoft.Extensions.AI; +using System.Runtime.CompilerServices; +using System.Threading.RateLimiting; + +public sealed class RateLimitingChatClient( + IChatClient innerClient, RateLimiter rateLimiter) + : DelegatingChatClient(innerClient) +{ + public override async Task CompleteAsync( + IList chatMessages, + ChatOptions? 
options = null, + CancellationToken cancellationToken = default) + { + using var lease = await rateLimiter.AcquireAsync(permitCount: 1, cancellationToken) + .ConfigureAwait(false); + + if (!lease.IsAcquired) + { + throw new InvalidOperationException("Unable to acquire lease."); + } + + return await base.CompleteAsync(chatMessages, options, cancellationToken) + .ConfigureAwait(false); + } + + public override async IAsyncEnumerable CompleteStreamingAsync( + IList chatMessages, + ChatOptions? options = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + using var lease = await rateLimiter.AcquireAsync(permitCount: 1, cancellationToken) + .ConfigureAwait(false); + + if (!lease.IsAcquired) + { + throw new InvalidOperationException("Unable to acquire lease."); + } + + await foreach (var update in base.CompleteStreamingAsync(chatMessages, options, cancellationToken) + .ConfigureAwait(false)) + { + yield return update; + } + } + + protected override void Dispose(bool disposing) + { + if (disposing) + { + rateLimiter.Dispose(); + } + + base.Dispose(disposing); + } +} diff --git a/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClientExtensions.OptionalOverload.cs b/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClientExtensions.OptionalOverload.cs new file mode 100644 index 0000000000000..066cf22f6ee44 --- /dev/null +++ b/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClientExtensions.OptionalOverload.cs @@ -0,0 +1,17 @@ +namespace Example.Two; + +// +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using System.Threading.RateLimiting; + +public static class RateLimitingChatClientExtensions +{ + public static ChatClientBuilder UseRateLimiting( + this ChatClientBuilder builder, RateLimiter? rateLimiter = null) => + builder.Use((innerClient, services) => + new RateLimitingChatClient( + innerClient, + rateLimiter ?? 
services.GetRequiredService())); +} +// diff --git a/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClientExtensions.cs b/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClientExtensions.cs new file mode 100644 index 0000000000000..5f0fe5765b193 --- /dev/null +++ b/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClientExtensions.cs @@ -0,0 +1,13 @@ +namespace Example.One; + +// +using Microsoft.Extensions.AI; +using System.Threading.RateLimiting; + +public static class RateLimitingChatClientExtensions +{ + public static ChatClientBuilder UseRateLimiting( + this ChatClientBuilder builder, RateLimiter rateLimiter) => + builder.Use(innerClient => new RateLimitingChatClient(innerClient, rateLimiter)); +} +// diff --git a/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingEmbeddingGenerator.cs b/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingEmbeddingGenerator.cs new file mode 100644 index 0000000000000..f71650698eaac --- /dev/null +++ b/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingEmbeddingGenerator.cs @@ -0,0 +1,33 @@ +using Microsoft.Extensions.AI; +using System.Threading.RateLimiting; + +public class RateLimitingEmbeddingGenerator( + IEmbeddingGenerator> innerGenerator, RateLimiter rateLimiter) + : DelegatingEmbeddingGenerator>(innerGenerator) +{ + public override async Task>> GenerateAsync( + IEnumerable values, + EmbeddingGenerationOptions? 
options = null, + CancellationToken cancellationToken = default) + { + using var lease = await rateLimiter.AcquireAsync(permitCount: 1, cancellationToken) + .ConfigureAwait(false); + + if (!lease.IsAcquired) + { + throw new InvalidOperationException("Unable to acquire lease."); + } + + return await base.GenerateAsync(values, options, cancellationToken); + } + + protected override void Dispose(bool disposing) + { + if (disposing) + { + rateLimiter.Dispose(); + } + + base.Dispose(disposing); + } +} diff --git a/docs/core/extensions/snippets/ai/AI.Shared/SampleChatClient.cs b/docs/core/extensions/snippets/ai/AI.Shared/SampleChatClient.cs new file mode 100644 index 0000000000000..99e0fb033df9a --- /dev/null +++ b/docs/core/extensions/snippets/ai/AI.Shared/SampleChatClient.cs @@ -0,0 +1,58 @@ +using System.Runtime.CompilerServices; +using Microsoft.Extensions.AI; + +public sealed class SampleChatClient(Uri endpoint, string modelId) : IChatClient +{ + public ChatClientMetadata Metadata { get; } = new(nameof(SampleChatClient), endpoint, modelId); + + public async Task CompleteAsync( + IList chatMessages, + ChatOptions? options = null, + CancellationToken cancellationToken = default) + { + // Simulate some operation. + await Task.Delay(300, cancellationToken); + + // Return a sample chat completion response randomly. + string[] responses = + [ + "This is the first sample response.", + "Here is another example of a response message.", + "This is yet another response message." + ]; + + return new([new ChatMessage() + { + Role = ChatRole.Assistant, + Text = responses[Random.Shared.Next(responses.Length)], + }]); + } + + public async IAsyncEnumerable CompleteStreamingAsync( + IList chatMessages, + ChatOptions? options = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + // Simulate streaming by yielding messages one by one. 
+ string[] words = ["This ", "is ", "the ", "response ", "for ", "the ", "request."]; + foreach (string word in words) + { + // Simulate some operation. + await Task.Delay(100, cancellationToken); + + // Yield the next message in the response. + yield return new StreamingChatCompletionUpdate + { + Role = ChatRole.Assistant, + Text = word, + }; + } + } + + public object? GetService(Type serviceType, object? serviceKey) => this; + + public TService? GetService(object? key = null) + where TService : class => this as TService; + + void IDisposable.Dispose() { } +} diff --git a/docs/core/extensions/snippets/ai/AI.Shared/SampleEmbeddingGenerator.cs b/docs/core/extensions/snippets/ai/AI.Shared/SampleEmbeddingGenerator.cs new file mode 100644 index 0000000000000..8cf53982d2cb1 --- /dev/null +++ b/docs/core/extensions/snippets/ai/AI.Shared/SampleEmbeddingGenerator.cs @@ -0,0 +1,35 @@ +using Microsoft.Extensions.AI; + +public sealed class SampleEmbeddingGenerator( + Uri endpoint, string modelId) + : IEmbeddingGenerator> +{ + public EmbeddingGeneratorMetadata Metadata { get; } = + new(nameof(SampleEmbeddingGenerator), endpoint, modelId); + + public async Task>> GenerateAsync( + IEnumerable values, + EmbeddingGenerationOptions? options = null, + CancellationToken cancellationToken = default) + { + // Simulate some async operation + await Task.Delay(100, cancellationToken); + + // Create random embeddings + return + [ + .. from value in values + select new Embedding( + Enumerable.Range(0, 384) + .Select(_ => Random.Shared.NextSingle()) + .ToArray()) + ]; + } + + public object? GetService(Type serviceType, object? serviceKey) => this; + + public TService? GetService(object? 
key = null) + where TService : class => this as TService; + + void IDisposable.Dispose() { } +} diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CacheResponses/ConsoleAI.CacheResponses.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.CacheResponses/ConsoleAI.CacheResponses.csproj new file mode 100644 index 0000000000000..be3d111984a25 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CacheResponses/ConsoleAI.CacheResponses.csproj @@ -0,0 +1,18 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + + + + + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CacheResponses/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.CacheResponses/Program.cs new file mode 100644 index 0000000000000..51096d1df9a95 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CacheResponses/Program.cs @@ -0,0 +1,24 @@ +using Microsoft.Extensions.AI; +using Microsoft.Extensions.Caching.Distributed; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.Options; + +var sampleChatClient = new SampleChatClient( + new Uri("http://coolsite.ai"), "target-ai-model"); + +IChatClient client = new ChatClientBuilder(sampleChatClient) + .UseDistributedCache(new MemoryDistributedCache( + Options.Create(new MemoryDistributedCacheOptions()))) + .Build(); + +string[] prompts = ["What is AI?", "What is .NET?", "What is AI?"]; + +foreach (var prompt in prompts) +{ + await foreach (var update in client.CompleteStreamingAsync(prompt)) + { + Console.Write(update); + } + + Console.WriteLine(); +} diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CompleteAsyncArgs/ConsoleAI.CompleteAsyncArgs.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.CompleteAsyncArgs/ConsoleAI.CompleteAsyncArgs.csproj new file mode 100644 index 0000000000000..b615dd1b868c2 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CompleteAsyncArgs/ConsoleAI.CompleteAsyncArgs.csproj @@ -0,0 +1,14 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + diff --git 
a/docs/core/extensions/snippets/ai/ConsoleAI.CompleteAsyncArgs/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.CompleteAsyncArgs/Program.cs new file mode 100644 index 0000000000000..eda37fef75fbf --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CompleteAsyncArgs/Program.cs @@ -0,0 +1,10 @@ +using Microsoft.Extensions.AI; + +IChatClient client = new SampleChatClient( + new Uri("http://coolsite.ai"), "target-ai-model"); + +Console.WriteLine(await client.CompleteAsync( +[ + new(ChatRole.System, "You are a helpful AI assistant"), + new(ChatRole.User, "What is AI?"), +])); diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CompleteStreamingAsync/ConsoleAI.CompleteStreamingAsync.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.CompleteStreamingAsync/ConsoleAI.CompleteStreamingAsync.csproj new file mode 100644 index 0000000000000..b615dd1b868c2 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CompleteStreamingAsync/ConsoleAI.CompleteStreamingAsync.csproj @@ -0,0 +1,14 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CompleteStreamingAsync/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.CompleteStreamingAsync/Program.cs new file mode 100644 index 0000000000000..a5e32ce3438a0 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CompleteStreamingAsync/Program.cs @@ -0,0 +1,9 @@ +using Microsoft.Extensions.AI; + +IChatClient client = new SampleChatClient( + new Uri("http://coolsite.ai"), "target-ai-model"); + +await foreach (var update in client.CompleteStreamingAsync("What is AI?")) +{ + Console.Write(update); +} diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeClientMiddleware/ConsoleAI.ConsumeClientMiddleware.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeClientMiddleware/ConsoleAI.ConsumeClientMiddleware.csproj new file mode 100644 index 0000000000000..ffd67c3f5495a --- /dev/null +++ 
b/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeClientMiddleware/ConsoleAI.ConsumeClientMiddleware.csproj @@ -0,0 +1,15 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeClientMiddleware/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeClientMiddleware/Program.cs new file mode 100644 index 0000000000000..f95efffe26568 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeClientMiddleware/Program.cs @@ -0,0 +1,26 @@ +using Example.Two; + +// +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; + +var builder = Host.CreateApplicationBuilder(args); + +builder.Services.AddChatClient(services => + new SampleChatClient(new Uri("http://localhost"), "test") + .AsBuilder() + .UseDistributedCache() + .UseRateLimiting() + .UseOpenTelemetry() + .Build(services)); + +using var app = builder.Build(); + +// Elsewhere in the app +var chatClient = app.Services.GetRequiredService(); + +Console.WriteLine(await chatClient.CompleteAsync("What is AI?")); + +app.Run(); +// diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeRateLimitingEmbedding/ConsoleAI.ConsumeRateLimitingEmbedding.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeRateLimitingEmbedding/ConsoleAI.ConsumeRateLimitingEmbedding.csproj new file mode 100644 index 0000000000000..b615dd1b868c2 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeRateLimitingEmbedding/ConsoleAI.ConsumeRateLimitingEmbedding.csproj @@ -0,0 +1,14 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeRateLimitingEmbedding/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeRateLimitingEmbedding/Program.cs new file mode 100644 index 0000000000000..d7987319e07ee --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeRateLimitingEmbedding/Program.cs @@ -0,0 
+1,16 @@ +using Microsoft.Extensions.AI; +using System.Threading.RateLimiting; + +IEmbeddingGenerator> generator = + new RateLimitingEmbeddingGenerator( + new SampleEmbeddingGenerator(new Uri("http://coolsite.ai"), "target-ai-model"), + new ConcurrencyLimiter(new() + { + PermitLimit = 1, + QueueLimit = int.MaxValue + })); + +foreach (var embedding in await generator.GenerateAsync(["What is AI?", "What is .NET?"])) +{ + Console.WriteLine(string.Join(", ", embedding.Vector.ToArray())); +} diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CreateEmbeddings/ConsoleAI.CreateEmbeddings.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.CreateEmbeddings/ConsoleAI.CreateEmbeddings.csproj new file mode 100644 index 0000000000000..b615dd1b868c2 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CreateEmbeddings/ConsoleAI.CreateEmbeddings.csproj @@ -0,0 +1,14 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CreateEmbeddings/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.CreateEmbeddings/Program.cs new file mode 100644 index 0000000000000..c3d8ece9410fb --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CreateEmbeddings/Program.cs @@ -0,0 +1,10 @@ +using Microsoft.Extensions.AI; + +IEmbeddingGenerator> generator = + new SampleEmbeddingGenerator( + new Uri("http://coolsite.ai"), "target-ai-model"); + +foreach (var embedding in await generator.GenerateAsync(["What is AI?", "What is .NET?"])) +{ + Console.WriteLine(string.Join(", ", embedding.Vector.ToArray())); +} diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/ConsoleAI.CustomClientMiddle.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/ConsoleAI.CustomClientMiddle.csproj new file mode 100644 index 0000000000000..be4820d0ade34 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/ConsoleAI.CustomClientMiddle.csproj @@ -0,0 +1,18 @@ + + + + Exe + net9.0 
+ enable + enable + + + + + + + + + + + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/Program.cs new file mode 100644 index 0000000000000..dd69572c6c7a2 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/Program.cs @@ -0,0 +1,12 @@ +using Microsoft.Extensions.AI; +using System.Threading.RateLimiting; + +var client = new RateLimitingChatClient( + new SampleChatClient(new Uri("http://localhost"), "test"), + new ConcurrencyLimiter(new() + { + PermitLimit = 1, + QueueLimit = int.MaxValue + })); + +await client.CompleteAsync("What color is the sky?"); diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CustomEmbeddingsMiddle/ConsoleAI.CustomEmbeddingsMiddle.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.CustomEmbeddingsMiddle/ConsoleAI.CustomEmbeddingsMiddle.csproj new file mode 100644 index 0000000000000..0f51adef5a2a3 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CustomEmbeddingsMiddle/ConsoleAI.CustomEmbeddingsMiddle.csproj @@ -0,0 +1,16 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + + + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CustomEmbeddingsMiddle/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.CustomEmbeddingsMiddle/Program.cs new file mode 100644 index 0000000000000..ffa45b4be6dd4 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CustomEmbeddingsMiddle/Program.cs @@ -0,0 +1,34 @@ +using Microsoft.Extensions.AI; +using Microsoft.Extensions.Caching.Distributed; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.Options; +using OpenTelemetry.Trace; + +// Configure OpenTelemetry exporter +var sourceName = Guid.NewGuid().ToString(); +var tracerProvider = OpenTelemetry.Sdk.CreateTracerProviderBuilder() + .AddSource(sourceName) + .AddConsoleExporter() + .Build(); + +// Explore changing the order of the intermediate "Use" calls to see that impact 
+// that has on what gets cached, traced, etc. +var generator = new EmbeddingGeneratorBuilder>( + new SampleEmbeddingGenerator(new Uri("http://coolsite.ai"), "target-ai-model")) + .UseDistributedCache( + new MemoryDistributedCache( + Options.Create(new MemoryDistributedCacheOptions()))) + .UseOpenTelemetry(sourceName: sourceName) + .Build(); + +var embeddings = await generator.GenerateAsync( +[ + "What is AI?", + "What is .NET?", + "What is AI?" +]); + +foreach (var embedding in embeddings) +{ + Console.WriteLine(string.Join(", ", embedding.Vector.ToArray())); +} diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.DependencyInjection/ConsoleAI.DependencyInjection.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.DependencyInjection/ConsoleAI.DependencyInjection.csproj new file mode 100644 index 0000000000000..dd7f9e2936369 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.DependencyInjection/ConsoleAI.DependencyInjection.csproj @@ -0,0 +1,16 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + + + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.DependencyInjection/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.DependencyInjection/Program.cs new file mode 100644 index 0000000000000..930b0b036c74e --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.DependencyInjection/Program.cs @@ -0,0 +1,20 @@ +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; + +// App setup +var builder = Host.CreateApplicationBuilder(); + +builder.Services.AddDistributedMemoryCache(); +builder.Services.AddChatClient(new SampleChatClient( + new Uri("http://coolsite.ai"), "target-ai-model")) + .UseDistributedCache(); + +using var app = builder.Build(); + +// Elsewhere in the app +var chatClient = app.Services.GetRequiredService(); + +Console.WriteLine(await chatClient.CompleteAsync("What is AI?")); + +app.Run(); diff --git 
a/docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/ConsoleAI.FunctionalityPipelines.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/ConsoleAI.FunctionalityPipelines.csproj new file mode 100644 index 0000000000000..3fe7c47c39ce1 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/ConsoleAI.FunctionalityPipelines.csproj @@ -0,0 +1,20 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + + + + + + + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/Program.cs new file mode 100644 index 0000000000000..16f563b3689a3 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/Program.cs @@ -0,0 +1,46 @@ +using Microsoft.Extensions.AI; +using Microsoft.Extensions.Caching.Distributed; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.Options; +using OpenTelemetry.Trace; + +// Configure OpenTelemetry exporter +var sourceName = Guid.NewGuid().ToString(); +var tracerProvider = OpenTelemetry.Sdk.CreateTracerProviderBuilder() + .AddSource(sourceName) + .AddConsoleExporter() + .Build(); + +// Explore changing the order of the intermediate "Use" calls to see that impact +// that has on what gets cached, traced, etc. +IChatClient client = new ChatClientBuilder( + new OllamaChatClient(new Uri("http://localhost:11434"), "llama3.1")) + .UseDistributedCache(new MemoryDistributedCache( + Options.Create(new MemoryDistributedCacheOptions()))) + .UseFunctionInvocation() + .UseOpenTelemetry( + sourceName: sourceName, + configure: static c => c.EnableSensitiveData = true) + .Build(); + +ChatOptions options = new() +{ + Tools = + [ + AIFunctionFactory.Create( + () => Random.Shared.NextDouble() > 0.5 ? 
"It's sunny" : "It's raining", + name: "GetCurrentWeather", + description: "Gets the current weather") + ] +}; + +for (int i = 0; i < 3; ++i) +{ + List history = + [ + new ChatMessage(ChatRole.System, "You are a helpful AI assistant"), + new ChatMessage(ChatRole.User, "Do I need an umbrella?") + ]; + + Console.WriteLine(await client.CompleteAsync(history, options)); +} diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/ConsoleAI.ProvideOptions.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/ConsoleAI.ProvideOptions.csproj new file mode 100644 index 0000000000000..52b8ab4531c7f --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/ConsoleAI.ProvideOptions.csproj @@ -0,0 +1,18 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + + + + + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/Program.cs new file mode 100644 index 0000000000000..c6ce0bfb7010e --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/Program.cs @@ -0,0 +1,13 @@ +using Microsoft.Extensions.AI; + +IChatClient client = new ChatClientBuilder( + new OllamaChatClient(new Uri("http://localhost:11434"))) + .ConfigureOptions(options => options.ModelId ??= "phi3") + .Build(); + +// will request "phi3" +Console.WriteLine(await client.CompleteAsync("What is AI?")); + +// will request "llama3.1" +Console.WriteLine(await client.CompleteAsync( + "What is AI?", new() { ModelId = "llama3.1" })); diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/ConsoleAI.ToolCalling.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/ConsoleAI.ToolCalling.csproj new file mode 100644 index 0000000000000..52b8ab4531c7f --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/ConsoleAI.ToolCalling.csproj @@ -0,0 +1,18 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + + + + + diff --git 
a/docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/Program.cs new file mode 100644 index 0000000000000..ff8ef0c2ba7c8 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/Program.cs @@ -0,0 +1,21 @@ +using System.ComponentModel; +using Microsoft.Extensions.AI; + +[Description("Gets the current weather")] +string GetCurrentWeather() => Random.Shared.NextDouble() > 0.5 + ? "It's sunny" + : "It's raining"; + +IChatClient client = new ChatClientBuilder( + new OllamaChatClient(new Uri("http://localhost:11434"), "llama3.1")) + .UseFunctionInvocation() + .Build(); + +var response = client.CompleteStreamingAsync( + "Should I wear a rain coat?", + new() { Tools = [AIFunctionFactory.Create(GetCurrentWeather)] }); + +await foreach (var update in response) +{ + Console.Write(update); +} diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.UseExample/ConsoleAI.UseExample.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.UseExample/ConsoleAI.UseExample.csproj new file mode 100644 index 0000000000000..b615dd1b868c2 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.UseExample/ConsoleAI.UseExample.csproj @@ -0,0 +1,14 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.UseExample/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.UseExample/Program.cs new file mode 100644 index 0000000000000..aa3de1cec1423 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.UseExample/Program.cs @@ -0,0 +1,28 @@ +using Microsoft.Extensions.AI; +using System.Threading.RateLimiting; + +RateLimiter rateLimiter = new ConcurrencyLimiter(new() +{ + PermitLimit = 1, + QueueLimit = int.MaxValue +}); + +var client = new SampleChatClient(new Uri("http://localhost"), "test") + .AsBuilder() + .UseDistributedCache() + .Use(async (chatMessages, options, nextAsync, cancellationToken) => + { + using var lease = await 
rateLimiter.AcquireAsync(permitCount: 1, cancellationToken) + .ConfigureAwait(false); + + if (!lease.IsAcquired) + { + throw new InvalidOperationException("Unable to acquire lease."); + } + + await nextAsync(chatMessages, options, cancellationToken); + }) + .UseOpenTelemetry() + .Build(); + +// Use client diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.UseExampleAlt/ConsoleAI.UseExampleAlt.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.UseExampleAlt/ConsoleAI.UseExampleAlt.csproj new file mode 100644 index 0000000000000..b615dd1b868c2 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.UseExampleAlt/ConsoleAI.UseExampleAlt.csproj @@ -0,0 +1,14 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.UseExampleAlt/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.UseExampleAlt/Program.cs new file mode 100644 index 0000000000000..1b35dfe6d25c5 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.UseExampleAlt/Program.cs @@ -0,0 +1,27 @@ +using System.Threading.RateLimiting; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; + +var client = new SampleChatClient(new Uri("http://localhost"), "test") + .AsBuilder() + .UseDistributedCache() + .Use(static (innerClient, services) => + { + var rateLimiter = services.GetRequiredService(); + + return new AnonymousDelegatingChatClient( + innerClient, async (chatMessages, options, nextAsync, cancellationToken) => + { + using var lease = await rateLimiter.AcquireAsync(permitCount: 1, cancellationToken) + .ConfigureAwait(false); + + if (!lease.IsAcquired) + { + throw new InvalidOperationException("Unable to acquire lease."); + } + + await nextAsync(chatMessages, options, cancellationToken); + }); + }) + .UseOpenTelemetry() + .Build(); diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.UseTelemetry/ConsoleAI.UseTelemetry.csproj 
b/docs/core/extensions/snippets/ai/ConsoleAI.UseTelemetry/ConsoleAI.UseTelemetry.csproj new file mode 100644 index 0000000000000..b97375a313615 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.UseTelemetry/ConsoleAI.UseTelemetry.csproj @@ -0,0 +1,18 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + + + + + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.UseTelemetry/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.UseTelemetry/Program.cs new file mode 100644 index 0000000000000..d4c5e2c28e723 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI.UseTelemetry/Program.cs @@ -0,0 +1,20 @@ +using Microsoft.Extensions.AI; +using OpenTelemetry.Trace; + +// Configure OpenTelemetry exporter +var sourceName = Guid.NewGuid().ToString(); +var tracerProvider = OpenTelemetry.Sdk.CreateTracerProviderBuilder() + .AddSource(sourceName) + .AddConsoleExporter() + .Build(); + +var sampleChatClient = new SampleChatClient( + new Uri("http://coolsite.ai"), "target-ai-model"); + +IChatClient client = new ChatClientBuilder(sampleChatClient) + .UseOpenTelemetry( + sourceName: sourceName, + configure: static c => c.EnableSensitiveData = true) + .Build(); + +Console.WriteLine((await client.CompleteAsync("What is AI?")).Message); diff --git a/docs/core/extensions/snippets/ai/ConsoleAI/ConsoleAI.csproj b/docs/core/extensions/snippets/ai/ConsoleAI/ConsoleAI.csproj new file mode 100644 index 0000000000000..bcec98d0ad009 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI/ConsoleAI.csproj @@ -0,0 +1,18 @@ + + + + Exe + net9.0 + enable + enable + + + + + + + + + + + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI/Program.cs new file mode 100644 index 0000000000000..258a6d41ac681 --- /dev/null +++ b/docs/core/extensions/snippets/ai/ConsoleAI/Program.cs @@ -0,0 +1,8 @@ +using Microsoft.Extensions.AI; + +IChatClient client = new SampleChatClient( + new Uri("http://coolsite.ai"), 
"target-ai-model"); + +var response = await client.CompleteAsync("What is AI?"); + +Console.WriteLine(response.Message); diff --git a/docs/fundamentals/toc.yml b/docs/fundamentals/toc.yml index 41d78e90e1aa4..f3c21c755d52a 100644 --- a/docs/fundamentals/toc.yml +++ b/docs/fundamentals/toc.yml @@ -998,6 +998,9 @@ items: href: runtime-libraries/system-console.md - name: The System.Random class href: runtime-libraries/system-random.md + - name: Artificial intelligence (AI) + displayName: microsoft.extensions.ai,ollama,ai,openai,azure inference,ichatclient + href: ../core/extensions/artificial-intelligence.md - name: Dependency injection items: - name: Overview From 6cc80197cd66e9f3dd33d01194b190e070048fed Mon Sep 17 00:00:00 2001 From: Julien Couvreur Date: Tue, 17 Dec 2024 10:19:27 -0800 Subject: [PATCH 4/6] Allow `yield return` in `lock` (#43988) * Allow `yield return` in `lock` * Remove lock statement restriction from iterator yield --- .../language-reference/compiler-messages/iterator-yield.md | 5 ----- 1 file changed, 5 deletions(-) diff --git a/docs/csharp/language-reference/compiler-messages/iterator-yield.md b/docs/csharp/language-reference/compiler-messages/iterator-yield.md index 9e2e1754f6cc0..b65145bffa571 100644 --- a/docs/csharp/language-reference/compiler-messages/iterator-yield.md +++ b/docs/csharp/language-reference/compiler-messages/iterator-yield.md @@ -13,7 +13,6 @@ f1_keywords: - "CS4013" - "CS8154" - "CS8176" - - "CS9237" - "CS9238" - "CS9239" helpviewer_keywords: @@ -28,7 +27,6 @@ helpviewer_keywords: - "CS4013" - "CS8154" - "CS8176" - - "CS9237" - "CS9238" - "CS9239" ms.date: 07/02/2024 @@ -51,7 +49,6 @@ That's by design. 
The text closely matches the text of the compiler error / warn - [**CS4013**](#ref-safety-in-iterator-methods): *Instance of type cannot be used inside a nested function, query expression, iterator block or async method* - [**CS8154**](#structure-of-an-iterator-method): *The body cannot be an iterator block because it returns by reference* - [**CS8176**](#ref-safety-in-iterator-methods): *Iterators cannot have by-reference locals* -- [**CS9237**](#restrictions-on-iterator-methods): *'yield return' should not be used in the body of a lock statement* - [**CS9238**](#restrictions-on-iterator-methods): *Cannot use 'yield return' in an 'unsafe' block* - [**CS9239**](#restrictions-on-iterator-methods): *The `&` operator cannot be used on parameters or local variables in iterator methods.* @@ -82,7 +79,6 @@ The body of an iterator method must conform to restrictions on the `yield return - **CS1626**: *Cannot yield a value in the body of a try block with a catch clause* - **CS1631**: *Cannot yield a value in the body of a catch clause* - **CS1629**: *Unsafe code may not appear in iterators* -- **CS9237**: *''yield return' should not be used in the body of a lock statement* - **CS9238**: *Cannot use 'yield return' in an 'unsafe' block* - **CS9239**: *The `&` operator cannot be used on parameters or local variables in iterator methods.* @@ -90,7 +86,6 @@ These errors indicate that your code violates safety rules because an iterator r - You can't `yield return` from a `catch` or `finally` clause. - You can't `yield return` from a `try` block with a catch clause. -- You can't `yield return` from inside a `lock` statement block. Doing so can cause deadlocks. - You can't `yield return` from an `unsafe` block. The context for an iterator creates a nested `safe` block within the enclosing `unsafe` block. - You can't use the `&` operator to take the address of a variable in an iterator method. 
From 402643106042cdfbd2dd21cf2f72678546d2358b Mon Sep 17 00:00:00 2001 From: Bill Wagner Date: Tue, 17 Dec 2024 13:36:33 -0500 Subject: [PATCH 5/6] Update .openpublishing.redirection.csharp.json (#43992) --- .openpublishing.redirection.csharp.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.openpublishing.redirection.csharp.json b/.openpublishing.redirection.csharp.json index db99fd814d2ae..7b12bcb3e04cb 100644 --- a/.openpublishing.redirection.csharp.json +++ b/.openpublishing.redirection.csharp.json @@ -113,7 +113,7 @@ "redirect_url": "/dotnet/csharp/language-reference/language-specification/types.md#893-nullable-reference-types" }, { - "source_path_from_root": "/_csharplang/proposals/csharp-8.0/nested-stackalloc.md", + "source_path_from_root": "/redirections/proposals/csharp-8.0/nested-stackalloc.md", "redirect_url": "/dotnet/csharp/language-reference/language-specification/structs#164127-stackalloc" }, { From 81399028a4674c45b6a4a18810c89e2137af12db Mon Sep 17 00:00:00 2001 From: Azure SDK Bot <53356347+azure-sdk@users.noreply.github.com> Date: Tue, 17 Dec 2024 11:21:21 -0800 Subject: [PATCH 6/6] Update package index with latest published versions (#43991) --- docs/azure/includes/dotnet-all.md | 16 ++++++++-------- docs/azure/includes/dotnet-new.md | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/azure/includes/dotnet-all.md b/docs/azure/includes/dotnet-all.md index 2d5a6150a9ba1..5e1abf560171c 100644 --- a/docs/azure/includes/dotnet-all.md +++ b/docs/azure/includes/dotnet-all.md @@ -38,7 +38,7 @@ | Dev Center | NuGet [1.0.0](https://www.nuget.org/packages/Azure.Developer.DevCenter/1.0.0) | [docs](/dotnet/api/overview/azure/Developer.DevCenter-readme) | GitHub [1.0.0](https://github.com/Azure/azure-sdk-for-net/tree/Azure.Developer.DevCenter_1.0.0/sdk/devcenter/Azure.Developer.DevCenter/) | | Device Update | NuGet [1.0.0](https://www.nuget.org/packages/Azure.IoT.DeviceUpdate/1.0.0) | 
[docs](/dotnet/api/overview/azure/IoT.DeviceUpdate-readme) | GitHub [1.0.0](https://github.com/Azure/azure-sdk-for-net/tree/Azure.IoT.DeviceUpdate_1.0.0/sdk/deviceupdate/Azure.IoT.DeviceUpdate/) | | Digital Twins | NuGet [1.4.0](https://www.nuget.org/packages/Azure.DigitalTwins.Core/1.4.0) | [docs](/dotnet/api/overview/azure/DigitalTwins.Core-readme) | GitHub [1.4.0](https://github.com/Azure/azure-sdk-for-net/tree/Azure.DigitalTwins.Core_1.4.0/sdk/digitaltwins/Azure.DigitalTwins.Core/) | -| Document Intelligence | NuGet [1.0.0-beta.3](https://www.nuget.org/packages/Azure.AI.DocumentIntelligence/1.0.0-beta.3) | [docs](/dotnet/api/overview/azure/AI.DocumentIntelligence-readme?view=azure-dotnet-preview&preserve-view=true) | GitHub [1.0.0-beta.3](https://github.com/Azure/azure-sdk-for-net/tree/Azure.AI.DocumentIntelligence_1.0.0-beta.3/sdk/documentintelligence/Azure.AI.DocumentIntelligence/) | +| Document Intelligence | NuGet [1.0.0](https://www.nuget.org/packages/Azure.AI.DocumentIntelligence/1.0.0) | [docs](/dotnet/api/overview/azure/AI.DocumentIntelligence-readme) | GitHub [1.0.0](https://github.com/Azure/azure-sdk-for-net/tree/Azure.AI.DocumentIntelligence_1.0.0/sdk/documentintelligence/Azure.AI.DocumentIntelligence/) | | Document Translation | NuGet [2.0.0](https://www.nuget.org/packages/Azure.AI.Translation.Document/2.0.0) | [docs](/dotnet/api/overview/azure/AI.Translation.Document-readme) | GitHub [2.0.0](https://github.com/Azure/azure-sdk-for-net/tree/Azure.AI.Translation.Document_2.0.0/sdk/translation/Azure.AI.Translation.Document/) | | Event Grid | NuGet [4.28.0](https://www.nuget.org/packages/Azure.Messaging.EventGrid/4.28.0) | [docs](/dotnet/api/overview/azure/Messaging.EventGrid-readme) | GitHub [4.28.0](https://github.com/Azure/azure-sdk-for-net/tree/Azure.Messaging.EventGrid_4.28.0/sdk/eventgrid/Azure.Messaging.EventGrid/) | | Event Grid Namespaces | NuGet [1.0.0](https://www.nuget.org/packages/Azure.Messaging.EventGrid.Namespaces/1.0.0) | 
[docs](/dotnet/api/overview/azure/Messaging.EventGrid.Namespaces-readme) | GitHub [1.0.0](https://github.com/Azure/azure-sdk-for-net/tree/Azure.Messaging.EventGrid.Namespaces_1.0.0/sdk/eventgrid/Azure.Messaging.EventGrid.Namespaces/) | @@ -372,11 +372,11 @@ | Unknown Display Name | NuGet [1.0.0-beta.0](https://www.nuget.org/packages/Microsoft.Azure.Cosmos.FaultInjection/1.0.0-beta.0) | | | | Unknown Display Name | NuGet [1.0.4-preview](https://www.nuget.org/packages/Microsoft.Azure.Functions.Worker.Extensions.MySql/1.0.4-preview) | | | | Unknown Display Name | NuGet [1.0.4-preview](https://www.nuget.org/packages/Microsoft.Azure.WebJobs.Extensions.MySql/1.0.4-preview) | | | -| Unknown Display Name | NuGet [1.41.1](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech.CLI/1.41.1) | | | -| Unknown Display Name | NuGet [1.41.1](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech.Extension.Embedded.SR/1.41.1) | | | -| Unknown Display Name | NuGet [1.41.1](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech.Extension.Embedded.TTS/1.41.1) | | | -| Unknown Display Name | NuGet [1.41.1](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech.Extension.ONNX.Runtime/1.41.1) | | | -| Unknown Display Name | NuGet [1.41.1](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech.Extension.Telemetry/1.41.1) | | | +| Unknown Display Name | NuGet [1.42.0](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech.CLI/1.42.0) | | | +| Unknown Display Name | NuGet [1.42.0](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech.Extension.Embedded.SR/1.42.0) | | | +| Unknown Display Name | NuGet [1.42.0](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech.Extension.Embedded.TTS/1.42.0) | | | +| Unknown Display Name | NuGet [1.42.0](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech.Extension.ONNX.Runtime/1.42.0) | | | +| Unknown Display Name | NuGet 
[1.42.0](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech.Extension.Telemetry/1.42.0) | | | | Anomaly Detector | NuGet [1.0.0](https://www.nuget.org/packages/Microsoft.Azure.CognitiveServices.AnomalyDetector/1.0.0) | | GitHub [1.0.0](https://github.com/Azure/azure-sdk-for-net/tree/Microsoft.Azure.CognitiveServices.AnomalyDetector_1.0.0-preview.1/sdk/cognitiveservices/AnomalyDetector) | | App Service | NuGet [0.2.2-alpha](https://www.nuget.org/packages/Microsoft.Azure.AppService/0.2.2-alpha) | | | | Application Insights | NuGet [0.9.0-preview](https://www.nuget.org/packages/Microsoft.Azure.ApplicationInsights/0.9.0-preview) | | | @@ -457,8 +457,8 @@ | Search - Common | NuGet [10.1.0](https://www.nuget.org/packages/Microsoft.Azure.Search.Common/10.1.0) | | GitHub [10.1.0](https://github.com/Azure/azure-sdk-for-net/tree/Microsoft.Azure.Search.Common_10.1.0/sdk/search/Microsoft.Azure.Search.Common/) | | Search - Data | NuGet [10.1.0](https://www.nuget.org/packages/Microsoft.Azure.Search.Data/10.1.0) | | GitHub [10.1.0](https://github.com/Azure/azure-sdk-for-net/tree/Microsoft.Azure.Search.Data_10.1.0/sdk/search/Microsoft.Azure.Search.Data/) | | Search - Service | NuGet [10.1.0](https://www.nuget.org/packages/Microsoft.Azure.Search.Service/10.1.0) | | GitHub [10.1.0](https://github.com/Azure/azure-sdk-for-net/tree/Microsoft.Azure.Search.Service_10.1.0/sdk/search/Microsoft.Azure.Search.Service/) | -| Speech | NuGet [1.41.1](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech/1.41.1) | | | -| Speech Remote Conversation | NuGet [1.41.1](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech.Remoteconversation/1.41.1) | | | +| Speech | NuGet [1.42.0](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech/1.42.0) | | | +| Speech Remote Conversation | NuGet [1.42.0](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech.Remoteconversation/1.42.0) | | | | Speech Xamarin iOS | NuGet 
[1.25.0](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech.Xamarin.iOS/1.25.0) | | | | Spell Check | NuGet [4.1.0-preview.1](https://www.nuget.org/packages/Microsoft.Azure.CognitiveServices.Language.SpellCheck/4.1.0-preview.1) | | GitHub [4.1.0-preview.1](https://github.com/Azure/azure-sdk-for-net/tree/Microsoft.Azure.CognitiveServices.Language.SpellCheck_4.1.0-preview.1/sdk/cognitiveservices/Language.SpellCheck) | | Spring Cloud Client | NuGet [2.0.0-preview.3](https://www.nuget.org/packages/Microsoft.Azure.SpringCloud.Client/2.0.0-preview.3) | | | diff --git a/docs/azure/includes/dotnet-new.md b/docs/azure/includes/dotnet-new.md index 97b5f37235f83..be11ad15683ee 100644 --- a/docs/azure/includes/dotnet-new.md +++ b/docs/azure/includes/dotnet-new.md @@ -39,7 +39,7 @@ | Dev Center | NuGet [1.0.0](https://www.nuget.org/packages/Azure.Developer.DevCenter/1.0.0) | [docs](/dotnet/api/overview/azure/Developer.DevCenter-readme) | GitHub [1.0.0](https://github.com/Azure/azure-sdk-for-net/tree/Azure.Developer.DevCenter_1.0.0/sdk/devcenter/Azure.Developer.DevCenter/) | | Device Update | NuGet [1.0.0](https://www.nuget.org/packages/Azure.IoT.DeviceUpdate/1.0.0) | [docs](/dotnet/api/overview/azure/IoT.DeviceUpdate-readme) | GitHub [1.0.0](https://github.com/Azure/azure-sdk-for-net/tree/Azure.IoT.DeviceUpdate_1.0.0/sdk/deviceupdate/Azure.IoT.DeviceUpdate/) | | Digital Twins | NuGet [1.4.0](https://www.nuget.org/packages/Azure.DigitalTwins.Core/1.4.0) | [docs](/dotnet/api/overview/azure/DigitalTwins.Core-readme) | GitHub [1.4.0](https://github.com/Azure/azure-sdk-for-net/tree/Azure.DigitalTwins.Core_1.4.0/sdk/digitaltwins/Azure.DigitalTwins.Core/) | -| Document Intelligence | NuGet [1.0.0-beta.3](https://www.nuget.org/packages/Azure.AI.DocumentIntelligence/1.0.0-beta.3) | [docs](/dotnet/api/overview/azure/AI.DocumentIntelligence-readme?view=azure-dotnet-preview&preserve-view=true) | GitHub 
[1.0.0-beta.3](https://github.com/Azure/azure-sdk-for-net/tree/Azure.AI.DocumentIntelligence_1.0.0-beta.3/sdk/documentintelligence/Azure.AI.DocumentIntelligence/) | +| Document Intelligence | NuGet [1.0.0](https://www.nuget.org/packages/Azure.AI.DocumentIntelligence/1.0.0) | [docs](/dotnet/api/overview/azure/AI.DocumentIntelligence-readme) | GitHub [1.0.0](https://github.com/Azure/azure-sdk-for-net/tree/Azure.AI.DocumentIntelligence_1.0.0/sdk/documentintelligence/Azure.AI.DocumentIntelligence/) | | Document Translation | NuGet [2.0.0](https://www.nuget.org/packages/Azure.AI.Translation.Document/2.0.0) | [docs](/dotnet/api/overview/azure/AI.Translation.Document-readme) | GitHub [2.0.0](https://github.com/Azure/azure-sdk-for-net/tree/Azure.AI.Translation.Document_2.0.0/sdk/translation/Azure.AI.Translation.Document/) | | Event Grid | NuGet [4.28.0](https://www.nuget.org/packages/Azure.Messaging.EventGrid/4.28.0) | [docs](/dotnet/api/overview/azure/Messaging.EventGrid-readme) | GitHub [4.28.0](https://github.com/Azure/azure-sdk-for-net/tree/Azure.Messaging.EventGrid_4.28.0/sdk/eventgrid/Azure.Messaging.EventGrid/) | | Event Grid Namespaces | NuGet [1.0.0](https://www.nuget.org/packages/Azure.Messaging.EventGrid.Namespaces/1.0.0) | [docs](/dotnet/api/overview/azure/Messaging.EventGrid.Namespaces-readme) | GitHub [1.0.0](https://github.com/Azure/azure-sdk-for-net/tree/Azure.Messaging.EventGrid.Namespaces_1.0.0/sdk/eventgrid/Azure.Messaging.EventGrid.Namespaces/) |