Skip to content

Commit 0d999f9

Browse files
Merge pull request #45071 from dotnet/main
Merge main into live
2 parents dc5a0ed + cdb02b2 commit 0d999f9

35 files changed

+425
-84
lines changed

Diff for: .openpublishing.redirection.ai.json

+33-5
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
{
22
"redirections": [
33
{
4-
"source_path_from_root": "/docs/ai/quickstarts/get-started-azure-openai.md",
5-
"redirect_url": "/dotnet/ai/quickstarts/get-started-openai"
4+
"source_path_from_root": "/docs/ai/how-to/app-service-db-auth.md",
5+
"redirect_url": "/dotnet/ai"
66
},
77
{
88
"source_path_from_root": "/docs/ai/how-to/use-redis-for-memory.md",
@@ -13,8 +13,36 @@
1313
"redirect_url": "/dotnet/ai"
1414
},
1515
{
16-
"source_path_from_root": "/docs/ai/how-to/app-service-db-auth.md",
17-
"redirect_url": "/dotnet/ai"
16+
"source_path_from_root": "/docs/ai/quickstarts/get-started-azure-openai.md",
17+
"redirect_url": "/dotnet/ai/quickstarts/build-chat-app"
18+
},
19+
{
20+
"source_path_from_root": "/docs/ai/quickstarts/get-started-openai.md",
21+
"redirect_url": "/dotnet/ai/quickstarts/build-chat-app"
22+
},
23+
{
24+
"source_path_from_root": "/docs/ai/quickstarts/quickstart-ai-chat-with-data.md",
25+
"redirect_url": "/dotnet/ai/quickstarts/build-vector-search-app"
26+
},
27+
{
28+
"source_path_from_root": "/docs/ai/quickstarts/quickstart-assistants.md",
29+
"redirect_url": "/dotnet/ai/quickstarts/create-assistant"
30+
},
31+
{
32+
"source_path_from_root": "/docs/ai/quickstarts/quickstart-azure-openai-tool.md",
33+
"redirect_url": "/dotnet/ai/quickstarts/use-function-calling"
34+
},
35+
{
36+
"source_path_from_root": "/docs/ai/quickstarts/quickstart-local-ai.md",
37+
"redirect_url": "/dotnet/ai/quickstarts/chat-local-model"
38+
},
39+
{
40+
"source_path_from_root": "/docs/ai/quickstarts/quickstart-openai-generate-images.md",
41+
"redirect_url": "/dotnet/ai/quickstarts/generate-images"
42+
},
43+
{
44+
"source_path_from_root": "/docs/ai/quickstarts/quickstart-openai-summarize-text.md",
45+
"redirect_url": "/dotnet/ai/quickstarts/prompt-model"
1846
}
1947
]
20-
}
48+
}

Diff for: docs/ai/ai-extensions.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -99,5 +99,5 @@ For an end-to-end sample using `Microsoft.Extensions.AI`, see [eShopSupport](htt
9999

100100
## Next steps
101101

102-
- [Build an AI chat app with .NET](./quickstarts/get-started-openai.md)
103-
- [Quickstart - Summarize text using Azure AI chat app with .NET](./quickstarts/quickstart-openai-summarize-text.md)
102+
- [Build an AI chat app with .NET](/dotnet/ai/quickstarts/build-chat-app)
103+
- [Quickstart - Summarize text using Azure AI chat app with .NET](/dotnet/ai/quickstarts/prompt-model)

Diff for: docs/ai/dotnet-ai-ecosystem.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ Azure offers many other AI services to build specific application capabilities a
6363

6464
.NET apps can also connect to local AI models for many different development scenarios. [Semantic Kernel](https://github.com/microsoft/semantic-kernel) is the recommended tool to connect to local models using .NET. Semantic Kernel can connect to many different models hosted across a variety of platforms and abstracts away lower-level implementation details.
6565

66-
For example, you can use [Ollama](https://ollama.com/) to [connect to local AI models with .NET](quickstarts/quickstart-local-ai.md), including several small language models (SLMs) developed by Microsoft:
66+
For example, you can use [Ollama](https://ollama.com/) to [connect to local AI models with .NET](/dotnet/ai/quickstarts/chat-local-model), including several small language models (SLMs) developed by Microsoft:
6767

6868
| Model | Description |
6969
|---------------------|-----------------------------------------------------------|
@@ -84,7 +84,7 @@ This article summarized the tools and SDKs in the .NET ecosystem, with a focus o
8484
## Next steps
8585

8686
- [What is Semantic Kernel?](/semantic-kernel/overview/)
87-
- [Quickstart - Summarize text using Azure AI chat app with .NET](./quickstarts/quickstart-openai-summarize-text.md)
87+
- [Quickstart - Summarize text using Azure AI chat app with .NET](/dotnet/ai/quickstarts/prompt-model)
8888

8989
[phi3]: https://azure.microsoft.com/products/phi-3
9090
[orca]: https://www.microsoft.com/research/project/orca/

Diff for: docs/ai/get-started/dotnet-ai-overview.md

+6-6
Original file line numberDiff line numberDiff line change
@@ -36,16 +36,16 @@ We recommend the following sequence of tutorials and articles for an introductio
3636

3737
| Scenario | Tutorial |
3838
|----------|----------|
39-
| Create a chat application | [Build an Azure AI chat app with .NET](../quickstarts/get-started-openai.md)|
40-
| Summarize text | [Summarize text using Azure AI chat app with .NET](../quickstarts/quickstart-openai-summarize-text.md) |
41-
| Chat with your data | [Get insight about your data from an .NET Azure AI chat app](../quickstarts/quickstart-ai-chat-with-data.md) |
42-
| Call .NET functions with AI | [Extend Azure AI using tools and execute a local function with .NET](../quickstarts/quickstart-azure-openai-tool.md) |
43-
| Generate images | [Generate images using Azure AI with .NET](../quickstarts/quickstart-openai-generate-images.md) |
39+
| Create a chat application | [Build an Azure AI chat app with .NET](/dotnet/ai/quickstarts/build-chat-app)|
40+
| Summarize text | [Summarize text using Azure AI chat app with .NET](/dotnet/ai/quickstarts/prompt-model) |
41+
| Chat with your data | [Get insight about your data from a .NET Azure AI chat app](/dotnet/ai/quickstarts/build-vector-search-app) |
42+
| Call .NET functions with AI | [Extend Azure AI using tools and execute a local function with .NET](/dotnet/ai/quickstarts/use-function-calling) |
43+
| Generate images | [Generate images using Azure AI with .NET](/dotnet/ai/quickstarts/generate-images) |
4444
| Train your own model |[ML.NET tutorial](https://dotnet.microsoft.com/learn/ml-dotnet/get-started-tutorial/intro) |
4545

4646
Browse the table of contents to learn more about the core concepts, starting with [How generative AI and LLMs work](../conceptual/how-genai-and-llms-work.md).
4747

4848
## Next steps
4949

50-
- [Quickstart: Build an Azure AI chat app with .NET](../quickstarts/get-started-openai.md)
50+
- [Quickstart: Build an Azure AI chat app with .NET](/dotnet/ai/quickstarts/build-chat-app)
5151
- [Video series: Machine Learning and AI with .NET](/shows/machine-learning-and-ai-with-dotnet-for-beginners)

Diff for: docs/ai/how-to/content-filtering.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -49,4 +49,4 @@ The response was filtered due to the prompt triggering Azure OpenAI's content ma
4949

5050
* [Create and assign a content filter](/azure/ai-services/openai/how-to/content-filters)
5151
* [Content Filtering concepts](/azure/ai-services/openai/concepts/content-filter)
52-
* [Create a chat app](../quickstarts/quickstart-openai-summarize-text.md)
52+
* [Create a chat app](/dotnet/ai/quickstarts/prompt-model)

Diff for: docs/ai/index.yml

+2
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,8 @@ landingContent:
4949
url: conceptual/vector-databases.md
5050
- text: Prompt engineering
5151
url: conceptual/prompt-engineering-dotnet.md
52+
- text: Evaluation libraries
53+
url: conceptual/evaluation-libraries.md
5254

5355
# Card (Optional; Remove if not applicable.)
5456
- title: Common tasks

Diff for: docs/ai/quickstarts/get-started-openai.md renamed to docs/ai/quickstarts/build-chat-app.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -153,5 +153,5 @@ azd down
153153

154154
## Next steps
155155

156-
- [Quickstart - Chat with a local AI model](quickstart-local-ai.md)
157-
- [Generate images using AI with .NET](quickstart-openai-generate-images.md)
156+
- [Quickstart - Chat with a local AI model](/dotnet/ai/quickstarts/chat-local-model)
157+
- [Generate images using AI with .NET](/dotnet/ai/quickstarts/generate-images)

Diff for: docs/ai/quickstarts/quickstart-ai-chat-with-data.md renamed to docs/ai/quickstarts/build-vector-search-app.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -205,5 +205,5 @@ azd down
205205

206206
## Next steps
207207

208-
- [Quickstart - Chat with a local AI model](quickstart-local-ai.md)
209-
- [Generate images using AI with .NET](quickstart-openai-generate-images.md)
208+
- [Quickstart - Chat with a local AI model](/dotnet/ai/quickstarts/chat-local-model)
209+
- [Generate images using AI with .NET](/dotnet/ai/quickstarts/generate-images)

Diff for: docs/ai/quickstarts/quickstart-local-ai.md renamed to docs/ai/quickstarts/chat-local-model.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ In this quickstart, you learn how to create a conversational .NET console chat a
2020

2121
## Run the local AI model
2222

23-
Complete the following steps to configure and run a local AI Model on your device. Many different AI models are available to run locally and are trained for different tasks, such as generating code, analyzing images, generative chat, or creating embeddings. For this quickstart, you'll use the general purpose `phi3:mini` model, which is a small but capable generative AI created by Microsoft.
23+
Complete the following steps to configure and run a local AI model on your device. Many different AI models are available to run locally and are trained for different tasks, such as generating code, analyzing images, generative chat, or creating embeddings. For this quickstart, you'll use the general purpose `phi3:mini` model, which is a small but capable generative AI created by Microsoft.
2424

2525
1. Open a terminal window and verify that Ollama is available on your device:
2626

@@ -54,7 +54,7 @@ Complete the following steps to configure and run a local AI Model on your devic
5454

5555
## Create the .NET app
5656

57-
Complete the following steps to create a .NET console app that will connect to your local `phi3:mini` AI model:
57+
Complete the following steps to create a .NET console app that connects to your local `phi3:mini` AI model.
5858

5959
1. In a terminal window, navigate to an empty directory on your device and create a new app with the `dotnet new` command:
6060

Diff for: docs/ai/quickstarts/evaluate-ai-response.md

+116
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,116 @@
1+
---
2+
title: Quickstart - Evaluate a model's response
3+
description: Learn how to create an MSTest app to evaluate the AI chat response of a language model.
4+
ms.date: 02/25/2025
5+
ms.topic: quickstart
6+
ms.custom: devx-track-dotnet, devx-track-dotnet-ai
7+
---
8+
9+
# Evaluate a model's response
10+
11+
In this quickstart, you create an MSTest app to evaluate the chat response of a model. The test app uses the [Microsoft.Extensions.AI.Evaluation](https://www.nuget.org/packages/Microsoft.Extensions.AI.Evaluation) libraries.
12+
13+
## Prerequisites
14+
15+
- [Install .NET 8.0](https://dotnet.microsoft.com/download) or a later version
16+
- [Install Ollama](https://ollama.com/) locally on your machine
17+
- [Visual Studio Code](https://code.visualstudio.com/) (optional)
18+
19+
## Run the local AI model
20+
21+
Complete the following steps to configure and run a local AI model on your device. For this quickstart, you'll use the general purpose `phi3:mini` model, which is a small but capable generative AI created by Microsoft.
22+
23+
1. Open a terminal window and verify that Ollama is available on your device:
24+
25+
```bash
26+
ollama
27+
```
28+
29+
If Ollama is available, it displays a list of available commands.
30+
31+
1. Start Ollama:
32+
33+
```bash
34+
ollama serve
35+
```
36+
37+
If Ollama starts successfully, it runs as a server and awaits incoming requests.
38+
39+
1. Pull the `phi3:mini` model from the Ollama registry and wait for it to download:
40+
41+
```bash
42+
ollama pull phi3:mini
43+
```
44+
45+
1. After the download completes, run the model:
46+
47+
```bash
48+
ollama run phi3:mini
49+
```
50+
51+
Ollama starts the `phi3:mini` model and provides a prompt for you to interact with it.
52+
53+
## Create the test app
54+
55+
Complete the following steps to create an MSTest project that connects to your local `phi3:mini` AI model.
56+
57+
1. In a terminal window, navigate to the directory where you want to create your app, and create a new MSTest app with the `dotnet new` command:
58+
59+
```dotnetcli
60+
dotnet new mstest -o TestAI
61+
```
62+
63+
1. Navigate to the `TestAI` directory, and add the necessary packages to your app:
64+
65+
```dotnetcli
66+
dotnet add package Microsoft.Extensions.AI.Ollama --prerelease
67+
dotnet add package Microsoft.Extensions.AI.Abstractions --prerelease
68+
dotnet add package Microsoft.Extensions.AI.Evaluation --prerelease
69+
dotnet add package Microsoft.Extensions.AI.Evaluation.Quality --prerelease
70+
```
71+
72+
1. Open the new app in your editor of choice, such as Visual Studio Code.
73+
74+
```dotnetcli
75+
code .
76+
```
77+
78+
## Add the test app code
79+
80+
1. Rename the file *Test1.cs* to *MyTests.cs*, and then open the file and rename the class to `MyTests`.
81+
1. Add the private <xref:Microsoft.Extensions.AI.Evaluation.ChatConfiguration> and chat message and response members to the `MyTests` class. The `s_messages` field is a list that contains two <xref:Microsoft.Extensions.AI.ChatMessage> objects&mdash;one instructs the behavior of the chat bot, and the other is the question from the user.
82+
83+
:::code language="csharp" source="./snippets/evaluate-ai-responses/MyTests.cs" id="PrivateMembers":::
84+
85+
1. Add the `InitializeAsync` method to the `MyTests` class.
86+
87+
:::code language="csharp" source="./snippets/evaluate-ai-responses/MyTests.cs" id="Initialize":::
88+
89+
This method accomplishes the following tasks:
90+
91+
- Sets up the <xref:Microsoft.Extensions.AI.Evaluation.ChatConfiguration>.
92+
- Sets the <xref:Microsoft.Extensions.AI.ChatOptions>, including the <xref:Microsoft.Extensions.AI.ChatOptions.Temperature> and the <xref:Microsoft.Extensions.AI.ChatOptions.ResponseFormat>.
93+
- Fetches the response to be evaluated by calling <xref:Microsoft.Extensions.AI.IChatClient.GetResponseAsync(System.Collections.Generic.IList{Microsoft.Extensions.AI.ChatMessage},Microsoft.Extensions.AI.ChatOptions,System.Threading.CancellationToken)>, and stores it in a static variable.
94+
95+
1. Add the `GetOllamaChatConfiguration` method, which creates the <xref:Microsoft.Extensions.AI.IChatClient> that the evaluator uses to communicate with the model.
96+
97+
:::code language="csharp" source="./snippets/evaluate-ai-responses/MyTests.cs" id="GetChatConfig":::
98+
99+
1. Add a test method to evaluate the model's response.
100+
101+
:::code language="csharp" source="./snippets/evaluate-ai-responses/MyTests.cs" id="TestCoherence":::
102+
103+
This method does the following:
104+
105+
- Invokes the <xref:Microsoft.Extensions.AI.Evaluation.Quality.CoherenceEvaluator> to evaluate the *coherence* of the response. The <xref:Microsoft.Extensions.AI.Evaluation.IEvaluator.EvaluateAsync(System.Collections.Generic.IEnumerable{Microsoft.Extensions.AI.ChatMessage},Microsoft.Extensions.AI.ChatMessage,Microsoft.Extensions.AI.Evaluation.ChatConfiguration,System.Collections.Generic.IEnumerable{Microsoft.Extensions.AI.Evaluation.EvaluationContext},System.Threading.CancellationToken)> method returns an <xref:Microsoft.Extensions.AI.Evaluation.EvaluationResult> that contains a <xref:Microsoft.Extensions.AI.Evaluation.NumericMetric>. A `NumericMetric` contains a numeric value that's typically used to represent numeric scores that fall within a well-defined range.
106+
- Retrieves the coherence score from the <xref:Microsoft.Extensions.AI.Evaluation.EvaluationResult>.
107+
- Validates the *default interpretation* for the returned coherence metric. Evaluators can include a default interpretation for the metrics they return. You can also change the default interpretation to suit your specific requirements, if needed.
108+
- Validates that no diagnostics are present on the returned coherence metric. Evaluators can include diagnostics on the metrics they return to indicate errors, warnings, or other exceptional conditions encountered during evaluation.
109+
110+
## Run the test/evaluation
111+
112+
Run the test using your preferred test workflow, for example, by using the CLI command `dotnet test` or through [Test Explorer](/visualstudio/test/run-unit-tests-with-test-explorer).
113+
114+
## Next steps
115+
116+
Next, try evaluating against different models to see if the results change. Then, check out the extensive examples in the [dotnet/ai-samples repo](https://github.com/dotnet/ai-samples/blob/main/src/microsoft-extensions-ai-evaluation/api/) to see how to invoke multiple evaluators, add additional context, invoke a custom evaluator, attach diagnostics, or change the default interpretation of metrics.

Diff for: docs/ai/quickstarts/quickstart-openai-generate-images.md renamed to docs/ai/quickstarts/generate-images.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -144,5 +144,5 @@ azd down
144144

145145
## Next steps
146146

147-
- [Quickstart - Build an AI chat app with .NET](get-started-openai.md)
147+
- [Quickstart - Build an AI chat app with .NET](/dotnet/ai/quickstarts/build-chat-app)
148148
- [Generate text and conversations with .NET and Azure OpenAI Completions](/training/modules/open-ai-dotnet-text-completions/)

Diff for: docs/ai/quickstarts/includes/clone-sample-repo.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ ms.topic: include
77

88
## Clone the sample repository
99

10-
You can create your own app using the steps in the sections ahead, or you can clone the GitHub repository that contains the completed sample apps for all of the quickstarts. If you plan to use Azure OpenAI, the sample repo is also structured as an Azure Developer CLI template that can provision an Azure OpenAI resource for you.
10+
You can create your own app using the steps in the sections ahead, or you can clone the GitHub repository that contains the completed sample apps for all of the quickstarts. If you plan to use Azure OpenAI, the sample repo is also structured as an Azure Developer CLI template that can provision an Azure OpenAI resource for you.
1111

1212
```bash
1313
git clone https://github.com/dotnet/ai-samples.git

Diff for: docs/ai/quickstarts/quickstart-openai-summarize-text.md renamed to docs/ai/quickstarts/prompt-model.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -153,5 +153,5 @@ azd down
153153

154154
## Next steps
155155

156-
- [Quickstart - Build an AI chat app with .NET](get-started-openai.md)
156+
- [Quickstart - Build an AI chat app with .NET](/dotnet/ai/quickstarts/build-chat-app)
157157
- [Generate text and conversations with .NET and Azure OpenAI Completions](/training/modules/open-ai-dotnet-text-completions/)
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
1+
using Microsoft.Extensions.AI;
2+
using Microsoft.Extensions.AI.Evaluation;
3+
using Microsoft.Extensions.AI.Evaluation.Quality;
4+
5+
namespace TestAI;
6+
7+
[TestClass]
8+
public sealed class MyTests
9+
{
10+
// <SnippetPrivateMembers>
11+
private static ChatConfiguration? s_chatConfiguration;
12+
private static IList<ChatMessage> s_messages = [
13+
new ChatMessage(
14+
ChatRole.System,
15+
"""
16+
You're an AI assistant that can answer questions related to astronomy.
17+
Keep your responses concise and try to stay under 100 words.
18+
Use the imperial measurement system for all measurements in your response.
19+
"""),
20+
new ChatMessage(
21+
ChatRole.User,
22+
"How far is the planet Venus from Earth at its closest and furthest points?")];
23+
private static ChatMessage s_response = new();
24+
// </SnippetPrivateMembers>
25+
26+
// <SnippetInitialize>
27+
[ClassInitialize]
28+
public static async Task InitializeAsync(TestContext _)
29+
{
30+
/// Set up the <see cref="ChatConfiguration"/>,
31+
// which includes the <see cref="IChatClient"/> that the
32+
/// evaluator uses to communicate with the model.
33+
s_chatConfiguration = GetOllamaChatConfiguration();
34+
35+
var chatOptions =
36+
new ChatOptions
37+
{
38+
Temperature = 0.0f,
39+
ResponseFormat = ChatResponseFormat.Text
40+
};
41+
42+
/// Fetch the response to be evaluated
43+
// and store it in a static variable.
44+
ChatResponse response = await s_chatConfiguration.ChatClient.GetResponseAsync(s_messages, chatOptions);
45+
s_response = response.Message;
46+
}
47+
// </SnippetInitialize>
48+
49+
// <SnippetGetChatConfig>
50+
private static ChatConfiguration GetOllamaChatConfiguration()
51+
{
52+
/// Get a chat client for the Ollama endpoint.
53+
IChatClient client =
54+
new OllamaChatClient(
55+
new Uri("http://localhost:11434"),
56+
modelId: "phi3:mini");
57+
58+
return new ChatConfiguration(client);
59+
}
60+
// </SnippetGetChatConfig>
61+
62+
// <SnippetTestCoherence>
63+
[TestMethod]
64+
public async Task TestCoherence()
65+
{
66+
IEvaluator coherenceEvaluator = new CoherenceEvaluator();
67+
EvaluationResult result = await coherenceEvaluator.EvaluateAsync(
68+
s_messages,
69+
s_response,
70+
s_chatConfiguration);
71+
72+
/// Retrieve the score for coherence from the <see cref="EvaluationResult"/>.
73+
NumericMetric coherence = result.Get<NumericMetric>(CoherenceEvaluator.CoherenceMetricName);
74+
75+
/// Validate the default interpretation
76+
// for the returned coherence metric.
77+
Assert.IsFalse(coherence.Interpretation!.Failed);
78+
Assert.IsTrue(coherence.Interpretation.Rating is EvaluationRating.Good or EvaluationRating.Exceptional);
79+
80+
// Validate that no diagnostics are present
81+
// on the returned coherence metric.
82+
Assert.IsFalse(coherence.ContainsDiagnostics());
83+
}
84+
// </SnippetTestCoherence>
85+
}

0 commit comments

Comments
 (0)