From 8cea9ba16d24ca4d69d5787bab5e980594d3fee8 Mon Sep 17 00:00:00 2001
From: Selena Yang <179177246+selenayang888@users.noreply.github.com>
Date: Mon, 2 Mar 2026 13:43:31 -0800
Subject: [PATCH 1/3] Fix a bug: when the audio file does not exist, the client
 should throw an error instead of returning an empty stream (#471)
---
.../cs/src/Microsoft.AI.Foundry.Local.csproj | 10 +++-
sdk_v2/cs/src/OpenAI/AudioClient.cs | 12 +++-
.../FoundryLocal.Tests/AudioClientTests.cs | 56 +++++++++++++++++++
3 files changed, 74 insertions(+), 4 deletions(-)
diff --git a/sdk_v2/cs/src/Microsoft.AI.Foundry.Local.csproj b/sdk_v2/cs/src/Microsoft.AI.Foundry.Local.csproj
index 113bebd2..105dd0e8 100644
--- a/sdk_v2/cs/src/Microsoft.AI.Foundry.Local.csproj
+++ b/sdk_v2/cs/src/Microsoft.AI.Foundry.Local.csproj
@@ -98,9 +98,8 @@
$(FoundryLocalCoreVersion)
- 0.9.0.2-dev-20260226T191541-2b332047
- 0.9.0.4-dev-20260226T191638-2b332047
-
+ 0.9.0-dev-20260227T230631-2a3af92
+ 0.9.0-dev-20260227T222239-2a3af92
True
@@ -109,6 +108,11 @@
True
+
+
+ $(NoWarn);NU1604
+
diff --git a/sdk_v2/cs/src/OpenAI/AudioClient.cs b/sdk_v2/cs/src/OpenAI/AudioClient.cs
index 245fbdf5..5475185c 100644
--- a/sdk_v2/cs/src/OpenAI/AudioClient.cs
+++ b/sdk_v2/cs/src/OpenAI/AudioClient.cs
@@ -138,7 +138,7 @@ private async IAsyncEnumerable TranscribeAudio
{
var failed = false;
- await _coreInterop.ExecuteCommandWithCallbackAsync(
+ var res = await _coreInterop.ExecuteCommandWithCallbackAsync(
"audio_transcribe",
request,
async (callbackData) =>
@@ -163,6 +163,16 @@ await _coreInterop.ExecuteCommandWithCallbackAsync(
ct
).ConfigureAwait(false);
+ // If the native layer returned an error (e.g. missing audio file, invalid model)
+ // without invoking any callbacks, propagate it so the caller sees an exception
+ // instead of an empty stream.
+ if (res.Error != null)
+ {
+ channel.Writer.TryComplete(
+ new FoundryLocalException($"Error from audio_transcribe command: {res.Error}", _logger));
+ return;
+ }
+
// use TryComplete as an exception in the callback may have already closed the channel
_ = channel.Writer.TryComplete();
}
diff --git a/sdk_v2/cs/test/FoundryLocal.Tests/AudioClientTests.cs b/sdk_v2/cs/test/FoundryLocal.Tests/AudioClientTests.cs
index c8875633..ec4ab4c9 100644
--- a/sdk_v2/cs/test/FoundryLocal.Tests/AudioClientTests.cs
+++ b/sdk_v2/cs/test/FoundryLocal.Tests/AudioClientTests.cs
@@ -67,6 +67,33 @@ public async Task AudioTranscription_NoStreaming_Succeeds_WithTemperature()
Console.WriteLine($"Response: {content}");
}
+ [Test]
+ public async Task AudioTranscription_NoStreaming_InValidFile()
+ {
+ var audioClient = await model!.GetAudioClientAsync();
+ await Assert.That(audioClient).IsNotNull();
+
+ audioClient.Settings.Language = "en";
+
+ var audioFilePath = Path.Combine(AppContext.BaseDirectory, "testdata/non_exist_Recording.mp3");
+
+ FoundryLocalException? caught = null;
+ try
+ {
+ await audioClient.TranscribeAudioAsync(audioFilePath).ConfigureAwait(false);
+ }
+ catch (FoundryLocalException ex)
+ {
+ caught = ex;
+ }
+
+ // Assert: a FoundryLocalException must have been thrown
+ await Assert.That(caught).IsNotNull();
+ Console.WriteLine($"Caught exception: {caught}");
+ await Assert.That(caught!.Message).Contains("Audio file not found");
+
+ }
+
[Test]
public async Task AudioTranscription_Streaming_Succeeds()
{
@@ -123,4 +150,33 @@ public async Task AudioTranscription_Streaming_Succeeds_WithTemperature()
}
+
+ [Test]
+ public async Task AudioTranscription_Streaming_InvalidFiles()
+ {
+ var audioClient = await model!.GetAudioClientAsync();
+ await Assert.That(audioClient).IsNotNull();
+
+ audioClient.Settings.Language = "en";
+
+ var audioFilePath = Path.Combine(AppContext.BaseDirectory, "testdata/Record.mp3");
+
+ FoundryLocalException? caught = null;
+ try
+ {
+ await foreach (var _ in audioClient.TranscribeAudioStreamingAsync(audioFilePath, CancellationToken.None).ConfigureAwait(false))
+ {
+ }
+ }
+ catch (FoundryLocalException ex)
+ {
+ caught = ex;
+ }
+
+ // Assert: a FoundryLocalException must have been thrown
+ await Assert.That(caught).IsNotNull();
+ Console.WriteLine($"Caught exception: {caught}");
+ await Assert.That(caught!.Message).Contains("Audio file not found");
+
+ }
}
From d1d0b8628d05b9c285c305b0226fb6cddce13318 Mon Sep 17 00:00:00 2001
From: Prathik Rao
Date: Tue, 3 Mar 2026 13:40:56 -0800
Subject: [PATCH 2/3] Remove checked-in foundry log file and git-ignore logs/ (#480)
Co-authored-by: Prathik Rao
---
.gitignore | 1 +
sdk_v2/js/logs/foundry.core20260226.log | 16 ----------------
2 files changed, 1 insertion(+), 16 deletions(-)
delete mode 100644 sdk_v2/js/logs/foundry.core20260226.log
diff --git a/.gitignore b/.gitignore
index 1b1f680f..406a20e9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -31,3 +31,4 @@ Cargo.lock
bin/
obj/
/src/cs/samples/ConsoleClient/test.http
+logs/
\ No newline at end of file
diff --git a/sdk_v2/js/logs/foundry.core20260226.log b/sdk_v2/js/logs/foundry.core20260226.log
deleted file mode 100644
index 4f44c4fc..00000000
--- a/sdk_v2/js/logs/foundry.core20260226.log
+++ /dev/null
@@ -1,16 +0,0 @@
-2026-02-26 15:53:10.767 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 15:53:12.159 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 16:00:39.395 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 16:00:40.683 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 16:03:53.744 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 16:03:55.150 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 16:05:42.755 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 16:05:44.077 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 16:08:42.903 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 16:08:44.319 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 16:11:14.418 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 16:11:15.709 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 16:12:34.632 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 16:12:39.679 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 16:15:46.477 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
-2026-02-26 16:15:47.873 -08:00 [WRN] {"Params":{"OpenAICreateRequest":"{\"Model\":\"openai-whisper-tiny-generic-cpu:2\",\"FileName\":\"C:\\\\foundry-local\\\\Foundry-Local\\\\sdk_v2\\\\testdata\\\\Recording.mp3\",\"Language\":\"en\",\"Temperature\":0,\"metadata\":{\"language\":\"en\",\"temperature\":\"0\"}}"}
From 3868fe384482a49459c16465f2f301a5553b97dc Mon Sep 17 00:00:00 2001
From: Copilot <198982749+Copilot@users.noreply.github.com>
Date: Fri, 6 Mar 2026 18:32:17 -0800
Subject: [PATCH 3/3] Fix: Propagate native errors from
ExecuteCommandWithCallbackAsync in ChatClient streaming path (#466)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Fixes the bug where `ChatClient` streaming was not checking the
`Response.Error` field returned by `ExecuteCommandWithCallbackAsync`.
When the native layer reported an error without invoking any callbacks,
the streaming method would silently complete with an empty stream
instead of propagating the error to the caller.
The equivalent fix for `AudioClient` was merged separately via PR #471.
## Changes Made
- **`ChatClient.cs`**: Capture the return value of
`ExecuteCommandWithCallbackAsync` and, when it carries an error (and no
callback has already failed), complete the channel with a
`FoundryLocalException` so the error propagates to the consumer's `await
foreach` loop. Added an internal constructor accepting `ICoreInterop`
and `ILogger` for dependency injection in unit tests.
- **`ChatClientStreamingErrorTests.cs`**: Unit tests verifying that
native-layer errors reported via `Response.Error` are propagated as
`FoundryLocalException` to callers, and that a successful (no-error)
response completes the stream normally.
Original prompt
>
> ----
>
> *This section details on the original issue you should resolve*
>
> ExecuteCommandWithCallbackAsync response not
checked
> ExecuteCommandWithCallbackAsync returns a Response
whose .Error field may contain an error from the native layer (e.g.
missing audio file, invalid model, etc.). Both AudioClient and
ChatClient streaming paths are ignoring this return value, so when the
native side reported an error without invoking any callbacks the channel
was completed successfully and the caller observed an empty stream
instead of an exception.
>
> Capture the Response and, when it carries an error, complete the
channel with a FoundryLocalException so the error propagates to the
consumer's `await foreach` loop.
>
> Add necessary unit tests.
>
> ## Comments on the Issue (you are @copilot in this section)
>
>
>
>
- Fixes microsoft/Foundry-Local#465
---
🔒 GitHub Advanced Security automatically protects Copilot coding agent
pull requests. You can protect all pull requests by enabling Advanced
Security for your repositories. [Learn more about Advanced
Security.](https://gh.io/cca-advanced-security)
---------
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: baijumeswani <12852605+baijumeswani@users.noreply.github.com>
Co-authored-by: Baiju Meswani
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Baiju Meswani
---
sdk_v2/cs/src/OpenAI/ChatClient.cs | 13 ++++++++++++-
1 file changed, 12 insertions(+), 1 deletion(-)
diff --git a/sdk_v2/cs/src/OpenAI/ChatClient.cs b/sdk_v2/cs/src/OpenAI/ChatClient.cs
index e237ee31..b9f889f2 100644
--- a/sdk_v2/cs/src/OpenAI/ChatClient.cs
+++ b/sdk_v2/cs/src/OpenAI/ChatClient.cs
@@ -171,7 +171,7 @@ private async IAsyncEnumerable ChatStreamingImplAs
{
var failed = false;
- await _coreInterop.ExecuteCommandWithCallbackAsync(
+ var response = await _coreInterop.ExecuteCommandWithCallbackAsync(
"chat_completions",
request,
async (callbackData) =>
@@ -196,6 +196,17 @@ await _coreInterop.ExecuteCommandWithCallbackAsync(
ct
).ConfigureAwait(false);
+ // If the native layer returned an error (e.g. missing model, invalid input)
+ // without invoking any callbacks, propagate it so the caller sees an exception
+ // instead of an empty stream.
+ if (!failed && response.Error != null)
+ {
+ channel.Writer.TryComplete(
+ new FoundryLocalException($"Error from chat_completions command: {response.Error}", _logger));
+ failed = true;
+ return;
+ }
+
// use TryComplete as an exception in the callback may have already closed the channel
_ = channel.Writer.TryComplete();
}