diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs index 435caa3c425a..9302b75c39bf 100644 --- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs @@ -415,6 +415,66 @@ public async Task GetStreamingTextContentsWorksCorrectlyAsync() Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]); } + [Fact] + public async Task GetStreamingChatContentsWithAsynchronousFilterWorksCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_async_filter_response.txt"))); + + this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }); + + // Act & Assert + var enumerator = service.GetStreamingChatMessageContentsAsync("Prompt").GetAsyncEnumerator(); + +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
+ await enumerator.MoveNextAsync(); + var message = enumerator.Current; + + Assert.IsType<StreamingChatCompletionUpdate>(message.InnerContent); + var update = (StreamingChatCompletionUpdate)message.InnerContent; + var promptResults = update.GetContentFilterResultForPrompt(); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.Hate.Severity); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.Sexual.Severity); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.Violence.Severity); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.SelfHarm.Severity); + Assert.False(promptResults.Jailbreak.Detected); + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + Assert.IsType<StreamingChatCompletionUpdate>(message.InnerContent); + update = (StreamingChatCompletionUpdate)message.InnerContent; + + var filterResults = update.GetContentFilterResultForResponse(); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.Hate.Severity); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.Sexual.Severity); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.SelfHarm.Severity); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.Violence.Severity); + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + Assert.IsType<StreamingChatCompletionUpdate>(message.InnerContent); + update = (StreamingChatCompletionUpdate)message.InnerContent; + filterResults = update.GetContentFilterResultForResponse(); + Assert.False(filterResults.ProtectedMaterialCode.Detected); + Assert.False(filterResults.ProtectedMaterialText.Detected); +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
+ } + [Fact] public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() { diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_async_filter_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_async_filter_response.txt new file mode 100644 index 000000000000..078ad45af412 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_async_filter_response.txt @@ -0,0 +1,13 @@ +data: {"choices":[],"created":0,"id":"","model":"","object":"","prompt_filter_results":[{"prompt_index":0,"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"jailbreak":{"filtered":false,"detected":false},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}}}]} + +data: {"choices":[{"delta":{"content":"","role":"assistant"},"finish_reason":null,"index":0,"logprobs":null}],"created":1724860848,"id":"chatcmpl-123","model":"gpt-4o-2024-05-13","object":"chat.completion.chunk","system_fingerprint":"fp_abc28019ad"} + +data: {"choices":[{"delta":{"content":"Kindness"},"finish_reason":null,"index":0,"logprobs":null}],"created":1724860848,"id":"chatcmpl-123","model":"gpt-4o-2024-05-13","object":"chat.completion.chunk","system_fingerprint":"fp_abc28019ad"} + +data: {"choices":[{"delta":{},"finish_reason":"stop","index":0,"logprobs":null}],"created":1724860848,"id":"chatcmpl-123","model":"gpt-4o-2024-05-13","object":"chat.completion.chunk","system_fingerprint":"fp_abc28019ad"} + +data: 
{"choices":[{"content_filter_offsets":{"check_offset":1576,"start_offset":1576,"end_offset":2318},"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}},"finish_reason":null,"index":0}],"created":0,"id":"","model":"","object":""} + +data: {"choices":[{"content_filter_offsets":{"check_offset":1576,"start_offset":1576,"end_offset":2318},"content_filter_results":{"protected_material_code":{"filtered":false,"detected":false},"protected_material_text":{"filtered":false,"detected":false}},"finish_reason":null,"index":0}],"created":0,"id":"","model":"","object":""} + +data: [DONE] \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs index 6546bd291235..bcad35358b0d 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs @@ -416,24 +416,26 @@ internal async IAsyncEnumerable GetStreamingC var openAIStreamingChatMessageContent = new OpenAIStreamingChatMessageContent(chatCompletionUpdate, 0, targetModel, metadata); - foreach (var functionCallUpdate in chatCompletionUpdate.ToolCallUpdates) + if (openAIStreamingChatMessageContent.ToolCallUpdates is not null) { - // Using the code below to distinguish and skip non - function call related updates. - // The Kind property of updates can't be reliably used because it's only initialized for the first update. - if (string.IsNullOrEmpty(functionCallUpdate.Id) && - string.IsNullOrEmpty(functionCallUpdate.FunctionName) && - string.IsNullOrEmpty(functionCallUpdate.FunctionArgumentsUpdate)) + foreach (var functionCallUpdate in openAIStreamingChatMessageContent.ToolCallUpdates!) 
{ - continue; - } + // Using the code below to distinguish and skip non - function call related updates. + // The Kind property of updates can't be reliably used because it's only initialized for the first update. + if (string.IsNullOrEmpty(functionCallUpdate.Id) && + string.IsNullOrEmpty(functionCallUpdate.FunctionName) && + string.IsNullOrEmpty(functionCallUpdate.FunctionArgumentsUpdate)) + { + continue; + } - openAIStreamingChatMessageContent.Items.Add(new StreamingFunctionCallUpdateContent( - callId: functionCallUpdate.Id, - name: functionCallUpdate.FunctionName, - arguments: functionCallUpdate.FunctionArgumentsUpdate, - functionCallIndex: functionCallUpdate.Index)); + openAIStreamingChatMessageContent.Items.Add(new StreamingFunctionCallUpdateContent( + callId: functionCallUpdate.Id, + name: functionCallUpdate.FunctionName, + arguments: functionCallUpdate.FunctionArgumentsUpdate, + functionCallIndex: functionCallUpdate.Index)); + } } - streamedContents?.Add(openAIStreamingChatMessageContent); yield return openAIStreamingChatMessageContent; } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIStreamingChatMessageContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIStreamingChatMessageContent.cs index bd9ae55ce888..e83c16cdc31e 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIStreamingChatMessageContent.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIStreamingChatMessageContent.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; using System.Text; using Microsoft.SemanticKernel.ChatCompletion; @@ -33,7 +34,7 @@ internal OpenAIStreamingChatMessageContent( string modelId, IReadOnlyDictionary? metadata = null) : base( - chatUpdate.Role.HasValue ? 
new AuthorRole(chatUpdate.Role.Value.ToString()) : null, + null, null, chatUpdate, choiceIndex, @@ -41,9 +42,30 @@ internal OpenAIStreamingChatMessageContent( Encoding.UTF8, metadata) { - this.ToolCallUpdates = chatUpdate.ToolCallUpdates; - this.FinishReason = chatUpdate.FinishReason; - this.Items = CreateContentItems(chatUpdate.ContentUpdate); + try + { + this.FinishReason = chatUpdate.FinishReason; + + if (chatUpdate.Role.HasValue) + { + this.Role = new AuthorRole(chatUpdate.Role.ToString()!); + } + + if (chatUpdate.ToolCallUpdates is not null) + { + this.ToolCallUpdates = chatUpdate.ToolCallUpdates; + } + + if (chatUpdate.ContentUpdate is not null) + { + this.Items = CreateContentItems(chatUpdate.ContentUpdate); + } + } + catch (NullReferenceException) + { + // Temporary bugfix for: https://github.com/openai/openai-dotnet/issues/198 + // TODO: Remove this try-catch block once the bug is fixed. + } } ///