11using Microsoft . ClientModel . TestFramework ;
2+ using Microsoft . ClientModel . TestFramework . Mocks ;
23using Microsoft . VisualStudio . TestPlatform . ObjectModel ;
34using NUnit . Framework ;
45using OpenAI . Chat ;
1516using System . Net ;
1617using System . Text ;
1718using System . Text . Json ;
19+ using System . Threading ;
1820using System . Threading . Tasks ;
1921using static OpenAI . Tests . Telemetry . TestMeterListener ;
2022using static OpenAI . Tests . TestHelpers ;
@@ -877,6 +879,30 @@ public async Task GetChatCompletionMessagesHandlesNonExistentCompletion()
877879 }
878880 }
879881
[Test]
public void GetChatCompletionMessagesWithInvalidParameters()
{
    ChatClient client = CreateProxyFromClient(GetTestClient<ChatClient>(scenario: TestScenario.Chat));

    // Enumerating with an invalid completion ID should fail before any item is yielded.
    async Task EnumerateAsync(string completionId)
    {
        await foreach (var message in client.GetChatCompletionMessagesAsync(completionId))
        {
            // Should not reach here
        }
    }

    // A null completion ID surfaces as ArgumentNullException.
    Assert.ThrowsAsync<ArgumentNullException>(() => EnumerateAsync(null));

    // An empty completion ID surfaces as ArgumentException.
    Assert.ThrowsAsync<ArgumentException>(() => EnumerateAsync(""));
}
905+
880906 [ Test ]
881907 public async Task ChatServiceTierWorks ( )
882908 {
@@ -894,6 +920,142 @@ public async Task ChatServiceTierWorks()
894920 Assert . That ( completion . ServiceTier , Is . EqualTo ( ChatServiceTier . Default ) ) ;
895921 }
896922
[SyncOnly]
[Test]
public void StreamingChatCanBeCancelled()
{
    // Mock server-sent-events stream: two chunks followed by the [DONE] sentinel.
    MockPipelineResponse response = new MockPipelineResponse(200).WithContent("""
        data: {"id":"chatcmpl-A7mKGugwaczn3YyrJLlZY6CM0Wlkr","object":"chat.completion.chunk","created":1726417424,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_483d39d857","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}

        data: {"id":"chatcmpl-A7mKGugwaczn3YyrJLlZY6CM0Wlkr","object":"chat.completion.chunk","created":1726417424,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_483d39d857","choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}],"usage":null}

        data: [DONE]
        """);

    OpenAIClientOptions options = new OpenAIClientOptions()
    {
        Transport = new MockPipelineTransport(_ => response)
        {
            // Consistent with the sibling streaming tests: under [SyncOnly] the
            // mock transport should observe the synchronous pipeline.
            ExpectSyncPipeline = !IsAsync
        }
    };

    CancellationTokenSource cancellationTokenSource = new();
    cancellationTokenSource.CancelAfter(1000);

    ChatClient client = CreateProxyFromClient(GetTestClient<ChatClient>(TestScenario.Chat, options: options));
    IEnumerable<ChatMessage> messages = [new UserChatMessage("What are the best pizza toppings? Give me a breakdown on the reasons.")];

    CollectionResult<StreamingChatCompletionUpdate> streamingResult = client.CompleteChatStreaming(messages, cancellationToken: cancellationTokenSource.Token);
    IEnumerator<StreamingChatCompletionUpdate> enumerator = streamingResult.GetEnumerator();

    // The first update is consumed before the token is cancelled.
    enumerator.MoveNext();
    StreamingChatCompletionUpdate firstUpdate = enumerator.Current;

    Assert.That(firstUpdate, Is.Not.Null);
    Assert.That(cancellationTokenSource.IsCancellationRequested, Is.False);

    // Wait deterministically for the CancelAfter timer to fire. The previous
    // Thread.Sleep(1000) raced with the 1000 ms cancellation timer and made the
    // test flaky; the generous timeout here only bounds a broken timer.
    bool cancelled = cancellationTokenSource.Token.WaitHandle.WaitOne(TimeSpan.FromSeconds(10));
    Assert.That(cancelled, Is.True);
    Assert.That(cancellationTokenSource.Token.IsCancellationRequested, Is.True);

    // Advancing the enumerator after cancellation must throw. Precondition
    // assertions are kept outside the delegate so a failed precondition is
    // reported as an assertion failure rather than a wrong exception type.
    Assert.Throws<OperationCanceledException>(() =>
    {
        enumerator.MoveNext();
        enumerator.MoveNext();
    });
}
966+
[AsyncOnly]
[Test]
public async Task StreamingChatCanBeCancelledAsync()
{
    // Mock server-sent-events stream: two chunks followed by the [DONE] sentinel.
    MockPipelineResponse response = new MockPipelineResponse(200).WithContent("""
        data: {"id":"chatcmpl-A7mKGugwaczn3YyrJLlZY6CM0Wlkr","object":"chat.completion.chunk","created":1726417424,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_483d39d857","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}

        data: {"id":"chatcmpl-A7mKGugwaczn3YyrJLlZY6CM0Wlkr","object":"chat.completion.chunk","created":1726417424,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_483d39d857","choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}],"usage":null}

        data: [DONE]
        """);

    OpenAIClientOptions options = new OpenAIClientOptions()
    {
        Transport = new MockPipelineTransport(_ => response)
        {
            ExpectSyncPipeline = !IsAsync
        }
    };

    CancellationTokenSource cancellationTokenSource = new();
    cancellationTokenSource.CancelAfter(1000);

    ChatClient client = GetTestClient<ChatClient>(TestScenario.Chat, options: options);
    IEnumerable<ChatMessage> messages = [new UserChatMessage("What are the best pizza toppings? Give me a breakdown on the reasons.")];

    AsyncCollectionResult<StreamingChatCompletionUpdate> streamingResult = client.CompleteChatStreamingAsync(messages, cancellationToken: cancellationTokenSource.Token);
    IAsyncEnumerator<StreamingChatCompletionUpdate> enumerator = streamingResult.GetAsyncEnumerator();

    // The first update is consumed before the token is cancelled.
    await enumerator.MoveNextAsync();
    StreamingChatCompletionUpdate firstUpdate = enumerator.Current;

    Assert.That(firstUpdate, Is.Not.Null);
    Assert.That(cancellationTokenSource.IsCancellationRequested, Is.False);

    // Poll until the CancelAfter timer fires. The previous Task.Delay(1000)
    // raced with the 1000 ms cancellation timer and made the test flaky; the
    // deadline only bounds a broken timer.
    DateTime deadline = DateTime.UtcNow + TimeSpan.FromSeconds(10);
    while (!cancellationTokenSource.IsCancellationRequested && DateTime.UtcNow < deadline)
    {
        await Task.Delay(50);
    }
    Assert.That(cancellationTokenSource.IsCancellationRequested, Is.True);
    Assert.That(cancellationTokenSource.Token.IsCancellationRequested, Is.True);

    // Advancing the enumerator after cancellation must throw. Precondition
    // assertions are kept outside the delegate so a failed precondition is
    // reported as an assertion failure rather than a wrong exception type.
    Assert.ThrowsAsync<OperationCanceledException>(async () =>
    {
        await enumerator.MoveNextAsync();
        await enumerator.MoveNextAsync();
    });
}
1013+
[Test]
public async Task CompleteChatStreamingClosesNetworkStream()
{
    // Mock server-sent-events stream: two chunks followed by the [DONE] sentinel.
    MockPipelineResponse response = new MockPipelineResponse(200).WithContent("""
        data: {"id":"chatcmpl-A7mKGugwaczn3YyrJLlZY6CM0Wlkr","object":"chat.completion.chunk","created":1726417424,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_483d39d857","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}

        data: {"id":"chatcmpl-A7mKGugwaczn3YyrJLlZY6CM0Wlkr","object":"chat.completion.chunk","created":1726417424,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_483d39d857","choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}],"usage":null}

        data: [DONE]
        """);

    OpenAIClientOptions options = new()
    {
        Transport = new MockPipelineTransport(_ => response)
        {
            ExpectSyncPipeline = !IsAsync
        }
    };

    ChatClient client = CreateProxyFromClient(GetTestClient<ChatClient>(TestScenario.Chat, options: options));
    IEnumerable<ChatMessage> messages = [new UserChatMessage("What are the best pizza toppings? Give me a breakdown on the reasons.")];

    AsyncCollectionResult<StreamingChatCompletionUpdate> streamingResult = client.CompleteChatStreamingAsync(messages);

    Assert.That(streamingResult, Is.InstanceOf<AsyncCollectionResult<StreamingChatCompletionUpdate>>());
    // The network response must remain open until enumeration completes.
    Assert.That(response.IsDisposed, Is.False);

    // Removed leftover debug instrumentation (Stopwatch, token-receipt timestamps,
    // Console.WriteLine) that was never asserted; count updates instead.
    int updateCount = 0;
    await foreach (StreamingChatCompletionUpdate chatUpdate in streamingResult)
    {
        updateCount++;
    }

    // Draining the stream should have produced updates and disposed the
    // underlying network response.
    Assert.That(updateCount, Is.GreaterThan(0));
    Assert.That(response.IsDisposed);
}
1058+
8971059 [ OneTimeTearDown ]
8981060 public void TearDown ( )
8991061 {
0 commit comments