using System.Net;
using Chats.BE.Controllers.Users.Usages.Dtos;
using Chats.BE.Services.Models;
using Chats.BE.Services.Models.ChatServices.Anthropic;
using Chats.BE.Services.Models.Dtos;
using Chats.BE.Services.Models.Neutral;
using Chats.BE.UnitTest.ChatServices.Http;
using Chats.DB;
using Chats.DB.Enums;

namespace Chats.BE.UnitTest.ChatServices.Anthropic;
12+
/// <summary>
/// Tests for <see cref="DeepSeekAnthropicService"/> covering the Anthropic Messages
/// streaming protocol quirk where the final <c>message_delta</c> event omits usage
/// fields (input/cache tokens) that were reported earlier in <c>message_start</c>.
/// The service must carry those earlier values forward into the final usage segment.
/// </summary>
public class DeepSeekAnthropicServiceTests
{
    /// <summary>
    /// Builds an <see cref="IHttpClientFactory"/> whose client replays the given
    /// SSE chunks verbatim with an HTTP 200 response.
    /// </summary>
    /// <param name="chunks">Raw SSE event payloads, each terminated by a blank line ("\n\n").</param>
    private static IHttpClientFactory CreateMockHttpClientFactory(params string[] chunks)
    {
        return new FiddlerDumpHttpClientFactory([.. chunks], HttpStatusCode.OK);
    }

    /// <summary>
    /// Creates a minimal streamed <see cref="ChatRequest"/> targeting the
    /// DeepSeek Anthropic-compatible endpoint (deepseek-reasoner deployment).
    /// </summary>
    private static ChatRequest CreateRequest()
    {
        ModelKey modelKey = new()
        {
            Id = 1,
            Name = "TestKey",
            Secret = "test-api-key",
            Host = "https://api.deepseek.com/anthropic",
            ModelProviderId = (int)DBModelProvider.DeepSeek,
        };

        Model model = new()
        {
            Id = 1,
            Name = "Test Model",
            DeploymentName = "deepseek-reasoner",
            ModelKeyId = 1,
            ModelKey = modelKey,
            AllowStreaming = true,
            MaxResponseTokens = 2048,
            ApiTypeId = (byte)DBApiType.AnthropicMessages,
        };

        ChatConfig chatConfig = new()
        {
            Id = 1,
            ModelId = 1,
            Model = model,
        };

        return new ChatRequest
        {
            Messages = [NeutralMessage.FromUserText("hello")],
            ChatConfig = chatConfig,
            Source = UsageSource.Api,
            Streamed = true,
            EndUserId = "8",
        };
    }

    /// <summary>
    /// message_start reports input_tokens=36; the later message_delta only carries
    /// output_tokens. The final usage segment must still report the 36 input tokens
    /// (not reset them to 0) and finish with a Success reason.
    /// </summary>
    [Fact]
    public async Task ChatStreamed_MessageDeltaWithoutInputTokens_PreservesPreviousInputTokens()
    {
        IHttpClientFactory httpClientFactory = CreateMockHttpClientFactory(
            "data: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_1\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"deepseek-reasoner\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":36,\"output_tokens\":0}}}\n\n",
            "data: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"}}\n\n",
            "data: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Hello\"}}\n\n",
            "data: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"output_tokens\":151}}\n\n",
            "data: {\"type\":\"message_stop\"}\n\n");
        DeepSeekAnthropicService service = new(httpClientFactory);
        ChatRequest request = CreateRequest();

        List<ChatSegment> segments = [];
        await foreach (ChatSegment segment in service.ChatStreamed(request, CancellationToken.None))
        {
            segments.Add(segment);
        }

        List<UsageChatSegment> usageSegments = segments.OfType<UsageChatSegment>().ToList();
        Assert.Equal(2, usageSegments.Count);
        // First usage segment comes from message_start.
        Assert.Equal(36, usageSegments[0].Usage.InputTokens);
        Assert.Equal(0, usageSegments[0].Usage.OutputTokens);
        // Final segment from message_delta must preserve input_tokens=36 even though
        // the event itself only contained output_tokens.
        Assert.Equal(36, usageSegments[1].Usage.InputTokens);
        Assert.Equal(151, usageSegments[1].Usage.OutputTokens);

        FinishReasonChatSegment? finishReason = segments.OfType<FinishReasonChatSegment>().LastOrDefault();
        Assert.NotNull(finishReason);
        Assert.Equal(DBFinishReason.Success, finishReason.FinishReason);
    }

    /// <summary>
    /// message_start reports cache_creation_input_tokens=9 and cache_read_input_tokens=7;
    /// the later message_delta omits both. The final usage segment must preserve the
    /// cache token counts from the earlier event.
    /// </summary>
    [Fact]
    public async Task ChatStreamed_MessageDeltaWithoutCacheTokens_PreservesPreviousCacheTokens()
    {
        IHttpClientFactory httpClientFactory = CreateMockHttpClientFactory(
            "data: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_1\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"deepseek-reasoner\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":36,\"cache_creation_input_tokens\":9,\"cache_read_input_tokens\":7,\"output_tokens\":0}}}\n\n",
            "data: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"}}\n\n",
            "data: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Hello\"}}\n\n",
            "data: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"output_tokens\":151}}\n\n",
            "data: {\"type\":\"message_stop\"}\n\n");
        DeepSeekAnthropicService service = new(httpClientFactory);
        ChatRequest request = CreateRequest();

        List<UsageChatSegment> usageSegments = [];
        await foreach (ChatSegment segment in service.ChatStreamed(request, CancellationToken.None))
        {
            if (segment is UsageChatSegment usage)
            {
                usageSegments.Add(usage);
            }
        }

        Assert.Equal(2, usageSegments.Count);
        UsageChatSegment finalUsage = usageSegments[1];
        Assert.Equal(36, finalUsage.Usage.InputTokens);
        Assert.Equal(151, finalUsage.Usage.OutputTokens);
        // Cache counts appeared only in message_start; they must survive the delta.
        Assert.Equal(7, finalUsage.Usage.CacheTokens);
        Assert.Equal(9, finalUsage.Usage.CacheCreationTokens);
    }
}