1- from typing import Any , AsyncIterator , Dict , Iterator , List , Literal , Optional , Union
1+ from typing import Any , AsyncIterator , Dict , Iterator , List , Literal , Optional , Union , overload
22from uuid import UUID
33
44from pydantic import TypeAdapter
55
66from asknews_sdk .api .base import BaseAPI
7+ from asknews_sdk .client import APIClient , AsyncAPIClient
78from asknews_sdk .dto .alert import AlertLog , AlertResponse , CreateAlertRequest , UpdateAlertRequest
89from asknews_sdk .dto .chat import (
910 CreateChatCompletionRequest ,
2627 CreateDeepNewsResponseStreamSource ,
2728)
2829from asknews_sdk .errors import APIError
29- from asknews_sdk .response import EventSource
30-
31-
32- class ChatAPI (BaseAPI ):
30+ from asknews_sdk .response import AsyncEventSource , EventSource
31+
32+
# Model identifiers accepted by the chat-completions endpoints.
# Shared by the sync and async APIs so the two stay in lockstep.
ChatModel = Literal[
    "gpt-4o-mini",
    "gpt-4-1106-preview",
    "open-mixtral-8x7b",
    "meta-llama/Meta-Llama-3-70B-Instruct",
    "meta-llama/Meta-Llama-3.1-70B-Instruct",
    "meta-llama/Meta-Llama-3.3-70B-Instruct",
    "meta-llama/Meta-Llama-3.1-405B-Instruct",
    "claude-3-5-sonnet-20240620",
    "claude-3-5-sonnet-latest",
    "gpt-4o",
    "o3-mini",
]
# Model identifiers accepted by the DeepNews research endpoints.
DeepNewsModel = Literal[
    "gpt-5",
    "claude-3-7-sonnet-latest",
    "deepseek",
    "deepseek-basic",
    "deepseek-r1-0528",
    "o3-mini",
    "claude-sonnet-4-20250514",
    "claude-opus-4-20250514",
    "gemini-2.5-pro",
    "o3",
]
58+
59+
60+ class ChatAPI (BaseAPI [APIClient ]):
3361 """
3462 Chat API
3563
    https://docs.asknews.app/en/reference#tag--chat
3765 """
    @overload
    def get_chat_completions(
        self,
        messages: List[Dict[str, str]],
        model: ChatModel = "gpt-4o-mini",
        stream: Literal[False] = False,
        inline_citations: Literal["markdown_link", "numbered", "none"] = "markdown_link",
        append_references: bool = True,
        asknews_watermark: bool = True,
        journalist_mode: bool = True,
        conversational_awareness: bool = False,
        filter_params: Optional[Dict] = None,
        *,
        http_headers: Optional[Dict] = None,
    ) -> CreateChatCompletionResponse:
        """Overload: with ``stream=False`` a single parsed completion is returned."""
        ...
3882
83+ @overload
3984 def get_chat_completions (
4085 self ,
4186 messages : List [Dict [str , str ]],
42- model : Literal [
43- "gpt-4o-mini" ,
44- "gpt-4-1106-preview" ,
45- "open-mixtral-8x7b" ,
46- "meta-llama/Meta-Llama-3-70B-Instruct" ,
47- "meta-llama/Meta-Llama-3.1-70B-Instruct" ,
48- "meta-llama/Meta-Llama-3.3-70B-Instruct" ,
49- "meta-llama/Meta-Llama-3.1-405B-Instruct" ,
50- "claude-3-5-sonnet-20240620" ,
51- "claude-3-5-sonnet-latest" ,
52- "gpt-4o" ,
53- "o3-mini" ,
54- ] = "gpt-4o-mini" ,
87+ model : ChatModel = "gpt-4o-mini" ,
88+ stream : bool = False ,
89+ inline_citations : Literal ["markdown_link" , "numbered" , "none" ] = "markdown_link" ,
90+ append_references : bool = True ,
91+ asknews_watermark : bool = True ,
92+ journalist_mode : bool = True ,
93+ conversational_awareness : bool = False ,
94+ filter_params : Optional [Dict ] = None ,
95+ * ,
96+ http_headers : Optional [Dict ] = None ,
97+ ) -> Iterator [CreateChatCompletionResponseStream ]:
98+ ...
99+
100+ def get_chat_completions (
101+ self ,
102+ messages : List [Dict [str , str ]],
103+ model : ChatModel = "gpt-4o-mini" ,
55104 stream : bool = False ,
56105 inline_citations : Literal ["markdown_link" , "numbered" , "none" ] = "markdown_link" ,
57106 append_references : bool = True ,
@@ -70,10 +119,7 @@ def get_chat_completions(
70119 :param messages: List of messages in the conversation.
71120 :type messages: List[Dict[str, str]]
        :param model: Model to use for chat completion, defaults to "gpt-4o-mini"
73- :type model: Literal[
74- "gpt-3.5-turbo-16k", "gpt-4-1106-preview", "mistral-small",
75- "mixtral-8x7b-32768"
76- ]
122+ :type model: ChatModel
77123 :param stream: Whether to stream the response, defaults to False
78124 :type stream: bool
79125 :param inline_citations: Inline citations format, defaults to "markdown_link"
@@ -118,7 +164,6 @@ def get_chat_completions(
118164 )
119165
120166 if stream :
121-
122167 def _stream ():
123168 for event in EventSource .from_api_response (response ):
124169 if event .content == "[DONE]" :
@@ -479,21 +524,52 @@ def list_alert_logs(
479524 )
480525 return PaginatedResponse [AlertLog ].model_validate (response .content )
481526
    @overload
    def get_deep_news(
        self,
        messages: List[Dict[str, str]],
        model: DeepNewsModel = "deepseek",
        stream: Literal[False] = False,
        inline_citations: Literal["markdown_link", "numbered", "none"] = "markdown_link",
        append_references: bool = True,
        asknews_watermark: bool = True,
        journalist_mode: bool = True,
        conversational_awareness: bool = False,
        filter_params: Optional[Dict] = None,
        sources: Optional[List[str]] = None,
        search_depth: int = 3,
        max_depth: int = 5,
        return_sources: bool = True,
        *,
        http_headers: Optional[Dict] = None,
    ) -> CreateDeepNewsResponse:
        """Overload: with ``stream=False`` a single parsed DeepNews response is returned."""
        ...
547+
    @overload
    def get_deep_news(
        self,
        messages: List[Dict[str, str]],
        model: DeepNewsModel = "deepseek",
        stream: Literal[True] = True,
        inline_citations: Literal["markdown_link", "numbered", "none"] = "markdown_link",
        append_references: bool = True,
        asknews_watermark: bool = True,
        journalist_mode: bool = True,
        conversational_awareness: bool = False,
        filter_params: Optional[Dict] = None,
        sources: Optional[List[str]] = None,
        search_depth: int = 3,
        max_depth: int = 5,
        return_sources: bool = True,
        *,
        http_headers: Optional[Dict] = None,
    ) -> Iterator[CreateDeepNewsResponseStream]:
        """Overload: with ``stream=True`` an iterator of SSE chunks is returned."""
        ...
568+
569+ def get_deep_news (
570+ self ,
571+ messages : List [Dict [str , str ]],
572+ model : DeepNewsModel = "deepseek" ,
497573 stream : bool = False ,
498574 inline_citations : Literal ["markdown_link" , "numbered" , "none" ] = "markdown_link" ,
499575 append_references : bool = True ,
@@ -545,7 +621,6 @@ def get_deep_news(
545621 )
546622
547623 if stream :
548-
549624 def _stream ():
550625 for event in EventSource .from_api_response (response ):
551626 if event .content == "[DONE]" :
@@ -569,28 +644,50 @@ def _stream():
569644 return CreateDeepNewsResponse .model_validate (response .content )
570645
571646
572- class AsyncChatAPI (BaseAPI ):
647+ class AsyncChatAPI (BaseAPI [ AsyncAPIClient ] ):
573648 """
574649 Chat API
575650
576651 https://api.asknews.app/docs#tag/chat
577652 """
    @overload
    async def get_chat_completions(
        self,
        messages: List[Dict[str, str]],
        model: ChatModel = "gpt-4o-mini",
        stream: Literal[False] = False,
        inline_citations: Literal["markdown_link", "numbered", "none"] = "markdown_link",
        append_references: bool = True,
        asknews_watermark: bool = True,
        journalist_mode: bool = True,
        conversational_awareness: bool = False,
        filter_params: Optional[Dict] = None,
        *,
        http_headers: Optional[Dict] = None,
    ) -> CreateChatCompletionResponse:
        """Overload: with ``stream=False`` a single parsed completion is returned."""
        ...
578669
    @overload
    async def get_chat_completions(
        self,
        messages: List[Dict[str, str]],
        model: ChatModel = "gpt-4o-mini",
        stream: Literal[True] = True,
        inline_citations: Literal["markdown_link", "numbered", "none"] = "markdown_link",
        append_references: bool = True,
        asknews_watermark: bool = True,
        journalist_mode: bool = True,
        conversational_awareness: bool = False,
        filter_params: Optional[Dict] = None,
        *,
        http_headers: Optional[Dict] = None,
    ) -> AsyncIterator[CreateChatCompletionResponseStream]:
        """Overload: with ``stream=True`` an async iterator of SSE chunks is returned."""
        ...
686+
687+ async def get_chat_completions (
688+ self ,
689+ messages : List [Dict [str , str ]],
690+ model : ChatModel = "gpt-4o-mini" ,
594691 stream : bool = False ,
595692 inline_citations : Literal ["markdown_link" , "numbered" , "none" ] = "markdown_link" ,
596693 append_references : bool = True ,
@@ -660,7 +757,7 @@ async def get_chat_completions(
660757 if stream :
661758
662759 async def _stream ():
663- async for event in EventSource .from_api_response (response ):
760+ async for event in AsyncEventSource .from_api_response (response ):
664761 if event .content == "[DONE]" :
665762 break
666763
@@ -1018,21 +1115,52 @@ async def list_alert_logs(
10181115 )
10191116 return PaginatedResponse [AlertLog ].model_validate (response .content )
10201117
    @overload
    async def get_deep_news(
        self,
        messages: List[Dict[str, str]],
        model: DeepNewsModel = "deepseek",
        stream: Literal[False] = False,
        inline_citations: Literal["markdown_link", "numbered", "none"] = "markdown_link",
        append_references: bool = True,
        asknews_watermark: bool = True,
        journalist_mode: bool = True,
        conversational_awareness: bool = False,
        filter_params: Optional[Dict] = None,
        sources: Optional[List[str]] = None,
        # NOTE(review): return_sources is declared BEFORE search_depth/max_depth here,
        # the opposite of the sync overloads — confirm this matches the async
        # implementation's parameter order (positional callers would differ).
        return_sources: bool = True,
        search_depth: int = 3,
        max_depth: int = 5,
        *,
        http_headers: Optional[Dict] = None,
    ) -> CreateDeepNewsResponse:
        """Overload: with ``stream=False`` a single parsed DeepNews response is returned."""
        ...
1138+
    @overload
    async def get_deep_news(
        self,
        messages: List[Dict[str, str]],
        model: DeepNewsModel = "deepseek",
        stream: Literal[True] = True,
        inline_citations: Literal["markdown_link", "numbered", "none"] = "markdown_link",
        append_references: bool = True,
        asknews_watermark: bool = True,
        journalist_mode: bool = True,
        conversational_awareness: bool = False,
        filter_params: Optional[Dict] = None,
        sources: Optional[List[str]] = None,
        # NOTE(review): parameter order differs from the sync overloads
        # (return_sources precedes search_depth/max_depth) — verify against the
        # async implementation signature.
        return_sources: bool = True,
        search_depth: int = 3,
        max_depth: int = 5,
        *,
        http_headers: Optional[Dict] = None,
    ) -> AsyncIterator[CreateDeepNewsResponseStream]:
        """Overload: with ``stream=True`` an async iterator of SSE chunks is returned."""
        ...
1159+
1160+ async def get_deep_news (
1161+ self ,
1162+ messages : List [Dict [str , str ]],
1163+ model : DeepNewsModel = "deepseek" ,
10361164 stream : bool = False ,
10371165 inline_citations : Literal ["markdown_link" , "numbered" , "none" ] = "markdown_link" ,
10381166 append_references : bool = True ,
@@ -1084,9 +1212,8 @@ async def get_deep_news(
10841212 )
10851213
10861214 if stream :
1087-
10881215 async def _stream ():
1089- async for event in EventSource .from_api_response (response ):
1216+ async for event in AsyncEventSource .from_api_response (response ):
10901217 if event .content == "[DONE]" :
10911218 break
10921219
0 commit comments