Skip to content

Commit d775cdd

Browse files
authored
[Fix] Fix `max_workers` handling and the `O1_MODEL_LIST` in openai_streaming (open-compass#2367)
1 parent 1562cd9 commit d775cdd

File tree

1 file changed

+27
-23
lines changed

1 file changed

+27
-23
lines changed

opencompass/models/openai_streaming.py

Lines changed: 27 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
OPENAISDK_API_BASE = os.environ.get('OPENAI_BASE_URL',
1313
'https://api.openai.com/v1/')
1414

15-
O1_MODEL_LIST = ['o1', 'o3', 'o4']
15+
O1_MODEL_LIST = ['o1', 'o3', 'o4', 'gpt-5']
1616

1717

1818
@MODELS.register_module()
@@ -52,28 +52,32 @@ def __init__(self,
5252
think_tag: str = '</think>',
5353
openai_extra_kwargs: Dict | None = None,
5454
stream: bool = True,
55-
stream_chunk_size: int = 1):
56-
57-
super().__init__(path=path,
58-
max_seq_len=max_seq_len,
59-
query_per_second=query_per_second,
60-
rpm_verbose=rpm_verbose,
61-
retry=retry,
62-
key=key,
63-
org=org,
64-
meta_template=meta_template,
65-
openai_api_base=openai_api_base,
66-
openai_proxy_url=openai_proxy_url,
67-
mode=mode,
68-
logprobs=logprobs,
69-
top_logprobs=top_logprobs,
70-
temperature=temperature,
71-
tokenizer_path=tokenizer_path,
72-
extra_body=extra_body,
73-
verbose=verbose,
74-
http_client_cfg=http_client_cfg,
75-
status_code_mappings=status_code_mappings,
76-
think_tag=think_tag)
55+
stream_chunk_size: int = 1,
56+
max_workers: Optional[int] = None):
57+
58+
super().__init__(
59+
path=path,
60+
max_seq_len=max_seq_len,
61+
query_per_second=query_per_second,
62+
rpm_verbose=rpm_verbose,
63+
retry=retry,
64+
key=key,
65+
org=org,
66+
meta_template=meta_template,
67+
openai_api_base=openai_api_base,
68+
openai_proxy_url=openai_proxy_url,
69+
mode=mode,
70+
logprobs=logprobs,
71+
top_logprobs=top_logprobs,
72+
temperature=temperature,
73+
tokenizer_path=tokenizer_path,
74+
extra_body=extra_body,
75+
verbose=verbose,
76+
http_client_cfg=http_client_cfg,
77+
status_code_mappings=status_code_mappings,
78+
think_tag=think_tag,
79+
max_workers=max_workers,
80+
)
7781

7882
self.stream = stream
7983
self.stream_chunk_size = stream_chunk_size

0 commit comments

Comments (0)