Skip to content

Commit f782464

Browse files
committed
Enable streaming on o1 since it's now supported
For real this time, it seems. This reverts commit 148f7d8.
1 parent a126a49 commit f782464

File tree

1 file changed

+2
-10
lines changed

1 file changed

+2
-10
lines changed

src/gptcmd/llm/openai.py

Lines changed: 2 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -167,9 +167,6 @@ def _estimate_cost_in_cents(
167167
+ Decimal(sampled_tokens) * sampled_scale
168168
) * Decimal("100")
169169

170-
def _supports_streaming(self) -> bool:
171-
return not self.model.startswith("o1")
172-
173170
def complete(self, messages: Sequence[Message]) -> LLMResponse:
174171
kwargs = {
175172
"model": self.model,
@@ -246,16 +243,11 @@ def validate_api_params(self, params):
246243

247244
@property
248245
def stream(self) -> bool:
249-
return self._supports_streaming() and self._stream
246+
return self._stream
250247

251248
@stream.setter
252249
def stream(self, val: bool):
253-
if not self._supports_streaming():
254-
raise NotImplementedError(
255-
"Streamed responses are not supported by this model"
256-
)
257-
else:
258-
self._stream = val
250+
self._stream = val
259251

260252
@property
261253
def valid_models(self) -> Iterable[str]:

0 commit comments

Comments
 (0)