|
| 1 | +from enum import Enum |
| 2 | +from typing import ( |
| 3 | + TYPE_CHECKING, |
| 4 | + Any, |
| 5 | + AsyncIterator, |
| 6 | + Dict, |
| 7 | + Iterator, |
| 8 | + List, |
| 9 | + Optional, |
| 10 | +) |
| 11 | + |
| 12 | +from typing_extensions import Unpack |
| 13 | + |
| 14 | +from replicate.exceptions import ReplicateError |
| 15 | +from replicate.identifier import ModelVersionIdentifier |
| 16 | + |
| 17 | +try: |
| 18 | + from pydantic import v1 as pydantic # type: ignore |
| 19 | +except ImportError: |
| 20 | + import pydantic # type: ignore |
| 21 | + |
| 22 | + |
| 23 | +if TYPE_CHECKING: |
| 24 | + import httpx |
| 25 | + |
| 26 | + from replicate.client import Client |
| 27 | + from replicate.prediction import Predictions |
| 28 | + |
| 29 | + |
class ServerSentEvent(pydantic.BaseModel):
    """
    A server-sent event received from a prediction stream.

    Fields mirror the SSE wire format: `event` is the event type,
    `data` the payload (newline-joined if it spanned multiple lines),
    `id` the event ID, and `retry` an optional reconnection delay.
    """

    class EventType(Enum):
        """
        A server-sent event type.
        """

        OUTPUT = "output"
        LOGS = "logs"
        ERROR = "error"
        DONE = "done"

    # Type of the event ("output", "logs", "error", or "done").
    event: EventType
    # Raw event payload.
    data: str
    # Event ID as sent by the server.
    id: str
    # Optional reconnection delay hint, in milliseconds.
    retry: Optional[int]

    def __str__(self) -> str:
        # Only "output" events carry user-visible content; logs, errors,
        # and done markers render as the empty string.
        # NOTE: must compare against the enum member — `EventType` has no
        # `str` mixin, so `self.event == "output"` was always False and
        # str(sse) previously always returned "".
        if self.event == ServerSentEvent.EventType.OUTPUT:
            return self.data

        return ""
| 55 | + |
| 56 | + |
class EventSource:
    """
    A server-sent event source wrapping a streaming HTTP response.

    Iterating (sync or async) yields `ServerSentEvent`s until a "done"
    event arrives; an "error" event raises `RuntimeError` with the
    event's data as the message.
    """

    response: "httpx.Response"

    def __init__(self, response: "httpx.Response") -> None:
        self.response = response
        # Reject anything that is not an event stream up front, so a
        # misconfigured endpoint fails loudly instead of parsing garbage.
        content_type, _, _ = response.headers["content-type"].partition(";")
        if content_type != "text/event-stream":
            raise ValueError(
                "Expected response Content-Type to be 'text/event-stream', "
                f"got {content_type!r}"
            )

    class Decoder:
        """
        An incremental decoder for the server-sent event wire format.

        Feed it one line at a time via `decode`; it returns a complete
        `ServerSentEvent` on each blank dispatch line and `None` otherwise.
        """

        event: Optional["ServerSentEvent.EventType"]
        data: List[str]
        last_event_id: Optional[str]
        retry: Optional[int]

        def __init__(self) -> None:
            # Per-instance state. (Previously these were mutable class
            # attributes, so buffered `data` leaked between Decoder
            # instances until the first event reset them.)
            self.event = None
            self.data = []
            self.last_event_id = None
            self.retry = None

        def decode(self, line: str) -> Optional[ServerSentEvent]:
            """
            Decode a line and return a server-sent event if applicable.
            """

            if not line:
                # A blank line dispatches the buffered event. Events
                # without a type or an ID are dropped (the Replicate
                # stream always sends both).
                if self.event is None or self.last_event_id is None:
                    return None

                sse = ServerSentEvent(
                    event=self.event,
                    data="\n".join(self.data),
                    id=self.last_event_id,
                    retry=self.retry,
                )

                # Reset per-event state; `last_event_id` deliberately
                # persists across events, per the SSE specification.
                self.event = None
                self.data = []
                self.retry = None

                return sse

            if line.startswith(":"):
                # Comment line — ignore.
                return None

            fieldname, _, value = line.partition(":")
            value = value.lstrip()

            if fieldname == "event":
                # Ignore event types we don't recognize instead of raising
                # (EventType(value) raises ValueError on unknown names).
                try:
                    self.event = ServerSentEvent.EventType(value)
                except ValueError:
                    pass
            elif fieldname == "data":
                self.data.append(value)
            elif fieldname == "id":
                # IDs containing NUL are rejected per the SSE specification.
                if "\0" not in value:
                    self.last_event_id = value
            elif fieldname == "retry":
                try:
                    self.retry = int(value)
                except (TypeError, ValueError):
                    pass

            return None

    def __iter__(self) -> Iterator[ServerSentEvent]:
        decoder = EventSource.Decoder()
        for line in self.response.iter_lines():
            sse = decoder.decode(line.rstrip("\n"))
            if sse is None:
                continue
            # Compare against enum members — plain `Enum` members never
            # equal their string values, so the previous string
            # comparisons let "done"/"error" events leak to the caller
            # and the stream never terminated.
            if sse.event == ServerSentEvent.EventType.DONE:
                return
            if sse.event == ServerSentEvent.EventType.ERROR:
                raise RuntimeError(sse.data)
            yield sse

    async def __aiter__(self) -> AsyncIterator[ServerSentEvent]:
        decoder = EventSource.Decoder()
        async for line in self.response.aiter_lines():
            sse = decoder.decode(line.rstrip("\n"))
            if sse is None:
                continue
            # Same enum-member comparison fix as the sync iterator.
            if sse.event == ServerSentEvent.EventType.DONE:
                return
            if sse.event == ServerSentEvent.EventType.ERROR:
                raise RuntimeError(sse.data)
            yield sse
| 156 | + |
| 157 | + |
def stream(
    client: "Client",
    ref: str,
    input: Optional[Dict[str, Any]] = None,
    **params: Unpack["Predictions.CreatePredictionParams"],
) -> Iterator[ServerSentEvent]:
    """
    Run a model and stream its output.

    Creates a prediction for `ref` with streaming enabled, then yields
    server-sent events from the prediction's stream URL.

    Raises:
        ReplicateError: if the prediction does not expose a stream URL.
    """

    params["stream"] = True

    _, _, version_id = ModelVersionIdentifier.parse(ref)
    prediction = client.predictions.create(
        version=version_id, input=input or {}, **params
    )

    urls = prediction.urls or {}
    stream_url = urls.get("stream")
    if not stream_url or not isinstance(stream_url, str):
        raise ReplicateError("Model does not support streaming")

    # no-store keeps proxies from buffering or replaying the event stream.
    headers = {
        "Accept": "text/event-stream",
        "Cache-Control": "no-store",
    }

    with client._client.stream("GET", stream_url, headers=headers) as response:
        yield from EventSource(response)
| 186 | + |
| 187 | + |
async def async_stream(
    client: "Client",
    ref: str,
    input: Optional[Dict[str, Any]] = None,
    **params: Unpack["Predictions.CreatePredictionParams"],
) -> AsyncIterator[ServerSentEvent]:
    """
    Run a model and stream its output asynchronously.

    Creates a prediction for `ref` with streaming enabled, then yields
    server-sent events from the prediction's stream URL.

    Raises:
        ReplicateError: if the prediction does not expose a stream URL.
    """

    params["stream"] = True

    _, _, version_id = ModelVersionIdentifier.parse(ref)
    prediction = await client.predictions.async_create(
        version=version_id, input=input or {}, **params
    )

    urls = prediction.urls or {}
    stream_url = urls.get("stream")
    if not stream_url or not isinstance(stream_url, str):
        raise ReplicateError("Model does not support streaming")

    # no-store keeps proxies from buffering or replaying the event stream.
    headers = {
        "Accept": "text/event-stream",
        "Cache-Control": "no-store",
    }

    async with client._async_client.stream(
        "GET", stream_url, headers=headers
    ) as response:
        async for event in EventSource(response):
            yield event
0 commit comments