@@ -14,6 +14,7 @@
 
 from __future__ import annotations
 
+import asyncio
 import uuid
 import datetime
 from dataclasses import dataclass
@@ -303,21 +304,23 @@ def __init__(
             extensions=dict(self._header.extensions),
             file_name=self._header.file_header.file_name,
         )
+        self._write_lock = asyncio.Lock()
 
     async def write(self, data: bytes):
-        chunked_data = [
-            data[i : i + STREAM_CHUNK_SIZE]
-            for i in range(0, len(data), STREAM_CHUNK_SIZE)
-        ]
-
-        for chunk in chunked_data:
-            self._next_chunk_index += 1
-            chunk_msg = proto_DataStream.Chunk(
-                stream_id=self._header.stream_id,
-                chunk_index=self._next_chunk_index,
-                content=chunk,
-            )
-            await self._send_chunk(chunk_msg)
+        async with self._write_lock:
+            chunked_data = [
+                data[i : i + STREAM_CHUNK_SIZE]
+                for i in range(0, len(data), STREAM_CHUNK_SIZE)
+            ]
+
+            for chunk in chunked_data:
+                self._next_chunk_index += 1
+                chunk_msg = proto_DataStream.Chunk(
+                    stream_id=self._header.stream_id,
+                    chunk_index=self._next_chunk_index,
+                    content=chunk,
+                )
+                await self._send_chunk(chunk_msg)
 
     @property
     def info(self) -> FileStreamInfo:
|
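Why the lock matters: write() awaits _send_chunk() once per chunk, so two concurrent write() calls on the same stream could interleave their chunks and leave chunk_index out of step with byte order. The sketch below is not part of this PR; ChunkedWriter, its tiny STREAM_CHUNK_SIZE, and the stubbed _send_chunk are hypothetical stand-ins for the real stream writer, used only to show the same asyncio.Lock pattern serializing concurrent writes.

# Minimal sketch (illustrative only, not the PR's code).
import asyncio

STREAM_CHUNK_SIZE = 4  # tiny chunk size, for demonstration only


class ChunkedWriter:
    def __init__(self) -> None:
        self._next_chunk_index = 0
        self._write_lock = asyncio.Lock()
        self.sent: list[tuple[int, bytes]] = []

    async def _send_chunk(self, index: int, content: bytes) -> None:
        await asyncio.sleep(0)  # yield to the event loop, like a real network send
        self.sent.append((index, content))

    async def write(self, data: bytes) -> None:
        # Same pattern as the diff: hold the lock for the whole write so chunks
        # from concurrent writers cannot interleave and chunk_index stays in
        # byte order.
        async with self._write_lock:
            for i in range(0, len(data), STREAM_CHUNK_SIZE):
                self._next_chunk_index += 1
                await self._send_chunk(
                    self._next_chunk_index, data[i : i + STREAM_CHUNK_SIZE]
                )


async def main() -> None:
    writer = ChunkedWriter()
    # Two concurrent writes: with the lock, every "A" chunk is sent before any
    # "B" chunk; without it, the awaits inside write() could interleave them.
    await asyncio.gather(writer.write(b"AAAAAAAA"), writer.write(b"BBBBBBBB"))
    print(writer.sent)


if __name__ == "__main__":
    asyncio.run(main())

Holding the lock across the entire loop, rather than around each chunk, is what guarantees that one caller's bytes occupy a contiguous run of chunk indices.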