 
 import asyncio
 import dataclasses
+import datetime
 import logging
 from typing import TYPE_CHECKING, Any, Self
 
 import aiofiles
-from pydantic import ByteSize
+from pydantic import ByteSize, TypeAdapter
 
 from bunkr_uploader.api import BunkrrAPI
 from bunkr_uploader.api._exceptions import ChunkUploadError, FileUploadError
 from bunkr_uploader.api._files import Chunk, File
 from bunkr_uploader.api._responses import UploadResponse
 
+from .logger import json_logger
 from .progress import new_progress
 
 if TYPE_CHECKING:
@@ -23,13 +25,25 @@
 
     from bunkr_uploader.config import ConfigSettings
 
+
 logger = logging.getLogger(__name__)
 
 
+def _utc_now() -> datetime.datetime:
+    return datetime.datetime.now().astimezone(datetime.UTC)
+
+
 @dataclasses.dataclass(slots=True)
 class FileUploadResult:
     file: File
     result: UploadResponse
+    timestamp: datetime.datetime = dataclasses.field(init=False, default_factory=_utc_now)
+
+    def dumps(self) -> str:
+        return _file_upload_result_serializer(self, indent=2).decode()
+
+
+_file_upload_result_serializer = TypeAdapter(FileUploadResult).dump_json
 
 
 class BunkrrUploader:
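Note on the serialization pattern above: the module-level `TypeAdapter(FileUploadResult).dump_json` gives the plain dataclass a pydantic JSON serializer without converting it to a BaseModel, and `dumps()` reuses it for every result. A minimal, self-contained sketch of the same pattern; `DemoResult` and its string fields are hypothetical stand-ins, since `File` and `UploadResponse` live elsewhere in the package:

import dataclasses
import datetime

from pydantic import TypeAdapter


def _utc_now() -> datetime.datetime:
    # Timezone-aware "now", mirroring the helper added in this diff.
    return datetime.datetime.now().astimezone(datetime.UTC)


@dataclasses.dataclass(slots=True)
class DemoResult:
    # Hypothetical stand-in for FileUploadResult; File and UploadResponse are
    # replaced by plain strings so the sketch stays self-contained.
    name: str
    url: str
    timestamp: datetime.datetime = dataclasses.field(init=False, default_factory=_utc_now)

    def dumps(self) -> str:
        # TypeAdapter.dump_json returns bytes, so decode to get a str.
        return _serializer(self, indent=2).decode()


# Built once at module level; every dumps() call reuses the same adapter.
_serializer = TypeAdapter(DemoResult).dump_json

print(DemoResult("cat.png", "https://example.com/cat.png").dumps())

Building the adapter once at import time avoids regenerating the serialization schema on every call, which is the point of keeping it as a module-level attribute rather than constructing it inside `dumps()`.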
@@ -71,7 +85,7 @@ def _prepare_files(self, files: Iterable[Path]) -> Generator[File]:
             else:
                 yield file
 
-    async def _upload_chunk(self, file: File, chunk: Chunk, server: URL) -> bool:
+    async def _upload_chunk(self, file: File, server: URL, chunk: Chunk) -> bool:
         """Upload a single chunk with retry mechanism."""
         for attempt in range(self.settings.chunk_retries):
             msg = f"uploading chunk {chunk.index} of file {file.original_name} (attempt {attempt + 1}/{self.settings.chunk_retries})"
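This hunk shows only the head of the retry loop. For reference, a generic version of the attempt-counting pattern it uses, as a sketch: the `send` callable, `retries`, `delay`, and the `OSError` failure type are illustrative placeholders, not the project's API.

import asyncio
import logging

logger = logging.getLogger(__name__)


async def upload_with_retries(send, retries: int = 3, delay: float = 1.0) -> bool:
    """Call the `send` coroutine factory until it succeeds or attempts run out."""
    for attempt in range(retries):
        logger.debug("uploading chunk (attempt %d/%d)", attempt + 1, retries)
        try:
            await send()
        except OSError as exc:  # illustrative failure type
            logger.warning("attempt %d failed: %s", attempt + 1, exc)
            await asyncio.sleep(delay)
        else:
            return True
    return False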
@@ -113,7 +127,7 @@ async def _upload_file(self, file: File, server: URL) -> UploadResponse:
             return await self._api.direct_upload(file, server)
 
         async for chunk in self._chunked_read(file):
-            await self._upload_chunk(file, chunk, server)
+            await self._upload_chunk(file, server, chunk)
 
         return await self._api.finish_chunks(file, server)
 
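`_chunked_read` itself is outside this diff. Assuming it is an aiofiles-based async generator (aiofiles is imported at the top of the module), a simplified sketch could look like the following; `DemoChunk` and the 25 MiB chunk size are placeholders, not the real `Chunk` type or configured size:

import dataclasses
from collections.abc import AsyncGenerator
from pathlib import Path

import aiofiles


@dataclasses.dataclass(slots=True)
class DemoChunk:
    # Hypothetical, simplified stand-in for bunkr_uploader.api._files.Chunk.
    index: int
    data: bytes


async def chunked_read(path: Path, chunk_size: int = 25 * 1024 * 1024) -> AsyncGenerator[DemoChunk, None]:
    # Read the file in fixed-size pieces without blocking the event loop.
    async with aiofiles.open(path, "rb") as handle:
        index = 0
        while data := await handle.read(chunk_size):
            yield DemoChunk(index, data)
            index += 1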
@@ -196,7 +210,9 @@ async def _upload(self, file: File, album_id: str | None) -> FileUploadResult:
         try:
             server = await self._get_server()
             response = await self._upload_file(file, server)
-            return FileUploadResult(file, response)
+            result = FileUploadResult(file, response)
+            json_logger.info(result)
+            return result
         finally:
             self._sem.release()
 
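`json_logger` comes from the package's `.logger` module, which this diff does not touch. One plausible wiring, assuming a formatter that calls the logged object's `dumps()` method; the logger name and stream handler here are illustrative only:

import logging


class _JsonFormatter(logging.Formatter):
    # Hypothetical formatter: if the logged object can serialize itself (as
    # FileUploadResult.dumps does), emit that JSON; otherwise format normally.
    def format(self, record: logging.LogRecord) -> str:
        dumps = getattr(record.msg, "dumps", None)
        if callable(dumps):
            return dumps()
        return super().format(record)


json_logger = logging.getLogger("bunkr_uploader.results")  # name is illustrative
_handler = logging.StreamHandler()
_handler.setFormatter(_JsonFormatter())
json_logger.addHandler(_handler)
json_logger.setLevel(logging.INFO)

Under that assumption, `json_logger.info(result)` emits the same JSON document that `FileUploadResult.dumps()` returns.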