Skip to content

Commit 473241c

Browse files
committed
Fixed Single Threaded Download, Removed timeout argument
1 parent ff8d2db commit 473241c

File tree

5 files changed

+48
-55
lines changed

5 files changed

+48
-55
lines changed

DOCS.md

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
# TechZDL v1.2.4 Documentation
1+
# TechZDL v1.2.5 Documentation
22

33
## Installation
44

@@ -66,7 +66,6 @@ Here is a list of arguments you can pass to the `TechZDL` class to modify your d
6666
- `chunk_size` `(int)`: Size of each download chunk in bytes. Defaults to 5 MB.
6767
- `single_threaded` `(bool)`: Force single-threaded download. Defaults to False.
6868
- `max_retries` `(int)`: Maximum retries for each chunk/file download. Defaults to 3.
69-
- `timeout` `(int)`: Timeout for each request in seconds. Defaults to 60.
7069

7170
### Attributes
7271

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
# TechZDL v1.2.4
1+
# TechZDL v1.2.5
22

33
TechZDL is a powerful file downloader package for Python that supports multi-threaded downloads, dynamic worker adjustments based on network speed, custom headers, and more.
44

demos/timeout_and_max_retries.py renamed to demos/max_retries.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,14 @@
1-
# This script demonstrates how to configure the downloader to handle timeouts and retries.
2-
# The 'timeout' parameter sets the maximum time (in seconds) to wait for a server response.
1+
# This script demonstrates how to configure the downloader to handle retries.
32
# The 'max_retries' parameter sets the maximum number of retry attempts for each chunk or file download.
43
# These settings are useful for handling unreliable network conditions or server issues.
54

65
import asyncio
7-
from techzdl.api import TechZDL
6+
from techzdl import TechZDL
87

98

109
async def main():
1110
downloader = TechZDL(
1211
url="https://link.testfile.org/bNYZFw", # URL of the file to download
13-
timeout=30, # Timeout in seconds for each request (default: 60 seconds)
1412
max_retries=5, # Maximum number of retries for each chunk/file download (default: 3)
1513
)
1614
await downloader.start()

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33

44
setup(
55
name="techzdl",
6-
version="1.2.4",
6+
version="1.2.5",
77
author="TechShreyash",
88
author_email="[email protected]",
99
description="A simple yet powerful file downloader package for python"

techzdl/__init__.py

Lines changed: 43 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
# Name: techzdl
2-
# Version: 1.2.4
2+
# Version: 1.2.5
33
# Summary: A simple yet powerful file downloader package for python
44
# Home-page: https://github.com/TechShreyash/techzdl
55
# Author: TechShreyash
@@ -43,7 +43,6 @@ def __init__(
4343
chunk_size: int = 5 * 1024 * 1024,
4444
single_threaded: bool = False,
4545
max_retries: int = 3,
46-
timeout: int = 60,
4746
) -> None:
4847
"""
4948
Initialize the TechZDL object.
@@ -64,7 +63,6 @@ def __init__(
6463
- `chunk_size` `(int, optional)`: Size of each download chunk in bytes. Defaults to 5 MB.
6564
- `single_threaded` `(bool, optional)`: Force single-threaded download. Defaults to False.
6665
- `max_retries` `(int, optional)`: Maximum retries for each chunk/file download. Defaults to 3.
67-
- `timeout` `(int, optional)`: Timeout for each request in seconds. Defaults to 60.
6866
6967
#### Examples:
7068
```python
@@ -105,7 +103,6 @@ async def main():
105103
self.curl_cffi_required = False
106104
self.max_retries = max_retries
107105
self.session = None
108-
self.timeout = timeout
109106
self.is_running = False
110107
self.downloader_tasks = []
111108
self.temp_file_path = None
@@ -185,9 +182,7 @@ async def get_file_info(self) -> dict:
185182
for i in range(self.max_retries):
186183
try:
187184

188-
session = aiohttp.ClientSession(
189-
timeout=aiohttp.ClientTimeout(total=self.timeout)
190-
)
185+
session = aiohttp.ClientSession()
191186

192187
self._log(f"Fetching file info from {self.url}")
193188
response = None
@@ -213,7 +208,7 @@ async def get_file_info(self) -> dict:
213208
)
214209
await session.close()
215210

216-
session = AsyncSession(timeout=self.timeout)
211+
session = AsyncSession()
217212

218213
response = None
219214
try:
@@ -451,41 +446,49 @@ async def _dynamic_worker_updater(self, semaphore: AdjustableSemaphore) -> None:
451446
prev_downloaded = self.size_done
452447
prev_speed = speed
453448

449+
async def _single_threaded_download_child(self) -> None:
450+
response = None
451+
if self.curl_cffi_required:
452+
try:
453+
response = await self.session.get(
454+
url=self.url, headers=self.custom_headers, stream=True
455+
)
456+
async with aiofiles.open(self.output_path, "wb") as output_file:
457+
async for chunk in response.aiter_content():
458+
await output_file.write(chunk)
459+
self.size_done += len(chunk)
460+
except Exception as e:
461+
raise e
462+
finally:
463+
if response:
464+
response.close()
465+
else:
466+
try:
467+
response = await self.session.get(self.url, headers=self.custom_headers)
468+
async with aiofiles.open(self.output_path, "wb") as output_file:
469+
while chunk := await response.content.read(self.chunk_size):
470+
await output_file.write(chunk)
471+
self.size_done += len(chunk)
472+
except Exception as e:
473+
raise e
474+
finally:
475+
if response:
476+
response.close()
477+
454478
async def _single_threaded_download(self) -> None:
455479
"""
456480
Perform a single-threaded download of the file.
457481
"""
458482
for i in range(self.max_retries):
483+
self.size_done = 0 # Reset size_done if retrying
484+
459485
try:
460-
response = None
461-
if self.curl_cffi_required:
462-
try:
463-
response = await self.session.get(
464-
url=self.url, headers=self.custom_headers, stream=True
465-
)
466-
async with aiofiles.open(self.output_path, "wb") as output_file:
467-
async for chunk in response.aiter_content():
468-
await output_file.write(chunk)
469-
self.size_done += len(chunk)
470-
except Exception as e:
471-
raise e
472-
finally:
473-
if response:
474-
response.close()
475-
else:
476-
try:
477-
response = await self.session.get(
478-
self.url, headers=self.custom_headers
479-
)
480-
async with aiofiles.open(self.output_path, "wb") as output_file:
481-
while chunk := await response.content.read(self.chunk_size):
482-
await output_file.write(chunk)
483-
self.size_done += len(chunk)
484-
except Exception as e:
485-
raise e
486-
finally:
487-
if response:
488-
response.close()
486+
await self._task_runner(
487+
[
488+
self._single_threaded_download_child(),
489+
self._show_progress("Downloading"),
490+
]
491+
)
489492
break
490493
except Exception as e:
491494
self._log(f"Error downloading file: {e}", level="error")
@@ -557,9 +560,7 @@ async def _download_manager(self) -> Path:
557560
try:
558561
if self.session:
559562
await self.session.close()
560-
self.session = aiohttp.ClientSession(
561-
timeout=aiohttp.ClientTimeout(total=self.timeout)
562-
)
563+
self.session = aiohttp.ClientSession()
563564

564565
self._log(f"Fetching file info from {self.url}")
565566
response = None
@@ -590,7 +591,7 @@ async def _download_manager(self) -> Path:
590591
)
591592
await self.session.close()
592593

593-
self.session = AsyncSession(timeout=self.timeout)
594+
self.session = AsyncSession()
594595
self.curl_cffi_required = True
595596

596597
response = None
@@ -642,12 +643,7 @@ async def _download_manager(self) -> Path:
642643
self._log("Starting single-threaded download")
643644
self._log(f"Downloading {self.filename}")
644645

645-
await self._task_runner(
646-
[
647-
self._single_threaded_download(),
648-
self._show_progress("Downloading"),
649-
]
650-
)
646+
await self._single_threaded_download()
651647
else:
652648
self._log(
653649
"Server supports range requests. Starting multi-threaded download"

0 commit comments

Comments (0)