Skip to content

Commit 83e2084

Browse files
committed
Small pylint code cleanup
1 parent ebb4262 commit 83e2084

File tree

5 files changed

+83
-63
lines changed

5 files changed

+83
-63
lines changed

pyproject.toml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,9 @@ disable = [
1010
"missing-function-docstring", # C0116
1111
"line-too-long", # C0301
1212
"too-few-public-methods", # R0903
13+
"too-many-instance-attributes", # R0902
14+
"too-many-arguments", # R0913
15+
"too-many-positional-arguments", # R0917
1316
]
1417

1518
[tool.pytest.ini_options]

src/utils/common.py

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -83,3 +83,21 @@ def wait_and_exit(seconds=30):
8383
logger.info(f"Decluttarr will wait for {seconds} seconds and then exit.")
8484
time.sleep(seconds)
8585
sys.exit()
86+
87+
88+
def extract_json_from_response(response, key: str | None = None):
89+
try:
90+
data = response.json()
91+
except ValueError as e:
92+
raise ValueError("Response content is not valid JSON") from e
93+
94+
if key is None:
95+
return data
96+
97+
if not isinstance(data, dict):
98+
raise ValueError("Response JSON is not a dictionary, cannot extract key")
99+
100+
if key not in data:
101+
raise ValueError(f"Key '{key}' not found in API response")
102+
103+
return data[key]

src/utils/queue_manager.py

Lines changed: 39 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
import logging
2-
from src.utils.common import make_request
2+
from src.utils.common import make_request, extract_json_from_response
33
from src.utils.log_setup import logger
44

55

@@ -29,18 +29,22 @@ async def get_queue_items(self, queue_scope):
2929
error = f"Invalid queue_scope: {queue_scope}"
3030
raise ValueError(error)
3131
if logger.isEnabledFor(logging.DEBUG):
32-
logger.debug("queue_manager.py/get_queue_items/queue (%s): %s", queue_scope, self.format_queue(queue_items))
32+
logger.debug(
33+
"queue_manager.py/get_queue_items/queue (%s): %s",
34+
queue_scope,
35+
self.format_queue(queue_items),
36+
)
3337
return queue_items
3438

3539
async def _get_queue(self, *, full_queue=False):
3640
# Step 1: Refresh the queue (now internal)
3741
await self._refresh_queue()
3842

3943
# Step 2: Get the total number of records
40-
record_count = await self._get_total_records(full_queue)
44+
total_records_count = await self._get_total_records_count(full_queue)
4145

4246
# Step 3: Get all records using `arr.full_queue_parameter`
43-
queue = await self._get_arr_records(full_queue, record_count)
47+
queue = await self._get_arr_records(full_queue, total_records_count)
4448

4549
# Step 4: Filter the queue based on delayed items and ignored download clients
4650
queue = self._filter_out_ignored_statuses(queue)
@@ -63,41 +67,40 @@ async def _refresh_queue(self):
6367
headers={"X-Api-Key": self.arr.api_key},
6468
)
6569

66-
async def _get_total_records(self, full_queue):
70+
async def _get_total_records_count(self, full_queue):
    # Total record count reported by the /queue endpoint, honouring
    # `arr.full_queue_parameter` so the full/partial queue scope is respected.
    query = {self.arr.full_queue_parameter: full_queue}
    return await self.fetch_queue_field(query, key="totalRecords")
7975

80-
async def _get_arr_records(self, full_queue, record_count):
76+
async def _get_arr_records(self, full_queue, total_records_count):
    # Fetch every queue record in a single page sized to the total count;
    # `arr.full_queue_parameter` is only sent when the full queue is wanted.
    if total_records_count == 0:
        return []

    query = {"page": "1", "pageSize": total_records_count}
    if full_queue:
        query[self.arr.full_queue_parameter] = full_queue

    return await self.fetch_queue_field(query, key="records")
87+
88+
89+
async def fetch_queue_field(self, params, key: str | None = None):
    # Call the /queue endpoint and pull `key` out of its JSON body
    # (the whole decoded payload is returned when no key is given).
    queue_response = await make_request(
        method="GET",
        endpoint=f"{self.arr.api_url}/queue",
        headers={"X-Api-Key": self.arr.api_key},
        params=params,
        settings=self.settings,
    )
    return extract_json_from_response(queue_response, key=key)
9999

100-
def _filter_out_ignored_statuses(self, queue, ignored_statuses=("delay","downloadClientUnavailable")):
100+
101+
def _filter_out_ignored_statuses(
102+
self, queue, ignored_statuses=("delay", "downloadClientUnavailable")
103+
):
101104
"""
102105
All matching items are removed from the queue. However, logging of ignored items
103106
is limited to one per (download title, protocol, indexer) combination to reduce log noise
@@ -125,14 +128,15 @@ def _filter_out_ignored_statuses(self, queue, ignored_statuses=("delay","downloa
125128
if status in ignored_statuses:
126129
if combination not in seen_combinations:
127130
seen_combinations.add(combination)
128-
logger.debug(f"queue_manager.py/_filter_out_ignored_statuses: Ignored queue item: {title} (Status: {status}, Protocol: {protocol}, Indexer: {indexer})")
131+
logger.debug(
132+
f"queue_manager.py/_filter_out_ignored_statuses: Ignored queue item: {title} (Status: {status}, Protocol: {protocol}, Indexer: {indexer})"
133+
)
129134
continue
130135

131136
filtered_queue.append(item)
132137

133138
return filtered_queue
134139

135-
136140
def _filter_out_ignored_download_clients(self, queue):
137141
# Filters out ignored download clients
138142
if queue is None:
@@ -185,9 +189,7 @@ def group_by_download_id(self, queue_items):
185189
grouped_dict[download_id] = {
186190
"queue_ids": [item_id],
187191
**{
188-
key: queue_item[key]
189-
for key in retain_keys
190-
if key in queue_item
192+
key: queue_item[key] for key in retain_keys if key in queue_item
191193
},
192194
}
193195

@@ -199,7 +201,9 @@ def filter_queue_by_status(queue, statuses: list[str]) -> list[dict]:
199201
return [item for item in queue if item.get("status") in statuses]
200202

201203
@staticmethod
202-
def filter_queue_by_status_and_error_message(queue, conditions: list[tuple[str, str]]) -> list[dict]:
204+
def filter_queue_by_status_and_error_message(
205+
queue, conditions: list[tuple[str, str]]
206+
) -> list[dict]:
203207
"""Filter queue items that match any given (status, errorMessage) pair."""
204208
queue_items = []
205209
for item in queue:

src/utils/wanted_manager.py

Lines changed: 22 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
from src.utils.common import make_request
1+
from src.utils.common import make_request, extract_json_from_response
22

33

44
class WantedManager:
@@ -12,39 +12,34 @@ async def get_wanted_items(self, missing_or_cutoff):
1212
1313
missing_or_cutoff: Drives whether missing or cutoff items are retrieved
1414
"""
15-
record_count = await self._get_total_records(missing_or_cutoff)
16-
return await self._get_arr_records(missing_or_cutoff, record_count)
15+
total_records_count = await self._get_total_records_count(missing_or_cutoff)
16+
return await self._get_arr_records(missing_or_cutoff, total_records_count)
1717

18-
async def _get_total_records(self, missing_or_cutoff):
19-
# Get the total number of records from wanted
20-
response = (
21-
await make_request(
22-
method="GET",
23-
endpoint=f"{self.arr.api_url}/wanted/{missing_or_cutoff}",
24-
settings=self.settings,
25-
headers={"X-Api-Key": self.arr.api_key},
26-
)
27-
).json()
28-
return response["totalRecords"]
18+
async def _get_total_records_count(self, missing_or_cutoff: str) -> int:
    # Number of wanted (missing / cutoff-unmet) records reported by the API.
    return await self.fetch_wanted_field(missing_or_cutoff, key="totalRecords")
2921

30-
async def _get_arr_records(self, missing_or_cutoff, record_count):
22+
async def _get_arr_records(self, missing_or_cutoff, total_records_count):
    # Pull every wanted record in one page sized to the total count,
    # sorted by the item's last search time.
    if total_records_count == 0:
        return []

    query = {
        "page": "1",
        "pageSize": total_records_count,
        "sortKey": f"{self.arr.detail_item_key}s.lastSearchTime",
    }
    return await self.fetch_wanted_field(missing_or_cutoff, params=query, key="records")
32+
33+
async def fetch_wanted_field(self, missing_or_cutoff: str, params: dict | None = None, key: str | None = None):
    # Gets the response of the /wanted/{missing_or_cutoff} endpoint and extracts a
    # specific field from the JSON response (the whole payload when `key` is None).
    # NOTE: fixed copy-paste comment from queue_manager that said "/queue" —
    # this method targets the /wanted endpoint, as the request below shows.
    response = await make_request(
        method="GET",
        endpoint=f"{self.arr.api_url}/wanted/{missing_or_cutoff}",
        settings=self.settings,
        params=params,
        headers={"X-Api-Key": self.arr.api_key},
    )
    return extract_json_from_response(response, key=key)
4843

4944
async def search_items(self, detail_ids):
5045
"""Search items by detail IDs."""

tests/jobs/test_remove_slow.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
from src.jobs.remove_slow import RemoveSlow
66

77

8-
@pytest.mark.asynciox
8+
@pytest.mark.asyncio
99
@pytest.mark.parametrize(
1010
("item", "expected_result"),
1111
[

0 commit comments

Comments
 (0)