Skip to content

Commit de156e8

Browse files
committed
Update version to 7.6.8 and enhance missing book processing in Readarr by improving logging, switching from author-based to book-based processing, and adding checks for API configuration and stop signals.
1 parent 25a085c commit de156e8

File tree

2 files changed: +92 −77 lines changed

src/primary/apps/readarr/missing.py

Lines changed: 91 additions & 76 deletions
Original file line numberDiff line numberDiff line change
@@ -55,16 +55,32 @@ def process_missing_books(
5555

5656
monitored_only = app_settings.get("monitored_only", True)
5757
skip_future_releases = app_settings.get("skip_future_releases", True)
58-
# skip_author_refresh setting removed as it was a performance bottleneck
5958
hunt_missing_books = app_settings.get("hunt_missing_books", 0)
6059

6160
# Use advanced settings from general.json for command operations
6261
command_wait_delay = get_advanced_setting("command_wait_delay", 1)
6362
command_wait_attempts = get_advanced_setting("command_wait_attempts", 600)
6463

64+
readarr_logger.info(f"Hunt Missing Books: {hunt_missing_books}")
65+
readarr_logger.info(f"Monitored Only: {monitored_only}")
66+
readarr_logger.info(f"Skip Future Releases: {skip_future_releases}")
67+
68+
if not api_url or not api_key:
69+
readarr_logger.error("API URL or Key not configured in settings. Cannot process missing books.")
70+
return False
71+
72+
# Skip if hunt_missing_books is set to 0
73+
if hunt_missing_books <= 0:
74+
readarr_logger.info("'hunt_missing_books' setting is 0 or less. Skipping missing book processing.")
75+
return False
76+
77+
# Check for stop signal
78+
if stop_check():
79+
readarr_logger.info("Stop requested before starting missing books. Aborting...")
80+
return False
81+
6582
# Get missing books
6683
readarr_logger.info("Retrieving wanted/missing books...")
67-
readarr_logger.info("Retrieving wanted/missing books...")
6884

6985
# Call the correct function to get missing books
7086
missing_books_data = readarr_api.get_wanted_missing_books(api_url, api_key, api_timeout, monitored_only)
@@ -75,114 +91,113 @@ def process_missing_books(
7591

7692
readarr_logger.info(f"Found {len(missing_books_data)} missing books.")
7793

78-
# Group by author ID (optional)
79-
books_by_author = {}
94+
if not missing_books_data:
95+
readarr_logger.info("No missing books found.")
96+
return False
97+
98+
# Check for stop signal after retrieving books
99+
if stop_check():
100+
readarr_logger.info("Stop requested after retrieving missing books. Aborting...")
101+
return False
102+
103+
# Filter out already processed books using stateful management (now book-based instead of author-based)
104+
unprocessed_books = []
80105
for book in missing_books_data:
81-
author_id = book.get("authorId")
82-
if author_id:
83-
if author_id not in books_by_author:
84-
books_by_author[author_id] = []
85-
books_by_author[author_id].append(book)
86-
87-
author_ids = list(books_by_author.keys())
88-
89-
# Filter out already processed authors using stateful management
90-
unprocessed_authors = []
91-
for author_id in author_ids:
92-
if not is_processed("readarr", instance_name, str(author_id)):
93-
unprocessed_authors.append(author_id)
106+
book_id = str(book.get("id"))
107+
if not is_processed("readarr", instance_name, book_id):
108+
unprocessed_books.append(book)
94109
else:
95-
readarr_logger.debug(f"Skipping already processed author ID: {author_id}")
110+
readarr_logger.debug(f"Skipping already processed book ID: {book_id}")
96111

97-
readarr_logger.info(f"Found {len(unprocessed_authors)} unprocessed authors out of {len(author_ids)} total authors with missing books.")
112+
readarr_logger.info(f"Found {len(unprocessed_books)} unprocessed missing books out of {len(missing_books_data)} total.")
98113

99-
if not unprocessed_authors:
100-
readarr_logger.info(f"No unprocessed authors found for {instance_name}. All available authors have been processed.")
114+
if not unprocessed_books:
115+
readarr_logger.info("No unprocessed missing books found. All available books have been processed.")
101116
return False
102117

103-
# Always randomly select authors/books to process
104-
readarr_logger.info(f"Randomly selecting up to {hunt_missing_books} authors with missing books.")
105-
authors_to_process = random.sample(unprocessed_authors, min(hunt_missing_books, len(unprocessed_authors)))
118+
# Select individual books to process (fixed: was selecting authors, now selects books)
119+
readarr_logger.info(f"Randomly selecting up to {hunt_missing_books} individual books to search.")
120+
books_to_process = random.sample(unprocessed_books, min(hunt_missing_books, len(unprocessed_books)))
121+
122+
readarr_logger.info(f"Selected {len(books_to_process)} individual books to search for missing items.")
123+
124+
# Add detailed logging for selected books
125+
if books_to_process:
126+
readarr_logger.info(f"Books selected for processing in this cycle:")
127+
for idx, book in enumerate(books_to_process):
128+
book_id = book.get("id")
129+
book_title = book.get("title", "Unknown Title")
130+
author_id = book.get("authorId", "Unknown")
131+
readarr_logger.info(f" {idx+1}. '{book_title}' (ID: {book_id}, Author ID: {author_id})")
106132

107-
readarr_logger.info(f"Selected {len(authors_to_process)} authors to search for missing books.")
108133
processed_count = 0
109-
processed_something = False
110-
processed_authors = [] # Track author names processed
134+
processed_books = [] # Track book titles processed
111135

112-
for author_id in authors_to_process:
136+
# Process each individual book
137+
for book in books_to_process:
113138
if stop_check():
114139
readarr_logger.info("Stop signal received, aborting Readarr missing cycle.")
115140
break
116141

117-
author_info = readarr_api.get_author_details(api_url, api_key, author_id, api_timeout) # Assuming this exists
118-
author_name = author_info.get("authorName", f"Author ID {author_id}") if author_info else f"Author ID {author_id}"
119-
120-
readarr_logger.info(f"Processing missing books for author: \"{author_name}\" (Author ID: {author_id})")
142+
book_id = book.get("id")
143+
book_title = book.get("title", f"Unknown Book ID {book_id}")
144+
author_id = book.get("authorId")
145+
146+
# Get author name for logging
147+
author_info = readarr_api.get_author_details(api_url, api_key, author_id, api_timeout) if author_id else None
148+
author_name = author_info.get("authorName", f"Author ID {author_id}") if author_info else "Unknown Author"
121149

122-
# Refresh functionality has been removed as it was identified as a performance bottleneck
150+
readarr_logger.info(f"Processing missing book: '{book_title}' by {author_name} (Book ID: {book_id})")
123151

124-
# Search for missing books associated with the author
125-
readarr_logger.info(f" - Searching for missing books...")
126-
book_ids_for_author = [book['id'] for book in books_by_author[author_id]] # 'id' is bookId
127-
128-
# Create detailed log with book titles
129-
book_details = []
130-
for book in books_by_author[author_id]:
131-
book_title = book.get('title', f"Book ID {book['id']}")
132-
book_details.append(f"'{book_title}' (ID: {book['id']})")
133-
134-
# Construct detailed log message
135-
details_string = ', '.join(book_details)
136-
log_message = f"Triggering Book Search for {len(book_details)} books by author '{author_name}': [{details_string}]"
137-
readarr_logger.debug(log_message) # Changed level from INFO to DEBUG
152+
# Search for this individual book (fixed: was searching all books by author)
153+
readarr_logger.info(f" - Searching for individual book: '{book_title}'...")
138154

139-
# Mark author as processed BEFORE triggering any searches
140-
add_processed_id("readarr", instance_name, str(author_id))
141-
readarr_logger.debug(f"Added author ID {author_id} to processed list for {instance_name}")
155+
# Mark book as processed BEFORE triggering search to prevent duplicates
156+
add_processed_id("readarr", instance_name, str(book_id))
157+
readarr_logger.debug(f"Added book ID {book_id} to processed list for {instance_name}")
142158

143-
# Now trigger the search
144-
search_command_result = readarr_api.search_books(api_url, api_key, book_ids_for_author, api_timeout)
159+
# Search for the specific book (using book search instead of author search)
160+
search_command_result = readarr_api.search_books(api_url, api_key, [book_id], api_timeout)
145161

146162
if search_command_result:
147163
# Extract command ID if the result is a dictionary, otherwise use the result directly
148164
command_id = search_command_result.get('id') if isinstance(search_command_result, dict) else search_command_result
149-
readarr_logger.info(f"Triggered book search command {command_id} for author {author_name}. Assuming success for now.") # Log only command ID
165+
readarr_logger.info(f"Triggered book search command {command_id} for '{book_title}' by {author_name}.")
150166
increment_stat("readarr", "hunted")
151167

152-
# Tag the author if enabled
153-
if tag_processed_items:
168+
# Tag the book's author if enabled (keep author tagging as it's still useful)
169+
if tag_processed_items and author_id:
154170
try:
155171
readarr_api.tag_processed_author(api_url, api_key, api_timeout, author_id)
156172
readarr_logger.debug(f"Tagged author {author_id} as processed")
157173
except Exception as e:
158174
readarr_logger.warning(f"Failed to tag author {author_id}: {e}")
159175

160-
# Log multiple history entries - one for each book with author info
161-
for book in books_by_author[author_id]:
162-
book_title = book.get('title', f"Unknown Book ID {book['id']}")
163-
# Format includes both author and book info
164-
media_name = f"{author_name} - {book_title}"
165-
# Log each book as a separate history entry with book_id
166-
log_processed_media("readarr", media_name, book['id'], instance_name, "missing")
167-
readarr_logger.debug(f"Logged missing book history entry: {media_name} (ID: {book['id']})")
176+
# Log history entry for this specific book
177+
media_name = f"{author_name} - {book_title}"
178+
log_processed_media("readarr", media_name, book_id, instance_name, "missing")
179+
readarr_logger.debug(f"Logged missing book history entry: {media_name} (ID: {book_id})")
168180

169-
readarr_logger.debug(f"Logged history entries for {len(books_by_author[author_id])} books by author: {author_name}")
170-
171-
processed_count += 1 # Count processed authors/groups
172-
processed_authors.append(author_name) # Add to list of processed authors
173-
processed_something = True
174-
readarr_logger.info(f"Processed {processed_count}/{len(authors_to_process)} authors/groups for missing books this cycle.")
181+
processed_count += 1
182+
processed_books.append(f"'{book_title}' by {author_name}")
183+
processed_any = True
184+
readarr_logger.info(f"Processed {processed_count}/{len(books_to_process)} books for missing search this cycle.")
175185
else:
176-
readarr_logger.error(f"Failed to trigger search for author {author_name}.")
186+
readarr_logger.error(f"Failed to trigger search for book '{book_title}' by {author_name}.")
177187

178188
if processed_count >= hunt_missing_books:
179-
readarr_logger.info(f"Reached target of {hunt_missing_books} authors/groups processed for this cycle.")
189+
readarr_logger.info(f"Reached target of {hunt_missing_books} books processed for this cycle.")
180190
break
181191

182-
if processed_authors:
183-
authors_list = '", "'.join(processed_authors)
184-
readarr_logger.info(f'Completed processing {processed_count} authors/groups for missing books this cycle: "{authors_list}"')
192+
if processed_books:
193+
# Log first few books, then summarize if there are many
194+
if len(processed_books) <= 3:
195+
books_list = ', '.join(processed_books)
196+
readarr_logger.info(f'Completed processing {processed_count} books for missing search this cycle: {books_list}')
197+
else:
198+
first_books = ', '.join(processed_books[:3])
199+
readarr_logger.info(f'Completed processing {processed_count} books for missing search this cycle: {first_books} and {len(processed_books)-3} others')
185200
else:
186-
readarr_logger.info(f"Completed processing {processed_count} authors/groups for missing books this cycle.")
201+
readarr_logger.info(f"Completed processing {processed_count} books for missing search this cycle.")
187202

188-
return processed_something
203+
return processed_any

version.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
7.6.7
1+
7.6.8

0 commit comments

Comments (0)