Merge branch 'pr/NaruZosa/252' into decluttarr-v2

This commit is contained in:
Benjamin Harder
2025-06-02 14:26:03 +02:00
58 changed files with 848 additions and 744 deletions

View File

@@ -1,18 +1,16 @@
# Cleans the download queue
from src.utils.log_setup import logger
from src.utils.queue_manager import QueueManager
from src.jobs.remove_bad_files import RemoveBadFiles
from src.jobs.remove_failed_imports import RemoveFailedImports
from src.jobs.remove_failed_downloads import RemoveFailedDownloads
from src.jobs.remove_failed_imports import RemoveFailedImports
from src.jobs.remove_metadata_missing import RemoveMetadataMissing
from src.jobs.remove_missing_files import RemoveMissingFiles
from src.jobs.remove_orphans import RemoveOrphans
from src.jobs.remove_slow import RemoveSlow
from src.jobs.remove_stalled import RemoveStalled
from src.jobs.remove_unmonitored import RemoveUnmonitored
from src.jobs.search_handler import SearchHandler
from src.utils.log_setup import logger
from src.utils.queue_manager import QueueManager
class JobManager:
@@ -27,7 +25,7 @@ class JobManager:
await self.search_jobs()
async def removal_jobs(self):
logger.verbose(f"")
logger.verbose("")
logger.verbose(f"Cleaning queue on {self.arr.name}:")
if not await self._queue_has_items():
return
@@ -63,14 +61,14 @@ class JobManager:
full_queue = await queue_manager.get_queue_items("full")
if full_queue:
logger.debug(
f"job_runner/full_queue at start: %s",
"job_runner/full_queue at start: %s",
queue_manager.format_queue(full_queue),
)
return True
else:
self.arr.tracker.reset()
logger.verbose(">>> Queue is empty.")
return False
self.arr.tracker.reset()
logger.verbose(">>> Queue is empty.")
return False
async def _qbit_connected(self):
for qbit in self.settings.download_clients.qbittorrent:
@@ -78,14 +76,14 @@ class JobManager:
# Check if any client is disconnected
if not await qbit.check_qbit_connected():
logger.warning(
f">>> qBittorrent is disconnected. Skipping queue cleaning on {self.arr.name}."
f">>> qBittorrent is disconnected. Skipping queue cleaning on {self.arr.name}.",
)
return False
return True
def _get_removal_jobs(self):
"""
Returns a list of enabled removal job instances based on the provided settings.
Return a list of enabled removal job instances based on the provided settings.
Each job is included if the corresponding attribute exists and is truthy in settings.jobs.
"""
@@ -105,6 +103,6 @@ class JobManager:
for removal_job_name, removal_job_class in removal_job_classes.items():
if getattr(self.settings.jobs, removal_job_name, False):
jobs.append(
removal_job_class(self.arr, self.settings, removal_job_name)
removal_job_class(self.arr, self.settings, removal_job_name),
)
return jobs

0
src/jobs/__init__.py Normal file
View File

View File

@@ -1,5 +1,6 @@
from src.utils.log_setup import logger
class RemovalHandler:
def __init__(self, arr, settings, job_name):
self.arr = arr
@@ -53,12 +54,12 @@ class RemovalHandler:
if affected_download['protocol'] != 'torrent':
return "remove" # handling is only implemented for torrent
client_implemenation = await self.arr.get_download_client_implementation(affected_download['downloadClient'])
if client_implemenation != "QBittorrent":
client_implementation = await self.arr.get_download_client_implementation(affected_download['downloadClient'])
if client_implementation != "QBittorrent":
return "remove" # handling is only implemented for qbit
if len(self.settings.download_clients.qbittorrent) == 0:
return "remove" # qbit not configured, thus can't tag
return "remove" # qbit not configured, thus can't tag
if download_id in self.arr.tracker.private:
return self.settings.general.private_tracker_handling

View File

@@ -1,8 +1,9 @@
from abc import ABC, abstractmethod
from src.utils.queue_manager import QueueManager
from src.utils.log_setup import logger
from src.jobs.strikes_handler import StrikesHandler
from src.jobs.removal_handler import RemovalHandler
from src.jobs.strikes_handler import StrikesHandler
from src.utils.log_setup import logger
from src.utils.queue_manager import QueueManager
class RemovalJob(ABC):
@@ -16,14 +17,15 @@ class RemovalJob(ABC):
queue = []
# Default class attributes (can be overridden in subclasses)
def __init__(self, arr, settings, job_name):
def __init__(self, arr, settings, job_name) -> None:
self.arr = arr
self.settings = settings
self.job_name = job_name
self.job = getattr(self.settings.jobs, self.job_name)
self.queue_manager = QueueManager(self.arr, self.settings)
async def run(self):
async def run(self) -> int:
if not self.job.enabled:
return 0
logger.debug(f"removal_job.py/run: Launching job '{self.job_name}', and checking if any items in {self.queue_scope} queue.")
@@ -57,7 +59,8 @@ class RemovalJob(ABC):
def _ignore_protected(self):
"""
Filters out downloads that are in the protected tracker.
Filter out downloads that are in the protected tracker.
Directly updates self.affected_downloads.
"""
self.affected_downloads = {
@@ -66,6 +69,6 @@ class RemovalJob(ABC):
if download_id not in self.arr.tracker.protected
}
@abstractmethod # Imlemented on level of each removal job
async def _find_affected_items(self):
@abstractmethod # Implemented on level of each removal job
async def _find_affected_items(self) -> None:
pass

View File

@@ -1,32 +1,35 @@
import os
from pathlib import Path
from src.jobs.removal_job import RemovalJob
from src.utils.log_setup import logger
# fmt: off
STANDARD_EXTENSIONS = [
# Movies, TV Shows (Radarr, Sonarr, Whisparr)
".webm", ".m4v", ".3gp", ".nsv", ".ty", ".strm", ".rm", ".rmvb", ".m3u", ".ifo", ".mov", ".qt", ".divx", ".xvid", ".bivx", ".nrg", ".pva", ".wmv", ".asf", ".asx", ".ogm", ".ogv", ".m2v", ".avi", ".bin", ".dat", ".dvr-ms", ".mpg", ".mpeg", ".mp4", ".avc", ".vp3", ".svq3", ".nuv", ".viv", ".dv", ".fli", ".flv", ".wpl", ".img", ".iso", ".vob", ".mkv", ".mk3d", ".ts", ".wtv", ".m2ts",
# Subs (Radarr, Sonarr, Whisparr)
".sub", ".srt", ".idx",
# Audio (Lidarr, Readarr)
".aac", ".aif", ".aiff", ".aifc", ".ape", ".flac", ".mp2", ".mp3", ".m4a", ".m4b", ".m4p", ".mp4a", ".oga", ".ogg", ".opus", ".vorbis", ".wma", ".wav", ".wv", "wavepack",
# Text (Readarr)
".epub", ".kepub", ".mobi", ".azw3", ".pdf",
]
# Archives can be handled by tools such as unpackerr:
ARCHIVE_EXTENSIONS = [
".rar", ".tar", ".tgz", ".gz", ".zip", ".7z", ".bz2", ".tbz2", ".iso",
]
BAD_KEYWORDS = ["Sample", "Trailer"]
BAD_KEYWORD_LIMIT = 500 # Megabyte; do not remove items larger than that
# fmt: on
class RemoveBadFiles(RemovalJob):
queue_scope = "normal"
blocklist = True
# fmt: off
good_extensions = [
# Movies, TV Shows (Radarr, Sonarr, Whisparr)
".webm", ".m4v", ".3gp", ".nsv", ".ty", ".strm", ".rm", ".rmvb", ".m3u", ".ifo", ".mov", ".qt", ".divx", ".xvid", ".bivx", ".nrg", ".pva", ".wmv", ".asf", ".asx", ".ogm", ".ogv", ".m2v", ".avi", ".bin", ".dat", ".dvr-ms", ".mpg", ".mpeg", ".mp4", ".avc", ".vp3", ".svq3", ".nuv", ".viv", ".dv", ".fli", ".flv", ".wpl", ".img", ".iso", ".vob", ".mkv", ".mk3d", ".ts", ".wtv", ".m2ts",
# Subs (Radarr, Sonarr, Whisparr)
".sub", ".srt", ".idx",
# Audio (Lidarr, Readarr)
".aac", ".aif", ".aiff", ".aifc", ".ape", ".flac", ".mp2", ".mp3", ".m4a", ".m4b", ".m4p", ".mp4a", ".oga", ".ogg", ".opus", ".vorbis", ".wma", ".wav", ".wv", "wavepack",
# Text (Readarr)
".epub", ".kepub", ".mobi", ".azw3", ".pdf",
]
# Archives can be handled by tools such as unpackerr:
archive_extensions = [
".rar", ".tar", ".tgz", ".gz", ".zip", ".7z", ".bz2", ".tbz2", ".iso",
]
bad_keywords = ["Sample", "Trailer"]
bad_keyword_limit = 500 # Megabyte; do not remove items larger than that
# fmt: on
async def _find_affected_items(self):
# Get in-scope download IDs
result = self._group_download_ids_by_client()
@@ -41,10 +44,12 @@ class RemoveBadFiles(RemovalJob):
affected_items.extend(client_items)
return affected_items
def _group_download_ids_by_client(self):
"""Group all relevant download IDs by download client.
Limited to qbittorrent currently, as no other download clients implemented"""
"""
Group all relevant download IDs by download client.
Limited to qbittorrent currently, as no other download clients implemented
"""
result = {}
for item in self.queue:
@@ -62,7 +67,7 @@ class RemoveBadFiles(RemovalJob):
result.setdefault(download_client, {
"download_client_type": download_client_type,
"download_ids": set()
"download_ids": set(),
})["download_ids"].add(item["downloadId"])
return result
@@ -91,55 +96,48 @@ class RemoveBadFiles(RemovalJob):
return affected_items
# -- Helper functions for qbit handling --
# -- Helper functions for qbit handling --
def _get_items_to_process(self, qbit_items):
"""Return only downloads that have metadata, are supposedly downloading.
Additionally, each dowload should be checked at least once (for bad extensions), and thereafter only if availabiliy drops to less than 100%"""
"""
Return only downloads that have metadata, are supposedly downloading.
This is to prevent the case where a download has metadata but is not actually downloading.
Additionally, each download should be checked at least once (for bad extensions), and thereafter only if availability drops to less than 100%
"""
return [
item for item in qbit_items
if (
item.get("has_metadata")
and item["state"] in {"downloading", "forcedDL", "stalledDL"}
and (
item["hash"] not in self.arr.tracker.extension_checked
or item["availability"] < 1
)
item.get("has_metadata")
and item["state"] in {"downloading", "forcedDL", "stalledDL"}
and (
item["hash"] not in self.arr.tracker.extension_checked
or item["availability"] < 1
)
)
]
async def _get_active_files(self, qbit_client, torrent_hash):
@staticmethod
async def _get_active_files(qbit_client, torrent_hash) -> list[dict]:
"""Return only files from the torrent that are still set to download, with file extension and name."""
files = await qbit_client.get_torrent_files(torrent_hash) # Await the async method
return [
{
**f, # Include all original file properties
"file_name": os.path.basename(f["name"]), # Add proper filename (without folder)
"file_extension": os.path.splitext(f["name"])[1], # Add file_extension (e.g., .mp3)
"file_name": Path(f["name"]).name, # Add proper filename (without folder)
"file_extension": Path(f["name"]).suffix, # Add file_extension (e.g., .mp3)
}
for f in files if f["priority"] > 0
]
def _log_stopped_files(self, stopped_files, torrent_name):
@staticmethod
def _log_stopped_files(stopped_files, torrent_name) -> None:
logger.verbose(
f">>> Stopped downloading {len(stopped_files)} file{'s' if len(stopped_files) != 1 else ''} in: {torrent_name}"
f">>> Stopped downloading {len(stopped_files)} file{'s' if len(stopped_files) != 1 else ''} in: {torrent_name}",
)
for file, reasons in stopped_files:
logger.verbose(f">>> - {file['file_name']} ({' & '.join(reasons)})")
def _all_files_stopped(self, torrent_files):
"""Check if no files remain with download priority."""
return all(f["priority"] == 0 for f in torrent_files)
def _match_queue_items(self, download_hash):
"""Find matching queue item(s) by downloadId (uppercase)."""
return [
item for item in self.queue
if item["downloadId"] == download_hash.upper()
]
def _get_stoppable_files(self, torrent_files):
"""Return files that can be marked as 'Do not Download' based on specific conditions."""
stoppable_files = []
@@ -156,7 +154,7 @@ class RemoveBadFiles(RemovalJob):
# Check for bad keywords
if self._contains_bad_keyword(file):
reasons.append("Contains bad keyword in path")
# Check if the file has low availability
if self._is_complete_partial(file):
reasons.append(f"Low availability: {file['availability'] * 100:.1f}%")
@@ -167,16 +165,15 @@ class RemoveBadFiles(RemovalJob):
return stoppable_files
def _is_bad_extension(self, file):
def _is_bad_extension(self, file) -> bool:
"""Check if the file has a bad extension."""
return file['file_extension'].lower() not in self.get_good_extensions()
return file["file_extension"].lower() not in self.get_good_extensions()
def get_good_extensions(self):
extensions = list(self.good_extensions)
good_extensions = list(STANDARD_EXTENSIONS)
if self.job.keep_archives:
extensions += self.archive_extensions
return extensions
good_extensions += ARCHIVE_EXTENSIONS
return good_extensions
def _contains_bad_keyword(self, file):
"""Check if the file path contains a bad keyword and is smaller than the limit."""
@@ -184,33 +181,29 @@ class RemoveBadFiles(RemovalJob):
file_size_mb = file.get("size", 0) / 1024 / 1024
return (
any(keyword.lower() in file_path for keyword in self.bad_keywords)
and file_size_mb <= self.bad_keyword_limit
any(keyword.lower() in file_path for keyword in BAD_KEYWORDS)
and file_size_mb <= BAD_KEYWORD_LIMIT
)
@staticmethod
def _is_complete_partial(file) -> bool:
"""Check if the availability is less than 100% and the file is not fully downloaded."""
return file["availability"] < 1 and file["progress"] != 1
def _is_complete_partial(self, file):
"""Check if the availability is less than 100% and the file is not fully downloaded"""
if file["availability"] < 1 and not file["progress"] == 1:
return True
return False
async def _mark_files_as_stopped(self, qbit_client, torrent_hash, stoppable_files):
"""Mark specific files as 'Do Not Download' in qBittorrent."""
for file, _ in stoppable_files:
await qbit_client.set_torrent_file_priority(torrent_hash, file['index'], 0)
for file, _ in stoppable_files:
await qbit_client.set_torrent_file_priority(torrent_hash, file["index"], 0)
def _all_files_stopped(self, torrent_files, stoppable_files):
@staticmethod
def _all_files_stopped(torrent_files, stoppable_files) -> bool:
"""Check if all files are either stopped (priority 0) or in the stoppable files list."""
stoppable_file_indexes= {file[0]["index"] for file in stoppable_files}
stoppable_file_indexes = {file[0]["index"] for file in stoppable_files}
return all(f["priority"] == 0 or f["index"] in stoppable_file_indexes for f in torrent_files)
def _match_queue_items(self, download_hash):
def _match_queue_items(self, download_hash) -> list:
"""Find matching queue item(s) by downloadId (uppercase)."""
return [
item for item in self.queue
if item["downloadId"].upper() == download_hash.upper()
]

View File

@@ -1,16 +1,9 @@
from src.jobs.removal_job import RemovalJob
class RemoveFailedDownloads(RemovalJob):
queue_scope = "normal"
blocklist = False
async def _find_affected_items(self):
affected_items = []
for item in self.queue:
if "status" in item:
if item["status"] == "failed":
affected_items.append(item)
return affected_items
return self.queue_manager.filter_queue_by_status(self.queue, ["failed"])

View File

@@ -1,6 +1,8 @@
import fnmatch
from src.jobs.removal_job import RemovalJob
class RemoveFailedImports(RemovalJob):
queue_scope = "normal"
blocklist = True
@@ -20,11 +22,12 @@ class RemoveFailedImports(RemovalJob):
return affected_items
def _is_valid_item(self, item):
@staticmethod
def _is_valid_item(item) -> bool:
"""Check if item has the necessary fields and is in a valid state."""
# Required fields that must be present in the item
required_fields = {"status", "trackedDownloadStatus", "trackedDownloadState", "statusMessages"}
# Check if all required fields are present
if not all(field in item for field in required_fields):
return False
@@ -34,35 +37,33 @@ class RemoveFailedImports(RemovalJob):
return False
# Check if the tracked download state is one of the allowed states
if item["trackedDownloadState"] not in {"importPending", "importFailed", "importBlocked"}:
return False
# If all checks pass, the item is valid
return True
return not (item["trackedDownloadState"] not in {"importPending", "importFailed", "importBlocked"})
def _prepare_removal_messages(self, item, patterns):
def _prepare_removal_messages(self, item, patterns) -> list[str]:
"""Prepare removal messages, adding the tracked download state and matching messages."""
messages = self._get_matching_messages(item["statusMessages"], patterns)
if not messages:
return []
removal_messages = [f">>>>> Tracked Download State: {item['trackedDownloadState']}"] + messages
return removal_messages
return [f">>>>> Tracked Download State: {item['trackedDownloadState']}", *messages]
def _get_matching_messages(self, status_messages, patterns):
@staticmethod
def _get_matching_messages(status_messages, patterns) -> list:
"""Extract messages matching the provided patterns (or all messages if no pattern)."""
matched_messages = []
if not patterns:
# No patterns provided, include all messages
for status_message in status_messages:
matched_messages.extend(f">>>>> - {msg}" for msg in status_message.get("messages", []))
else:
# Patterns provided, match only those messages that fit the patterns
for status_message in status_messages:
for msg in status_message.get("messages", []):
if any(fnmatch.fnmatch(msg, pattern) for pattern in patterns):
matched_messages.append(f">>>>> - {msg}")
return matched_messages
matched_messages.extend(
f">>>>> - {msg}"
for status_message in status_messages
for msg in status_message.get("messages", [])
if any(fnmatch.fnmatch(msg, pattern) for pattern in patterns)
)
return matched_messages

View File

@@ -6,13 +6,5 @@ class RemoveMetadataMissing(RemovalJob):
blocklist = True
async def _find_affected_items(self):
affected_items = []
for item in self.queue:
if "errorMessage" in item and "status" in item:
if (
item["status"] == "queued"
and item["errorMessage"] == "qBittorrent is downloading metadata"
):
affected_items.append(item)
return affected_items
conditions = [("queued", "qBittorrent is downloading metadata")]
return self.queue_manager.filter_queue_by_status_and_error_message(self.queue, conditions)

View File

@@ -1,5 +1,6 @@
from src.jobs.removal_job import RemovalJob
class RemoveMissingFiles(RemovalJob):
queue_scope = "normal"
blocklist = False
@@ -8,12 +9,12 @@ class RemoveMissingFiles(RemovalJob):
affected_items = []
for item in self.queue:
if self._is_failed_torrent(item) or self._is_bad_nzb(item):
if self._is_failed_torrent(item) or self._no_elibible_import(item):
affected_items.append(item)
return affected_items
def _is_failed_torrent(self, item):
@staticmethod
def _is_failed_torrent(item) -> bool:
return (
"status" in item
and item["status"] == "warning"
@@ -25,7 +26,8 @@ class RemoveMissingFiles(RemovalJob):
]
)
def _is_bad_nzb(self, item):
@staticmethod
def _no_elibible_import(item) -> bool:
if "status" in item and item["status"] == "completed" and "statusMessages" in item:
for status_message in item["statusMessages"]:
if "messages" in status_message:

View File

@@ -1,10 +1,9 @@
from src.jobs.removal_job import RemovalJob
class RemoveOrphans(RemovalJob):
queue_scope = "orphans"
blocklist = False
async def _find_affected_items(self):
return self.queue

View File

@@ -25,31 +25,34 @@ class RemoveSlow(RemovalJob):
if self._is_completed_but_stuck(item):
logger.info(
f">>> '{self.job_name}' detected download marked as slow as well as completed. Files most likely in process of being moved. Not removing: {item['title']}"
f">>> '{self.job_name}' detected download marked as slow as well as completed. Files most likely in process of being moved. Not removing: {item['title']}",
)
continue
downloaded, previous, increment, speed = await self._get_progress_stats(
item
item,
)
if self._is_slow(speed):
affected_items.append(item)
logger.debug(
f'remove_slow/slow speed detected: {item["title"]} '
f"(Speed: {speed} KB/s, KB now: {downloaded}, KB previous: {previous}, "
f"Diff: {increment}, In Minutes: {self.settings.general.timer})"
f"Diff: {increment}, In Minutes: {self.settings.general.timer})",
)
return affected_items
def _is_valid_item(self, item):
required_keys = {"downloadId", "size", "sizeleft", "status", "protocol"}
@staticmethod
def _is_valid_item(item) -> bool:
required_keys = {"downloadId", "size", "sizeleft", "status", "protocol"}
return required_keys.issubset(item)
def _is_usenet(self, item):
@staticmethod
def _is_usenet(item) -> bool:
return item.get("protocol") == "usenet"
def _is_completed_but_stuck(self, item):
@staticmethod
def _is_completed_but_stuck(item) -> bool:
return (
item["status"] == "downloading"
and item["size"] > 0
@@ -67,7 +70,7 @@ class RemoveSlow(RemovalJob):
download_progress = self._get_download_progress(item, download_id)
previous_progress, increment, speed = self._compute_increment_and_speed(
download_id, download_progress
download_id, download_progress,
)
self.arr.tracker.download_progress[download_id] = download_progress
@@ -83,15 +86,18 @@ class RemoveSlow(RemovalJob):
return progress
return self._fallback_progress(item)
def _try_get_qbit_progress(self, qbit, download_id):
@staticmethod
def _try_get_qbit_progress(qbit, download_id):
# noinspection PyBroadException
try:
return qbit.get_download_progress(download_id)
except Exception:
except Exception: # noqa: BLE001
return None
def _fallback_progress(self, item):
@staticmethod
def _fallback_progress(item):
logger.debug(
"get_progress_stats: Using imprecise method to determine download increments because either a different download client than qBitorrent is used, or the download client name in the config does not match with what is configured in your *arr download client settings"
"get_progress_stats: Using imprecise method to determine download increments because either a different download client than qBitorrent is used, or the download client name in the config does not match with what is configured in your *arr download client settings",
)
return item["size"] - item["sizeleft"]

View File

@@ -1,3 +1,5 @@
"""Removes stalled downloads."""
from src.jobs.removal_job import RemovalJob
@@ -6,15 +8,5 @@ class RemoveStalled(RemovalJob):
blocklist = True
async def _find_affected_items(self):
affected_items = []
for item in self.queue:
if "errorMessage" in item and "status" in item:
if (
item["status"] == "warning"
and item["errorMessage"]
== "The download is stalled with no connections"
):
affected_items.append(item)
return affected_items
conditions = [("warning", "The download is stalled with no connections")]
return self.queue_manager.filter_queue_by_status_and_error_message(self.queue, conditions)

View File

@@ -14,7 +14,6 @@ class RemoveUnmonitored(RemovalJob):
# When queue item has been matched to artist (for instance in lidarr) but not yet to the detail (eg. album), then detail key is logically missing.
# Thus we can't check if the item is monitored yet
monitored_download_ids.append(item["downloadId"])
# Second pass: Append queue items whose download id is not among the monitored download ids
affected_items = []

View File

@@ -1,9 +1,10 @@
from datetime import datetime, timedelta, timezone
import dateutil.parser
from src.utils.log_setup import logger
from src.utils.wanted_manager import WantedManager
from src.utils.queue_manager import QueueManager
from src.utils.wanted_manager import WantedManager
class SearchHandler:
@@ -21,10 +22,10 @@ class SearchHandler:
wanted_items = await self._get_initial_wanted_items(search_type)
if not wanted_items:
return
logger.debug(f"search_handler.py/handle_search: Getting list of queue items to only search for items that are not already downloading.")
queue = await QueueManager(self.arr, self.settings).get_queue_items(
queue_scope="normal"
queue_scope="normal",
)
wanted_items = self._filter_wanted_items(wanted_items, queue)
if not wanted_items:
@@ -43,7 +44,8 @@ class SearchHandler:
logger.verbose(f"Searching for unmet cutoff content on {self.arr.name}:")
self.job = self.settings.jobs.search_unmet_cutoff_content
else:
raise ValueError(f"Unknown search type: {search_type}")
error = f"Unknown search type: {search_type}"
raise ValueError(error)
def _get_initial_wanted_items(self, search_type):
wanted = self.wanted_manager.get_wanted_items(search_type)
@@ -54,13 +56,13 @@ class SearchHandler:
def _filter_wanted_items(self, items, queue):
items = self._filter_already_downloading(items, queue)
if not items:
logger.verbose(f">>> All items already downloading, nothing to search for.")
logger.verbose(">>> All items already downloading, nothing to search for.")
return []
items = self._filter_recent_searches(items)
if not items:
logger.verbose(
f">>> All items recently searched for, thus not triggering another search."
">>> All items recently searched for, thus not triggering another search.",
)
return []

View File

@@ -8,7 +8,6 @@ class StrikesHandler:
self.max_strikes = max_strikes
self.tracker.defective.setdefault(job_name, {})
def check_permitted_strikes(self, affected_downloads):
self._recover_downloads(affected_downloads)
return self._apply_strikes_and_filter(affected_downloads)
@@ -27,10 +26,9 @@ class StrikesHandler:
)
del self.tracker.defective[self.job_name][d_id]
def _apply_strikes_and_filter(self, affected_downloads):
for d_id, queue_items in list(affected_downloads.items()):
title = queue_items[0]["title"]
for d_id, affected_download in list(affected_downloads.items()):
title = affected_download["title"]
strikes = self._increment_strike(d_id, title)
strikes_left = self.max_strikes - strikes
self._log_strike_status(title, strikes, strikes_left)
@@ -39,10 +37,9 @@ class StrikesHandler:
return affected_downloads
def _increment_strike(self, d_id, title):
entry = self.tracker.defective[self.job_name].setdefault(
d_id, {"title": title, "strikes": 0}
d_id, {"title": title, "strikes": 0},
)
entry["strikes"] += 1
return entry["strikes"]
@@ -58,7 +55,7 @@ class StrikesHandler:
">>> Job '%s' detected download (%s/%s strikes): %s",
self.job_name, strikes, self.max_strikes, title,
)
elif strikes_left <= -2:
elif strikes_left <= -2: # noqa: PLR2004
logger.info(
">>> Job '%s' detected download (%s/%s strikes): %s",
self.job_name, strikes, self.max_strikes, title,

0
src/settings/__init__.py Normal file
View File

View File

@@ -1,5 +1,6 @@
import yaml
def mask_sensitive_value(value, key, sensitive_attributes):
"""Mask the value if it's in the sensitive attributes."""
return "*****" if key in sensitive_attributes else value
@@ -40,19 +41,19 @@ def clean_object(obj, sensitive_attributes, internal_attributes, hide_internal_a
"""Clean an object (either a dict, class instance, or other types)."""
if isinstance(obj, dict):
return clean_dict(obj, sensitive_attributes, internal_attributes, hide_internal_attr)
elif hasattr(obj, "__dict__"):
if hasattr(obj, "__dict__"):
return clean_dict(vars(obj), sensitive_attributes, internal_attributes, hide_internal_attr)
else:
return mask_sensitive_value(obj, "", sensitive_attributes)
return mask_sensitive_value(obj, "", sensitive_attributes)
def get_config_as_yaml(
data,
sensitive_attributes=None,
internal_attributes=None,
*,
hide_internal_attr=True,
):
"""Main function to process the configuration into YAML format."""
"""Process the configuration into YAML format."""
if sensitive_attributes is None:
sensitive_attributes = set()
if internal_attributes is None:
@@ -67,7 +68,7 @@ def get_config_as_yaml(
# Process list-based config
if isinstance(obj, list):
cleaned_list = clean_list(
obj, sensitive_attributes, internal_attributes, hide_internal_attr
obj, sensitive_attributes, internal_attributes, hide_internal_attr,
)
if cleaned_list:
config_output[key] = cleaned_list
@@ -75,7 +76,7 @@ def get_config_as_yaml(
# Process dict or class-like object config
else:
cleaned_obj = clean_object(
obj, sensitive_attributes, internal_attributes, hide_internal_attr
obj, sensitive_attributes, internal_attributes, hide_internal_attr,
)
if cleaned_obj:
config_output[key] = cleaned_obj

View File

@@ -1,4 +1,5 @@
import os
from src.settings._config_as_yaml import get_config_as_yaml

View File

@@ -1,12 +1,14 @@
from src.settings._config_as_yaml import get_config_as_yaml
from src.settings._download_clients_qBit import QbitClients
from src.settings._download_clients_qbit import QbitClients
DOWNLOAD_CLIENT_TYPES = ["qbittorrent"]
class DownloadClients:
"""Represents all download clients."""
qbittorrent = None
download_client_types = [
"qbittorrent",
]
def __init__(self, config, settings):
self._set_qbit_clients(config, settings)
self.check_unique_download_client_types()
@@ -15,7 +17,7 @@ class DownloadClients:
download_clients = config.get("download_clients", {})
if isinstance(download_clients, dict):
self.qbittorrent = QbitClients(config, settings)
if not self.qbittorrent: # Unsets settings in general section needed for qbit (if no qbit is defined)
if not self.qbittorrent: # Unsets settings in general section needed for qbit (if no qbit is defined)
for key in [
"private_tracker_handling",
"public_tracker_handling",
@@ -25,40 +27,42 @@ class DownloadClients:
setattr(settings.general, key, None)
def config_as_yaml(self):
"""Logs all download clients."""
"""Log all download clients."""
return get_config_as_yaml(
{"qbittorrent": self.qbittorrent},
sensitive_attributes={"username", "password", "cookie"},
internal_attributes={ "api_url", "cookie", "settings", "min_version"},
hide_internal_attr=True
internal_attributes={"api_url", "cookie", "settings", "min_version"},
hide_internal_attr=True,
)
def check_unique_download_client_types(self):
"""Ensures that all download client names are unique.
This is important since downloadClient in arr goes by name, and
this is needed to link it to the right IP set up in the yaml config
(which may be different to the one donfigured in arr)"""
"""
Ensure that all download client names are unique.
This is important since downloadClient in arr goes by name, and
this is needed to link it to the right IP set up in the yaml config
(which may be different to the one configured in arr)
"""
seen = set()
for download_client_type in self.download_client_types:
for download_client_type in DOWNLOAD_CLIENT_TYPES:
download_clients = getattr(self, download_client_type, [])
# Check each client in the list
for client in download_clients:
name = getattr(client, "name", None)
if name is None:
raise ValueError(f'{download_client_type} client does not have a name ({client.base_url}).\nMake sure that the name corresponds with the name set in your *arr app for that download client.')
error = f"{download_client_type} client does not have a name ({client.base_url}).\nMake sure that the name corresponds with the name set in your *arr app for that download client."
raise ValueError(error)
if name.lower() in seen:
raise ValueError(f"Download client names must be unique. Duplicate name found: '{name}'\nMake sure that the name corresponds with the name set in your *arr app for that download client.")
else:
seen.add(name.lower())
error = f"Download client names must be unique. Duplicate name found: '{name}'\nMake sure that the name corresponds with the name set in your *arr app for that download client."
raise ValueError(error)
seen.add(name.lower())
def get_download_client_by_name(self, name: str):
"""Retrieve the download client and its type by its name."""
name_lower = name.lower()
for download_client_type in self.download_client_types:
for download_client_type in DOWNLOAD_CLIENT_TYPES:
download_clients = getattr(self, download_client_type, [])
# Check each client in the list

View File

@@ -1,6 +1,7 @@
from packaging import version
from src.utils.common import make_request, wait_and_exit
from src.settings._constants import ApiEndpoints, MinVersions
from src.utils.common import make_request, wait_and_exit
from src.utils.log_setup import logger
@@ -9,7 +10,7 @@ class QbitError(Exception):
class QbitClients(list):
"""Represents all qBittorrent clients"""
"""Represents all qBittorrent clients."""
def __init__(self, config, settings):
super().__init__()
@@ -20,7 +21,7 @@ class QbitClients(list):
if not isinstance(qbit_config, list):
logger.error(
"Invalid config format for qbittorrent clients. Expected a list."
"Invalid config format for qbittorrent clients. Expected a list.",
)
return
@@ -34,7 +35,7 @@ class QbitClients(list):
class QbitClient:
"""Represents a single qBittorrent client."""
cookie: str = None
cookie: dict[str, str] = None
version: str = None
def __init__(
@@ -48,11 +49,12 @@ class QbitClient:
self.settings = settings
if not base_url:
logger.error("Skipping qBittorrent client entry: 'base_url' is required.")
raise ValueError("qBittorrent client must have a 'base_url'.")
error = "qBittorrent client must have a 'base_url'."
raise ValueError(error)
self.base_url = base_url.rstrip("/")
self.api_url = self.base_url + getattr(ApiEndpoints, "qbittorrent")
self.min_version = getattr(MinVersions, "qbittorrent")
self.api_url = self.base_url + ApiEndpoints.qbittorrent
self.min_version = MinVersions.qbittorrent
self.username = username
self.password = password
self.name = name
@@ -65,13 +67,18 @@ class QbitClient:
self._remove_none_attributes()
def _remove_none_attributes(self):
"""Removes attributes that are None to keep the object clean."""
"""Remove attributes that are None to keep the object clean."""
for attr in list(vars(self)):
if getattr(self, attr) is None:
delattr(self, attr)
async def refresh_cookie(self):
"""Refresh the qBittorrent session cookie."""
def _connection_error():
error = "Login failed."
raise ConnectionError(error)
try:
logger.debug(
"_download_clients_qBit.py/refresh_cookie: Refreshing qBit cookie"
@@ -92,7 +99,7 @@ class QbitClient:
)
if response.text == "Fails.":
raise ConnectionError("Login failed.")
_connection_error()
self.cookie = {"SID": response.cookies["SID"]}
except Exception as e:
@@ -118,14 +125,13 @@ class QbitClient:
if version.parse(self.version) < version.parse(min_version):
logger.error(
f"Please update qBittorrent to at least version {min_version}. Current version: {self.version}"
)
raise QbitError(
f"qBittorrent version {self.version} is too old. Please update."
f"Please update qBittorrent to at least version {min_version}. Current version: {self.version}",
)
error = f"qBittorrent version {self.version} is too old. Please update."
raise QbitError(error)
if version.parse(self.version) < version.parse("5.0.0"):
logger.info(
f"[Tip!] Consider upgrading to qBittorrent v5.0.0 or newer to reduce network overhead."
"[Tip!] Consider upgrading to qBittorrent v5.0.0 or newer to reduce network overhead.",
)
async def create_tag(self, tag: str):
@@ -166,13 +172,13 @@ class QbitClient:
)
endpoint = f"{self.api_url}/app/preferences"
response = await make_request(
"get", endpoint, self.settings, cookies=self.cookie
"get", endpoint, self.settings, cookies=self.cookie,
)
qbit_settings = response.json()
if not qbit_settings.get("use_unwanted_folder"):
logger.info(
"Enabling 'Keep unselected files in .unwanted folder' in qBittorrent."
"Enabling 'Keep unselected files in .unwanted folder' in qBittorrent.",
)
data = {"json": '{"use_unwanted_folder": true}'}
await make_request(
@@ -205,7 +211,7 @@ class QbitClient:
ignore_test_run=True,
)
except Exception as e:
except Exception as e: # noqa: BLE001
tip = "💡 Tip: Did you specify the URL (and username/password if required) correctly?"
logger.error(f"-- | qBittorrent\n❗️ {e}\n{tip}\n")
wait_and_exit()
@@ -227,8 +233,7 @@ class QbitClient:
)["server_state"]["connection_status"]
if qbit_connection_status == "disconnected":
return False
else:
return True
return True
async def setup(self):
"""Perform the qBittorrent setup by calling relevant managers."""
@@ -252,7 +257,7 @@ class QbitClient:
await self.set_unwanted_folder()
async def get_protected_and_private(self):
"""Fetches torrents from qBittorrent and checks for protected and private status."""
"""Fetch torrents from qBittorrent and checks for protected and private status."""
protected_downloads = []
private_downloads = []
@@ -301,11 +306,12 @@ class QbitClient:
async def set_tag(self, tags, hashes):
"""
Sets tags to one or more torrents in qBittorrent.
Set tags to one or more torrents in qBittorrent.
Args:
tags (list): A list of tag names to be added.
hashes (list): A list of torrent hashes to which the tags should be applied.
"""
# Ensure hashes are provided as a string separated by '|'
hashes_str = "|".join(hashes)

View File

@@ -1,11 +1,12 @@
import yaml
from src.utils.log_setup import logger
from src.settings._validate_data_types import validate_data_types
from src.settings._config_as_yaml import get_config_as_yaml
from src.settings._validate_data_types import validate_data_types
from src.utils.log_setup import logger
VALID_TRACKER_HANDLING = {"remove", "skip", "obsolete_tag"}
class General:
"""Represents general settings for the application."""
VALID_TRACKER_HANDLING = {"remove", "skip", "obsolete_tag"}
log_level: str = "INFO"
test_run: bool = False
@@ -17,7 +18,6 @@ class General:
obsolete_tag: str = None
protected_tag: str = "Keep"
def __init__(self, config):
general_config = config.get("general", {})
self.log_level = general_config.get("log_level", self.log_level.upper())
@@ -32,31 +32,31 @@ class General:
self.protected_tag = general_config.get("protected_tag", self.protected_tag)
# Validate tracker handling settings
self.private_tracker_handling = self._validate_tracker_handling( self.private_tracker_handling, "private_tracker_handling" )
self.public_tracker_handling = self._validate_tracker_handling( self.public_tracker_handling, "public_tracker_handling" )
self.private_tracker_handling = self._validate_tracker_handling(self.private_tracker_handling, "private_tracker_handling")
self.public_tracker_handling = self._validate_tracker_handling(self.public_tracker_handling, "public_tracker_handling")
self.obsolete_tag = self._determine_obsolete_tag(self.obsolete_tag)
validate_data_types(self)
self._remove_none_attributes()
def _remove_none_attributes(self):
"""Removes attributes that are None to keep the object clean."""
"""Remove attributes that are None to keep the object clean."""
for attr in list(vars(self)):
if getattr(self, attr) is None:
delattr(self, attr)
def _validate_tracker_handling(self, value, field_name):
"""Validates tracker handling options. Defaults to 'remove' if invalid."""
if value not in self.VALID_TRACKER_HANDLING:
@staticmethod
def _validate_tracker_handling(value, field_name) -> str:
"""Validate tracker handling options. Defaults to 'remove' if invalid."""
if value not in VALID_TRACKER_HANDLING:
logger.error(
f"Invalid value '{value}' for {field_name}. Defaulting to 'remove'."
f"Invalid value '{value}' for {field_name}. Defaulting to 'remove'.",
)
return "remove"
return value
def _determine_obsolete_tag(self, obsolete_tag):
"""Defaults obsolete tag to "obsolete", only if none is provided and the tag is needed for handling """
"""Set obsolete tag to "obsolete", only if none is provided and the tag is needed for handling."""
if obsolete_tag is None and (
self.private_tracker_handling == "obsolete_tag"
or self.public_tracker_handling == "obsolete_tag"
@@ -65,10 +65,7 @@ class General:
return obsolete_tag
def config_as_yaml(self):
"""Logs all general settings."""
# yaml_output = yaml.dump(vars(self), indent=2, default_flow_style=False, sort_keys=False)
# logger.info(f"General Settings:\n{yaml_output}")
"""Log all general settings."""
return get_config_as_yaml(
vars(self),
)
)

View File

@@ -1,16 +1,16 @@
import requests
from packaging import version
from src.utils.log_setup import logger
from src.settings._config_as_yaml import get_config_as_yaml
from src.settings._constants import (
ApiEndpoints,
MinVersions,
FullQueueParameter,
DetailItemKey,
DetailItemSearchCommand,
FullQueueParameter,
MinVersions,
)
from src.settings._config_as_yaml import get_config_as_yaml
from src.utils.common import make_request, wait_and_exit
from src.utils.log_setup import logger
class Tracker:
@@ -63,9 +63,9 @@ class Instances:
"""Return a list of arr instances matching the given arr_type."""
return [arr for arr in self.arrs if arr.arr_type == arr_type]
def config_as_yaml(self, hide_internal_attr=True):
"""Logs all configured Arr instances while masking sensitive attributes."""
internal_attributes={
def config_as_yaml(self, *, hide_internal_attr=True):
"""Log all configured Arr instances while masking sensitive attributes."""
internal_attributes = {
"settings",
"api_url",
"min_version",
@@ -92,8 +92,6 @@ class Instances:
return "\n".join(outputs)
def check_any_arrs(self):
"""Check if there are any ARR instances."""
if not self.arrs:
@@ -128,12 +126,11 @@ class ArrInstances(list):
arr_type=arr_type,
base_url=client_config["base_url"],
api_key=client_config["api_key"],
)
),
)
except KeyError as e:
logger.error(
f"Missing required key {e} in {arr_type} client config."
)
error = f"Missing required key {e} in {arr_type} client config."
logger.error(error)
class ArrInstance:
@@ -146,11 +143,13 @@ class ArrInstance:
def __init__(self, settings, arr_type: str, base_url: str, api_key: str):
if not base_url:
logger.error(f"Skipping {arr_type} client entry: 'base_url' is required.")
raise ValueError(f"{arr_type} client must have a 'base_url'.")
error = f"{arr_type} client must have a 'base_url'."
raise ValueError(error)
if not api_key:
logger.error(f"Skipping {arr_type} client entry: 'api_key' is required.")
raise ValueError(f"{arr_type} client must have an 'api_key'.")
error = f"{arr_type} client must have an 'api_key'."
raise ValueError(error)
self.settings = settings
self.arr_type = arr_type
@@ -164,6 +163,8 @@ class ArrInstance:
self.detail_item_ids_key = self.detail_item_key + "Ids"
self.detail_item_search_command = getattr(DetailItemSearchCommand, arr_type)
self.detail_item_search_command = getattr(DetailItemSearchCommand, arr_type)
async def _check_ui_language(self):
"""Check if the UI language is set to English."""
endpoint = self.api_url + "/config/ui"
@@ -173,25 +174,26 @@ class ArrInstance:
if ui_language > 1: # Not English
logger.error("!! %s Error: !!", self.name)
logger.error(
f"> Decluttarr only works correctly if UI language is set to English (under Settings/UI in {self.name})"
f"> Decluttarr only works correctly if UI language is set to English (under Settings/UI in {self.name})",
)
logger.error(
"> Details: https://github.com/ManiMatter/decluttarr/issues/132)"
"> Details: https://github.com/ManiMatter/decluttarr/issues/132)",
)
raise ArrError("Not English")
error = "Not English"
raise ArrError(error)
def _check_min_version(self, status):
"""Check if ARR instance meets minimum version requirements."""
self.version = status["version"]
min_version = getattr(self.settings.min_versions, self.arr_type)
if min_version:
if version.parse(self.version) < version.parse(min_version):
logger.error("!! %s Error: !!", self.name)
logger.error(
f"> Please update {self.name} ({self.base_url}) to at least version {min_version}. Current version: {self.version}"
)
raise ArrError("Not meeting minimum version requirements")
if min_version and version.parse(self.version) < version.parse(min_version):
logger.error("!! %s Error: !!", self.name)
logger.error(
f"> Please update {self.name} ({self.base_url}) to at least version {min_version}. Current version: {self.version}",
)
error = f"Not meeting minimum version requirements: {min_version}"
logger.error(error)
def _check_arr_type(self, status):
"""Check if the ARR instance is of the correct type."""
@@ -199,9 +201,10 @@ class ArrInstance:
if actual_arr_type.lower() != self.arr_type:
logger.error("!! %s Error: !!", self.name)
logger.error(
f"> Your {self.name} ({self.base_url}) points to a {actual_arr_type} instance, rather than {self.arr_type}. Did you specify the wrong IP?"
f"> Your {self.name} ({self.base_url}) points to a {actual_arr_type} instance, rather than {self.arr_type}. Did you specify the wrong IP?",
)
raise ArrError("Wrong Arr Type")
error = "Wrong Arr Type"
logger.error(error)
async def _check_reachability(self):
"""Check if ARR instance is reachable."""
@@ -210,14 +213,13 @@ class ArrInstance:
endpoint = self.api_url + "/system/status"
headers = {"X-Api-Key": self.api_key}
response = await make_request(
"get", endpoint, self.settings, headers=headers, log_error=False
"get", endpoint, self.settings, headers=headers, log_error=False,
)
status = response.json()
return status
return response.json()
except Exception as e:
if isinstance(e, requests.exceptions.HTTPError):
response = getattr(e, "response", None)
if response is not None and response.status_code == 401:
if response is not None and response.status_code == 401: # noqa: PLR2004
tip = "💡 Tip: Have you configured the API_KEY correctly?"
else:
tip = f"💡 Tip: HTTP error occurred. Status: {getattr(response, 'status_code', 'unknown')}"
@@ -230,7 +232,7 @@ class ArrInstance:
raise ArrError(e) from e
async def setup(self):
"""Checks on specific ARR instance"""
"""Check on specific ARR instance."""
try:
status = await self._check_reachability()
self.name = status.get("instanceName", self.arr_type)
@@ -242,7 +244,7 @@ class ArrInstance:
logger.info(f"OK | {self.name} ({self.base_url})")
logger.debug(f"Current version of {self.name}: {self.version}")
except Exception as e:
except Exception as e: # noqa: BLE001
if not isinstance(e, ArrError):
logger.error(f"Unhandled error: {e}", exc_info=True)
wait_and_exit()
@@ -266,17 +268,19 @@ class ArrInstance:
return client.get("implementation", None)
return None
async def remove_queue_item(self, queue_id, blocklist=False):
async def remove_queue_item(self, queue_id, *, blocklist=False):
"""
Remove a specific queue item from the queue by its qeue id.
Remove a specific queue item from the queue by its queue id.
Sends a delete request to the API to remove the item.
Args:
queue_id (str): The quueue ID of the queue item to be removed.
queue_id (str): The queue ID of the queue item to be removed.
blocklist (bool): Whether to add the item to the blocklist. Default is False.
Returns:
bool: Returns True if the removal was successful, False otherwise.
"""
logger.debug(f"_instances.py/remove_queue_item: Removing queue item, blocklist: {blocklist}")
endpoint = f"{self.api_url}/queue/{queue_id}"
@@ -285,14 +289,11 @@ class ArrInstance:
# Send the request to remove the download from the queue
response = await make_request(
"delete", endpoint, self.settings, headers=headers, json=json_payload
"delete", endpoint, self.settings, headers=headers, json=json_payload,
)
# If the response is successful, return True, else return False
if response.status_code == 200:
return True
else:
return False
return response.status_code == 200 # noqa: PLR2004
async def is_monitored(self, detail_id):
"""Check if detail item (like a book, series, etc) is monitored."""

View File

@@ -1,6 +1,6 @@
from src.utils.log_setup import logger
from src.settings._validate_data_types import validate_data_types
from src.settings._config_as_yaml import get_config_as_yaml
from src.settings._validate_data_types import validate_data_types
from src.utils.log_setup import logger
class JobParams:
@@ -36,7 +36,7 @@ class JobParams:
self._remove_none_attributes()
def _remove_none_attributes(self):
"""Removes attributes that are None to keep the object clean."""
"""Remove attributes that are None to keep the object clean."""
for attr in list(vars(self)):
if getattr(self, attr) is None:
delattr(self, attr)
@@ -56,16 +56,16 @@ class JobDefaults:
job_defaults_config = config.get("job_defaults", {})
self.max_strikes = job_defaults_config.get("max_strikes", self.max_strikes)
self.max_concurrent_searches = job_defaults_config.get(
"max_concurrent_searches", self.max_concurrent_searches
"max_concurrent_searches", self.max_concurrent_searches,
)
self.min_days_between_searches = job_defaults_config.get(
"min_days_between_searches", self.min_days_between_searches
"min_days_between_searches", self.min_days_between_searches,
)
validate_data_types(self)
class Jobs:
"""Represents all jobs explicitly"""
"""Represent all jobs explicitly."""
def __init__(self, config):
self.job_defaults = JobDefaults(config)
@@ -79,10 +79,10 @@ class Jobs:
)
self.remove_failed_downloads = JobParams()
self.remove_failed_imports = JobParams(
message_patterns=self.job_defaults.message_patterns
message_patterns=self.job_defaults.message_patterns,
)
self.remove_metadata_missing = JobParams(
max_strikes=self.job_defaults.max_strikes
max_strikes=self.job_defaults.max_strikes,
)
self.remove_missing_files = JobParams()
self.remove_orphans = JobParams()
@@ -108,8 +108,7 @@ class Jobs:
self._set_job_settings(job_name, config["jobs"][job_name])
def _set_job_settings(self, job_name, job_config):
"""Sets per-job config settings"""
"""Set per-job config settings."""
job = getattr(self, job_name, None)
if (
job_config is None
@@ -134,8 +133,8 @@ class Jobs:
setattr(self, job_name, job)
validate_data_types(
job, self.job_defaults
) # Validates and applies defauls from job_defaults
job, self.job_defaults,
) # Validates and applies defaults from job_defaults
def log_status(self):
job_strings = []
@@ -158,7 +157,7 @@ class Jobs:
)
def list_job_status(self):
"""Returns a string showing each job and whether it's enabled or not using emojis."""
"""Return a string showing each job and whether it's enabled or not using emojis."""
lines = []
for name, obj in vars(self).items():
if hasattr(obj, "enabled"):

View File

@@ -1,5 +1,7 @@
import os
from pathlib import Path
import yaml
from src.utils.log_setup import logger
CONFIG_MAPPING = {
@@ -34,7 +36,8 @@ CONFIG_MAPPING = {
def get_user_config(settings):
"""Checks if data is read from enviornment variables, or from yaml file.
"""
Check if data is read from environment variables, or from yaml file.
Reads from environment variables if in docker, unless in docker-compose "USE_CONFIG_YAML" is set to true.
Then the config file is read.
@@ -53,7 +56,7 @@ def get_user_config(settings):
def _parse_env_var(key: str) -> dict | list | str | int | None:
"""Helper function to parse one setting input key"""
"""Parse one setting input key."""
raw_value = os.getenv(key)
if raw_value is None:
return None
@@ -67,7 +70,7 @@ def _parse_env_var(key: str) -> dict | list | str | int | None:
def _load_section(keys: list[str]) -> dict:
"""Helper function to parse one section of expected config"""
"""Parse one section of expected config."""
section_config = {}
for key in keys:
parsed = _parse_env_var(key)
@@ -76,14 +79,6 @@ def _load_section(keys: list[str]) -> dict:
return section_config
def _load_from_env() -> dict:
"""Main function to load settings from env"""
config = {}
for section, keys in CONFIG_MAPPING.items():
config[section] = _load_section(keys)
return config
def _load_from_env() -> dict:
config = {}
@@ -100,7 +95,7 @@ def _load_from_env() -> dict:
parsed_value = _lowercase(parsed_value)
except yaml.YAMLError as e:
logger.error(
f"Failed to parse environment variable {key} as YAML:\n{e}"
f"Failed to parse environment variable {key} as YAML:\n{e}",
)
parsed_value = {}
section_config[key.lower()] = parsed_value
@@ -111,28 +106,26 @@ def _load_from_env() -> dict:
def _lowercase(data):
"""Translates recevied keys (for instance setting-keys of jobs) to lower case"""
"""Translate received keys (for instance setting-keys of jobs) to lower case."""
if isinstance(data, dict):
return {str(k).lower(): _lowercase(v) for k, v in data.items()}
elif isinstance(data, list):
if isinstance(data, list):
return [_lowercase(item) for item in data]
else:
# Leave strings and other types unchanged
return data
# Leave strings and other types unchanged
return data
def _config_file_exists(settings):
config_path = settings.paths.config_file
return os.path.exists(config_path)
return Path(config_path).exists()
def _load_from_yaml_file(settings):
"""Reads config from YAML file and returns a dict."""
"""Read config from YAML file and returns a dict."""
config_path = settings.paths.config_file
try:
with open(config_path, "r", encoding="utf-8") as file:
config = yaml.safe_load(file) or {}
return config
with Path(config_path).open(encoding="utf-8") as file:
return yaml.safe_load(file) or {}
except yaml.YAMLError as e:
logger.error("Error reading YAML file: %s", e)
return {}

View File

@@ -1,14 +1,21 @@
import inspect
from src.utils.log_setup import logger
def validate_data_types(cls, default_cls=None):
"""Ensures all attributes match expected types dynamically.
"""
Ensure all attributes match expected types dynamically.
If default_cls is provided, the default key is taken from this class rather than the own class
If the attribute doesn't exist in `default_cls`, fall back to `cls.__class__`.
"""
def _unhandled_conversion():
error = f"Unhandled type conversion for '{attr}': {expected_type}"
raise TypeError(error)
annotations = inspect.get_annotations(cls.__class__) # Extract type hints
for attr, expected_type in annotations.items():
@@ -17,7 +24,7 @@ def validate_data_types(cls, default_cls=None):
value = getattr(cls, attr)
default_source = default_cls if default_cls and hasattr(default_cls, attr) else cls.__class__
default_value = getattr(default_source, attr, None)
default_value = getattr(default_source, attr, None)
if value == default_value:
continue
@@ -37,22 +44,20 @@ def validate_data_types(cls, default_cls=None):
elif expected_type is dict:
value = convert_to_dict(value)
else:
raise TypeError(f"Unhandled type conversion for '{attr}': {expected_type}")
except Exception as e:
_unhandled_conversion()
except Exception as e: # noqa: BLE001
logger.error(
f"❗️ Invalid type for '{attr}': Expected {expected_type.__name__}, but got {type(value).__name__}. "
f"Error: {e}. Using default value: {default_value}"
f"Error: {e}. Using default value: {default_value}",
)
value = default_value
setattr(cls, attr, value)
# --- Helper Functions ---
def convert_to_bool(raw_value):
"""Converts strings like 'yes', 'no', 'true', 'false' into boolean values."""
"""Convert strings like 'yes', 'no', 'true', 'false' into boolean values."""
if isinstance(raw_value, bool):
return raw_value
@@ -64,28 +69,29 @@ def convert_to_bool(raw_value):
if raw_value in true_values:
return True
elif raw_value in false_values:
if raw_value in false_values:
return False
else:
raise ValueError(f"Invalid boolean value: '{raw_value}'")
error = f"Invalid boolean value: '{raw_value}'"
raise ValueError(error)
def convert_to_str(raw_value):
"""Ensures a string and trims whitespace."""
"""Ensure a string and trims whitespace."""
if isinstance(raw_value, str):
return raw_value.strip()
return str(raw_value).strip()
def convert_to_list(raw_value):
"""Ensures a value is a list."""
"""Ensure a value is a list."""
if isinstance(raw_value, list):
return [convert_to_str(item) for item in raw_value]
return [convert_to_str(raw_value)] # Wrap single values in a list
def convert_to_dict(raw_value):
"""Ensures a value is a dictionary."""
"""Ensure a value is a dictionary."""
if isinstance(raw_value, dict):
return {convert_to_str(k): v for k, v in raw_value.items()}
raise TypeError(f"Expected dict but got {type(raw_value).__name__}")
error = f"Expected dict but got {type(raw_value).__name__}"
raise TypeError(error)

View File

@@ -1,14 +1,14 @@
from src.utils.log_setup import configure_logging
from src.settings._constants import Envs, MinVersions, Paths
# from src.settings._migrate_legacy import migrate_legacy
from src.settings._general import General
from src.settings._jobs import Jobs
from src.settings._download_clients import DownloadClients
from src.settings._general import General
from src.settings._instances import Instances
from src.settings._jobs import Jobs
from src.settings._user_config import get_user_config
from src.utils.log_setup import configure_logging
class Settings:
min_versions = MinVersions()
paths = Paths()
@@ -21,7 +21,6 @@ class Settings:
self.instances = Instances(config, self)
configure_logging(self)
def __repr__(self):
sections = [
("ENVIRONMENT SETTINGS", "envs"),
@@ -30,11 +29,8 @@ class Settings:
("JOB SETTINGS", "jobs"),
("INSTANCE SETTINGS", "instances"),
("DOWNLOAD CLIENT SETTINGS", "download_clients"),
]
messages = []
messages.append("🛠️ Decluttarr - Settings 🛠️")
messages.append("-"*80)
# messages.append("")
]
messages = ["🛠️ Decluttarr - Settings 🛠️", "-" * 80]
for title, attr_name in sections:
section = getattr(self, attr_name, None)
section_content = section.config_as_yaml()
@@ -44,18 +40,14 @@ class Settings:
elif section_content != "{}":
messages.append(self._format_section_title(title))
messages.append(section_content)
messages.append("") # Extra linebreak after section
messages.append("") # Extra linebreak after section
return "\n".join(messages)
def _format_section_title(self, name, border_length=50, symbol="="):
@staticmethod
def _format_section_title(name, border_length=50, symbol="=") -> str:
"""Format section title with centered name and hash borders."""
padding = max(border_length - len(name) - 2, 0) # 4 for spaces
left_hashes = right_hashes = padding // 2
if padding % 2 != 0:
right_hashes += 1
return f"{symbol * left_hashes} {name} {symbol * right_hashes}"

0
src/utils/__init__.py Normal file
View File

View File

@@ -1,9 +1,10 @@
import asyncio
import sys
import time
import asyncio
import logging
import copy
import requests
from src.utils.log_setup import logger
@@ -27,13 +28,13 @@ def sanitize_kwargs(data):
else:
redacted[key] = sanitize_kwargs(value)
return redacted
elif isinstance(data, list):
if isinstance(data, list):
return [sanitize_kwargs(item) for item in data]
return data
async def make_request(
method: str, endpoint: str, settings, timeout: int = 15, log_error = True, **kwargs
method: str, endpoint: str, settings, timeout: int = 15, *, log_error=True, **kwargs,
) -> requests.Response:
"""
A utility function to make HTTP requests (GET, POST, DELETE, PUT).
@@ -56,7 +57,7 @@ async def make_request(
logger.debug(
f"common.py/make_request: Making {method.upper()} request to {endpoint} with kwargs={sanitized_kwargs}"
)
# Make the request using the method passed (get, post, etc.)
response = await asyncio.to_thread(
getattr(requests, method.lower()),

View File

@@ -1,6 +1,6 @@
import logging
import os
from logging.handlers import RotatingFileHandler
from pathlib import Path
# Track added logging levels
_added_levels = {}
@@ -9,7 +9,8 @@ _added_levels = {}
def add_logging_level(level_name, level_num):
"""Dynamically add a custom logging level."""
if level_name in _added_levels or level_num in _added_levels.values():
raise ValueError(f"Logging level '{level_name}' or number '{level_num}' already exists.")
error = f"Logging level '{level_name}' or number '{level_num}' already exists."
raise ValueError(error)
logging.addLevelName(level_num, level_name.upper())
@@ -26,41 +27,41 @@ def add_logging_level(level_name, level_num):
add_logging_level("TRACE", 5)
add_logging_level("VERBOSE", 15)
# Configure the default logger
logger = logging.getLogger(__name__)
def set_handler_format(log_handler, long_format = True):
def set_handler_format(log_handler, *, long_format=True):
if long_format:
target_format = logging.Formatter("%(asctime)s | %(levelname)-7s | %(message)s", "%Y-%m-%d %H:%M:%S")
else:
target_format = logging.Formatter("%(levelname)-7s | %(message)s")
log_handler.setFormatter(target_format)
# Default console handler
console_handler = logging.StreamHandler()
set_handler_format(console_handler, long_format = True)
set_handler_format(console_handler, long_format=True)
logger.addHandler(console_handler)
logger.setLevel(logging.INFO)
def configure_logging(settings):
"""Add a file handler and adjust log levels for all handlers."""
if settings.envs.in_docker:
set_handler_format(console_handler, long_format = False)
set_handler_format(console_handler, long_format=False)
log_file = settings.paths.logs
log_dir = os.path.dirname(log_file)
os.makedirs(log_dir, exist_ok=True)
log_dir = Path(log_file).parent
Path(log_dir).mkdir(exist_ok=True, parents=True)
# File handler
file_handler = RotatingFileHandler(log_file, maxBytes=50 * 1024 * 1024, backupCount=2)
set_handler_format(file_handler, long_format = True)
set_handler_format(file_handler, long_format=True)
logger.addHandler(file_handler)
# Update log level for all handlers
log_level = getattr(logging, settings.general.log_level.upper(), logging.INFO)
for handler in logger.handlers:
handler.setLevel(log_level)
logger.setLevel(log_level)
logger.setLevel(log_level)

View File

@@ -1,6 +1,6 @@
import logging
from src.utils.log_setup import logger
from src.utils.common import make_request
from src.utils.log_setup import logger
class QueueManager:
@@ -10,7 +10,8 @@ class QueueManager:
async def get_queue_items(self, queue_scope):
"""
Retrieves queue items based on the scope.
Retrieve queue items based on the scope.
queue_scope:
"normal" = normal queue
"orphans" = orphaned queue items (in full queue but not in normal queue)
@@ -25,12 +26,13 @@ class QueueManager:
elif queue_scope == "full":
queue_items = await self._get_queue(full_queue=True)
else:
raise ValueError(f"Invalid queue_scope: {queue_scope}")
error = f"Invalid queue_scope: {queue_scope}"
raise ValueError(error)
if logger.isEnabledFor(logging.DEBUG):
logger.debug("queue_manager.py/get_queue_items/queue (%s): %s", queue_scope, self.format_queue(queue_items))
return queue_items
async def _get_queue(self, full_queue=False):
async def _get_queue(self, *, full_queue=False):
# Step 1: Refresh the queue (now internal)
await self._refresh_queue()
@@ -43,15 +45,14 @@ class QueueManager:
# Step 4: Filter the queue based on delayed items and ignored download clients
queue = self._filter_out_ignored_statuses(queue)
queue = self._filter_out_ignored_download_clients(queue)
queue = self._add_detail_item_key(queue)
return queue
return self._add_detail_item_key(queue)
def _add_detail_item_key(self, queue):
"""Normalizes episodeID, bookID, etc so it can just be called by 'detail_item_id'"""
"""Normalize episodeID, bookID, etc. so it can just be called by 'detail_item_id'."""
for items in queue:
items["detail_item_id"] = items.get(self.arr.detail_item_id_key)
items["detail_item_id"] = items.get(self.arr.detail_item_id_key)
return queue
async def _refresh_queue(self):
# Refresh the queue by making the POST request using an external make_request function
await make_request(
@@ -88,7 +89,7 @@ class QueueManager:
records = (
await make_request(
method="GET",
endpoint=f"{self.arr.api_url}/queue",
endpoint=f"{self.arr.api_url}/queue",
settings=self.settings,
params=params,
headers={"X-Api-Key": self.arr.api_key},
@@ -110,8 +111,7 @@ class QueueManager:
list[dict]: Filtered queue.
"""
if queue is None:
return queue
return None
seen_combinations = set()
filtered_queue = []
@@ -152,7 +152,7 @@ class QueueManager:
return filtered_queue
def format_queue(self, queue_items):
def format_queue(self, queue_items) -> list | str:
if not queue_items:
return "empty"
return self.group_by_download_id(queue_items)
@@ -192,3 +192,20 @@ class QueueManager:
}
return grouped_dict
@staticmethod
def filter_queue_by_status(queue, statuses: list[str]) -> list[dict]:
"""Filter queue items that match any of the given statuses."""
return [item for item in queue if item.get("status") in statuses]
@staticmethod
def filter_queue_by_status_and_error_message(queue, conditions: list[tuple[str, str]]) -> list[dict]:
"""Filter queue items that match any given (status, errorMessage) pair."""
queue_items = []
for item in queue:
if "errorMessage" in item and "status" in item:
for status, message in conditions:
if item["status"] == status and item["errorMessage"] == message:
queue_items.append(item)
break # Stop checking other conditions once one matches
return queue_items

View File

@@ -1,49 +1,50 @@
import warnings
from src.utils.log_setup import logger
def show_welcome(settings):
messages = []
messages = ["🎉🎉🎉 Decluttarr - Application Started! 🎉🎉🎉",
"-" * 80,
"⭐️ Like this app?",
"Thanks for giving it a ⭐️ on GitHub!",
"https://github.com/ManiMatter/decluttarr/"]
# Show welcome message
messages.append("🎉🎉🎉 Decluttarr - Application Started! 🎉🎉🎉")
messages.append("-"*80)
# messages.append("")
messages.append("⭐️ Like this app?")
messages.append("Thanks for giving it a ⭐️ on GitHub!")
messages.append("https://github.com/ManiMatter/decluttarr/")
# Show info level tip
if settings.general.log_level == "INFO":
# messages.append("")
messages.append("")
messages.append("💡 Tip: More logs?")
messages.append("If you want to know more about what's going on, switch log level to 'VERBOSE'")
messages.extend([
"",
"💡 Tip: More logs?",
"If you want to know more about what's going on, switch log level to 'VERBOSE'",
])
# Show bug report tip
# messages.append("")
messages.append("")
messages.append("🐛 Found a bug?")
messages.append("Before reporting bugs on GitHub, please:")
messages.append("1) Check the readme on github")
messages.append("2) Check open and closed issues on github")
messages.append("3) Switch your logs to 'DEBUG' level")
messages.append("4) Turn off any features other than the one(s) causing it")
messages.append("5) Provide the full logs via pastebin on your GitHub issue")
messages.append("Once submitted, thanks for being responsive and helping debug / re-test")
messages.extend([
"",
"🐛 Found a bug?",
"Before reporting bugs on GitHub, please:",
"1) Check the readme on github",
"2) Check open and closed issues on github",
"3) Switch your logs to 'DEBUG' level",
"4) Turn off any features other than the one(s) causing it",
"5) Provide the full logs via pastebin on your GitHub issue",
"Once submitted, thanks for being responsive and helping debug / re-test",
])
# Show test mode tip
if settings.general.test_run:
# messages.append("")
messages.append("")
messages.append("=================== IMPORTANT ====================")
messages.append(" ⚠️ ⚠️ ⚠️ TEST MODE IS ACTIVE ⚠️ ⚠️ ⚠️")
messages.append("Decluttarr won't actually do anything for you...")
messages.append("You can change this via the setting 'test_run'")
messages.append("==================================================")
messages.extend([
"",
"=================== IMPORTANT ====================",
" ⚠️ ⚠️ ⚠️ TEST MODE IS ACTIVE ⚠️ ⚠️ ⚠️",
"Decluttarr won't actually do anything for you...",
"You can change this via the setting 'test_run'",
"==================================================",
])
messages.append("")
# messages.append("-"*80)
# Log all messages at once
logger.info("\n".join(messages))

View File

@@ -8,12 +8,12 @@ class WantedManager:
async def get_wanted_items(self, missing_or_cutoff):
"""
Retrieves wanted items :
missing_or_cutoff: Drives whether missing or cutoff items are retrieved
Retrieve wanted items.
missing_or_cutoff: Drives whether missing or cutoff items are retrieved
"""
record_count = await self._get_total_records(missing_or_cutoff)
missing_or_cutoff = await self._get_arr_records(missing_or_cutoff, record_count)
return missing_or_cutoff
return await self._get_arr_records(missing_or_cutoff, record_count)
async def _get_total_records(self, missing_or_cutoff):
# Get the total number of records from wanted
@@ -46,9 +46,8 @@ class WantedManager:
).json()
return records["records"]
async def search_items(self, detail_ids):
"""Search items by detail IDs"""
"""Search items by detail IDs."""
if isinstance(detail_ids, str):
detail_ids = [detail_ids]
@@ -62,4 +61,4 @@ class WantedManager:
settings=self.settings,
json=json,
headers={"X-Api-Key": self.arr.api_key},
)
)