mirror of
https://github.com/ManiMatter/decluttarr.git
synced 2026-04-21 16:25:35 +02:00
Code Rewrite to support multi instances
This commit is contained in:
@@ -1,183 +0,0 @@
|
||||
# Cleans the download queue
#
# NOTE(review): reconstructed from a whitespace-mangled paste. The nesting of
# the `if full_queue:` / `else:` branches follows the only reading consistent
# with the ">>> Queue is clean." / ">>> Queue is empty." messages — confirm
# against upstream history.
import sys  # needed for sys.exit() on unknown arr_type (import was missing)
import logging, verboselogs

logger = verboselogs.VerboseLogger(__name__)

from src.utils.shared import errorDetails, get_queue
from src.jobs.remove_failed import remove_failed
from src.jobs.remove_failed_imports import remove_failed_imports
from src.jobs.remove_metadata_missing import remove_metadata_missing
from src.jobs.remove_missing_files import remove_missing_files
from src.jobs.remove_orphans import remove_orphans
from src.jobs.remove_slow import remove_slow
from src.jobs.remove_stalled import remove_stalled
from src.jobs.remove_unmonitored import remove_unmonitored
from src.jobs.run_periodic_rescans import run_periodic_rescans
from src.utils.trackers import Deleted_Downloads

# Maps each supported *arr type to the query parameter that makes its API
# also return queue items that are not mapped to a known library entry.
FULL_QUEUE_PARAMS = {
    "RADARR": "includeUnknownMovieItems",
    "SONARR": "includeUnknownSeriesItems",
    "LIDARR": "includeUnknownArtistItems",
    "READARR": "includeUnknownAuthorItems",
    "WHISPARR": "includeUnknownSeriesItems",
}


async def queueCleaner(
    settingsDict,
    arr_type,
    defective_tracker,
    download_sizes_tracker,
    protectedDownloadIDs,
    privateDowloadIDs,
):
    """Clean the download queue of a single *arr instance.

    Runs every removal job that is enabled in settingsDict against the
    instance identified by arr_type, then optionally triggers periodic
    rescans. Errors are logged via errorDetails and never propagated.
    """
    # Read out correct instance depending on radarr/sonarr flag
    if arr_type not in FULL_QUEUE_PARAMS:
        logger.error("Unknown arr_type specified, exiting: %s", str(arr_type))
        sys.exit()
    # Settings keys follow the "<TYPE>_URL" / "<TYPE>_KEY" / "<TYPE>_NAME"
    # convention, so a single lookup replaces the original if/elif ladder.
    BASE_URL = settingsDict[arr_type + "_URL"]
    API_KEY = settingsDict[arr_type + "_KEY"]
    NAME = settingsDict[arr_type + "_NAME"]
    full_queue_param = FULL_QUEUE_PARAMS[arr_type]

    # Cleans up the downloads queue
    logger.verbose("Cleaning queue on %s:", NAME)
    try:
        # Refresh queue:
        full_queue = await get_queue(
            BASE_URL, API_KEY, settingsDict, params={full_queue_param: True}
        )
        if full_queue:
            logger.debug("queueCleaner/full_queue at start:")
            logger.debug(full_queue)

            deleted_downloads = Deleted_Downloads([])
            items_detected = 0

            # Arguments shared by every removal job, in positional order.
            common_args = (
                settingsDict,
                BASE_URL,
                API_KEY,
                NAME,
                deleted_downloads,
                defective_tracker,
                protectedDownloadIDs,
                privateDowloadIDs,
            )
            if settingsDict["REMOVE_FAILED"]:
                items_detected += await remove_failed(*common_args)

            if settingsDict["REMOVE_FAILED_IMPORTS"]:
                items_detected += await remove_failed_imports(*common_args)

            if settingsDict["REMOVE_METADATA_MISSING"]:
                items_detected += await remove_metadata_missing(*common_args)

            if settingsDict["REMOVE_MISSING_FILES"]:
                items_detected += await remove_missing_files(*common_args)

            if settingsDict["REMOVE_ORPHANS"]:
                items_detected += await remove_orphans(*common_args, full_queue_param)

            if settingsDict["REMOVE_SLOW"]:
                items_detected += await remove_slow(*common_args, download_sizes_tracker)

            if settingsDict["REMOVE_STALLED"]:
                items_detected += await remove_stalled(*common_args)

            if settingsDict["REMOVE_UNMONITORED"]:
                items_detected += await remove_unmonitored(*common_args, arr_type)

            if items_detected == 0:
                logger.verbose(">>> Queue is clean.")
        else:
            logger.verbose(">>> Queue is empty.")

        if settingsDict["RUN_PERIODIC_RESCANS"]:
            await run_periodic_rescans(
                settingsDict,
                BASE_URL,
                API_KEY,
                NAME,
                arr_type,
            )
    except Exception as error:
        errorDetails(NAME, error)
    return
|
||||
107
src/job_manager.py
Normal file
107
src/job_manager.py
Normal file
@@ -0,0 +1,107 @@
|
||||
# Cleans the download queue
from src.utils.log_setup import logger
from src.utils.queue_manager import QueueManager

from src.jobs.remove_bad_files import RemoveBadFiles
from src.jobs.remove_failed_imports import RemoveFailedImports
from src.jobs.remove_failed_downloads import RemoveFailedDownloads
from src.jobs.remove_metadata_missing import RemoveMetadataMissing
from src.jobs.remove_missing_files import RemoveMissingFiles
from src.jobs.remove_orphans import RemoveOrphans
from src.jobs.remove_slow import RemoveSlow
from src.jobs.remove_stalled import RemoveStalled
from src.jobs.remove_unmonitored import RemoveUnmonitored

from src.jobs.search_handler import SearchHandler


class JobManager:
    """Runs all enabled removal jobs and search jobs against one arr instance."""

    # The arr instance currently being processed; set per call by run_jobs().
    arr = None

    def __init__(self, settings):
        self.settings = settings

    async def run_jobs(self, arr):
        """Entry point: clean the queue of `arr`, then trigger searches."""
        self.arr = arr
        await self.removal_jobs()
        await self.search_jobs()

    async def removal_jobs(self):
        """Run every enabled removal job; log a summary when nothing was found."""
        logger.verbose("")  # blank separator line (was a pointless f-string)
        logger.verbose(f"Cleaning queue on {self.arr.name}:")
        if not await self._queue_has_items():
            return

        if not await self._qbit_connected():
            return

        # Refresh trackers
        await self.arr.tracker.refresh_private_and_protected(self.settings)

        # Execute Cleaning
        removal_jobs = self._get_removal_jobs()
        items_detected = 0
        for removal_job in removal_jobs:
            items_detected += await removal_job.run()

        if items_detected == 0:
            logger.verbose(">>> Queue is clean.")

    async def search_jobs(self):
        """Trigger 'missing' and 'cutoff unmet' content searches when enabled."""
        if self.arr.arr_type == "whisparr":
            # Whisparr does not support this endpoint (yet?)
            return
        if self.settings.jobs.search_missing_content.enabled:
            await SearchHandler(self.arr, self.settings).handle_search("missing")
        if self.settings.jobs.search_unmet_cutoff_content.enabled:
            await SearchHandler(self.arr, self.settings).handle_search("cutoff")

    async def _queue_has_items(self):
        """Return True when the full queue is non-empty (logs it either way)."""
        queue_manager = QueueManager(self.arr, self.settings)
        full_queue = await queue_manager.get_queue_items("full")
        if full_queue:
            # Plain format string: %s is filled lazily by the logging call
            # (the original combined an f-prefix with a %-style argument).
            logger.debug(
                "job_runner/full_queue at start: %s",
                queue_manager.format_queue(full_queue),
            )
            return True
        logger.verbose(">>> Queue is empty.")
        return False

    async def _qbit_connected(self):
        """Return False (and warn) if any configured qBittorrent client is unreachable."""
        for qbit in self.settings.download_clients.qbittorrent:
            # Check if any client is disconnected
            if not await qbit.check_qbit_connected():
                logger.warning(
                    f">>> qBittorrent is disconnected. Skipping queue cleaning on {self.arr.name}."
                )
                return False
        return True

    def _get_removal_jobs(self):
        """
        Returns a list of enabled removal job instances based on the provided settings.

        Each job is included if the corresponding attribute exists and is truthy in settings.jobs.
        """
        removal_job_classes = {
            "remove_bad_files": RemoveBadFiles,
            "remove_failed_imports": RemoveFailedImports,
            "remove_failed_downloads": RemoveFailedDownloads,
            "remove_metadata_missing": RemoveMetadataMissing,
            "remove_missing_files": RemoveMissingFiles,
            "remove_orphans": RemoveOrphans,
            "remove_slow": RemoveSlow,
            "remove_stalled": RemoveStalled,
            "remove_unmonitored": RemoveUnmonitored,
        }

        return [
            removal_job_class(self.arr, self.settings, removal_job_name)
            for removal_job_name, removal_job_class in removal_job_classes.items()
            if getattr(self.settings.jobs, removal_job_name, False)
        ]
|
||||
68
src/jobs/removal_handler.py
Normal file
68
src/jobs/removal_handler.py
Normal file
@@ -0,0 +1,68 @@
|
||||
from src.utils.log_setup import logger


class RemovalHandler:
    """Removes (or tags as obsolete) downloads that a removal job has flagged."""

    def __init__(self, arr, settings, job_name):
        self.arr = arr
        self.settings = settings
        self.job_name = job_name

    async def remove_downloads(self, affected_downloads, blocklist):
        """Process every flagged download: skip, remove, or tag it.

        Mutates affected_downloads in place (skipped/already-deleted entries
        are dropped) and records handled IDs in the arr's deleted tracker.
        """
        # Iterate over a snapshot of the keys so entries can be deleted mid-loop.
        for download_id in list(affected_downloads.keys()):
            logger.debug(
                "remove_download/deleted_downloads.dict IN: %s",
                str(self.arr.tracker.deleted),
            )

            queue_item = affected_downloads[download_id][0]
            handling_method = await self._get_handling_method(download_id, queue_item)

            if download_id in self.arr.tracker.deleted or handling_method == "skip":
                del affected_downloads[download_id]
                continue

            if handling_method == "remove":
                await self._remove_download(queue_item, blocklist)
            elif handling_method == "tag_as_obsolete":
                await self._tag_as_obsolete(queue_item, download_id)

            # Print out detailed removal messages (if any)
            if "removal_messages" in queue_item:
                for msg in queue_item["removal_messages"]:
                    logger.info(msg)

            self.arr.tracker.deleted.append(download_id)

            # NOTE(review): placement reconstructed from a flattened paste —
            # this OUT log may belong after the loop; confirm against upstream.
            logger.debug(
                "remove_download/arr_instance.tracker.deleted OUT: %s",
                str(self.arr.tracker.deleted),
            )

    async def _remove_download(self, queue_item, blocklist):
        """Remove a single queue item via the arr API (skipped on test runs)."""
        queue_id = queue_item["id"]
        logger.info(f">>> Job '{self.job_name}' triggered removal: {queue_item['title']}")
        if not self.settings.general.test_run:
            await self.arr.remove_queue_item(queue_id=queue_id, blocklist=blocklist)

    async def _tag_as_obsolete(self, queue_item, download_id):
        """Tag the torrent as obsolete in qBittorrent instead of removing it."""
        # Fixed missing space after "Job" to match the removal message above.
        logger.info(f">>> Job '{self.job_name}' triggered obsolete-tagging: {queue_item['title']}")
        if not self.settings.general.test_run:
            for qbit in self.settings.download_clients.qbittorrent:
                await qbit.set_tag(tags=[self.settings.general.obsolete_tag], hashes=[download_id])

    async def _get_handling_method(self, download_id, queue_item):
        """Decide how to handle a flagged download.

        Returns one of the configured handling values ("remove",
        "tag_as_obsolete", or "skip"); non-torrent and non-qBittorrent
        downloads always fall back to "remove".
        """
        if queue_item["protocol"] != "torrent":
            return "remove"  # handling is only implemented for torrent

        client_implementation = await self.arr.get_download_client_implementation(
            queue_item["downloadClient"]
        )
        if client_implementation != "QBittorrent":
            return "remove"  # handling is only implemented for qbit

        if len(self.settings.download_clients.qbittorrent) == 0:
            return "remove"  # qbit not configured, thus can't tag

        if download_id in self.arr.tracker.private:
            return self.settings.general.private_tracker_handling

        return self.settings.general.public_tracker_handling
|
||||
82
src/jobs/removal_job.py
Normal file
82
src/jobs/removal_job.py
Normal file
@@ -0,0 +1,82 @@
|
||||
from abc import ABC, abstractmethod
from src.utils.queue_manager import QueueManager
from src.utils.log_setup import logger
from src.jobs.strikes_handler import StrikesHandler
from src.jobs.removal_handler import RemovalHandler


class RemovalJob(ABC):
    """Abstract base class for queue-cleaning jobs.

    Subclasses implement _find_affected_items(); this class drives the common
    pipeline: enabled-check, empty-queue check, protection filter, strike
    counting, and finally removal.
    """

    # Default class attributes (can be overridden in subclasses)
    job_name = None
    blocklist = True
    queue_scope = None
    affected_items = None
    affected_downloads = None
    job = None
    max_strikes = None

    def __init__(self, arr, settings, job_name):
        self.arr = arr
        self.settings = settings
        self.job_name = job_name
        # Per-job settings object, looked up by the job's name.
        self.job = getattr(self.settings.jobs, self.job_name)
        self.queue_manager = QueueManager(self.arr, self.settings)

    async def run(self):
        """Execute the job; return the number of downloads flagged for removal."""
        if not self.job.enabled:
            return 0
        if await self.is_queue_empty(self.job_name, self.queue_scope):
            return 0
        self.affected_items = await self._find_affected_items()
        self.affected_downloads = self.queue_manager.group_by_download_id(self.affected_items)

        # -- Checks --
        self._ignore_protected()

        # Jobs without a max_strikes setting remove downloads immediately.
        self.max_strikes = getattr(self.job, "max_strikes", None)
        if self.max_strikes:
            self.affected_downloads = StrikesHandler(
                job_name=self.job_name,
                arr=self.arr,
                max_strikes=self.max_strikes,
            ).check_permitted_strikes(self.affected_downloads)

        # -- Removal --
        await RemovalHandler(
            arr=self.arr,
            settings=self.settings,
            job_name=self.job_name,
        ).remove_downloads(self.affected_downloads, self.blocklist)

        return len(self.affected_downloads)

    async def is_queue_empty(self, job_name, queue_scope="normal"):
        """Return True when the relevant queue scope holds no items."""
        queue_items = await self.queue_manager.get_queue_items(queue_scope)
        # Lazy %-style args keep formatting off the hot path when DEBUG is off.
        logger.debug(
            "%s/queue IN: %s",
            job_name,
            self.queue_manager.format_queue(queue_items),
        )
        # Early exit if no queue
        return not queue_items

    def _ignore_protected(self):
        """
        Filters out downloads that are in the protected tracker.
        Directly updates self.affected_downloads.
        """
        self.affected_downloads = {
            download_id: queue_items
            for download_id, queue_items in self.affected_downloads.items()
            if download_id not in self.arr.tracker.protected
        }

    @abstractmethod  # Implemented on level of each removal job
    async def _find_affected_items(self):
        """Return the queue items this job considers defective."""
        pass
|
||||
195
src/jobs/remove_bad_files.py
Normal file
195
src/jobs/remove_bad_files.py
Normal file
@@ -0,0 +1,195 @@
|
||||
import os

from src.jobs.removal_job import RemovalJob
from src.utils.log_setup import logger


class RemoveBadFiles(RemovalJob):
    """Marks unwanted files inside torrents as 'Do not Download' and flags
    torrents for removal once every file in them has been stopped.

    Only implemented for qBittorrent download clients.
    """

    queue_scope = "normal"
    blocklist = True

    # fmt: off
    good_extensions = [
        # Movies, TV Shows (Radarr, Sonarr, Whisparr)
        ".webm", ".m4v", ".3gp", ".nsv", ".ty", ".strm", ".rm", ".rmvb", ".m3u", ".ifo", ".mov", ".qt", ".divx", ".xvid", ".bivx", ".nrg", ".pva", ".wmv", ".asf", ".asx", ".ogm", ".ogv", ".m2v", ".avi", ".bin", ".dat", ".dvr-ms", ".mpg", ".mpeg", ".mp4", ".avc", ".vp3", ".svq3", ".nuv", ".viv", ".dv", ".fli", ".flv", ".wpl", ".img", ".iso", ".vob", ".mkv", ".mk3d", ".ts", ".wtv", ".m2ts",
        # Subs (Radarr, Sonarr, Whisparr)
        ".sub", ".srt", ".idx",
        # Audio (Lidarr, Readarr)
        # NOTE(review): "wavepack" lacks a leading dot so it can never match a
        # file_extension — confirm intended spelling (".wavpack"?).
        ".aac", ".aif", ".aiff", ".aifc", ".ape", ".flac", ".mp2", ".mp3", ".m4a", ".m4b", ".m4p", ".mp4a", ".oga", ".ogg", ".opus", ".vorbis", ".wma", ".wav", ".wv", "wavepack",
        # Text (Readarr)
        ".epub", ".kepub", ".mobi", ".azw3", ".pdf",
    ]

    bad_keywords = ["Sample", "Trailer"]
    bad_keyword_limit = 500  # Megabyte; do not remove items larger than that
    # fmt: on

    async def _find_affected_items(self):
        """Scan qBittorrent-backed queue items; stop bad files and return the
        queue items whose torrents ended up with every file stopped."""
        queue = await self.queue_manager.get_queue_items(queue_scope="normal")

        # Get in-scope download IDs
        result = self._group_download_ids_by_client(queue)

        affected_items = []
        for download_client, info in result.items():
            download_client_type = info["download_client_type"]
            download_ids = info["download_ids"]

            if download_client_type == "qbittorrent":
                client_items = await self._handle_qbit(download_client, download_ids, queue)
                affected_items.extend(client_items)
        return affected_items

    def _group_download_ids_by_client(self, queue):
        """Group all relevant download IDs by download client.
        Limited to qbittorrent currently, as no other download clients implemented"""
        result = {}

        for item in queue:
            download_client_name = item.get("downloadClient")
            if not download_client_name:
                continue

            download_client, download_client_type = self.settings.download_clients.get_download_client_by_name(download_client_name)
            if not download_client or not download_client_type:
                continue

            # Skip non-qBittorrent clients for now
            if download_client_type != "qbittorrent":
                continue

            result.setdefault(download_client, {
                "download_client_type": download_client_type,
                "download_ids": set()
            })["download_ids"].add(item["downloadId"])

        return result

    async def _handle_qbit(self, qbit_client, hashes, queue):
        """Handle qBittorrent-specific logic for marking files as 'Do Not Download'."""
        affected_items = []
        qbit_items = await qbit_client.get_qbit_items(hashes=hashes)

        for qbit_item in self._get_items_to_process(qbit_items):
            self.arr.tracker.extension_checked.append(qbit_item["hash"])

            torrent_files = await self._get_active_files(qbit_client, qbit_item["hash"])
            stoppable_files = self._get_stoppable_files(torrent_files)

            if not stoppable_files:
                continue

            await self._mark_files_as_stopped(qbit_client, qbit_item["hash"], stoppable_files)
            self._log_stopped_files(stoppable_files, qbit_item["name"])

            if self._all_files_stopped(torrent_files, stoppable_files):
                logger.verbose(">>> All files in this torrent have been marked as 'Do not Download'. Removing torrent.")
                affected_items.extend(self._match_queue_items(queue, qbit_item["hash"]))

        return affected_items

    # -- Helper functions for qbit handling --
    def _get_items_to_process(self, qbit_items):
        """Return only downloads that have metadata and are supposedly downloading.
        Additionally, each download should be checked at least once (for bad
        extensions), and thereafter only if availability drops to less than 100%."""
        return [
            item for item in qbit_items
            if (
                item.get("has_metadata")
                and item["state"] in {"downloading", "forcedDL", "stalledDL"}
                and (
                    item["hash"] not in self.arr.tracker.extension_checked
                    or item["availability"] < 1
                )
            )
        ]

    async def _get_active_files(self, qbit_client, torrent_hash):
        """Return only files from the torrent that are still set to download, with file extension and name."""
        files = await qbit_client.get_torrent_files(torrent_hash)  # Await the async method
        return [
            {
                **f,  # Include all original file properties
                "file_name": os.path.basename(f["name"]),  # Add proper filename (without folder)
                "file_extension": os.path.splitext(f["name"])[1],  # Add file_extension (e.g., .mp3)
            }
            for f in files if f["priority"] > 0
        ]

    def _log_stopped_files(self, stopped_files, torrent_name):
        """Log which files were stopped in a torrent and why."""
        logger.verbose(
            f">>> Stopped downloading {len(stopped_files)} file{'s' if len(stopped_files) != 1 else ''} in: {torrent_name}"
        )

        for file, reasons in stopped_files:
            logger.verbose(f">>> - {file['file_name']} ({' & '.join(reasons)})")

    def _get_stoppable_files(self, torrent_files):
        """Return files that can be marked as 'Do not Download' based on specific conditions."""
        stoppable_files = []

        for file in torrent_files:
            # Only files still set to download (priority > 0) are candidates
            if file["priority"] > 0:
                reasons = []

                # Check for bad extension
                if self._is_bad_extension(file):
                    reasons.append(f"Bad extension: {file['file_extension']}")

                # Check if the file has low availability
                if self._is_complete_partial(file):
                    reasons.append(f"Low availability: {file['availability'] * 100:.1f}%")

                # Only add to stoppable_files if there are reasons to stop the file
                if reasons:
                    stoppable_files.append((file, reasons))

        return stoppable_files

    def _is_bad_extension(self, file):
        """Check if the file has a bad extension."""
        return file['file_extension'].lower() not in self.good_extensions

    def _is_complete_partial(self, file):
        """Check if the availability is less than 100% and the file is not fully downloaded."""
        return file["availability"] < 1 and file["progress"] != 1

    async def _mark_files_as_stopped(self, qbit_client, torrent_hash, stoppable_files):
        """Mark specific files as 'Do Not Download' in qBittorrent (skipped on test runs)."""
        for file, _ in stoppable_files:
            if not self.settings.general.test_run:
                await qbit_client.set_torrent_file_priority(torrent_hash, file['index'], 0)

    def _all_files_stopped(self, torrent_files, stoppable_files):
        """Check if all files are either stopped (priority 0) or in the stoppable files list.

        The source also contained a dead one-argument definition of this method;
        only this two-argument version (the last one defined) was ever bound, so
        the duplicate has been removed.
        """
        stoppable_file_indexes = {file[0]["index"] for file in stoppable_files}
        return all(f["priority"] == 0 or f["index"] in stoppable_file_indexes for f in torrent_files)

    def _match_queue_items(self, queue, download_hash):
        """Find matching queue item(s) by downloadId (case-insensitive).

        A duplicate earlier definition (comparing case-sensitively on the queue
        side) was shadowed dead code and has been removed.
        """
        return [
            item for item in queue
            if item["downloadId"].upper() == download_hash.upper()
        ]
|
||||
|
||||
|
||||
@@ -1,64 +0,0 @@
|
||||
from src.utils.shared import (
    errorDetails,
    formattedQueueInfo,
    get_queue,
    privateTrackerCheck,
    protectedDownloadCheck,
    execute_checks,
    permittedAttemptsCheck,
    remove_download,
    qBitOffline,
)
import sys, os, traceback
import logging, verboselogs

logger = verboselogs.VerboseLogger(__name__)


async def remove_failed(
    settingsDict,
    BASE_URL,
    API_KEY,
    NAME,
    deleted_downloads,
    defective_tracker,
    protectedDownloadIDs,
    privateDowloadIDs,
):
    """Detect failed downloads and trigger their deletion.

    Removed items are NOT added to the blocklist. Returns the number of
    affected queue items, or 0 on error (logged via errorDetails).
    """
    try:
        failType = "failed"
        queue = await get_queue(BASE_URL, API_KEY, settingsDict)
        logger.debug("remove_failed/queue IN: %s", formattedQueueInfo(queue))

        # Nothing to do on an empty queue or while qBittorrent is offline.
        if not queue:
            return 0
        if await qBitOffline(settingsDict, failType, NAME):
            return 0

        # Find items affected: anything the queue reports with status "failed".
        affectedItems = [
            queueItem
            for queueItem in queue
            if "errorMessage" in queueItem and queueItem.get("status") == "failed"
        ]
        affectedItems = await execute_checks(
            settingsDict,
            affectedItems,
            failType,
            BASE_URL,
            API_KEY,
            NAME,
            deleted_downloads,
            defective_tracker,
            privateDowloadIDs,
            protectedDownloadIDs,
            addToBlocklist=False,
            doPrivateTrackerCheck=True,
            doProtectedDownloadCheck=True,
            doPermittedAttemptsCheck=False,
        )
        return len(affectedItems)
    except Exception as error:
        errorDetails(NAME, error)
        return 0
|
||||
17
src/jobs/remove_failed_downloads.py
Normal file
17
src/jobs/remove_failed_downloads.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from src.jobs.removal_job import RemovalJob


class RemoveFailedDownloads(RemovalJob):
    """Flags queue items whose status is 'failed'.

    Removal happens without adding the release to the blocklist.
    """

    queue_scope = "normal"
    blocklist = False

    async def _find_affected_items(self):
        """Return every queue item that reports a 'failed' status."""
        queue = await self.queue_manager.get_queue_items(queue_scope="normal")
        # .get() covers both the key-presence check and the value comparison.
        return [entry for entry in queue if entry.get("status") == "failed"]
|
||||
|
||||
|
||||
@@ -1,105 +1,69 @@
|
||||
# NOTE(review): this region of the paste interleaved the pre-rewrite
# remove_failed_imports() function with its class-based replacement; the
# reconstruction below is the post-rewrite state of the file.
import fnmatch

from src.jobs.removal_job import RemovalJob


class RemoveFailedImports(RemovalJob):
    """Flags completed downloads whose import failed, is pending, or is blocked,
    optionally filtered by configurable status-message patterns.

    Removed items are added to the blocklist.
    """

    queue_scope = "normal"
    blocklist = True

    async def _find_affected_items(self):
        """Return queue items in a failed/pending/blocked import state whose
        status messages match the configured patterns (or all, if none set)."""
        queue = await self.queue_manager.get_queue_items(queue_scope="normal")
        affected_items = []
        patterns = self.job.message_patterns

        for item in queue:
            if not self._is_valid_item(item):
                continue

            removal_messages = self._prepare_removal_messages(item, patterns)
            if removal_messages:
                # Stored on the item so the removal handler can log details.
                item["removal_messages"] = removal_messages
                affected_items.append(item)

        return affected_items

    def _is_valid_item(self, item):
        """Check if item has the necessary fields and is in a valid state."""
        # Required fields that must be present in the item
        required_fields = {"status", "trackedDownloadStatus", "trackedDownloadState", "statusMessages"}

        # Check if all required fields are present
        if not all(field in item for field in required_fields):
            return False

        # Check if the item's status is completed and the tracked status is warning
        if item["status"] != "completed" or item["trackedDownloadStatus"] != "warning":
            return False

        # Check if the tracked download state is one of the allowed states
        if item["trackedDownloadState"] not in {"importPending", "importFailed", "importBlocked"}:
            return False

        # If all checks pass, the item is valid
        return True

    def _prepare_removal_messages(self, item, patterns):
        """Prepare removal messages, adding the tracked download state and matching messages."""
        messages = self._get_matching_messages(item["statusMessages"], patterns)
        if not messages:
            return []

        removal_messages = [f">>>>> Tracked Download State: {item['trackedDownloadState']}"] + messages
        return removal_messages

    def _get_matching_messages(self, status_messages, patterns):
        """Extract messages matching the provided patterns (or all messages if no pattern)."""
        matched_messages = []

        if not patterns:
            # No patterns provided, include all messages
            for status_message in status_messages:
                matched_messages.extend(f">>>>> - {msg}" for msg in status_message.get("messages", []))
        else:
            # Patterns provided, match only those messages that fit the patterns
            for status_message in status_messages:
                for msg in status_message.get("messages", []):
                    if any(fnmatch.fnmatch(msg, pattern) for pattern in patterns):
                        matched_messages.append(f">>>>> - {msg}")

        return matched_messages
|
||||
@@ -1,66 +1,19 @@
|
||||
from src.utils.shared import (
|
||||
errorDetails,
|
||||
formattedQueueInfo,
|
||||
get_queue,
|
||||
privateTrackerCheck,
|
||||
protectedDownloadCheck,
|
||||
execute_checks,
|
||||
permittedAttemptsCheck,
|
||||
remove_download,
|
||||
qBitOffline,
|
||||
)
|
||||
import sys, os, traceback
|
||||
import logging, verboselogs
|
||||
|
||||
logger = verboselogs.VerboseLogger(__name__)
|
||||
from src.jobs.removal_job import RemovalJob
|
||||
|
||||
|
||||
async def remove_metadata_missing(
|
||||
settingsDict,
|
||||
BASE_URL,
|
||||
API_KEY,
|
||||
NAME,
|
||||
deleted_downloads,
|
||||
defective_tracker,
|
||||
protectedDownloadIDs,
|
||||
privateDowloadIDs,
|
||||
):
|
||||
# Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist
|
||||
try:
|
||||
failType = "missing metadata"
|
||||
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
|
||||
logger.debug("remove_metadata_missing/queue IN: %s", formattedQueueInfo(queue))
|
||||
if not queue:
|
||||
return 0
|
||||
if await qBitOffline(settingsDict, failType, NAME):
|
||||
return 0
|
||||
# Find items affected
|
||||
affectedItems = []
|
||||
for queueItem in queue:
|
||||
if "errorMessage" in queueItem and "status" in queueItem:
|
||||
class RemoveMetadataMissing(RemovalJob):
|
||||
queue_scope = "normal"
|
||||
blocklist = True
|
||||
|
||||
async def _find_affected_items(self):
|
||||
queue = await self.queue_manager.get_queue_items(queue_scope="normal")
|
||||
affected_items = []
|
||||
|
||||
for item in queue:
|
||||
if "errorMessage" in item and "status" in item:
|
||||
if (
|
||||
queueItem["status"] == "queued"
|
||||
and queueItem["errorMessage"]
|
||||
== "qBittorrent is downloading metadata"
|
||||
item["status"] == "queued"
|
||||
and item["errorMessage"] == "qBittorrent is downloading metadata"
|
||||
):
|
||||
affectedItems.append(queueItem)
|
||||
affectedItems = await execute_checks(
|
||||
settingsDict,
|
||||
affectedItems,
|
||||
failType,
|
||||
BASE_URL,
|
||||
API_KEY,
|
||||
NAME,
|
||||
deleted_downloads,
|
||||
defective_tracker,
|
||||
privateDowloadIDs,
|
||||
protectedDownloadIDs,
|
||||
addToBlocklist=True,
|
||||
doPrivateTrackerCheck=True,
|
||||
doProtectedDownloadCheck=True,
|
||||
doPermittedAttemptsCheck=True,
|
||||
)
|
||||
return len(affectedItems)
|
||||
except Exception as error:
|
||||
errorDetails(NAME, error)
|
||||
return 0
|
||||
affected_items.append(item)
|
||||
return affected_items
|
||||
|
||||
@@ -1,81 +1,36 @@
|
||||
from src.utils.shared import (
|
||||
errorDetails,
|
||||
formattedQueueInfo,
|
||||
get_queue,
|
||||
privateTrackerCheck,
|
||||
protectedDownloadCheck,
|
||||
execute_checks,
|
||||
permittedAttemptsCheck,
|
||||
remove_download,
|
||||
qBitOffline,
|
||||
)
|
||||
import sys, os, traceback
|
||||
import logging, verboselogs
|
||||
from src.jobs.removal_job import RemovalJob
|
||||
|
||||
logger = verboselogs.VerboseLogger(__name__)
|
||||
class RemoveMissingFiles(RemovalJob):
|
||||
queue_scope = "normal"
|
||||
blocklist = False
|
||||
|
||||
async def _find_affected_items(self):
|
||||
queue = await self.queue_manager.get_queue_items(queue_scope="normal")
|
||||
affected_items = []
|
||||
|
||||
async def remove_missing_files(
|
||||
settingsDict,
|
||||
BASE_URL,
|
||||
API_KEY,
|
||||
NAME,
|
||||
deleted_downloads,
|
||||
defective_tracker,
|
||||
protectedDownloadIDs,
|
||||
privateDowloadIDs,
|
||||
):
|
||||
# Detects downloads broken because of missing files. Does not add to blocklist
|
||||
try:
|
||||
failType = "missing files"
|
||||
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
|
||||
logger.debug("remove_missing_files/queue IN: %s", formattedQueueInfo(queue))
|
||||
if not queue:
|
||||
return 0
|
||||
if await qBitOffline(settingsDict, failType, NAME):
|
||||
return 0
|
||||
# Find items affected
|
||||
affectedItems = []
|
||||
for queueItem in queue:
|
||||
if "status" in queueItem:
|
||||
# case to check for failed torrents
|
||||
if (
|
||||
queueItem["status"] == "warning"
|
||||
and "errorMessage" in queueItem
|
||||
and (
|
||||
queueItem["errorMessage"]
|
||||
== "DownloadClientQbittorrentTorrentStateMissingFiles"
|
||||
or queueItem["errorMessage"] == "The download is missing files"
|
||||
or queueItem["errorMessage"] == "qBittorrent is reporting missing files"
|
||||
)
|
||||
):
|
||||
affectedItems.append(queueItem)
|
||||
# case to check for failed nzb's/bad files/empty directory
|
||||
if queueItem["status"] == "completed" and "statusMessages" in queueItem:
|
||||
for statusMessage in queueItem["statusMessages"]:
|
||||
if "messages" in statusMessage:
|
||||
for message in statusMessage["messages"]:
|
||||
if message.startswith(
|
||||
"No files found are eligible for import in"
|
||||
):
|
||||
affectedItems.append(queueItem)
|
||||
affectedItems = await execute_checks(
|
||||
settingsDict,
|
||||
affectedItems,
|
||||
failType,
|
||||
BASE_URL,
|
||||
API_KEY,
|
||||
NAME,
|
||||
deleted_downloads,
|
||||
defective_tracker,
|
||||
privateDowloadIDs,
|
||||
protectedDownloadIDs,
|
||||
addToBlocklist=False,
|
||||
doPrivateTrackerCheck=True,
|
||||
doProtectedDownloadCheck=True,
|
||||
doPermittedAttemptsCheck=False,
|
||||
for item in queue:
|
||||
if self._is_failed_torrent(item) or self._is_bad_nzb(item):
|
||||
affected_items.append(item)
|
||||
|
||||
return affected_items
|
||||
|
||||
def _is_failed_torrent(self, item):
|
||||
return (
|
||||
"status" in item
|
||||
and item["status"] == "warning"
|
||||
and "errorMessage" in item
|
||||
and item["errorMessage"] in [
|
||||
"DownloadClientQbittorrentTorrentStateMissingFiles",
|
||||
"The download is missing files",
|
||||
"qBittorrent is reporting missing files",
|
||||
]
|
||||
)
|
||||
return len(affectedItems)
|
||||
except Exception as error:
|
||||
errorDetails(NAME, error)
|
||||
return 0
|
||||
|
||||
def _is_bad_nzb(self, item):
|
||||
if "status" in item and item["status"] == "completed" and "statusMessages" in item:
|
||||
for status_message in item["statusMessages"]:
|
||||
if "messages" in status_message:
|
||||
for message in status_message["messages"]:
|
||||
if message.startswith("No files found are eligible for import in"):
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -1,76 +1,11 @@
|
||||
from src.utils.shared import (
|
||||
errorDetails,
|
||||
formattedQueueInfo,
|
||||
get_queue,
|
||||
privateTrackerCheck,
|
||||
protectedDownloadCheck,
|
||||
execute_checks,
|
||||
permittedAttemptsCheck,
|
||||
remove_download,
|
||||
)
|
||||
import sys, os, traceback
|
||||
import logging, verboselogs
|
||||
from src.jobs.removal_job import RemovalJob
|
||||
|
||||
logger = verboselogs.VerboseLogger(__name__)
|
||||
class RemoveOrphans(RemovalJob):
|
||||
queue_scope = "full"
|
||||
blocklist = False
|
||||
|
||||
async def _find_affected_items(self):
|
||||
affected_items = await self.queue_manager.get_queue_items(queue_scope="orphans")
|
||||
return affected_items
|
||||
|
||||
|
||||
async def remove_orphans(
|
||||
settingsDict,
|
||||
BASE_URL,
|
||||
API_KEY,
|
||||
NAME,
|
||||
deleted_downloads,
|
||||
defective_tracker,
|
||||
protectedDownloadIDs,
|
||||
privateDowloadIDs,
|
||||
full_queue_param,
|
||||
):
|
||||
# Removes downloads belonging to movies/tv shows that have been deleted in the meantime. Does not add to blocklist
|
||||
try:
|
||||
failType = "orphan"
|
||||
full_queue = await get_queue(
|
||||
BASE_URL, API_KEY, settingsDict, params={full_queue_param: True}
|
||||
)
|
||||
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
|
||||
logger.debug("remove_orphans/full queue IN: %s", formattedQueueInfo(full_queue))
|
||||
if not full_queue:
|
||||
return 0 # By now the queue may be empty
|
||||
logger.debug("remove_orphans/queue IN: %s", formattedQueueInfo(queue))
|
||||
|
||||
# Find items affected
|
||||
# 1. create a list of the "known" queue items
|
||||
queueIDs = [queueItem["id"] for queueItem in queue] if queue else []
|
||||
affectedItems = []
|
||||
# 2. compare all queue items against the known ones, and those that are not found are the "unknown" or "orphan" ones
|
||||
for queueItem in full_queue:
|
||||
if queueItem["id"] not in queueIDs:
|
||||
affectedItems.append(queueItem)
|
||||
|
||||
affectedItems = await execute_checks(
|
||||
settingsDict,
|
||||
affectedItems,
|
||||
failType,
|
||||
BASE_URL,
|
||||
API_KEY,
|
||||
NAME,
|
||||
deleted_downloads,
|
||||
defective_tracker,
|
||||
privateDowloadIDs,
|
||||
protectedDownloadIDs,
|
||||
addToBlocklist=False,
|
||||
doPrivateTrackerCheck=True,
|
||||
doProtectedDownloadCheck=True,
|
||||
doPermittedAttemptsCheck=False,
|
||||
)
|
||||
logger.debug(
|
||||
"remove_orphans/full queue OUT: %s",
|
||||
formattedQueueInfo(
|
||||
await get_queue(
|
||||
BASE_URL, API_KEY, settingsDict, params={full_queue_param: True}
|
||||
)
|
||||
),
|
||||
)
|
||||
return len(affectedItems)
|
||||
except Exception as error:
|
||||
errorDetails(NAME, error)
|
||||
return 0
|
||||
|
||||
@@ -1,143 +1,106 @@
|
||||
from src.utils.shared import (
|
||||
errorDetails,
|
||||
formattedQueueInfo,
|
||||
get_queue,
|
||||
privateTrackerCheck,
|
||||
protectedDownloadCheck,
|
||||
execute_checks,
|
||||
permittedAttemptsCheck,
|
||||
remove_download,
|
||||
qBitOffline,
|
||||
)
|
||||
import sys, os, traceback
|
||||
import logging, verboselogs
|
||||
from src.utils.rest import rest_get
|
||||
|
||||
logger = verboselogs.VerboseLogger(__name__)
|
||||
from src.jobs.removal_job import RemovalJob
|
||||
from src.utils.log_setup import logger
|
||||
|
||||
|
||||
async def remove_slow(
|
||||
settingsDict,
|
||||
BASE_URL,
|
||||
API_KEY,
|
||||
NAME,
|
||||
deleted_downloads,
|
||||
defective_tracker,
|
||||
protectedDownloadIDs,
|
||||
privateDowloadIDs,
|
||||
download_sizes_tracker,
|
||||
):
|
||||
# Detects slow downloads and triggers delete. Adds to blocklist
|
||||
try:
|
||||
failType = "slow"
|
||||
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
|
||||
logger.debug("remove_slow/queue IN: %s", formattedQueueInfo(queue))
|
||||
if not queue:
|
||||
return 0
|
||||
if await qBitOffline(settingsDict, failType, NAME):
|
||||
return 0
|
||||
# Find items affected
|
||||
affectedItems = []
|
||||
alreadyCheckedDownloadIDs = []
|
||||
for queueItem in queue:
|
||||
if (
|
||||
"downloadId" in queueItem
|
||||
and "size" in queueItem
|
||||
and "sizeleft" in queueItem
|
||||
and "status" in queueItem
|
||||
):
|
||||
if queueItem["downloadId"] not in alreadyCheckedDownloadIDs:
|
||||
alreadyCheckedDownloadIDs.append(
|
||||
queueItem["downloadId"]
|
||||
) # One downloadId may occur in multiple queueItems - only check once for all of them per iteration
|
||||
if (
|
||||
queueItem["protocol"] == "usenet"
|
||||
): # No need to check for speed for usenet, since there users pay for speed
|
||||
continue
|
||||
if queueItem["status"] == "downloading":
|
||||
if (
|
||||
queueItem["size"] > 0 and queueItem["sizeleft"] == 0
|
||||
): # Skip items that are finished downloading but are still marked as downloading. May be the case when files are moving
|
||||
logger.info(
|
||||
">>> Detected %s download that has completed downloading - skipping check (torrent files likely in process of being moved): %s",
|
||||
failType,
|
||||
queueItem["title"],
|
||||
)
|
||||
continue
|
||||
# determine if the downloaded bit on average between this and the last iteration is greater than the min threshold
|
||||
downloadedSize, previousSize, increment, speed = (
|
||||
await getDownloadedSize(
|
||||
settingsDict, queueItem, download_sizes_tracker, NAME
|
||||
)
|
||||
)
|
||||
if (
|
||||
queueItem["downloadId"] in download_sizes_tracker.dict
|
||||
and speed is not None
|
||||
):
|
||||
if speed < settingsDict["MIN_DOWNLOAD_SPEED"]:
|
||||
affectedItems.append(queueItem)
|
||||
logger.debug(
|
||||
"remove_slow/slow speed detected: %s (Speed: %d KB/s, KB now: %s, KB previous: %s, Diff: %s, In Minutes: %s",
|
||||
queueItem["title"],
|
||||
speed,
|
||||
downloadedSize,
|
||||
previousSize,
|
||||
increment,
|
||||
settingsDict["REMOVE_TIMER"],
|
||||
)
|
||||
class RemoveSlow(RemovalJob):
|
||||
queue_scope = "normal"
|
||||
blocklist = True
|
||||
|
||||
affectedItems = await execute_checks(
|
||||
settingsDict,
|
||||
affectedItems,
|
||||
failType,
|
||||
BASE_URL,
|
||||
API_KEY,
|
||||
NAME,
|
||||
deleted_downloads,
|
||||
defective_tracker,
|
||||
privateDowloadIDs,
|
||||
protectedDownloadIDs,
|
||||
addToBlocklist=True,
|
||||
doPrivateTrackerCheck=True,
|
||||
doProtectedDownloadCheck=True,
|
||||
doPermittedAttemptsCheck=True,
|
||||
async def _find_affected_items(self):
|
||||
queue = await self.queue_manager.get_queue_items(queue_scope=self.queue_scope)
|
||||
affected_items = []
|
||||
checked_ids = set()
|
||||
|
||||
for item in queue:
|
||||
if not self._is_valid_item(item):
|
||||
continue
|
||||
|
||||
download_id = item["downloadId"]
|
||||
|
||||
if download_id in checked_ids:
|
||||
continue # One downloadId may occur in multiple items - only check once for all of them per iteration
|
||||
checked_ids.add(download_id)
|
||||
|
||||
if self._is_usenet(item):
|
||||
continue # No need to check for speed for usenet, since there users pay for speed
|
||||
|
||||
if self._is_completed_but_stuck(item):
|
||||
logger.info(
|
||||
f">>> '{self.job_name}' detected download marked as slow as well as completed. Files most likely in process of being moved. Not removing: {item['title']}"
|
||||
)
|
||||
continue
|
||||
|
||||
downloaded, previous, increment, speed = await self._get_progress_stats(
|
||||
item
|
||||
)
|
||||
if self._is_slow(speed):
|
||||
affected_items.append(item)
|
||||
logger.debug(
|
||||
f'remove_slow/slow speed detected: {item["title"]} '
|
||||
f"(Speed: {speed} KB/s, KB now: {downloaded}, KB previous: {previous}, "
|
||||
f"Diff: {increment}, In Minutes: {self.settings.general.timer})"
|
||||
)
|
||||
|
||||
return affected_items
|
||||
|
||||
def _is_valid_item(self, item):
|
||||
required_keys = {"downloadId", "size", "sizeleft", "status", "protocol"}
|
||||
return required_keys.issubset(item)
|
||||
|
||||
def _is_usenet(self, item):
|
||||
return item.get("protocol") == "usenet"
|
||||
|
||||
def _is_completed_but_stuck(self, item):
|
||||
return (
|
||||
item["status"] == "downloading"
|
||||
and item["size"] > 0
|
||||
and item["sizeleft"] == 0
|
||||
)
|
||||
return len(affectedItems)
|
||||
except Exception as error:
|
||||
errorDetails(NAME, error)
|
||||
return 0
|
||||
|
||||
def _is_slow(self, speed):
|
||||
return (
|
||||
speed is not None
|
||||
and speed < self.job.min_speed
|
||||
)
|
||||
|
||||
async def getDownloadedSize(settingsDict, queueItem, download_sizes_tracker, NAME):
|
||||
try:
|
||||
# Determines the speed of download
|
||||
# Since Sonarr/Radarr do not update the downlodedSize on realtime, if possible, fetch it directly from qBit
|
||||
if (
|
||||
settingsDict["QBITTORRENT_URL"]
|
||||
and queueItem["downloadClient"] == "qBittorrent"
|
||||
):
|
||||
qbitInfo = await rest_get(
|
||||
settingsDict["QBITTORRENT_URL"] + "/torrents/info",
|
||||
params={"hashes": queueItem["downloadId"]},
|
||||
cookies=settingsDict["QBIT_COOKIE"],
|
||||
)
|
||||
downloadedSize = qbitInfo[0]["completed"]
|
||||
async def _get_progress_stats(self, item):
|
||||
download_id = item["downloadId"]
|
||||
|
||||
download_progress = self._get_download_progress(item, download_id)
|
||||
previous_progress, increment, speed = self._compute_increment_and_speed(
|
||||
download_id, download_progress
|
||||
)
|
||||
|
||||
self.arr.tracker.download_progress[download_id] = download_progress
|
||||
return download_progress, previous_progress, increment, speed
|
||||
|
||||
def _get_download_progress(self, item, download_id):
|
||||
download_client_name = item.get("downloadClient")
|
||||
if download_client_name:
|
||||
download_client, download_client_type = self.settings.download_clients.get_download_client_by_name(download_client_name)
|
||||
if download_client_type == "qbitorrent":
|
||||
progress = self._try_get_qbit_progress(download_client, download_id)
|
||||
if progress is not None:
|
||||
return progress
|
||||
return self._fallback_progress(item)
|
||||
|
||||
def _try_get_qbit_progress(self, qbit, download_id):
|
||||
try:
|
||||
return qbit.get_download_progress(download_id)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def _fallback_progress(self, item):
|
||||
logger.debug(
|
||||
"get_progress_stats: Using imprecise method to determine download increments because either a different download client than qBitorrent is used, or the download client name in the config does not match with what is configured in your *arr download client settings"
|
||||
)
|
||||
return item["size"] - item["sizeleft"]
|
||||
|
||||
def _compute_increment_and_speed(self, download_id, current_progress):
|
||||
previous_progress = self.arr.tracker.download_progress.get(download_id)
|
||||
if previous_progress is not None:
|
||||
increment = current_progress - previous_progress
|
||||
speed = round(increment / 1000 / (self.settings.general.timer * 60), 1)
|
||||
else:
|
||||
logger.debug(
|
||||
"getDownloadedSize/WARN: Using imprecise method to determine download increments because no direct qBIT query is possible"
|
||||
)
|
||||
downloadedSize = queueItem["size"] - queueItem["sizeleft"]
|
||||
if queueItem["downloadId"] in download_sizes_tracker.dict:
|
||||
previousSize = download_sizes_tracker.dict.get(queueItem["downloadId"])
|
||||
increment = downloadedSize - previousSize
|
||||
speed = round(increment / 1000 / (settingsDict["REMOVE_TIMER"] * 60), 1)
|
||||
else:
|
||||
previousSize = None
|
||||
increment = None
|
||||
speed = None
|
||||
|
||||
download_sizes_tracker.dict[queueItem["downloadId"]] = downloadedSize
|
||||
return downloadedSize, previousSize, increment, speed
|
||||
except Exception as error:
|
||||
errorDetails(NAME, error)
|
||||
return
|
||||
increment = speed = None
|
||||
return previous_progress, increment, speed
|
||||
|
||||
@@ -1,66 +1,21 @@
|
||||
from src.utils.shared import (
|
||||
errorDetails,
|
||||
formattedQueueInfo,
|
||||
get_queue,
|
||||
privateTrackerCheck,
|
||||
protectedDownloadCheck,
|
||||
execute_checks,
|
||||
permittedAttemptsCheck,
|
||||
remove_download,
|
||||
qBitOffline,
|
||||
)
|
||||
import sys, os, traceback
|
||||
import logging, verboselogs
|
||||
|
||||
logger = verboselogs.VerboseLogger(__name__)
|
||||
from src.jobs.removal_job import RemovalJob
|
||||
|
||||
|
||||
async def remove_stalled(
|
||||
settingsDict,
|
||||
BASE_URL,
|
||||
API_KEY,
|
||||
NAME,
|
||||
deleted_downloads,
|
||||
defective_tracker,
|
||||
protectedDownloadIDs,
|
||||
privateDowloadIDs,
|
||||
):
|
||||
# Detects stalled and triggers repeat check and subsequent delete. Adds to blocklist
|
||||
try:
|
||||
failType = "stalled"
|
||||
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
|
||||
logger.debug("remove_stalled/queue IN: %s", formattedQueueInfo(queue))
|
||||
if not queue:
|
||||
return 0
|
||||
if await qBitOffline(settingsDict, failType, NAME):
|
||||
return 0
|
||||
# Find items affected
|
||||
affectedItems = []
|
||||
for queueItem in queue:
|
||||
if "errorMessage" in queueItem and "status" in queueItem:
|
||||
class RemoveStalled(RemovalJob):
|
||||
queue_scope = "normal"
|
||||
blocklist = True
|
||||
|
||||
async def _find_affected_items(self):
|
||||
queue = await self.queue_manager.get_queue_items(queue_scope="normal")
|
||||
affected_items = []
|
||||
for item in queue:
|
||||
if "errorMessage" in item and "status" in item:
|
||||
if (
|
||||
queueItem["status"] == "warning"
|
||||
and queueItem["errorMessage"]
|
||||
item["status"] == "warning"
|
||||
and item["errorMessage"]
|
||||
== "The download is stalled with no connections"
|
||||
):
|
||||
affectedItems.append(queueItem)
|
||||
affectedItems = await execute_checks(
|
||||
settingsDict,
|
||||
affectedItems,
|
||||
failType,
|
||||
BASE_URL,
|
||||
API_KEY,
|
||||
NAME,
|
||||
deleted_downloads,
|
||||
defective_tracker,
|
||||
privateDowloadIDs,
|
||||
protectedDownloadIDs,
|
||||
addToBlocklist=True,
|
||||
doPrivateTrackerCheck=True,
|
||||
doProtectedDownloadCheck=True,
|
||||
doPermittedAttemptsCheck=True,
|
||||
)
|
||||
return len(affectedItems)
|
||||
except Exception as error:
|
||||
errorDetails(NAME, error)
|
||||
return 0
|
||||
affected_items.append(item)
|
||||
return affected_items
|
||||
|
||||
|
||||
|
||||
@@ -1,98 +1,24 @@
|
||||
from src.utils.shared import (
|
||||
errorDetails,
|
||||
formattedQueueInfo,
|
||||
get_queue,
|
||||
privateTrackerCheck,
|
||||
protectedDownloadCheck,
|
||||
execute_checks,
|
||||
permittedAttemptsCheck,
|
||||
remove_download,
|
||||
)
|
||||
import sys, os, traceback
|
||||
import logging, verboselogs
|
||||
from src.jobs.removal_job import RemovalJob
|
||||
|
||||
logger = verboselogs.VerboseLogger(__name__)
|
||||
from src.utils.rest import rest_get
|
||||
class RemoveUnmonitored(RemovalJob):
|
||||
queue_scope = "normal"
|
||||
blocklist = False
|
||||
|
||||
async def _find_affected_items(self):
|
||||
queue = await self.queue_manager.get_queue_items(queue_scope="normal")
|
||||
|
||||
async def remove_unmonitored(
|
||||
settingsDict,
|
||||
BASE_URL,
|
||||
API_KEY,
|
||||
NAME,
|
||||
deleted_downloads,
|
||||
defective_tracker,
|
||||
protectedDownloadIDs,
|
||||
privateDowloadIDs,
|
||||
arr_type,
|
||||
):
|
||||
# Removes downloads belonging to movies/tv shows that are not monitored. Does not add to blocklist
|
||||
try:
|
||||
failType = "unmonitored"
|
||||
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
|
||||
logger.debug("remove_unmonitored/queue IN: %s", formattedQueueInfo(queue))
|
||||
if not queue:
|
||||
return 0
|
||||
# Find items affected
|
||||
monitoredDownloadIDs = []
|
||||
for queueItem in queue:
|
||||
if arr_type == "SONARR":
|
||||
isMonitored = (
|
||||
await rest_get(
|
||||
f'{BASE_URL}/episode/{str(queueItem["episodeId"])}', API_KEY
|
||||
)
|
||||
)["monitored"]
|
||||
elif arr_type == "RADARR":
|
||||
isMonitored = (
|
||||
await rest_get(
|
||||
f'{BASE_URL}/movie/{str(queueItem["movieId"])}', API_KEY
|
||||
)
|
||||
)["monitored"]
|
||||
elif arr_type == "LIDARR":
|
||||
isMonitored = (
|
||||
await rest_get(
|
||||
f'{BASE_URL}/album/{str(queueItem["albumId"])}', API_KEY
|
||||
)
|
||||
)["monitored"]
|
||||
elif arr_type == "READARR":
|
||||
isMonitored = (
|
||||
await rest_get(
|
||||
f'{BASE_URL}/book/{str(queueItem["bookId"])}', API_KEY
|
||||
)
|
||||
)["monitored"]
|
||||
elif arr_type == "WHISPARR":
|
||||
isMonitored = (
|
||||
await rest_get(
|
||||
f'{BASE_URL}/episode/{str(queueItem["episodeId"])}', API_KEY
|
||||
)
|
||||
)["monitored"]
|
||||
if isMonitored:
|
||||
monitoredDownloadIDs.append(queueItem["downloadId"])
|
||||
# First pass: Check if items are monitored
|
||||
monitored_download_ids = []
|
||||
for item in queue:
|
||||
detail_item_id = item["detail_item_id"]
|
||||
if await self.arr.is_monitored(detail_item_id):
|
||||
monitored_download_ids.append(item["downloadId"])
|
||||
|
||||
affectedItems = []
|
||||
for queueItem in queue:
|
||||
if queueItem["downloadId"] not in monitoredDownloadIDs:
|
||||
affectedItems.append(
|
||||
queueItem
|
||||
) # One downloadID may be shared by multiple queueItems. Only removes it if ALL queueitems are unmonitored
|
||||
|
||||
affectedItems = await execute_checks(
|
||||
settingsDict,
|
||||
affectedItems,
|
||||
failType,
|
||||
BASE_URL,
|
||||
API_KEY,
|
||||
NAME,
|
||||
deleted_downloads,
|
||||
defective_tracker,
|
||||
privateDowloadIDs,
|
||||
protectedDownloadIDs,
|
||||
addToBlocklist=False,
|
||||
doPrivateTrackerCheck=True,
|
||||
doProtectedDownloadCheck=True,
|
||||
doPermittedAttemptsCheck=False,
|
||||
)
|
||||
return len(affectedItems)
|
||||
except Exception as error:
|
||||
errorDetails(NAME, error)
|
||||
return 0
|
||||
# Second pass: Append queue items none that depends on download id is monitored
|
||||
affected_items = []
|
||||
for queue_item in queue:
|
||||
if queue_item["downloadId"] not in monitored_download_ids:
|
||||
affected_items.append(
|
||||
queue_item
|
||||
) # One downloadID may be shared by multiple queue_items. Only removes it if ALL queueitems are unmonitored
|
||||
return affected_items
|
||||
@@ -1,128 +0,0 @@
|
||||
from src.utils.shared import (
|
||||
errorDetails,
|
||||
rest_get,
|
||||
rest_post,
|
||||
get_queue,
|
||||
get_arr_records,
|
||||
)
|
||||
import logging, verboselogs
|
||||
from datetime import datetime, timedelta, timezone
|
||||
import dateutil.parser
|
||||
|
||||
logger = verboselogs.VerboseLogger(__name__)
|
||||
|
||||
|
||||
async def run_periodic_rescans(
|
||||
settingsDict,
|
||||
BASE_URL,
|
||||
API_KEY,
|
||||
NAME,
|
||||
arr_type,
|
||||
):
|
||||
# Checks the wanted items and runs scans
|
||||
if not arr_type in settingsDict["RUN_PERIODIC_RESCANS"]:
|
||||
return
|
||||
try:
|
||||
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
|
||||
check_on_endpoint = []
|
||||
RESCAN_SETTINGS = settingsDict["RUN_PERIODIC_RESCANS"][arr_type]
|
||||
if RESCAN_SETTINGS["MISSING"]:
|
||||
check_on_endpoint.append("missing")
|
||||
if RESCAN_SETTINGS["CUTOFF_UNMET"]:
|
||||
check_on_endpoint.append("cutoff")
|
||||
|
||||
params = {"sortDirection": "ascending"}
|
||||
if arr_type == "SONARR":
|
||||
params["sortKey"] = "episodes.lastSearchTime"
|
||||
queue_ids = [r["seriesId"] for r in queue if "seriesId" in r]
|
||||
series = await rest_get(f"{BASE_URL}/series", API_KEY)
|
||||
series_dict = {s["id"]: s for s in series}
|
||||
|
||||
elif arr_type == "RADARR":
|
||||
params["sortKey"] = "movies.lastSearchTime"
|
||||
queue_ids = [r["movieId"] for r in queue if "movieId" in r]
|
||||
|
||||
for end_point in check_on_endpoint:
|
||||
records = await get_arr_records(
|
||||
BASE_URL, API_KEY, params=params, end_point=f"wanted/{end_point}"
|
||||
)
|
||||
if records is None:
|
||||
logger.verbose(
|
||||
f">>> Rescan: No {end_point} items, thus nothing to rescan."
|
||||
)
|
||||
continue
|
||||
|
||||
# Filter out items that are already being downloaded (are in queue)
|
||||
records = [r for r in records if r["id"] not in queue_ids]
|
||||
if records is None:
|
||||
logger.verbose(
|
||||
f">>> Rescan: All {end_point} items are already being downloaded, thus nothing to rescan."
|
||||
)
|
||||
continue
|
||||
|
||||
# Remove records that have recently been searched already
|
||||
for record in reversed(records):
|
||||
if not (
|
||||
("lastSearchTime" not in record)
|
||||
or (
|
||||
(
|
||||
dateutil.parser.isoparse(record["lastSearchTime"])
|
||||
+ timedelta(days=RESCAN_SETTINGS["MIN_DAYS_BEFORE_RESCAN"])
|
||||
)
|
||||
< datetime.now(timezone.utc)
|
||||
)
|
||||
):
|
||||
records.remove(record)
|
||||
|
||||
# Select oldest records
|
||||
records = records[: RESCAN_SETTINGS["MAX_CONCURRENT_SCANS"]]
|
||||
|
||||
if not records:
|
||||
logger.verbose(
|
||||
f">>> Rescan: All {end_point} items have recently been scanned for, thus nothing to rescan."
|
||||
)
|
||||
continue
|
||||
|
||||
if arr_type == "SONARR":
|
||||
for record in records:
|
||||
series_id = record.get("seriesId")
|
||||
if series_id and series_id in series_dict:
|
||||
record["series"] = series_dict[series_id]
|
||||
else:
|
||||
record["series"] = (
|
||||
None # Or handle missing series info as needed
|
||||
)
|
||||
|
||||
logger.verbose(
|
||||
f">>> Running a scan for {len(records)} {end_point} items:\n"
|
||||
+ "\n".join(
|
||||
[
|
||||
f"{episode['series']['title']} (Season {episode['seasonNumber']} / Episode {episode['episodeNumber']} / Aired: {episode.get('airDate', 'Unknown')}): {episode['title']}"
|
||||
for episode in records
|
||||
]
|
||||
)
|
||||
)
|
||||
json = {
|
||||
"name": "EpisodeSearch",
|
||||
"episodeIds": [r["id"] for r in records],
|
||||
}
|
||||
|
||||
elif arr_type == "RADARR":
|
||||
logger.verbose(
|
||||
f">>> Running a scan for {len(records)} {end_point} items:\n"
|
||||
+ "\n".join(
|
||||
[f"{movie['title']} ({movie['year']})" for movie in records]
|
||||
)
|
||||
)
|
||||
json = {"name": "MoviesSearch", "movieIds": [r["id"] for r in records]}
|
||||
|
||||
if not settingsDict["TEST_RUN"]:
|
||||
await rest_post(
|
||||
url=BASE_URL + "/command",
|
||||
json=json,
|
||||
headers={"X-Api-Key": API_KEY},
|
||||
)
|
||||
|
||||
except Exception as error:
|
||||
errorDetails(NAME, error)
|
||||
return 0
|
||||
116
src/jobs/search_handler.py
Normal file
116
src/jobs/search_handler.py
Normal file
@@ -0,0 +1,116 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
import dateutil.parser
|
||||
|
||||
from src.utils.log_setup import logger
|
||||
from src.utils.wanted_manager import WantedManager
|
||||
from src.utils.queue_manager import QueueManager
|
||||
|
||||
|
||||
class SearchHandler:
|
||||
def __init__(self, arr, settings):
|
||||
self.arr = arr
|
||||
self.settings = settings
|
||||
self.job = None
|
||||
self.wanted_manager = WantedManager(self.arr, self.settings)
|
||||
|
||||
async def handle_search(self, search_type):
|
||||
self._initialize_job(search_type)
|
||||
|
||||
wanted_items = await self._get_initial_wanted_items(search_type)
|
||||
if not wanted_items:
|
||||
return
|
||||
|
||||
queue = await QueueManager(self.arr, self.settings).get_queue_items(
|
||||
queue_scope="normal"
|
||||
)
|
||||
wanted_items = self._filter_wanted_items(wanted_items, queue)
|
||||
if not wanted_items:
|
||||
return
|
||||
|
||||
await self._log_items(wanted_items, search_type)
|
||||
await self._trigger_search(wanted_items)
|
||||
|
||||
def _initialize_job(self, search_type):
|
||||
logger.verbose("")
|
||||
if search_type == "missing":
|
||||
logger.verbose(f"Searching for missing content on {self.arr.name}:")
|
||||
self.job = self.settings.jobs.search_missing_content
|
||||
elif search_type == "cutoff":
|
||||
logger.verbose(f"Searching for unmet cutoff content on {self.arr.name}:")
|
||||
self.job = self.settings.jobs.search_unmet_cutoff_content
|
||||
else:
|
||||
raise ValueError(f"Unknown search type: {search_type}")
|
||||
|
||||
def _get_initial_wanted_items(self, search_type):
|
||||
wanted = self.wanted_manager.get_wanted_items(search_type)
|
||||
if not wanted:
|
||||
logger.verbose(f">>> No {search_type} items, thus not triggering a search.")
|
||||
return wanted
|
||||
|
||||
def _filter_wanted_items(self, items, queue):
|
||||
items = self._filter_already_downloading(items, queue)
|
||||
if not items:
|
||||
logger.verbose(f">>> All items already downloading, nothing to search for.")
|
||||
return []
|
||||
|
||||
items = self._filter_recent_searches(items)
|
||||
if not items:
|
||||
logger.verbose(
|
||||
f">>> All items recently searched for, thus not triggering another search."
|
||||
)
|
||||
return []
|
||||
|
||||
return items[: self.job.max_concurrent_searches]
|
||||
|
||||
def _filter_already_downloading(self, wanted_items, queue):
|
||||
queue_ids = {q[self.arr.detail_item_id_key] for q in queue}
|
||||
return [item for item in wanted_items if item["id"] not in queue_ids]
|
||||
|
||||
async def _trigger_search(self, items):
|
||||
if not self.settings.general.test_run:
|
||||
ids = [item["id"] for item in items]
|
||||
await self.wanted_manager.search_items(ids)
|
||||
|
||||
def _filter_recent_searches(self, items):
|
||||
now = datetime.now(timezone.utc)
|
||||
result = []
|
||||
|
||||
for item in items:
|
||||
last = item.get("lastSearchTime")
|
||||
if not last:
|
||||
item["lastSearchDateFormatted"] = "Never"
|
||||
item["daysSinceLastSearch"] = None
|
||||
result.append(item)
|
||||
continue
|
||||
|
||||
last_time = dateutil.parser.isoparse(last)
|
||||
days_ago = (now - last_time).days
|
||||
|
||||
if last_time + timedelta(days=self.job.min_days_between_searches) < now:
|
||||
item["lastSearchDateFormatted"] = last_time.strftime("%Y-%m-%d")
|
||||
item["daysSinceLastSearch"] = days_ago
|
||||
result.append(item)
|
||||
|
||||
return result
|
||||
|
||||
async def _log_items(self, items, search_type):
|
||||
logger.verbose(f">>> Running a scan for {len(items)} {search_type} items:")
|
||||
for item in items:
|
||||
if self.arr.arr_type in ["radarr", "readarr", "lidarr"]:
|
||||
title = item.get("title", "Unknown")
|
||||
logger.verbose(f">>> - {title}")
|
||||
|
||||
elif self.arr.arr_type == "sonarr":
|
||||
series = await self.arr.get_series()
|
||||
series_title = next(
|
||||
(s["title"] for s in series if s["id"] == item.get("seriesId")),
|
||||
"Unknown",
|
||||
)
|
||||
episode = item.get("episodeNumber", "00")
|
||||
season = item.get("seasonNumber", "00")
|
||||
season_numbering = f"S{int(season):02}/E{int(episode):02}"
|
||||
logger.verbose(f">>> - {series_title} ({season_numbering})")
|
||||
|
||||
async def _get_series_dict(self):
|
||||
series = await self.arr.rest_get("series")
|
||||
return {s["id"]: s for s in series}
|
||||
69
src/jobs/strikes_handler.py
Normal file
69
src/jobs/strikes_handler.py
Normal file
@@ -0,0 +1,69 @@
|
||||
from src.utils.log_setup import logger
|
||||
|
||||
|
||||
class StrikesHandler:
    """Tracks repeated detections ("strikes") per download for a single job.

    A download is only handed back to the caller (for removal) once it has
    collected MORE than ``max_strikes`` strikes; downloads that disappear
    from the affected set are recovered and their strikes forgotten.
    """

    def __init__(self, job_name, arr, max_strikes):
        self.job_name = job_name
        self.tracker = arr.tracker
        self.max_strikes = max_strikes
        # Per-job strike book-keeping: {download_id: {"title": ..., "strikes": n}}
        self.tracker.defective.setdefault(job_name, {})

    def check_permitted_strikes(self, affected_downloads):
        """Update strike counts and return only downloads exceeding the limit."""
        self._recover_downloads(affected_downloads)
        return self._apply_strikes_and_filter(affected_downloads)

    def _recover_downloads(self, affected_downloads):
        """Forget downloads that are no longer flagged by this job."""
        # Membership test directly against the mapping — the original built a
        # redundant dict() copy just to test "in".
        recovered = [
            d_id for d_id in self.tracker.defective[self.job_name]
            if d_id not in affected_downloads
        ]
        for d_id in recovered:
            logger.info(
                ">>> Download no longer marked as %s: %s",
                self.job_name,
                self.tracker.defective[self.job_name][d_id]["title"],
            )
            del self.tracker.defective[self.job_name][d_id]

    def _apply_strikes_and_filter(self, affected_downloads):
        """Add one strike per affected download; drop those still within the limit."""
        for d_id, queue_items in list(affected_downloads.items()):
            title = queue_items[0]["title"]
            strikes = self._increment_strike(d_id, title)
            strikes_left = self.max_strikes - strikes
            self._log_strike_status(title, strikes, strikes_left)
            if strikes_left >= 0:
                # Still within the permitted number of strikes -> keep waiting.
                del affected_downloads[d_id]

        return affected_downloads

    def _increment_strike(self, d_id, title):
        """Add a strike for *d_id* (creating its entry) and return the new total."""
        entry = self.tracker.defective[self.job_name].setdefault(
            d_id, {"title": title, "strikes": 0}
        )
        entry["strikes"] += 1
        return entry["strikes"]

    def _log_strike_status(self, title, strikes, strikes_left):
        """Log the strike count; same message as before, levels preserved.

        Within the limit and two-or-more past it: info. Exactly one past the
        limit: verbose. Two-or-more past additionally logs a tip, since the
        download should already have been removed in an earlier iteration.
        """
        message = ">>> Job '%s' detected download (%s/%s strikes): %s"
        args = (self.job_name, strikes, self.max_strikes, title)
        if strikes_left == -1:
            logger.verbose(message, *args)
        else:
            logger.info(message, *args)
        if strikes_left <= -2:
            logger.info(
                '>>> [Tip!] Since this download should already have been removed in a previous iteration but keeps coming back, this indicates the blocking of the torrent does not work correctly. Consider turning on the option "Reject Blocklisted Torrent Hashes While Grabbing" on the indexer in the *arr app: %s',
                title,
            )
|
||||
83
src/settings/_config_as_yaml.py
Normal file
83
src/settings/_config_as_yaml.py
Normal file
@@ -0,0 +1,83 @@
|
||||
import yaml
|
||||
|
||||
def mask_sensitive_value(value, key, sensitive_attributes):
    """Return "*****" when *key* is a sensitive attribute, otherwise the value unchanged."""
    if key in sensitive_attributes:
        return "*****"
    return value
|
||||
|
||||
|
||||
def filter_internal_attributes(data, internal_attributes, hide_internal_attr):
    """Drop internal keys from *data* when hiding is enabled; otherwise copy everything."""
    result = {}
    for key, value in data.items():
        if hide_internal_attr and key in internal_attributes:
            continue
        result[key] = value
    return result
|
||||
|
||||
|
||||
def clean_dict(data, sensitive_attributes, internal_attributes, hide_internal_attr):
    """Mask sensitive values in *data*, then drop internal keys if requested."""
    masked = {}
    for key, value in data.items():
        masked[key] = mask_sensitive_value(value, key, sensitive_attributes)
    return filter_internal_attributes(masked, internal_attributes, hide_internal_attr)
|
||||
|
||||
|
||||
def clean_list(obj, sensitive_attributes, internal_attributes, hide_internal_attr):
    """Clean each entry of a list: dicts and class instances are cleaned, the rest pass through."""
    cleaned_list = []
    for entry in obj:
        if isinstance(entry, dict):
            data = entry
        elif hasattr(entry, "__dict__"):
            data = vars(entry)
        else:
            # Scalars (strings, numbers, ...) are kept verbatim.
            cleaned_list.append(entry)
            continue
        cleaned_list.append(
            clean_dict(data, sensitive_attributes, internal_attributes, hide_internal_attr)
        )
    return cleaned_list
|
||||
|
||||
|
||||
def clean_object(obj, sensitive_attributes, internal_attributes, hide_internal_attr):
    """Clean a dict or class instance; any other value is only passed through masking."""
    if isinstance(obj, dict):
        data = obj
    elif hasattr(obj, "__dict__"):
        data = vars(obj)
    else:
        # Scalar: masked with the empty-string key, matching the original.
        return mask_sensitive_value(obj, "", sensitive_attributes)
    return clean_dict(data, sensitive_attributes, internal_attributes, hide_internal_attr)
|
||||
|
||||
|
||||
def get_config_as_yaml(
    data,
    sensitive_attributes=None,
    internal_attributes=None,
    hide_internal_attr=True,
):
    """Render *data* (a mapping of config sections) as a YAML string.

    Sensitive attributes are masked, internal attributes optionally hidden,
    keys starting with "_" skipped, and falsy (empty) sections omitted.
    """
    if sensitive_attributes is None:
        sensitive_attributes = set()
    if internal_attributes is None:
        internal_attributes = set()

    config_output = {}

    for key, obj in data.items():
        # Private/internal top-level keys are never rendered.
        if key.startswith("_"):
            continue

        if isinstance(obj, list):
            cleaned = clean_list(
                obj, sensitive_attributes, internal_attributes, hide_internal_attr
            )
        else:
            cleaned = clean_object(
                obj, sensitive_attributes, internal_attributes, hide_internal_attr
            )

        # Empty sections (empty dict/list, None) are dropped from the output.
        if cleaned:
            config_output[key] = cleaned

    return yaml.dump(config_output, indent=2, default_flow_style=False, sort_keys=False)
|
||||
61
src/settings/_constants.py
Normal file
61
src/settings/_constants.py
Normal file
@@ -0,0 +1,61 @@
|
||||
import os
|
||||
from src.settings._config_as_yaml import get_config_as_yaml
|
||||
|
||||
|
||||
class Envs:
    """Environment-derived runtime facts (docker flag, image tag, commit id)."""

    def __init__(self):
        env = os.environ
        self.in_docker = env.get("IN_DOCKER", "").lower() == "true"
        self.image_tag = env.get("IMAGE_TAG") or "Local"
        self.short_commit_id = env.get("SHORT_COMMIT_ID") or "n/a"
        # Flipped to True later when a config file is found.
        self.use_config_yaml = False

    def config_as_yaml(self):
        """Render these settings as YAML (nothing masked or hidden)."""
        return get_config_as_yaml(self.__dict__)
|
||||
|
||||
|
||||
class Paths:
    """File-system locations used by the app (relative to the working directory)."""

    logs = "./temp/log.txt"
    tracker = "./temp/tracker.txt"
    config_file = "./config/config.yaml"


class ApiEndpoints:
    """Base API path per service; Lidarr/Readarr are still on API v1."""

    radarr = "/api/v3"
    sonarr = "/api/v3"
    lidarr = "/api/v1"
    readarr = "/api/v1"
    whisparr = "/api/v3"
    qbittorrent = "/api/v2"


class MinVersions:
    """Minimum supported version per service; older versions abort setup."""

    radarr = "5.10.3.9171"
    sonarr = "4.0.9.2332"
    lidarr = "2.11.1.4621"
    readarr = "0.4.15.2787"
    whisparr = "2.0.0.548"
    qbittorrent = "4.3.0"


class FullQueueParameter:
    """Query parameter per arr type that makes the queue endpoint include unknown items."""

    radarr = "includeUnknownMovieItems"
    sonarr = "includeUnknownSeriesItems"
    lidarr = "includeUnknownArtistItems"
    readarr = "includeUnknownAuthorItems"
    whisparr = "includeUnknownSeriesItems"


class DetailItemKey:
    """Name of the per-item detail object in each arr's queue records."""

    radarr = "movie"
    sonarr = "episode"
    lidarr = "album"
    readarr = "book"
    whisparr = "episode"


class DetailItemSearchCommand:
    """Command name used to trigger a search per arr type.

    NOTE(review): whisparr is None — presumably searches are unsupported
    there; callers must handle the None case.
    """

    radarr = "MoviesSearch"
    sonarr = "EpisodeSearch"
    lidarr = "BookSearch"
    readarr = "BookSearch"
    whisparr = None
|
||||
69
src/settings/_download_clients.py
Normal file
69
src/settings/_download_clients.py
Normal file
@@ -0,0 +1,69 @@
|
||||
from src.settings._config_as_yaml import get_config_as_yaml
|
||||
from src.settings._download_clients_qBit import QbitClients
|
||||
|
||||
class DownloadClients:
    """Represents all download clients."""

    # Populated in __init__; stays None/empty when no qBittorrent is configured.
    qbittorrent = None
    # All supported client types; used to iterate generically over client lists.
    download_client_types = [
        "qbittorrent",
    ]

    def __init__(self, config, settings):
        self._set_qbit_clients(config, settings)
        self.check_unique_download_client_types()

    def _set_qbit_clients(self, config, settings):
        # Build the qBittorrent client list from the config (if the section is a dict).
        download_clients = config.get("download_clients", {})
        if isinstance(download_clients, dict):
            self.qbittorrent = QbitClients(config, settings)
        if not self.qbittorrent:  # Unsets settings in general section needed for qbit (if no qbit is defined)
            for key in [
                "private_tracker_handling",
                "public_tracker_handling",
                "obsolete_tag",
                "protected_tag",
            ]:
                # NOTE: mutates the shared settings object — downstream code
                # treats None as "no qBit-dependent handling".
                setattr(settings.general, key, None)

    def config_as_yaml(self):
        """Logs all download clients."""
        return get_config_as_yaml(
            {"qbittorrent": self.qbittorrent},
            sensitive_attributes={"username", "password", "cookie"},
            internal_attributes={ "api_url", "cookie", "settings", "min_version"},
            hide_internal_attr=True
        )

    def check_unique_download_client_types(self):
        """Ensures that all download client names are unique.

        This is important since downloadClient in arr goes by name, and
        this is needed to link it to the right IP set up in the yaml config
        (which may be different to the one configured in arr)."""

        seen = set()
        for download_client_type in self.download_client_types:
            download_clients = getattr(self, download_client_type, [])

            # Check each client in the list
            for client in download_clients:
                name = getattr(client, "name", None)
                if name is None:
                    raise ValueError(f'{download_client_type} client does not have a name ({client.base_url}).\nMake sure that the name corresponds with the name set in your *arr app for that download client.')

                # Names are compared case-insensitively.
                if name.lower() in seen:
                    raise ValueError(f"Download client names must be unique. Duplicate name found: '{name}'\nMake sure that the name corresponds with the name set in your *arr app for that download client.")
                else:
                    seen.add(name.lower())

    def get_download_client_by_name(self, name: str):
        """Retrieve the download client and its type by its name.

        Returns:
            tuple: (client, type string), or (None, None) when not found.
        """
        name_lower = name.lower()
        for download_client_type in self.download_client_types:
            download_clients = getattr(self, download_client_type, [])

            # Check each client in the list
            for client in download_clients:
                if client.name.lower() == name_lower:
                    return client, download_client_type

        return None, None
|
||||
347
src/settings/_download_clients_qBit.py
Normal file
347
src/settings/_download_clients_qBit.py
Normal file
@@ -0,0 +1,347 @@
|
||||
from packaging import version
|
||||
from src.utils.common import make_request, wait_and_exit
|
||||
from src.settings._constants import ApiEndpoints, MinVersions
|
||||
from src.utils.log_setup import logger
|
||||
|
||||
|
||||
class QbitError(Exception):
    """Raised when interaction with qBittorrent fails (login, version check, ...)."""

    pass
|
||||
|
||||
class QbitClients(list):
    """List of all configured qBittorrent clients."""

    def __init__(self, config, settings):
        super().__init__()
        self._set_qbit_clients(config, settings)

    def _set_qbit_clients(self, config, settings):
        """Build one QbitClient per config entry; malformed entries are logged and skipped."""
        raw_entries = config.get("download_clients", {}).get("qbittorrent", [])

        if not isinstance(raw_entries, list):
            logger.error("Invalid config format for qbittorrent clients. Expected a list.")
            return

        for entry in raw_entries:
            try:
                # Unexpected keys in the entry surface as a TypeError here.
                self.append(QbitClient(settings, **entry))
            except TypeError as e:
                logger.error(f"Error parsing qbittorrent client config: {e}")
|
||||
|
||||
|
||||
|
||||
class QbitClient:
    """Represents a single qBittorrent client."""

    # Session cookie dict ({"SID": ...}) once logged in; None before.
    cookie: str = None
    # qBittorrent version string, filled by fetch_version().
    version: str = None

    def __init__(
        self,
        settings,
        base_url: str = None,
        username: str = None,
        password: str = None,
        name: str = None
    ):
        """Store connection details; raises ValueError when base_url is missing."""
        self.settings = settings
        if not base_url:
            logger.error("Skipping qBittorrent client entry: 'base_url' is required.")
            raise ValueError("qBittorrent client must have a 'base_url'.")

        self.base_url = base_url.rstrip("/")
        self.api_url = self.base_url + getattr(ApiEndpoints, "qbittorrent")
        self.min_version = getattr(MinVersions, "qbittorrent")
        self.username = username
        self.password = password
        self.name = name
        if not self.name:
            logger.verbose("No name provided for qbittorrent client, assuming 'qBitorrent'. If the name used in your *arr is different, please correct either the name in your *arr, or set the name in your config")
            self.name = "qBittorrent"

        self._remove_none_attributes()

    def _remove_none_attributes(self):
        """Removes attributes that are None to keep the object clean."""
        # NOTE: after this, absent attrs must be read via getattr(self, ..., default).
        for attr in list(vars(self)):
            if getattr(self, attr) is None:
                delattr(self, attr)

    async def refresh_cookie(self):
        """Refresh the qBittorrent session cookie.

        Raises:
            QbitError: on any login failure (cookie is reset to {} first).
        """
        try:
            endpoint = f"{self.api_url}/auth/login"
            data = {"username": getattr(self, 'username', ''), "password": getattr(self, 'password', '')}
            headers = {"content-type": "application/x-www-form-urlencoded"}
            response = await make_request(
                "post", endpoint, self.settings, data=data, headers=headers
            )

            # qBittorrent replies with the literal body "Fails." on bad credentials.
            if response.text == "Fails.":
                raise ConnectionError("Login failed.")

            self.cookie = {"SID": response.cookies["SID"]}
            logger.debug("qBit cookie refreshed!")
        except Exception as e:
            logger.error(f"Error refreshing qBit cookie: {e}")
            self.cookie = {}
            raise QbitError(e) from e

    async def fetch_version(self):
        """Fetch the current qBittorrent version."""
        endpoint = f"{self.api_url}/app/version"
        response = await make_request("get", endpoint, self.settings, cookies=self.cookie)
        # Strip the first character (the leading 'v' of e.g. "v5.0.0").
        self.version = response.text[1:]
        # NOTE(review): message hardcodes "qBittorrent" rather than self.name.
        logger.debug(f"qBit version for client qBittorrent: {self.version}")

    async def validate_version(self):
        """Check if the qBittorrent version meets minimum and recommended requirements.

        Raises:
            QbitError: when the version is below the configured minimum.
        """
        # NOTE(review): reads settings.min_versions, while __init__ stored
        # self.min_version from MinVersions — confirm these agree.
        min_version = self.settings.min_versions.qbittorrent

        if version.parse(self.version) < version.parse(min_version):
            logger.error(
                f"Please update qBittorrent to at least version {min_version}. Current version: {self.version}"
            )
            raise QbitError(
                f"qBittorrent version {self.version} is too old. Please update."
            )
        if version.parse(self.version) < version.parse("5.0.0"):
            logger.info(
                f"[Tip!] Consider upgrading to qBittorrent v5.0.0 or newer to reduce network overhead."
            )

    async def create_tag(self):
        """Create the protection tag in qBittorrent if it doesn't exist."""
        url = f"{self.api_url}/torrents/tags"
        response = await make_request("get", url, self.settings, cookies=self.cookie)

        current_tags = response.json()
        if self.settings.general.protected_tag not in current_tags:
            logger.verbose(f"Creating protection tag: {self.settings.general.protected_tag}")
            # Tag creation is skipped entirely during a test run.
            if not self.settings.general.test_run:
                data = {"tags": self.settings.general.protected_tag}
                await make_request(
                    "post",
                    self.api_url + "/torrents/createTags",
                    self.settings,
                    data=data,
                    cookies=self.cookie,
                )

        # NOTE(review): compares against "tag_as_obsolete", but the General
        # settings validate handling values against "obsolete_tag" — confirm
        # which spelling is canonical; as written this branch may never fire.
        if (
            self.settings.general.public_tracker_handling == "tag_as_obsolete"
            or self.settings.general.private_tracker_handling == "tag_as_obsolete"
        ):
            if self.settings.general.obsolete_tag not in current_tags:
                logger.verbose(f"Creating obsolete tag: {self.settings.general.obsolete_tag}")
                if not self.settings.general.test_run:
                    data = {"tags": self.settings.general.obsolete_tag}
                    await make_request(
                        "post",
                        self.api_url + "/torrents/createTags",
                        self.settings,
                        data=data,
                        cookies=self.cookie,
                    )

    async def set_unwanted_folder(self):
        """Set the 'unwanted folder' setting in qBittorrent if needed."""
        if self.settings.jobs.remove_bad_files:
            endpoint = f"{self.api_url}/app/preferences"
            response = await make_request(
                "get", endpoint, self.settings, cookies=self.cookie
            )
            qbit_settings = response.json()

            if not qbit_settings.get("use_unwanted_folder"):
                logger.info(
                    "Enabling 'Keep unselected files in .unwanted folder' in qBittorrent."
                )
                if not self.settings.general.test_run:
                    # qBittorrent expects the preference payload as a JSON string under "json".
                    data = {"json": '{"use_unwanted_folder": true}'}
                    await make_request(
                        "post",
                        self.api_url + "/app/setPreferences",
                        self.settings,
                        data=data,
                        cookies=self.cookie,
                    )

    async def check_qbit_reachability(self):
        """Check if the qBittorrent URL is reachable.

        Exits the whole program (wait_and_exit) when unreachable.
        """
        try:
            endpoint = f"{self.api_url}/auth/login"
            data = {"username": getattr(self, 'username', ''), "password": getattr(self, 'password', '')}
            headers = {"content-type": "application/x-www-form-urlencoded"}
            await make_request(
                "post", endpoint, self.settings, data=data, headers=headers, log_error=False
            )

        except Exception as e:
            tip = "💡 Tip: Did you specify the URL (and username/password if required) correctly?"
            logger.error(f"-- | qBittorrent\n❗️ {e}\n{tip}\n")
            wait_and_exit()

    async def check_qbit_connected(self):
        """Check if the qBittorrent is connected to internet.

        Returns:
            bool: False when qBit reports "disconnected", True otherwise.
        """
        qbit_connection_status = ((
            await make_request(
                "get",
                self.api_url + "/sync/maindata",
                self.settings,
                cookies=self.cookie,
            )
        ).json())["server_state"]["connection_status"]
        if qbit_connection_status == "disconnected":
            return False
        else:
            return True

    async def setup(self):
        """Perform the qBittorrent setup by calling relevant managers."""
        # Check reachabilty
        await self.check_qbit_reachability()

        # Refresh the qBittorrent cookie first
        await self.refresh_cookie()

        try:
            # Fetch version and validate it
            await self.fetch_version()
            await self.validate_version()
            logger.info(f"OK | qBittorrent ({self.base_url})")
        except QbitError as e:
            logger.error(f"qBittorrent version check failed: {e}")
            wait_and_exit()  # Exit if version check fails

        # Continue with other setup tasks regardless of version check result
        await self.create_tag()
        await self.set_unwanted_folder()

    async def get_protected_and_private(self):
        """Fetches torrents from qBittorrent and checks for protected and private status.

        Returns:
            tuple[list, list]: (protected hashes, private hashes), upper-cased.
        """
        protected_downloads = []
        private_downloads = []

        # Fetch all torrents
        qbit_items = await self.get_qbit_items()

        for qbit_item in qbit_items:
            # Fetch protected torrents (by tag)
            if self.settings.general.protected_tag in qbit_item.get("tags", []):
                protected_downloads.append(qbit_item["hash"].upper())

            # Fetch private torrents — only needed when neither handling is "remove".
            if not (self.settings.general.private_tracker_handling == "remove" or self.settings.general.public_tracker_handling == "remove"):
                if version.parse(self.version) >= version.parse("5.0.0"):
                    # qBit >= 5 exposes the "private" flag directly on the item.
                    if qbit_item.get("private"):
                        private_downloads.append(qbit_item["hash"].upper())
                else:
                    # Older qBit: one extra per-torrent properties call.
                    qbit_item_props = await make_request(
                        "get",
                        self.api_url + "/torrents/properties",
                        self.settings,
                        params={"hash": qbit_item["hash"]},
                        cookies=self.cookie,
                    )
                    if not qbit_item_props:
                        logger.error(
                            "Torrent %s not found on qBittorrent - potentially removed while checking if private. "
                            "Consider upgrading qBit to v5.0.4 or newer to avoid this problem.",
                            qbit_item["hash"],
                        )
                        continue
                    # NOTE(review): .get is called on the response object itself,
                    # not on .json() — confirm make_request returns a mapping here.
                    if qbit_item_props.get("is_private", False):
                        private_downloads.append(qbit_item["hash"].upper())
                    qbit_item["private"] = qbit_item_props.get("is_private", None)

        return protected_downloads, private_downloads

    async def set_tag(self, tags, hashes):
        """
        Sets tags to one or more torrents in qBittorrent.

        Args:
            tags (list): A list of tag names to be added.
            hashes (list): A list of torrent hashes to which the tags should be applied.
        """
        # Ensure hashes are provided as a string separated by '|'
        hashes_str = "|".join(hashes)

        # Ensure tags are provided as a string separated by ',' (comma)
        tags_str = ",".join(tags)

        # Prepare the data for the request
        data = {
            "hashes": hashes_str,
            "tags": tags_str
        }

        # Perform the request to add the tag(s) to the torrents
        await make_request(
            "post",
            self.api_url + "/torrents/addTags",
            self.settings,
            data=data,
            cookies=self.cookie,
        )

    async def get_download_progress(self, download_id):
        """Return the completed byte count of a single torrent.

        NOTE(review): assumes the hash matches exactly one item; raises
        IndexError when the torrent is unknown.
        """
        items = await self.get_qbit_items(download_id)
        return items[0]["completed"]

    async def get_qbit_items(self, hashes=None):
        """Fetch torrent info items, optionally filtered to the given hash(es)."""
        params = None
        if hashes:
            if isinstance(hashes, str):
                hashes = [hashes]
            params = {"hashes": "|".join(hashes).lower()}  # Join and make lowercase

        response = await make_request(
            method="get",
            endpoint=self.api_url + "/torrents/info",
            settings=self.settings,
            params=params,
            cookies=self.cookie,
        )
        return response.json()

    async def get_torrent_files(self, download_id):
        """Fetch the file list of a torrent by its (case-insensitive) hash."""
        # this may not work if the wrong qbit
        response = await make_request(
            method="get",
            endpoint=self.api_url + "/torrents/files",
            settings=self.settings,
            params={"hash": download_id.lower()},
            cookies=self.cookie,
        )
        return response.json()

    async def set_torrent_file_priority(self, download_id, file_id, priority = 0):
        """Set a file's download priority (0 = do not download) within a torrent."""
        data={
            "hash": download_id.lower(),
            "id": file_id,
            "priority": priority,
        }
        await make_request(
            "post",
            self.api_url + "/torrents/filePrio",
            self.settings,
            data=data,
            cookies=self.cookie,
        )
|
||||
|
||||
74
src/settings/_general.py
Normal file
74
src/settings/_general.py
Normal file
@@ -0,0 +1,74 @@
|
||||
import yaml
|
||||
from src.utils.log_setup import logger
|
||||
from src.settings._validate_data_types import validate_data_types
|
||||
from src.settings._config_as_yaml import get_config_as_yaml
|
||||
|
||||
class General:
    """Represents general settings for the application."""

    VALID_TRACKER_HANDLING = {"remove", "skip", "obsolete_tag"}

    # Class-level defaults, overridden per instance from the "general" config section.
    log_level: str = "INFO"
    test_run: bool = False
    ssl_verification: bool = True
    timer: float = 10.0
    ignored_download_clients: list = []
    private_tracker_handling: str = "remove"
    public_tracker_handling: str = "remove"
    obsolete_tag: str = None
    protected_tag: str = "Keep"

    def __init__(self, config):
        general_config = config.get("general", {})
        # Upper-case the *effective* value: the original applied .upper() only
        # to the default, so a configured lower-case "info" slipped through.
        self.log_level = general_config.get("log_level", self.log_level).upper()
        self.test_run = general_config.get("test_run", self.test_run)
        self.timer = general_config.get("timer", self.timer)
        self.ssl_verification = general_config.get("ssl_verification", self.ssl_verification)
        # Copy so the mutable class-level default list is never shared
        # (and mutated) across instances.
        self.ignored_download_clients = list(
            general_config.get("ignored_download_clients", self.ignored_download_clients)
        )

        self.private_tracker_handling = general_config.get("private_tracker_handling", self.private_tracker_handling)
        self.public_tracker_handling = general_config.get("public_tracker_handling", self.public_tracker_handling)
        self.obsolete_tag = general_config.get("obsolete_tag", self.obsolete_tag)
        self.protected_tag = general_config.get("protected_tag", self.protected_tag)

        # Validate tracker handling settings
        self.private_tracker_handling = self._validate_tracker_handling(self.private_tracker_handling, "private_tracker_handling")
        self.public_tracker_handling = self._validate_tracker_handling(self.public_tracker_handling, "public_tracker_handling")
        self.obsolete_tag = self._determine_obsolete_tag(self.obsolete_tag)

        validate_data_types(self)
        self._remove_none_attributes()

    def _remove_none_attributes(self):
        """Removes attributes that are None to keep the object clean."""
        for attr in list(vars(self)):
            if getattr(self, attr) is None:
                delattr(self, attr)

    def _validate_tracker_handling(self, value, field_name):
        """Validates tracker handling options. Defaults to 'remove' if invalid."""
        if value not in self.VALID_TRACKER_HANDLING:
            logger.error(
                f"Invalid value '{value}' for {field_name}. Defaulting to 'remove'."
            )
            return "remove"
        return value

    def _determine_obsolete_tag(self, obsolete_tag):
        """Defaults obsolete tag to "Obsolete", only if none is provided and the tag is needed for handling."""
        if obsolete_tag is None and (
            self.private_tracker_handling == "obsolete_tag"
            or self.public_tracker_handling == "obsolete_tag"
        ):
            return "Obsolete"
        return obsolete_tag

    def config_as_yaml(self):
        """Render the general settings as YAML."""
        return get_config_as_yaml(
            vars(self),
        )
|
||||
296
src/settings/_instances.py
Normal file
296
src/settings/_instances.py
Normal file
@@ -0,0 +1,296 @@
|
||||
import requests
|
||||
from packaging import version
|
||||
|
||||
from src.utils.log_setup import logger
|
||||
from src.settings._constants import (
|
||||
ApiEndpoints,
|
||||
MinVersions,
|
||||
FullQueueParameter,
|
||||
DetailItemKey,
|
||||
DetailItemSearchCommand,
|
||||
)
|
||||
from src.settings._config_as_yaml import get_config_as_yaml
|
||||
from src.utils.common import make_request, wait_and_exit
|
||||
|
||||
|
||||
class Tracker:
    """Runtime state shared across jobs for one arr instance."""

    def __init__(self):
        self.protected = []          # hashes tagged as protected in qBittorrent
        self.private = []            # hashes identified as private-tracker torrents
        self.defective = {}          # per-job strike book-keeping
        self.download_progress = {}  # last observed progress per download
        self.deleted = []            # downloads removed during this run
        self.extension_checked = []  # downloads already scanned for bad files

    async def refresh_private_and_protected(self, settings):
        """Re-query every qBittorrent client and rebuild the protected/private lists."""
        all_protected = []
        all_private = []

        for qbit in settings.download_clients.qbittorrent:
            found_protected, found_private = await qbit.get_protected_and_private()
            all_protected.extend(found_protected)
            all_private.extend(found_private)

        self.protected = all_protected
        self.private = all_private
|
||||
|
||||
|
||||
class ArrError(Exception):
    """Raised when communication with an *arr instance fails."""

    pass
|
||||
|
||||
|
||||
class Instances:
    """Represents all Arr instances."""

    def __init__(self, config, settings):
        self.arrs = ArrInstances(config, settings)
        # Without at least one instance the app has nothing to do.
        if not self.arrs:
            logger.error("No valid Arr instances found in the config.")
            wait_and_exit()

    def get_by_arr_type(self, arr_type):
        """Return a list of arr instances matching the given arr_type."""
        return [instance for instance in self.arrs if instance.arr_type == arr_type]

    def config_as_yaml(self, hide_internal_attr=True):
        """Render all configured Arr instances as YAML, masking the API keys."""
        internal_attributes = {
            "settings",
            "api_url",
            "min_version",
            "arr_type",
            "full_queue_parameter",
            "monitored_item",
            "detail_item_key",
            "detail_item_id_key",
            "detail_item_ids_key",
            "detail_item_search_command",
        }

        sections = []
        for arr_type in ["sonarr", "radarr", "readarr", "lidarr", "whisparr"]:
            matching = self.get_by_arr_type(arr_type)
            if not matching:
                continue
            sections.append(
                get_config_as_yaml(
                    {arr_type.capitalize(): matching},
                    sensitive_attributes={"api_key"},
                    internal_attributes=internal_attributes,
                    hide_internal_attr=hide_internal_attr,
                )
            )

        return "\n".join(sections)

    def check_any_arrs(self):
        """Exit the program when no ARR instances are configured."""
        if not self.arrs:
            logger.warning("No ARR instances found.")
            wait_and_exit()
|
||||
|
||||
|
||||
class ArrInstances(list):
    """Represents all Arr clients (Sonarr, Radarr, etc.)."""

    def __init__(self, config, settings):
        super().__init__()
        self._load_clients(config, settings)

    def _load_clients(self, config, settings):
        """Instantiate one ArrInstance per configured client; bad entries are logged."""
        instances_config = config.get("instances", {})

        if not isinstance(instances_config, dict):
            logger.error("Invalid format for 'instances'. Expected a dictionary.")
            return

        for arr_type, clients in instances_config.items():
            if not isinstance(clients, list):
                logger.error(f"Invalid config format for {arr_type}. Expected a list.")
                continue

            for client_config in clients:
                try:
                    instance = ArrInstance(
                        settings,
                        arr_type=arr_type,
                        base_url=client_config["base_url"],
                        api_key=client_config["api_key"],
                    )
                except KeyError as e:
                    logger.error(
                        f"Missing required key {e} in {arr_type} client config."
                    )
                else:
                    self.append(instance)
|
||||
|
||||
|
||||
class ArrInstance:
    """Represents an individual Arr instance (Sonarr, Radarr, etc.).

    Holds the connection details (URL, API key) plus the per-type API
    vocabulary (queue parameter, detail-item keys, search command) and
    provides async helpers around the instance's REST API.
    """

    # Filled in by setup() after the first successful /system/status call.
    version: str = None
    name: str = None
    # NOTE(review): class-level attribute — a single Tracker object is shared
    # by ALL ArrInstance objects. Confirm this is intended; if each instance
    # needs its own tracker, assign it in __init__ instead.
    tracker = Tracker()

    def __init__(self, settings, arr_type: str, base_url: str, api_key: str):
        """Validate the client entry and derive per-type API constants.

        Args:
            settings: Global settings object (passed through to make_request).
            arr_type: Lower-case instance type, e.g. "sonarr" or "radarr".
            base_url: Base URL of the instance; trailing slashes are stripped.
            api_key: API key sent as the X-Api-Key header.

        Raises:
            ValueError: If base_url or api_key is missing/empty.
        """
        if not base_url:
            logger.error(f"Skipping {arr_type} client entry: 'base_url' is required.")
            raise ValueError(f"{arr_type} client must have a 'base_url'.")

        if not api_key:
            logger.error(f"Skipping {arr_type} client entry: 'api_key' is required.")
            raise ValueError(f"{arr_type} client must have an 'api_key'.")

        self.settings = settings
        self.arr_type = arr_type
        self.base_url = base_url.rstrip("/")
        self.api_key = api_key
        # Per-type constants looked up from the shared constant classes.
        self.api_url = self.base_url + getattr(ApiEndpoints, arr_type)
        # NOTE(review): self.min_version is set here, but _check_min_version
        # reads self.settings.min_versions instead — confirm which source is
        # authoritative.
        self.min_version = getattr(MinVersions, arr_type)
        self.full_queue_parameter = getattr(FullQueueParameter, arr_type)
        self.detail_item_key = getattr(DetailItemKey, arr_type)
        # e.g. "series" -> "seriesId" / "seriesIds" as used by the queue API.
        self.detail_item_id_key = self.detail_item_key + "Id"
        self.detail_item_ids_key = self.detail_item_key + "Ids"
        self.detail_item_search_command = getattr(DetailItemSearchCommand, arr_type)

    async def _check_ui_language(self):
        """Check if the UI language is set to English; raise ArrError if not."""
        endpoint = self.api_url + "/config/ui"
        headers = {"X-Api-Key": self.api_key}
        response = await make_request("get", endpoint, self.settings, headers=headers)
        ui_language = (response.json())["uiLanguage"]
        # Presumably a uiLanguage value of 1 encodes English — TODO confirm
        # against the Arr API docs.
        if ui_language > 1:  # Not English
            logger.error("!! %s Error: !!", self.name)
            logger.error(
                f"> Decluttarr only works correctly if UI language is set to English (under Settings/UI in {self.name})"
            )
            logger.error(
                "> Details: https://github.com/ManiMatter/decluttarr/issues/132)"
            )
            raise ArrError("Not English")

    def _check_min_version(self, status):
        """Check if ARR instance meets minimum version requirements.

        Args:
            status: Parsed /system/status JSON; its "version" field is cached
                on the instance as a side effect.

        Raises:
            ArrError: If the instance is older than the configured minimum.
        """
        self.version = status["version"]
        min_version = getattr(self.settings.min_versions, self.arr_type)

        # A falsy minimum means "no requirement" for this arr type.
        if min_version:
            if version.parse(self.version) < version.parse(min_version):
                logger.error("!! %s Error: !!", self.name)
                logger.error(
                    f"> Please update {self.name} ({self.base_url}) to at least version {min_version}. Current version: {self.version}"
                )
                raise ArrError("Not meeting minimum version requirements")

    def _check_arr_type(self, status):
        """Check if the ARR instance is of the correct type.

        Guards against e.g. a "sonarr" config entry pointing at a Radarr URL.

        Raises:
            ArrError: If the reported appName does not match arr_type.
        """
        actual_arr_type = status["appName"]
        if actual_arr_type.lower() != self.arr_type:
            logger.error("!! %s Error: !!", self.name)
            logger.error(
                f"> Your {self.name} ({self.base_url}) points to a {actual_arr_type} instance, rather than {self.arr_type}. Did you specify the wrong IP?"
            )
            raise ArrError("Wrong Arr Type")

    async def _check_reachability(self):
        """Check if ARR instance is reachable.

        Returns:
            dict: The parsed /system/status JSON on success.

        Raises:
            ArrError: Wrapping the underlying HTTP/connection error, after
                logging a user-friendly tip.
        """
        try:
            endpoint = self.api_url + "/system/status"
            headers = {"X-Api-Key": self.api_key}
            response = await make_request(
                "get", endpoint, self.settings, headers=headers, log_error=False
            )
            status = response.json()
            return status
        except Exception as e:
            # Translate the common failure modes into actionable hints.
            if isinstance(e, requests.exceptions.HTTPError):
                response = getattr(e, "response", None)
                if response is not None and response.status_code == 401:
                    tip = "💡 Tip: Have you configured the API_KEY correctly?"
                else:
                    tip = f"💡 Tip: HTTP error occurred. Status: {getattr(response, 'status_code', 'unknown')}"
            elif isinstance(e, requests.exceptions.RequestException):
                tip = "💡 Tip: Have you configured the URL correctly?"
            else:
                tip = ""

            logger.error(f"-- | {self.arr_type} ({self.base_url})\n❗️ {e}\n{tip}\n")
            raise ArrError(e) from e

    async def setup(self):
        """Checks on specific ARR instance; exits the whole app on failure."""
        try:
            status = await self._check_reachability()
            self.name = status.get("instanceName", self.arr_type)
            self._check_arr_type(status)
            self._check_min_version(status)
            await self._check_ui_language()

            # Display result
            logger.info(f"OK | {self.name} ({self.base_url})")
            logger.debug(f"Current version of {self.name}: {self.version}")

        except Exception as e:
            # ArrError cases were already logged with details by the check
            # that raised them; only unexpected errors need a traceback here.
            if not isinstance(e, ArrError):
                logger.error(f"Unhandled error: {e}", exc_info=True)
            wait_and_exit()

    async def get_download_client_implementation(self, download_client_name):
        """Fetch download client information and return the implementation value.

        Args:
            download_client_name: The "name" field of the download client as
                configured in the Arr instance.

        Returns:
            The client's "implementation" value, or None if no client matches.
        """
        endpoint = self.api_url + "/downloadclient"
        headers = {"X-Api-Key": self.api_key}

        # Fetch the download client list from the API
        response = await make_request("get", endpoint, self.settings, headers=headers)

        # Check if the response is a list
        download_clients = response.json()

        # Find the client where the name matches client_name
        for client in download_clients:
            if client.get("name") == download_client_name:
                # Return the implementation value if found
                return client.get("implementation", None)
        return None

    async def remove_queue_item(self, queue_id, blocklist=False):
        """
        Remove a specific queue item from the queue by its queue id.
        Sends a delete request to the API to remove the item.

        Args:
            queue_id (str): The queue ID of the queue item to be removed.
            blocklist (bool): Whether to add the item to the blocklist. Default is False.

        Returns:
            bool: Returns True if the removal was successful, False otherwise.
        """
        endpoint = f"{self.api_url}/queue/{queue_id}"
        headers = {"X-Api-Key": self.api_key}
        json_payload = {"removeFromClient": True, "blocklist": blocklist}

        # Send the request to remove the download from the queue
        response = await make_request(
            "delete", endpoint, self.settings, headers=headers, json=json_payload
        )

        # If the response is successful, return True, else return False
        if response.status_code == 200:
            return True
        else:
            return False

    async def is_monitored(self, detail_id):
        """Check if detail item (like a book, series, etc) is monitored."""
        endpoint = f"{self.api_url}/{self.detail_item_key}/{detail_id}"
        headers = {"X-Api-Key": self.api_key}

        response = await make_request("get", endpoint, self.settings, headers=headers)
        return response.json()["monitored"]

    async def get_series(self):
        """Return the full series list from the instance's /series endpoint.

        (Previous docstring was copy-pasted from
        get_download_client_implementation and described the wrong call.)
        """
        endpoint = self.api_url + "/series"
        headers = {"X-Api-Key": self.api_key}
        response = await make_request("get", endpoint, self.settings, headers=headers)
        return response.json()
161
src/settings/_jobs.py
Normal file
161
src/settings/_jobs.py
Normal file
@@ -0,0 +1,161 @@
|
||||
from src.utils.log_setup import logger
|
||||
from src.settings._validate_data_types import validate_data_types
|
||||
from src.settings._config_as_yaml import get_config_as_yaml
|
||||
|
||||
|
||||
class JobParams:
    """Represents individual job settings, with an 'enabled' flag and optional parameters.

    Only parameters actually supplied survive as instance attributes; the
    rest are dropped so the object stays clean for YAML dumping.
    """

    enabled: bool = False
    message_patterns: list
    max_strikes: int
    min_speed: int
    max_concurrent_searches: int
    min_days_between_searches: int

    def __init__(
        self,
        enabled=None,
        message_patterns=None,
        max_strikes=None,
        min_speed=None,
        max_concurrent_searches=None,
        min_days_between_searches=None,
    ):
        supplied = {
            "enabled": enabled,
            "message_patterns": message_patterns,
            "max_strikes": max_strikes,
            "min_speed": min_speed,
            "max_concurrent_searches": max_concurrent_searches,
            "min_days_between_searches": min_days_between_searches,
        }
        for attr_name, attr_value in supplied.items():
            setattr(self, attr_name, attr_value)

        # Remove attributes that are None to keep the object clean
        self._remove_none_attributes()

    def _remove_none_attributes(self):
        """Removes attributes that are None to keep the object clean."""
        unset = [name for name, value in vars(self).items() if value is None]
        for name in unset:
            delattr(self, name)
class JobDefaults:
    """Represents default job settings."""

    max_strikes: int = 3
    max_concurrent_searches: int = 3
    min_days_between_searches: int = 7
    min_speed: int = 100
    # NOTE(review): mutable class attribute shared by all instances — confirm
    # no caller mutates it in place.
    message_patterns = ["*"]

    # Only these defaults are user-overridable via the job_defaults section.
    _CONFIGURABLE = (
        "max_strikes",
        "max_concurrent_searches",
        "min_days_between_searches",
    )

    def __init__(self, config):
        job_defaults_config = config.get("job_defaults", {})
        for key in self._CONFIGURABLE:
            setattr(self, key, job_defaults_config.get(key, getattr(self, key)))
        validate_data_types(self)
class Jobs:
    """Represents all jobs explicitly.

    Each job is a JobParams attribute on this object. Construction first
    seeds every known job with its defaults, then overlays whatever the
    user configured under the "jobs" section.
    """

    def __init__(self, config):
        # job_defaults is only needed during construction; it is deleted
        # afterwards so it never shows up alongside the jobs themselves.
        self.job_defaults = JobDefaults(config)
        self._set_job_defaults()
        self._set_job_configs(config)
        del self.job_defaults

    def _set_job_defaults(self):
        """Create one JobParams per known job, pre-filled from job_defaults."""
        self.remove_bad_files = JobParams()
        self.remove_failed_downloads = JobParams()
        self.remove_failed_imports = JobParams(
            message_patterns=self.job_defaults.message_patterns
        )
        self.remove_metadata_missing = JobParams(
            max_strikes=self.job_defaults.max_strikes
        )
        self.remove_missing_files = JobParams()
        self.remove_orphans = JobParams()
        self.remove_slow = JobParams(
            max_strikes=self.job_defaults.max_strikes,
            min_speed=self.job_defaults.min_speed,
        )
        self.remove_stalled = JobParams(max_strikes=self.job_defaults.max_strikes)
        self.remove_unmonitored = JobParams()
        self.search_unmet_cutoff_content = JobParams(
            max_concurrent_searches=self.job_defaults.max_concurrent_searches,
            min_days_between_searches=self.job_defaults.min_days_between_searches,
        )
        self.search_missing_content = JobParams(
            max_concurrent_searches=self.job_defaults.max_concurrent_searches,
            min_days_between_searches=self.job_defaults.min_days_between_searches,
        )

    def _set_job_configs(self, config):
        # Populate jobs from YAML config. Only job names that already exist
        # as attributes (seeded in _set_job_defaults) are considered.
        for job_name in self.__dict__:
            if job_name != "job_defaults" and job_name in config.get("jobs", {}):
                self._set_job_settings(job_name, config["jobs"][job_name])

    def _set_job_settings(self, job_name, job_config):
        """Sets per-job config settings.

        Accepts three config shapes: bare key (None -> enable), plain bool,
        or a dict of parameters (implicitly enabled unless stated otherwise).
        """

        job = getattr(self, job_name, None)
        if (
            job_config is None
        ):  # this triggers only when reading from yaml-file. for docker-compose, empty configs are not loaded, thus the entire job would not be parsed
            job.enabled = True
        elif isinstance(job_config, bool):
            if job:
                job.enabled = job_config
            else:
                job = JobParams(enabled=job_config)
        elif isinstance(job_config, dict):
            job_config.setdefault("enabled", True)

            if job:
                for key, value in job_config.items():
                    setattr(job, key, value)
            else:
                job = JobParams(**job_config)

        else:
            # Unrecognized config shape: keep the job but leave it disabled.
            job = JobParams(enabled=False)

        setattr(self, job_name, job)
        validate_data_types(
            job, self.job_defaults
        )  # Validates and applies defaults from job_defaults

    def log_status(self):
        """Log "job_name: enabled" for every JobParams attribute."""
        job_strings = []
        for job_name, job_obj in self.__dict__.items():
            if isinstance(job_obj, JobParams):
                job_strings.append(f"{job_name}: {job_obj.enabled}")
        status = "\n".join(job_strings)
        logger.info(status)

    def config_as_yaml(self):
        # Only enabled jobs (or attributes without an 'enabled' flag) are
        # included in the YAML dump; the flag itself is hidden.
        filtered = {
            k: v
            for k, v in vars(self).items()
            if not hasattr(v, "enabled") or v.enabled
        }
        return get_config_as_yaml(
            filtered,
            internal_attributes={"enabled"},
            hide_internal_attr=True,
        )

    def list_job_status(self):
        """Returns a string showing each job and whether it's enabled or not using emojis."""
        lines = []
        for name, obj in vars(self).items():
            if hasattr(obj, "enabled"):
                status = "🟢" if obj.enabled else "⚪️"
                lines.append(f"{status} {name}")
        return "\n".join(lines)
138
src/settings/_user_config.py
Normal file
138
src/settings/_user_config.py
Normal file
@@ -0,0 +1,138 @@
|
||||
import os
|
||||
import yaml
|
||||
from src.utils.log_setup import logger
|
||||
|
||||
CONFIG_MAPPING = {
|
||||
"general": [
|
||||
"LOG_LEVEL",
|
||||
"TEST_RUN",
|
||||
"TIMER",
|
||||
"SSL_VERIFICATION",
|
||||
"IGNORED_DOWNLOAD_CLIENTS",
|
||||
],
|
||||
"job_defaults": [
|
||||
"MAX_STRIKES",
|
||||
"MIN_DAYS_BETWEEN_SEARCHES",
|
||||
"MAX_CONCURRENT_SEARCHES",
|
||||
],
|
||||
"jobs": [
|
||||
"REMOVE_BAD_FILES",
|
||||
"REMOVE_FAILED_DOWNLOADS",
|
||||
"REMOVE_FAILED_IMPORTS",
|
||||
"REMOVE_METADATA_MISSING",
|
||||
"REMOVE_MISSING_FILES",
|
||||
"REMOVE_ORPHANS",
|
||||
"REMOVE_SLOW",
|
||||
"REMOVE_STALLED",
|
||||
"REMOVE_UNMONITORED",
|
||||
"SEARCH_UNMET_CUTOFF_CONTENT",
|
||||
"SEARCH_MISSING_CONTENT",
|
||||
],
|
||||
"instances": ["SONARR", "RADARR", "READARR", "LIDARR", "WHISPARR"],
|
||||
"download_clients": ["QBITTORRENT"],
|
||||
}
|
||||
|
||||
|
||||
def get_user_config(settings):
    """Checks if data is read from environment variables, or from yaml file.

    Reads from environment variables if in docker, unless a config YAML file
    exists, in which case the file wins and envs.use_config_yaml is flagged.
    """
    if _config_file_exists(settings):
        config = _load_from_yaml_file(settings)
        settings.envs.use_config_yaml = True
    elif settings.envs.in_docker:
        config = _load_from_env()
    else:
        config = {}

    # Ensure all top-level keys exist, even if empty
    for section in CONFIG_MAPPING:
        if config.get(section) is None:
            config[section] = {}
    return config
||||
def _parse_env_var(key: str) -> dict | list | str | int | None:
    """Helper function to parse one setting input key.

    Returns None when the variable is unset, {} when its value is not
    valid YAML, otherwise the parsed value with all dict keys lowercased.
    """
    raw_value = os.getenv(key)
    if raw_value is None:
        return None

    try:
        return _lowercase(yaml.safe_load(raw_value))
    except yaml.YAMLError as e:
        logger.error(f"Failed to parse environment variable {key} as YAML:\n{e}")
        return {}
def _load_section(keys: list[str]) -> dict:
    """Helper function to parse one section of expected config.

    Unset environment variables are skipped; set ones are keyed lowercase.
    """
    return {
        key.lower(): parsed
        for key in keys
        if (parsed := _parse_env_var(key)) is not None
    }
def _load_from_env() -> dict:
    """Main function to load settings from env.

    Builds one dict per CONFIG_MAPPING section by parsing the corresponding
    environment variables via _load_section/_parse_env_var.

    Fix: this function was previously defined TWICE in this module; the
    second, inlined definition silently shadowed this helper-based one.
    Both were behaviorally equivalent, so the duplicate has been removed
    and the helper-based implementation kept.
    """
    config = {}
    for section, keys in CONFIG_MAPPING.items():
        config[section] = _load_section(keys)
    return config
def _lowercase(data):
|
||||
"""Translates recevied keys (for instance setting-keys of jobs) to lower case"""
|
||||
if isinstance(data, dict):
|
||||
return {str(k).lower(): _lowercase(v) for k, v in data.items()}
|
||||
elif isinstance(data, list):
|
||||
return [_lowercase(item) for item in data]
|
||||
else:
|
||||
# Leave strings and other types unchanged
|
||||
return data
|
||||
|
||||
|
||||
def _config_file_exists(settings):
|
||||
config_path = settings.paths.config_file
|
||||
return os.path.exists(config_path)
|
||||
|
||||
|
||||
def _load_from_yaml_file(settings):
    """Reads config from YAML file and returns a dict.

    Returns {} when the file is empty, unreadable, or contains invalid YAML.

    Fix: previously only yaml.YAMLError was caught, so a file that vanished
    or became unreadable between the _config_file_exists() check and the
    open() (TOCTOU / permissions) crashed the app; OSError is now handled
    the same way.
    """
    config_path = settings.paths.config_file
    try:
        with open(config_path, "r", encoding="utf-8") as file:
            # safe_load returns None for an empty file; normalize to {}.
            config = yaml.safe_load(file) or {}
        return config
    except (OSError, yaml.YAMLError) as e:
        logger.error("Error reading YAML file: %s", e)
        return {}
91
src/settings/_validate_data_types.py
Normal file
91
src/settings/_validate_data_types.py
Normal file
@@ -0,0 +1,91 @@
|
||||
|
||||
|
||||
import inspect
|
||||
from src.utils.log_setup import logger
|
||||
|
||||
def validate_data_types(cls, default_cls=None):
    """Ensures all attributes match expected types dynamically.

    Reads the type hints declared on the object's class and coerces any
    attribute whose current value has the wrong type. On conversion failure
    the attribute falls back to its default value (with an error logged).

    If default_cls is provided, the default value is taken from that object
    rather than the own class; if the attribute doesn't exist in
    `default_cls`, fall back to `cls.__class__`.

    Args:
        cls: The instance whose attributes are validated IN PLACE.
        default_cls: Optional object supplying fallback default values.
    """
    annotations = inspect.get_annotations(cls.__class__)  # Extract type hints

    for attr, expected_type in annotations.items():
        if not hasattr(cls, attr):  # Skip if attribute is missing
            continue

        value = getattr(cls, attr)
        default_source = default_cls if default_cls and hasattr(default_cls, attr) else cls.__class__
        default_value = getattr(default_source, attr, None)

        # Value equals its default -> nothing was overridden; skip coercion.
        if value == default_value:
            continue

        if not isinstance(value, expected_type):
            try:
                # Dispatch to the matching converter for each supported hint.
                if expected_type is bool:
                    value = convert_to_bool(value)
                elif expected_type is int:
                    value = int(value)
                elif expected_type is float:
                    value = float(value)
                elif expected_type is str:
                    value = convert_to_str(value)
                elif expected_type is list:
                    value = convert_to_list(value)
                elif expected_type is dict:
                    value = convert_to_dict(value)
                else:
                    # Unsupported annotation (e.g. a union) — treated as a
                    # conversion failure and reported below.
                    raise TypeError(f"Unhandled type conversion for '{attr}': {expected_type}")
            except Exception as e:

                logger.error(
                    f"❗️ Invalid type for '{attr}': Expected {expected_type.__name__}, but got {type(value).__name__}. "
                    f"Error: {e}. Using default value: {default_value}"
                )
                value = default_value

        # Write back the (possibly coerced or defaulted) value.
        setattr(cls, attr, value)
||||
|
||||
# --- Helper Functions ---
|
||||
def convert_to_bool(raw_value):
    """Converts strings like 'yes', 'no', 'true', 'false' into boolean values.

    Also accepts the numbers 0 and 1 (YAML parses env values such as "1"
    into ints before this converter runs, which previously raised).

    Raises:
        ValueError: For any value that cannot be interpreted as a boolean.
    """
    if isinstance(raw_value, bool):
        return raw_value

    true_values = {"1", "yes", "true", "on"}
    false_values = {"0", "no", "false", "off"}

    # Generalization: map numeric 0/1 like their string counterparts "0"/"1".
    if isinstance(raw_value, (int, float)):
        if raw_value == 1:
            return True
        if raw_value == 0:
            return False
        raise ValueError(f"Invalid boolean value: '{raw_value}'")

    if isinstance(raw_value, str):
        raw_value = raw_value.strip().lower()

    if raw_value in true_values:
        return True
    elif raw_value in false_values:
        return False
    else:
        raise ValueError(f"Invalid boolean value: '{raw_value}'")
||||
def convert_to_str(raw_value):
    """Ensures a string and trims whitespace."""
    text = raw_value if isinstance(raw_value, str) else str(raw_value)
    return text.strip()
||||
def convert_to_list(raw_value):
    """Ensures a value is a list of trimmed strings."""
    if not isinstance(raw_value, list):
        # Wrap single values in a list
        raw_value = [raw_value]
    return [convert_to_str(item) for item in raw_value]
||||
def convert_to_dict(raw_value):
    """Ensures a value is a dictionary (keys normalized to trimmed strings)."""
    if not isinstance(raw_value, dict):
        raise TypeError(f"Expected dict but got {type(raw_value).__name__}")
    return {convert_to_str(key): value for key, value in raw_value.items()}
60
src/settings/settings.py
Normal file
60
src/settings/settings.py
Normal file
@@ -0,0 +1,60 @@
|
||||
from src.utils.log_setup import configure_logging
|
||||
from src.settings._constants import Envs, MinVersions, Paths
|
||||
# from src.settings._migrate_legacy import migrate_legacy
|
||||
from src.settings._general import General
|
||||
from src.settings._jobs import Jobs
|
||||
from src.settings._download_clients import DownloadClients
|
||||
from src.settings._instances import Instances
|
||||
from src.settings._user_config import get_user_config
|
||||
|
||||
class Settings:
    """Aggregates all Decluttarr configuration: environment, general options,
    jobs, download clients and Arr instances.

    Construction reads the user config (YAML file or environment variables)
    and wires up every sub-settings object; logging is configured last so
    it can honor the loaded LOG_LEVEL.
    """

    # Class-level constants shared by all Settings objects.
    min_versions = MinVersions()
    paths = Paths()

    def __init__(self):
        self.envs = Envs()
        config = get_user_config(self)
        self.general = General(config)
        self.jobs = Jobs(config)
        self.download_clients = DownloadClients(config, self)
        self.instances = Instances(config, self)
        configure_logging(self)

    def __repr__(self):
        """Render every settings section as a titled YAML dump."""
        # (title, attribute) pairs in display order; "jobs" appears twice —
        # once as an enabled/disabled overview, once with full parameters.
        sections = [
            ("ENVIRONMENT SETTINGS", "envs"),
            ("GENERAL SETTINGS", "general"),
            ("ACTIVE JOBS", "jobs"),
            ("JOB SETTINGS", "jobs"),
            ("INSTANCE SETTINGS", "instances"),
            ("DOWNLOAD CLIENT SETTINGS", "download_clients"),
        ]
        messages = []
        messages.append("🛠️ Decluttarr - Settings 🛠️")
        messages.append("-"*80)
        messages.append("")
        for title, attr_name in sections:
            section = getattr(self, attr_name, None)
            section_content = section.config_as_yaml()
            if title == "ACTIVE JOBS":
                messages.append(self._format_section_title(title))
                messages.append(self.jobs.list_job_status() + "\n")
            # "{}\n" is what an empty section dumps to — skip those entirely.
            elif section_content != "{}\n":
                messages.append(self._format_section_title(title))
                messages.append(section_content + "\n")
        return "\n".join(messages)

    def _format_section_title(self, name, border_length=50, symbol="="):
        """Format section title with centered name and symbol borders."""
        padding = max(border_length - len(name) - 2, 0)  # 2 for the spaces around the name
        left_hashes = right_hashes = padding // 2
        # Odd padding: give the extra symbol to the right border.
        if padding % 2 != 0:
            right_hashes += 1
        return f"{symbol * left_hashes} {name} {symbol * right_hashes}\n"
39
src/utils/common.py
Normal file
39
src/utils/common.py
Normal file
@@ -0,0 +1,39 @@
|
||||
import sys
|
||||
import time
|
||||
import asyncio
|
||||
import requests
|
||||
from src.utils.log_setup import logger
|
||||
|
||||
|
||||
async def make_request(
    method: str, endpoint: str, settings, timeout: int = 5, log_error=True, **kwargs
) -> requests.Response:
    """
    A utility function to make HTTP requests (GET, POST, DELETE, PUT).

    The blocking `requests` call runs in a worker thread so the event loop
    stays responsive.

    Args:
        method: HTTP verb, case-insensitive ("get", "post", ...).
        endpoint: Full URL to call.
        settings: Settings object; ssl_verification is read from it.
        timeout: Request timeout in seconds (default 5).
        log_error: Log failures before re-raising (default True).
        **kwargs: Passed through to requests (headers, json, params, ...).

    Returns:
        requests.Response: The successful (2xx) response.

    Raises:
        requests.exceptions.HTTPError: On non-2xx status codes.
        Exception: Any other error from the underlying request.
    """
    try:
        # Merge the fixed arguments into the caller's kwargs rather than
        # passing them positionally after **kwargs. Fix: previously a caller
        # that supplied 'verify' or 'timeout' in kwargs triggered a
        # duplicate-keyword TypeError; now the caller's value wins.
        request_kwargs = dict(kwargs)
        request_kwargs.setdefault("verify", settings.general.ssl_verification)
        request_kwargs.setdefault("timeout", timeout)

        # Make the request using the method passed (get, post, etc.)
        response = await asyncio.to_thread(
            getattr(requests, method.lower()),
            endpoint,
            **request_kwargs,
        )
        response.raise_for_status()
        return response
    except requests.exceptions.HTTPError as http_err:
        if log_error:
            logger.error(f"HTTP error occurred: {http_err}", exc_info=True)
        raise

    except Exception as err:
        if log_error:
            logger.error(f"Other error occurred: {err}", exc_info=True)
        raise
|
||||
def wait_and_exit(seconds=30):
    """Log a farewell, pause so the user can read the log output, then exit.

    Args:
        seconds: How long to sleep before terminating (default 30).
    """
    logger.info(f"Decluttarr will wait for {seconds} seconds and then exit.")
    time.sleep(seconds)
    sys.exit()
@@ -1,246 +0,0 @@
|
||||
#### Turning off black formatting
|
||||
# fmt: off
|
||||
########### Import Libraries
|
||||
import logging, verboselogs
|
||||
logger = verboselogs.VerboseLogger(__name__)
|
||||
from dateutil.relativedelta import relativedelta as rd
|
||||
import requests
|
||||
from src.utils.rest import rest_get, rest_post #
|
||||
from src.utils.shared import qBitRefreshCookie
|
||||
import asyncio
|
||||
from packaging import version
|
||||
|
||||
def setLoggingFormat(settingsDict):
    """Configure the root logger's output format and level from the settings."""
    # Outside docker, prefix each record with a timestamp; docker's own log
    # driver already timestamps every line.
    timestamp_part = '' if settingsDict['IS_IN_DOCKER'] else '%(asctime)s '
    # VERBOSE uses a fixed-width level column so messages line up.
    if settingsDict['LOG_LEVEL'] == 'VERBOSE':
        level_part = '[%(levelname)-7s]'
    else:
        level_part = '[%(levelname)s]'
    logging.basicConfig(
        format=timestamp_part + level_part + ': %(message)s',
        level=logging.getLevelName(settingsDict['LOG_LEVEL']),
    )
    return
|
||||
async def getArrInstanceName(settingsDict, arrApp):
    """Retrieve the display name of one Arr instance and store it in settingsDict.

    Queries /system/status for 'instanceName'; if anything goes wrong (or the
    URL key is missing) a default of arrApp.title() is used instead. (Should
    in theory not be required, since the UI already enforces a value.)

    Args:
        settingsDict: Mutable settings dictionary (modified in place).
        arrApp: Uppercase instance key, e.g. "RADARR".

    Returns:
        dict: The (possibly updated) settingsDict.
    """
    try:
        if settingsDict[arrApp + '_URL']:
            settingsDict[arrApp + '_NAME'] = (await rest_get(settingsDict[arrApp + '_URL']+'/system/status', settingsDict[arrApp + '_KEY']))['instanceName']
    except Exception:
        # Fix: was a bare 'except:', which also swallows SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        settingsDict[arrApp + '_NAME'] = arrApp.title()
    return settingsDict
|
||||
async def getProtectedAndPrivateFromQbit(settingsDict):
    # Returns two lists containing the hashes of Qbit that are either protected by tag, or are private trackers (if IGNORE_PRIVATE_TRACKERS is true)
    # Hashes are returned uppercased; both lists are empty when no qBittorrent
    # URL is configured.
    protectedDownloadIDs = []
    privateDowloadIDs = []
    if settingsDict['QBITTORRENT_URL']:
        # Fetch all torrents
        qbitItems = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/info',params={}, cookies=settingsDict['QBIT_COOKIE'])

        for qbitItem in qbitItems:
            # Fetch protected torrents (by tag)
            if settingsDict['NO_STALLED_REMOVAL_QBIT_TAG'] in qbitItem.get('tags'):
                protectedDownloadIDs.append(str.upper(qbitItem['hash']))

            # Fetch private torrents
            if settingsDict['IGNORE_PRIVATE_TRACKERS']:
                # qBit >= 5.1.0 exposes the 'private' flag directly on
                # /torrents/info; older versions need one extra
                # /torrents/properties call per torrent.
                if version.parse(settingsDict['QBIT_VERSION']) >= version.parse('5.1.0'):
                    if qbitItem['private']:
                        privateDowloadIDs.append(str.upper(qbitItem['hash']))
                else:
                    qbitItemProperties = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/properties',params={'hash': qbitItem['hash']}, cookies=settingsDict['QBIT_COOKIE'])
                    if not qbitItemProperties:
                        # Torrent may have been removed between the two calls.
                        logger.error("Torrent %s not found on qBittorrent - potentially already removed whilst checking if torrent is private. Consider upgrading qBit to v5.1.0 or newer to avoid this problem.", qbitItem['hash'])
                        continue
                    if qbitItemProperties.get('is_private', False):
                        privateDowloadIDs.append(str.upper(qbitItem['hash']))
                    qbitItem['private'] = qbitItemProperties.get('is_private', None) # Adds the is_private flag to qbitItem info for simplified logging

        logger.debug('main/getProtectedAndPrivateFromQbit/qbitItems: %s', str([{"hash": str.upper(item["hash"]), "name": item["name"], "category": item["category"], "tags": item["tags"], "private": item.get("private", None)} for item in qbitItems]))

    logger.debug('main/getProtectedAndPrivateFromQbit/protectedDownloadIDs: %s', str(protectedDownloadIDs))
    logger.debug('main/getProtectedAndPrivateFromQbit/privateDowloadIDs: %s', str(privateDowloadIDs))

    return protectedDownloadIDs, privateDowloadIDs
||||
def showWelcome():
    """Log the startup banner shown when Decluttarr launches."""
    banner_lines = (
        '#' * 50,
        'Decluttarr - Application Started!',
        '',
        'Like this app? Thanks for giving it a ⭐️ on GitHub!',
        'https://github.com/ManiMatter/decluttarr/',
        '',
    )
    for line in banner_lines:
        logger.info(line)
    return
||||
def showSettings(settingsDict):
    # Settings Message
    # Logs the full effective configuration at startup: version/commit,
    # one line per feature toggle, timer settings, and the configured
    # Arr/qBittorrent instances.
    fmt = '{0.days} days {0.hours} hours {0.minutes} minutes'
    logger.info('*** Current Settings ***')
    logger.info('Version: %s', settingsDict['IMAGE_TAG'])
    logger.info('Commit: %s', settingsDict['SHORT_COMMIT_ID'])
    logger.info('')
    logger.info('%s | Removing failed downloads (%s)', str(settingsDict['REMOVE_FAILED']), 'REMOVE_FAILED')
    logger.info('%s | Removing failed imports (%s)', str(settingsDict['REMOVE_FAILED_IMPORTS']), 'REMOVE_FAILED_IMPORTS')
    # Extra detail on how "failed import" is determined (pattern-based or not).
    if settingsDict['REMOVE_FAILED_IMPORTS'] and not settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']:
        logger.verbose ('> Any imports with a warning flag are considered failed, as no patterns specified (%s).', 'FAILED_IMPORT_MESSAGE_PATTERNS')
    elif settingsDict['REMOVE_FAILED_IMPORTS'] and settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']:
        logger.verbose ('> Imports with a warning flag are considered failed if the status message contains any of the following patterns:')
        for pattern in settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']:
            logger.verbose(' - "%s"', pattern)
    logger.info('%s | Removing downloads missing metadata (%s)', str(settingsDict['REMOVE_METADATA_MISSING']), 'REMOVE_METADATA_MISSING')
    logger.info('%s | Removing downloads missing files (%s)', str(settingsDict['REMOVE_MISSING_FILES']), 'REMOVE_MISSING_FILES')
    logger.info('%s | Removing orphan downloads (%s)', str(settingsDict['REMOVE_ORPHANS']), 'REMOVE_ORPHANS')
    logger.info('%s | Removing slow downloads (%s)', str(settingsDict['REMOVE_SLOW']), 'REMOVE_SLOW')
    logger.info('%s | Removing stalled downloads (%s)', str(settingsDict['REMOVE_STALLED']), 'REMOVE_STALLED')
    logger.info('%s | Removing downloads belonging to unmonitored items (%s)', str(settingsDict['REMOVE_UNMONITORED']), 'REMOVE_UNMONITORED')
    # One line per arr type describing its periodic rescan configuration.
    for arr_type, RESCAN_SETTINGS in settingsDict['RUN_PERIODIC_RESCANS'].items():
        logger.info('%s/%s (%s) | Search missing/cutoff-unmet items. Max queries/list: %s. Min. days to re-search: %s (%s)', RESCAN_SETTINGS['MISSING'], RESCAN_SETTINGS['CUTOFF_UNMET'], arr_type, RESCAN_SETTINGS['MAX_CONCURRENT_SCANS'], RESCAN_SETTINGS['MIN_DAYS_BEFORE_RESCAN'], 'RUN_PERIODIC_RESCANS')
    logger.info('')

    logger.info('Running every: %s', fmt.format(rd(minutes=settingsDict['REMOVE_TIMER'])))
    if settingsDict['REMOVE_SLOW']:
        logger.info('Minimum speed enforced: %s KB/s', str(settingsDict['MIN_DOWNLOAD_SPEED']))
    logger.info('Permitted number of times before stalled/missing metadata/slow downloads are removed: %s', str(settingsDict['PERMITTED_ATTEMPTS']))
    if settingsDict['QBITTORRENT_URL']:
        logger.info('Downloads with this tag will be skipped: \"%s\"', settingsDict['NO_STALLED_REMOVAL_QBIT_TAG'])
        logger.info('Private Trackers will be skipped: %s', settingsDict['IGNORE_PRIVATE_TRACKERS'])
    if settingsDict['IGNORED_DOWNLOAD_CLIENTS']:
        logger.info('Download clients skipped: %s',", ".join(settingsDict['IGNORED_DOWNLOAD_CLIENTS']))
    logger.info('')
    logger.info('*** Configured Instances ***')

    # Arr instances: show the display name only when it differs from the
    # default title-cased type; strip the '/api' suffix from the URL.
    for instance in settingsDict['INSTANCES']:
        if settingsDict[instance + '_URL']:
            logger.info(
                '%s%s: %s',
                instance.title(),
                f" ({settingsDict.get(instance + '_NAME')})" if settingsDict.get(instance + '_NAME') != instance.title() else "",
                (settingsDict[instance + '_URL']).split('/api')[0]
            )

    if settingsDict['QBITTORRENT_URL']:
        logger.info(
            'qBittorrent: %s',
            (settingsDict['QBITTORRENT_URL']).split('/api')[0]
        )

    logger.info('')
    return
||||
def upgradeChecks(settingsDict):
    """Warn users that still have the removed REMOVE_NO_FORMAT_UPGRADE setting enabled.

    Args:
        settingsDict: Global settings dictionary.
    """
    if settingsDict['REMOVE_NO_FORMAT_UPGRADE']:
        # Use warning(): Logger.warn is a deprecated alias in the logging module
        logger.warning('❗️' * 10 + ' OUTDATED SETTINGS ' + '❗️' * 10)
        logger.warning('')
        logger.warning("❗️ %s was replaced with %s.", 'REMOVE_NO_FORMAT_UPGRADE', 'REMOVE_FAILED_IMPORTS')
        logger.warning("❗️ Please check the ReadMe and update your settings.")
        logger.warning("❗️ Specifically read the section on %s.", 'FAILED_IMPORT_MESSAGE_PATTERNS')
        logger.warning('')
        logger.warning('❗️' * 29)
        logger.warning('')
    return
|
||||
|
||||
async def instanceChecks(settingsDict):
    """Check that the configured *arr instances and qBittorrent are reachable and compatible.

    For every configured instance this verifies reachability, that the URL points to the
    expected app, minimum version, and English UI language. For qBittorrent it fetches the
    auth cookie and checks the minimum version. On any failure it waits 60 seconds and exits.

    Returns:
        The settingsDict, updated with 'QBIT_COOKIE' / 'QBIT_VERSION' where applicable.
    """
    logger.info('*** Check Instances ***')
    error_occured = False
    # Check ARR-apps
    for instance in settingsDict['INSTANCES']:
        if settingsDict[instance + '_URL']:
            # Check instance is reachable
            try:
                response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.get(settingsDict[instance + '_URL']+'/system/status', params=None, headers={'X-Api-Key': settingsDict[instance + '_KEY']}, verify=settingsDict['SSL_VERIFICATION']))
                response.raise_for_status()
            except Exception as error:
                error_occured = True
                logger.error('!! %s Error: !!', instance.title())
                logger.error('> %s', error)
                if isinstance(error, requests.exceptions.HTTPError) and error.response.status_code == 401:
                    logger.error('> Have you configured %s correctly?', instance + '_KEY')

            if not error_occured:
                # Bugfix: parse the response only when the request succeeded; previously
                # response.json() ran unconditionally and crashed after a failed request
                # (response unbound on first iteration, or stale from a previous instance).
                arr_status = response.json()
                # Check if network settings are pointing to the right Arr-apps
                current_app = arr_status['appName']
                if current_app.upper() != instance:
                    error_occured = True
                    logger.error('!! %s Error: !!', instance.title())
                    logger.error('> Your %s points to a %s instance, rather than %s. Did you specify the wrong IP?', instance + '_URL', current_app, instance.title())

            if not error_occured:
                # Check minimum version requirements are met
                current_version = arr_status['version']
                if settingsDict[instance + '_MIN_VERSION']:
                    if version.parse(current_version) < version.parse(settingsDict[instance + '_MIN_VERSION']):
                        error_occured = True
                        logger.error('!! %s Error: !!', instance.title())
                        logger.error('> Please update %s to at least version %s. Current version: %s', instance.title(), settingsDict[instance + '_MIN_VERSION'], current_version)

            if not error_occured:
                # Check if language is english
                uiLanguage = (await rest_get(settingsDict[instance + '_URL']+'/config/ui', settingsDict[instance + '_KEY']))['uiLanguage']
                if uiLanguage > 1:  # Not English (1 appears to be the English language id — see linked issue)
                    error_occured = True
                    logger.error('!! %s Error: !!', instance.title())
                    logger.error('> Decluttarr only works correctly if UI language is set to English (under Settings/UI in %s)', instance.title())
                    logger.error('> Details: https://github.com/ManiMatter/decluttarr/issues/132)')

            if not error_occured:
                logger.info('OK | %s', instance.title())
                logger.debug('Current version of %s: %s', instance, current_version)

    # Check Bittorrent
    if settingsDict['QBITTORRENT_URL']:
        # Checking if qbit can be reached, and checking if version is OK
        await qBitRefreshCookie(settingsDict)
        if not settingsDict['QBIT_COOKIE']:
            error_occured = True

        if not error_occured:
            qbit_version = await rest_get(settingsDict['QBITTORRENT_URL']+'/app/version', cookies=settingsDict['QBIT_COOKIE'])
            qbit_version = qbit_version[1:]  # version without leading "v"
            settingsDict['QBIT_VERSION'] = qbit_version
            if version.parse(qbit_version) < version.parse(settingsDict['QBITTORRENT_MIN_VERSION']):
                error_occured = True
                # Bugfix: placeholders are now filled in correctly; previously the version
                # numbers were passed as surplus logging arguments and never rendered.
                logger.error('-- | %s *** Error: Please update qBittorrent to at least version %s. Current version: %s ***', 'qBittorrent', settingsDict['QBITTORRENT_MIN_VERSION'], qbit_version)

        if not error_occured:
            logger.info('OK | %s', 'qBittorrent')
            if version.parse(settingsDict['QBIT_VERSION']) < version.parse('5.1.0'):
                logger.info('>>> [Tip!] qBittorrent (Consider upgrading to v5.1.0 or newer to reduce network overhead. You are on %s)', qbit_version)  # Particularly if people have many torrents and use private trackers
            logger.debug('Current version of %s: %s', 'qBittorrent', qbit_version)

    if error_occured:
        logger.warning('At least one instance had a problem. Waiting for 60 seconds, then exiting Decluttarr.')
        await asyncio.sleep(60)
        exit()

    logger.info('')
    return settingsDict
|
||||
|
||||
async def createQbitProtectionTag(settingsDict):
    """Create the qBit protection tag in qBittorrent if it is not already present.

    No-op when qBittorrent is not configured; the tag creation itself is skipped on TEST_RUN.
    """
    if settingsDict['QBITTORRENT_URL']:
        current_tags = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/tags', cookies=settingsDict['QBIT_COOKIE'])
        # Idiomatic `not in`; also removed a redundant re-check of QBITTORRENT_URL
        # that was already guaranteed by the guard above.
        if settingsDict['NO_STALLED_REMOVAL_QBIT_TAG'] not in current_tags:
            logger.info('Creating tag in qBittorrent: %s', settingsDict['NO_STALLED_REMOVAL_QBIT_TAG'])
            if not settingsDict['TEST_RUN']:
                await rest_post(url=settingsDict['QBITTORRENT_URL']+'/torrents/createTags', data={'tags': settingsDict['NO_STALLED_REMOVAL_QBIT_TAG']}, headers={'content-type': 'application/x-www-form-urlencoded'}, cookies=settingsDict['QBIT_COOKIE'])
|
||||
|
||||
def showLoggerLevel(settingsDict):
    """Log the effective log level and print a loud banner when TEST_RUN is set."""
    logger.info('#' * 50)
    if settingsDict['LOG_LEVEL'] == 'INFO':
        logger.info('LOG_LEVEL = INFO: Only logging changes (switch to VERBOSE for more info)')
    else:
        logger.info('')
    if settingsDict['TEST_RUN']:
        # Removed pointless f-string prefixes on literals without placeholders
        logger.info('*' * 50)
        logger.info('*' * 50)
        logger.info('')
        logger.info('!! TEST_RUN FLAG IS SET !!')
        logger.info('NO UPDATES/DELETES WILL BE PERFORMED')
        logger.info('')
        logger.info('*' * 50)
        logger.info('*' * 50)
||||
57
src/utils/log_setup.py
Normal file
57
src/utils/log_setup.py
Normal file
@@ -0,0 +1,57 @@
|
||||
import logging
|
||||
import os
|
||||
from logging.handlers import RotatingFileHandler
|
||||
|
||||
# Registry of custom levels installed via add_logging_level()
_added_levels = {}


def add_logging_level(level_name, level_num):
    """Dynamically add a custom logging level.

    Registers *level_num* under *level_name* with the logging module, attaches a
    lowercase convenience method to ``logging.Logger``, and exposes the numeric
    value as an uppercase attribute on the ``logging`` module.

    Raises:
        ValueError: If the name or the number was already registered here.
    """
    already_known = level_name in _added_levels or level_num in _added_levels.values()
    if already_known:
        raise ValueError(f"Logging level '{level_name}' or number '{level_num}' already exists.")

    logging.addLevelName(level_num, level_name.upper())

    def _emit(self, message, *args, **kwargs):
        # Delegate to Logger.log so handlers and filters apply as usual
        if self.isEnabledFor(level_num):
            self.log(level_num, message, *args, **kwargs)

    setattr(logging.Logger, level_name.lower(), _emit)
    setattr(logging, level_name.upper(), level_num)
    _added_levels[level_name] = level_num
|
||||
|
||||
|
||||
# Add custom logging levels:
# TRACE (5) sits below DEBUG (10); VERBOSE (15) sits between DEBUG and INFO (20).
add_logging_level("TRACE", 5)
add_logging_level("VERBOSE", 15)
|
||||
|
||||
|
||||
# Configure the default logger for this module
logger = logging.getLogger(__name__)

# Default console handler: timestamped "time | LEVEL | message" format
console_handler = logging.StreamHandler()
console_format = logging.Formatter("%(asctime)s | %(levelname)-7s | %(message)s", "%Y-%m-%d %H:%M:%S")
console_handler.setFormatter(console_format)
logger.addHandler(console_handler)
# Default to INFO until configure_logging() applies the configured level
logger.setLevel(logging.INFO)
|
||||
|
||||
|
||||
def configure_logging(settings):
    """Add a rotating file handler and adjust log levels for all handlers.

    Args:
        settings: Settings object providing ``paths.logs`` (log file path) and
            ``general.log_level`` (level name such as "INFO").
    """
    log_file = settings.paths.logs
    log_dir = os.path.dirname(log_file)
    # Guard against a bare filename: os.makedirs('') raises FileNotFoundError
    if log_dir:
        os.makedirs(log_dir, exist_ok=True)

    # File handler (rotates at 50 MB, keeps 2 backups)
    file_handler = RotatingFileHandler(log_file, maxBytes=50 * 1024 * 1024, backupCount=2)
    file_format = logging.Formatter("%(asctime)s | %(levelname)-7s | %(message)s", "%Y-%m-%d %H:%M:%S")
    file_handler.setFormatter(file_format)
    logger.addHandler(file_handler)

    # Update log level for all handlers; unknown names fall back to INFO
    log_level = getattr(logging, settings.general.log_level.upper(), logging.INFO)
    for handler in logger.handlers:
        handler.setLevel(log_level)
    logger.setLevel(log_level)
|
||||
@@ -1,47 +0,0 @@
|
||||
def nested_set(dic, keys, value, matchConditions=None):
    """Set the value at a nested key path, creating intermediate dicts as needed.

    When the path ends in a list of dicts, *matchConditions* (a dict of
    key -> expected value) selects the first list item whose entries all match;
    the assignment is then performed on that item.

    Args:
        dic: Dictionary (mutated in place).
        keys: Sequence of keys describing the path; the last key receives *value*.
        value: Value to assign.
        matchConditions: Optional filter used to pick one item from a list.
    """
    for key in keys[:-1]:
        dic = dic.setdefault(key, {})
    if matchConditions:
        # enumerate/all() replaces the previous manual index counter and match flag
        for i, item in enumerate(dic):
            if all(item[k] == v for k, v in matchConditions.items()):
                dic = dic[i]
                break
    dic[keys[-1]] = value
|
||||
|
||||
|
||||
def add_keys_nested_dict(d, keys, defaultValue=None):
    """Ensure the nested key path *keys* exists in *d*, creating it if missing.

    Intermediate dicts are created as needed; the final key is set to
    *defaultValue* only when it is not already present.
    """
    current = d
    for key in keys[:-1]:
        current = current.setdefault(key, {})
    current.setdefault(keys[-1], defaultValue)
|
||||
|
||||
|
||||
def nested_get(dic, return_attribute, matchConditions):
    """Return the *return_attribute* values of all items in *dic* that match.

    Args:
        dic: Iterable of dict-like items.
        return_attribute: Key whose value is collected from each matching item.
        matchConditions: Dict of key -> expected value; an item matches when all
            entries agree. An empty/falsy matchConditions yields no hits
            (preserving the original behavior of the manual match flag).

    Returns:
        List of collected values (possibly empty).
    """
    hits = []
    for item in dic:
        # all() replaces the previous manual index counter and carried match flag
        if matchConditions and all(item[k] == v for k, v in matchConditions.items()):
            hits.append(item[return_attribute])
    return hits
|
||||
193
src/utils/queue_manager.py
Normal file
193
src/utils/queue_manager.py
Normal file
@@ -0,0 +1,193 @@
|
||||
from src.utils.log_setup import logger
|
||||
from src.utils.common import make_request
|
||||
|
||||
|
||||
class QueueManager:
    """Retrieves and post-processes the download queue of a single *arr instance.

    Wraps the instance's /queue API: refreshes the monitored downloads, pages
    through the records, and filters out delayed items and ignored download
    clients before handing the queue to the cleanup jobs.
    """

    def __init__(self, arr, settings):
        # arr: instance descriptor (provides api_url, api_key,
        #      full_queue_parameter, detail_item_id_key)
        # settings: global settings object (request options, ignore lists)
        self.arr = arr
        self.settings = settings

    async def get_queue_items(self, queue_scope):
        """
        Retrieves queue items based on the scope.
        queue_scope:
        "normal" = normal queue
        "orphans" = orphaned queue items (in full queue but not in normal queue)
        "full" = full queue

        Raises ValueError for any other scope string.
        """
        if queue_scope == "normal":
            queue_items = await self._get_queue(full_queue=False)
        elif queue_scope == "orphans":
            # Orphans = items present only in the full queue
            full_queue = await self._get_queue(full_queue=True)
            queue = await self._get_queue(full_queue=False)
            queue_items = [fq for fq in full_queue if fq not in queue]
        elif queue_scope == "full":
            queue_items = await self._get_queue(full_queue=True)
        else:
            raise ValueError(f"Invalid queue_scope: {queue_scope}")
        return queue_items

    async def _get_queue(self, full_queue=False):
        """Refresh, fetch and filter the queue; returns a list of queue items."""
        # Step 1: Refresh the queue (now internal)
        await self._refresh_queue()

        # Step 2: Get the total number of records
        record_count = await self._get_total_records(full_queue)

        # Step 3: Get all records using `arr.full_queue_parameter`
        queue = await self._get_arr_records(full_queue, record_count)

        # Step 4: Filter the queue based on delayed items and ignored download clients
        queue = self._ignore_delayed_queue_items(queue)
        queue = self._filter_out_ignored_download_clients(queue)
        queue = self._add_detail_item_key(queue)
        return queue

    def _add_detail_item_key(self, queue):
        """Normalizes episodeID, bookID, etc so it can just be called by 'detail_item_id'"""
        for items in queue:
            items["detail_item_id"] = items.get(self.arr.detail_item_id_key)
        return queue

    async def _refresh_queue(self):
        # Refresh the queue by making the POST request using an external make_request function
        await make_request(
            method="POST",
            endpoint=f"{self.arr.api_url}/command",
            settings=self.settings,
            json={"name": "RefreshMonitoredDownloads"},
            headers={"X-Api-Key": self.arr.api_key},
        )

    async def _get_total_records(self, full_queue):
        # Get the total number of records from the queue using `arr.full_queue_parameter`
        params = {self.arr.full_queue_parameter: full_queue}
        response = (
            await make_request(
                method="GET",
                endpoint=f"{self.arr.api_url}/queue",
                settings=self.settings,
                params=params,
                headers={"X-Api-Key": self.arr.api_key},
            )
        ).json()
        return response["totalRecords"]

    async def _get_arr_records(self, full_queue, record_count):
        # Get all records based on the count (with pagination) using `arr.full_queue_parameter`
        if record_count == 0:
            return []

        # Single page sized to the total count, i.e. everything in one request
        params = {"page": "1", "pageSize": record_count}
        if full_queue:
            params |= {self.arr.full_queue_parameter: full_queue}

        records = (
            await make_request(
                method="GET",
                endpoint=f"{self.arr.api_url}/queue",
                settings=self.settings,
                params=params,
                headers={"X-Api-Key": self.arr.api_key},
            )
        ).json()
        return records["records"]

    def _ignore_delayed_queue_items(self, queue):
        # Ignores delayed queue items
        # Each unique (title, protocol, indexer) combination is logged only once.
        if queue is None:
            return queue
        seen_combinations = set()
        filtered_queue = []
        for queue_item in queue:
            indexer = queue_item.get("indexer", "No indexer")
            protocol = queue_item.get("protocol", "No protocol")
            combination = (queue_item["title"], protocol, indexer)
            if queue_item["status"] == "delay":
                if combination not in seen_combinations:
                    seen_combinations.add(combination)
                    logger.debug(
                        ">>> Delayed queue item ignored: %s (Protocol: %s, Indexer: %s)",
                        queue_item["title"],
                        protocol,
                        indexer,
                    )
            else:
                filtered_queue.append(queue_item)
        return filtered_queue

    def _filter_out_ignored_download_clients(self, queue):
        # Filters out ignored download clients
        if queue is None:
            return queue
        filtered_queue = []

        for queue_item in queue:
            download_client = queue_item.get("downloadClient", "Unknown client")
            if download_client in self.settings.general.ignored_download_clients:
                logger.debug(
                    ">>> Queue item ignored due to ignored download client: %s (Download Client: %s)",
                    queue_item["title"],
                    download_client,
                )
            else:
                filtered_queue.append(queue_item)

        return filtered_queue

    def format_queue(self, queue_items):
        """Condense queue items into one entry per downloadId (for logging).

        Returns the string "empty" for a falsy queue; otherwise a list of dicts
        with downloadId, downloadTitle, the collected item IDs, protocol and status.
        """
        if not queue_items:
            return "empty"

        formatted_dict = {}

        for queue_item in queue_items:
            download_id = queue_item.get("downloadId")
            item_id = queue_item.get("id")

            if download_id in formatted_dict:
                # Same download seen again: only the queue-item id list grows
                formatted_dict[download_id]["IDs"].append(item_id)
            else:
                formatted_dict[download_id] = {
                    "downloadId": download_id,
                    "downloadTitle": queue_item.get("title"),
                    "IDs": [item_id],
                    "protocol": [queue_item.get("protocol")],
                    "status": [queue_item.get("status")],
                }

        return list(formatted_dict.values())

    def group_by_download_id(self, queue_items):
        # Groups queue items by download ID and returns a dict where download ID is the key, and value is the list of queue items belonging to that downloadID
        # Queue item is limited to certain keys
        # retain_keys maps each kept key to its default when missing from the item
        retain_keys = {
            "id": None,
            "detail_item_id": None,
            "title": "Unknown",
            "size": 0,
            "sizeleft": 0,
            "downloadClient": "Unknown",
            "protocol": "Unknown",
            "status": "Unknown",
            "trackedDownloadState": "Unknown",
            "statusMessages": [],
            "removal_messages": [],
        }

        grouped_dict = {}

        for queue_item in queue_items:
            download_id = queue_item["downloadId"]
            if download_id not in grouped_dict:
                grouped_dict[download_id] = []

            # Filter and add default values if keys are missing
            filtered_item = {
                key: queue_item.get(key, retain_keys.get(key, None))
                for key in retain_keys
            }

            grouped_dict[download_id].append(filtered_item)

        return grouped_dict
|
||||
@@ -1,109 +0,0 @@
|
||||
########### Functions to call radarr/sonarr APIs
|
||||
import logging
|
||||
import asyncio
|
||||
import requests
|
||||
from requests.exceptions import RequestException
|
||||
import json
|
||||
from config.definitions import settingsDict
|
||||
|
||||
|
||||
# GET
|
||||
async def rest_get(url, api_key=None, params=None, cookies=None):
    """Perform a blocking GET in a thread-pool executor and return the parsed JSON.

    Args:
        url: Full endpoint URL.
        api_key: Optional API key, sent as X-Api-Key header.
        params: Optional query parameters.
        cookies: Optional cookies (e.g. the qBittorrent auth cookie).

    Returns:
        Parsed JSON response, or None on any error.
    """
    try:
        headers = {"X-Api-Key": api_key} if api_key else None
        response = await asyncio.get_event_loop().run_in_executor(
            None,
            lambda: requests.get(
                url,
                params=params,
                headers=headers,
                cookies=cookies,
                verify=settingsDict["SSL_VERIFICATION"],
            ),
        )
        response.raise_for_status()
        return response.json()
    except requests.exceptions.HTTPError as e:
        # Bugfix: log via logging instead of print, and return None explicitly
        logging.error(f"HTTP Error from {url}: {e}")
        return None
    except RequestException as e:
        # Bugfix: the request itself failed (connection error, timeout, ...), so
        # there is no response object; previously `return response.text` raised
        # NameError here because `response` was never assigned.
        logging.error(f"Error making API request to {url}: {e}")
        return None
    except ValueError as e:
        logging.error(f"Error parsing JSON response from {url}: {e}")
        return None
|
||||
|
||||
|
||||
# DELETE
|
||||
async def rest_delete(url, api_key, params=None):
    """Perform a blocking DELETE in a thread-pool executor.

    Skipped entirely during test runs. Returns the parsed JSON body, or None
    when the server sends no content (200/204) or an error occurs.
    """
    if settingsDict["TEST_RUN"]:
        return
    try:
        response = await asyncio.get_event_loop().run_in_executor(
            None,
            lambda: requests.delete(
                url,
                params=params,
                headers={"X-Api-Key": api_key},
                verify=settingsDict["SSL_VERIFICATION"],
            ),
        )
        response.raise_for_status()
        # 200/204 responses carry no payload worth returning
        if response.status_code in (200, 204):
            return None
        return response.json()
    except RequestException as e:
        logging.error(f"Error making API request to {url}: {e}")
        return None
    except ValueError as e:
        logging.error(f"Error parsing JSON response from {url}: {e}")
        return None
|
||||
|
||||
|
||||
# POST
|
||||
async def rest_post(url, data=None, json=None, headers=None, cookies=None):
    """Perform a blocking POST in a thread-pool executor.

    Skipped entirely during test runs. Returns the parsed JSON body, or None
    for 200/201 responses or on any error.
    """
    if settingsDict["TEST_RUN"]:
        return
    try:
        request_call = lambda: requests.post(
            url,
            data=data,
            json=json,
            headers=headers,
            cookies=cookies,
            verify=settingsDict["SSL_VERIFICATION"],
        )
        response = await asyncio.get_event_loop().run_in_executor(None, request_call)
        response.raise_for_status()
        # Successful creations/commands carry no payload worth returning
        if response.status_code in (200, 201):
            return None
        return response.json()
    except RequestException as e:
        logging.error(f"Error making API request to {url}: {e}")
        return None
    except ValueError as e:
        logging.error(f"Error parsing JSON response from {url}: {e}")
        return None
|
||||
|
||||
|
||||
# PUT
|
||||
async def rest_put(url, api_key, data):
    """Perform a blocking PUT in a thread-pool executor.

    Skipped entirely during test runs. Returns the parsed JSON body, or None
    on any error.
    """
    if settingsDict["TEST_RUN"]:
        return
    try:
        headers = {"X-Api-Key": api_key, "content-type": "application/json"}
        response = await asyncio.get_event_loop().run_in_executor(
            None,
            lambda: requests.put(
                url, data=data, headers=headers, verify=settingsDict["SSL_VERIFICATION"]
            ),
        )
        response.raise_for_status()
        return response.json()
    except RequestException as e:
        logging.error(f"Error making API request to {url}: {e}")
        return None
    except ValueError as e:
        logging.error(f"Error parsing JSON response from {url}: {e}")
        return None
|
||||
@@ -1,411 +0,0 @@
|
||||
# Shared Functions
|
||||
import logging, verboselogs
|
||||
import asyncio
|
||||
import requests
|
||||
logger = verboselogs.VerboseLogger(__name__)
|
||||
from src.utils.rest import rest_get, rest_delete, rest_post
|
||||
from src.utils.nest_functions import add_keys_nested_dict, nested_get
|
||||
import sys, os, traceback
|
||||
|
||||
|
||||
async def get_arr_records(BASE_URL, API_KEY, params=None, end_point=""):
    """Return all records from a paginated *arr endpoint.

    Args:
        BASE_URL: API base URL of the instance.
        API_KEY: API key of the instance.
        params: Optional extra query parameters (None means none).
        end_point: Endpoint name, e.g. "queue".

    Returns:
        List of records (empty when the endpoint reports zero records).
    """
    # Bugfix: avoid a mutable default argument (params={})
    if params is None:
        params = {}
    # All records from a given endpoint
    record_count = (await rest_get(f"{BASE_URL}/{end_point}", API_KEY, params))[
        "totalRecords"
    ]
    if record_count == 0:
        return []
    # Fetch everything in one page sized to the total record count
    records = await rest_get(
        f"{BASE_URL}/{end_point}",
        API_KEY,
        {"page": "1", "pageSize": record_count} | params,
    )
    return records["records"]
|
||||
|
||||
|
||||
async def get_queue(BASE_URL, API_KEY, settingsDict, params=None):
    """Refresh and retrieve the current download queue of an *arr instance.

    Triggers RefreshMonitoredDownloads, fetches all queue records, then drops
    delayed items and items from ignored download clients.
    """
    # Bugfix: avoid a mutable default argument (params={})
    if params is None:
        params = {}
    await rest_post(
        url=BASE_URL + "/command",
        json={"name": "RefreshMonitoredDownloads"},
        headers={"X-Api-Key": API_KEY},
    )
    queue = await get_arr_records(BASE_URL, API_KEY, params=params, end_point="queue")
    queue = filterOutDelayedQueueItems(queue)
    queue = filterOutIgnoredDownloadClients(queue, settingsDict)
    return queue
|
||||
|
||||
|
||||
def filterOutDelayedQueueItems(queue):
    """Drop queue items whose status is 'delay'.

    Each unique (title, protocol, indexer) combination of a dropped item is
    logged only once. A None queue is passed through unchanged.
    """
    if queue is None:
        return queue
    already_logged = set()
    kept = []
    for queue_item in queue:
        if queue_item["status"] != "delay":
            kept.append(queue_item)
            continue
        # Fall back to placeholders when the keys are absent
        indexer = queue_item.get("indexer", "No indexer")
        protocol = queue_item.get("protocol", "No protocol")
        fingerprint = (queue_item["title"], protocol, indexer)
        if fingerprint not in already_logged:
            already_logged.add(fingerprint)
            logger.debug(
                ">>> Delayed queue item ignored: %s (Protocol: %s, Indexer: %s)",
                queue_item["title"],
                protocol,
                indexer,
            )
    return kept
|
||||
|
||||
|
||||
def filterOutIgnoredDownloadClients(queue, settingsDict):
    """
    Filters out queue items whose download client is listed in IGNORED_DOWNLOAD_CLIENTS.

    A None queue is passed through unchanged; every dropped item is logged.
    """
    if queue is None:
        return queue
    ignored_clients = settingsDict["IGNORED_DOWNLOAD_CLIENTS"]
    kept = []
    for queue_item in queue:
        client = queue_item.get("downloadClient", "Unknown client")
        if client not in ignored_clients:
            kept.append(queue_item)
            continue
        logger.debug(
            ">>> Queue item ignored due to ignored download client: %s (Download Client: %s)",
            queue_item["title"],
            client,
        )
    return kept
|
||||
|
||||
|
||||
def privateTrackerCheck(settingsDict, affectedItems, failType, privateDowloadIDs):
    """Remove private-tracker downloads from affectedItems (in place).

    Only acts when IGNORE_PRIVATE_TRACKERS is enabled; returns the same list.
    """
    if not settingsDict["IGNORE_PRIVATE_TRACKERS"]:
        return affectedItems
    # Iterate a reversed view so in-place removal is safe
    for item in reversed(affectedItems):
        if item["downloadId"] in privateDowloadIDs:
            affectedItems.remove(item)
    return affectedItems
|
||||
|
||||
|
||||
def protectedDownloadCheck(settingsDict, affectedItems, failType, protectedDownloadIDs):
    """Remove downloads tagged as protected from affectedItems (in place).

    Each skipped download is logged; returns the same list.
    """
    # Iterate a reversed view so in-place removal is safe
    for item in reversed(affectedItems):
        if item["downloadId"] not in protectedDownloadIDs:
            continue
        logger.verbose(
            ">>> Detected %s download, tagged not to be killed: %s",
            failType,
            item["title"],
        )
        logger.debug(
            ">>> DownloadID of above %s download (%s): %s",
            failType,
            item["title"],
            item["downloadId"],
        )
        affectedItems.remove(item)
    return affectedItems
|
||||
|
||||
|
||||
async def execute_checks(
    settingsDict,
    affectedItems,
    failType,
    BASE_URL,
    API_KEY,
    NAME,
    deleted_downloads,
    defective_tracker,
    privateDowloadIDs,
    protectedDownloadIDs,
    addToBlocklist,
    doPrivateTrackerCheck,
    doProtectedDownloadCheck,
    doPermittedAttemptsCheck,
    extraParameters=None,
):
    """Run the parametrized checks over the affected items and remove survivors.

    De-duplicates items sharing a downloadId, optionally filters out
    private-tracker / protected / not-yet-repeatedly-failing downloads, and
    removes whatever remains from the *arr queue.

    Returns:
        The list of items that were removed; [] when an error occurred.
    """
    # Bugfix: avoid a mutable default argument (extraParameters={})
    if extraParameters is None:
        extraParameters = {}
    # Goes over the affected items and performs the checks that are parametrized
    try:
        # De-duplicates the affected items (one downloadid may be shared by multiple affected items)
        downloadIDs = []
        for affectedItem in reversed(affectedItems):
            if affectedItem["downloadId"] not in downloadIDs:
                downloadIDs.append(affectedItem["downloadId"])
            else:
                affectedItems.remove(affectedItem)
        # Skips protected items
        if doPrivateTrackerCheck:
            affectedItems = privateTrackerCheck(
                settingsDict, affectedItems, failType, privateDowloadIDs
            )
        if doProtectedDownloadCheck:
            affectedItems = protectedDownloadCheck(
                settingsDict, affectedItems, failType, protectedDownloadIDs
            )
        # Checks if failing more often than permitted
        if doPermittedAttemptsCheck:
            affectedItems = permittedAttemptsCheck(
                settingsDict, affectedItems, failType, BASE_URL, defective_tracker
            )

        # Deletes all downloads that have not survived the checks
        for affectedItem in affectedItems:
            # Checks whether when removing the queue item from the *arr app the torrent should be kept
            removeFromClient = True
            if extraParameters.get("keepTorrentForPrivateTrackers", False):
                if (
                    settingsDict["IGNORE_PRIVATE_TRACKERS"]
                    and affectedItem["downloadId"] in privateDowloadIDs
                ):
                    removeFromClient = False

            # Removes the queue item
            await remove_download(
                settingsDict,
                BASE_URL,
                API_KEY,
                affectedItem,
                failType,
                addToBlocklist,
                deleted_downloads,
                removeFromClient,
            )
        # Exit Logs
        if settingsDict["LOG_LEVEL"] == "DEBUG":
            queue = await get_queue(BASE_URL, API_KEY, settingsDict)
            logger.debug(
                "execute_checks/queue OUT (failType: %s): %s",
                failType,
                formattedQueueInfo(queue),
            )
        # Return removed items
        return affectedItems
    except Exception as error:
        errorDetails(NAME, error)
        return []
|
||||
|
||||
|
||||
def permittedAttemptsCheck(
    settingsDict, affectedItems, failType, BASE_URL, defective_tracker
):
    """Track repeat offenders and keep only items that exhausted their attempts.

    Downloads no longer affected are removed from the tracker ("recovered");
    still-affected downloads get their attempt counter incremented. Items with
    attempts remaining are removed from affectedItems (not yet deleted);
    returns the same list, now containing only items past the permitted count.
    """
    # Checks if downloads are repeatedly found as stalled / stuck in metadata. Removes the items that are not exeeding permitted attempts
    # Shows all affected items (for debugging)
    logger.debug(
        "permittedAttemptsCheck/affectedItems: %s",
        ", ".join(
            f"{affectedItem['id']}:{affectedItem['title']}:{affectedItem['downloadId']}"
            for affectedItem in affectedItems
        ),
    )

    # 2. Check if those that were previously defective are no longer defective -> those are recovered
    affectedDownloadIDs = [affectedItem["downloadId"] for affectedItem in affectedItems]
    try:
        recoveredDownloadIDs = [
            trackedDownloadIDs
            for trackedDownloadIDs in defective_tracker.dict[BASE_URL][failType]
            if trackedDownloadIDs not in affectedDownloadIDs
        ]
    except KeyError:
        # No tracker entry yet for this instance/failType combination
        recoveredDownloadIDs = []
    logger.debug(
        "permittedAttemptsCheck/recoveredDownloadIDs: %s", str(recoveredDownloadIDs)
    )
    for recoveredDownloadID in recoveredDownloadIDs:
        logger.info(
            ">>> Download no longer marked as %s: %s",
            failType,
            defective_tracker.dict[BASE_URL][failType][recoveredDownloadID]["title"],
        )
        del defective_tracker.dict[BASE_URL][failType][recoveredDownloadID]
    logger.debug(
        "permittedAttemptsCheck/defective_tracker.dict IN: %s",
        str(defective_tracker.dict),
    )

    # 3. For those that are defective, add attempt + 1 if present before, or make attempt = 1.
    for affectedItem in reversed(affectedItems):
        try:
            defective_tracker.dict[BASE_URL][failType][affectedItem["downloadId"]][
                "Attempts"
            ] += 1
        except KeyError:
            # First time this download is seen as defective: create its tracker entry
            add_keys_nested_dict(
                defective_tracker.dict,
                [BASE_URL, failType, affectedItem["downloadId"]],
                {"title": affectedItem["title"], "Attempts": 1},
            )
        attempts_left = (
            settingsDict["PERMITTED_ATTEMPTS"]
            - defective_tracker.dict[BASE_URL][failType][affectedItem["downloadId"]][
                "Attempts"
            ]
        )
        # If not exceeding the number of permitted times, remove from being affected
        if attempts_left >= 0:  # Still got attempts left
            logger.info(
                ">>> Detected %s download (%s out of %s permitted times): %s",
                failType,
                str(
                    defective_tracker.dict[BASE_URL][failType][
                        affectedItem["downloadId"]
                    ]["Attempts"]
                ),
                str(settingsDict["PERMITTED_ATTEMPTS"]),
                affectedItem["title"],
            )
            affectedItems.remove(affectedItem)
        if attempts_left <= -1:  # Too many attempts
            logger.info(
                ">>> Detected %s download too many times (%s out of %s permitted times): %s",
                failType,
                str(
                    defective_tracker.dict[BASE_URL][failType][
                        affectedItem["downloadId"]
                    ]["Attempts"]
                ),
                str(settingsDict["PERMITTED_ATTEMPTS"]),
                affectedItem["title"],
            )
            if (
                attempts_left <= -2
            ):  # Too many attempts and should already have been removed
                # If supposedly deleted item keeps coming back, print out guidance for "Reject Blocklisted Torrent Hashes While Grabbing"
                logger.verbose(
                    '>>> [Tip!] Since this download should already have been removed in a previous iteration but keeps coming back, this indicates the blocking of the torrent does not work correctly. Consider turning on the option "Reject Blocklisted Torrent Hashes While Grabbing" on the indexer in the *arr app: %s',
                    affectedItem["title"],
                )
    logger.debug(
        "permittedAttemptsCheck/defective_tracker.dict OUT: %s",
        str(defective_tracker.dict),
    )
    return affectedItems
|
||||
|
||||
|
||||
async def remove_download(
    settingsDict,
    BASE_URL,
    API_KEY,
    affectedItem,
    failType,
    addToBlocklist,
    deleted_downloads,
    removeFromClient,
):
    """Remove one queue item from the *arr app and record it as deleted.

    Skips downloads already in deleted_downloads (shared downloadId across
    items); the actual DELETE call is suppressed on TEST_RUN. When
    removeFromClient is False, the item leaves the *arr queue but the torrent
    stays in the download client.
    """
    # Removes downloads and creates log entry
    logger.debug(
        "remove_download/deleted_downloads.dict IN: %s", str(deleted_downloads.dict)
    )
    if affectedItem["downloadId"] not in deleted_downloads.dict:
        # "schizophrenic" removal:
        # Yes, the failed imports are removed from the -arr apps (so the removal kicks still in)
        # But in the torrent client they are kept
        if removeFromClient:
            logger.info(">>> Removing %s download: %s", failType, affectedItem["title"])
        else:
            logger.info(
                ">>> Removing %s download (without removing from torrent client): %s",
                failType,
                affectedItem["title"],
            )

        # Print out detailed removal messages (if any were added in the jobs)
        if "removal_messages" in affectedItem:
            for removal_message in affectedItem["removal_messages"]:
                logger.info(removal_message)

        if not settingsDict["TEST_RUN"]:
            await rest_delete(
                f'{BASE_URL}/queue/{affectedItem["id"]}',
                API_KEY,
                {"removeFromClient": removeFromClient, "blocklist": addToBlocklist},
            )
        # Remember the downloadId so sibling queue items are not removed twice
        deleted_downloads.dict.append(affectedItem["downloadId"])

    logger.debug(
        "remove_download/deleted_downloads.dict OUT: %s", str(deleted_downloads.dict)
    )
    return
|
||||
|
||||
|
||||
def errorDetails(NAME, error):
    """Log where and why queue cleaning failed (file, line, full traceback).

    Must be called from inside an ``except`` block so ``sys.exc_info()``
    still carries the active exception.
    """
    _, _, tb = sys.exc_info()
    source_file = os.path.split(tb.tb_frame.f_code.co_filename)[1]
    logger.warning(
        ">>> Queue cleaning failed on %s. (File: %s / Line: %s / %s)",
        NAME,
        source_file,
        tb.tb_lineno,
        traceback.format_exc(),
    )
    return
|
||||
|
||||
|
||||
def formattedQueueInfo(queue):
    """Condense a queue into one entry per downloadId.

    Each entry carries the download title and every queue-record ID sharing
    that downloadId. Returns "empty" for a falsy queue, "error" on failure.
    """
    try:
        if not queue:
            return "empty"
        # Group by downloadId; dict insertion order preserves first appearance
        grouped = {}
        for queue_item in queue:
            dl_id = queue_item.get("downloadId", None)
            record_id = queue_item.get("id", None)
            entry = grouped.get(dl_id)
            if entry is None:
                grouped[dl_id] = {
                    "downloadId": dl_id,
                    "downloadTitle": queue_item.get("title"),
                    "IDs": [record_id],
                    "protocol": [queue_item.get("protocol")],
                    "status": [queue_item.get("status")],
                }
            else:
                # Only the record IDs accumulate; title/protocol/status keep
                # the first-seen values (matches existing behavior)
                entry["IDs"].append(record_id)
        return list(grouped.values())
    except Exception as error:
        errorDetails("formattedQueueInfo", error)
        logger.debug("formattedQueueInfo/queue for debug: %s", str(queue))
        return "error"
|
||||
|
||||
|
||||
async def qBitOffline(settingsDict, failType, NAME):
    """Return True when qBittorrent is configured but reports itself offline.

    Queries qBittorrent's /sync/maindata endpoint and inspects the
    server_state.connection_status field. When the client is disconnected,
    queue cleaning should be skipped so downloads are not misjudged as dead.

    Args:
        settingsDict: Global settings; a falsy QBITTORRENT_URL disables the check.
        failType: Label of the cleaning job (logging only).
        NAME: Name of the *arr instance (logging only).

    Returns:
        bool: True if qBittorrent is configured and disconnected, else False.
    """
    if settingsDict["QBITTORRENT_URL"]:
        response = await rest_get(
            settingsDict["QBITTORRENT_URL"] + "/sync/maindata",
            cookies=settingsDict["QBIT_COOKIE"],
        )
        qBitConnectionStatus = response["server_state"]["connection_status"]
        if qBitConnectionStatus == "disconnected":
            # Fixed garbled message (was "Skipping %s queue cleaning failed on %s.",
            # a copy-paste remnant from errorDetails)
            logger.warning(
                ">>> qBittorrent is disconnected. Skipping %s queue cleaning on %s.",
                failType,
                NAME,
            )
            return True
    return False
|
||||
|
||||
async def qBitRefreshCookie(settingsDict):
    """Log in to qBittorrent and store a fresh SID auth cookie in settingsDict.

    Runs the blocking requests.post in an executor so the event loop is not
    blocked. On any failure the cookie is cleared ({}), so callers see a
    consistent "not authenticated" state.
    """
    # Bind response before the try so the except branch can safely check it:
    # previously, if requests.post itself raised (e.g. connection refused),
    # `logger.error(response.text)` crashed with an UnboundLocalError.
    response = None
    try:
        response = await asyncio.get_event_loop().run_in_executor(
            None,
            lambda: requests.post(
                settingsDict['QBITTORRENT_URL'] + '/auth/login',
                data={
                    'username': settingsDict['QBITTORRENT_USERNAME'],
                    'password': settingsDict['QBITTORRENT_PASSWORD'],
                },
                headers={'content-type': 'application/x-www-form-urlencoded'},
                verify=settingsDict['SSL_VERIFICATION'],
            ),
        )
        # qBittorrent returns HTTP 200 with body "Fails." on bad credentials
        if response.text == 'Fails.':
            raise ConnectionError('Login failed.')
        response.raise_for_status()
        settingsDict['QBIT_COOKIE'] = {'SID': response.cookies['SID']}
        logger.debug('qBit cookie refreshed!')
    except Exception as error:
        logger.error('!! %s Error: !!', 'qBittorrent')
        logger.error('> %s', error)
        if response is not None:
            logger.error('> Details:')
            logger.error(response.text)
        settingsDict['QBIT_COOKIE'] = {}
|
||||
69
src/utils/startup.py
Normal file
69
src/utils/startup.py
Normal file
@@ -0,0 +1,69 @@
|
||||
import warnings
|
||||
from src.utils.log_setup import logger
|
||||
|
||||
def show_welcome(settings):
    """Log the startup banner: welcome text, tips, and a test-mode warning.

    All lines are collected first and emitted as a single log record so the
    banner is not interleaved with log output from other coroutines.
    """
    messages = []

    # Welcome banner
    messages.append("🎉🎉🎉 Decluttarr - Application Started! 🎉🎉🎉")
    messages.append("-"*80)
    messages.append("")
    messages.append("Like this app? Thanks for giving it a ⭐️ on GitHub!")
    messages.append("https://github.com/ManiMatter/decluttarr/")

    # Tip: raise the log level for more detail
    if settings.general.log_level == "INFO":
        messages.append("")
        messages.append("")
        messages.append("💡 Tip: More logs?")
        messages.append("If you want to know more about what's going on, switch log level to 'VERBOSE'")

    # Bug-report checklist
    messages.append("")
    messages.append("")
    messages.append("🐛 Found a bug?")
    messages.append("Before reporting bugs on GitHub, please:")
    messages.append("1) Check the readme on github")
    messages.append("2) Check open and closed issues on github")
    messages.append("3) Switch your logs to 'DEBUG' level")
    messages.append("4) Turn off any features other than the one(s) causing it")
    messages.append("5) Provide the full logs via pastebin on your GitHub issue")
    messages.append("Once submitted, thanks for being responsive and helping debug / re-test")

    # Test-mode warning (single check; the original nested a second, redundant
    # `if settings.general.test_run:` inside an identical outer one)
    if settings.general.test_run:
        messages.append("")
        messages.append("")
        messages.append("=================== IMPORTANT ====================")
        messages.append("")
        messages.append("⚠️ ⚠️ ⚠️ TEST MODE IS ACTIVE ⚠️ ⚠️ ⚠️")
        messages.append("Decluttarr won't actually do anything for you...")
        messages.append("You can change this via the setting 'test_run'")
        messages.append("")

    messages.append("")
    messages.append("-"*80)
    # Log all messages at once
    logger.info("\n".join(messages))
|
||||
|
||||
|
||||
async def launch_steps(settings):
    """Run one-time startup steps: warning filters, banner, and instance checks."""
    # Suppress the noisy per-request warning when SSL verification is off
    if not settings.general.ssl_verification:
        warnings.filterwarnings("ignore", message="Unverified HTTPS request")

    logger.info(settings)
    show_welcome(settings)

    logger.info("*** Checking Instances ***")
    # qBittorrent clients: verify connectivity, fetch the initial auth
    # cookie, and apply tagging (handled inside setup)
    for client in settings.download_clients.qbittorrent:
        await client.setup()

    # *arr instances: require at least one, then validate each one in turn
    settings.instances.check_any_arrs()
    for instance in settings.instances.arrs:
        await instance.setup()
|
||||
@@ -1,17 +0,0 @@
|
||||
# Set up classes that allow tracking of items from one loop to the next
|
||||
class Defective_Tracker:
    """Tracks downloads already caught as defective (e.g. stalled) in earlier
    loop iterations, so repeat offenders can be counted across runs."""

    def __init__(self, dict):
        # Parameter intentionally keeps the name `dict` to preserve the
        # existing call signature (it shadows the builtin inside __init__ only).
        self.dict = dict

    def __repr__(self):
        # Helpful when dumping tracker state in debug logs
        return f"{type(self).__name__}({self.dict!r})"
|
||||
|
||||
|
||||
class Download_Sizes_Tracker:
    """Tracks the observed file sizes of downloads between loop iterations
    (used to detect downloads that make no progress)."""

    def __init__(self, dict):
        # Parameter intentionally keeps the name `dict` to preserve the
        # existing call signature (it shadows the builtin inside __init__ only).
        self.dict = dict

    def __repr__(self):
        # Helpful when dumping tracker state in debug logs
        return f"{type(self).__name__}({self.dict!r})"
|
||||
|
||||
|
||||
class Deleted_Downloads:
    """Tracks which download IDs were already deleted during the current run,
    preventing the same download from being deleted twice."""

    def __init__(self, dict):
        # Parameter intentionally keeps the name `dict` to preserve the
        # existing call signature (it shadows the builtin inside __init__ only).
        self.dict = dict

    def __repr__(self):
        # Helpful when dumping tracker state in debug logs
        return f"{type(self).__name__}({self.dict!r})"
|
||||
65
src/utils/wanted_manager.py
Normal file
65
src/utils/wanted_manager.py
Normal file
@@ -0,0 +1,65 @@
|
||||
from src.utils.common import make_request
|
||||
|
||||
|
||||
class WantedManager:
    """Fetches and triggers searches for 'wanted' (missing or cutoff-unmet)
    items of a single *arr instance."""

    def __init__(self, arr, settings):
        # arr: the *arr instance descriptor (api_url, api_key, item keys)
        # settings: global settings passed through to make_request
        self.arr = arr
        self.settings = settings

    async def get_wanted_items(self, missing_or_cutoff):
        """Return all wanted records.

        missing_or_cutoff: drives whether 'missing' or 'cutoff' items are
        retrieved (it is the endpoint path segment).
        """
        total = await self._get_total_records(missing_or_cutoff)
        return await self._get_arr_records(missing_or_cutoff, total)

    async def _get_total_records(self, missing_or_cutoff):
        # Ask the wanted endpoint only for its total record count
        response = await make_request(
            method="GET",
            endpoint=f"{self.arr.api_url}/wanted/{missing_or_cutoff}",
            settings=self.settings,
            headers={"X-Api-Key": self.arr.api_key},
        )
        return response.json()["totalRecords"]

    async def _get_arr_records(self, missing_or_cutoff, record_count):
        # Fetch every record in one request by sizing the page to the full
        # count (no multi-page pagination is performed)
        if record_count == 0:
            return []

        params = {
            "page": "1",
            "pageSize": record_count,
            "sortKey": f"{self.arr.detail_item_key}s.lastSearchTime",
        }
        response = await make_request(
            method="GET",
            endpoint=f"{self.arr.api_url}/wanted/{missing_or_cutoff}",
            settings=self.settings,
            params=params,
            headers={"X-Api-Key": self.arr.api_key},
        )
        return response.json()["records"]

    async def search_items(self, detail_ids):
        """Trigger the *arr search command for the given detail IDs."""
        if isinstance(detail_ids, str):
            detail_ids = [detail_ids]

        json = {
            "name": self.arr.detail_item_search_command,
            self.arr.detail_item_ids_key: detail_ids,
        }
        await make_request(
            method="POST",
            endpoint=f"{self.arr.api_url}/command",
            settings=self.settings,
            json=json,
            headers={"X-Api-Key": self.arr.api_key},
        )
|
||||
Reference in New Issue
Block a user