feat: add remove_completed job and new download client job type

This commit is contained in:
Jakub Buzuk
2025-09-11 02:08:56 +02:00
parent b2cb1ebf86
commit b433e06338
7 changed files with 580 additions and 15 deletions

View File

@@ -1,5 +1,6 @@
# Cleans the download queue
from src.jobs.remove_bad_files import RemoveBadFiles
from src.jobs.remove_completed import RemoveCompleted
from src.jobs.remove_failed_downloads import RemoveFailedDownloads
from src.jobs.remove_failed_imports import RemoveFailedImports
from src.jobs.remove_metadata_missing import RemoveMetadataMissing
@@ -25,6 +26,41 @@ class JobManager:
await self.removal_jobs()
await self.search_jobs()
async def run_download_client_jobs(self):
    """Execute every enabled job directly against the configured download clients.

    Iterates both supported client types (qBittorrent, SABnzbd), builds the
    applicable job instances per client, and runs those that are enabled.

    Returns the total number of items the jobs detected, or None when the
    download clients could not all be reached.
    """
    if not await self._download_clients_connected():
        return None

    total = 0
    for client_type in ("qbittorrent", "sabnzbd"):
        for client in getattr(self.settings.download_clients, client_type, []):
            logger.info(
                f"*** Running jobs on {client.name} ({client.base_url}) ***",
            )
            # Resolve which download-client jobs apply to this client.
            jobs = self._get_download_client_jobs_for_client(client, client_type)
            if any(job.job.enabled for job in jobs):
                for job in jobs:
                    total += await job.run()
            else:
                logger.verbose(
                    "Download Client Jobs: None triggered (No jobs active)",
                )
    return total
async def removal_jobs(self):
# Check removal jobs
removal_jobs = self._get_removal_jobs()
@@ -72,7 +108,7 @@ class JobManager:
async def _queue_has_items(self):
logger.debug(
f"job_manager.py/_queue_has_items (Before any removal jobs): Checking if any items in full queue"
"job_manager.py/_queue_has_items (Before any removal jobs): Checking if any items in full queue",
)
queue_manager = QueueManager(self.arr, self.settings)
full_queue = await queue_manager.get_queue_items("full")
@@ -99,10 +135,12 @@ class JobManager:
async def _check_client_connection_status(self, clients):
for client in clients:
logger.debug(
f"job_manager.py/_check_client_connection_status: Checking if {client.name} is connected"
f"job_manager.py/_check_client_connection_status: Checking if {client.name} is connected",
)
if not await client.check_connected():
logger.warning(f">>> {client.name} is disconnected. Skipping queue cleaning on {self.arr.name}.")
logger.warning(
f">>> {client.name} is disconnected. Skipping queue cleaning on {self.arr.name}.",
)
return False
return True
@@ -131,3 +169,26 @@ class JobManager:
removal_job_class(self.arr, self.settings, removal_job_name),
)
return jobs
def _get_download_client_jobs_for_client(self, client, client_type):
    """
    Build the download-client job instances that apply to a single client.

    A job is instantiated only when its name exists as a truthy attribute
    on settings.jobs; unknown/disabled jobs are silently skipped.
    """
    job_registry = {
        "remove_completed": RemoveCompleted,
    }
    return [
        job_cls(client, client_type, self.settings, name)
        for name, job_cls in job_registry.items()
        if getattr(self.settings.jobs, name, False)
    ]

View File

@@ -0,0 +1,152 @@
from abc import ABC, abstractmethod
from src.utils.common import make_request
from src.utils.log_setup import logger
class DownloadClientRemovalJob(ABC):
    """Base class for removal jobs that run on download clients directly."""

    job_name = None

    def __init__(
        self,
        download_client: object,
        download_client_type: str,
        settings: object,
        job_name: str,
    ) -> None:
        self.download_client = download_client
        self.download_client_type = download_client_type
        self.settings = settings
        self.job_name = job_name
        # Per-job configuration object; must exist on settings.jobs.
        self.job = getattr(self.settings.jobs, self.job_name)

    async def run(self) -> int:
        """Run the download client job and return the number of items removed."""
        if not self.job.enabled:
            return 0
        logger.debug(
            f"download_client_job.py/run: Launching job '{self.job_name}' on {self.download_client.name} "
            f"({self.download_client_type})",
        )
        candidates = await self._get_all_items()
        if not candidates:
            return 0
        # Subclasses decide what qualifies for removal; protected items are
        # always excluded afterwards.
        removable = self._filter_protected_items(
            await self._get_items_to_remove(candidates),
        )
        if not removable:
            logger.debug(f"No items to remove for job '{self.job_name}'.")
            return 0
        await self._remove_items(removable)
        return len(removable)

    async def _get_all_items(self) -> list:
        """Fetch every item from the download client; [] on error or unknown type."""
        items = []
        try:
            if self.download_client_type == "qbittorrent":
                items = await self.download_client.get_qbit_items()
            elif self.download_client_type == "sabnzbd":
                items = await self.download_client.get_history_items()
        except Exception as e:
            logger.error(
                f"Error fetching items from {self.download_client.name}: {e}",
            )
            items = []
        return items

    def _filter_protected_items(self, items: list) -> list:
        """Drop items carrying the protected tag/category configured in settings."""
        protected_tag = getattr(self.settings.general, "protected_tag", None)
        if not protected_tag:
            return items
        kept = []
        for entry in items:
            label = entry.get("name", "unknown")
            protected = False
            if self.download_client_type == "qbittorrent":
                # qBittorrent exposes tags as a comma-separated string.
                raw_tags = entry.get("tags", "").split(",")
                entry_tags = [tag.strip() for tag in raw_tags if tag.strip()]
                protected = (
                    protected_tag in entry_tags
                    or protected_tag == entry.get("category", "")
                )
            elif self.download_client_type == "sabnzbd":
                # SABnzbd has no tags; only the category can protect an item.
                protected = protected_tag == entry.get("category", "")
            if protected:
                logger.debug(f"Ignoring protected item: {label}")
            else:
                kept.append(entry)
        return kept

    @abstractmethod
    async def _get_items_to_remove(self, items: list) -> list:
        """Return a list of items to remove from the download client."""

    async def _remove_items(self, items: list) -> None:
        """Remove the affected items from the download client (dry-run aware)."""
        if self.settings.general.test_run:
            logger.info("Test run is enabled. Skipping actual removal.")
            for entry in items:
                logger.info(
                    f"Would have removed download: {entry.get('name', 'unknown')}",
                )
            return
        for entry in items:
            label = entry.get("name", "unknown")
            try:
                if self.download_client_type == "qbittorrent":
                    await self._remove_qbittorrent_item(entry)
                elif self.download_client_type == "sabnzbd":
                    await self._remove_sabnzbd_item(entry)
                logger.info(
                    f"Removed download: {label}",
                )
            except Exception as e:
                logger.error(f"Failed to remove {label}: {e}")

    async def _remove_qbittorrent_item(self, item: dict) -> None:
        """Remove a torrent from qBittorrent, deleting its files on disk."""
        payload = {
            "hashes": item["hash"].lower(),
            "deleteFiles": "true",
        }
        await make_request(
            "post",
            f"{self.download_client.api_url}/torrents/delete",
            self.settings,
            data=payload,
            cookies=self.download_client.cookie,
        )

    async def _remove_sabnzbd_item(self, item: dict) -> None:
        """Remove a download from SABnzbd history via its web API."""
        query = {
            "mode": "history",
            "name": "delete",
            "value": item["nzo_id"],
            "apikey": self.download_client.api_key,
            "output": "json",
        }
        await make_request(
            "get",
            self.download_client.api_url,
            self.settings,
            params=query,
        )

View File

@@ -0,0 +1,87 @@
"""Removes completed torrents that have specific tags/categories."""
from src.jobs.download_client_removal_job import DownloadClientRemovalJob
from src.utils.log_setup import logger
# qBittorrent torrent states that indicate seeding has stopped after
# completion; items must be in one of these states to be removal candidates.
COMPLETED_STATES = [
    "stoppedUP",
    "pausedUP",  # Older qBittorrent versions
]
class RemoveCompleted(DownloadClientRemovalJob):
    """Job to remove completed torrents that match specific tags or categories."""

    async def run(self) -> int:
        """Run the job; torrent-only, so Usenet (SABnzbd) clients are skipped."""
        if self.download_client_type != "sabnzbd":
            return await super().run()
        logger.debug(
            f"Skipping job '{self.job_name}' for Usenet client {self.download_client.name}.",
        )
        return 0

    async def _get_items_to_remove(self, items: list) -> list:
        """
        Select the items eligible for removal: those that are completed and
        carry one of the configured target tags or categories.
        """
        tags_wanted, categories_wanted = self._get_targets()
        if not (tags_wanted or categories_wanted):
            logger.debug(
                "No target tags or categories specified for remove_completed job.",
            )
            return []
        selected = []
        for entry in items:
            if not self._is_completed(entry):
                continue
            if not self._meets_target_criteria(entry, tags_wanted, categories_wanted):
                continue
            logger.debug(
                f"Found completed item to remove: {entry.get('name', 'unknown')}",
            )
            selected.append(entry)
        return selected

    def _is_completed(self, item: dict) -> bool:
        """Check if an item has met its seeding goals (ratio or seeding time)."""
        if item.get("state", "") not in COMPLETED_STATES:
            return False
        # A goal only counts when its per-item limit is a positive value.
        ratio_goal_met = item.get("ratio", 0) >= item.get("ratio_limit", -1) > 0
        time_goal_met = (
            item.get("seeding_time", 0) >= item.get("seeding_time_limit", -1) > 0
        )
        return ratio_goal_met or time_goal_met

    def _meets_target_criteria(
        self,
        item: dict,
        target_tags: list,
        target_categories: list,
    ) -> bool:
        """Check whether the item carries a target category or any target tag."""
        if item.get("category", "") in target_categories:
            return True
        item_tags = {
            tag.strip() for tag in item.get("tags", "").split(",") if tag.strip()
        }
        return not item_tags.isdisjoint(target_tags)

    def _get_targets(self) -> tuple[list, list]:
        """Get the list of tags and categories to look for from job settings."""
        return (
            getattr(self.job, "target_tags", []),
            getattr(self.job, "target_categories", []),
        )

View File

@@ -64,7 +64,7 @@ class JobDefaults:
self.max_concurrent_searches = max_concurrent_searches
else:
logger.warning(
f"Job default 'max_concurrent_searches' must be an integer greater 0. Found: {str(max_concurrent_searches)}. Using default: {self.max_concurrent_searches}"
f"Job default 'max_concurrent_searches' must be an integer greater 0. Found: {max_concurrent_searches!s}. Using default: {self.max_concurrent_searches}",
)
self.min_days_between_searches = job_defaults_config.get(
"min_days_between_searches",
@@ -84,6 +84,7 @@ class Jobs:
def _set_job_defaults(self):
self.remove_bad_files = JobParams(keep_archives=self.job_defaults.keep_archives)
self.remove_completed = JobParams()
self.remove_failed_downloads = JobParams()
self.remove_failed_imports = JobParams(
message_patterns=self.job_defaults.message_patterns,
@@ -109,7 +110,6 @@ class Jobs:
)
self.detect_deletions = JobParams()
def _set_job_configs(self, config):
# Populate jobs from YAML config
for job_name in self.__dict__: