diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..550ad05 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,5 @@ +__pycache__/ +config/config.conf +test*.py +ToDo +.vscode \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..fcae65f --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +__pycache__/ +.vscode/ +config/config.conf +test*.py +ToDo diff --git a/README.md b/README.md new file mode 100644 index 0000000..ed003f4 --- /dev/null +++ b/README.md @@ -0,0 +1,39 @@ +# **Declutarr** + +## Overview +Declutarr keeps the radarr & sonarr queue free of stalled / redundant downloads. + +Feature overview: +- Automatically delete failed downloads +- Automatically delete downloads belonging to Movies/TV shows that have been deleted in the meantime ('Orphan downloads') +- Automatically delete stalled downloads, after they have been found to be stalled multiple times in a row +- Automatically delete downloads that are stuck downloading metadata +- Automatically delete downloads belonging to Movies/TV shows that are unmonitored + +You may run this locally by launching main.py, or by mounting it inside a docker container. +A sample docker-compose.yml is included. 
+ +## Getting started +If you want to run in docker: +1) Pull the declutarr into "your-docker-folder/own_coding/declutarr" +2) Study the config file to understand the settings +3) Update your docker-compose (see the template) +4) Run the docker-compose +5) Enjoy + +If you want to run locally: +1) Pull declutarr into whatever location you want +2) Study the config file to understand the settings +3) Edit the config file to your liking +4) Run main.py +5) Enjoy + +## Credits +- Script for detecting stalled downloads expanded on code by MattDGTL/sonarr-radarr-queue-cleaner +- Script to read out config expanded on code by syncarr/syncarr +- SONARR/RADARR team & contributors for their great product, API documentation, and guidance in their Discord channel +- Particular thanks to them for adding an additional flag to their API that allowed this script to detect downloads stuck finding metadata + +## Disclaimer +This script comes free of any warranty, and you are using it at your own risk. +I do not intend to maintain this repo, feel free to fork & create PRs if you want to expand it for your own use diff --git a/test b/config/__init__.py similarity index 100% rename from test rename to config/__init__.py diff --git a/config/config.conf-Example b/config/config.conf-Example new file mode 100644 index 0000000..8025800 --- /dev/null +++ b/config/config.conf-Example @@ -0,0 +1,24 @@ +[general] +LOG_LEVEL = VERBOSE +TEST_RUN = True + +[features] +REMOVE_TIMER = 10 +REMOVE_FAILED = True +REMOVE_STALLED = True +REMOVE_METADATA_MISSING = True +REMOVE_ORPHANS = True +REMOVE_UNMONITORED = True +PERMITTED_ATTEMPTS = 3 +NO_STALLED_REMOVAL_QBIT_TAG = Don't Kill If Stalled + +[radarr] +RADARR_URL = http://radarr:7878 +RADARR_KEY = $RADARR_KEY + +[sonarr] +SONARR_URL = http://sonarr:8989 +SONARR_KEY = $SONARR_KEY + +[qbittorrent] +QBITTORRENT_URL = http://qbittorrent:8080 \ No newline at end of file diff --git a/config/config.conf-Explained b/config/config.conf-Explained new file mode 
100644 index 0000000..c3d3918 --- /dev/null +++ b/config/config.conf-Explained @@ -0,0 +1,115 @@ +# The config file is only relevant when running main.py locally. +# When running within docker, all settings are to be set via the docker-compose.yml, and this config.conf file will be ignored + +################################# GENERAL SECTION ################################# +# General parameters such as log level / test run +[general] + +###### LOG_LEVEL ###### +# Sets the level at which logging will take place. +# INFO will only show changes applied to Radarr/Sonarr +# VERBOSE will show when script runs (even if it results in no change) +# Type: String +# Permissible Values: CRITICAL, ERROR, WARNING, INFO, VERBOSE, DEBUG +# Is Mandatory: No (Defaults to INFO) +LOG_LEVEL = INFO + +###### TEST_RUN ###### +# Allows you to safely try out this tool. If active, downloads will not be removed. +# Type: Boolean +# Permissible Values: True, False +# Is Mandatory: No (Defaults to False) +TEST_RUN = False + +################################# FEATURES SETTINGS ################################# +# Steers which type of cleaning is applied to the downloads queue. +# Requires QUEUE_CLEANING to be set to True to take effect. 
+[features] + +###### REMOVE_TIMER ###### +# Sets how frequently the queue is cleaned from orphan and stalled downloads +# Type: Integer +# Unit: Minutes +# Is Mandatory: No (Defaults to 10) +REMOVE_TIMER = 10 + +###### REMOVE_FAILED ###### +# Steers whether failed downloads with no connections are removed from the queue +# Failed downloads are not added to the blocklist +# Type: Boolean +# Permissible Values: True, False +# Is Mandatory: No (Defaults to False) +REMOVE_FAILED = False + +###### REMOVE_STALLED ###### +# Steers whether stalled downloads with no connections are removed from the queue +# Stalled downloads are added to the blocklist, so that they are not re-requested in the future +# Type: Boolean +# Permissible Values: True, False +# Is Mandatory: No (Defaults to False) +REMOVE_STALLED = False + +###### REMOVE_METADATA_MISSING ###### +# Steers whether downloads stuck obtaining meta data are removed from the queue +# These downloads are added to the blocklist, so that they are not re-requested in the future +# Type: Boolean +# Permissible Values: True, False +# Is Mandatory: No (Defaults to False) +REMOVE_METADATA_MISSING = False + +###### REMOVE_ORPHANS ###### +# Steers whether orphan downloads are removed from the queue +# Orphan downloads are those that do not belong to any movie/tvshow anymore (since the movie/TV show was deleted post request) +# Orphan downloads are not added to the block list +# Type: Boolean +# Permissible Values: True, False +# Is Mandatory: No (Defaults to False) +REMOVE_ORPHANS = False + +###### REMOVE_UNMONITORED ###### +# Steers whether downloads belonging to unmonitored movies/TV shows are removed from the queue +# Note: Will only remove from queue if all tv shows depending on the same download are unmonitored +# Unmonitored downloads are not added to the block list +# Type: Boolean +# Permissible Values: True, False +# Is Mandatory: No (Defaults to False) +REMOVE_UNMONITORED = False + +###### PERMITTED_ATTEMPTS ###### +# 
Defines how many times a download has to be caught as stalled or stuck downloading metadata before it is removed +# Type: Integer +# Unit: Number of scans +# Is Mandatory: No (Defaults to 3) +PERMITTED_ATTEMPTS= 3 + +###### NO_STALLED_REMOVAL_QBIT_TAG ###### +# Downloads in qBittorrent tagged with this tag will not be killed even if they are stalled +# Type: String +# Is Mandatory: No (Defaults to "Don't Kill If Stalled") +NO_STALLED_REMOVAL_QBIT_TAG= Don't Kill If Stalled + +################################# RADARR SECTION ################################# +[radarr] +# Defines radarr instance on which download queue should be decluttered +# RADARR_URL : URL under which the instance can be reached. If not defined, this instance will not be monitored. +# RADARR_KEY : API Key (mandatory if RADARR_URL is specified) +RADARR_URL = http://radarrA:7878 +RADARR_KEY = XXXXX + +################################# SONARR SECTION ################################# +[sonarr] +# Please see the documentation under the RADARR section - the explanations are the same. +SONARR_URL = http://sonarrA:8989 +SONARR_KEY = XXXXX + +################################# QBITTORRENT SECTION ################################# +[qbittorrent] +# Defines URL of qBittorrent +# QBITTORRENT_URL : URL under which the instance can be reached. 
#!/usr/bin/env python
"""Loads the application settings, either from the Docker environment (when
IS_IN_DOCKER is set by the Dockerfile) or from config/config.conf (local run)."""
import sys
import os
import configparser

########################################################################################################################
# Check if in Docker (the Dockerfile sets IS_IN_DOCKER=1)
IS_IN_DOCKER = os.environ.get('IS_IN_DOCKER')

########################################################################################################################
def ConfigSectionMap(section):
    'Load one section of the config file into a dictionary of raw string values'
    dict1 = {}
    options = config.options(section)
    for option in options:
        # Narrowed from a bare except: only configparser interpolation/lookup
        # problems are tolerated per-option; anything else should surface.
        try:
            dict1[option] = config.get(section, option)
        except configparser.Error as error:
            print(f'exception on {option}!')
            dict1[option] = None
    return dict1

def cast(value, type_):
    'Cast value to the requested data type'
    return type_(value)

# Accepted spellings for boolean config values (case-insensitive).
# Replaces the previous eval() on config/environment input, which would
# execute arbitrary expressions placed in a config value.
_BOOL_MAP = {'true': True, 'false': False, 'none': None}

def get_config_value(key, config_section, is_mandatory, datatype, default_value=None):
    '''Return for each key the corresponding value from the Docker environment
    (when running in Docker) or from the config file (when running locally).

    key            : setting name (doubles as the env-variable name in Docker)
    config_section : section of config.conf the setting lives in
    is_mandatory   : when True, print an error and exit if the setting is absent
    datatype       : target type the raw string value is converted to
    default_value  : returned when the setting is absent and not mandatory
    '''
    if IS_IN_DOCKER:
        config_value = os.environ.get(key)
        if config_value is None:
            if is_mandatory:
                print(f'[ ERROR ]: Variable not specified in Docker environment: {key}')
                sys.exit(0)
            config_value = default_value
    else:
        try:
            config_value = ConfigSectionMap(config_section).get(key)
        except configparser.NoSectionError:
            config_value = None
        if config_value is None:
            if is_mandatory:
                print(f'[ ERROR ]: Mandatory variable not specified in config file, section [{config_section}]: {key} (data type: {datatype.__name__})')
                sys.exit(0)
            config_value = default_value

    # Apply data type
    try:
        if datatype == bool:
            config_value = _BOOL_MAP[str(config_value).lower()]
        if config_value is not None:
            config_value = cast(config_value, datatype)
    except (KeyError, TypeError, ValueError):
        print(f'[ ERROR ]: The value retrieved for [{config_section}]: {key} is "{config_value}" and cannot be converted to data type {datatype}')
        sys.exit(0)
    return config_value

########################################################################################################################
# Load Config File (only consulted when not running inside Docker)
Config_FileName = 'config.conf'
Config_FileFullPath = os.path.join(os.path.abspath(os.path.dirname(__file__)), Config_FileName)
sys.tracebacklimit = 0  # dont show stack traces in prod mode
config = configparser.ConfigParser()
config.optionxform = str  # maintain capitalization of config keys
config.read(Config_FileFullPath)

########################################################################################################################
# Load Config
# General
LOG_LEVEL = get_config_value('LOG_LEVEL', 'general', False, str, 'INFO')
TEST_RUN = get_config_value('TEST_RUN', 'general', False, bool, False)

# Features
REMOVE_TIMER = get_config_value('REMOVE_TIMER', 'features', False, int, 10)
REMOVE_FAILED = get_config_value('REMOVE_FAILED', 'features', False, bool, False)
REMOVE_STALLED = get_config_value('REMOVE_STALLED', 'features', False, bool, False)
REMOVE_METADATA_MISSING = get_config_value('REMOVE_METADATA_MISSING', 'features', False, bool, False)
REMOVE_ORPHANS = get_config_value('REMOVE_ORPHANS', 'features', False, bool, False)
REMOVE_UNMONITORED = get_config_value('REMOVE_UNMONITORED', 'features', False, bool, False)
PERMITTED_ATTEMPTS = get_config_value('PERMITTED_ATTEMPTS', 'features', False, int, 3)
NO_STALLED_REMOVAL_QBIT_TAG = get_config_value('NO_STALLED_REMOVAL_QBIT_TAG', 'features', False, str, 'Don\'t Kill If Stalled')

# Radarr (the API key is only mandatory when the URL is configured)
RADARR_URL = get_config_value('RADARR_URL', 'radarr', False, str)
RADARR_KEY = None if RADARR_URL is None else \
    get_config_value('RADARR_KEY', 'radarr', True, str)

# Sonarr
SONARR_URL = get_config_value('SONARR_URL', 'sonarr', False, str)
SONARR_KEY = None if SONARR_URL is None else \
    get_config_value('SONARR_KEY', 'sonarr', True, str)

# qBittorrent
QBITTORRENT_URL = get_config_value('QBITTORRENT_URL', 'qbittorrent', False, str, '')

########################################################################################################################
# At least one of the two instances must be configured, otherwise there is nothing to monitor
if not (RADARR_URL or SONARR_URL):
    print('[ ERROR ]: No Radarr/Sonarr URLs specified (nothing to monitor)')
    sys.exit(0)

########### Add API to URLs
if RADARR_URL: RADARR_URL += '/api/v3'
if SONARR_URL: SONARR_URL += '/api/v3'
if QBITTORRENT_URL: QBITTORRENT_URL += '/api/v2'
+ +CMD ["python", "main.py"] diff --git a/docker/Sample docker-compose.yml b/docker/Sample docker-compose.yml new file mode 100644 index 0000000..f973a25 --- /dev/null +++ b/docker/Sample docker-compose.yml @@ -0,0 +1,34 @@ +# declutarr - Keeps radarr and sonarr download queues free of stalled and redundant items +declutarr: + build: + context: ./own_coding/declutarr/ + dockerfile: docker/Dockerfile + container_name: declutarr + environment: + TZ: Europe/Zurich + PUID: 1000 + PGID: 1000 + + # General + LOG_LEVEL: INFO + + # features + REMOVE_TIMER: 10 + REMOVE_FAILED: True + REMOVE_STALLED: True + REMOVE_METADATA_MISSING: True + REMOVE_ORPHANS: True + REMOVE_UNMONITORED: True + PERMITTED_ATTEMPTS: 3 + NO_STALLED_REMOVAL_QBIT_TAG: Don't Kill If Stalled + + #Radarr + RADARR_URL: http://radarr:7878 + RADARR_KEY: $RADARR_API_KEY + + #Sonarr + SONARR_URL: http://sonarr:8989 + SONARR_KEY: $SONARR_API_KEY + + #qBitorrent + QBITTORRENT_URL: http://qbittorrent:8080 diff --git a/docker/requirements.txt b/docker/requirements.txt new file mode 100644 index 0000000..207cc9d --- /dev/null +++ b/docker/requirements.txt @@ -0,0 +1,5 @@ +requests +aiohttp +asyncio +python-dateutil +verboselogs \ No newline at end of file diff --git a/main.py b/main.py new file mode 100644 index 0000000..d0271e1 --- /dev/null +++ b/main.py @@ -0,0 +1,90 @@ +########### Import Libraries +import asyncio +import logging, verboselogs +from src.utils.rest import (rest_get) +from requests.exceptions import RequestException +import json +from dateutil.relativedelta import relativedelta as rd +from config.config import ( + IS_IN_DOCKER, + LOG_LEVEL, TEST_RUN, + REMOVE_TIMER, REMOVE_FAILED, REMOVE_STALLED, REMOVE_METADATA_MISSING, REMOVE_ORPHANS, REMOVE_UNMONITORED, PERMITTED_ATTEMPTS, + RADARR_URL, RADARR_KEY, + SONARR_URL, SONARR_KEY, + QBITTORRENT_URL +) +from src.queue_cleaner import (queue_cleaner) + +########### Enabling Logging +# Set up logging +log_level_num=logging.getLevelName(LOG_LEVEL) +logger = 
verboselogs.VerboseLogger(__name__) +logging.basicConfig( + format=('' if IS_IN_DOCKER else '%(asctime)s ') + ('[%(levelname)-7s]' if LOG_LEVEL=='VERBOSE' else '[%(levelname)s]') + ': %(message)s', + level=log_level_num +) + + +class Defective_Tracker: + # Keeps track of which downloads were already caught as stalled previously + def __init__(self, dict): + self.dict = dict + +# Main function +async def main(): + # Get name of Radarr / Sonarr instances + if RADARR_URL: + RADARR_NAME = (await rest_get(RADARR_URL+'/system/status', RADARR_KEY))['instanceName'] + if SONARR_URL: + SONARR_NAME = (await rest_get(SONARR_URL+'/system/status', SONARR_KEY))['instanceName'] + + # Print Settings + fmt = '{0.days} days {0.hours} hours {0.minutes} minutes' + logger.info('#' * 50) + logger.info('Application Started!') + logger.info('') + logger.info('*** Current Settings ***') + logger.info('%s | Removing failed downloads', str(REMOVE_FAILED)) + logger.info('%s | Removing stalled downloads', str(REMOVE_STALLED)) + logger.info('%s | Removing downloads missing metadata', str(REMOVE_METADATA_MISSING)) + logger.info('%s | Removing orphan downloads', str(REMOVE_ORPHANS)) + logger.info('%s | Removing downloads belonging to unmonitored TV shows/movies', str(REMOVE_UNMONITORED)) + logger.info('Running every %s.', fmt.format(rd(minutes=REMOVE_TIMER))) + logger.info('') + logger.info('*** Configured Instances ***') + if RADARR_URL: logger.info('%s: %s', RADARR_NAME, RADARR_URL) + if SONARR_URL: logger.info('%s: %s', SONARR_NAME, SONARR_URL) + if QBITTORRENT_URL: logger.info('qBittorrent: %s', QBITTORRENT_URL) + logger.info('') + logger.info('#' * 50) + if LOG_LEVEL == 'INFO': + logger.info('[LOG_LEVEL = INFO]: Only logging changes (switch to VERBOSE for more info)') + else: + logger.info(f'') + if TEST_RUN: + logger.info(f'*'* 50) + logger.info(f'*'* 50) + logger.info(f'') + logger.info(f'TEST_RUN FLAG IS SET!') + logger.info(f'THIS IS A TEST RUN AND NO UPDATES/DELETES WILL BE PERFORMED') + 
logger.info(f'') + logger.info(f'*'* 50) + logger.info(f'*'* 50) + + # Start application + while True: + logger.verbose('-' * 50) + if RADARR_URL: await queue_cleaner('radarr', RADARR_URL, RADARR_KEY, RADARR_NAME, REMOVE_FAILED, REMOVE_STALLED, REMOVE_METADATA_MISSING, REMOVE_ORPHANS, REMOVE_UNMONITORED, PERMITTED_ATTEMPTS, NO_STALLED_REMOVAL_QBIT_TAG, QBITTORRENT_URL, defective_tracker, TEST_RUN) + if SONARR_URL: await queue_cleaner('sonarr', SONARR_URL, SONARR_KEY, SONARR_NAME, REMOVE_FAILED, REMOVE_STALLED, REMOVE_METADATA_MISSING, REMOVE_ORPHANS, REMOVE_UNMONITORED, PERMITTED_ATTEMPTS, NO_STALLED_REMOVAL_QBIT_TAG, QBITTORRENT_URL, defective_tracker, TEST_RUN) + logger.verbose('') + logger.verbose('Queue clean-up complete!') + await asyncio.sleep(REMOVE_TIMER*60) + + return + +if __name__ == '__main__': + instances = {RADARR_URL: {}} if RADARR_URL else {} + \ + {SONARR_URL: {}} if SONARR_URL else {} + defective_tracker = Defective_Tracker(instances) + asyncio.run(main()) + diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/queue_cleaner.py b/src/queue_cleaner.py new file mode 100644 index 0000000..b198095 --- /dev/null +++ b/src/queue_cleaner.py @@ -0,0 +1,156 @@ +# Cleans the download queue +import logging, verboselogs +logger = verboselogs.VerboseLogger(__name__) +from src.utils.rest import (rest_get, rest_delete) +import json +from src.utils.nest_functions.py import (add_keys_nested_dict, nested_get) +class Deleted_Downloads: + # Keeps track of which downloads have already been deleted (to not double-delete) + def __init__(self, dict): + self.dict = dict + +async def get_queue(BASE_URL, API_KEY, params = {}): + totalRecords = (await rest_get(f'{BASE_URL}/queue', API_KEY, params))['totalRecords'] + if totalRecords == 0: + return None + queue = await rest_get(f'{BASE_URL}/queue', API_KEY, {'page': '1', 'pageSize': totalRecords}|params) + return queue + +async def remove_failed(radarr_or_sonarr, BASE_URL, 
# Cleans the download queue
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
from src.utils.rest import (rest_get, rest_delete)
import json
# Bug fix: the module path must not carry the '.py' suffix
from src.utils.nest_functions import (add_keys_nested_dict, nested_get)


class Deleted_Downloads:
    # Keeps track of which downloads have already been deleted (to not double-delete)
    # 'dict' actually holds a list of downloadIds (name kept for compatibility)
    def __init__(self, dict):
        self.dict = dict


async def get_queue(BASE_URL, API_KEY, params={}):
    'Return the full queue of the instance (all pages), or None when it is empty.'
    totalRecords = (await rest_get(f'{BASE_URL}/queue', API_KEY, params))['totalRecords']
    if totalRecords == 0:
        return None
    queue = await rest_get(f'{BASE_URL}/queue', API_KEY, {'page': '1', 'pageSize': totalRecords} | params)
    return queue


async def remove_failed(radarr_or_sonarr, BASE_URL, API_KEY, PERMITTED_ATTEMPTS, queue, deleted_downloads, TEST_RUN):
    # Detects failed and triggers delete. Does not add to blocklist
    failedItems = []
    for queueItem in queue['records']:
        if 'errorMessage' in queueItem and 'status' in queueItem:
            if queueItem['status'] == 'failed' or \
               (queueItem['status'] == 'warning' and queueItem['errorMessage'] == 'The download is missing files'):
                await remove_download(BASE_URL, API_KEY, queueItem['id'], queueItem['title'], queueItem['downloadId'], 'failed', False, deleted_downloads, TEST_RUN)
                failedItems.append(queueItem)
    return len(failedItems)


async def remove_stalled(radarr_or_sonarr, BASE_URL, API_KEY, PERMITTED_ATTEMPTS, defective_tracker, queue, deleted_downloads, NO_STALLED_REMOVAL_QBIT_TAG, QBITTORRENT_URL, TEST_RUN):
    # Detects stalled and triggers repeat check and subsequent delete. Adds to blocklist
    if QBITTORRENT_URL:
        # Torrents carrying the protection tag in qBittorrent must never be removed
        protected_downloadItems = await rest_get(QBITTORRENT_URL + '/torrents/info', '', {'tag': NO_STALLED_REMOVAL_QBIT_TAG})
        protected_downloadIDs = [str.upper(item['hash']) for item in protected_downloadItems]
    else:
        protected_downloadIDs = []
    stalledItems = []
    for queueItem in queue['records']:
        if 'errorMessage' in queueItem and 'status' in queueItem:
            if queueItem['status'] == 'warning' and \
               queueItem['errorMessage'] == 'The download is stalled with no connections':
                if queueItem['downloadId'] in protected_downloadIDs:
                    logger.verbose('>>> Detected stalled download, tagged not to be killed: %s', queueItem['title'])
                else:
                    stalledItems.append(queueItem)
    await check_permitted_attempts(stalledItems, 'stalled', True, deleted_downloads, BASE_URL, API_KEY, PERMITTED_ATTEMPTS, defective_tracker, TEST_RUN)
    return len(stalledItems)


async def remove_metadata_missing(radarr_or_sonarr, BASE_URL, API_KEY, PERMITTED_ATTEMPTS, defective_tracker, queue, deleted_downloads, TEST_RUN):
    # Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist
    missing_metadataItems = []
    for queueItem in queue['records']:
        if 'errorMessage' in queueItem and 'status' in queueItem:
            if queueItem['status'] == 'queued' and \
               queueItem['errorMessage'] == 'qBittorrent is downloading metadata':
                missing_metadataItems.append(queueItem)
    await check_permitted_attempts(missing_metadataItems, 'missing metadata', True, deleted_downloads, BASE_URL, API_KEY, PERMITTED_ATTEMPTS, defective_tracker, TEST_RUN)
    return len(missing_metadataItems)


async def remove_orphans(radarr_or_sonarr, BASE_URL, API_KEY, PERMITTED_ATTEMPTS, queue, deleted_downloads, TEST_RUN):
    # Removes downloads belonging to movies/tv shows that have been deleted in the meantime
    full_queue = await get_queue(BASE_URL, API_KEY, params={'includeUnknownMovieItems' if radarr_or_sonarr == 'radarr' else 'includeUnknownSeriesItems': 'true'})
    if not full_queue: return 0  # By now the queue may be empty
    # Bug fix: keep the complete queue records (the previous code projected the
    # items down to {id, title} and then crashed reading the dropped downloadId)
    queue_ids = [queueItem['id'] for queueItem in queue['records']]
    orphanItems = [queueItem for queueItem in full_queue['records'] if queueItem['id'] not in queue_ids]
    for orphanItem in orphanItems:
        await remove_download(BASE_URL, API_KEY, orphanItem['id'], orphanItem['title'], orphanItem['downloadId'], 'orphan', False, deleted_downloads, TEST_RUN)
    return len(orphanItems)


async def remove_unmonitored(radarr_or_sonarr, BASE_URL, API_KEY, PERMITTED_ATTEMPTS, queue, deleted_downloads, TEST_RUN):
    # Removes downloads belonging to movies/tv shows that are not monitored.
    # A download is only removed once NO monitored episode/movie references it.
    unmonitoredItems = []
    downloadItems = []
    for queueItem in queue['records']:
        if radarr_or_sonarr == 'sonarr':
            monitored = (await rest_get(f'{BASE_URL}/episode/{str(queueItem["episodeId"])}', API_KEY))['monitored']
        else:
            monitored = (await rest_get(f'{BASE_URL}/movie/{str(queueItem["movieId"])}', API_KEY))['monitored']
        # Bug fix: 'title' is carried along so the removal below can log it
        downloadItems.append({'downloadId': queueItem['downloadId'], 'id': queueItem['id'], 'title': queueItem['title'], 'monitored': monitored})
    monitored_downloadIds = [downloadItem['downloadId'] for downloadItem in downloadItems if downloadItem['monitored']]
    unmonitoredItems = [downloadItem for downloadItem in downloadItems if downloadItem['downloadId'] not in monitored_downloadIds]
    for unmonitoredItem in unmonitoredItems:
        # Bug fix: previously removed via the stale loop variable 'queueItem',
        # which always pointed at the LAST queue record
        await remove_download(BASE_URL, API_KEY, unmonitoredItem['id'], unmonitoredItem['title'], unmonitoredItem['downloadId'], 'unmonitored', False, deleted_downloads, TEST_RUN)
    return len(unmonitoredItems)


async def check_permitted_attempts(current_defective_items, failType, blocklist, deleted_downloads, BASE_URL, API_KEY, PERMITTED_ATTEMPTS, defective_tracker, TEST_RUN):
    # Checks if downloads are repeatedly found as stalled / stuck in metadata and if yes, deletes them
    # 1. Create list of currently defective
    current_defective = {}
    for queueItem in current_defective_items:
        current_defective[queueItem['id']] = {'title': queueItem['title'], 'downloadId': queueItem['downloadId']}

    # 2. Check if those that were previously defective are no longer defective -> those are recovered
    try:
        recovered_ids = [tracked_id for tracked_id in defective_tracker.dict[BASE_URL][failType] if tracked_id not in current_defective]
    except KeyError:
        recovered_ids = []
    for recovered_id in recovered_ids:
        del defective_tracker.dict[BASE_URL][failType][recovered_id]

    # 3. For those that are defective, add attempt + 1 if tracked before, else start tracking at 1.
    #    If exceeding the number of permitted attempts, delete them
    download_ids_stuck = []
    for queueId in current_defective:
        try:
            defective_tracker.dict[BASE_URL][failType][queueId]['Attempts'] += 1
        except KeyError:
            await add_keys_nested_dict(defective_tracker.dict, [BASE_URL, failType, queueId], {'title': current_defective[queueId]['title'], 'downloadId': current_defective[queueId]['downloadId'], 'Attempts': 1})
        if current_defective[queueId]['downloadId'] not in download_ids_stuck:
            download_ids_stuck.append(current_defective[queueId]['downloadId'])
        logger.info('>>> Detected %s download (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][queueId]['Attempts']), str(PERMITTED_ATTEMPTS), defective_tracker.dict[BASE_URL][failType][queueId]['title'])
        if defective_tracker.dict[BASE_URL][failType][queueId]['Attempts'] > PERMITTED_ATTEMPTS:
            await remove_download(BASE_URL, API_KEY, queueId, current_defective[queueId]['title'], current_defective[queueId]['downloadId'], failType, blocklist, deleted_downloads, TEST_RUN)
    return


async def remove_download(BASE_URL, API_KEY, queueId, queueTitle, downloadId, failType, blocklist, deleted_downloads, TEST_RUN):
    # Removes downloads and creates log entry (skips downloads already removed this round)
    if downloadId not in deleted_downloads.dict:
        logger.info('>>> Removing %s download: %s', failType, queueTitle)
        if not TEST_RUN: await rest_delete(f'{BASE_URL}/queue/{queueId}', API_KEY, {'removeFromClient': 'true', 'blocklist': blocklist})
        deleted_downloads.dict.append(downloadId)
    return


########### MAIN FUNCTION ###########
async def queue_cleaner(radarr_or_sonarr, BASE_URL, API_KEY, NAME, REMOVE_FAILED, REMOVE_STALLED, REMOVE_METADATA_MISSING, REMOVE_ORPHANS, REMOVE_UNMONITORED, PERMITTED_ATTEMPTS, NO_STALLED_REMOVAL_QBIT_TAG, QBITTORRENT_URL, defective_tracker, TEST_RUN):
    # Cleans up the downloads queue
    logger.verbose('Cleaning queue on %s:', NAME)
    try:
        queue = await get_queue(BASE_URL, API_KEY)
        if not queue:
            logger.verbose('>>> Queue is empty.')
            return

        deleted_downloads = Deleted_Downloads([])
        items_detected = 0
        if REMOVE_FAILED:
            items_detected += await remove_failed(radarr_or_sonarr, BASE_URL, API_KEY, PERMITTED_ATTEMPTS, queue, deleted_downloads, TEST_RUN)

        if REMOVE_STALLED:
            items_detected += await remove_stalled(radarr_or_sonarr, BASE_URL, API_KEY, PERMITTED_ATTEMPTS, defective_tracker, queue, deleted_downloads, NO_STALLED_REMOVAL_QBIT_TAG, QBITTORRENT_URL, TEST_RUN)

        if REMOVE_METADATA_MISSING:
            items_detected += await remove_metadata_missing(radarr_or_sonarr, BASE_URL, API_KEY, PERMITTED_ATTEMPTS, defective_tracker, queue, deleted_downloads, TEST_RUN)

        if REMOVE_ORPHANS:
            items_detected += await remove_orphans(radarr_or_sonarr, BASE_URL, API_KEY, PERMITTED_ATTEMPTS, queue, deleted_downloads, TEST_RUN)

        if REMOVE_UNMONITORED:
            items_detected += await remove_unmonitored(radarr_or_sonarr, BASE_URL, API_KEY, PERMITTED_ATTEMPTS, queue, deleted_downloads, TEST_RUN)

        if items_detected == 0:
            logger.verbose('>>> Queue is clean.')
    except Exception:
        # Bug fix: bare except silently hid ALL errors (including coding errors);
        # keep the warning level but log the traceback so failures are diagnosable
        logger.warning('>>> Queue cleaning failed on %s.', NAME, exc_info=True)

async def nested_set(dic, keys, value, matchConditions=None):
    """Set dic[keys[0]]...[keys[-1]] = value, creating intermediate dicts.

    When the container reached after walking keys[:-1] is a list of dicts,
    matchConditions ({attribute: required value}) selects the first item
    whose attributes all match, and the value is set on that item instead.
    """
    for key in keys[:-1]:
        dic = dic.setdefault(key, {})
    if matchConditions:
        for candidate in dic:
            if all(candidate[attr] == wanted for attr, wanted in matchConditions.items()):
                dic = candidate
                break
    dic[keys[-1]] = value


async def add_keys_nested_dict(d, keys, defaultValue=None):
    """Create the nested path `keys` in d if missing; the leaf is initialized
    to defaultValue only when the leaf key does not exist yet."""
    for key in keys[:-1]:
        d = d.setdefault(key, {})
    d.setdefault(keys[-1], defaultValue)


async def nested_get(dic, return_attribute, matchConditions):
    """Return item[return_attribute] for every item of dic whose attributes
    match all of matchConditions ({attribute: required value})."""
    return [
        item[return_attribute]
        for item in dic
        if all(item[attr] == wanted for attr, wanted in matchConditions.items())
    ]
########### Functions to call radarr/sonarr APIs
import logging
import asyncio
import requests
from requests.exceptions import RequestException
import json
from config.config import (TEST_RUN)


async def _call(method, url, api_key, params=None, data=None, extra_headers=None, no_content_codes=()):
    '''Shared helper: run a blocking requests call in the default executor.

    method           : HTTP verb, e.g. 'GET'
    params / data    : passed straight to requests
    extra_headers    : merged on top of the X-Api-Key header
    no_content_codes : status codes whose (empty) body must not be JSON-parsed
    Returns the parsed JSON body, or None on error / empty response.
    '''
    headers = {'X-Api-Key': api_key}
    if extra_headers:
        headers |= extra_headers
    try:
        # get_running_loop() is the supported call inside a coroutine
        # (get_event_loop() here is deprecated since Python 3.10)
        loop = asyncio.get_running_loop()
        response = await loop.run_in_executor(None, lambda: requests.request(method, url, params=params, data=data, headers=headers))
        response.raise_for_status()
        if response.status_code in no_content_codes:
            return None
        return response.json()
    except RequestException as e:
        logging.error(f'Error making API request to {url}: {e}')
        return None
    except ValueError as e:
        logging.error(f'Error parsing JSON response from {url}: {e}')
        return None


# GET
async def rest_get(url, api_key, params=None):
    'GET a resource and return its JSON body (None on error).'
    return await _call('GET', url, api_key, params=params)


# DELETE (no-op when TEST_RUN is set)
async def rest_delete(url, api_key, params=None):
    if TEST_RUN: return
    return await _call('DELETE', url, api_key, params=params, no_content_codes=(200, 204))


# POST (no-op when TEST_RUN is set); data is the pre-serialized JSON payload
async def rest_post(url, api_key, data):
    if TEST_RUN: return
    return await _call('POST', url, api_key, data=data, extra_headers={"content-type": "application/json"}, no_content_codes=(201,))


# PUT (no-op when TEST_RUN is set); data is the pre-serialized JSON payload
async def rest_put(url, api_key, data):
    if TEST_RUN: return
    return await _call('PUT', url, api_key, data=data, extra_headers={"content-type": "application/json"})