diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml index ff80060..d1143c9 100644 --- a/.github/workflows/dev.yml +++ b/.github/workflows/dev.yml @@ -73,10 +73,10 @@ jobs: --build-arg SHORT_COMMIT_ID=$SHORT_COMMIT_ID \ --push \ - - name: "Clean up docker images" - uses: dataaxiom/ghcr-cleanup-action@main - with: - keep-n-tagged: 10 - exclude-tags: dev - dry-run: true - token: ${{secrets.GITHUB_TOKEN}} \ No newline at end of file + # - name: "Clean up docker images" + # uses: dataaxiom/ghcr-cleanup-action@main + # with: + # keep-n-tagged: 10 + # exclude-tags: dev + # dry-run: true + # token: ${{secrets.GITHUB_TOKEN}} \ No newline at end of file diff --git a/CONTRIBUTE.md b/CONTRIBUTE.md index 195f08c..e46caf6 100644 --- a/CONTRIBUTE.md +++ b/CONTRIBUTE.md @@ -39,4 +39,18 @@ Code contributions are very welcome - thanks for helping improve this app! 3) Please only commit code that you have written yourself and is not owned by anybody else 4) Please create a PR against the "dev" branch 5) Once I have reviewed it, I will merge it and it will create teh "dev" image -6) Please help testing that the dev image works, before we then commit it to the "latest" image (from main branch) \ No newline at end of file +6) Please help testing that the dev image works, before we then commit it to the "latest" image (from main branch) + +You do not need to know about how to create docker images to contribute here. 
+To get started: +1) Clone the git repository to your local machine +2) Create a virtual python environment (python3 -m venv venv) +3) Activate the virtual environment (source venv/bin/activate) +4) Install python libraries (pip install -r docker/requirements.txt) +5) Adjust the config/config.conf to your needs +6) Adjust the code in the files as needed +7) Run the script (python3 main.py) +8) Create a PR once you are ready + + + diff --git a/README.md b/README.md index c4bab8c..c7c0df9 100644 --- a/README.md +++ b/README.md @@ -65,9 +65,9 @@ services: ## Features - REMOVE_TIMER=10 - REMOVE_FAILED=True + - REMOVE_FAILED_IMPORTS=True - REMOVE_METADATA_MISSING=True - REMOVE_MISSING_FILES=True - - REMOVE_NO_FORMAT_UPGRADE=True - REMOVE_ORPHANS=True - REMOVE_SLOW=True - REMOVE_STALLED=True @@ -76,6 +76,7 @@ services: - PERMITTED_ATTEMPTS=3 - NO_STALLED_REMOVAL_QBIT_TAG=Don't Kill - IGNORE_PRIVATE_TRACKERS=True + - FAILED_IMPORT_MESSAGE_PATTERNS = '["Not a Custom Format upgrade for existing", "Not an upgrade for existing"]' ## Radarr - RADARR_URL=http://radarr:7878 - RADARR_KEY=$RADARR_API_KEY @@ -151,6 +152,16 @@ Steers which type of cleaning is applied to the downloads queue - Permissible Values: True, False - Is Mandatory: No (Defaults to False) +**REMOVE_FAILED_IMPORTS** +- Steers whether downloads that failed importing are removed from the queue +- This can happen, for example, when a better version is already present +- Note: Only considers an import failed if the import message contains a warning that is listed on FAILED_IMPORT_MESSAGE_PATTERNS (see below) +- These downloads are added to the blocklist +- If the setting IGNORE_PRIVATE_TRACKERS is true, and the affected torrent is a private tracker, the queue item will be removed, but the torrent files will be kept +- Type: Boolean +- Permissible Values: True, False +- Is Mandatory: No (Defaults to False) + **REMOVE_METADATA_MISSING** - Steers whether downloads stuck obtaining metadata are removed from the queue - 
These downloads are added to the blocklist, so that they are not re-requested @@ -166,15 +177,6 @@ Steers which type of cleaning is applied to the downloads queue - Permissible Values: True, False - Is Mandatory: No (Defaults to False) -**REMOVE_NO_FORMAT_UPGRADE** -- Steers whether downloads that failed importing since they are not a format upgrade are removed from the queue -- This occurs when a better version is already present -- These downloads are added to the blocklist -- If the setting IGNORE_PRIVATE_TRACKERS is true, and the affected torrent is a private tracker, the queue item will still be removed, but the torrent files will be kept -- Type: Boolean -- Permissible Values: True, False -- Is Mandatory: No (Defaults to False) - **REMOVE_ORPHANS** - Steers whether orphan downloads are removed from the queue - Orphan downloads are those that do not belong to any requested media anymore (Since the media was removed from radarr/sonarr/lidarr/readarr after the download started) @@ -239,6 +241,15 @@ Steers which type of cleaning is applied to the downloads queue - Permissible Values: True, False - Is Mandatory: No (Defaults to True) +**FAILED_IMPORT_MESSAGE_PATTERNS** +- Works in together with REMOVE_FAILED_IMPORTS (only relevant if this setting is true) +- Defines the patterns based on which the tool decides if a import with a warning should be considered failed +- Queue items are considered failed, if any of the specified patterns is contained in one of the messages of the queue item +- Note: If left empty, any import with a warning is considered failed +- Type: List +- Suggested values: ["Not a Custom Format upgrade for existing", "Not an upgrade for existing"] +- Is Mandatory: No (Defaults to [], which means all messages are failures) + --- ### **Radarr section** diff --git a/config/config.conf-Example b/config/config.conf-Example index b0afb55..80a2296 100644 --- a/config/config.conf-Example +++ b/config/config.conf-Example @@ -5,9 +5,9 @@ TEST_RUN = True 
[features] REMOVE_TIMER = 10 REMOVE_FAILED = True +REMOVE_FAILED_IMPORTS = True REMOVE_METADATA_MISSING = True REMOVE_MISSING_FILES = True -REMOVE_NO_FORMAT_UPGRADE = True REMOVE_ORPHANS = True REMOVE_SLOW = True REMOVE_STALLED = True @@ -16,6 +16,7 @@ MIN_DOWNLOAD_SPEED = 100 PERMITTED_ATTEMPTS = 3 NO_STALLED_REMOVAL_QBIT_TAG = Don't Kill IGNORE_PRIVATE_TRACKERS = FALSE +FAILED_IMPORT_MESSAGE_PATTERNS = ["Not a Custom Format upgrade for existing", "Not an upgrade for existing"] [radarr] RADARR_URL = http://radarr:7878 diff --git a/config/config.py b/config/config.py index 2240c23..dae75f4 100644 --- a/config/config.py +++ b/config/config.py @@ -2,7 +2,7 @@ import sys import os import configparser - +import json ######################################################################################################################## # Check if in Docker IS_IN_DOCKER = os.environ.get('IS_IN_DOCKER') @@ -38,6 +38,7 @@ def get_config_value(key, config_section, is_mandatory, datatype, default_value sys.exit(0) else: # return default_value + # print(f'The default value used for [{config_section}]: {key} is "{default_value}" (data type: {type(default_value).__name__})') config_value = default_value else: @@ -54,15 +55,20 @@ def get_config_value(key, config_section, is_mandatory, datatype, default_value sys.exit(0) else: # return default_value + # print(f'The default value used for [{config_section}]: {key} is "{default_value}" (data type: {type(default_value).__name__})') config_value = default_value # Apply data type try: if datatype == bool: config_value = eval(str(config_value).capitalize()) - if config_value is not None: config_value = cast(config_value, datatype) - except: + elif datatype == list: + config_value = json.loads(config_value) + elif config_value is not None: + config_value = cast(config_value, datatype) + except Exception as e: print(f'[ ERROR ]: The value retrieved for [{config_section}]: {key} is "{config_value}" and cannot be converted to data type 
{datatype}') + print(e) sys.exit(0) return config_value @@ -77,55 +83,57 @@ config.read(config_file_full_path) ######################################################################################################################## # Load Config -# General -LOG_LEVEL = get_config_value('LOG_LEVEL', 'general', False, str, 'INFO') -TEST_RUN = get_config_value('TEST_RUN', 'general', False, bool, False) -SSL_VERIFICATION = get_config_value('SSL_VERIFICATION', 'general', False, bool, True) +# General +LOG_LEVEL = get_config_value('LOG_LEVEL', 'general', False, str, 'INFO') +TEST_RUN = get_config_value('TEST_RUN', 'general', False, bool, False) +SSL_VERIFICATION = get_config_value('SSL_VERIFICATION', 'general', False, bool, True) -# Features -REMOVE_TIMER = get_config_value('REMOVE_TIMER', 'features', False, float, 10) -REMOVE_FAILED = get_config_value('REMOVE_FAILED', 'features', False, bool, False) -REMOVE_METADATA_MISSING = get_config_value('REMOVE_METADATA_MISSING', 'features', False, bool, False) -REMOVE_MISSING_FILES = get_config_value('REMOVE_MISSING_FILES' , 'features', False, bool, False) -REMOVE_NO_FORMAT_UPGRADE = get_config_value('REMOVE_NO_FORMAT_UPGRADE' , 'features', False, bool, False) -REMOVE_ORPHANS = get_config_value('REMOVE_ORPHANS' , 'features', False, bool, False) -REMOVE_SLOW = get_config_value('REMOVE_SLOW' , 'features', False, bool, False) -REMOVE_STALLED = get_config_value('REMOVE_STALLED', 'features', False, bool, False) -REMOVE_UNMONITORED = get_config_value('REMOVE_UNMONITORED' , 'features', False, bool, False) -MIN_DOWNLOAD_SPEED = get_config_value('MIN_DOWNLOAD_SPEED', 'features', False, int, 0) -PERMITTED_ATTEMPTS = get_config_value('PERMITTED_ATTEMPTS', 'features', False, int, 3) -NO_STALLED_REMOVAL_QBIT_TAG = get_config_value('NO_STALLED_REMOVAL_QBIT_TAG', 'features', False, str, 'Don\'t Kill') -IGNORE_PRIVATE_TRACKERS = get_config_value('IGNORE_PRIVATE_TRACKERS', 'features', False, bool, True) +# Features +REMOVE_TIMER = 
get_config_value('REMOVE_TIMER', 'features', False, float, 10) +REMOVE_FAILED = get_config_value('REMOVE_FAILED', 'features', False, bool, False) +REMOVE_FAILED_IMPORTS = get_config_value('REMOVE_FAILED_IMPORTS' , 'features', False, bool, False) +REMOVE_METADATA_MISSING = get_config_value('REMOVE_METADATA_MISSING', 'features', False, bool, False) +REMOVE_MISSING_FILES = get_config_value('REMOVE_MISSING_FILES' , 'features', False, bool, False) +REMOVE_NO_FORMAT_UPGRADE = get_config_value('REMOVE_NO_FORMAT_UPGRADE' , 'features', False, bool, False) # OUTDATED - WILL RETURN WARNING +REMOVE_ORPHANS = get_config_value('REMOVE_ORPHANS' , 'features', False, bool, False) +REMOVE_SLOW = get_config_value('REMOVE_SLOW' , 'features', False, bool, False) +REMOVE_STALLED = get_config_value('REMOVE_STALLED', 'features', False, bool, False) +REMOVE_UNMONITORED = get_config_value('REMOVE_UNMONITORED' , 'features', False, bool, False) +MIN_DOWNLOAD_SPEED = get_config_value('MIN_DOWNLOAD_SPEED', 'features', False, int, 0) +PERMITTED_ATTEMPTS = get_config_value('PERMITTED_ATTEMPTS', 'features', False, int, 3) +NO_STALLED_REMOVAL_QBIT_TAG = get_config_value('NO_STALLED_REMOVAL_QBIT_TAG', 'features', False, str, 'Don\'t Kill') +IGNORE_PRIVATE_TRACKERS = get_config_value('IGNORE_PRIVATE_TRACKERS', 'features', False, bool, True) +FAILED_IMPORT_MESSAGE_PATTERNS = get_config_value('FAILED_IMPORT_MESSAGE_PATTERNS','features', False, list, '[]') # Radarr -RADARR_URL = get_config_value('RADARR_URL', 'radarr', False, str) -RADARR_KEY = None if RADARR_URL == None else \ - get_config_value('RADARR_KEY', 'radarr', True, str) +RADARR_URL = get_config_value('RADARR_URL', 'radarr', False, str) +RADARR_KEY = None if RADARR_URL == None else \ + get_config_value('RADARR_KEY', 'radarr', True, str) -# Sonarr -SONARR_URL = get_config_value('SONARR_URL', 'sonarr', False, str) -SONARR_KEY = None if SONARR_URL == None else \ - get_config_value('SONARR_KEY', 'sonarr', True, str) +# Sonarr +SONARR_URL = 
get_config_value('SONARR_URL', 'sonarr', False, str) +SONARR_KEY = None if SONARR_URL == None else \ + get_config_value('SONARR_KEY', 'sonarr', True, str) -# Lidarr -LIDARR_URL = get_config_value('LIDARR_URL', 'lidarr', False, str) -LIDARR_KEY = None if LIDARR_URL == None else \ - get_config_value('LIDARR_KEY', 'lidarr', True, str) +# Lidarr +LIDARR_URL = get_config_value('LIDARR_URL', 'lidarr', False, str) +LIDARR_KEY = None if LIDARR_URL == None else \ + get_config_value('LIDARR_KEY', 'lidarr', True, str) -# Readarr -READARR_URL = get_config_value('READARR_URL', 'readarr', False, str) -READARR_KEY = None if READARR_URL == None else \ - get_config_value('READARR_KEY', 'readarr', True, str) +# Readarr +READARR_URL = get_config_value('READARR_URL', 'readarr', False, str) +READARR_KEY = None if READARR_URL == None else \ + get_config_value('READARR_KEY', 'readarr', True, str) # Whisparr -WHISPARR_URL = get_config_value('WHISPARR_URL', 'whisparr', False, str) -WHISPARR_KEY = None if WHISPARR_URL == None else \ - get_config_value('WHISPARR_KEY', 'whisparr', True, str) +WHISPARR_URL = get_config_value('WHISPARR_URL', 'whisparr', False, str) +WHISPARR_KEY = None if WHISPARR_URL == None else \ + get_config_value('WHISPARR_KEY', 'whisparr', True, str) -# qBittorrent -QBITTORRENT_URL = get_config_value('QBITTORRENT_URL', 'qbittorrent', False, str, '') -QBITTORRENT_USERNAME = get_config_value('QBITTORRENT_USERNAME', 'qbittorrent', False, str, '') -QBITTORRENT_PASSWORD = get_config_value('QBITTORRENT_PASSWORD', 'qbittorrent', False, str, '') +# qBittorrent +QBITTORRENT_URL = get_config_value('QBITTORRENT_URL', 'qbittorrent', False, str, '') +QBITTORRENT_USERNAME = get_config_value('QBITTORRENT_USERNAME', 'qbittorrent', False, str, '') +QBITTORRENT_PASSWORD = get_config_value('QBITTORRENT_PASSWORD', 'qbittorrent', False, str, '') ######################################################################################################################## ########### Validate 
settings diff --git a/main.py b/main.py index 5bc1a59..1af0246 100644 --- a/main.py +++ b/main.py @@ -74,9 +74,12 @@ async def main(settingsDict): for instance in settingsDict['INSTANCES']: settingsDict = await getArrInstanceName(settingsDict, instance) + # Check outdated + upgradeChecks(settingsDict) + # Display current settings when loading script showSettings(settingsDict) - + # Check Minimum Version and if instances are reachable and retrieve qbit cookie settingsDict['RADARR_MIN_VERSION'] = '5.3.6.8608' settingsDict['SONARR_MIN_VERSION'] = '4.0.1.1131' diff --git a/src/decluttarr.py b/src/decluttarr.py index 3c593db..4757eb1 100644 --- a/src/decluttarr.py +++ b/src/decluttarr.py @@ -3,9 +3,9 @@ import logging, verboselogs logger = verboselogs.VerboseLogger(__name__) from src.utils.shared import (errorDetails, get_queue) from src.jobs.remove_failed import remove_failed +from src.jobs.remove_failed_imports import remove_failed_imports from src.jobs.remove_metadata_missing import remove_metadata_missing from src.jobs.remove_missing_files import remove_missing_files -from src.jobs.remove_no_format_upgrade import remove_no_format_upgrade from src.jobs.remove_orphans import remove_orphans from src.jobs.remove_slow import remove_slow from src.jobs.remove_stalled import remove_stalled @@ -66,15 +66,15 @@ async def queueCleaner(settingsDict, arr_type, defective_tracker, download_sizes if settingsDict['REMOVE_FAILED']: items_detected += await remove_failed( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs) + if settingsDict['REMOVE_FAILED_IMPORTS']: + items_detected += await remove_failed_imports( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs) + if settingsDict['REMOVE_METADATA_MISSING']: items_detected += await remove_metadata_missing( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, 
async def remove_failed_imports(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
    # Removes queue items whose import failed with a warning message matching one of
    # FAILED_IMPORT_MESSAGE_PATTERNS (or any warning at all if no patterns are set).
    # Affected items are added to the blocklist so they are not re-grabbed.
    # Returns the number of affected items (0 on error).
    try:
        failType = 'failed import'
        queue = await get_queue(BASE_URL, API_KEY)
        logger.debug('remove_failed_imports/queue IN: %s', formattedQueueInfo(queue))
        if not queue: return 0
        # Find items affected
        patterns = settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']
        affectedItems = []
        for queueItem in queue['records']:
            if 'status' in queueItem \
            and 'trackedDownloadStatus' in queueItem \
            and 'trackedDownloadState' in queueItem \
            and 'statusMessages' in queueItem:

                if queueItem['status'] == 'completed' \
                and queueItem['trackedDownloadStatus'] == 'warning' \
                and queueItem['trackedDownloadState'] == 'importPending':

                    for status_message in queueItem['statusMessages']:
                        # BUG FIX: the original checked "for message in messages" where
                        # "messages" was never defined (NameError as soon as any pattern
                        # was configured). The texts live under each statusMessage's
                        # 'messages' key in the *arr queue API response.
                        messages = status_message.get('messages', [])
                        if not patterns or any(pattern in message for message in messages for pattern in patterns):
                            affectedItems.append(queueItem)
                            break

        affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
            addToBlocklist = True,
            doPrivateTrackerCheck = False,
            doProtectedDownloadCheck = True,
            doPermittedAttemptsCheck = False,
            extraParameters = ['keepTorrentForPrivateTrackers']
        )
        return len(affectedItems)
    except Exception as error:
        errorDetails(NAME, error)
        return 0
downloads that fail on import (no format upgrade)', str(settingsDict['REMOVE_NO_FORMAT_UPGRADE'])) - logger.info('%s | Removing orphan downloads', str(settingsDict['REMOVE_ORPHANS'])) - logger.info('%s | Removing slow downloads', str(settingsDict['REMOVE_SLOW'])) - logger.info('%s | Removing stalled downloads', str(settingsDict['REMOVE_STALLED'])) - logger.info('%s | Removing downloads belonging to unmonitored items', str(settingsDict['REMOVE_UNMONITORED'])) + logger.info('%s | Removing failed downloads (%s)', str(settingsDict['REMOVE_FAILED']), 'REMOVE_FAILED') + logger.info('%s | Removing failed imports (%s)', str(settingsDict['REMOVE_FAILED_IMPORTS']), 'REMOVE_FAILED_IMPORTS') + if settingsDict['REMOVE_FAILED_IMPORTS'] and not settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']: + logger.verbose ('> Any imports with a warning flag are considered failed. No message patterns specified (%s).', 'FAILED_IMPORT_MESSAGE_PATTERNS') + elif settingsDict['REMOVE_FAILED_IMPORTS'] and settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']: + logger.verbose ('> Imports with a warning flag are considered failed if the status message contains any of the following patterns:') + for pattern in settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']: + logger.verbose(' - "%s"', pattern) + logger.info('%s | Removing downloads missing metadata (%s)', str(settingsDict['REMOVE_METADATA_MISSING']), 'REMOVE_METADATA_MISSING') + logger.info('%s | Removing downloads missing files (%s)', str(settingsDict['REMOVE_MISSING_FILES']), 'REMOVE_MISSING_FILES') + logger.info('%s | Removing orphan downloads (%s)', str(settingsDict['REMOVE_ORPHANS']), 'REMOVE_ORPHANS') + logger.info('%s | Removing slow downloads (%s)', str(settingsDict['REMOVE_SLOW']), 'REMOVE_SLOW') + logger.info('%s | Removing stalled downloads (%s)', str(settingsDict['REMOVE_STALLED']), 'REMOVE_STALLED') + logger.info('%s | Removing downloads belonging to unmonitored items (%s)', str(settingsDict['REMOVE_UNMONITORED']), 'REMOVE_UNMONITORED') 
def upgradeChecks(settingsDict):
    # Warns the user when an outdated (renamed) setting is still configured:
    # REMOVE_NO_FORMAT_UPGRADE was superseded by REMOVE_FAILED_IMPORTS.
    if settingsDict['REMOVE_NO_FORMAT_UPGRADE']:
        # Use logger.warning: logger.warn is a deprecated alias of warning
        # in the standard logging module and emits a DeprecationWarning.
        logger.warning('❗️' * 10 + ' OUTDATED SETTINGS ' + '❗️' * 10 )
        logger.warning('')
        logger.warning("❗️ %s was replaced with %s.", 'REMOVE_NO_FORMAT_UPGRADE', 'REMOVE_FAILED_IMPORTS')
        logger.warning("❗️ Please check the ReadMe and update your settings.")
        logger.warning('')
        logger.warning('❗️' * 29)
        logger.warning('')
    return