diff --git a/.gitignore b/.gitignore
index f01f1f6..37b3c86 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,4 +4,5 @@ __pycache__/
 config/config.conf
 ToDo
 snip*.py
-venv
\ No newline at end of file
+venv
+testMagnets.txt
\ No newline at end of file
diff --git a/README.md b/README.md
index 3b2db0f..1913507 100644
--- a/README.md
+++ b/README.md
@@ -52,6 +52,7 @@ services:
       - MIN_DOWNLOAD_SPEED=100
       - PERMITTED_ATTEMPTS=3
       - NO_STALLED_REMOVAL_QBIT_TAG=Don't Kill
+      - IGNORE_PRIVATE_TRACKERS=True
       # Radarr
       - RADARR_URL=http://radarr:7878
       - RADARR_KEY=$RADARR_API_KEY
@@ -80,7 +81,8 @@ Note: The `config.conf` is disregarded when running via docker-compose.yml
 **LOG_LEVEL**
 - Sets the level at which logging will take place
 - `INFO` will only show changes applied to radarr/sonarr/lidarr
-- `VERBOSE` will show when script runs (even if it results in no change)
+- `VERBOSE` shows each check being performed, even if no change is applied
+- `DEBUG` shows very granular information, only required for debugging
 - Type: String
 - Permissible Values: CRITICAL, ERROR, WARNING, INFO, VERBOSE, DEBUG
 - Is Mandatory: No (Defaults to INFO)
@@ -171,14 +173,22 @@ Note: The `config.conf` is disregarded when running via docker-compose.yml
 - Is Mandatory: No (Defaults to 3)
 
 **NO_STALLED_REMOVAL_QBIT_TAG**
-- Downloads in qBittorrent tagged with this tag will not be killed even if they are stalled
-- Also protects slow downloads or those stuck downloading meta data
+- Downloads in qBittorrent tagged with this tag will not be removed
+- Applies to the following: Slow, Stalled, Unmonitored, Orphans, Metadata Missing
+- These will continue to be removed (since considered broken): Failed, Files Missing
 - Tag is automatically created in qBittorrent (required qBittorrent is reachable on `QBITTORRENT_URL`)
 - Also protects unmonitored downloads from being removed (relevant for multi-season packs)
 - Type: String
 - Is Mandatory: No (Defaults to `Don't Kill`)
 
----
+**IGNORE_PRIVATE_TRACKERS**
+- Private torrents in qBittorrent will not be removed from the queue if this is set to true
+- Applies to the following: Slow, Stalled, Unmonitored, Orphans, Metadata Missing
+- These will continue to be removed (since considered broken): Failed, Files Missing
+- Type: Boolean
+- Permissible Values: True, False
+- Is Mandatory: No (Defaults to True)
+
 
 ### **Radarr section**
 - Defines radarr instance on which download queue should be decluttered
diff --git a/config/config.conf-Example b/config/config.conf-Example
index 373b11f..5127c02 100644
--- a/config/config.conf-Example
+++ b/config/config.conf-Example
@@ -15,6 +15,7 @@ REMOVE_MISSING_FILES = True
 MIN_DOWNLOAD_SPEED = 100
 PERMITTED_ATTEMPTS = 3
 NO_STALLED_REMOVAL_QBIT_TAG = Don't Kill
+IGNORE_PRIVATE_TRACKERS = FALSE
 
 [radarr]
 RADARR_URL = http://radarr:7878
diff --git a/config/config.py b/config/config.py
index b209a7f..b882834 100644
--- a/config/config.py
+++ b/config/config.py
@@ -80,7 +80,7 @@ LOG_LEVEL = get_config_value('LOG_LEVEL',
 TEST_RUN = get_config_value('TEST_RUN', 'general', False, bool, False)
 
 # Features
-REMOVE_TIMER = get_config_value('REMOVE_TIMER', 'features', False, int, 10)
+REMOVE_TIMER = get_config_value('REMOVE_TIMER', 'features', False, float, 10)
 REMOVE_FAILED = get_config_value('REMOVE_FAILED', 'features', False, bool, False)
 REMOVE_STALLED = get_config_value('REMOVE_STALLED', 'features', False, bool, False)
 REMOVE_METADATA_MISSING = get_config_value('REMOVE_METADATA_MISSING', 'features', False, bool, False)
@@ -91,6 +91,7 @@ REMOVE_SLOW = get_config_value('REMOVE_SLOW' ,
 MIN_DOWNLOAD_SPEED = get_config_value('MIN_DOWNLOAD_SPEED', 'features', False, int, 0)
 PERMITTED_ATTEMPTS = get_config_value('PERMITTED_ATTEMPTS', 'features', False, int, 3)
 NO_STALLED_REMOVAL_QBIT_TAG = get_config_value('NO_STALLED_REMOVAL_QBIT_TAG', 'features', False, str, 'Don\'t Kill')
+IGNORE_PRIVATE_TRACKERS = get_config_value('IGNORE_PRIVATE_TRACKERS', 'features', False, bool, True)
 
 # Radarr
 RADARR_URL = get_config_value('RADARR_URL', 'radarr', False, str)
diff --git a/main.py b/main.py
index f5cbf10..2bdb565 100644
--- a/main.py
+++ b/main.py
@@ -6,7 +6,7 @@ from requests.exceptions import RequestException
 import json
 from dateutil.relativedelta import relativedelta as rd
 from config.config import settings_dict
-from src.queue_cleaner import queue_cleaner
+from src.decluttarr import queueCleaner
 #print(json.dumps(settings_dict,indent=4))
 import requests
 import platform
@@ -24,7 +24,7 @@ class Defective_Tracker:
     # Keeps track of which downloads were already caught as stalled previously
     def __init__(self, dict):
         self.dict = dict
-class Download_Sizes:
+class Download_Sizes_Tracker:
     # Keeps track of the file sizes of the downloads
     def __init__(self, dict):
         self.dict = dict
@@ -67,7 +67,9 @@ async def main():
     logger.info('Running every: %s', fmt.format(rd(minutes=settings_dict['REMOVE_TIMER'])))
     if settings_dict['REMOVE_SLOW']: logger.info('%s | Minimum speed enforced: ', str(settings_dict['MIN_DOWNLOAD_SPEED']) + 'KB/s')
     logger.info('Permitted number of times before stalled/missing metadata/slow downloads are removed: %s', str(settings_dict['PERMITTED_ATTEMPTS']))
-    if settings_dict['QBITTORRENT_URL']: logger.info('Downloads with this tag will be skipped: \"%s\"', settings_dict['NO_STALLED_REMOVAL_QBIT_TAG'])
+    if settings_dict['QBITTORRENT_URL']:
+        logger.info('Downloads with this tag will be skipped: \"%s\"', settings_dict['NO_STALLED_REMOVAL_QBIT_TAG'])
+        logger.info('Private Trackers will be skipped: %s', settings_dict['IGNORE_PRIVATE_TRACKERS'])
     logger.info('')
 
     logger.info('*** Configured Instances ***')
@@ -85,11 +87,11 @@ async def main():
         except Exception as error:
             error_occured = True
             logger.error('-- | %s *** Error: %s ***', settings_dict['RADARR_NAME'], error)
-
-        radarr_version = (await rest_get(settings_dict['RADARR_URL']+'/system/status', settings_dict['RADARR_KEY']))['version']
-        if version.parse(radarr_version) < version.parse('5.3.6.8608'):
-            error_occured = True
-            logger.error('-- | %s *** Error: %s ***', settings_dict['RADARR_NAME'], 'Please update Radarr to at least version 5.3.6.8608. Current version: ' + radarr_version)
+        if not error_occured:
+            radarr_version = (await rest_get(settings_dict['RADARR_URL']+'/system/status', settings_dict['RADARR_KEY']))['version']
+            if version.parse(radarr_version) < version.parse('5.3.6.8608'):
+                error_occured = True
+                logger.error('-- | %s *** Error: %s ***', settings_dict['RADARR_NAME'], 'Please update Radarr to at least version 5.3.6.8608. Current version: ' + radarr_version)
         if not error_occured:
             logger.info('OK | %s', settings_dict['RADARR_NAME'])
@@ -99,11 +101,11 @@ async def main():
         except Exception as error:
             error_occured = True
             logger.error('-- | %s *** Error: %s ***', settings_dict['SONARR_NAME'], error)
-
-        sonarr_version = (await rest_get(settings_dict['SONARR_URL']+'/system/status', settings_dict['SONARR_KEY']))['version']
-        if version.parse(sonarr_version) < version.parse('4.0.1.1131'):
-            error_occured = True
-            logger.error('-- | %s *** Error: %s ***', settings_dict['SONARR_NAME'], 'Please update Sonarr to at least version 4.0.1.1131. Current version: ' + sonarr_version)
+        if not error_occured:
+            sonarr_version = (await rest_get(settings_dict['SONARR_URL']+'/system/status', settings_dict['SONARR_KEY']))['version']
+            if version.parse(sonarr_version) < version.parse('4.0.1.1131'):
+                error_occured = True
+                logger.error('-- | %s *** Error: %s ***', settings_dict['SONARR_NAME'], 'Please update Sonarr to at least version 4.0.1.1131. Current version: ' + sonarr_version)
         if not error_occured:
             logger.info('OK | %s', settings_dict['SONARR_NAME'])
@@ -142,8 +144,8 @@ async def main():
         logger.info(f'*'* 50)
         logger.info(f'*'* 50)
         logger.info(f'')
-        logger.info(f'TEST_RUN FLAG IS SET!')
-        logger.info(f'THIS IS A TEST RUN AND NO UPDATES/DELETES WILL BE PERFORMED')
+        logger.info(f'!! TEST_RUN FLAG IS SET !!')
+        logger.info(f'NO UPDATES/DELETES WILL BE PERFORMED')
         logger.info(f'')
         logger.info(f'*'* 50)
         logger.info(f'*'* 50)
@@ -160,9 +162,19 @@ async def main():
     # Start application
     while True:
         logger.verbose('-' * 50)
-        if settings_dict['RADARR_URL']: await queue_cleaner(settings_dict, 'radarr', defective_tracker, download_sizes)
-        if settings_dict['SONARR_URL']: await queue_cleaner(settings_dict, 'sonarr', defective_tracker, download_sizes)
-        if settings_dict['LIDARR_URL']: await queue_cleaner(settings_dict, 'lidarr', defective_tracker, download_sizes)
+        # Cache protected (via Tag) and private torrents
+        protectedDownloadIDs = []
+        privateDowloadIDs = []
+        if settings_dict['QBITTORRENT_URL']:
+            protectedDowloadItems = await rest_get(settings_dict['QBITTORRENT_URL']+'/torrents/info',params={'tag': settings_dict['NO_STALLED_REMOVAL_QBIT_TAG']}, cookies=settings_dict['QBIT_COOKIE'] )
+            protectedDownloadIDs = [str.upper(item['hash']) for item in protectedDowloadItems]
+            if settings_dict['IGNORE_PRIVATE_TRACKERS']:
+                privateDowloadItems = await rest_get(settings_dict['QBITTORRENT_URL']+'/torrents/info',params={}, cookies=settings_dict['QBIT_COOKIE'] )
+                privateDowloadIDs = [str.upper(item['hash']) for item in privateDowloadItems if item.get('is_private', False)]
+
+        if settings_dict['RADARR_URL']: await queueCleaner(settings_dict, 'radarr', defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs)
+        if settings_dict['SONARR_URL']: await queueCleaner(settings_dict, 'sonarr', defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs)
+        if settings_dict['LIDARR_URL']: await queueCleaner(settings_dict, 'lidarr', defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs)
         logger.verbose('')
         logger.verbose('Queue clean-up complete!')
         await asyncio.sleep(settings_dict['REMOVE_TIMER']*60)
@@ -173,6 +185,6 @@ if __name__ == '__main__':
                 {settings_dict['SONARR_URL']: {}} if settings_dict['SONARR_URL'] else {} + \
                 {settings_dict['LIDARR_URL']: {}} if settings_dict['LIDARR_URL'] else {}
     defective_tracker = Defective_Tracker(instances)
-    download_sizes = Download_Sizes({})
+    download_sizes_tracker = Download_Sizes_Tracker({})
     asyncio.run(main())
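A note on the caching step added to main.py above: both lookups hit qBittorrent's /torrents/info endpoint (relative to QBITTORRENT_URL), once filtered server-side by tag and once unfiltered with a client-side is_private check (the field is read defensively via item.get, since older qBittorrent versions may not report it). A minimal synchronous sketch of the same lookup using plain requests instead of the project's async rest_get helper; the URL, cookie, and tag values below are illustrative only:

import requests

def fetch_hashes(qbit_url, cookie, tag=None, private_only=False):
    # One GET against qBittorrent's torrents/info; tag filtering happens server-side
    params = {'tag': tag} if tag else {}
    torrents = requests.get(f'{qbit_url}/torrents/info', params=params, cookies=cookie).json()
    if private_only:
        # Same field the new main.py code reads via item.get('is_private', False)
        torrents = [t for t in torrents if t.get('is_private', False)]
    return [t['hash'].upper() for t in torrents]

# Illustrative usage, mirroring the per-cycle caching in main.py:
# protected = fetch_hashes('http://qbit:8080/api/v2', cookie, tag="Don't Kill")
# private = fetch_hashes('http://qbit:8080/api/v2', cookie, private_only=True)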
diff --git a/src/decluttarr.py b/src/decluttarr.py
new file mode 100644
index 0000000..81be668
--- /dev/null
+++ b/src/decluttarr.py
@@ -0,0 +1,78 @@
+# Cleans the download queue
+import sys
+import logging, verboselogs
+logger = verboselogs.VerboseLogger(__name__)
+from src.utils.shared import (errorDetails, get_queue)
+from src.remove_failed import remove_failed
+from src.remove_metadata_missing import remove_metadata_missing
+from src.remove_missing_files import remove_missing_files
+from src.remove_orphans import remove_orphans
+from src.remove_slow import remove_slow
+from src.remove_stalled import remove_stalled
+from src.remove_unmonitored import remove_unmonitored
+
+class Deleted_Downloads:
+    # Keeps track of which downloads have already been deleted (to not double-delete)
+    def __init__(self, dict):
+        self.dict = dict
+
+
+async def queueCleaner(settings_dict, arr_type, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs):
+    # Read out correct instance depending on radarr/sonarr flag
+    run_dict = {}
+    if arr_type == 'radarr':
+        BASE_URL = settings_dict['RADARR_URL']
+        API_KEY = settings_dict['RADARR_KEY']
+        NAME = settings_dict['RADARR_NAME']
+        full_queue_param = 'includeUnknownMovieItems'
+    elif arr_type == 'sonarr':
+        BASE_URL = settings_dict['SONARR_URL']
+        API_KEY = settings_dict['SONARR_KEY']
+        NAME = settings_dict['SONARR_NAME']
+        full_queue_param = 'includeUnknownSeriesItems'
+    elif arr_type == 'lidarr':
+        BASE_URL = settings_dict['LIDARR_URL']
+        API_KEY = settings_dict['LIDARR_KEY']
+        NAME = settings_dict['LIDARR_NAME']
+        full_queue_param = 'includeUnknownArtistItems'
+    else:
+        logger.error('Unknown arr_type specified, exiting: %s', str(arr_type))
+        sys.exit()
+
+    # Cleans up the downloads queue
+    logger.verbose('Cleaning queue on %s:', NAME)
+
+    full_queue = await get_queue(BASE_URL, API_KEY, params = {full_queue_param: True})
+    if not full_queue:
+        logger.verbose('>>> Queue is empty.')
+        return
+
+    deleted_downloads = Deleted_Downloads([])
+    items_detected = 0
+    try:
+        if settings_dict['REMOVE_FAILED']:
+            items_detected += await remove_failed( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
+
+        if settings_dict['REMOVE_STALLED']:
+            items_detected += await remove_stalled( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
+
+        if settings_dict['REMOVE_METADATA_MISSING']:
+            items_detected += await remove_metadata_missing( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
+
+        if settings_dict['REMOVE_ORPHANS']:
+            items_detected += await remove_orphans( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, full_queue_param)
+
+        if settings_dict['REMOVE_UNMONITORED']:
+            items_detected += await remove_unmonitored( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, arr_type)
+
+        if settings_dict['REMOVE_MISSING_FILES']:
+            items_detected += await remove_missing_files( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
+
+        if settings_dict['REMOVE_SLOW']:
+            items_detected += await remove_slow( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, download_sizes_tracker)
+
+        if items_detected == 0:
+            logger.verbose('>>> Queue is clean.')
+    except Exception as error:
+        errorDetails(NAME, error)
+    return
\ No newline at end of file
diff --git a/src/queue_cleaner.py b/src/queue_cleaner.py
deleted file mode 100644
index fdd4764..0000000
--- a/src/queue_cleaner.py
+++ /dev/null
@@ -1,308 +0,0 @@
-# Cleans the download queue
-import logging, verboselogs
-logger = verboselogs.VerboseLogger(__name__)
-from src.utils.rest import (rest_get, rest_delete, rest_post)
-import json
-from src.utils.nest_functions import (add_keys_nested_dict, nested_get)
-import sys, os
-import traceback
-
-
-class Deleted_Downloads:
-    # Keeps track of which downloads have already been deleted (to not double-delete)
-    def __init__(self, dict):
-        self.dict = dict
-
-async def get_queue(BASE_URL, API_KEY, params = {}):
-    await rest_post(url=BASE_URL+'/command', json={'name': 'RefreshMonitoredDownloads'}, headers={'X-Api-Key': API_KEY})
-    totalRecords = (await rest_get(f'{BASE_URL}/queue', API_KEY, params))['totalRecords']
-    if totalRecords == 0:
-        return None
-    queue = await rest_get(f'{BASE_URL}/queue', API_KEY, {'page': '1', 'pageSize': totalRecords}|params)
-    return queue
-
-async def remove_failed(settings_dict, BASE_URL, API_KEY, deleted_downloads):
-    # Detects failed and triggers delete. Does not add to blocklist
-    queue = await get_queue(BASE_URL, API_KEY)
-    if not queue: return 0
-    failedItems = []
-    for queueItem in queue['records']:
-        if 'errorMessage' in queueItem and 'status' in queueItem:
-            if queueItem['status'] == 'failed' or \
-            (queueItem['status'] == 'warning' and queueItem['errorMessage'] == 'The download is missing files'):
-                await remove_download(BASE_URL, API_KEY, queueItem['id'], queueItem['title'], queueItem['downloadId'], 'failed', False, deleted_downloads, settings_dict['TEST_RUN'])
-                failedItems.append(queueItem)
-    return len(failedItems)
-
-async def remove_stalled(settings_dict, BASE_URL, API_KEY, deleted_downloads, defective_tracker):
-    # Detects stalled and triggers repeat check and subsequent delete. Adds to blocklist
-    queue = await get_queue(BASE_URL, API_KEY)
-    if not queue: return 0
-    logger.debug('remove_stalled/queue: %s', str(queue))
-    if settings_dict['QBITTORRENT_URL']:
-        protected_dowloadItems = await rest_get(settings_dict['QBITTORRENT_URL']+'/torrents/info',params={'tag': settings_dict['NO_STALLED_REMOVAL_QBIT_TAG']}, cookies=settings_dict['QBIT_COOKIE'] )
-        protected_downloadIDs = [str.upper(item['hash']) for item in protected_dowloadItems]
-    else:
-        protected_downloadIDs = []
-    stalledItems = []
-    already_detected = []
-    for queueItem in queue['records']:
-        if 'errorMessage' in queueItem and 'status' in queueItem:
-            if queueItem['status'] == 'warning' and \
-            queueItem['errorMessage'] == 'The download is stalled with no connections':
-                if queueItem['downloadId'] in protected_downloadIDs:
-                    if queueItem['downloadId'] not in already_detected:
-                        already_detected.append(queueItem['downloadId'])
-                        logger.verbose('>>> Detected stalled download, tagged not to be killed: %s',queueItem['title'])
-                else:
-                    stalledItems.append(queueItem)
-    await check_permitted_attempts(settings_dict, stalledItems, 'stalled', True, deleted_downloads, BASE_URL, API_KEY, defective_tracker)
-    queue = await get_queue(BASE_URL, API_KEY)
-    logger.debug('remove_stalled/queue OUT: %s', str(queue))
-    return len(stalledItems)
-
-async def test_remove_ALL(settings_dict, BASE_URL, API_KEY, deleted_downloads, defective_tracker):
-    # Detects stalled and triggers repeat check and subsequent delete. Adds to blocklist
-    queue = await get_queue(BASE_URL, API_KEY)
-    if not queue: return 0
-    logger.debug('test_remove_ALL/queue: %s', str(queue))
-    stalledItems = []
-    for queueItem in queue['records']:
-        stalledItems.append(queueItem)
-    await check_permitted_attempts(settings_dict, stalledItems, 'stalled', True, deleted_downloads, BASE_URL, API_KEY, defective_tracker)
-    logger.debug('test_remove_ALL/queue OUT: %s', str(await get_queue(BASE_URL, API_KEY) ))
-    return len(stalledItems)
-
-
-async def remove_metadata_missing(settings_dict, BASE_URL, API_KEY, deleted_downloads, defective_tracker):
-    # Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist
-    queue = await get_queue(BASE_URL, API_KEY)
-    if not queue: return 0
-    logger.debug('remove_metadata_missing/queue: %s', str(queue))
-    missing_metadataItems = []
-    for queueItem in queue['records']:
-        if 'errorMessage' in queueItem and 'status' in queueItem:
-            if queueItem['status'] == 'queued' and \
-            queueItem['errorMessage'] == 'qBittorrent is downloading metadata':
-                missing_metadataItems.append(queueItem)
-    await check_permitted_attempts(settings_dict, missing_metadataItems, 'missing metadata', True, deleted_downloads, BASE_URL, API_KEY, defective_tracker)
-    logger.debug('remove_metadata_missing/queue OUT: %s', str(await get_queue(BASE_URL, API_KEY) ))
-    return len(missing_metadataItems)
-
-async def remove_orphans(settings_dict, BASE_URL, API_KEY, deleted_downloads, full_queue_param):
-    # Removes downloads belonging to movies/tv shows that have been deleted in the meantime
-    full_queue = await get_queue(BASE_URL, API_KEY, params = {full_queue_param: True})
-    if not full_queue: return 0 # By now the queue may be empty
-    queue = await get_queue(BASE_URL, API_KEY)
-    logger.debug('remove_orphans/full queue IN: %s', str(full_queue))
-    logger.debug('remove_orphans/queue IN: %s', str(queue))
-    full_queue_items = [{'id': queueItem['id'], 'title': queueItem['title'], 'downloadId': queueItem['downloadId']} for queueItem in full_queue['records']]
-    if queue:
-        queue_ids = [queueItem['id'] for queueItem in queue['records']]
-    else:
-        queue_ids = []
-    orphanItems = [{'id': queueItem['id'], 'title': queueItem['title'], 'downloadId': queueItem['downloadId']} for queueItem in full_queue_items if queueItem['id'] not in queue_ids]
-    for queueItem in orphanItems:
-        await remove_download(settings_dict, BASE_URL, API_KEY, queueItem['id'], queueItem['title'], queueItem['downloadId'], 'orphan', False, deleted_downloads)
-    logger.debug('remove_orphans/full queue OUT: %s', str(await get_queue(BASE_URL, API_KEY, params = {full_queue_param: True})))
-    logger.debug('remove_orphans/queue OUT: %s', str(await get_queue(BASE_URL, API_KEY) ))
-    return len(orphanItems)
-
-async def remove_unmonitored(settings_dict, BASE_URL, API_KEY, deleted_downloads, arr_type):
-    # Removes downloads belonging to movies/tv shows that are not monitored
-    queue = await get_queue(BASE_URL, API_KEY)
-    if not queue: return 0
-    logger.debug('remove_unmonitored/queue IN: %s', str(queue))
-    downloadItems = []
-    if settings_dict['QBITTORRENT_URL']:
-        protected_dowloadItems = await rest_get(settings_dict['QBITTORRENT_URL']+'/torrents/info',params={'tag': settings_dict['NO_STALLED_REMOVAL_QBIT_TAG']}, cookies=settings_dict['QBIT_COOKIE'] )
-        protected_downloadIDs = [str.upper(item['hash']) for item in protected_dowloadItems]
-    else:
-        protected_downloadIDs = []
-    for queueItem in queue['records']:
-        if arr_type == 'sonarr':
-            monitored = (await rest_get(f'{BASE_URL}/episode/{str(queueItem["episodeId"])}', API_KEY))['monitored']
-        elif arr_type == 'radarr':
-            monitored = (await rest_get(f'{BASE_URL}/movie/{str(queueItem["movieId"])}', API_KEY))['monitored']
-        elif arr_type == 'lidarr':
-            monitored = (await rest_get(f'{BASE_URL}/album/{str(queueItem["albumId"])}', API_KEY))['monitored']
-        downloadItems.append({'downloadId': queueItem['downloadId'], 'id': queueItem['id'], 'monitored': monitored, 'title': queueItem['title']})
-    unmonitoredItems= []
-    already_detected = []
-    for downloadItem in downloadItems:
-        if not downloadItem['monitored']:
-            if downloadItem['downloadId'] in protected_downloadIDs:
-                if downloadItem['downloadId'] not in already_detected:
-                    already_detected.append(queueItem['downloadId'])
-                    logger.verbose('>>> Detected unmonitored download, tagged not to be killed: %s',downloadItem['title'])
-            else:
-                unmonitoredItems.append(downloadItem)
-
-    for queueItem in unmonitoredItems:
-        await remove_download(settings_dict, BASE_URL, API_KEY, queueItem['id'], queueItem['title'], queueItem['downloadId'], 'unmonitored', False, deleted_downloads)
-    logger.debug('remove_unmonitored/queue OUT: %s', str(await get_queue(BASE_URL, API_KEY) ))
-    return len(unmonitoredItems)
-
-async def remove_missing_files(settings_dict, BASE_URL, API_KEY, deleted_downloads, defective_tracker):
-    # Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist
-    queue = await get_queue(BASE_URL, API_KEY)
-    if not queue: return 0
-    logger.debug('remove_missing_files/queue: %s', str(queue))
-    missing_filesItems = []
-    for queueItem in queue['records']:
-        if 'errorMessage' in queueItem and 'status' in queueItem:
-            if queueItem['status'] == 'warning' and \
-            queueItem['errorMessage'] == 'DownloadClientQbittorrentTorrentStateMissingFiles':
-                missing_filesItems.append(queueItem)
-    for queueItem in missing_filesItems:
-        await remove_download(settings_dict, BASE_URL, API_KEY, queueItem['id'], queueItem['title'], queueItem['downloadId'], 'missing files', False, deleted_downloads)
-    logger.debug('remove_missing_files/queue OUT: %s', str(await get_queue(BASE_URL, API_KEY) ))
-    return len(missing_filesItems)
-
-async def remove_slow(settings_dict, BASE_URL, API_KEY, deleted_downloads, defective_tracker, download_sizes):
-    # Detects slow downloads and triggers delete. Adds to blocklist
-    queue = await get_queue(BASE_URL, API_KEY)
-    if not queue: return 0
-    logger.debug('remove_slow/queue: %s', str(queue))
-    if settings_dict['QBITTORRENT_URL']:
-        protected_dowloadItems = await rest_get(settings_dict['QBITTORRENT_URL']+'/torrents/info',params={'tag': settings_dict['NO_STALLED_REMOVAL_QBIT_TAG']}, cookies=settings_dict['QBIT_COOKIE'] )
-        protected_downloadIDs = [str.upper(item['hash']) for item in protected_dowloadItems]
-    else:
-        protected_downloadIDs = []
-    slowItems = []
-    already_detected = []
-
-    for queueItem in queue['records']:
-        if 'downloadId' in queueItem and 'size' in queueItem and 'sizeleft' in queueItem and 'status' in queueItem:
-            downloaded_size = int((queueItem['size'] - queueItem['sizeleft']) / 1000)
-            speed = (downloaded_size - download_sizes.dict.get(queueItem['downloadId'], 0)) / (settings_dict['REMOVE_TIMER'] * 60)
-            if queueItem['status'] == 'downloading' and \
-            queueItem['downloadId'] in download_sizes.dict and \
-            speed < settings_dict['MIN_DOWNLOAD_SPEED']:
-                if queueItem['downloadId'] in protected_downloadIDs:
-                    if queueItem['downloadId'] not in already_detected:
-                        already_detected.append(queueItem['downloadId'])
-                        logger.verbose('>>> Detected slow download, tagged not to be killed: %s (%dKB/s)',queueItem['title'], speed)
-                else:
-                    slowItems.append(queueItem)
-            download_sizes.dict[queueItem['downloadId']] = downloaded_size
-    await check_permitted_attempts(settings_dict, slowItems, 'slow', True, deleted_downloads, BASE_URL, API_KEY, defective_tracker)
-    queue = await get_queue(BASE_URL, API_KEY)
-    logger.debug('remove_slow/download_sizes.dict: %s', str(download_sizes.dict))
-    logger.debug('remove_slow/queue OUT: %s', str(queue))
-    return len(slowItems)
-
-async def check_permitted_attempts(settings_dict, current_defective_items, failType, blocklist, deleted_downloads, BASE_URL, API_KEY, defective_tracker):
-    # Checks if downloads are repeatedly found as stalled / stuck in metadata and if yes, deletes them
-    # 1. Create list of currently defective
-    current_defective = {}
-    for queueItem in current_defective_items:
-        current_defective[queueItem['id']] = {'title': queueItem['title'],'downloadId': queueItem['downloadId']}
-    logger.debug('check_permitted_attempts/current_defective: %s', str(current_defective))
-
-    # 2. Check if those that were previously defective are no longer defective -> those are recovered
-    try:
-        recovered_ids = [tracked_id for tracked_id in defective_tracker.dict[BASE_URL][failType] if tracked_id not in current_defective]
-    except KeyError:
-        recovered_ids = []
-
-    logger.debug('check_permitted_attempts/recovered_ids: %s' + str(recovered_ids))
-
-    for recovered_id in recovered_ids:
-        del defective_tracker.dict[BASE_URL][failType][recovered_id]
-
-    logger.debug('check_permitted_attempts/defective_tracker.dict IN: %s', str(defective_tracker.dict))
-    # 3. For those that are defective, add attempt + 1 if present before, or make attempt = 0. If exceeding number of permitted attempts, delete hem
-    download_ids_stuck = []
-    for queueId in current_defective:
-        try:
-            defective_tracker.dict[BASE_URL][failType][queueId]['Attempts'] += 1
-        except KeyError:
-            await add_keys_nested_dict(defective_tracker.dict,[BASE_URL, failType, queueId], {'title': current_defective[queueId]['title'], 'downloadId': current_defective[queueId]['downloadId'], 'Attempts': 1})
-        if current_defective[queueId]['downloadId'] not in download_ids_stuck:
-            download_ids_stuck.append(current_defective[queueId]['downloadId'])
-            logger.info('>>> Detected %s download (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][queueId]['Attempts']), str(settings_dict['PERMITTED_ATTEMPTS']), defective_tracker.dict[BASE_URL][failType][queueId]['title'])
-        if defective_tracker.dict[BASE_URL][failType][queueId]['Attempts'] > settings_dict['PERMITTED_ATTEMPTS']:
-            await remove_download(settings_dict, BASE_URL, API_KEY, queueId, current_defective[queueId]['title'], current_defective[queueId]['downloadId'], failType, blocklist, deleted_downloads)
-    logger.debug('check_permitted_attempts/defective_tracker.dict OUT: %s', str(defective_tracker.dict))
-    return
-
-async def remove_download(settings_dict, BASE_URL, API_KEY, queueId, queueTitle, downloadId, failType, blocklist, deleted_downloads):
-    # Removes downloads and creates log entry
-    logger.debug('remove_download/deleted_downloads.dict IN: %s' + str(deleted_downloads.dict))
-    if downloadId not in deleted_downloads.dict:
-        logger.info('>>> Removing %s download: %s', failType, queueTitle)
-        if not settings_dict['TEST_RUN']: await rest_delete(f'{BASE_URL}/queue/{queueId}', API_KEY, {'removeFromClient': True, 'blocklist': blocklist})
-        deleted_downloads.dict.append(downloadId)
-
-    logger.debug('remove_download/deleted_downloads.dict OUT: %s' + str(deleted_downloads.dict))
-    return
-
-########### MAIN FUNCTION ###########
-async def queue_cleaner(settings_dict, arr_type, defective_tracker, download_sizes):
-    # Read out correct instance depending on radarr/sonarr flag
-    run_dict = {}
-    if arr_type == 'radarr':
-        BASE_URL = settings_dict['RADARR_URL']
-        API_KEY = settings_dict['RADARR_KEY']
-        NAME = settings_dict['RADARR_NAME']
-        full_queue_param = 'includeUnknownMovieItems'
-    elif arr_type == 'sonarr':
-        BASE_URL = settings_dict['SONARR_URL']
-        API_KEY = settings_dict['SONARR_KEY']
-        NAME = settings_dict['SONARR_NAME']
-        full_queue_param = 'includeUnknownSeriesItems'
-    elif arr_type == 'lidarr':
-        BASE_URL = settings_dict['LIDARR_URL']
-        API_KEY = settings_dict['LIDARR_KEY']
-        NAME = settings_dict['LIDARR_NAME']
-        full_queue_param = 'includeUnknownArtistItems'
-    else:
-        logger.error('Unknown arr_type specified, exiting: %s', str(arr_type))
-        sys.exit()
-
-    # Cleans up the downloads queue
-    logger.verbose('Cleaning queue on %s:', NAME)
-    try:
-
-        full_queue = await get_queue(BASE_URL, API_KEY, params = {full_queue_param: True})
-        if not full_queue:
-            logger.verbose('>>> Queue is empty.')
-            return
-
-        deleted_downloads = Deleted_Downloads([])
-        items_detected = 0
-
-        #items_detected += await test_remove_ALL( settings_dict, BASE_URL, API_KEY, deleted_downloads, defective_tracker)
-
-        if settings_dict['REMOVE_FAILED']:
-            items_detected += await remove_failed( settings_dict, BASE_URL, API_KEY, deleted_downloads)
-
-        if settings_dict['REMOVE_STALLED']:
-            items_detected += await remove_stalled( settings_dict, BASE_URL, API_KEY, deleted_downloads, defective_tracker)
-
-        if settings_dict['REMOVE_METADATA_MISSING']:
-            items_detected += await remove_metadata_missing( settings_dict, BASE_URL, API_KEY, deleted_downloads, defective_tracker)
-
-        if settings_dict['REMOVE_ORPHANS']:
-            items_detected += await remove_orphans( settings_dict, BASE_URL, API_KEY, deleted_downloads, full_queue_param)
-
-        if settings_dict['REMOVE_UNMONITORED']:
-            items_detected += await remove_unmonitored( settings_dict, BASE_URL, API_KEY, deleted_downloads, arr_type)
-
-        if settings_dict['REMOVE_MISSING_FILES']:
-            items_detected += await remove_missing_files( settings_dict, BASE_URL, API_KEY, deleted_downloads, arr_type)
-
-        if settings_dict['REMOVE_SLOW']:
-            items_detected += await remove_slow( settings_dict, BASE_URL, API_KEY, deleted_downloads, defective_tracker, download_sizes)
-
-        if items_detected == 0:
-            logger.verbose('>>> Queue is clean.')
-    except Exception as error:
-        exc_type, exc_obj, exc_tb = sys.exc_info()
-        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
-        logger.warning('>>> Queue cleaning failed on %s. (File: %s / Line: %s / Error Message: %s / Error Type: %s)', NAME, fname, exc_tb.tb_lineno, error, exc_type)
-        logger.debug(traceback.format_exc())
-
-
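The check_permitted_attempts logic deleted above lives on as permittedAttemptsCheck in src/utils/shared.py (further down in this diff). Stripped of the tracker plumbing, its bookkeeping is a per-downloadId strike counter; a self-contained sketch, with the project's default threshold of 3 and an illustrative hash:

PERMITTED_ATTEMPTS = 3
attempts = {}  # downloadId -> number of times caught as defective

def exceeded_permitted_attempts(download_id):
    # Count one more strike; only True once the item was caught more often than permitted
    attempts[download_id] = attempts.get(download_id, 0) + 1
    return attempts[download_id] > PERMITTED_ATTEMPTS

# A download caught stalled on four consecutive runs is only removed on the fourth:
for run in range(1, 5):
    if exceeded_permitted_attempts('ABC123'):
        print(f'run {run}: removing download')
    else:
        print(f'run {run}: still tolerated')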
diff --git a/src/remove_failed.py b/src/remove_failed.py
new file mode 100644
index 0000000..0a85c02
--- /dev/null
+++ b/src/remove_failed.py
@@ -0,0 +1,28 @@
+from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download)
+import sys, os, traceback
+import logging, verboselogs
+logger = verboselogs.VerboseLogger(__name__)
+
+async def remove_failed(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
+    # Detects failed and triggers delete. Does not add to blocklist
+    try:
+        failType = 'failed'
+        queue = await get_queue(BASE_URL, API_KEY)
+        if not queue: return 0
+        logger.debug('remove_failed/queue IN: %s', formattedQueueInfo(queue))
+        # Find items affected
+        affectedItems = []
+        for queueItem in queue['records']:
+            if 'errorMessage' in queueItem and 'status' in queueItem:
+                if queueItem['status'] == 'failed':
+                    affectedItems.append(queueItem)
+        affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
+                                             addToBlocklist = False,
+                                             doPrivateTrackerCheck = False,
+                                             doProtectedDownloadCheck = False,
+                                             doPermittedAttemptsCheck = False)
+        return len(affectedItems)
+    except Exception as error:
+        errorDetails(NAME, error)
+        return 0
+
diff --git a/src/remove_metadata_missing.py b/src/remove_metadata_missing.py
new file mode 100644
index 0000000..31629c7
--- /dev/null
+++ b/src/remove_metadata_missing.py
@@ -0,0 +1,27 @@
+from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download)
+import sys, os, traceback
+import logging, verboselogs
+logger = verboselogs.VerboseLogger(__name__)
+
+async def remove_metadata_missing(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
+    # Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist
+    try:
+        failType = 'missing metadata'
+        queue = await get_queue(BASE_URL, API_KEY)
+        if not queue: return 0
+        logger.debug('remove_metadata_missing/queue IN: %s', formattedQueueInfo(queue))
+        # Find items affected
+        affectedItems = []
+        for queueItem in queue['records']:
+            if 'errorMessage' in queueItem and 'status' in queueItem:
+                if queueItem['status'] == 'queued' and queueItem['errorMessage'] == 'qBittorrent is downloading metadata':
+                    affectedItems.append(queueItem)
+        affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
+                                             addToBlocklist = True,
+                                             doPrivateTrackerCheck = True,
+                                             doProtectedDownloadCheck = True,
+                                             doPermittedAttemptsCheck = True)
+        return len(affectedItems)
+    except Exception as error:
+        errorDetails(NAME, error)
+        return 0
diff --git a/src/remove_missing_files.py b/src/remove_missing_files.py
new file mode 100644
index 0000000..5dfbfb0
--- /dev/null
+++ b/src/remove_missing_files.py
@@ -0,0 +1,29 @@
+from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download)
+import sys, os, traceback
+import logging, verboselogs
+logger = verboselogs.VerboseLogger(__name__)
+
+async def remove_missing_files(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
+    # Detects downloads broken because of missing files. Does not add to blocklist
+    try:
+        failType = 'missing files'
+        queue = await get_queue(BASE_URL, API_KEY)
+        if not queue: return 0
+        logger.debug('remove_missing_files/queue IN: %s', formattedQueueInfo(queue))
+        # Find items affected
+        affectedItems = []
+        for queueItem in queue['records']:
+            if 'errorMessage' in queueItem and 'status' in queueItem:
+                if (queueItem['status'] == 'warning' and
+                    (queueItem['errorMessage'] == 'DownloadClientQbittorrentTorrentStateMissingFiles' or
+                     queueItem['errorMessage'] == 'The download is missing files')):
+                    affectedItems.append(queueItem)
+        affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
+                                             addToBlocklist = False,
+                                             doPrivateTrackerCheck = False,
+                                             doProtectedDownloadCheck = False,
+                                             doPermittedAttemptsCheck = False)
+        return len(affectedItems)
+    except Exception as error:
+        errorDetails(NAME, error)
+        return 0
\ No newline at end of file
diff --git a/src/remove_orphans.py b/src/remove_orphans.py
new file mode 100644
index 0000000..54bfa08
--- /dev/null
+++ b/src/remove_orphans.py
@@ -0,0 +1,34 @@
+from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download)
+import sys, os, traceback
+import logging, verboselogs
+logger = verboselogs.VerboseLogger(__name__)
+
+async def remove_orphans(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, full_queue_param):
+    # Removes downloads belonging to movies/tv shows that have been deleted in the meantime. Does not add to blocklist
+    try:
+        failType = 'orphan'
+        full_queue = await get_queue(BASE_URL, API_KEY, params = {full_queue_param: True})
+        if not full_queue: return 0 # By now the queue may be empty
+        queue = await get_queue(BASE_URL, API_KEY)
+        logger.debug('remove_orphans/full queue IN: %s', str(full_queue))
+        logger.debug('remove_orphans/queue IN: %s', str(queue))
+
+        # Find items affected
+        # 1. create a list of the "known" queue items
+        queueIDs = [queueItem['id'] for queueItem in queue['records']] if queue else []
+        affectedItems = []
+        # 2. compare all queue items against the known ones; those that are not found are the "unknown" or "orphan" ones
+        for queueItem in full_queue['records']:
+            if queueItem['id'] not in queueIDs:
+                affectedItems.append(queueItem)
+
+        affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
+                                             addToBlocklist = False,
+                                             doPrivateTrackerCheck = True,
+                                             doProtectedDownloadCheck = True,
+                                             doPermittedAttemptsCheck = False)
+        logger.debug('remove_orphans/full queue OUT: %s', str(await get_queue(BASE_URL, API_KEY, params = {full_queue_param: True})))
+        return len(affectedItems)
+    except Exception as error:
+        errorDetails(NAME, error)
+        return 0
\ No newline at end of file
diff --git a/src/remove_slow.py b/src/remove_slow.py
new file mode 100644
index 0000000..a328ed6
--- /dev/null
+++ b/src/remove_slow.py
@@ -0,0 +1,61 @@
+from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download)
+import sys, os, traceback
+import logging, verboselogs
+logger = verboselogs.VerboseLogger(__name__)
+
+async def remove_slow(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, download_sizes_tracker):
+    # Detects slow downloads and triggers delete. Adds to blocklist
+    try:
+        failType = 'slow'
+        queue = await get_queue(BASE_URL, API_KEY)
+        if not queue: return 0
+        logger.debug('remove_slow/queue IN: %s', formattedQueueInfo(queue))
+        # Find items affected
+        affectedItems = []
+        alreadyCheckedDownloadIDs = []
+        for queueItem in queue['records']:
+            if 'downloadId' in queueItem and 'size' in queueItem and 'sizeleft' in queueItem and 'status' in queueItem:
+                if queueItem['downloadId'] not in alreadyCheckedDownloadIDs:
+                    alreadyCheckedDownloadIDs.append(queueItem['downloadId']) # One downloadId may occur in multiple queueItems - only check once for all of them per iteration
+                    # determine if the downloaded bits on average between this and the last iteration exceed the minimum threshold
+                    downloadedSize, previousSize, increment, speed = await getDownloadedSize(settings_dict, queueItem, download_sizes_tracker)
+                    if queueItem['status'] == 'downloading' and \
+                       queueItem['downloadId'] in download_sizes_tracker.dict and \
+                       speed is not None:
+                        if speed < settings_dict['MIN_DOWNLOAD_SPEED']:
+                            affectedItems.append(queueItem)
+                            logger.debug('remove_slow/slow speed detected: %s (Speed: %d KB/s, KB now: %s, KB previous: %s, Diff: %s, In Minutes: %s)', \
+                                         queueItem['title'], speed, downloadedSize, previousSize, increment, settings_dict['REMOVE_TIMER'])
+
+
+        affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
+                                             addToBlocklist = True,
+                                             doPrivateTrackerCheck = True,
+                                             doProtectedDownloadCheck = True,
+                                             doPermittedAttemptsCheck = True)
+        return len(affectedItems)
+    except Exception as error:
+        errorDetails(NAME, error)
+        return 0
+
+from src.utils.rest import (rest_get)
+async def getDownloadedSize(settings_dict, queueItem, download_sizes_tracker):
+    # Determines the speed of a download
+    # Since Sonarr/Radarr do not update the downloaded size in real time, fetch it directly from qBit if possible
+    if settings_dict['QBITTORRENT_URL']:
+        qbitInfo = await rest_get(settings_dict['QBITTORRENT_URL']+'/torrents/info',params={'hashes': queueItem['downloadId']}, cookies=settings_dict['QBIT_COOKIE'] )
+        downloadedSize = qbitInfo[0]['completed']
+    else:
+        logger.debug('getDownloadedSize/WARN: Using imprecise method to determine download increments because no direct qBit query is possible')
+        downloadedSize = queueItem['size'] - queueItem['sizeleft']
+    if queueItem['downloadId'] in download_sizes_tracker.dict:
+        previousSize = download_sizes_tracker.dict.get(queueItem['downloadId'])
+        increment = downloadedSize - previousSize
+        speed = round(increment / 1000 / (settings_dict['REMOVE_TIMER'] * 60),1)
+    else:
+        previousSize = None
+        increment = None
+        speed = None
+
+    download_sizes_tracker.dict[queueItem['downloadId']] = downloadedSize
+    return downloadedSize, previousSize, increment, speed
diff --git a/src/remove_stalled.py b/src/remove_stalled.py
new file mode 100644
index 0000000..0a1bf13
--- /dev/null
+++ b/src/remove_stalled.py
@@ -0,0 +1,27 @@
+from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download)
+import sys, os, traceback
+import logging, verboselogs
+logger = verboselogs.VerboseLogger(__name__)
+
+async def remove_stalled(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
+    # Detects stalled and triggers repeat check and subsequent delete. Adds to blocklist
+    try:
+        failType = 'stalled'
+        queue = await get_queue(BASE_URL, API_KEY)
+        if not queue: return 0
+        logger.debug('remove_stalled/queue IN: %s', formattedQueueInfo(queue))
+        # Find items affected
+        affectedItems = []
+        for queueItem in queue['records']:
+            if 'errorMessage' in queueItem and 'status' in queueItem:
+                if queueItem['status'] == 'warning' and queueItem['errorMessage'] == 'The download is stalled with no connections':
+                    affectedItems.append(queueItem)
+        affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
+                                             addToBlocklist = True,
+                                             doPrivateTrackerCheck = True,
+                                             doProtectedDownloadCheck = True,
+                                             doPermittedAttemptsCheck = True)
+        return len(affectedItems)
+    except Exception as error:
+        errorDetails(NAME, error)
+        return 0
diff --git a/src/remove_unmonitored.py b/src/remove_unmonitored.py
new file mode 100644
index 0000000..0192c4a
--- /dev/null
+++ b/src/remove_unmonitored.py
@@ -0,0 +1,39 @@
+from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download)
+import sys, os, traceback
+import logging, verboselogs
+logger = verboselogs.VerboseLogger(__name__)
+from src.utils.rest import rest_get
+
+async def remove_unmonitored(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, arr_type):
+    # Removes downloads belonging to movies/tv shows that are not monitored. Does not add to blocklist
+    try:
+        failType = 'unmonitored'
+        queue = await get_queue(BASE_URL, API_KEY)
+        if not queue: return 0
+        logger.debug('remove_unmonitored/queue IN: %s', formattedQueueInfo(queue))
+        # Find items affected
+        monitoredDownloadIDs = []
+        for queueItem in queue['records']:
+            if arr_type == 'sonarr':
+                isMonitored = (await rest_get(f'{BASE_URL}/episode/{str(queueItem["episodeId"])}', API_KEY))['monitored']
+            elif arr_type == 'radarr':
+                isMonitored = (await rest_get(f'{BASE_URL}/movie/{str(queueItem["movieId"])}', API_KEY))['monitored']
+            elif arr_type == 'lidarr':
+                isMonitored = (await rest_get(f'{BASE_URL}/album/{str(queueItem["albumId"])}', API_KEY))['monitored']
+            if isMonitored:
+                monitoredDownloadIDs.append(queueItem['downloadId'])
+
+        affectedItems = []
+        for queueItem in queue['records']:
+            if queueItem['downloadId'] not in monitoredDownloadIDs:
+                affectedItems.append(queueItem) # One downloadId may be shared by multiple queueItems. Only removes it if ALL queueItems are unmonitored
+
+        affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
+                                             addToBlocklist = False,
+                                             doPrivateTrackerCheck = True,
+                                             doProtectedDownloadCheck = True,
+                                             doPermittedAttemptsCheck = False)
+        return len(affectedItems)
+    except Exception as error:
+        errorDetails(NAME, error)
+        return 0
\ No newline at end of file
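The speed estimate in src/remove_slow.py above is simply the growth in completed bytes between two runs, divided by the run interval. A worked example with made-up sizes, using the same formula as getDownloadedSize:

REMOVE_TIMER = 10             # minutes between two clean-up runs
previous_size = 500_000_000   # completed bytes recorded at the previous run
current_size = 550_000_000    # completed bytes now (qBit 'completed', or size - sizeleft)

increment = current_size - previous_size
speed = round(increment / 1000 / (REMOVE_TIMER * 60), 1)  # KB/s, as in getDownloadedSize
print(speed)  # 83.3 -> flagged as slow if below MIN_DOWNLOAD_SPEED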
diff --git a/src/utils/nest_functions.py b/src/utils/nest_functions.py
index 343b5a5..1917bf5 100644
--- a/src/utils/nest_functions.py
+++ b/src/utils/nest_functions.py
@@ -1,5 +1,5 @@
 
-async def nested_set(dic, keys, value, matchConditions=None):
+def nested_set(dic, keys, value, matchConditions=None):
     # Sets the value of a key in a dictionary to a certain value.
     # If multiple items are present, it can filter for a matching item
     for key in keys[:-1]:
@@ -21,7 +21,7 @@ async def nested_set(dic, keys, value, matchConditions=None):
             dic[keys[-1]] = value
 
 
-async def add_keys_nested_dict(d, keys, defaultValue = None):
+def add_keys_nested_dict(d, keys, defaultValue = None):
     # Creates a nested value if key does not exist
     for key in keys[:-1]:
         if key not in d:
@@ -29,7 +29,7 @@ async def add_keys_nested_dict(d, keys, defaultValue = None):
         d = d[key]
     d.setdefault(keys[-1], defaultValue)
 
-async def nested_get(dic, return_attribute, matchConditions):
+def nested_get(dic, return_attribute, matchConditions):
     # Retrieves a list contained in return_attribute, found within dic based on matchConditions
     i = 0
     match = False
diff --git a/src/utils/shared.py b/src/utils/shared.py
new file mode 100644
index 0000000..b97880c
--- /dev/null
+++ b/src/utils/shared.py
@@ -0,0 +1,128 @@
+# Shared Functions
+import logging, verboselogs
+logger = verboselogs.VerboseLogger(__name__)
+from src.utils.rest import (rest_get, rest_delete, rest_post)
+from src.utils.nest_functions import (add_keys_nested_dict, nested_get)
+import sys, os, traceback
+
+async def get_queue(BASE_URL, API_KEY, params = {}):
+    # Retrieves the current queue
+    await rest_post(url=BASE_URL+'/command', json={'name': 'RefreshMonitoredDownloads'}, headers={'X-Api-Key': API_KEY})
+    totalRecords = (await rest_get(f'{BASE_URL}/queue', API_KEY, params))['totalRecords']
+    if totalRecords == 0:
+        return None
+    queue = await rest_get(f'{BASE_URL}/queue', API_KEY, {'page': '1', 'pageSize': totalRecords}|params)
+    return queue
+
+def privateTrackerCheck(settings_dict, affectedItems, failType, privateDowloadIDs):
+    # Ignores private tracker items (if setting is turned on)
+    for affectedItem in reversed(affectedItems):
+        if settings_dict['IGNORE_PRIVATE_TRACKERS'] and affectedItem['downloadId'] in privateDowloadIDs:
+            affectedItems.remove(affectedItem)
+    return affectedItems
+
+def protectedDownloadCheck(settings_dict, affectedItems, failType, protectedDownloadIDs):
+    # Checks if torrent is protected and skips it
+    for affectedItem in reversed(affectedItems):
+        if affectedItem['downloadId'] in protectedDownloadIDs:
+            logger.verbose('>>> Detected %s download, tagged not to be killed: %s', failType, affectedItem['title'])
+            affectedItems.remove(affectedItem)
+    return affectedItems
+
+
+async def execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, addToBlocklist, doPrivateTrackerCheck, doProtectedDownloadCheck, doPermittedAttemptsCheck):
+    # Goes over the affected items and performs the checks that are parametrized
+    try:
+        # De-duplicates the affected items (one downloadId may be shared by multiple affected items)
+        downloadIDs = []
+        for affectedItem in reversed(affectedItems):
+            if affectedItem['downloadId'] not in downloadIDs:
+                downloadIDs.append(affectedItem['downloadId'])
+            else:
+                affectedItems.remove(affectedItem)
+        # Skips protected items
+        if doPrivateTrackerCheck:
+            affectedItems = privateTrackerCheck(settings_dict, affectedItems, failType, privateDowloadIDs)
+        if doProtectedDownloadCheck:
+            affectedItems = protectedDownloadCheck(settings_dict, affectedItems, failType, protectedDownloadIDs)
+        # Checks if failing more often than permitted
+        if doPermittedAttemptsCheck:
+            affectedItems = permittedAttemptsCheck(settings_dict, affectedItems, failType, BASE_URL, defective_tracker)
+        # Deletes all downloads that have not survived the checks
+        for affectedItem in affectedItems:
+            await remove_download(settings_dict, BASE_URL, API_KEY, affectedItem, failType, addToBlocklist, deleted_downloads)
+        # Exit Logs
+        if settings_dict['LOG_LEVEL'] == 'DEBUG':
+            queue = await get_queue(BASE_URL, API_KEY)
+            logger.debug('execute_checks/queue OUT (failType: %s): %s', failType, formattedQueueInfo(queue))
+        # Return removed items
+        return affectedItems
+    except Exception as error:
+        errorDetails(NAME, error)
+        return []
+
+def permittedAttemptsCheck(settings_dict, affectedItems, failType, BASE_URL, defective_tracker):
+    # Checks if downloads are repeatedly found as stalled / stuck in metadata. Removes the items that are not exceeding permitted attempts
+    # 1. Show all affected items (for debugging)
+    logger.debug('permittedAttemptsCheck/affectedItems: %s', ', '.join(f"{affectedItem['id']}:{affectedItem['title']}:{affectedItem['downloadId']}" for affectedItem in affectedItems))
+
+    # 2. Check if those that were previously defective are no longer defective -> those are recovered
+    affectedDownloadIDs = [affectedItem['downloadId'] for affectedItem in affectedItems]
+    try:
+        recoveredDownloadIDs = [trackedDownloadIDs for trackedDownloadIDs in defective_tracker.dict[BASE_URL][failType] if trackedDownloadIDs not in affectedDownloadIDs]
+    except KeyError:
+        recoveredDownloadIDs = []
+    logger.debug('permittedAttemptsCheck/recoveredDownloadIDs: %s', str(recoveredDownloadIDs))
+    for recoveredDownloadID in recoveredDownloadIDs:
+        del defective_tracker.dict[BASE_URL][failType][recoveredDownloadID]
+    logger.debug('permittedAttemptsCheck/defective_tracker.dict IN: %s', str(defective_tracker.dict))
+
+    # 3. For those that are defective, add attempt + 1 if present before, or make attempt = 1.
+    for affectedItem in reversed(affectedItems):
+        try:
+            defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts'] += 1
+        except KeyError:
+            add_keys_nested_dict(defective_tracker.dict,[BASE_URL, failType, affectedItem['downloadId']], {'title': affectedItem['title'], 'Attempts': 1})
+        logger.info('>>> Detected %s download (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settings_dict['PERMITTED_ATTEMPTS']), affectedItem['title'])
+        # If not exceeding the number of permitted times, remove from being affected
+        if defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts'] <= settings_dict['PERMITTED_ATTEMPTS']:
+            affectedItems.remove(affectedItem)
+    logger.debug('permittedAttemptsCheck/defective_tracker.dict OUT: %s', str(defective_tracker.dict))
+    return affectedItems
+
+async def remove_download(settings_dict, BASE_URL, API_KEY, affectedItem, failType, addToBlocklist, deleted_downloads):
+    # Removes downloads and creates log entry
+    logger.debug('remove_download/deleted_downloads.dict IN: %s', str(deleted_downloads.dict))
+    if affectedItem['downloadId'] not in deleted_downloads.dict:
+        logger.info('>>> Removing %s download: %s', failType, affectedItem['title'])
+        if not settings_dict['TEST_RUN']:
+            await rest_delete(f'{BASE_URL}/queue/{affectedItem["id"]}', API_KEY, {'removeFromClient': True, 'blocklist': addToBlocklist})
+        deleted_downloads.dict.append(affectedItem['downloadId'])
+
+    logger.debug('remove_download/deleted_downloads.dict OUT: %s', str(deleted_downloads.dict))
+    return
+
+
+def errorDetails(NAME, error):
+    exc_type, exc_obj, exc_tb = sys.exc_info()
+    fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
+    logger.warning('>>> Queue cleaning failed on %s. (File: %s / Line: %s / Error Message: %s / Error Type: %s)', NAME, fname, exc_tb.tb_lineno, error, exc_type)
+    logger.debug(traceback.format_exc())
+    return
+
+def formattedQueueInfo(queue):
+    # Returns queueID, title, and downloadID
+    if not queue: return 'empty'
+    formatted_list = []
+    for record in queue['records']:
+        download_id = record['downloadId']
+        title = record['title']
+        item_id = record['id']
+        # Check if there is an entry with the same download_id and title
+        existing_entry = next((item for item in formatted_list if item['downloadId'] == download_id), None)
+        if existing_entry:
+            existing_entry['IDs'].append(item_id)
+        else:
+            new_entry = {'downloadId': download_id, 'downloadTitle': title, 'IDs': [item_id]}
+            formatted_list.append(new_entry)
+    return(formatted_list)
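A closing note on src/utils/shared.py: privateTrackerCheck, protectedDownloadCheck, and the de-duplication in execute_checks all prune affectedItems while looping over it, which is only safe because they iterate over reversed(affectedItems). A toy comparison (illustrative values, not project code) showing why forward iteration would silently skip items:

broken = [1, 1, 2, 3, 3]
for x in broken:          # forward: indices shift left after each removal
    if x in (1, 3):
        broken.remove(x)
print(broken)             # [1, 2, 3] -> one 1 and one 3 survived by being skipped

safe = [1, 1, 2, 3, 3]
for x in reversed(safe):  # reverse: removals only affect already-visited positions
    if x in (1, 3):
        safe.remove(x)
print(safe)               # [2] -> all targeted items removed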