diff --git a/README.md b/README.md index 02530cf..c9185ef 100644 --- a/README.md +++ b/README.md @@ -14,9 +14,9 @@ You may run this locally by launching main.py, or by pulling the docker image. You can find a sample docker-compose.yml in the docker folder. ## Dependencies & Hints -Use Sonarr v4 & Radarr v5 (currently 'nightly' tag instead of 'latest'), else certain features may not work correctly. -qBittorrent is recommended but not required. If you don't use qBittorrent, certain features won't work (such as tag-protection) -If you see strange errors such as "found 10 / 3 times", consider turning on the setting "Reject Blocklisted Torrent Hashes While Grabbing" on indexer-level (available in the nightly versions of sonarr/radarr (Untested: setting by now may also exist in readarr & lidarr)) +- Use Sonarr v4 & Radarr v5 (currently 'nightly' tag instead of 'latest'), else certain features may not work correctly. +- qBittorrent is recommended but not required. If you don't use qBittorrent, certain features won't work (such as tag-protection). +- If you see strange errors such as "found 10 / 3 times", consider turning on the setting "Reject Blocklisted Torrent Hashes While Grabbing" at the indexer level (available in the nightly versions of the *arr apps) ## Getting started There's two ways to run this: diff --git a/src/remove_failed.py b/src/remove_failed.py index 0a85c02..a140559 100644 --- a/src/remove_failed.py +++ b/src/remove_failed.py @@ -8,8 +8,8 @@ async def remove_failed(settings_dict, BASE_URL, API_KEY, NAME, deleted_download try: failType = 'failed' queue = await get_queue(BASE_URL, API_KEY) - if not queue: return 0 logger.debug('remove_failed/queue IN: %s', formattedQueueInfo(queue)) + if not queue: return 0 # Find items affected affectedItems = [] for queueItem in queue['records']: diff --git a/src/remove_metadata_missing.py b/src/remove_metadata_missing.py index 31629c7..6c96311 100644 --- a/src/remove_metadata_missing.py +++ b/src/remove_metadata_missing.py
@@ -8,8 +8,8 @@ async def remove_metadata_missing(settings_dict, BASE_URL, API_KEY, NAME, delete try: failType = 'missing metadata' queue = await get_queue(BASE_URL, API_KEY) - if not queue: return 0 logger.debug('remove_metadata_missing/queue IN: %s', formattedQueueInfo(queue)) + if not queue: return 0 # Find items affected affectedItems = [] for queueItem in queue['records']: diff --git a/src/remove_missing_files.py b/src/remove_missing_files.py index 5dfbfb0..98c1349 100644 --- a/src/remove_missing_files.py +++ b/src/remove_missing_files.py @@ -8,8 +8,8 @@ async def remove_missing_files(settings_dict, BASE_URL, API_KEY, NAME, deleted_d try: failType = 'missing files' queue = await get_queue(BASE_URL, API_KEY) - if not queue: return 0 logger.debug('remove_missing_files/queue IN: %s', formattedQueueInfo(queue)) + if not queue: return 0 # Find items affected affectedItems = [] for queueItem in queue['records']: diff --git a/src/remove_orphans.py b/src/remove_orphans.py index 54bfa08..f2c044e 100644 --- a/src/remove_orphans.py +++ b/src/remove_orphans.py @@ -8,9 +8,9 @@ async def remove_orphans(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloa try: failType = 'orphan' full_queue = await get_queue(BASE_URL, API_KEY, params = {full_queue_param: True}) - if not full_queue: return 0 # By now the queue may be empty queue = await get_queue(BASE_URL, API_KEY) logger.debug('remove_orphans/full queue IN: %s', str(full_queue)) + if not full_queue: return 0 # By now the queue may be empty logger.debug('remove_orphans/queue IN: %s', str(queue)) # Find items affected diff --git a/src/remove_slow.py b/src/remove_slow.py index 58ec9d1..6975573 100644 --- a/src/remove_slow.py +++ b/src/remove_slow.py @@ -9,7 +9,7 @@ async def remove_slow(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, failType = 'slow' queue = await get_queue(BASE_URL, API_KEY) logger.debug('remove_slow/queue IN: %s', formattedQueueInfo(queue)) - if not queue: return 0 + if not queue: return 0 # 
Find items affected affectedItems = [] alreadyCheckedDownloadIDs = [] diff --git a/src/remove_stalled.py b/src/remove_stalled.py index bf70e38..d386435 100644 --- a/src/remove_stalled.py +++ b/src/remove_stalled.py @@ -9,7 +9,7 @@ async def remove_stalled(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloa failType = 'stalled' queue = await get_queue(BASE_URL, API_KEY) logger.debug('remove_stalled/queue IN: %s', formattedQueueInfo(queue)) - if not queue: return 0 + if not queue: return 0 # Find items affected affectedItems = [] for queueItem in queue['records']: diff --git a/src/remove_unmonitored.py b/src/remove_unmonitored.py index df32831..6036875 100644 --- a/src/remove_unmonitored.py +++ b/src/remove_unmonitored.py @@ -10,7 +10,7 @@ async def remove_unmonitored(settings_dict, BASE_URL, API_KEY, NAME, deleted_dow failType = 'unmonitored' queue = await get_queue(BASE_URL, API_KEY) logger.debug('remove_unmonitored/queue IN: %s', formattedQueueInfo(queue)) - if not queue: return 0 + if not queue: return 0 # Find items affected monitoredDownloadIDs = [] for queueItem in queue['records']: diff --git a/src/utils/shared.py b/src/utils/shared.py index b7bedbd..742df02 100644 --- a/src/utils/shared.py +++ b/src/utils/shared.py @@ -82,14 +82,17 @@ def permittedAttemptsCheck(settings_dict, affectedItems, failType, BASE_URL, def try: defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts'] += 1 except KeyError: - add_keys_nested_dict(defective_tracker.dict,[BASE_URL, failType, affectedItem['downloadId']], {'title': affectedItem['title'], 'Attempts': 1}) - logger.info('>>> Detected %s download (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settings_dict['PERMITTED_ATTEMPTS']), affectedItem['title']) + add_keys_nested_dict(defective_tracker.dict,[BASE_URL, failType, affectedItem['downloadId']], {'title': affectedItem['title'], 'Attempts': 1}) + 
attempts_left = settings_dict['PERMITTED_ATTEMPTS'] - defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts'] # If not exceeding the number of permitted times, remove from being affected - if defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts'] <= settings_dict['PERMITTED_ATTEMPTS']: + if attempts_left >= 0: # Still got attempts left + logger.info('>>> Detected %s download (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settings_dict['PERMITTED_ATTEMPTS']), affectedItem['title']) affectedItems.remove(affectedItem) - # else: - # # Will be deleted - reset the counter to 0 - # del defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']] + if attempts_left <= -1: # Too many attempts + logger.info('>>> Detected %s download too many times (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settings_dict['PERMITTED_ATTEMPTS']), affectedItem['title']) + if attempts_left < -2: # Too many attempts and should already have been removed + # If supposedly deleted item keeps coming back, print out guidance for "Reject Blocklisted Torrent Hashes While Grabbing" + logger.verbose('>>> [Tip!] Since this download should already have been removed in a previous iteration but keeps coming back, this indicates the blocking of the torrent does not work correctly. 
Consider turning on the option "Reject Blocklisted Torrent Hashes While Grabbing" on the indexer in the *arr app: %s', affectedItem['title']) logger.debug('permittedAttemptsCheck/defective_tracker.dict OUT: %s', str(defective_tracker.dict)) return affectedItems @@ -114,17 +117,22 @@ def errorDetails(NAME, error): return def formattedQueueInfo(queue): - # Returns queueID, title, and downloadID - formatted_list = [] - for record in queue['records']: - download_id = record['downloadId'] - title = record['title'] - item_id = record['id'] - # Check if there is an entry with the same download_id and title - existing_entry = next((item for item in formatted_list if item['downloadId'] == download_id), None) - if existing_entry: - existing_entry['IDs'].append(item_id) - else: - new_entry = {'downloadId': download_id, 'downloadTitle': title, 'IDs': [item_id]} - formatted_list.append(new_entry) - return(formatted_list) + try: + # Returns queueID, title, and downloadID + if not queue: return 'empty' + formatted_list = [] + for record in queue['records']: + download_id = record['downloadId'] + title = record['title'] + item_id = record['id'] + # Check if there is an entry with the same download_id and title + existing_entry = next((item for item in formatted_list if item['downloadId'] == download_id), None) + if existing_entry: + existing_entry['IDs'].append(item_id) + else: + new_entry = {'downloadId': download_id, 'downloadTitle': title, 'IDs': [item_id]} + formatted_list.append(new_entry) + return(formatted_list) + except Exception as error: + errorDetails('formattedQueueInfo', error) + return 'error' \ No newline at end of file