diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml index d1143c9..bb52dd9 100644 --- a/.github/workflows/dev.yml +++ b/.github/workflows/dev.yml @@ -25,9 +25,10 @@ jobs: run: | python -m pip install --upgrade pip pip install -r docker/requirements.txt + - name: Test with pytest run: | - python3 -m pytest --import-mode=append tests/ + python3 -m pytest -o log_cli=false build-dev: needs: unit-tests diff --git a/.pytest.ini b/.pytest.ini new file mode 100644 index 0000000..308c2d9 --- /dev/null +++ b/.pytest.ini @@ -0,0 +1,8 @@ +[pytest] +log_cli = true +addopts = -q --tb=short -s +log_cli_level = INFO +log_cli_format = %(asctime)s - %(levelname)s - %(name)s - %(message)s +log_cli_date_format = %Y-%m-%d %H:%M:%S +testpaths = + tests \ No newline at end of file diff --git a/CONTRIBUTE.md b/CONTRIBUTING.md similarity index 55% rename from CONTRIBUTE.md rename to CONTRIBUTING.md index e46caf6..1ab44ad 100644 --- a/CONTRIBUTE.md +++ b/CONTRIBUTING.md @@ -19,7 +19,7 @@ To raise a new feature request, please go through the following steps: - Be willing to provide more details if asked for them and help testing the feature ## Bug Reports -Bugs in this application are scarce. If there are any, there most likely features ;-) +Bugs in this application are scarce. 
If there are any, they're most likely features ;-) Please go follow these steps to submit a bug: - Check if this bug has previously been reported - Add [Bug] at the beginning of the issue title - Describe your setup and how the bug can be reproduced - Attach verbose logs - To switch on verbose logs: 1) Got to your config.conf and change LOG_LEVEL to VERBOSE @@ -29,28 +29,32 @@ Please go follow these steps to submit a bug: 2) Turn off all remove functions but one where you expect a removal (example: REMOVE_STALLED: True and the rest on False) 3) Let it run until the supposed remove should be trigged 4) Paste the full logs to a pastebin -- If helpful: Paste a screenshot of qbit and the affected *arr app to a pasteimg -- Be willing to provide more details if asked for them and help testing the bug fix +5) Share your settings (docker-compose or config.conf) +6) Optional: If helpful, share screenshots showing the problem (from your arr-app or qbit) +7) Be responsive and provide more details if asked for them, and help testing the bug fix ### Code Contributions Code contributions are very welcome - thanks for helping improve this app! -1) Please always branch out from the "dev" branch, not from the "main" branch -2) Please test your code locally -3) Please only commit code that you have written yourself and is not owned by anybody else -4) Please create a PR against the "dev" branch -5) Once I have reviewed it, I will merge it and it will create teh "dev" image -6) Please help testing that the dev image works, before we then commit it to the "latest" image (from main branch) +1) Always branch out from the "dev" branch, not from the "main" branch +2) Test your code locally +3) Only commit code that you have written yourself and is not owned by anybody else +4) Create a PR against the "dev" branch +5) Be responsive to code review +6) Once the code is reviewed and OK, it will be merged to dev branch, which will create the "dev"-docker image +7) Help testing that the dev image works +8) Finally, we will then commit the change to the main branch, which will create the "latest"-docker image You do not need to know about how to create
docker images to contribute here. To get started: -1) Clone the git repository to your local machine -2) Create a virtual python environment (python3 -m venv venv) -3) Activate the virtual environment (source venv/bin/activate) -4) Install python libraries (pip install -r docker/requirements.txt) +1) Create a fork of decluttarr and clone it (dev branch) to your local machine: `git clone -b dev https://github.com/yourName/decluttarr` +2) Create a virtual python environment (`python3 -m venv venv`) +3) Activate the virtual environment (`source venv/bin/activate`) +4) Install python libraries (`pip install -r docker/requirements.txt`) 5) Adjust the config/config.conf to your needs 6) Adjust the code in the files as needed -7) Run the script (python3 main.py) -8) Create a PR once you are ready - +7) Run the script (`python3 main.py`) +8) Push your changes to your own git repo +9) Test the dev-image it creates automatically +10) Create the PR from your repo to ManiMatter/decluttarr (dev branch) diff --git a/config/definitions.py b/config/definitions.py index 49b7307..3c54354 100644 --- a/config/definitions.py +++ b/config/definitions.py @@ -56,8 +56,7 @@ QBITTORRENT_PASSWORD = get_config_value('QBITTORRENT_PASSWORD', ######################################################################################################################## ########### Validate settings - -if not (RADARR_URL or SONARR_URL or LIDARR_URL or READARR_URL or WHISPARR_URL): +if not (IS_IN_PYTEST or RADARR_URL or SONARR_URL or LIDARR_URL or READARR_URL or WHISPARR_URL): print(f'[ ERROR ]: No Radarr/Sonarr/Lidarr/Readarr/Whisparr URLs specified (nothing to monitor)') exit() diff --git a/config/env_vars.py b/config/env_vars.py index 3ed7d65..5bf0d93 100644 --- a/config/env_vars.py +++ b/config/env_vars.py @@ -2,3 +2,4 @@ import os IS_IN_DOCKER = os.environ.get('IS_IN_DOCKER') IMAGE_TAG = os.environ.get('IMAGE_TAG', 'Local') SHORT_COMMIT_ID = os.environ.get('SHORT_COMMIT_ID', 
'n/a') +IS_IN_PYTEST = os.environ.get('IS_IN_PYTEST') \ No newline at end of file diff --git a/main.py b/main.py index a60f3db..053fccf 100644 --- a/main.py +++ b/main.py @@ -8,6 +8,7 @@ from config.definitions import settingsDict from src.utils.loadScripts import * from src.decluttarr import queueCleaner from src.utils.rest import rest_get, rest_post +from src.utils.trackers import Defective_Tracker, Download_Sizes_Tracker # Hide SSL Verification Warnings if settingsDict['SSL_VERIFICATION']==False: @@ -17,16 +18,6 @@ if settingsDict['SSL_VERIFICATION']==False: # Set up logging setLoggingFormat(settingsDict) -# Set up classes that allow tracking of items from one loop to the next -class Defective_Tracker: - # Keeps track of which downloads were already caught as stalled previously - def __init__(self, dict): - self.dict = dict -class Download_Sizes_Tracker: - # Keeps track of the file sizes of the downloads - def __init__(self, dict): - self.dict = dict - # Main function async def main(settingsDict): # Adds to settings Dict the instances that are actually configures @@ -49,17 +40,20 @@ async def main(settingsDict): # Check outdated upgradeChecks(settingsDict) - # Display current settings when loading script + # Welcome Message + showWelcome() + + # Current Settings showSettings(settingsDict) - + # Check Minimum Version and if instances are reachable and retrieve qbit cookie settingsDict = await instanceChecks(settingsDict) # Create qBit protection tag if not existing await createQbitProtectionTag(settingsDict) - # Show Logger settings - showLoggerSettings(settingsDict) + # Show Logger Level + showLoggerLevel(settingsDict) # Start Cleaning while True: diff --git a/src/decluttarr.py b/src/decluttarr.py index e556788..bad6276 100644 --- a/src/decluttarr.py +++ b/src/decluttarr.py @@ -10,12 +10,7 @@ from src.jobs.remove_orphans import remove_orphans from src.jobs.remove_slow import remove_slow from src.jobs.remove_stalled import remove_stalled from 
src.jobs.remove_unmonitored import remove_unmonitored - -class Deleted_Downloads: - # Keeps track of which downloads have already been deleted (to not double-delete) - def __init__(self, dict): - self.dict = dict - +from src.utils.trackers import Deleted_Downloads async def queueCleaner(settingsDict, arr_type, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs): # Read out correct instance depending on radarr/sonarr flag diff --git a/src/jobs/remove_failed_imports.py b/src/jobs/remove_failed_imports.py index dc2a7cf..bf84582 100644 --- a/src/jobs/remove_failed_imports.py +++ b/src/jobs/remove_failed_imports.py @@ -1,4 +1,4 @@ -from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download) +from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, execute_checks) import sys, os, traceback import logging, verboselogs logger = verboselogs.VerboseLogger(__name__) @@ -7,45 +7,70 @@ async def remove_failed_imports(settingsDict, BASE_URL, API_KEY, NAME, deleted_d # Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. 
Adds to blocklist try: failType = 'failed import' - queue = await get_queue(BASE_URL, API_KEY) + queue = await get_queue(BASE_URL, API_KEY) logger.debug('remove_failed_imports/queue IN: %s', formattedQueueInfo(queue)) if not queue: return 0 + # Find items affected affectedItems = [] + + # Check if any patterns have been specified + patterns = settingsDict.get('FAILED_IMPORT_MESSAGE_PATTERNS', []) + if not patterns: # If patterns is empty or not present + patterns = None + for queueItem in queue['records']: if 'status' in queueItem \ and 'trackedDownloadStatus' in queueItem \ and 'trackedDownloadState' in queueItem \ and 'statusMessages' in queueItem: + removal_messages = [] if queueItem['status'] == 'completed' \ and queueItem['trackedDownloadStatus'] == 'warning' \ and queueItem['trackedDownloadState'] in {'importPending', 'importFailed', 'importBlocked'}: # Find messages that find specified pattern and put them into a "removal_message" that will be displayed in the logger when removing the affected item - removal_messages = ['Tracked Download State: ' + queueItem['trackedDownloadState']] - for statusMessage in queueItem['statusMessages']: - if not settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']: # No patterns defined - including all status messages in the removal_messages - removal_messages.append ('Status Messages (All):') - removal_messages.extend(f"- {msg}" for msg in statusMessage.get('messages', [])) - break - - removal_messages.append ('Status Messages (matching specified patterns):') - messages = statusMessage.get('messages', []) - for message in messages: - if any(pattern in message for pattern in settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']): - removal_messages.append(f"- {message}") + if not patterns: + # No patterns defined - including all status messages in the removal_messages + removal_messages.append ('>>>>> Status Messages (All):') + for statusMessage in queueItem['statusMessages']: + removal_messages.extend(f">>>>> - {message}" for message in 
statusMessage.get('messages', [])) + else: + # Specific patterns defined - only removing if any of these are matched + for statusMessage in queueItem['statusMessages']: + messages = statusMessage.get('messages', []) + for message in messages: + if any(pattern in message for pattern in patterns): + removal_messages.append(f">>>>> - {message}") + if removal_messages: + removal_messages.insert (0, '>>>>> Status Messages (matching specified patterns):') + + if removal_messages: + removal_messages = list(dict.fromkeys(removal_messages)) # deduplication + removal_messages.insert(0,'>>>>> Tracked Download State: ' + queueItem['trackedDownloadState']) + queueItem['removal_messages'] = removal_messages + affectedItems.append(queueItem) - queueItem['removal_messages'] = removal_messages - affectedItems.append(queueItem) + check_kwargs = { + 'settingsDict': settingsDict, + 'affectedItems': affectedItems, + 'failType': failType, + 'BASE_URL': BASE_URL, + 'API_KEY': API_KEY, + 'NAME': NAME, + 'deleted_downloads': deleted_downloads, + 'defective_tracker': defective_tracker, + 'privateDowloadIDs': privateDowloadIDs, + 'protectedDownloadIDs': protectedDownloadIDs, + 'addToBlocklist': True, + 'doPrivateTrackerCheck': False, + 'doProtectedDownloadCheck': True, + 'doPermittedAttemptsCheck': False, + 'extraParameters': {'keepTorrentForPrivateTrackers': True} + } + affectedItems = await execute_checks(**check_kwargs) - affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, - addToBlocklist = True, - doPrivateTrackerCheck = False, - doProtectedDownloadCheck = True, - doPermittedAttemptsCheck = False, - extraParameters = {'keepTorrentForPrivateTrackers': True} - ) return len(affectedItems) except Exception as error: errorDetails(NAME, error) diff --git a/src/utils/loadScripts.py b/src/utils/loadScripts.py index 88a9050..a7bb1e1 100644 --- a/src/utils/loadScripts.py 
+++ b/src/utils/loadScripts.py @@ -51,16 +51,19 @@ async def getProtectedAndPrivateFromQbit(settingsDict): return protectedDownloadIDs, privateDowloadIDs - -def showSettings(settingsDict): - # Prints out the settings - fmt = '{0.days} days {0.hours} hours {0.minutes} minutes' +def showWelcome(): + # Welcome Message logger.info('#' * 50) logger.info('Decluttarr - Application Started!') logger.info('') logger.info('Like this app? Thanks for giving it a ⭐️ on GitHub!') logger.info('https://github.com/ManiMatter/decluttarr/') - logger.info('') + logger.info('') + return + +def showSettings(settingsDict): + # Settings Message + fmt = '{0.days} days {0.hours} hours {0.minutes} minutes' logger.info('*** Current Settings ***') logger.info('Version: %s', settingsDict['IMAGE_TAG']) logger.info('Commit: %s', settingsDict['SHORT_COMMIT_ID']) @@ -205,7 +208,7 @@ async def createQbitProtectionTag(settingsDict): if not settingsDict['TEST_RUN']: await rest_post(url=settingsDict['QBITTORRENT_URL']+'/torrents/createTags', data={'tags': settingsDict['NO_STALLED_REMOVAL_QBIT_TAG']}, headers={'content-type': 'application/x-www-form-urlencoded'}, cookies=settingsDict['QBIT_COOKIE']) -def showLoggerSettings(settingsDict): +def showLoggerLevel(settingsDict): logger.info('#' * 50) if settingsDict['LOG_LEVEL'] == 'INFO': logger.info('LOG_LEVEL = INFO: Only logging changes (switch to VERBOSE for more info)') diff --git a/src/utils/main.py b/src/utils/main.py new file mode 100644 index 0000000..053fccf --- /dev/null +++ b/src/utils/main.py @@ -0,0 +1,77 @@ +# Import Libraries +import asyncio +import logging, verboselogs +logger = verboselogs.VerboseLogger(__name__) +import json +# Import Functions +from config.definitions import settingsDict +from src.utils.loadScripts import * +from src.decluttarr import queueCleaner +from src.utils.rest import rest_get, rest_post +from src.utils.trackers import Defective_Tracker, Download_Sizes_Tracker + +# Hide SSL Verification Warnings +if 
settingsDict['SSL_VERIFICATION']==False: + import warnings + warnings.filterwarnings("ignore", message="Unverified HTTPS request") + +# Set up logging +setLoggingFormat(settingsDict) + +# Main function +async def main(settingsDict): +# Adds to settings Dict the instances that are actually configures + settingsDict['INSTANCES'] = [] + for arrApplication in settingsDict['SUPPORTED_ARR_APPS']: + if settingsDict[arrApplication + '_URL']: + settingsDict['INSTANCES'].append(arrApplication) + + # Pre-populates the dictionaries (in classes) that track the items that were already caught as having problems or removed + defectiveTrackingInstances = {} + for instance in settingsDict['INSTANCES']: + defectiveTrackingInstances[instance] = {} + defective_tracker = Defective_Tracker(defectiveTrackingInstances) + download_sizes_tracker = Download_Sizes_Tracker({}) + + # Get name of arr-instances + for instance in settingsDict['INSTANCES']: + settingsDict = await getArrInstanceName(settingsDict, instance) + + # Check outdated + upgradeChecks(settingsDict) + + # Welcome Message + showWelcome() + + # Current Settings + showSettings(settingsDict) + + # Check Minimum Version and if instances are reachable and retrieve qbit cookie + settingsDict = await instanceChecks(settingsDict) + + # Create qBit protection tag if not existing + await createQbitProtectionTag(settingsDict) + + # Show Logger Level + showLoggerLevel(settingsDict) + + # Start Cleaning + while True: + logger.verbose('-' * 50) + # Cache protected (via Tag) and private torrents + protectedDownloadIDs, privateDowloadIDs = await getProtectedAndPrivateFromQbit(settingsDict) + + # Run script for each instance + for instance in settingsDict['INSTANCES']: + await queueCleaner(settingsDict, instance, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs) + logger.verbose('') + logger.verbose('Queue clean-up complete!') + + # Wait for the next run + await asyncio.sleep(settingsDict['REMOVE_TIMER']*60) + 
return + +if __name__ == '__main__': + asyncio.run(main(settingsDict)) + + diff --git a/src/utils/shared.py b/src/utils/shared.py index b3b7d42..3ad2756 100644 --- a/src/utils/shared.py +++ b/src/utils/shared.py @@ -55,7 +55,7 @@ def protectedDownloadCheck(settingsDict, affectedItems, failType, protectedDownl return affectedItems -async def execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, addToBlocklist, doPrivateTrackerCheck, doProtectedDownloadCheck, doPermittedAttemptsCheck, extraParameters = []): +async def execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, addToBlocklist, doPrivateTrackerCheck, doProtectedDownloadCheck, doPermittedAttemptsCheck, extraParameters = {}): # Goes over the affected items and performs the checks that are parametrized try: # De-duplicates the affected items (one downloadid may be shared by multiple affected items) @@ -143,8 +143,8 @@ async def remove_download(settingsDict, BASE_URL, API_KEY, affectedItem, failTyp logger.info('>>> Removing %s download (without removing from torrent client): %s', failType, affectedItem['title']) # Print out detailed removal messages (if any were added in the jobs) - if removal_messages in affectedItem: - for removal_message in affectedItem.removal_messages: + if 'removal_messages' in affectedItem: + for removal_message in affectedItem['removal_messages']: logger.info(removal_message) if not settingsDict['TEST_RUN']: diff --git a/src/utils/trackers.py b/src/utils/trackers.py new file mode 100644 index 0000000..24701c8 --- /dev/null +++ b/src/utils/trackers.py @@ -0,0 +1,15 @@ +# Set up classes that allow tracking of items from one loop to the next +class Defective_Tracker: + # Keeps track of which downloads were already caught as stalled previously + def __init__(self, dict): + self.dict = 
dict + +class Download_Sizes_Tracker: + # Keeps track of the file sizes of the downloads + def __init__(self, dict): + self.dict = dict + +class Deleted_Downloads: + # Keeps track of which downloads have already been deleted (to not double-delete) + def __init__(self, dict): + self.dict = dict \ No newline at end of file diff --git a/tests/jobs/remove_failed_imports/mock_data/mock_data_1.json b/tests/jobs/remove_failed_imports/mock_data/mock_data_1.json new file mode 100644 index 0000000..9f72019 --- /dev/null +++ b/tests/jobs/remove_failed_imports/mock_data/mock_data_1.json @@ -0,0 +1,33 @@ +{ + "records": [ + { + "id": 1, + "downloadId": "A123", + "title": "Sonarr Title 1", + "status": "completed", + "trackedDownloadStatus": "ok", + "trackedDownloadState": "importing", + "statusMessages": [] + }, + { + "id": 2, + "downloadId": "B123", + "title": "Sonarr Title 2", + "status": "completed", + "trackedDownloadStatus": "warning", + "trackedDownloadState": "importBlocked", + "statusMessages": [ + { + "title": "One or more episodes expected in this release were not imported or missing from the release", + "messages": [] + }, + { + "title": "Sonarr Title 2.mkv", + "messages": [ + "Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv" + ] + } + ] + } + ] +} diff --git a/tests/jobs/remove_failed_imports/mock_data/mock_data_2.json b/tests/jobs/remove_failed_imports/mock_data/mock_data_2.json new file mode 100644 index 0000000..a6a6922 --- /dev/null +++ b/tests/jobs/remove_failed_imports/mock_data/mock_data_2.json @@ -0,0 +1,32 @@ +{ + "records": [ + { + "id": 1, + "downloadId": "A123", + "title": "Sonarr Title 1", + "status": "completed", + "trackedDownloadStatus": "warning", + "trackedDownloadState": "importBlocked", + "statusMessages": [ + { + "title": "First Message", + "messages": [ + "Message 1 - hello world" + ] + }, + { + "title": "Duplicate of First Message", + "messages": [ + "Message 1 - hello world" + ] + }, + { + "title": "Second of Message", + 
"messages": [ + "Message 2 - goodbye all" + ] + } + ] + } + ] +} diff --git a/tests/jobs/remove_failed_imports/mock_data/mock_data_3.json b/tests/jobs/remove_failed_imports/mock_data/mock_data_3.json new file mode 100644 index 0000000..1f02acb --- /dev/null +++ b/tests/jobs/remove_failed_imports/mock_data/mock_data_3.json @@ -0,0 +1,60 @@ +{ + "records": [ + { + "id": 1, + "downloadId": "A123", + "title": "Sonarr Title 1", + "status": "completed", + "trackedDownloadStatus": "warning", + "trackedDownloadState": "importPending", + "statusMessages": [ + { + "title": "First Message", + "messages": [ + "Message 1 - hello world" + ] + }, + { + "title": "Duplicate of First Message", + "messages": [ + "Message 1 - hello world" + ] + }, + { + "title": "Second of Message", + "messages": [ + "Message 2 - goodbye all" + ] + } + ] + }, + { + "id": 2, + "downloadId": "B123", + "title": "Sonarr Title 2", + "status": "completed", + "trackedDownloadStatus": "warning", + "trackedDownloadState": "importFailed", + "statusMessages": [ + { + "title": "First Message", + "messages": [ + "Message 1 - hello world" + ] + }, + { + "title": "Duplicate of First Message", + "messages": [ + "Message 1 - hello world" + ] + }, + { + "title": "Second of Message", + "messages": [ + "Message 2 - goodbye all" + ] + } + ] + } + ] +} diff --git a/tests/jobs/remove_failed_imports/remove_failed_imports_utils.py b/tests/jobs/remove_failed_imports/remove_failed_imports_utils.py new file mode 100644 index 0000000..68eef7e --- /dev/null +++ b/tests/jobs/remove_failed_imports/remove_failed_imports_utils.py @@ -0,0 +1,65 @@ +import os +os.environ['IS_IN_PYTEST'] = 'true' +import logging +import json +import pytest +from typing import Dict, Set, Any +from unittest.mock import AsyncMock +from src.jobs.remove_failed_imports import remove_failed_imports + + +# Utility function to load mock data +def load_mock_data(file_name): + with open(file_name, 'r') as file: + return json.load(file) + +async def 
mock_get_queue(mock_data): + logging.debug("Mock get_queue called") + return mock_data + +async def run_test( + settingsDict: Dict[str, Any], + expected_removal_messages: Dict[int, Set[str]], + mock_data_file: str, + monkeypatch: pytest.MonkeyPatch +) -> None: + # Load mock data + mock_data = load_mock_data(mock_data_file) + + # Create an AsyncMock for execute_checks with side effect + execute_checks_mock = AsyncMock() + + # Define a side effect function + def side_effect(*args, **kwargs): + logging.debug("Mock execute_checks called with kwargs: %s", kwargs) + # Return the affectedItems from kwargs + return kwargs.get('affectedItems', []) + + # Attach side effect to the mock + execute_checks_mock.side_effect = side_effect + + # Create an async mock for get_queue that returns mock_data + mock_get_queue = AsyncMock(return_value=mock_data) + + # Patch the methods + monkeypatch.setattr('src.jobs.remove_failed_imports.get_queue', mock_get_queue) + monkeypatch.setattr('src.jobs.remove_failed_imports.execute_checks', execute_checks_mock) + + # Call the function + await remove_failed_imports(settingsDict=settingsDict, BASE_URL='', API_KEY='', NAME='', deleted_downloads=set(), defective_tracker=set(), protectedDownloadIDs=set(), privateDowloadIDs=set()) + + # Assertions + assert execute_checks_mock.called # Ensure the mock was called + + # Assert expected items are there + args, kwargs = execute_checks_mock.call_args + affectedItems = kwargs.get('affectedItems', []) + affectedItems_ids = {item['id'] for item in affectedItems} + expectedItems_ids = set(expected_removal_messages.keys()) + assert len(affectedItems) == len(expected_removal_messages) + assert affectedItems_ids == expectedItems_ids + + # Assert all expected messages are there + for affectedItem in affectedItems: + assert 'removal_messages' in affectedItem + assert expected_removal_messages[affectedItem['id']] == set(affectedItem.get('removal_messages', [])) diff --git 
a/tests/jobs/remove_failed_imports/test_remove_failed_imports_1.py b/tests/jobs/remove_failed_imports/test_remove_failed_imports_1.py new file mode 100644 index 0000000..1464767 --- /dev/null +++ b/tests/jobs/remove_failed_imports/test_remove_failed_imports_1.py @@ -0,0 +1,39 @@ +import pytest +from remove_failed_imports_utils import run_test +mock_data_file = 'tests/jobs/remove_failed_imports/mock_data/mock_data_1.json' + +@pytest.mark.asyncio +async def test_with_pattern_one_message(monkeypatch): + settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['not found in the grabbed release']} + expected_removal_messages = { + 2: { + '>>>>> Tracked Download State: importBlocked', + '>>>>> Status Messages (matching specified patterns):', + '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv', + } + } + await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch) + +@pytest.mark.asyncio +async def test_with_empty_pattern_one_message(monkeypatch): + settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': []} + expected_removal_messages = { + 2: { + '>>>>> Tracked Download State: importBlocked', + '>>>>> Status Messages (All):', + '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv', + } + } + await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch) + +@pytest.mark.asyncio +async def test_without_pattern_one_message(monkeypatch): + settingsDict = {} + expected_removal_messages = { + 2: { + '>>>>> Tracked Download State: importBlocked', + '>>>>> Status Messages (All):', + '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv', + } + } + await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch) diff --git a/tests/jobs/remove_failed_imports/test_remove_failed_imports_2.py b/tests/jobs/remove_failed_imports/test_remove_failed_imports_2.py new file mode 100644 index 0000000..827b096 --- /dev/null +++ 
b/tests/jobs/remove_failed_imports/test_remove_failed_imports_2.py @@ -0,0 +1,41 @@ +import pytest +from remove_failed_imports_utils import run_test +mock_data_file = 'tests/jobs/remove_failed_imports/mock_data/mock_data_2.json' + +@pytest.mark.asyncio +async def test_multiple_status_messages_multiple_pattern(monkeypatch): + settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world', 'all']} + expected_removal_messages = { + 1: { + '>>>>> Tracked Download State: importBlocked', + '>>>>> Status Messages (matching specified patterns):', + '>>>>> - Message 1 - hello world', + '>>>>> - Message 2 - goodbye all', + } + } + await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch) + +@pytest.mark.asyncio +async def test_multiple_status_messages_single_pattern(monkeypatch): + settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world']} + expected_removal_messages = { + 1: { + '>>>>> Tracked Download State: importBlocked', + '>>>>> Status Messages (matching specified patterns):', + '>>>>> - Message 1 - hello world' + } + } + await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch) + +@pytest.mark.asyncio +async def test_multiple_status_messages_no_pattern(monkeypatch): + settingsDict = {} + expected_removal_messages = { + 1: { + '>>>>> Tracked Download State: importBlocked', + '>>>>> Status Messages (All):', + '>>>>> - Message 1 - hello world', + '>>>>> - Message 2 - goodbye all', + } + } + await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch) diff --git a/tests/jobs/remove_failed_imports/test_remove_failed_imports_3.py b/tests/jobs/remove_failed_imports/test_remove_failed_imports_3.py new file mode 100644 index 0000000..9522ea5 --- /dev/null +++ b/tests/jobs/remove_failed_imports/test_remove_failed_imports_3.py @@ -0,0 +1,59 @@ +import pytest +from remove_failed_imports_utils import run_test +mock_data_file = 'tests/jobs/remove_failed_imports/mock_data/mock_data_3.json' + 
+@pytest.mark.asyncio +async def test_multiple_statuses_multiple_pattern(monkeypatch): + settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world', 'all']} + expected_removal_messages = { + 1: { + '>>>>> Tracked Download State: importPending', + '>>>>> Status Messages (matching specified patterns):', + '>>>>> - Message 1 - hello world', + '>>>>> - Message 2 - goodbye all', + }, + 2: { + '>>>>> Tracked Download State: importFailed', + '>>>>> Status Messages (matching specified patterns):', + '>>>>> - Message 1 - hello world', + '>>>>> - Message 2 - goodbye all', + } + } + await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch) + +@pytest.mark.asyncio +async def test_multiple_statuses_single_pattern(monkeypatch): + settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world']} + expected_removal_messages = { + 1: { + '>>>>> Tracked Download State: importPending', + '>>>>> Status Messages (matching specified patterns):', + '>>>>> - Message 1 - hello world' + }, + 2: { + '>>>>> Tracked Download State: importFailed', + '>>>>> Status Messages (matching specified patterns):', + '>>>>> - Message 1 - hello world' + } + } + await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch) + + +@pytest.mark.asyncio +async def test_multiple_statuses_no_pattern(monkeypatch): + settingsDict = {} + expected_removal_messages = { + 1: { + '>>>>> Tracked Download State: importPending', + '>>>>> Status Messages (All):', + '>>>>> - Message 1 - hello world', + '>>>>> - Message 2 - goodbye all', + }, + 2: { + '>>>>> Tracked Download State: importFailed', + '>>>>> Status Messages (All):', + '>>>>> - Message 1 - hello world', + '>>>>> - Message 2 - goodbye all', + } + } + await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch) diff --git a/tests/test_nest_functions.py b/tests/utils/nest_functions/test_nest_functions.py similarity index 100% rename from tests/test_nest_functions.py rename to 
tests/utils/nest_functions/test_nest_functions.py diff --git a/tests/utils/remove_download/mock_data/mock_data_1.json b/tests/utils/remove_download/mock_data/mock_data_1.json new file mode 100644 index 0000000..4afee77 --- /dev/null +++ b/tests/utils/remove_download/mock_data/mock_data_1.json @@ -0,0 +1,11 @@ +{ + "id": 1, + "downloadId": "A", + "title": "Sonarr Title 1", + "removal_messages": [ + ">>>>> Tracked Download State: importBlocked", + ">>>>> Status Messages (matching specified patterns):", + ">>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv", + ">>>>> - And yet another message" + ] +} diff --git a/tests/utils/remove_download/remove_download_utils.py b/tests/utils/remove_download/remove_download_utils.py new file mode 100644 index 0000000..9831b72 --- /dev/null +++ b/tests/utils/remove_download/remove_download_utils.py @@ -0,0 +1,52 @@ +import os +os.environ['IS_IN_PYTEST'] = 'true' +import logging +import json +import pytest +from typing import Dict, Set, Any +from src.utils.shared import remove_download +from src.utils.trackers import Deleted_Downloads + + + +# Utility function to load mock data +def load_mock_data(file_name): + with open(file_name, 'r') as file: + return json.load(file) + +async def mock_rest_delete(*args, **kwargs) -> None: + logging.debug("Mock rest_delete called") + + +async def run_test( + settingsDict: Dict[str, Any], + expected_removal_messages: Set[str], + failType: str, + removeFromClient: bool, + mock_data_file: str, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture +) -> None: + # Load mock data + affectedItem = load_mock_data(mock_data_file) + + # Mock the `rest_delete` function + monkeypatch.setattr('src.utils.shared.rest_delete', mock_rest_delete) + + # Call the function + with caplog.at_level(logging.INFO): + # Call the function and assert no exceptions + try: + deleted_downloads = Deleted_Downloads([]) + await remove_download(settingsDict=settingsDict, BASE_URL='', API_KEY='', 
affectedItem=affectedItem, failType=failType, addToBlocklist=True, deleted_downloads=deleted_downloads, removeFromClient=removeFromClient) + except Exception as e: + pytest.fail(f"remove_download raised an exception: {e}") + + # Assertions: + # Check that expected log messages are in the captured log + log_messages = {record.message for record in caplog.records if record.levelname == 'INFO'} + + assert expected_removal_messages == log_messages + + # Check that the affectedItem's downloadId was added to deleted_downloads + assert affectedItem['downloadId'] in deleted_downloads.dict diff --git a/tests/utils/remove_download/test_remove_download_1.py b/tests/utils/remove_download/test_remove_download_1.py new file mode 100644 index 0000000..4761a12 --- /dev/null +++ b/tests/utils/remove_download/test_remove_download_1.py @@ -0,0 +1,33 @@ + +import pytest +from remove_download_utils import run_test +# Parameters identical across all tests +mock_data_file = 'tests/utils/remove_download/mock_data/mock_data_1.json' +failType = 'failed import' + +@pytest.mark.asyncio +async def test_removal_with_removal_messages(monkeypatch, caplog): + settingsDict = {'TEST_RUN': True} + removeFromClient = True + expected_removal_messages = { + '>>> Removing failed import download: Sonarr Title 1', + '>>>>> Tracked Download State: importBlocked', + '>>>>> Status Messages (matching specified patterns):', + '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv', + '>>>>> - And yet another message' + } + await run_test(settingsDict=settingsDict, expected_removal_messages=expected_removal_messages, failType=failType, removeFromClient=removeFromClient, mock_data_file=mock_data_file, monkeypatch=monkeypatch, caplog=caplog) + + +@pytest.mark.asyncio +async def test_schizophrenic_removal_with_removal_messages(monkeypatch, caplog): + settingsDict = {'TEST_RUN': True} + removeFromClient = False + expected_removal_messages = { + '>>> Removing failed import download (without 
removing from torrent client): Sonarr Title 1', + '>>>>> Tracked Download State: importBlocked', + '>>>>> Status Messages (matching specified patterns):', + '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv', + '>>>>> - And yet another message' + } + await run_test(settingsDict=settingsDict, expected_removal_messages=expected_removal_messages, failType=failType, removeFromClient=removeFromClient, mock_data_file=mock_data_file, monkeypatch=monkeypatch, caplog=caplog)