Bug Fixes & Automated Testing - remove_failed_imports.py #patch

This commit is contained in:
Benjamin Harder
2024-08-03 00:17:01 +02:00
parent d4c146a7d1
commit db50942ee8
21 changed files with 582 additions and 45 deletions

View File

@@ -25,9 +25,10 @@ jobs:
run: | run: |
python -m pip install --upgrade pip python -m pip install --upgrade pip
pip install -r docker/requirements.txt pip install -r docker/requirements.txt
- name: Test with pytest - name: Test with pytest
run: | run: |
python3 -m pytest --import-mode=append tests/ python3 -m pytest -o log_cli=false
build-dev: build-dev:
needs: unit-tests needs: unit-tests

8
.pytest.ini Normal file
View File

@@ -0,0 +1,8 @@
[pytest]
log_cli = true
addopts = -q --tb=short -s
log_cli_level = INFO
log_cli_format = %(asctime)s - %(levelname)s - %(name)s - %(message)s
log_cli_date_format = %Y-%m-%d %H:%M:%S
testpaths =
tests

View File

@@ -56,8 +56,7 @@ QBITTORRENT_PASSWORD = get_config_value('QBITTORRENT_PASSWORD',
######################################################################################################################## ########################################################################################################################
########### Validate settings ########### Validate settings
if not (IS_IN_PYTEST or RADARR_URL or SONARR_URL or LIDARR_URL or READARR_URL or WHISPARR_URL):
if not (RADARR_URL or SONARR_URL or LIDARR_URL or READARR_URL or WHISPARR_URL):
print(f'[ ERROR ]: No Radarr/Sonarr/Lidarr/Readarr/Whisparr URLs specified (nothing to monitor)') print(f'[ ERROR ]: No Radarr/Sonarr/Lidarr/Readarr/Whisparr URLs specified (nothing to monitor)')
exit() exit()

View File

@@ -2,3 +2,4 @@ import os
IS_IN_DOCKER = os.environ.get('IS_IN_DOCKER') IS_IN_DOCKER = os.environ.get('IS_IN_DOCKER')
IMAGE_TAG = os.environ.get('IMAGE_TAG', 'Local') IMAGE_TAG = os.environ.get('IMAGE_TAG', 'Local')
SHORT_COMMIT_ID = os.environ.get('SHORT_COMMIT_ID', 'n/a') SHORT_COMMIT_ID = os.environ.get('SHORT_COMMIT_ID', 'n/a')
IS_IN_PYTEST = os.environ.get('IS_IN_PYTEST')

11
main.py
View File

@@ -8,6 +8,7 @@ from config.definitions import settingsDict
from src.utils.loadScripts import * from src.utils.loadScripts import *
from src.decluttarr import queueCleaner from src.decluttarr import queueCleaner
from src.utils.rest import rest_get, rest_post from src.utils.rest import rest_get, rest_post
from src.utils.trackers import Defective_Tracker, Download_Sizes_Tracker
# Hide SSL Verification Warnings # Hide SSL Verification Warnings
if settingsDict['SSL_VERIFICATION']==False: if settingsDict['SSL_VERIFICATION']==False:
@@ -17,16 +18,6 @@ if settingsDict['SSL_VERIFICATION']==False:
# Set up logging # Set up logging
setLoggingFormat(settingsDict) setLoggingFormat(settingsDict)
# Set up classes that allow tracking of items from one loop to the next
class Defective_Tracker:
# Keeps track of which downloads were already caught as stalled previously
def __init__(self, dict):
self.dict = dict
class Download_Sizes_Tracker:
# Keeps track of the file sizes of the downloads
def __init__(self, dict):
self.dict = dict
# Main function # Main function
async def main(settingsDict): async def main(settingsDict):
# Adds to settings Dict the instances that are actually configures # Adds to settings Dict the instances that are actually configures

View File

@@ -10,12 +10,7 @@ from src.jobs.remove_orphans import remove_orphans
from src.jobs.remove_slow import remove_slow from src.jobs.remove_slow import remove_slow
from src.jobs.remove_stalled import remove_stalled from src.jobs.remove_stalled import remove_stalled
from src.jobs.remove_unmonitored import remove_unmonitored from src.jobs.remove_unmonitored import remove_unmonitored
from src.utils.trackers import Deleted_Downloads
class Deleted_Downloads:
# Keeps track of which downloads have already been deleted (to not double-delete)
def __init__(self, dict):
self.dict = dict
async def queueCleaner(settingsDict, arr_type, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs): async def queueCleaner(settingsDict, arr_type, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs):
# Read out correct instance depending on radarr/sonarr flag # Read out correct instance depending on radarr/sonarr flag

View File

@@ -1,4 +1,4 @@
from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download) from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, execute_checks)
import sys, os, traceback import sys, os, traceback
import logging, verboselogs import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__) logger = verboselogs.VerboseLogger(__name__)
@@ -7,45 +7,70 @@ async def remove_failed_imports(settingsDict, BASE_URL, API_KEY, NAME, deleted_d
# Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist # Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist
try: try:
failType = 'failed import' failType = 'failed import'
queue = await get_queue(BASE_URL, API_KEY) queue = await get_queue(BASE_URL, API_KEY)
logger.debug('remove_failed_imports/queue IN: %s', formattedQueueInfo(queue)) logger.debug('remove_failed_imports/queue IN: %s', formattedQueueInfo(queue))
if not queue: return 0 if not queue: return 0
# Find items affected # Find items affected
affectedItems = [] affectedItems = []
# Check if any patterns have been specified
patterns = settingsDict.get('FAILED_IMPORT_MESSAGE_PATTERNS', [])
if not patterns: # If patterns is empty or not present
patterns = None
for queueItem in queue['records']: for queueItem in queue['records']:
if 'status' in queueItem \ if 'status' in queueItem \
and 'trackedDownloadStatus' in queueItem \ and 'trackedDownloadStatus' in queueItem \
and 'trackedDownloadState' in queueItem \ and 'trackedDownloadState' in queueItem \
and 'statusMessages' in queueItem: and 'statusMessages' in queueItem:
removal_messages = []
if queueItem['status'] == 'completed' \ if queueItem['status'] == 'completed' \
and queueItem['trackedDownloadStatus'] == 'warning' \ and queueItem['trackedDownloadStatus'] == 'warning' \
and queueItem['trackedDownloadState'] in {'importPending', 'importFailed', 'importBlocked'}: and queueItem['trackedDownloadState'] in {'importPending', 'importFailed', 'importBlocked'}:
# Find messages that find specified pattern and put them into a "removal_message" that will be displayed in the logger when removing the affected item # Find messages that find specified pattern and put them into a "removal_message" that will be displayed in the logger when removing the affected item
removal_messages = ['Tracked Download State: ' + queueItem['trackedDownloadState']] if not patterns:
for statusMessage in queueItem['statusMessages']: # No patterns defined - including all status messages in the removal_messages
if not settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']: # No patterns defined - including all status messages in the removal_messages removal_messages.append ('>>>>> Status Messages (All):')
removal_messages.append ('Status Messages (All):') for statusMessage in queueItem['statusMessages']:
removal_messages.extend(f"- {msg}" for msg in statusMessage.get('messages', [])) removal_messages.extend(f">>>>> - {message}" for message in statusMessage.get('messages', []))
break else:
# Specific patterns defined - only removing if any of these are matched
removal_messages.append ('Status Messages (matching specified patterns):') for statusMessage in queueItem['statusMessages']:
messages = statusMessage.get('messages', []) messages = statusMessage.get('messages', [])
for message in messages: for message in messages:
if any(pattern in message for pattern in settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']): if any(pattern in message for pattern in patterns):
removal_messages.append(f"- {message}") removal_messages.append(f">>>>> - {message}")
if removal_messages:
removal_messages.insert (0, '>>>>> Status Messages (matching specified patterns):')
if removal_messages:
removal_messages = list(dict.fromkeys(removal_messages)) # deduplication
removal_messages.insert(0,'>>>>> Tracked Download State: ' + queueItem['trackedDownloadState'])
queueItem['removal_messages'] = removal_messages
affectedItems.append(queueItem)
queueItem['removal_messages'] = removal_messages check_kwargs = {
affectedItems.append(queueItem) 'settingsDict': settingsDict,
'affectedItems': affectedItems,
'failType': failType,
'BASE_URL': BASE_URL,
'API_KEY': API_KEY,
'NAME': NAME,
'deleted_downloads': deleted_downloads,
'defective_tracker': defective_tracker,
'privateDowloadIDs': privateDowloadIDs,
'protectedDownloadIDs': protectedDownloadIDs,
'addToBlocklist': True,
'doPrivateTrackerCheck': False,
'doProtectedDownloadCheck': True,
'doPermittedAttemptsCheck': False,
'extraParameters': {'keepTorrentForPrivateTrackers': True}
}
affectedItems = await execute_checks(**check_kwargs)
affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = True,
doPrivateTrackerCheck = False,
doProtectedDownloadCheck = True,
doPermittedAttemptsCheck = False,
extraParameters = {'keepTorrentForPrivateTrackers': True}
)
return len(affectedItems) return len(affectedItems)
except Exception as error: except Exception as error:
errorDetails(NAME, error) errorDetails(NAME, error)

77
src/utils/main.py Normal file
View File

@@ -0,0 +1,77 @@
# Import Libraries
import asyncio
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
import json
# Import Functions
from config.definitions import settingsDict
from src.utils.loadScripts import *
from src.decluttarr import queueCleaner
from src.utils.rest import rest_get, rest_post
from src.utils.trackers import Defective_Tracker, Download_Sizes_Tracker
# Hide SSL Verification Warnings
if settingsDict['SSL_VERIFICATION']==False:
import warnings
warnings.filterwarnings("ignore", message="Unverified HTTPS request")
# Set up logging
setLoggingFormat(settingsDict)
# Main function
async def main(settingsDict):
    """Entry point: discover configured *arr instances, run start-up checks,
    then loop forever cleaning the download queues.

    Args:
        settingsDict: global settings dictionary from config.definitions;
            mutated in place (an 'INSTANCES' key is added) and also rebound
            by the await-ed setup helpers.
    """
    # Adds to settingsDict the *arr instances that are actually configured (i.e. have a URL set)
    settingsDict['INSTANCES'] = []
    for arrApplication in settingsDict['SUPPORTED_ARR_APPS']:
        if settingsDict[arrApplication + '_URL']:
            settingsDict['INSTANCES'].append(arrApplication)
    # Pre-populates the dictionaries (in classes) that track the items that were already caught as having problems or removed
    defectiveTrackingInstances = {}
    for instance in settingsDict['INSTANCES']:
        defectiveTrackingInstances[instance] = {}
    defective_tracker = Defective_Tracker(defectiveTrackingInstances)
    download_sizes_tracker = Download_Sizes_Tracker({})
    # Get name of each arr-instance
    for instance in settingsDict['INSTANCES']:
        settingsDict = await getArrInstanceName(settingsDict, instance)
    # Check whether the running version is outdated
    upgradeChecks(settingsDict)
    # Welcome Message
    showWelcome()
    # Current Settings
    showSettings(settingsDict)
    # Check minimum version, whether instances are reachable, and retrieve the qBit cookie
    settingsDict = await instanceChecks(settingsDict)
    # Create qBit protection tag if not existing
    await createQbitProtectionTag(settingsDict)
    # Show Logger Level
    showLoggerLevel(settingsDict)
    # Start cleaning loop (runs until the process is stopped)
    while True:
        logger.verbose('-' * 50)
        # Cache protected (via tag) and private torrents from qBittorrent
        protectedDownloadIDs, privateDowloadIDs = await getProtectedAndPrivateFromQbit(settingsDict)
        # Run the queue cleaner for each configured instance
        for instance in settingsDict['INSTANCES']:
            await queueCleaner(settingsDict, instance, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs)
        logger.verbose('')
        logger.verbose('Queue clean-up complete!')
        # Wait for the next run
        await asyncio.sleep(settingsDict['REMOVE_TIMER']*60)
    return

if __name__ == '__main__':
    asyncio.run(main(settingsDict))

View File

@@ -55,7 +55,7 @@ def protectedDownloadCheck(settingsDict, affectedItems, failType, protectedDownl
return affectedItems return affectedItems
async def execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, addToBlocklist, doPrivateTrackerCheck, doProtectedDownloadCheck, doPermittedAttemptsCheck, extraParameters = []): async def execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, addToBlocklist, doPrivateTrackerCheck, doProtectedDownloadCheck, doPermittedAttemptsCheck, extraParameters = {}):
# Goes over the affected items and performs the checks that are parametrized # Goes over the affected items and performs the checks that are parametrized
try: try:
# De-duplicates the affected items (one downloadid may be shared by multiple affected items) # De-duplicates the affected items (one downloadid may be shared by multiple affected items)
@@ -143,8 +143,8 @@ async def remove_download(settingsDict, BASE_URL, API_KEY, affectedItem, failTyp
logger.info('>>> Removing %s download (without removing from torrent client): %s', failType, affectedItem['title']) logger.info('>>> Removing %s download (without removing from torrent client): %s', failType, affectedItem['title'])
# Print out detailed removal messages (if any were added in the jobs) # Print out detailed removal messages (if any were added in the jobs)
if removal_messages in affectedItem: if 'removal_messages' in affectedItem:
for removal_message in affectedItem.removal_messages: for removal_message in affectedItem['removal_messages']:
logger.info(removal_message) logger.info(removal_message)
if not settingsDict['TEST_RUN']: if not settingsDict['TEST_RUN']:

15
src/utils/trackers.py Normal file
View File

@@ -0,0 +1,15 @@
# Set up classes that allow tracking of items from one loop to the next
class Defective_Tracker:
    """Remembers downloads that were already caught as stalled/defective in a previous loop."""

    def __init__(self, dict):
        self.dict = dict


class Download_Sizes_Tracker:
    """Remembers the observed file sizes of downloads between loop iterations."""

    def __init__(self, dict):
        self.dict = dict


class Deleted_Downloads:
    """Remembers downloads that have already been deleted, so they are not double-deleted."""

    def __init__(self, dict):
        self.dict = dict

View File

@@ -0,0 +1,33 @@
{
"records": [
{
"id": 1,
"downloadId": "A123",
"title": "Sonarr Title 1",
"status": "completed",
"trackedDownloadStatus": "ok",
"trackedDownloadState": "importing",
"statusMessages": []
},
{
"id": 2,
"downloadId": "B123",
"title": "Sonarr Title 2",
"status": "completed",
"trackedDownloadStatus": "warning",
"trackedDownloadState": "importBlocked",
"statusMessages": [
{
"title": "One or more episodes expected in this release were not imported or missing from the release",
"messages": []
},
{
"title": "Sonarr Title 2.mkv",
"messages": [
"Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv"
]
}
]
}
]
}

View File

@@ -0,0 +1,32 @@
{
"records": [
{
"id": 1,
"downloadId": "A123",
"title": "Sonarr Title 1",
"status": "completed",
"trackedDownloadStatus": "warning",
"trackedDownloadState": "importBlocked",
"statusMessages": [
{
"title": "First Message",
"messages": [
"Message 1 - hello world"
]
},
{
"title": "Duplicate of First Message",
"messages": [
"Message 1 - hello world"
]
},
{
"title": "Second of Message",
"messages": [
"Message 2 - goodbye all"
]
}
]
}
]
}

View File

@@ -0,0 +1,60 @@
{
"records": [
{
"id": 1,
"downloadId": "A123",
"title": "Sonarr Title 1",
"status": "completed",
"trackedDownloadStatus": "warning",
"trackedDownloadState": "importPending",
"statusMessages": [
{
"title": "First Message",
"messages": [
"Message 1 - hello world"
]
},
{
"title": "Duplicate of First Message",
"messages": [
"Message 1 - hello world"
]
},
{
"title": "Second of Message",
"messages": [
"Message 2 - goodbye all"
]
}
]
},
{
"id": 2,
"downloadId": "B123",
"title": "Sonarr Title 2",
"status": "completed",
"trackedDownloadStatus": "warning",
"trackedDownloadState": "importFailed",
"statusMessages": [
{
"title": "First Message",
"messages": [
"Message 1 - hello world"
]
},
{
"title": "Duplicate of First Message",
"messages": [
"Message 1 - hello world"
]
},
{
"title": "Second of Message",
"messages": [
"Message 2 - goodbye all"
]
}
]
}
]
}

View File

@@ -0,0 +1,65 @@
import os
os.environ['IS_IN_PYTEST'] = 'true'
import logging
import json
import pytest
from typing import Dict, Set, Any
from unittest.mock import AsyncMock
from src.jobs.remove_failed_imports import remove_failed_imports
# Utility function to load mock data
def load_mock_data(file_name):
    """Read the JSON fixture at *file_name* and return the parsed object."""
    with open(file_name, 'r') as fixture:
        return json.load(fixture)
async def mock_get_queue(mock_data):
    # Async stand-in for get_queue: logs the call and returns the supplied mock data unchanged.
    # NOTE(review): run_test below binds a local AsyncMock to the same name, so this
    # module-level helper appears unused there — confirm before removing.
    logging.debug("Mock get_queue called")
    return mock_data
async def run_test(
    settingsDict: Dict[str, Any],
    expected_removal_messages: Dict[int, Set[str]],
    mock_data_file: str,
    monkeypatch: pytest.MonkeyPatch
) -> None:
    """Drive remove_failed_imports against a JSON queue fixture and verify the items
    it hands to execute_checks, including their removal messages.

    Args:
        settingsDict: settings passed through to remove_failed_imports
            (notably FAILED_IMPORT_MESSAGE_PATTERNS).
        expected_removal_messages: map of queue-item id -> expected set of
            removal message strings.
        mock_data_file: path to the JSON fixture emulating the arr queue.
        monkeypatch: pytest fixture used to patch get_queue / execute_checks.
    """
    # Load mock data
    mock_data = load_mock_data(mock_data_file)
    # Create an AsyncMock for execute_checks with side effect
    execute_checks_mock = AsyncMock()
    # Define a side effect function
    def side_effect(*args, **kwargs):
        logging.debug("Mock execute_checks called with kwargs: %s", kwargs)
        # Return the affectedItems from kwargs (pass-through, no filtering)
        return kwargs.get('affectedItems', [])
    # Attach side effect to the mock
    execute_checks_mock.side_effect = side_effect
    # Create an async mock for get_queue that returns mock_data
    mock_get_queue = AsyncMock(return_value=mock_data)
    # Patch the methods in the module under test
    monkeypatch.setattr('src.jobs.remove_failed_imports.get_queue', mock_get_queue)
    monkeypatch.setattr('src.jobs.remove_failed_imports.execute_checks', execute_checks_mock)
    # Call the function under test (connection params are irrelevant since the network calls are mocked)
    await remove_failed_imports(settingsDict=settingsDict, BASE_URL='', API_KEY='', NAME='', deleted_downloads=set(), defective_tracker=set(), protectedDownloadIDs=set(), privateDowloadIDs=set())
    # Assertions
    assert execute_checks_mock.called # Ensure the mock was called
    # Assert exactly the expected items were passed on for removal
    args, kwargs = execute_checks_mock.call_args
    affectedItems = kwargs.get('affectedItems', [])
    affectedItems_ids = {item['id'] for item in affectedItems}
    expectedItems_ids = set(expected_removal_messages.keys())
    assert len(affectedItems) == len(expected_removal_messages)
    assert affectedItems_ids == expectedItems_ids
    # Assert each item carries exactly the expected removal messages (order-insensitive)
    for affectedItem in affectedItems:
        assert 'removal_messages' in affectedItem
        assert expected_removal_messages[affectedItem['id']] == set(affectedItem.get('removal_messages', []))

View File

@@ -0,0 +1,39 @@
import pytest
from remove_failed_imports_utils import run_test
# Fixture with two queue items; only item id=2 (warning/importBlocked) qualifies for removal
mock_data_file = 'tests/jobs/remove_failed_imports/mock_data/mock_data_1.json'

@pytest.mark.asyncio
async def test_with_pattern_one_message(monkeypatch):
    # Pattern matches the single status message of item 2
    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['not found in the grabbed release']}
    expected_removal_messages = {
        2: {
            '>>>>> Tracked Download State: importBlocked',
            '>>>>> Status Messages (matching specified patterns):',
            '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv',
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)

@pytest.mark.asyncio
async def test_with_empty_pattern_one_message(monkeypatch):
    # Empty pattern list behaves like "no patterns": all status messages are included
    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': []}
    expected_removal_messages = {
        2: {
            '>>>>> Tracked Download State: importBlocked',
            '>>>>> Status Messages (All):',
            '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv',
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)

@pytest.mark.asyncio
async def test_without_pattern_one_message(monkeypatch):
    # Missing FAILED_IMPORT_MESSAGE_PATTERNS key: same behavior as empty list
    settingsDict = {}
    expected_removal_messages = {
        2: {
            '>>>>> Tracked Download State: importBlocked',
            '>>>>> Status Messages (All):',
            '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv',
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)

View File

@@ -0,0 +1,41 @@
import pytest
from remove_failed_imports_utils import run_test
# Fixture with one queue item carrying three status messages, two of which are duplicates
mock_data_file = 'tests/jobs/remove_failed_imports/mock_data/mock_data_2.json'

@pytest.mark.asyncio
async def test_multiple_status_messages_multiple_pattern(monkeypatch):
    # Both patterns match; duplicate messages are expected to be de-duplicated
    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world', 'all']}
    expected_removal_messages = {
        1: {
            '>>>>> Tracked Download State: importBlocked',
            '>>>>> Status Messages (matching specified patterns):',
            '>>>>> - Message 1 - hello world',
            '>>>>> - Message 2 - goodbye all',
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)

@pytest.mark.asyncio
async def test_multiple_status_messages_single_pattern(monkeypatch):
    # Only the 'world' message should be reported
    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world']}
    expected_removal_messages = {
        1: {
            '>>>>> Tracked Download State: importBlocked',
            '>>>>> Status Messages (matching specified patterns):',
            '>>>>> - Message 1 - hello world'
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)

@pytest.mark.asyncio
async def test_multiple_status_messages_no_pattern(monkeypatch):
    # No patterns configured: all (de-duplicated) messages are included
    settingsDict = {}
    expected_removal_messages = {
        1: {
            '>>>>> Tracked Download State: importBlocked',
            '>>>>> Status Messages (All):',
            '>>>>> - Message 1 - hello world',
            '>>>>> - Message 2 - goodbye all',
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)

View File

@@ -0,0 +1,59 @@
import pytest
from remove_failed_imports_utils import run_test
# Fixture with two qualifying queue items (importPending and importFailed), identical messages
mock_data_file = 'tests/jobs/remove_failed_imports/mock_data/mock_data_3.json'

@pytest.mark.asyncio
async def test_multiple_statuses_multiple_pattern(monkeypatch):
    # Both items match both patterns; each reports its own tracked download state
    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world', 'all']}
    expected_removal_messages = {
        1: {
            '>>>>> Tracked Download State: importPending',
            '>>>>> Status Messages (matching specified patterns):',
            '>>>>> - Message 1 - hello world',
            '>>>>> - Message 2 - goodbye all',
        },
        2: {
            '>>>>> Tracked Download State: importFailed',
            '>>>>> Status Messages (matching specified patterns):',
            '>>>>> - Message 1 - hello world',
            '>>>>> - Message 2 - goodbye all',
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)

@pytest.mark.asyncio
async def test_multiple_statuses_single_pattern(monkeypatch):
    # Only the 'world' message matches for each item
    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world']}
    expected_removal_messages = {
        1: {
            '>>>>> Tracked Download State: importPending',
            '>>>>> Status Messages (matching specified patterns):',
            '>>>>> - Message 1 - hello world'
        },
        2: {
            '>>>>> Tracked Download State: importFailed',
            '>>>>> Status Messages (matching specified patterns):',
            '>>>>> - Message 1 - hello world'
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)

@pytest.mark.asyncio
async def test_multiple_statuses_no_pattern(monkeypatch):
    # No patterns configured: all (de-duplicated) messages are included for both items
    settingsDict = {}
    expected_removal_messages = {
        1: {
            '>>>>> Tracked Download State: importPending',
            '>>>>> Status Messages (All):',
            '>>>>> - Message 1 - hello world',
            '>>>>> - Message 2 - goodbye all',
        },
        2: {
            '>>>>> Tracked Download State: importFailed',
            '>>>>> Status Messages (All):',
            '>>>>> - Message 1 - hello world',
            '>>>>> - Message 2 - goodbye all',
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)

View File

@@ -0,0 +1,11 @@
{
"id": 1,
"downloadId": "A",
"title": "Sonarr Title 1",
"removal_messages": [
">>>>> Tracked Download State: importBlocked",
">>>>> Status Messages (matching specified patterns):",
">>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv",
">>>>> - And yet another message"
]
}

View File

@@ -0,0 +1,52 @@
import os
os.environ['IS_IN_PYTEST'] = 'true'
import logging
import json
import pytest
from typing import Dict, Set, Any
from src.utils.shared import remove_download
from src.utils.trackers import Deleted_Downloads
# Utility function to load mock data
def load_mock_data(file_name):
    """Read the JSON fixture at *file_name* and return the parsed object."""
    with open(file_name, 'r') as fixture:
        return json.load(fixture)
async def mock_rest_delete(*args, **kwargs) -> None:
    """Async stand-in for rest_delete: accepts any call signature and only logs.

    Fixes two defects in the original:
    - it referenced an undefined name ``logger`` (this module only imports
      ``logging``), raising NameError as soon as the mock was awaited;
    - it accepted no parameters, so the patched call site
      (``src.utils.shared.rest_delete``) passing a URL/API key would raise
      TypeError.
    """
    logging.debug("Mock rest_delete called with args=%s kwargs=%s", args, kwargs)
async def run_test(
    settingsDict: Dict[str, Any],
    expected_removal_messages: Set[str],
    failType: str,
    removeFromClient: bool,
    mock_data_file: str,
    monkeypatch: pytest.MonkeyPatch,
    caplog: pytest.LogCaptureFixture
) -> None:
    """Drive remove_download against a single affected-item JSON fixture and verify
    its INFO log output and that the item is registered as deleted.

    Args:
        settingsDict: settings passed through to remove_download (e.g. TEST_RUN).
        expected_removal_messages: exact set of INFO log messages expected.
        failType: failure type string used in the removal log line.
        removeFromClient: whether the download should also be removed from the torrent client.
        mock_data_file: path to the JSON fixture for the affected item.
        monkeypatch: pytest fixture used to patch rest_delete.
        caplog: pytest fixture capturing log records.
    """
    # Load mock data
    affectedItem = load_mock_data(mock_data_file)
    # Mock the `rest_delete` function so no real HTTP call is made
    monkeypatch.setattr('src.utils.shared.rest_delete', mock_rest_delete)
    # Call the function while capturing INFO-level logs
    with caplog.at_level(logging.INFO):
        # Call the function and assert no exceptions
        try:
            deleted_downloads = Deleted_Downloads([])
            await remove_download(settingsDict=settingsDict, BASE_URL='', API_KEY='', affectedItem=affectedItem, failType=failType, addToBlocklist=True, deleted_downloads=deleted_downloads, removeFromClient=removeFromClient)
        except Exception as e:
            pytest.fail(f"remove_download raised an exception: {e}")
    # Assertions:
    # Check that exactly the expected INFO messages were logged (order-insensitive)
    log_messages = {record.message for record in caplog.records if record.levelname == 'INFO'}
    assert expected_removal_messages == log_messages
    # Check that the affectedItem's downloadId was added to deleted_downloads
    assert affectedItem['downloadId'] in deleted_downloads.dict

View File

@@ -0,0 +1,33 @@
import pytest
from remove_download_utils import run_test
# Parameters identical across all tests
mock_data_file = 'tests/utils/remove_download/mock_data/mock_data_1.json'
failType = 'failed import'

@pytest.mark.asyncio
async def test_removal_with_removal_messages(monkeypatch, caplog):
    # TEST_RUN=True keeps remove_download from performing the real deletion calls
    settingsDict = {'TEST_RUN': True}
    removeFromClient = True
    # Expected: the removal headline plus the item's stored removal_messages
    expected_removal_messages = {
        '>>> Removing failed import download: Sonarr Title 1',
        '>>>>> Tracked Download State: importBlocked',
        '>>>>> Status Messages (matching specified patterns):',
        '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv',
        '>>>>> - And yet another message'
    }
    await run_test(settingsDict=settingsDict, expected_removal_messages=expected_removal_messages, failType=failType, removeFromClient=removeFromClient, mock_data_file=mock_data_file, monkeypatch=monkeypatch, caplog=caplog)

@pytest.mark.asyncio
async def test_schizophrenic_removal_with_removal_messages(monkeypatch, caplog):
    # removeFromClient=False: removed from the arr queue but kept in the torrent client,
    # which changes the removal headline wording
    settingsDict = {'TEST_RUN': True}
    removeFromClient = False
    expected_removal_messages = {
        '>>> Removing failed import download (without removing from torrent client): Sonarr Title 1',
        '>>>>> Tracked Download State: importBlocked',
        '>>>>> Status Messages (matching specified patterns):',
        '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv',
        '>>>>> - And yet another message'
    }
    await run_test(settingsDict=settingsDict, expected_removal_messages=expected_removal_messages, failType=failType, removeFromClient=removeFromClient, mock_data_file=mock_data_file, monkeypatch=monkeypatch, caplog=caplog)