Merge pull request #141 from ManiMatter/dev

Bug Fixes & Automated Testing - remove_failed_imports.py #patch
This commit is contained in:
ManiMatter
2024-08-03 13:22:21 +02:00
committed by GitHub
23 changed files with 619 additions and 71 deletions

View File

@@ -25,9 +25,10 @@ jobs:
run: |
python -m pip install --upgrade pip
pip install -r docker/requirements.txt
- name: Test with pytest
run: |
python3 -m pytest --import-mode=append tests/
python3 -m pytest -o log_cli=false
build-dev:
needs: unit-tests

8
.pytest.ini Normal file
View File

@@ -0,0 +1,8 @@
[pytest]
log_cli = true
addopts = -q --tb=short -s
log_cli_level = INFO
log_cli_format = %(asctime)s - %(levelname)s - %(name)s - %(message)s
log_cli_date_format = %Y-%m-%d %H:%M:%S
testpaths =
tests

View File

@@ -19,7 +19,7 @@ To raise a new feature request, please go through the following steps:
- Be willing to provide more details if asked for them and help testing the feature
## Bug Reports
Bugs in this application are scarce. If there are any, there most likely features ;-)
Bugs in this application are scarce. If there are any, they're most likely features ;-)
Please go follow these steps to submit a bug:
- Check if this bug has previously been reported
- Add [Bug] at the beginning of the issue title
@@ -29,28 +29,33 @@ Please go follow these steps to submit a bug:
2) Turn off all remove functions but one where you expect a removal (example: REMOVE_STALLED: True and the rest on False)
3) Let it run until the supposed remove should be triggered
4) Paste the full logs to a pastebin
- If helpful: Paste a screenshot of qbit and the affected *arr app to a pasteimg
- Be willing to provide more details if asked for them and help testing the bug fix
5) Share your settings (docker-compose or config.conf)
6) Optional: If helpful, share screenshots showing the problem (from your arr-app or qbit)
7) Be responsive and provide more details if asked for them, and help testing the bug fix
### Code Contributions
Code contributions are very welcome - thanks for helping improve this app!
1) Please always branch out from the "dev" branch, not from the "main" branch
2) Please test your code locally
3) Please only commit code that you have written yourself and is not owned by anybody else
4) Please create a PR against the "dev" branch
5) Once I have reviewed it, I will merge it and it will create teh "dev" image
6) Please help testing that the dev image works, before we then commit it to the "latest" image (from main branch)
1) Always branch out from the "dev" branch, not from the "main" branch
2) Test your code locally
3) Only commit code that you have written yourself and is not owned by anybody else
4) Create a PR against the "dev" branch
5) Be responsive to code review
5) Once the code is reviewed and OK, it will be merged to dev branch, which will create the "dev"-docker image
6) Help testing that the dev image works
7) Finally, we will then commit the change to the main branch, which will create the "latest"-docker image
You do not need to know about how to create docker images to contribute here.
To get started:
1) Clone the git repository to your local machine
2) Create a virtual python environment (python3 -m venv venv)
3) Activate the virtual environment (source venv/bin/activate)
4) Install python libraries (pip install -r docker/requirements.txt)
1) Create a fork of decluttarr
2) Clone the git repository from the dev branch to your local machine `git clone -b dev https://github.com/yourName/decluttarr`
2) Create a virtual python environment (`python3 -m venv venv`)
3) Activate the virtual environment (`source venv/bin/activate`)
4) Install python libraries (`pip install -r docker/requirements.txt`)
5) Adjust the config/config.conf to your needs
6) Adjust the code in the files as needed
7) Run the script (python3 main.py)
8) Create a PR once you are ready
7) Run the script (`python3 main.py`)
8) Push your changes to your own git repo
9) Test the dev-image it creates automatically
10) Create the PR from your repo to ManiMatter/decluttarr (dev branch)

View File

@@ -56,8 +56,7 @@ QBITTORRENT_PASSWORD = get_config_value('QBITTORRENT_PASSWORD',
########################################################################################################################
########### Validate settings
if not (RADARR_URL or SONARR_URL or LIDARR_URL or READARR_URL or WHISPARR_URL):
if not (IS_IN_PYTEST or RADARR_URL or SONARR_URL or LIDARR_URL or READARR_URL or WHISPARR_URL):
print(f'[ ERROR ]: No Radarr/Sonarr/Lidarr/Readarr/Whisparr URLs specified (nothing to monitor)')
exit()

View File

@@ -2,3 +2,4 @@ import os
IS_IN_DOCKER = os.environ.get('IS_IN_DOCKER')
IMAGE_TAG = os.environ.get('IMAGE_TAG', 'Local')
SHORT_COMMIT_ID = os.environ.get('SHORT_COMMIT_ID', 'n/a')
IS_IN_PYTEST = os.environ.get('IS_IN_PYTEST')

22
main.py
View File

@@ -8,6 +8,7 @@ from config.definitions import settingsDict
from src.utils.loadScripts import *
from src.decluttarr import queueCleaner
from src.utils.rest import rest_get, rest_post
from src.utils.trackers import Defective_Tracker, Download_Sizes_Tracker
# Hide SSL Verification Warnings
if settingsDict['SSL_VERIFICATION']==False:
@@ -17,16 +18,6 @@ if settingsDict['SSL_VERIFICATION']==False:
# Set up logging
setLoggingFormat(settingsDict)
# Set up classes that allow tracking of items from one loop to the next
class Defective_Tracker:
# Keeps track of which downloads were already caught as stalled previously
def __init__(self, dict):
self.dict = dict
class Download_Sizes_Tracker:
# Keeps track of the file sizes of the downloads
def __init__(self, dict):
self.dict = dict
# Main function
async def main(settingsDict):
# Adds to settings Dict the instances that are actually configures
@@ -49,17 +40,20 @@ async def main(settingsDict):
# Check outdated
upgradeChecks(settingsDict)
# Display current settings when loading script
# Welcome Message
showWelcome()
# Current Settings
showSettings(settingsDict)
# Check Minimum Version and if instances are reachable and retrieve qbit cookie
settingsDict = await instanceChecks(settingsDict)
# Create qBit protection tag if not existing
await createQbitProtectionTag(settingsDict)
# Show Logger settings
showLoggerSettings(settingsDict)
# Show Logger Level
showLoggerLevel(settingsDict)
# Start Cleaning
while True:

View File

@@ -10,12 +10,7 @@ from src.jobs.remove_orphans import remove_orphans
from src.jobs.remove_slow import remove_slow
from src.jobs.remove_stalled import remove_stalled
from src.jobs.remove_unmonitored import remove_unmonitored
class Deleted_Downloads:
# Keeps track of which downloads have already been deleted (to not double-delete)
def __init__(self, dict):
self.dict = dict
from src.utils.trackers import Deleted_Downloads
async def queueCleaner(settingsDict, arr_type, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs):
# Read out correct instance depending on radarr/sonarr flag

View File

@@ -1,4 +1,4 @@
from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download)
from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, execute_checks)
import sys, os, traceback
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
@@ -7,45 +7,70 @@ async def remove_failed_imports(settingsDict, BASE_URL, API_KEY, NAME, deleted_d
# Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist
try:
failType = 'failed import'
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY)
logger.debug('remove_failed_imports/queue IN: %s', formattedQueueInfo(queue))
if not queue: return 0
# Find items affected
affectedItems = []
# Check if any patterns have been specified
patterns = settingsDict.get('FAILED_IMPORT_MESSAGE_PATTERNS', [])
if not patterns: # If patterns is empty or not present
patterns = None
for queueItem in queue['records']:
if 'status' in queueItem \
and 'trackedDownloadStatus' in queueItem \
and 'trackedDownloadState' in queueItem \
and 'statusMessages' in queueItem:
removal_messages = []
if queueItem['status'] == 'completed' \
and queueItem['trackedDownloadStatus'] == 'warning' \
and queueItem['trackedDownloadState'] in {'importPending', 'importFailed', 'importBlocked'}:
# Find messages that find specified pattern and put them into a "removal_message" that will be displayed in the logger when removing the affected item
removal_messages = ['Tracked Download State: ' + queueItem['trackedDownloadState']]
for statusMessage in queueItem['statusMessages']:
if not settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']: # No patterns defined - including all status messages in the removal_messages
removal_messages.append ('Status Messages (All):')
removal_messages.extend(f"- {msg}" for msg in statusMessage.get('messages', []))
break
removal_messages.append ('Status Messages (matching specified patterns):')
messages = statusMessage.get('messages', [])
for message in messages:
if any(pattern in message for pattern in settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']):
removal_messages.append(f"- {message}")
if not patterns:
# No patterns defined - including all status messages in the removal_messages
removal_messages.append ('>>>>> Status Messages (All):')
for statusMessage in queueItem['statusMessages']:
removal_messages.extend(f">>>>> - {message}" for message in statusMessage.get('messages', []))
else:
# Specific patterns defined - only removing if any of these are matched
for statusMessage in queueItem['statusMessages']:
messages = statusMessage.get('messages', [])
for message in messages:
if any(pattern in message for pattern in patterns):
removal_messages.append(f">>>>> - {message}")
if removal_messages:
removal_messages.insert (0, '>>>>> Status Messages (matching specified patterns):')
if removal_messages:
removal_messages = list(dict.fromkeys(removal_messages)) # deduplication
removal_messages.insert(0,'>>>>> Tracked Download State: ' + queueItem['trackedDownloadState'])
queueItem['removal_messages'] = removal_messages
affectedItems.append(queueItem)
queueItem['removal_messages'] = removal_messages
affectedItems.append(queueItem)
check_kwargs = {
'settingsDict': settingsDict,
'affectedItems': affectedItems,
'failType': failType,
'BASE_URL': BASE_URL,
'API_KEY': API_KEY,
'NAME': NAME,
'deleted_downloads': deleted_downloads,
'defective_tracker': defective_tracker,
'privateDowloadIDs': privateDowloadIDs,
'protectedDownloadIDs': protectedDownloadIDs,
'addToBlocklist': True,
'doPrivateTrackerCheck': False,
'doProtectedDownloadCheck': True,
'doPermittedAttemptsCheck': False,
'extraParameters': {'keepTorrentForPrivateTrackers': True}
}
affectedItems = await execute_checks(**check_kwargs)
affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = True,
doPrivateTrackerCheck = False,
doProtectedDownloadCheck = True,
doPermittedAttemptsCheck = False,
extraParameters = {'keepTorrentForPrivateTrackers': True}
)
return len(affectedItems)
except Exception as error:
errorDetails(NAME, error)

View File

@@ -51,16 +51,19 @@ async def getProtectedAndPrivateFromQbit(settingsDict):
return protectedDownloadIDs, privateDowloadIDs
def showSettings(settingsDict):
# Prints out the settings
fmt = '{0.days} days {0.hours} hours {0.minutes} minutes'
def showWelcome():
    """Log the startup banner shown once when the application launches."""
    banner_lines = (
        '#' * 50,
        'Decluttarr - Application Started!',
        '',
        'Like this app? Thanks for giving it a ⭐️ on GitHub!',
        'https://github.com/ManiMatter/decluttarr/',
        '',
        '',
    )
    for banner_line in banner_lines:
        logger.info(banner_line)
    return
def showSettings(settingsDict):
# Settings Message
fmt = '{0.days} days {0.hours} hours {0.minutes} minutes'
logger.info('*** Current Settings ***')
logger.info('Version: %s', settingsDict['IMAGE_TAG'])
logger.info('Commit: %s', settingsDict['SHORT_COMMIT_ID'])
@@ -205,7 +208,7 @@ async def createQbitProtectionTag(settingsDict):
if not settingsDict['TEST_RUN']:
await rest_post(url=settingsDict['QBITTORRENT_URL']+'/torrents/createTags', data={'tags': settingsDict['NO_STALLED_REMOVAL_QBIT_TAG']}, headers={'content-type': 'application/x-www-form-urlencoded'}, cookies=settingsDict['QBIT_COOKIE'])
def showLoggerSettings(settingsDict):
def showLoggerLevel(settingsDict):
logger.info('#' * 50)
if settingsDict['LOG_LEVEL'] == 'INFO':
logger.info('LOG_LEVEL = INFO: Only logging changes (switch to VERBOSE for more info)')

77
src/utils/main.py Normal file
View File

@@ -0,0 +1,77 @@
# Import Libraries
import asyncio
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
import json
# Import Functions
from config.definitions import settingsDict
from src.utils.loadScripts import *
from src.decluttarr import queueCleaner
from src.utils.rest import rest_get, rest_post
from src.utils.trackers import Defective_Tracker, Download_Sizes_Tracker
# Hide SSL Verification Warnings
if settingsDict['SSL_VERIFICATION']==False:
import warnings
warnings.filterwarnings("ignore", message="Unverified HTTPS request")
# Set up logging
setLoggingFormat(settingsDict)
# Main function
async def main(settingsDict):
    """Application entry point.

    Builds the per-instance tracking state, runs the start-up checks and
    banner/settings output, then loops forever cleaning the download queues
    of every configured *arr instance, sleeping REMOVE_TIMER minutes between
    passes.
    """
    # Adds to settings Dict the instances that are actually configured
    settingsDict['INSTANCES'] = []
    for arrApplication in settingsDict['SUPPORTED_ARR_APPS']:
        if settingsDict[arrApplication + '_URL']:
            settingsDict['INSTANCES'].append(arrApplication)
    # Pre-populates the dictionaries (in classes) that track the items that were already caught as having problems or removed
    defectiveTrackingInstances = {}
    for instance in settingsDict['INSTANCES']:
        defectiveTrackingInstances[instance] = {}
    defective_tracker = Defective_Tracker(defectiveTrackingInstances)
    download_sizes_tracker = Download_Sizes_Tracker({})
    # Get name of arr-instances
    for instance in settingsDict['INSTANCES']:
        settingsDict = await getArrInstanceName(settingsDict, instance)
    # Check outdated
    upgradeChecks(settingsDict)
    # Welcome Message
    showWelcome()
    # Current Settings
    showSettings(settingsDict)
    # Check Minimum Version and if instances are reachable and retrieve qbit cookie
    settingsDict = await instanceChecks(settingsDict)
    # Create qBit protection tag if not existing
    await createQbitProtectionTag(settingsDict)
    # Show Logger Level
    showLoggerLevel(settingsDict)
    # Start Cleaning (runs indefinitely)
    while True:
        logger.verbose('-' * 50)
        # Cache protected (via Tag) and private torrents
        protectedDownloadIDs, privateDowloadIDs = await getProtectedAndPrivateFromQbit(settingsDict)
        # Run script for each instance
        for instance in settingsDict['INSTANCES']:
            await queueCleaner(settingsDict, instance, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs)
        logger.verbose('')
        logger.verbose('Queue clean-up complete!')
        # Wait for the next run
        await asyncio.sleep(settingsDict['REMOVE_TIMER']*60)
    return  # NOTE(review): unreachable — the `while True` loop above never exits
if __name__ == '__main__':
    asyncio.run(main(settingsDict))

View File

@@ -55,7 +55,7 @@ def protectedDownloadCheck(settingsDict, affectedItems, failType, protectedDownl
return affectedItems
async def execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, addToBlocklist, doPrivateTrackerCheck, doProtectedDownloadCheck, doPermittedAttemptsCheck, extraParameters = []):
async def execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, addToBlocklist, doPrivateTrackerCheck, doProtectedDownloadCheck, doPermittedAttemptsCheck, extraParameters = {}):
# Goes over the affected items and performs the checks that are parametrized
try:
# De-duplicates the affected items (one downloadid may be shared by multiple affected items)
@@ -143,8 +143,8 @@ async def remove_download(settingsDict, BASE_URL, API_KEY, affectedItem, failTyp
logger.info('>>> Removing %s download (without removing from torrent client): %s', failType, affectedItem['title'])
# Print out detailed removal messages (if any were added in the jobs)
if removal_messages in affectedItem:
for removal_message in affectedItem.removal_messages:
if 'removal_messages' in affectedItem:
for removal_message in affectedItem['removal_messages']:
logger.info(removal_message)
if not settingsDict['TEST_RUN']:

15
src/utils/trackers.py Normal file
View File

@@ -0,0 +1,15 @@
# Set up classes that allow tracking of items from one loop to the next
class Defective_Tracker:
    """Keeps track of which downloads were already caught as stalled previously."""
    def __init__(self, dict):
        # NOTE(review): the parameter shadows the builtin `dict`; renaming would be
        # cleaner but is left unchanged to keep any keyword callers compatible.
        self.dict = dict
class Download_Sizes_Tracker:
    """Keeps track of the file sizes of the downloads."""
    def __init__(self, dict):
        # NOTE(review): parameter shadows the builtin `dict`; kept for compatibility.
        self.dict = dict
class Deleted_Downloads:
    """Keeps track of which downloads have already been deleted (to not double-delete)."""
    def __init__(self, dict):
        # NOTE(review): parameter shadows the builtin `dict`; kept for compatibility.
        self.dict = dict

View File

@@ -0,0 +1,33 @@
{
"records": [
{
"id": 1,
"downloadId": "A123",
"title": "Sonarr Title 1",
"status": "completed",
"trackedDownloadStatus": "ok",
"trackedDownloadState": "importing",
"statusMessages": []
},
{
"id": 2,
"downloadId": "B123",
"title": "Sonarr Title 2",
"status": "completed",
"trackedDownloadStatus": "warning",
"trackedDownloadState": "importBlocked",
"statusMessages": [
{
"title": "One or more episodes expected in this release were not imported or missing from the release",
"messages": []
},
{
"title": "Sonarr Title 2.mkv",
"messages": [
"Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv"
]
}
]
}
]
}

View File

@@ -0,0 +1,32 @@
{
"records": [
{
"id": 1,
"downloadId": "A123",
"title": "Sonarr Title 1",
"status": "completed",
"trackedDownloadStatus": "warning",
"trackedDownloadState": "importBlocked",
"statusMessages": [
{
"title": "First Message",
"messages": [
"Message 1 - hello world"
]
},
{
"title": "Duplicate of First Message",
"messages": [
"Message 1 - hello world"
]
},
{
"title": "Second of Message",
"messages": [
"Message 2 - goodbye all"
]
}
]
}
]
}

View File

@@ -0,0 +1,60 @@
{
"records": [
{
"id": 1,
"downloadId": "A123",
"title": "Sonarr Title 1",
"status": "completed",
"trackedDownloadStatus": "warning",
"trackedDownloadState": "importPending",
"statusMessages": [
{
"title": "First Message",
"messages": [
"Message 1 - hello world"
]
},
{
"title": "Duplicate of First Message",
"messages": [
"Message 1 - hello world"
]
},
{
"title": "Second of Message",
"messages": [
"Message 2 - goodbye all"
]
}
]
},
{
"id": 2,
"downloadId": "B123",
"title": "Sonarr Title 2",
"status": "completed",
"trackedDownloadStatus": "warning",
"trackedDownloadState": "importFailed",
"statusMessages": [
{
"title": "First Message",
"messages": [
"Message 1 - hello world"
]
},
{
"title": "Duplicate of First Message",
"messages": [
"Message 1 - hello world"
]
},
{
"title": "Second of Message",
"messages": [
"Message 2 - goodbye all"
]
}
]
}
]
}

View File

@@ -0,0 +1,65 @@
import os
os.environ['IS_IN_PYTEST'] = 'true'
import logging
import json
import pytest
from typing import Dict, Set, Any
from unittest.mock import AsyncMock
from src.jobs.remove_failed_imports import remove_failed_imports
# Utility function to load mock data
# Utility function to load mock data
def load_mock_data(file_name):
    """Load and return the parsed JSON fixture stored at *file_name*.

    Args:
        file_name: Path to a JSON file on disk.

    Returns:
        The deserialized JSON content (typically a dict).
    """
    # Explicit encoding avoids platform-dependent default-codec surprises.
    with open(file_name, 'r', encoding='utf-8') as file:
        return json.load(file)
async def mock_get_queue(mock_data):
    """Async stand-in for get_queue: logs the call and echoes *mock_data* back."""
    message = "Mock get_queue called"
    logging.debug(message)
    return mock_data
async def run_test(
    settingsDict: Dict[str, Any],
    expected_removal_messages: Dict[int, Set[str]],
    mock_data_file: str,
    monkeypatch: pytest.MonkeyPatch
) -> None:
    """Shared driver for the remove_failed_imports tests.

    Patches get_queue and execute_checks inside src.jobs.remove_failed_imports,
    runs remove_failed_imports against the JSON fixture in *mock_data_file*,
    then asserts that exactly the queue records keyed in
    *expected_removal_messages* (record id -> expected set of removal-message
    strings) were handed to execute_checks with matching removal messages.
    """
    # Load mock data
    mock_data = load_mock_data(mock_data_file)
    # Create an AsyncMock for execute_checks with side effect
    execute_checks_mock = AsyncMock()
    # Define a side effect function
    def side_effect(*args, **kwargs):
        logging.debug("Mock execute_checks called with kwargs: %s", kwargs)
        # Return the affectedItems from kwargs
        return kwargs.get('affectedItems', [])
    # Attach side effect to the mock
    execute_checks_mock.side_effect = side_effect
    # Create an async mock for get_queue that returns mock_data
    # (NOTE(review): this local name shadows the module-level mock_get_queue helper)
    mock_get_queue = AsyncMock(return_value=mock_data)
    # Patch the methods
    monkeypatch.setattr('src.jobs.remove_failed_imports.get_queue', mock_get_queue)
    monkeypatch.setattr('src.jobs.remove_failed_imports.execute_checks', execute_checks_mock)
    # Call the function
    await remove_failed_imports(settingsDict=settingsDict, BASE_URL='', API_KEY='', NAME='', deleted_downloads=set(), defective_tracker=set(), protectedDownloadIDs=set(), privateDowloadIDs=set())
    # Assertions
    assert execute_checks_mock.called # Ensure the mock was called
    # Assert expected items are there
    args, kwargs = execute_checks_mock.call_args
    affectedItems = kwargs.get('affectedItems', [])
    affectedItems_ids = {item['id'] for item in affectedItems}
    expectedItems_ids = set(expected_removal_messages.keys())
    assert len(affectedItems) == len(expected_removal_messages)
    assert affectedItems_ids == expectedItems_ids
    # Assert all expected messages are there
    for affectedItem in affectedItems:
        assert 'removal_messages' in affectedItem
        assert expected_removal_messages[affectedItem['id']] == set(affectedItem.get('removal_messages', []))

View File

@@ -0,0 +1,39 @@
import pytest
from remove_failed_imports_utils import run_test
mock_data_file = 'tests/jobs/remove_failed_imports/mock_data/mock_data_1.json'
@pytest.mark.asyncio
async def test_with_pattern_one_message(monkeypatch):
    # A configured pattern matches one status message: only record 2 is flagged.
    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['not found in the grabbed release']}
    expected_removal_messages = {
        2: {
            '>>>>> Tracked Download State: importBlocked',
            '>>>>> Status Messages (matching specified patterns):',
            '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv',
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)
@pytest.mark.asyncio
async def test_with_empty_pattern_one_message(monkeypatch):
    # An explicitly empty pattern list behaves like "no patterns": all messages included.
    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': []}
    expected_removal_messages = {
        2: {
            '>>>>> Tracked Download State: importBlocked',
            '>>>>> Status Messages (All):',
            '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv',
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)
@pytest.mark.asyncio
async def test_without_pattern_one_message(monkeypatch):
    # Missing FAILED_IMPORT_MESSAGE_PATTERNS key also means "include all messages".
    settingsDict = {}
    expected_removal_messages = {
        2: {
            '>>>>> Tracked Download State: importBlocked',
            '>>>>> Status Messages (All):',
            '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv',
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)

View File

@@ -0,0 +1,41 @@
import pytest
from remove_failed_imports_utils import run_test
mock_data_file = 'tests/jobs/remove_failed_imports/mock_data/mock_data_2.json'
@pytest.mark.asyncio
async def test_multiple_status_messages_multiple_pattern(monkeypatch):
    # Two patterns match across several status messages; duplicates are collapsed.
    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world', 'all']}
    expected_removal_messages = {
        1: {
            '>>>>> Tracked Download State: importBlocked',
            '>>>>> Status Messages (matching specified patterns):',
            '>>>>> - Message 1 - hello world',
            '>>>>> - Message 2 - goodbye all',
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)
@pytest.mark.asyncio
async def test_multiple_status_messages_single_pattern(monkeypatch):
    # A single pattern only pulls in the messages it matches.
    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world']}
    expected_removal_messages = {
        1: {
            '>>>>> Tracked Download State: importBlocked',
            '>>>>> Status Messages (matching specified patterns):',
            '>>>>> - Message 1 - hello world'
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)
@pytest.mark.asyncio
async def test_multiple_status_messages_no_pattern(monkeypatch):
    # No pattern configured: all (deduplicated) status messages are listed.
    settingsDict = {}
    expected_removal_messages = {
        1: {
            '>>>>> Tracked Download State: importBlocked',
            '>>>>> Status Messages (All):',
            '>>>>> - Message 1 - hello world',
            '>>>>> - Message 2 - goodbye all',
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)

View File

@@ -0,0 +1,59 @@
import pytest
from remove_failed_imports_utils import run_test
mock_data_file = 'tests/jobs/remove_failed_imports/mock_data/mock_data_3.json'
@pytest.mark.asyncio
async def test_multiple_statuses_multiple_pattern(monkeypatch):
    # Both records (importPending and importFailed) match both patterns.
    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world', 'all']}
    expected_removal_messages = {
        1: {
            '>>>>> Tracked Download State: importPending',
            '>>>>> Status Messages (matching specified patterns):',
            '>>>>> - Message 1 - hello world',
            '>>>>> - Message 2 - goodbye all',
        },
        2: {
            '>>>>> Tracked Download State: importFailed',
            '>>>>> Status Messages (matching specified patterns):',
            '>>>>> - Message 1 - hello world',
            '>>>>> - Message 2 - goodbye all',
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)
@pytest.mark.asyncio
async def test_multiple_statuses_single_pattern(monkeypatch):
    # Both records match, but only the message containing 'world' is reported.
    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world']}
    expected_removal_messages = {
        1: {
            '>>>>> Tracked Download State: importPending',
            '>>>>> Status Messages (matching specified patterns):',
            '>>>>> - Message 1 - hello world'
        },
        2: {
            '>>>>> Tracked Download State: importFailed',
            '>>>>> Status Messages (matching specified patterns):',
            '>>>>> - Message 1 - hello world'
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)
@pytest.mark.asyncio
async def test_multiple_statuses_no_pattern(monkeypatch):
    # No pattern configured: every record gets all (deduplicated) messages.
    settingsDict = {}
    expected_removal_messages = {
        1: {
            '>>>>> Tracked Download State: importPending',
            '>>>>> Status Messages (All):',
            '>>>>> - Message 1 - hello world',
            '>>>>> - Message 2 - goodbye all',
        },
        2: {
            '>>>>> Tracked Download State: importFailed',
            '>>>>> Status Messages (All):',
            '>>>>> - Message 1 - hello world',
            '>>>>> - Message 2 - goodbye all',
        }
    }
    await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)

View File

@@ -0,0 +1,11 @@
{
"id": 1,
"downloadId": "A",
"title": "Sonarr Title 1",
"removal_messages": [
">>>>> Tracked Download State: importBlocked",
">>>>> Status Messages (matching specified patterns):",
">>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv",
">>>>> - And yet another message"
]
}

View File

@@ -0,0 +1,52 @@
import os
os.environ['IS_IN_PYTEST'] = 'true'
import logging
import json
import pytest
from typing import Dict, Set, Any
from src.utils.shared import remove_download
from src.utils.trackers import Deleted_Downloads
# Utility function to load mock data
# Utility function to load mock data
def load_mock_data(file_name):
    """Load and return the parsed JSON fixture stored at *file_name*.

    Args:
        file_name: Path to a JSON file on disk.

    Returns:
        The deserialized JSON content (typically a dict).
    """
    # Explicit encoding avoids platform-dependent default-codec surprises.
    with open(file_name, 'r', encoding='utf-8') as file:
        return json.load(file)
async def mock_rest_delete(*args, **kwargs) -> None:
    """Async stand-in for rest_delete used via monkeypatch.

    Accepts any positional/keyword arguments so it can transparently replace
    the real rest_delete call, logs the invocation, and returns None.
    """
    # Bug fix: the original referenced an undefined name `logger` (this module
    # only imports `logging`) and declared no parameters, so it would raise a
    # NameError/TypeError if the patched rest_delete were ever invoked.
    logging.debug("Mock rest_delete called with args=%s kwargs=%s", args, kwargs)
async def run_test(
    settingsDict: Dict[str, Any],
    expected_removal_messages: Set[str],
    failType: str,
    removeFromClient: bool,
    mock_data_file: str,
    monkeypatch: pytest.MonkeyPatch,
    caplog: pytest.LogCaptureFixture
) -> None:
    """Shared driver for the remove_download tests.

    Loads a queue-item fixture from *mock_data_file*, patches rest_delete,
    calls remove_download, and asserts that (a) exactly the INFO log lines in
    *expected_removal_messages* were emitted and (b) the item's downloadId was
    recorded in the Deleted_Downloads tracker.
    """
    # Load mock data
    affectedItem = load_mock_data(mock_data_file)
    # Mock the `rest_delete` function
    monkeypatch.setattr('src.utils.shared.rest_delete', mock_rest_delete)
    # Call the function
    with caplog.at_level(logging.INFO):
        # Call the function and assert no exceptions
        try:
            deleted_downloads = Deleted_Downloads([])
            await remove_download(settingsDict=settingsDict, BASE_URL='', API_KEY='', affectedItem=affectedItem, failType=failType, addToBlocklist=True, deleted_downloads=deleted_downloads, removeFromClient=removeFromClient)
        except Exception as e:
            pytest.fail(f"remove_download raised an exception: {e}")
    # Assertions:
    # Check that expected log messages are in the captured log
    log_messages = {record.message for record in caplog.records if record.levelname == 'INFO'}
    assert expected_removal_messages == log_messages
    # Check that the affectedItem's downloadId was added to deleted_downloads
    assert affectedItem['downloadId'] in deleted_downloads.dict

View File

@@ -0,0 +1,33 @@
import pytest
from remove_download_utils import run_test
# Parameters identical across all tests
mock_data_file = 'tests/utils/remove_download/mock_data/mock_data_1.json'
failType = 'failed import'
@pytest.mark.asyncio
async def test_removal_with_removal_messages(monkeypatch, caplog):
    # removeFromClient=True: standard removal message plus the item's removal_messages.
    settingsDict = {'TEST_RUN': True}
    removeFromClient = True
    expected_removal_messages = {
        '>>> Removing failed import download: Sonarr Title 1',
        '>>>>> Tracked Download State: importBlocked',
        '>>>>> Status Messages (matching specified patterns):',
        '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv',
        '>>>>> - And yet another message'
    }
    await run_test(settingsDict=settingsDict, expected_removal_messages=expected_removal_messages, failType=failType, removeFromClient=removeFromClient, mock_data_file=mock_data_file, monkeypatch=monkeypatch, caplog=caplog)
@pytest.mark.asyncio
async def test_schizophrenic_removal_with_removal_messages(monkeypatch, caplog):
    # removeFromClient=False: removal message notes the torrent client is untouched.
    settingsDict = {'TEST_RUN': True}
    removeFromClient = False
    expected_removal_messages = {
        '>>> Removing failed import download (without removing from torrent client): Sonarr Title 1',
        '>>>>> Tracked Download State: importBlocked',
        '>>>>> Status Messages (matching specified patterns):',
        '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv',
        '>>>>> - And yet another message'
    }
    await run_test(settingsDict=settingsDict, expected_removal_messages=expected_removal_messages, failType=failType, removeFromClient=removeFromClient, mock_data_file=mock_data_file, monkeypatch=monkeypatch, caplog=caplog)