Bug Fixes & Automated Testing - remove_failed_imports.py #patch

This commit is contained in:
Benjamin Harder
2024-08-03 00:17:01 +02:00
parent d4c146a7d1
commit db50942ee8
21 changed files with 582 additions and 45 deletions

View File

@@ -10,12 +10,7 @@ from src.jobs.remove_orphans import remove_orphans
from src.jobs.remove_slow import remove_slow
from src.jobs.remove_stalled import remove_stalled
from src.jobs.remove_unmonitored import remove_unmonitored
class Deleted_Downloads:
    # Keeps track of which downloads have already been deleted (to not double-delete)
    def __init__(self, dict):
        # NOTE(review): parameter name shadows the built-in `dict`; kept as-is so
        # keyword callers (if any) keep working
        self.dict = dict
from src.utils.trackers import Deleted_Downloads
async def queueCleaner(settingsDict, arr_type, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs):
# Read out correct instance depending on radarr/sonarr flag

View File

@@ -1,4 +1,4 @@
from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download)
from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, execute_checks)
import sys, os, traceback
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
async def remove_failed_imports(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
    # NOTE(review): trailing parameter order (protectedDownloadIDs, privateDowloadIDs)
    # reconstructed from queueCleaner's signature - confirm against the caller.
    # Detects downloads that completed but failed their import, collects the matching
    # status messages for logging, and removes the affected items (adding them to the
    # blocklist). Returns the number of removed items, or 0 when the queue is empty.
    try:
        failType = 'failed import'
        queue = await get_queue(BASE_URL, API_KEY)
        logger.debug('remove_failed_imports/queue IN: %s', formattedQueueInfo(queue))
        if not queue: return 0
        # Find items affected
        affectedItems = []
        # Check if any message patterns have been specified (optional setting; use
        # .get so a missing key does not raise KeyError)
        patterns = settingsDict.get('FAILED_IMPORT_MESSAGE_PATTERNS', [])
        if not patterns:  # If patterns is empty or not present
            patterns = None
        for queueItem in queue['records']:
            if 'status' in queueItem \
                    and 'trackedDownloadStatus' in queueItem \
                    and 'trackedDownloadState' in queueItem \
                    and 'statusMessages' in queueItem:
                removal_messages = []
                if queueItem['status'] == 'completed' \
                        and queueItem['trackedDownloadStatus'] == 'warning' \
                        and queueItem['trackedDownloadState'] in {'importPending', 'importFailed', 'importBlocked'}:
                    if not patterns:
                        # No patterns defined - including all status messages in the removal_messages
                        removal_messages.append('>>>>> Status Messages (All):')
                        for statusMessage in queueItem['statusMessages']:
                            removal_messages.extend(f">>>>> - {message}" for message in statusMessage.get('messages', []))
                    else:
                        # Specific patterns defined - only removing if any of these are matched
                        for statusMessage in queueItem['statusMessages']:
                            for message in statusMessage.get('messages', []):
                                if any(pattern in message for pattern in patterns):
                                    removal_messages.append(f">>>>> - {message}")
                        if removal_messages:
                            removal_messages.insert(0, '>>>>> Status Messages (matching specified patterns):')
                if removal_messages:
                    # dict.fromkeys de-duplicates while preserving insertion order
                    removal_messages = list(dict.fromkeys(removal_messages))
                    removal_messages.insert(0, '>>>>> Tracked Download State: ' + queueItem['trackedDownloadState'])
                    queueItem['removal_messages'] = removal_messages
                    affectedItems.append(queueItem)
        affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
                            addToBlocklist = True,
                            doPrivateTrackerCheck = False,
                            doProtectedDownloadCheck = True,
                            doPermittedAttemptsCheck = False,
                            extraParameters = {'keepTorrentForPrivateTrackers': True}
                            )
        return len(affectedItems)
    except Exception as error:
        errorDetails(NAME, error)

77
src/utils/main.py Normal file
View File

@@ -0,0 +1,77 @@
# Import Libraries
import asyncio
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
import json
# Import Functions
from config.definitions import settingsDict
from src.utils.loadScripts import *
from src.decluttarr import queueCleaner
from src.utils.rest import rest_get, rest_post
from src.utils.trackers import Defective_Tracker, Download_Sizes_Tracker
# Hide SSL Verification Warnings when verification is disabled in the settings
if settingsDict['SSL_VERIFICATION']==False:
    import warnings
    warnings.filterwarnings("ignore", message="Unverified HTTPS request")
# Set up logging (format/level taken from the settings dictionary)
setLoggingFormat(settingsDict)
# Main function
async def main(settingsDict):
    """Run start-up checks, then periodically clean the queues of all configured *arr instances."""
    # Keep only the *arr applications for which a URL has actually been configured
    settingsDict['INSTANCES'] = [
        app for app in settingsDict['SUPPORTED_ARR_APPS'] if settingsDict[app + '_URL']
    ]
    # Pre-populate the trackers that carry problem/removal state from one loop to the next
    defective_tracker = Defective_Tracker({name: {} for name in settingsDict['INSTANCES']})
    download_sizes_tracker = Download_Sizes_Tracker({})
    # Resolve the display name of every configured arr-instance
    for name in settingsDict['INSTANCES']:
        settingsDict = await getArrInstanceName(settingsDict, name)
    # Warn if the installed version is outdated
    upgradeChecks(settingsDict)
    # Start-up output: welcome banner and the effective settings
    showWelcome()
    showSettings(settingsDict)
    # Verify minimum versions / reachability and retrieve the qBittorrent cookie
    settingsDict = await instanceChecks(settingsDict)
    # Ensure the qBittorrent protection tag exists
    await createQbitProtectionTag(settingsDict)
    # Announce the active logger level
    showLoggerLevel(settingsDict)
    # Main loop: clean every instance, then wait until the next run is due
    while True:
        logger.verbose('-' * 50)
        # Cache torrents that are protected (via tag) or sit on private trackers
        protectedIDs, privateIDs = await getProtectedAndPrivateFromQbit(settingsDict)
        for name in settingsDict['INSTANCES']:
            await queueCleaner(settingsDict, name, defective_tracker, download_sizes_tracker, protectedIDs, privateIDs)
        logger.verbose('')
        logger.verbose('Queue clean-up complete!')
        await asyncio.sleep(settingsDict['REMOVE_TIMER'] * 60)
    return
# Script entry point: run the async main loop with the settings loaded from config
if __name__ == '__main__':
    asyncio.run(main(settingsDict))

View File

@@ -55,7 +55,7 @@ def protectedDownloadCheck(settingsDict, affectedItems, failType, protectedDownl
return affectedItems
async def execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, addToBlocklist, doPrivateTrackerCheck, doProtectedDownloadCheck, doPermittedAttemptsCheck, extraParameters = []):
async def execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, addToBlocklist, doPrivateTrackerCheck, doProtectedDownloadCheck, doPermittedAttemptsCheck, extraParameters = {}):
# Goes over the affected items and performs the checks that are parametrized
try:
# De-duplicates the affected items (one downloadid may be shared by multiple affected items)
@@ -143,8 +143,8 @@ async def remove_download(settingsDict, BASE_URL, API_KEY, affectedItem, failTyp
logger.info('>>> Removing %s download (without removing from torrent client): %s', failType, affectedItem['title'])
# Print out detailed removal messages (if any were added in the jobs)
if removal_messages in affectedItem:
for removal_message in affectedItem.removal_messages:
if 'removal_messages' in affectedItem:
for removal_message in affectedItem['removal_messages']:
logger.info(removal_message)
if not settingsDict['TEST_RUN']:

15
src/utils/trackers.py Normal file
View File

@@ -0,0 +1,15 @@
# Set up classes that allow tracking of items from one loop to the next
class Defective_Tracker:
    # Keeps track of which downloads were already caught as stalled previously
    def __init__(self, dict):
        # NOTE(review): parameter name shadows the built-in `dict`; kept as-is so
        # keyword callers (if any) keep working
        self.dict = dict
class Download_Sizes_Tracker:
    # Keeps track of the file sizes of the downloads
    def __init__(self, dict):
        # NOTE(review): parameter name shadows the built-in `dict`; kept as-is so
        # keyword callers (if any) keep working
        self.dict = dict
class Deleted_Downloads:
    # Keeps track of which downloads have already been deleted (to not double-delete)
    def __init__(self, dict):
        # NOTE(review): parameter name shadows the built-in `dict`; kept as-is so
        # keyword callers (if any) keep working
        self.dict = dict