Merge pull request #70 from ManiMatter/dev

Fix dict + dict issue // add compatibility for delay profiles // code cleanup
Authored by ManiMatter on 2024-03-30 10:17:01 +01:00; committed by GitHub
14 changed files with 315 additions and 274 deletions


@@ -73,6 +73,7 @@ jobs:
--build-arg SHORT_COMMIT_ID=$SHORT_COMMIT_ID \
--push \
# "annotations": { "org.opencontainers.image.description": "DESCRIPTION" }
# - name: "Delete untagged versions"
# uses: actions/delete-package-versions@v4


@@ -135,8 +135,8 @@ if READARR_URL: READARR_URL += '/api/v1'
if QBITTORRENT_URL: QBITTORRENT_URL += '/api/v2'
########### Add Variables to Dictionary
settings_dict = {}
settingsDict = {}
for var_name in dir():
if var_name.isupper():
settings_dict[var_name] = locals()[var_name]
settingsDict[var_name] = locals()[var_name]
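
The loop above collects every uppercase module-level variable into settingsDict. A minimal, runnable sketch of the same pattern, with purely illustrative setting names:

# Illustrative only; mirrors the dir()/isupper() collection pattern in config/config.py
LOG_LEVEL = 'INFO'            # uppercase names are picked up as settings
REMOVE_TIMER = 10
_internal_helper = object()   # lowercase/underscore names are ignored

settingsDict = {}
for var_name in dir():
    if var_name.isupper():
        settingsDict[var_name] = locals()[var_name]

print(settingsDict)           # {'LOG_LEVEL': 'INFO', 'REMOVE_TIMER': 10}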

main.py (233 changed lines)

@@ -1,30 +1,22 @@
########### Import Libraries
import asyncio
import asyncio
import logging, verboselogs
from src.utils.rest import rest_get, rest_post
from requests.exceptions import RequestException
import json
from dateutil.relativedelta import relativedelta as rd
from config.config import settings_dict
logger = verboselogs.VerboseLogger(__name__)
########### Import Functions
from config.config import settingsDict
from src.utils.loadScripts import *
from src.decluttarr import queueCleaner
#print(json.dumps(settings_dict,indent=4))
import requests
import platform
from packaging import version
from src.utils.rest import rest_get, rest_post
# Hide SSL Verification Warnings
if settings_dict['SSL_VERIFICATION']==False:
if settingsDict['SSL_VERIFICATION']==False:
import warnings
warnings.filterwarnings("ignore", message="Unverified HTTPS request")
########### Enabling Logging
# Set up logging
log_level_num=logging.getLevelName(settings_dict['LOG_LEVEL'])
logger = verboselogs.VerboseLogger(__name__)
logging.basicConfig(
format=('' if settings_dict['IS_IN_DOCKER'] else '%(asctime)s ') + ('[%(levelname)-7s]' if settings_dict['LOG_LEVEL']=='VERBOSE' else '[%(levelname)s]') + ': %(message)s',
level=log_level_num
)
setLoggingFormat(settingsDict)
class Defective_Tracker:
# Keeps track of which downloads were already caught as stalled previously
@@ -36,194 +28,63 @@ class Download_Sizes_Tracker:
self.dict = dict
# Main function
async def main():
# Get name of Radarr / Sonarr instances
try:
if settings_dict['RADARR_URL']:
settings_dict['RADARR_NAME'] = (await rest_get(settings_dict['RADARR_URL']+'/system/status', settings_dict['RADARR_KEY']))['instanceName']
except:
settings_dict['RADARR_NAME'] = 'Radarr'
try:
if settings_dict['SONARR_URL']:
settings_dict['SONARR_NAME'] = (await rest_get(settings_dict['SONARR_URL']+'/system/status', settings_dict['SONARR_KEY']))['instanceName']
except:
settings_dict['SONARR_NAME'] = 'Sonarr'
async def main(settingsDict):
# Get name of arr-instances
for instance in settingsDict['INSTANCES']:
settingsDict = await getArrInstanceName(settingsDict, instance)
try:
if settings_dict['LIDARR_URL']:
settings_dict['LIDARR_NAME'] = (await rest_get(settings_dict['LIDARR_URL']+'/system/status', settings_dict['LIDARR_KEY']))['instanceName']
except:
settings_dict['LIDARR_NAME'] = 'Lidarr'
# Display current settings when loading script
showSettings(settingsDict)
try:
if settings_dict['READARR_URL']:
settings_dict['READARR_NAME'] = (await rest_get(settings_dict['READARR_URL']+'/system/status', settings_dict['READARR_KEY']))['instanceName']
except:
settings_dict['READARR_NAME'] = 'Readarr'
# Check Minimum Version and if instances are reachable and retrieve qbit cookie
settingsDict['RADARR_MIN_VERSION'] = '5.3.6.8608'
settingsDict['SONARR_MIN_VERSION'] = '4.0.1.1131'
settingsDict['LIDARR_MIN_VERSION'] = None
settingsDict['READARR_MIN_VERSION'] = None
settingsDict['QBITTORRENT_MIN_VERSION'] = '4.3.0'
settingsDict = await instanceChecks(settingsDict)
# Print Settings
fmt = '{0.days} days {0.hours} hours {0.minutes} minutes'
logger.info('#' * 50)
logger.info('Decluttarr - Application Started!')
logger.info('')
logger.info('*** Current Settings ***')
logger.info('Version: %s', settings_dict['IMAGE_TAG'])
logger.info('Commit: %s', settings_dict['SHORT_COMMIT_ID'])
logger.info('')
logger.info('%s | Removing failed downloads', str(settings_dict['REMOVE_FAILED']))
logger.info('%s | Removing downloads missing metadata', str(settings_dict['REMOVE_METADATA_MISSING']))
logger.info('%s | Removing downloads missing files', str(settings_dict['REMOVE_MISSING_FILES']))
logger.info('%s | Removing orphan downloads', str(settings_dict['REMOVE_ORPHANS']))
logger.info('%s | Removing slow downloads', str(settings_dict['REMOVE_SLOW']))
logger.info('%s | Removing stalled downloads', str(settings_dict['REMOVE_STALLED']))
logger.info('%s | Removing downloads belonging to unmonitored TV shows/movies', str(settings_dict['REMOVE_UNMONITORED']))
logger.info('')
logger.info('Running every: %s', fmt.format(rd(minutes=settings_dict['REMOVE_TIMER'])))
if settings_dict['REMOVE_SLOW']:
logger.info('Minimum speed enforced: %s KB/s', str(settings_dict['MIN_DOWNLOAD_SPEED']))
logger.info('Permitted number of times before stalled/missing metadata/slow downloads are removed: %s', str(settings_dict['PERMITTED_ATTEMPTS']))
if settings_dict['QBITTORRENT_URL']:
logger.info('Downloads with this tag will be skipped: \"%s\"', settings_dict['NO_STALLED_REMOVAL_QBIT_TAG'])
logger.info('Private Trackers will be skipped: %s', settings_dict['IGNORE_PRIVATE_TRACKERS'])
logger.info('')
logger.info('*** Configured Instances ***')
if settings_dict['RADARR_URL']: logger.info('%s: %s', settings_dict['RADARR_NAME'], settings_dict['RADARR_URL'])
if settings_dict['SONARR_URL']: logger.info('%s: %s', settings_dict['SONARR_NAME'], settings_dict['SONARR_URL'])
if settings_dict['LIDARR_URL']: logger.info('%s: %s', settings_dict['LIDARR_NAME'], settings_dict['LIDARR_URL'])
if settings_dict['READARR_URL']: logger.info('%s: %s', settings_dict['READARR_NAME'], settings_dict['READARR_URL'])
if settings_dict['QBITTORRENT_URL']: logger.info('qBittorrent: %s', settings_dict['QBITTORRENT_URL'])
# Create qBit protection tag if not existing
await createQbitProtectionTag(settingsDict)
logger.info('')
logger.info('*** Check Instances ***')
if settings_dict['RADARR_URL']:
error_occured = False
try:
await asyncio.get_event_loop().run_in_executor(None, lambda: requests.get(settings_dict['RADARR_URL']+'/system/status', params=None, headers={'X-Api-Key': settings_dict['RADARR_KEY']}, verify=settings_dict['SSL_VERIFICATION']))
except Exception as error:
error_occured = True
logger.error('-- | %s *** Error: %s ***', settings_dict['RADARR_NAME'], error)
if not error_occured:
radarr_version = (await rest_get(settings_dict['RADARR_URL']+'/system/status', settings_dict['RADARR_KEY']))['version']
if version.parse(radarr_version) < version.parse('5.3.6.8608'):
error_occured = True
logger.error('-- | %s *** Error: %s ***', settings_dict['RADARR_NAME'], 'Please update Radarr to at least version 5.3.6.8608. Current version: ' + radarr_version)
if not error_occured:
logger.info('OK | %s', settings_dict['RADARR_NAME'])
# Show Logger settings
showLoggerSettings(settingsDict)
if settings_dict['SONARR_URL']:
try:
await asyncio.get_event_loop().run_in_executor(None, lambda: requests.get(settings_dict['SONARR_URL']+'/system/status', params=None, headers={'X-Api-Key': settings_dict['SONARR_KEY']}, verify=settings_dict['SSL_VERIFICATION']))
except Exception as error:
error_occured = True
logger.error('-- | %s *** Error: %s ***', settings_dict['SONARR_NAME'], error)
if not error_occured:
sonarr_version = (await rest_get(settings_dict['SONARR_URL']+'/system/status', settings_dict['SONARR_KEY']))['version']
if version.parse(sonarr_version) < version.parse('4.0.1.1131'):
error_occured = True
logger.error('-- | %s *** Error: %s ***', settings_dict['SONARR_NAME'], 'Please update Sonarr to at least version 4.0.1.1131. Current version: ' + sonarr_version)
if not error_occured:
logger.info('OK | %s', settings_dict['SONARR_NAME'])
if settings_dict['LIDARR_URL']:
try:
await asyncio.get_event_loop().run_in_executor(None, lambda: requests.get(settings_dict['LIDARR_URL']+'/system/status', params=None, headers={'X-Api-Key': settings_dict['LIDARR_KEY']}, verify=settings_dict['SSL_VERIFICATION']))
logger.info('OK | %s', settings_dict['LIDARR_NAME'])
except Exception as error:
error_occured = True
logger.error('-- | %s *** Error: %s ***', settings_dict['LIDARR_NAME'], error)
if settings_dict['READARR_URL']:
try:
await asyncio.get_event_loop().run_in_executor(None, lambda: requests.get(settings_dict['READARR_URL']+'/system/status', params=None, headers={'X-Api-Key': settings_dict['READARR_KEY']}, verify=settings_dict['SSL_VERIFICATION']))
logger.info('OK | %s', settings_dict['READARR_NAME'])
except Exception as error:
error_occured = True
logger.error('-- | %s *** Error: %s ***', settings_dict['READARR_NAME'], error)
if settings_dict['QBITTORRENT_URL']:
# Checking if qbit can be reached, and checking if version is OK
try:
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.post(settings_dict['QBITTORRENT_URL']+'/auth/login', data={'username': settings_dict['QBITTORRENT_USERNAME'], 'password': settings_dict['QBITTORRENT_PASSWORD']}, headers={'content-type': 'application/x-www-form-urlencoded'}, verify=settings_dict['SSL_VERIFICATION']))
if response.text == 'Fails.':
raise ConnectionError('Login failed.')
response.raise_for_status()
settings_dict['QBIT_COOKIE'] = {'SID': response.cookies['SID']}
except Exception as error:
error_occured = True
logger.error('-- | %s *** Error: %s / Reponse: %s ***', 'qBittorrent', error, response.text)
if not error_occured:
qbit_version = await rest_get(settings_dict['QBITTORRENT_URL']+'/app/version',cookies=settings_dict['QBIT_COOKIE'])
qbit_version = qbit_version[1:] # version without _v
if version.parse(qbit_version) < version.parse('4.3.0'):
error_occured = True
logger.error('-- | %s *** Error: %s ***', 'qBittorrent', 'Please update qBittorrent to at least version 4.3.0. Current version: ' + qbit_version)
if not error_occured:
logger.info('OK | %s', 'qBittorrent')
if error_occured:
logger.warning('At least one instance was not reachable. Waiting for 60 seconds, then exiting Decluttarr.')
await asyncio.sleep(60)
exit()
logger.info('')
logger.info('#' * 50)
if settings_dict['LOG_LEVEL'] == 'INFO':
logger.info('LOG_LEVEL = INFO: Only logging changes (switch to VERBOSE for more info)')
else:
logger.info(f'')
if settings_dict['TEST_RUN']:
logger.info(f'*'* 50)
logger.info(f'*'* 50)
logger.info(f'')
logger.info(f'!! TEST_RUN FLAG IS SET !!')
logger.info(f'NO UPDATES/DELETES WILL BE PERFORMED')
logger.info(f'')
logger.info(f'*'* 50)
logger.info(f'*'* 50)
# Check if Qbit Tag exists:
if settings_dict['QBITTORRENT_URL']:
current_tags = await rest_get(settings_dict['QBITTORRENT_URL']+'/torrents/tags',cookies=settings_dict['QBIT_COOKIE'])
if not settings_dict['NO_STALLED_REMOVAL_QBIT_TAG'] in current_tags:
if settings_dict['QBITTORRENT_URL']:
logger.info('Creating tag in qBittorrent: %s', settings_dict['NO_STALLED_REMOVAL_QBIT_TAG'])
if not settings_dict['TEST_RUN']:
await rest_post(url=settings_dict['QBITTORRENT_URL']+'/torrents/createTags', data={'tags': settings_dict['NO_STALLED_REMOVAL_QBIT_TAG']}, headers={'content-type': 'application/x-www-form-urlencoded'}, cookies=settings_dict['QBIT_COOKIE'])
# Start application
# Start Cleaning
while True:
logger.verbose('-' * 50)
# Cache protected (via Tag) and private torrents
protectedDownloadIDs = []
privateDowloadIDs = []
if settings_dict['QBITTORRENT_URL']:
protectedDowloadItems = await rest_get(settings_dict['QBITTORRENT_URL']+'/torrents/info',params={'tag': settings_dict['NO_STALLED_REMOVAL_QBIT_TAG']}, cookies=settings_dict['QBIT_COOKIE'] )
if settingsDict['QBITTORRENT_URL']:
protectedDowloadItems = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/info',params={'tag': settingsDict['NO_STALLED_REMOVAL_QBIT_TAG']}, cookies=settingsDict['QBIT_COOKIE'] )
logger.debug('main/protectedDowloadItems: %s', str(protectedDowloadItems))
protectedDownloadIDs = [str.upper(item['hash']) for item in protectedDowloadItems]
if settings_dict['IGNORE_PRIVATE_TRACKERS']:
privateDowloadItems = await rest_get(settings_dict['QBITTORRENT_URL']+'/torrents/info',params={}, cookies=settings_dict['QBIT_COOKIE'] )
if settingsDict['IGNORE_PRIVATE_TRACKERS']:
privateDowloadItems = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/info',params={}, cookies=settingsDict['QBIT_COOKIE'] )
privateDowloadIDs = [str.upper(item['hash']) for item in privateDowloadItems if item.get('is_private', False)]
if settings_dict['RADARR_URL']: await queueCleaner(settings_dict, 'radarr', defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs)
if settings_dict['SONARR_URL']: await queueCleaner(settings_dict, 'sonarr', defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs)
if settings_dict['LIDARR_URL']: await queueCleaner(settings_dict, 'lidarr', defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs)
if settings_dict['READARR_URL']: await queueCleaner(settings_dict, 'readarr', defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs)
# Run script for each instance
for instance in settingsDict['INSTANCES']:
await queueCleaner(settingsDict, instance, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs)
logger.verbose('')
logger.verbose('Queue clean-up complete!')
await asyncio.sleep(settings_dict['REMOVE_TIMER']*60)
# Wait for the next run
await asyncio.sleep(settingsDict['REMOVE_TIMER']*60)
return
if __name__ == '__main__':
instances = {settings_dict['RADARR_URL']: {}} if settings_dict['RADARR_URL'] else {} + \
{settings_dict['SONARR_URL']: {}} if settings_dict['SONARR_URL'] else {} + \
{settings_dict['LIDARR_URL']: {}} if settings_dict['LIDARR_URL'] else {} + \
{settings_dict['READARR_URL']: {}} if settings_dict['READARR_URL'] else {}
defective_tracker = Defective_Tracker(instances)
arrApplications = ['RADARR', 'SONARR', 'LIDARR', 'READARR']
defectiveTrackingInstances = {}
settingsDict['INSTANCES'] = []
for arrApplication in arrApplications:
if settingsDict[arrApplication + '_URL']:
settingsDict['INSTANCES'].append(arrApplication)
defectiveTrackingInstances[arrApplication] = {}
defective_tracker = Defective_Tracker(defectiveTrackingInstances)
download_sizes_tracker = Download_Sizes_Tracker({})
asyncio.run(main())
asyncio.run(main(settingsDict))
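
The 'dict + dict' issue named in the commit title comes from the old __main__ block above: the chained conditional expressions fall back to concatenating dicts with +, which Python dicts do not support, so depending on which URLs are set the else branch evaluates {} + {...} and raises a TypeError at startup. A small sketch of the failure and of the replacement loop (URLs are hypothetical):

# Illustrative only; URLs and keys are placeholders.
radarr_url = None
sonarr_url = 'http://sonarr:8989/api/v3'

# Old pattern: the else-branch evaluates {} + {...}, and dicts cannot be added
try:
    instances = {radarr_url: {}} if radarr_url else {} + \
                {sonarr_url: {}} if sonarr_url else {}
except TypeError as err:
    print(err)   # unsupported operand type(s) for +: 'dict' and 'dict'

# New pattern, as in the updated __main__ block: build both structures in a loop
settingsDict = {'RADARR_URL': radarr_url, 'SONARR_URL': sonarr_url,
                'LIDARR_URL': None, 'READARR_URL': None}
arrApplications = ['RADARR', 'SONARR', 'LIDARR', 'READARR']
defectiveTrackingInstances = {}
settingsDict['INSTANCES'] = []
for arrApplication in arrApplications:
    if settingsDict[arrApplication + '_URL']:
        settingsDict['INSTANCES'].append(arrApplication)
        defectiveTrackingInstances[arrApplication] = {}

print(settingsDict['INSTANCES'])    # ['SONARR']
print(defectiveTrackingInstances)   # {'SONARR': {}}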


@@ -2,13 +2,13 @@
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
from src.utils.shared import (errorDetails, get_queue)
from src.remove_failed import remove_failed
from src.remove_metadata_missing import remove_metadata_missing
from src.remove_missing_files import remove_missing_files
from src.remove_orphans import remove_orphans
from src.remove_slow import remove_slow
from src.remove_stalled import remove_stalled
from src.remove_unmonitored import remove_unmonitored
from src.jobs.remove_failed import remove_failed
from src.jobs.remove_metadata_missing import remove_metadata_missing
from src.jobs.remove_missing_files import remove_missing_files
from src.jobs.remove_orphans import remove_orphans
from src.jobs.remove_slow import remove_slow
from src.jobs.remove_stalled import remove_stalled
from src.jobs.remove_unmonitored import remove_unmonitored
class Deleted_Downloads:
# Keeps track of which downloads have already been deleted (to not double-delete)
@@ -16,28 +16,28 @@ class Deleted_Downloads:
self.dict = dict
async def queueCleaner(settings_dict, arr_type, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs):
async def queueCleaner(settingsDict, arr_type, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs):
# Read out correct instance depending on radarr/sonarr flag
run_dict = {}
if arr_type == 'radarr':
BASE_URL = settings_dict['RADARR_URL']
API_KEY = settings_dict['RADARR_KEY']
NAME = settings_dict['RADARR_NAME']
if arr_type == 'RADARR':
BASE_URL = settingsDict['RADARR_URL']
API_KEY = settingsDict['RADARR_KEY']
NAME = settingsDict['RADARR_NAME']
full_queue_param = 'includeUnknownMovieItems'
elif arr_type == 'sonarr':
BASE_URL = settings_dict['SONARR_URL']
API_KEY = settings_dict['SONARR_KEY']
NAME = settings_dict['SONARR_NAME']
elif arr_type == 'SONARR':
BASE_URL = settingsDict['SONARR_URL']
API_KEY = settingsDict['SONARR_KEY']
NAME = settingsDict['SONARR_NAME']
full_queue_param = 'includeUnknownSeriesItems'
elif arr_type == 'lidarr':
BASE_URL = settings_dict['LIDARR_URL']
API_KEY = settings_dict['LIDARR_KEY']
NAME = settings_dict['LIDARR_NAME']
elif arr_type == 'LIDARR':
BASE_URL = settingsDict['LIDARR_URL']
API_KEY = settingsDict['LIDARR_KEY']
NAME = settingsDict['LIDARR_NAME']
full_queue_param = 'includeUnknownArtistItems'
elif arr_type == 'readarr':
BASE_URL = settings_dict['READARR_URL']
API_KEY = settings_dict['READARR_KEY']
NAME = settings_dict['READARR_NAME']
elif arr_type == 'READARR':
BASE_URL = settingsDict['READARR_URL']
API_KEY = settingsDict['READARR_KEY']
NAME = settingsDict['READARR_NAME']
full_queue_param = 'includeUnknownAuthorItems'
else:
logger.error('Unknown arr_type specified, exiting: %s', str(arr_type))
@@ -54,26 +54,26 @@ async def queueCleaner(settings_dict, arr_type, defective_tracker, download_size
deleted_downloads = Deleted_Downloads([])
items_detected = 0
try:
if settings_dict['REMOVE_FAILED']:
items_detected += await remove_failed( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
if settingsDict['REMOVE_FAILED']:
items_detected += await remove_failed( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
if settings_dict['REMOVE_STALLED']:
items_detected += await remove_stalled( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
if settingsDict['REMOVE_STALLED']:
items_detected += await remove_stalled( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
if settings_dict['REMOVE_METADATA_MISSING']:
items_detected += await remove_metadata_missing( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
if settingsDict['REMOVE_METADATA_MISSING']:
items_detected += await remove_metadata_missing( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
if settings_dict['REMOVE_ORPHANS']:
items_detected += await remove_orphans( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, full_queue_param)
if settingsDict['REMOVE_ORPHANS']:
items_detected += await remove_orphans( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, full_queue_param)
if settings_dict['REMOVE_UNMONITORED']:
items_detected += await remove_unmonitored( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, arr_type)
if settingsDict['REMOVE_UNMONITORED']:
items_detected += await remove_unmonitored( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, arr_type)
if settings_dict['REMOVE_MISSING_FILES']:
items_detected += await remove_missing_files( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
if settingsDict['REMOVE_MISSING_FILES']:
items_detected += await remove_missing_files( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
if settings_dict['REMOVE_SLOW']:
items_detected += await remove_slow( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, download_sizes_tracker)
if settingsDict['REMOVE_SLOW']:
items_detected += await remove_slow( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, download_sizes_tracker)
if items_detected == 0:
logger.verbose('>>> Queue is clean.')


@@ -3,7 +3,7 @@ import sys, os, traceback
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
async def remove_failed(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
async def remove_failed(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
# Detects failed and triggers delete. Does not add to blocklist
try:
failType = 'failed'
@@ -16,7 +16,7 @@ async def remove_failed(settings_dict, BASE_URL, API_KEY, NAME, deleted_download
if 'errorMessage' in queueItem and 'status' in queueItem:
if queueItem['status'] == 'failed':
affectedItems.append(queueItem)
affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = False,
doPrivateTrackerCheck = True,
doProtectedDownloadCheck = True,


@@ -3,7 +3,7 @@ import sys, os, traceback
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
async def remove_metadata_missing(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
async def remove_metadata_missing(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
# Detects downloads stuck downloading metadata and triggers repeat check and subsequent delete. Adds to blocklist
try:
failType = 'missing metadata'
@@ -16,7 +16,7 @@ async def remove_metadata_missing(settings_dict, BASE_URL, API_KEY, NAME, delete
if 'errorMessage' in queueItem and 'status' in queueItem:
if queueItem['status'] == 'queued' and queueItem['errorMessage'] == 'qBittorrent is downloading metadata':
affectedItems.append(queueItem)
affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = True,
doPrivateTrackerCheck = True,
doProtectedDownloadCheck = True,


@@ -3,7 +3,7 @@ import sys, os, traceback
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
async def remove_missing_files(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
async def remove_missing_files(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
# Detects downloads broken because of missing files. Does not add to blocklist
try:
failType = 'missing files'
@@ -18,7 +18,7 @@ async def remove_missing_files(settings_dict, BASE_URL, API_KEY, NAME, deleted_d
(queueItem['errorMessage'] == 'DownloadClientQbittorrentTorrentStateMissingFiles' or
queueItem['errorMessage'] == 'The download is missing files')):
affectedItems.append(queueItem)
affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = False,
doPrivateTrackerCheck = True,
doProtectedDownloadCheck = True,


@@ -3,7 +3,7 @@ import sys, os, traceback
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
async def remove_orphans(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, full_queue_param):
async def remove_orphans(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, full_queue_param):
# Removes downloads belonging to movies/tv shows that have been deleted in the meantime. Does not add to blocklist
try:
failType = 'orphan'
@@ -22,7 +22,7 @@ async def remove_orphans(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloa
if queueItem['id'] not in queueIDs:
affectedItems.append(queueItem)
affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = False,
doPrivateTrackerCheck = True,
doProtectedDownloadCheck = True,


@@ -3,7 +3,7 @@ import sys, os, traceback
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
async def remove_slow(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, download_sizes_tracker):
async def remove_slow(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, download_sizes_tracker):
# Detects slow downloads and triggers delete. Adds to blocklist
try:
failType = 'slow'
@@ -18,17 +18,17 @@ async def remove_slow(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads,
if queueItem['downloadId'] not in alreadyCheckedDownloadIDs:
alreadyCheckedDownloadIDs.append(queueItem['downloadId']) # One downloadId may occur in multiple queueItems - only check once for all of them per iteration
# determine if the downloaded bit on average between this and the last iteration is greater than the min threshold
downloadedSize, previousSize, increment, speed = await getDownloadedSize(settings_dict, queueItem, download_sizes_tracker, NAME)
downloadedSize, previousSize, increment, speed = await getDownloadedSize(settingsDict, queueItem, download_sizes_tracker, NAME)
if queueItem['status'] == 'downloading' and \
queueItem['downloadId'] in download_sizes_tracker.dict and \
speed is not None:
if speed < settings_dict['MIN_DOWNLOAD_SPEED']:
if speed < settingsDict['MIN_DOWNLOAD_SPEED']:
affectedItems.append(queueItem)
logger.debug('remove_slow/slow speed detected: %s (Speed: %d KB/s, KB now: %s, KB previous: %s, Diff: %s, In Minutes: %s', \
queueItem['title'], speed, downloadedSize, previousSize, increment, settings_dict['REMOVE_TIMER'])
queueItem['title'], speed, downloadedSize, previousSize, increment, settingsDict['REMOVE_TIMER'])
affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = True,
doPrivateTrackerCheck = True,
doProtectedDownloadCheck = True,
@@ -39,12 +39,12 @@ async def remove_slow(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads,
return 0
from src.utils.rest import (rest_get)
async def getDownloadedSize(settings_dict, queueItem, download_sizes_tracker, NAME):
async def getDownloadedSize(settingsDict, queueItem, download_sizes_tracker, NAME):
try:
# Determines the speed of download
# Since Sonarr/Radarr do not update the downloadedSize in real time, fetch it directly from qBit if possible
if settings_dict['QBITTORRENT_URL'] and queueItem['downloadClient'] == 'qBittorrent':
qbitInfo = await rest_get(settings_dict['QBITTORRENT_URL']+'/torrents/info',params={'hashes': queueItem['downloadId']}, cookies=settings_dict['QBIT_COOKIE'] )
if settingsDict['QBITTORRENT_URL'] and queueItem['downloadClient'] == 'qBittorrent':
qbitInfo = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/info',params={'hashes': queueItem['downloadId']}, cookies=settingsDict['QBIT_COOKIE'] )
downloadedSize = qbitInfo[0]['completed']
else:
logger.debug('getDownloadedSize/WARN: Using imprecise method to determine download increments because no direct qBIT query is possible')
@@ -52,7 +52,7 @@ async def getDownloadedSize(settings_dict, queueItem, download_sizes_tracker, NA
if queueItem['downloadId'] in download_sizes_tracker.dict:
previousSize = download_sizes_tracker.dict.get(queueItem['downloadId'])
increment = downloadedSize - previousSize
speed = round(increment / 1000 / (settings_dict['REMOVE_TIMER'] * 60),1)
speed = round(increment / 1000 / (settingsDict['REMOVE_TIMER'] * 60),1)
else:
previousSize = None
increment = None
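
A worked example of the speed calculation in getDownloadedSize above, with hypothetical numbers: sizes arrive in bytes, REMOVE_TIMER is in minutes, and the result is compared against MIN_DOWNLOAD_SPEED in KB/s.

# Illustrative numbers only.
REMOVE_TIMER = 10                       # minutes between two clean-up runs
previousSize = 100_000_000              # bytes completed at the previous run
downloadedSize = 130_000_000            # bytes completed now

increment = downloadedSize - previousSize                  # 30_000_000 bytes
speed = round(increment / 1000 / (REMOVE_TIMER * 60), 1)   # bytes -> KB, minutes -> seconds
print(speed)                            # 50.0 (KB/s)

MIN_DOWNLOAD_SPEED = 100                # KB/s threshold (example value)
print(speed < MIN_DOWNLOAD_SPEED)       # True -> the download counts as slow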


@@ -3,7 +3,7 @@ import sys, os, traceback
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
async def remove_stalled(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
async def remove_stalled(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
# Detects stalled and triggers repeat check and subsequent delete. Adds to blocklist
try:
failType = 'stalled'
@@ -16,7 +16,7 @@ async def remove_stalled(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloa
if 'errorMessage' in queueItem and 'status' in queueItem:
if queueItem['status'] == 'warning' and queueItem['errorMessage'] == 'The download is stalled with no connections':
affectedItems.append(queueItem)
affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = True,
doPrivateTrackerCheck = True,
doProtectedDownloadCheck = True,


@@ -4,7 +4,7 @@ import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
from src.utils.rest import rest_get
async def remove_unmonitored(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, arr_type):
async def remove_unmonitored(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, arr_type):
# Removes downloads belonging to movies/tv shows that are not monitored. Does not add to blocklist
try:
failType = 'unmonitored'
@@ -14,13 +14,13 @@ async def remove_unmonitored(settings_dict, BASE_URL, API_KEY, NAME, deleted_dow
# Find items affected
monitoredDownloadIDs = []
for queueItem in queue['records']:
if arr_type == 'sonarr':
if arr_type == 'SONARR':
isMonitored = (await rest_get(f'{BASE_URL}/episode/{str(queueItem["episodeId"])}', API_KEY))['monitored']
elif arr_type == 'radarr':
elif arr_type == 'RADARR':
isMonitored = (await rest_get(f'{BASE_URL}/movie/{str(queueItem["movieId"])}', API_KEY))['monitored']
elif arr_type == 'lidarr':
elif arr_type == 'LIDARR':
isMonitored = (await rest_get(f'{BASE_URL}/album/{str(queueItem["albumId"])}', API_KEY))['monitored']
elif arr_type == 'readarr':
elif arr_type == 'READARR':
isMonitored = (await rest_get(f'{BASE_URL}/book/{str(queueItem["bookId"])}', API_KEY))['monitored']
if isMonitored:
monitoredDownloadIDs.append(queueItem['downloadId'])
@@ -30,7 +30,7 @@ async def remove_unmonitored(settings_dict, BASE_URL, API_KEY, NAME, deleted_dow
if queueItem['downloadId'] not in monitoredDownloadIDs:
affectedItems.append(queueItem) # One downloadID may be shared by multiple queueItems. Only removes it if ALL queueitems are unmonitored
affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = False,
doPrivateTrackerCheck = True,
doProtectedDownloadCheck = True,

src/utils/loadScripts.py (new file, 159 lines)

@@ -0,0 +1,159 @@
########### Import Libraries
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
from dateutil.relativedelta import relativedelta as rd
import requests
from src.utils.rest import rest_get, rest_post #
import asyncio
from packaging import version
def setLoggingFormat(settingsDict):
# Sets logger output to specific format
log_level_num=logging.getLevelName(settingsDict['LOG_LEVEL'])
logging.basicConfig(
format=('' if settingsDict['IS_IN_DOCKER'] else '%(asctime)s ') + ('[%(levelname)-7s]' if settingsDict['LOG_LEVEL']=='VERBOSE' else '[%(levelname)s]') + ': %(message)s',
level=log_level_num
)
return
async def getArrInstanceName(settingsDict, arrApp):
# Retrieves the names of the arr instances, and if not defined, sets a default
try:
if settingsDict[arrApp + '_URL']:
settingsDict[arrApp + '_NAME'] = (await rest_get(settingsDict[arrApp + '_URL']+'/system/status', settingsDict[arrApp + '_KEY']))['instanceName']
except:
settingsDict[arrApp + '_NAME'] = arrApp.capitalize()
return settingsDict
def showSettings(settingsDict):
# Prints out the settings
fmt = '{0.days} days {0.hours} hours {0.minutes} minutes'
logger.info('#' * 50)
logger.info('Decluttarr - Application Started!')
logger.info('')
logger.info('*** Current Settings ***')
logger.info('Version: %s', settingsDict['IMAGE_TAG'])
logger.info('Commit: %s', settingsDict['SHORT_COMMIT_ID'])
logger.info('')
logger.info('%s | Removing failed downloads', str(settingsDict['REMOVE_FAILED']))
logger.info('%s | Removing downloads missing metadata', str(settingsDict['REMOVE_METADATA_MISSING']))
logger.info('%s | Removing downloads missing files', str(settingsDict['REMOVE_MISSING_FILES']))
logger.info('%s | Removing orphan downloads', str(settingsDict['REMOVE_ORPHANS']))
logger.info('%s | Removing slow downloads', str(settingsDict['REMOVE_SLOW']))
logger.info('%s | Removing stalled downloads', str(settingsDict['REMOVE_STALLED']))
logger.info('%s | Removing downloads belonging to unmonitored items', str(settingsDict['REMOVE_UNMONITORED']))
logger.info('')
logger.info('Running every: %s', fmt.format(rd(minutes=settingsDict['REMOVE_TIMER'])))
if settingsDict['REMOVE_SLOW']:
logger.info('Minimum speed enforced: %s KB/s', str(settingsDict['MIN_DOWNLOAD_SPEED']))
logger.info('Permitted number of times before stalled/missing metadata/slow downloads are removed: %s', str(settingsDict['PERMITTED_ATTEMPTS']))
if settingsDict['QBITTORRENT_URL']:
logger.info('Downloads with this tag will be skipped: \"%s\"', settingsDict['NO_STALLED_REMOVAL_QBIT_TAG'])
logger.info('Private Trackers will be skipped: %s', settingsDict['IGNORE_PRIVATE_TRACKERS'])
logger.info('')
logger.info('*** Configured Instances ***')
for instance in settingsDict['INSTANCES']:
if settingsDict[instance + '_URL']:
logger.info('%s: %s', settingsDict[instance + '_NAME'], settingsDict[instance + '_URL'])
if settingsDict['QBITTORRENT_URL']: logger.info('qBittorrent: %s', settingsDict['QBITTORRENT_URL'])
logger.info('')
return
async def instanceChecks(settingsDict):
# Checks if the arr and qbit instances are reachable, and returns the settings dictionary with the qbit cookie
logger.info('*** Check Instances ***')
error_occured = False
# Check ARR-apps
for instance in settingsDict['INSTANCES']:
if settingsDict[instance + '_URL']:
try:
await asyncio.get_event_loop().run_in_executor(None, lambda: requests.get(settingsDict[instance + '_URL']+'/system/status', params=None, headers={'X-Api-Key': settingsDict[instance + '_KEY']}, verify=settingsDict['SSL_VERIFICATION']))
except Exception as error:
error_occured = True
logger.error('!! %s Error: !!', settingsDict[instance + '_NAME'])
logger.error(error)
if not error_occured:
current_version = (await rest_get(settingsDict[instance + '_URL']+'/system/status', settingsDict[instance + '_KEY']))['version']
if settingsDict[instance + '_MIN_VERSION']:
if version.parse(current_version) < version.parse(settingsDict[instance + '_MIN_VERSION']):
error_occured = True
logger.error('!! %s Error: !!', settingsDict[instance + '_NAME'])
logger.error('Please update %s to at least version %s. Current version: %s', settingsDict[instance + '_NAME'], settingsDict[instance + '_MIN_VERSION'], current_version)
if not error_occured:
logger.info('OK | %s', settingsDict[instance + '_NAME'])
logger.debug('Current version of %s: %s', instance, current_version)
# Check Bittorrent
if settingsDict['QBITTORRENT_URL']:
# Checking if qbit can be reached, and checking if version is OK
try:
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.post(settingsDict['QBITTORRENT_URL']+'/auth/login', data={'username': settingsDict['QBITTORRENT_USERNAME'], 'password': settingsDict['QBITTORRENT_PASSWORD']}, headers={'content-type': 'application/x-www-form-urlencoded'}, verify=settingsDict['SSL_VERIFICATION']))
if response.text == 'Fails.':
raise ConnectionError('Login failed.')
response.raise_for_status()
settingsDict['QBIT_COOKIE'] = {'SID': response.cookies['SID']}
except Exception as error:
error_occured = True
logger.error('!! %s Error: !!', 'qBittorrent')
logger.error(error)
logger.error('Details:')
logger.error(response.text)
if not error_occured:
qbit_version = await rest_get(settingsDict['QBITTORRENT_URL']+'/app/version',cookies=settingsDict['QBIT_COOKIE'])
qbit_version = qbit_version[1:] # strip the leading "v" from the version string
if version.parse(qbit_version) < version.parse(settingsDict['QBITTORRENT_MIN_VERSION']):
error_occured = True
logger.error('!! %s Error: !!', 'qBittorrent')
logger.error('Please update qBittorrent to at least version %s. Current version: %s', settingsDict['QBITTORRENT_MIN_VERSION'], qbit_version)
if not error_occured:
logger.info('OK | %s', 'qBittorrent')
logger.debug('Current version of %s: %s', 'qBittorrent', qbit_version)
if error_occured:
logger.warning('At least one instance was not reachable. Waiting for 60 seconds, then exiting Decluttarr.')
await asyncio.sleep(60)
exit()
logger.info('')
return settingsDict
async def createQbitProtectionTag(settingsDict):
# Creates the qBit Protection tag if not already present
if settingsDict['QBITTORRENT_URL']:
current_tags = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/tags',cookies=settingsDict['QBIT_COOKIE'])
if not settingsDict['NO_STALLED_REMOVAL_QBIT_TAG'] in current_tags:
if settingsDict['QBITTORRENT_URL']:
logger.info('Creating tag in qBittorrent: %s', settingsDict['NO_STALLED_REMOVAL_QBIT_TAG'])
if not settingsDict['TEST_RUN']:
await rest_post(url=settingsDict['QBITTORRENT_URL']+'/torrents/createTags', data={'tags': settingsDict['NO_STALLED_REMOVAL_QBIT_TAG']}, headers={'content-type': 'application/x-www-form-urlencoded'}, cookies=settingsDict['QBIT_COOKIE'])
def showLoggerSettings(settingsDict):
logger.info('#' * 50)
if settingsDict['LOG_LEVEL'] == 'INFO':
logger.info('LOG_LEVEL = INFO: Only logging changes (switch to VERBOSE for more info)')
else:
logger.info(f'')
if settingsDict['TEST_RUN']:
logger.info(f'*'* 50)
logger.info(f'*'* 50)
logger.info(f'')
logger.info(f'!! TEST_RUN FLAG IS SET !!')
logger.info(f'NO UPDATES/DELETES WILL BE PERFORMED')
logger.info(f'')
logger.info(f'*'* 50)
logger.info(f'*'* 50)
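
The minimum-version checks in instanceChecks rely on packaging.version, which understands the four-part version strings used by the *arr apps. A small sketch of that comparison, using the minimum versions hard-coded in main.py and made-up current versions:

# Current versions below are hypothetical.
from packaging import version

minimums = {'RADARR': '5.3.6.8608', 'SONARR': '4.0.1.1131', 'QBITTORRENT': '4.3.0'}
current  = {'RADARR': '5.2.6.8376', 'SONARR': '4.0.2.1183', 'QBITTORRENT': '4.6.3'}

for app, min_version in minimums.items():
    if version.parse(current[app]) < version.parse(min_version):
        print(f'Please update {app} to at least version {min_version}. Current version: {current[app]}')
    else:
        print(f'OK | {app}')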


@@ -4,13 +4,13 @@ import asyncio
import requests
from requests.exceptions import RequestException
import json
from config.config import settings_dict
from config.config import settingsDict
# GET
async def rest_get(url, api_key=None, params=None, cookies=None):
try:
headers = {'X-Api-Key': api_key} if api_key else None
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.get(url, params=params, headers=headers, cookies=cookies, verify=settings_dict['SSL_VERIFICATION']))
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.get(url, params=params, headers=headers, cookies=cookies, verify=settingsDict['SSL_VERIFICATION']))
response.raise_for_status()
return response.json()
except requests.exceptions.HTTPError as e:
@@ -23,10 +23,10 @@ async def rest_get(url, api_key=None, params=None, cookies=None):
# DELETE
async def rest_delete(url, api_key, params=None):
if settings_dict['TEST_RUN']: return
if settingsDict['TEST_RUN']: return
try:
headers = {'X-Api-Key': api_key}
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.delete(url, params=params, headers=headers, verify=settings_dict['SSL_VERIFICATION']))
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.delete(url, params=params, headers=headers, verify=settingsDict['SSL_VERIFICATION']))
response.raise_for_status()
if response.status_code in [200, 204]:
return None
@@ -40,9 +40,9 @@ async def rest_delete(url, api_key, params=None):
# POST
async def rest_post(url, data=None, json=None, headers=None, cookies=None):
if settings_dict['TEST_RUN']: return
if settingsDict['TEST_RUN']: return
try:
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.post(url, data=data, json=json, headers=headers, cookies=cookies, verify=settings_dict['SSL_VERIFICATION']))
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.post(url, data=data, json=json, headers=headers, cookies=cookies, verify=settingsDict['SSL_VERIFICATION']))
response.raise_for_status()
if response.status_code in (200,201):
return None
@@ -57,10 +57,10 @@ async def rest_post(url, data=None, json=None, headers=None, cookies=None):
# PUT
async def rest_put(url, api_key, data):
if settings_dict['TEST_RUN']: return
if settingsDict['TEST_RUN']: return
try:
headers = {'X-Api-Key': api_key} | {"content-type": "application/json"}
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.put(url, data=data, headers=headers, verify=settings_dict['SSL_VERIFICATION']))
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.put(url, data=data, headers=headers, verify=settingsDict['SSL_VERIFICATION']))
response.raise_for_status()
return response.json()
except RequestException as e:
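
The wrappers above run blocking requests calls through run_in_executor so the asyncio event loop stays responsive while waiting on the *arr and qBittorrent APIs, and TEST_RUN short-circuits the mutating verbs (DELETE, POST, PUT). A usage sketch; the URL, port, and API key are placeholders:

# Illustrative only; requires a reachable Radarr instance.
import asyncio
from src.utils.rest import rest_get, rest_delete

async def demo():
    # GET is always executed, even in TEST_RUN mode
    queue = await rest_get('http://radarr:7878/api/v3/queue', api_key='hypothetical-key')
    print(queue)
    # DELETE returns immediately without touching the API when TEST_RUN is set
    await rest_delete('http://radarr:7878/api/v3/queue/123', 'hypothetical-key',
                      {'removeFromClient': True, 'blocklist': False})

# asyncio.run(demo())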


@@ -12,16 +12,36 @@ async def get_queue(BASE_URL, API_KEY, params = {}):
if totalRecords == 0:
return None
queue = await rest_get(f'{BASE_URL}/queue', API_KEY, {'page': '1', 'pageSize': totalRecords}|params)
queue = filterOutDelayedQueueItems(queue)
return queue
def privateTrackerCheck(settings_dict, affectedItems, failType, privateDowloadIDs):
def filterOutDelayedQueueItems(queue):
# Ignores delayed queue items
if queue is None:
return None
seen_combinations = set()
filtered_records = []
for record in queue['records']:
combination = (record['title'], record['indexer'])
if record['status'] == 'delay':
if combination not in seen_combinations:
seen_combinations.add(combination)
logger.debug('>>> Delayed queue item ignored: %s (Indexer: %s)', record['title'], record['indexer'])
else:
filtered_records.append(record)
if not filtered_records:
return None
queue['records'] = filtered_records
return queue
def privateTrackerCheck(settingsDict, affectedItems, failType, privateDowloadIDs):
# Ignores private tracker items (if setting is turned on)
for affectedItem in reversed(affectedItems):
if settings_dict['IGNORE_PRIVATE_TRACKERS'] and affectedItem['downloadId'] in privateDowloadIDs:
if settingsDict['IGNORE_PRIVATE_TRACKERS'] and affectedItem['downloadId'] in privateDowloadIDs:
affectedItems.remove(affectedItem)
return affectedItems
def protectedDownloadCheck(settings_dict, affectedItems, failType, protectedDownloadIDs):
def protectedDownloadCheck(settingsDict, affectedItems, failType, protectedDownloadIDs):
# Checks if torrent is protected and skips
logger.debug('protectedDownloadCheck/protectedDownloadIDs (failType: %s): %s', failType, str(protectedDownloadIDs))
for affectedItem in reversed(affectedItems):
@@ -32,7 +52,7 @@ def protectedDownloadCheck(settings_dict, affectedItems, failType, protectedDown
return affectedItems
async def execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, addToBlocklist, doPrivateTrackerCheck, doProtectedDownloadCheck, doPermittedAttemptsCheck):
async def execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, addToBlocklist, doPrivateTrackerCheck, doProtectedDownloadCheck, doPermittedAttemptsCheck):
# Goes over the affected items and performs the checks that are parametrized
try:
# De-duplicates the affected items (one downloadid may be shared by multiple affected items)
@@ -44,17 +64,17 @@ async def execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_K
affectedItems.remove(affectedItem)
# Skips protected items
if doPrivateTrackerCheck:
affectedItems = privateTrackerCheck(settings_dict, affectedItems, failType, privateDowloadIDs)
affectedItems = privateTrackerCheck(settingsDict, affectedItems, failType, privateDowloadIDs)
if doProtectedDownloadCheck:
affectedItems = protectedDownloadCheck(settings_dict, affectedItems, failType, protectedDownloadIDs)
affectedItems = protectedDownloadCheck(settingsDict, affectedItems, failType, protectedDownloadIDs)
# Checks if failing more often than permitted
if doPermittedAttemptsCheck:
affectedItems = permittedAttemptsCheck(settings_dict, affectedItems, failType, BASE_URL, defective_tracker)
affectedItems = permittedAttemptsCheck(settingsDict, affectedItems, failType, BASE_URL, defective_tracker)
# Deletes all downloads that have not survived the checks
for affectedItem in affectedItems:
await remove_download(settings_dict, BASE_URL, API_KEY, affectedItem, failType, addToBlocklist, deleted_downloads)
await remove_download(settingsDict, BASE_URL, API_KEY, affectedItem, failType, addToBlocklist, deleted_downloads)
# Exit Logs
if settings_dict['LOG_LEVEL'] == 'DEBUG':
if settingsDict['LOG_LEVEL'] == 'DEBUG':
queue = await get_queue(BASE_URL, API_KEY)
logger.debug('execute_checks/queue OUT (failType: %s): %s', failType, formattedQueueInfo(queue))
# Return removed items
@@ -63,7 +83,7 @@ async def execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_K
errorDetails(NAME, error)
return []
def permittedAttemptsCheck(settings_dict, affectedItems, failType, BASE_URL, defective_tracker):
def permittedAttemptsCheck(settingsDict, affectedItems, failType, BASE_URL, defective_tracker):
# Checks if downloads are repeatedly found as stalled / stuck in metadata. Removes the items that are not exceeding permitted attempts
# Shows all affected items (for debugging)
logger.debug('permittedAttemptsCheck/affectedItems: %s', ', '.join(f"{affectedItem['id']}:{affectedItem['title']}:{affectedItem['downloadId']}" for affectedItem in affectedItems))
@@ -86,25 +106,25 @@ def permittedAttemptsCheck(settings_dict, affectedItems, failType, BASE_URL, def
defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts'] += 1
except KeyError:
add_keys_nested_dict(defective_tracker.dict,[BASE_URL, failType, affectedItem['downloadId']], {'title': affectedItem['title'], 'Attempts': 1})
attempts_left = settings_dict['PERMITTED_ATTEMPTS'] - defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']
attempts_left = settingsDict['PERMITTED_ATTEMPTS'] - defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']
# If not exceeding the number of permitted times, remove from being affected
if attempts_left >= 0: # Still got attempts left
logger.info('>>> Detected %s download (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settings_dict['PERMITTED_ATTEMPTS']), affectedItem['title'])
logger.info('>>> Detected %s download (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settingsDict['PERMITTED_ATTEMPTS']), affectedItem['title'])
affectedItems.remove(affectedItem)
if attempts_left <= -1: # Too many attempts
logger.info('>>> Detected %s download too many times (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settings_dict['PERMITTED_ATTEMPTS']), affectedItem['title'])
logger.info('>>> Detected %s download too many times (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settingsDict['PERMITTED_ATTEMPTS']), affectedItem['title'])
if attempts_left <= -2: # Too many attempts and should already have been removed
# If supposedly deleted item keeps coming back, print out guidance for "Reject Blocklisted Torrent Hashes While Grabbing"
logger.verbose('>>> [Tip!] Since this download should already have been removed in a previous iteration but keeps coming back, this indicates the blocking of the torrent does not work correctly. Consider turning on the option "Reject Blocklisted Torrent Hashes While Grabbing" on the indexer in the *arr app: %s', affectedItem['title'])
logger.debug('permittedAttemptsCheck/defective_tracker.dict OUT: %s', str(defective_tracker.dict))
return affectedItems
async def remove_download(settings_dict, BASE_URL, API_KEY, affectedItem, failType, addToBlocklist, deleted_downloads):
async def remove_download(settingsDict, BASE_URL, API_KEY, affectedItem, failType, addToBlocklist, deleted_downloads):
# Removes downloads and creates log entry
logger.debug('remove_download/deleted_downloads.dict IN: %s', str(deleted_downloads.dict))
if affectedItem['downloadId'] not in deleted_downloads.dict:
logger.info('>>> Removing %s download: %s', failType, affectedItem['title'])
if not settings_dict['TEST_RUN']:
if not settingsDict['TEST_RUN']:
await rest_delete(f'{BASE_URL}/queue/{affectedItem["id"]}', API_KEY, {'removeFromClient': True, 'blocklist': addToBlocklist})
deleted_downloads.dict.append(affectedItem['downloadId'])
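
The delay-profile compatibility mentioned in the commit title is handled by filterOutDelayedQueueItems above: records whose status is 'delay' are dropped (and logged once per title/indexer combination) before any removal job sees them. A minimal sketch of its behaviour on a hypothetical queue:

# Hypothetical queue payload; only the fields the filter inspects are included.
from src.utils.shared import filterOutDelayedQueueItems

queue = {'records': [
    {'title': 'Some Movie', 'indexer': 'IndexerA', 'status': 'delay'},        # delayed -> ignored and logged
    {'title': 'Some Movie', 'indexer': 'IndexerA', 'status': 'delay'},        # duplicate combination -> ignored silently
    {'title': 'Other Show', 'indexer': 'IndexerB', 'status': 'downloading'},  # kept
]}

filtered = filterOutDelayedQueueItems(queue)
print([record['title'] for record in filtered['records']])   # ['Other Show']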