mirror of
https://github.com/ManiMatter/decluttarr.git
synced 2026-04-21 00:05:35 +02:00
Cleanup of main.py, and fixing
TypeError: unsupported operand type(s) for +: 'dict' and 'dict'
This commit is contained in:
157
src/utils/loadScripts.py
Normal file
157
src/utils/loadScripts.py
Normal file
@@ -0,0 +1,157 @@
|
||||
########### Import Libraries
|
||||
import logging, verboselogs
|
||||
logger = verboselogs.VerboseLogger(__name__)
|
||||
from dateutil.relativedelta import relativedelta as rd
|
||||
import requests
|
||||
from src.utils.rest import rest_get, rest_post #
|
||||
import asyncio
|
||||
from packaging import version
|
||||
|
||||
def setLoggingFormat(settingsDict):
    """Configure the root logger's output format and level from settings.

    The timestamp is omitted when running inside Docker (the container
    runtime prefixes its own), and the level column is padded to 7 chars
    under VERBOSE logging so the longer level names line up.
    """
    timestamp = '' if settingsDict['IS_IN_DOCKER'] else '%(asctime)s '
    level_field = '[%(levelname)-7s]' if settingsDict['LOG_LEVEL'] == 'VERBOSE' else '[%(levelname)s]'
    logging.basicConfig(
        format=timestamp + level_field + ': %(message)s',
        level=logging.getLevelName(settingsDict['LOG_LEVEL']),
    )
|
||||
|
||||
|
||||
async def getArrInstanceName(settingsDict, arrApp):
    """Retrieve the configured instance name of an *arr app from its API.

    Queries <URL>/system/status for 'instanceName'. If the URL setting is
    falsy the dict is returned untouched (no NAME key is added); if the
    lookup fails for any reason the name falls back to the capitalized
    app key (e.g. 'Radarr').

    Parameters:
        settingsDict: global settings dict; updated in place.
        arrApp: settings key prefix, e.g. 'RADARR' or 'SONARR'.

    Returns:
        The (possibly updated) settingsDict.
    """
    try:
        if settingsDict[arrApp + '_URL']:
            status = await rest_get(settingsDict[arrApp + '_URL'] + '/system/status', settingsDict[arrApp + '_KEY'])
            settingsDict[arrApp + '_NAME'] = status['instanceName']
    except Exception:
        # Narrowed from a bare `except:` so SystemExit / KeyboardInterrupt /
        # task cancellation are not swallowed; any API failure still yields
        # the default name.
        settingsDict[arrApp + '_NAME'] = arrApp.capitalize()
    return settingsDict
|
||||
|
||||
|
||||
|
||||
def showSettings(settingsDict):
    """Log a startup banner summarizing all active configuration values."""
    duration_fmt = '{0.days} days {0.hours} hours {0.minutes} minutes'
    logger.info('#' * 50)
    logger.info('Decluttarr - Application Started!')
    logger.info('')
    logger.info('*** Current Settings ***')
    logger.info('Version: %s', settingsDict['IMAGE_TAG'])
    logger.info('Commit: %s', settingsDict['SHORT_COMMIT_ID'])
    logger.info('')
    # One line per removal feature, showing its on/off state.
    removal_features = (
        ('REMOVE_FAILED', 'Removing failed downloads'),
        ('REMOVE_METADATA_MISSING', 'Removing downloads missing metadata'),
        ('REMOVE_MISSING_FILES', 'Removing downloads missing files'),
        ('REMOVE_ORPHANS', 'Removing orphan downloads'),
        ('REMOVE_SLOW', 'Removing slow downloads'),
        ('REMOVE_STALLED', 'Removing stalled downloads'),
        ('REMOVE_UNMONITORED', 'Removing downloads belonging to unmonitored TV shows/movies'),
    )
    for key, description in removal_features:
        logger.info('%s | %s', str(settingsDict[key]), description)
    logger.info('')
    logger.info('Running every: %s', duration_fmt.format(rd(minutes=settingsDict['REMOVE_TIMER'])))
    if settingsDict['REMOVE_SLOW']:
        logger.info('Minimum speed enforced: %s KB/s', str(settingsDict['MIN_DOWNLOAD_SPEED']))
    logger.info('Permitted number of times before stalled/missing metadata/slow downloads are removed: %s', str(settingsDict['PERMITTED_ATTEMPTS']))
    if settingsDict['QBITTORRENT_URL']:
        logger.info('Downloads with this tag will be skipped: \"%s\"', settingsDict['NO_STALLED_REMOVAL_QBIT_TAG'])
        logger.info('Private Trackers will be skipped: %s', settingsDict['IGNORE_PRIVATE_TRACKERS'])

    logger.info('')
    logger.info('*** Configured Instances ***')

    for instance in settingsDict['INSTANCES']:
        if settingsDict[instance + '_URL']:
            logger.info('%s: %s', settingsDict[instance + '_NAME'], settingsDict[instance + '_URL'])

    if settingsDict['QBITTORRENT_URL']:
        logger.info('qBittorrent: %s', settingsDict['QBITTORRENT_URL'])

    logger.info('')
|
||||
|
||||
|
||||
async def instanceChecks(settingsDict):
    """Verify that every configured *arr and qBittorrent instance is reachable
    and satisfies its minimum version requirement.

    On success, the qBittorrent session cookie is stored under 'QBIT_COOKIE'
    and the updated settingsDict is returned. If any check fails, a warning is
    logged, the coroutine sleeps 60 seconds (so container restart loops don't
    hammer the services) and the process exits.
    """
    logger.info('*** Check Instances ***')
    error_occured = False
    # Check ARR-apps
    for instance in settingsDict['INSTANCES']:
        if settingsDict[instance + '_URL']:
            try:
                await asyncio.get_event_loop().run_in_executor(None, lambda: requests.get(settingsDict[instance + '_URL']+'/system/status', params=None, headers={'X-Api-Key': settingsDict[instance + '_KEY']}, verify=settingsDict['SSL_VERIFICATION']))
            except Exception as error:
                error_occured = True
                logger.error('!! %s Error: !!', settingsDict[instance + '_NAME'])
                logger.error(error)
            if not error_occured:
                if settingsDict[instance + '_MIN_VERSION']:
                    current_version = (await rest_get(settingsDict[instance + '_URL']+'/system/status', settingsDict[instance + '_KEY']))['version']
                    if version.parse(current_version) < version.parse(settingsDict[instance + '_MIN_VERSION']):
                        error_occured = True
                        logger.error('!! %s Error: !!', settingsDict[instance + '_NAME'])
                        # Fixed: the settings key was missing its '_' (KeyError) and
                        # the call supplied 2 args for 3 '%s' placeholders.
                        logger.error('Please update %s to at least version %s. Current version: %s', settingsDict[instance + '_NAME'], settingsDict[instance + '_MIN_VERSION'], current_version)
            if not error_occured:
                logger.info('OK | %s', settingsDict[instance + '_NAME'])

    # Check qBittorrent: reachability, login, and minimum version.
    if settingsDict['QBITTORRENT_URL']:
        response = None  # guard so the except-handler can tell if the POST itself raised
        try:
            response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.post(settingsDict['QBITTORRENT_URL']+'/auth/login', data={'username': settingsDict['QBITTORRENT_USERNAME'], 'password': settingsDict['QBITTORRENT_PASSWORD']}, headers={'content-type': 'application/x-www-form-urlencoded'}, verify=settingsDict['SSL_VERIFICATION']))
            # qBittorrent answers HTTP 200 with body 'Fails.' on bad credentials.
            if response.text == 'Fails.':
                raise ConnectionError('Login failed.')
            response.raise_for_status()
            settingsDict['QBIT_COOKIE'] = {'SID': response.cookies['SID']}
        except Exception as error:
            error_occured = True
            logger.error('!! %s Error: !!', 'qBittorrent')
            logger.error(error)
            # Fixed: 'response' was referenced unconditionally here and is
            # unbound when the POST call itself raised.
            if response is not None:
                logger.error('Details:')
                logger.error(response.text)

        if not error_occured:
            qbit_version = await rest_get(settingsDict['QBITTORRENT_URL']+'/app/version',cookies=settingsDict['QBIT_COOKIE'])
            qbit_version = qbit_version[1:] # strip the leading 'v'
            if version.parse(qbit_version) < version.parse(settingsDict['QBITTORRENT_MIN_VERSION']):
                error_occured = True
                # Fixed: original mixed a pre-formatted string with extra lazy
                # args (2 placeholders, 4 arguments).
                logger.error('-- | %s *** Error: Please update qBittorrent to at least version %s. Current version: %s ***', 'qBittorrent', settingsDict['QBITTORRENT_MIN_VERSION'], qbit_version)

        if not error_occured:
            logger.info('OK | %s', 'qBittorrent')

    if error_occured:
        logger.warning('At least one instance was not reachable. Waiting for 60 seconds, then exiting Decluttarr.')
        await asyncio.sleep(60)
        exit()

    logger.info('')
    return settingsDict
|
||||
|
||||
async def createQbitProtectionTag(settingsDict):
    """Create the qBit protection tag in qBittorrent if it does not exist yet.

    Torrents carrying this tag are skipped by the stalled-removal logic.
    No-op when no qBittorrent URL is configured, and no POST is made during
    a TEST_RUN.
    """
    if not settingsDict['QBITTORRENT_URL']:
        return
    current_tags = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/tags', cookies=settingsDict['QBIT_COOKIE'])
    if settingsDict['NO_STALLED_REMOVAL_QBIT_TAG'] not in current_tags:
        # (Removed a redundant re-check of QBITTORRENT_URL that was already
        # guaranteed true by the guard above.)
        logger.info('Creating tag in qBittorrent: %s', settingsDict['NO_STALLED_REMOVAL_QBIT_TAG'])
        if not settingsDict['TEST_RUN']:
            await rest_post(url=settingsDict['QBITTORRENT_URL']+'/torrents/createTags', data={'tags': settingsDict['NO_STALLED_REMOVAL_QBIT_TAG']}, headers={'content-type': 'application/x-www-form-urlencoded'}, cookies=settingsDict['QBIT_COOKIE'])
|
||||
|
||||
def showLoggerSettings(settingsDict):
    """Log the active log level and, when enabled, a prominent TEST_RUN banner."""
    logger.info('#' * 50)
    if settingsDict['LOG_LEVEL'] == 'INFO':
        logger.info('LOG_LEVEL = INFO: Only logging changes (switch to VERBOSE for more info)')
    else:
        logger.info('')
    if settingsDict['TEST_RUN']:
        banner = '*' * 50
        logger.info(banner)
        logger.info(banner)
        logger.info('')
        logger.info('!! TEST_RUN FLAG IS SET !!')
        logger.info('NO UPDATES/DELETES WILL BE PERFORMED')
        logger.info('')
        logger.info(banner)
        logger.info(banner)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -4,13 +4,13 @@ import asyncio
|
||||
import requests
|
||||
from requests.exceptions import RequestException
|
||||
import json
|
||||
from config.config import settings_dict
|
||||
from config.config import settingsDict
|
||||
|
||||
# GET
|
||||
async def rest_get(url, api_key=None, params=None, cookies=None):
|
||||
try:
|
||||
headers = {'X-Api-Key': api_key} if api_key else None
|
||||
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.get(url, params=params, headers=headers, cookies=cookies, verify=settings_dict['SSL_VERIFICATION']))
|
||||
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.get(url, params=params, headers=headers, cookies=cookies, verify=settingsDict['SSL_VERIFICATION']))
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
except requests.exceptions.HTTPError as e:
|
||||
@@ -23,10 +23,10 @@ async def rest_get(url, api_key=None, params=None, cookies=None):
|
||||
|
||||
# DELETE
|
||||
async def rest_delete(url, api_key, params=None):
|
||||
if settings_dict['TEST_RUN']: return
|
||||
if settingsDict['TEST_RUN']: return
|
||||
try:
|
||||
headers = {'X-Api-Key': api_key}
|
||||
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.delete(url, params=params, headers=headers, verify=settings_dict['SSL_VERIFICATION']))
|
||||
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.delete(url, params=params, headers=headers, verify=settingsDict['SSL_VERIFICATION']))
|
||||
response.raise_for_status()
|
||||
if response.status_code in [200, 204]:
|
||||
return None
|
||||
@@ -40,9 +40,9 @@ async def rest_delete(url, api_key, params=None):
|
||||
|
||||
# POST
|
||||
async def rest_post(url, data=None, json=None, headers=None, cookies=None):
|
||||
if settings_dict['TEST_RUN']: return
|
||||
if settingsDict['TEST_RUN']: return
|
||||
try:
|
||||
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.post(url, data=data, json=json, headers=headers, cookies=cookies, verify=settings_dict['SSL_VERIFICATION']))
|
||||
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.post(url, data=data, json=json, headers=headers, cookies=cookies, verify=settingsDict['SSL_VERIFICATION']))
|
||||
response.raise_for_status()
|
||||
if response.status_code in (200,201):
|
||||
return None
|
||||
@@ -57,10 +57,10 @@ async def rest_post(url, data=None, json=None, headers=None, cookies=None):
|
||||
|
||||
# PUT
|
||||
async def rest_put(url, api_key, data):
|
||||
if settings_dict['TEST_RUN']: return
|
||||
if settingsDict['TEST_RUN']: return
|
||||
try:
|
||||
headers = {'X-Api-Key': api_key} | {"content-type": "application/json"}
|
||||
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.put(url, data=data, headers=headers, verify=settings_dict['SSL_VERIFICATION']))
|
||||
response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.put(url, data=data, headers=headers, verify=settingsDict['SSL_VERIFICATION']))
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
except RequestException as e:
|
||||
|
||||
@@ -14,14 +14,14 @@ async def get_queue(BASE_URL, API_KEY, params = {}):
|
||||
queue = await rest_get(f'{BASE_URL}/queue', API_KEY, {'page': '1', 'pageSize': totalRecords}|params)
|
||||
return queue
|
||||
|
||||
def privateTrackerCheck(settingsDict, affectedItems, failType, privateDowloadIDs):
    """Drop items that belong to private trackers from the removal list.

    Only active when IGNORE_PRIVATE_TRACKERS is turned on; mutates
    affectedItems in place and returns it. (failType is accepted for
    signature parity with the sibling check functions but is unused here.)
    """
    # Hoisted the loop-invariant setting check out of the per-item loop.
    if not settingsDict['IGNORE_PRIVATE_TRACKERS']:
        return affectedItems
    # Iterate in reverse so removing the current element is safe.
    for affectedItem in reversed(affectedItems):
        if affectedItem['downloadId'] in privateDowloadIDs:
            affectedItems.remove(affectedItem)
    return affectedItems
|
||||
|
||||
def protectedDownloadCheck(settings_dict, affectedItems, failType, protectedDownloadIDs):
|
||||
def protectedDownloadCheck(settingsDict, affectedItems, failType, protectedDownloadIDs):
|
||||
# Checks if torrent is protected and skips
|
||||
logger.debug('protectedDownloadCheck/protectedDownloadIDs (failType: %s): %s', failType, str(protectedDownloadIDs))
|
||||
for affectedItem in reversed(affectedItems):
|
||||
@@ -32,7 +32,7 @@ def protectedDownloadCheck(settings_dict, affectedItems, failType, protectedDown
|
||||
return affectedItems
|
||||
|
||||
|
||||
async def execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, addToBlocklist, doPrivateTrackerCheck, doProtectedDownloadCheck, doPermittedAttemptsCheck):
|
||||
async def execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, addToBlocklist, doPrivateTrackerCheck, doProtectedDownloadCheck, doPermittedAttemptsCheck):
|
||||
# Goes over the affected items and performs the checks that are parametrized
|
||||
try:
|
||||
# De-duplicates the affected items (one downloadid may be shared by multiple affected items)
|
||||
@@ -44,17 +44,17 @@ async def execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_K
|
||||
affectedItems.remove(affectedItem)
|
||||
# Skips protected items
|
||||
if doPrivateTrackerCheck:
|
||||
affectedItems = privateTrackerCheck(settings_dict, affectedItems, failType, privateDowloadIDs)
|
||||
affectedItems = privateTrackerCheck(settingsDict, affectedItems, failType, privateDowloadIDs)
|
||||
if doProtectedDownloadCheck:
|
||||
affectedItems = protectedDownloadCheck(settings_dict, affectedItems, failType, protectedDownloadIDs)
|
||||
affectedItems = protectedDownloadCheck(settingsDict, affectedItems, failType, protectedDownloadIDs)
|
||||
# Checks if failing more often than permitted
|
||||
if doPermittedAttemptsCheck:
|
||||
affectedItems = permittedAttemptsCheck(settings_dict, affectedItems, failType, BASE_URL, defective_tracker)
|
||||
affectedItems = permittedAttemptsCheck(settingsDict, affectedItems, failType, BASE_URL, defective_tracker)
|
||||
# Deletes all downloads that have not survived the checks
|
||||
for affectedItem in affectedItems:
|
||||
await remove_download(settings_dict, BASE_URL, API_KEY, affectedItem, failType, addToBlocklist, deleted_downloads)
|
||||
await remove_download(settingsDict, BASE_URL, API_KEY, affectedItem, failType, addToBlocklist, deleted_downloads)
|
||||
# Exit Logs
|
||||
if settings_dict['LOG_LEVEL'] == 'DEBUG':
|
||||
if settingsDict['LOG_LEVEL'] == 'DEBUG':
|
||||
queue = await get_queue(BASE_URL, API_KEY)
|
||||
logger.debug('execute_checks/queue OUT (failType: %s): %s', failType, formattedQueueInfo(queue))
|
||||
# Return removed items
|
||||
@@ -63,7 +63,7 @@ async def execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_K
|
||||
errorDetails(NAME, error)
|
||||
return []
|
||||
|
||||
def permittedAttemptsCheck(settings_dict, affectedItems, failType, BASE_URL, defective_tracker):
|
||||
def permittedAttemptsCheck(settingsDict, affectedItems, failType, BASE_URL, defective_tracker):
|
||||
# Checks if downloads are repeatedly found as stalled / stuck in metadata. Removes the items that are not exceeding permitted attempts
|
||||
# Shows all affected items (for debugging)
|
||||
logger.debug('permittedAttemptsCheck/affectedItems: %s', ', '.join(f"{affectedItem['id']}:{affectedItem['title']}:{affectedItem['downloadId']}" for affectedItem in affectedItems))
|
||||
@@ -86,25 +86,25 @@ def permittedAttemptsCheck(settings_dict, affectedItems, failType, BASE_URL, def
|
||||
defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts'] += 1
|
||||
except KeyError:
|
||||
add_keys_nested_dict(defective_tracker.dict,[BASE_URL, failType, affectedItem['downloadId']], {'title': affectedItem['title'], 'Attempts': 1})
|
||||
attempts_left = settings_dict['PERMITTED_ATTEMPTS'] - defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']
|
||||
attempts_left = settingsDict['PERMITTED_ATTEMPTS'] - defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']
|
||||
# If not exceeding the number of permitted times, remove from being affected
|
||||
if attempts_left >= 0: # Still got attempts left
|
||||
logger.info('>>> Detected %s download (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settings_dict['PERMITTED_ATTEMPTS']), affectedItem['title'])
|
||||
logger.info('>>> Detected %s download (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settingsDict['PERMITTED_ATTEMPTS']), affectedItem['title'])
|
||||
affectedItems.remove(affectedItem)
|
||||
if attempts_left <= -1: # Too many attempts
|
||||
logger.info('>>> Detected %s download too many times (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settings_dict['PERMITTED_ATTEMPTS']), affectedItem['title'])
|
||||
logger.info('>>> Detected %s download too many times (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settingsDict['PERMITTED_ATTEMPTS']), affectedItem['title'])
|
||||
if attempts_left <= -2: # Too many attempts and should already have been removed
|
||||
# If supposedly deleted item keeps coming back, print out guidance for "Reject Blocklisted Torrent Hashes While Grabbing"
|
||||
logger.verbose('>>> [Tip!] Since this download should already have been removed in a previous iteration but keeps coming back, this indicates the blocking of the torrent does not work correctly. Consider turning on the option "Reject Blocklisted Torrent Hashes While Grabbing" on the indexer in the *arr app: %s', affectedItem['title'])
|
||||
logger.debug('permittedAttemptsCheck/defective_tracker.dict OUT: %s', str(defective_tracker.dict))
|
||||
return affectedItems
|
||||
|
||||
async def remove_download(settings_dict, BASE_URL, API_KEY, affectedItem, failType, addToBlocklist, deleted_downloads):
|
||||
async def remove_download(settingsDict, BASE_URL, API_KEY, affectedItem, failType, addToBlocklist, deleted_downloads):
|
||||
# Removes downloads and creates log entry
|
||||
logger.debug('remove_download/deleted_downloads.dict IN: %s', str(deleted_downloads.dict))
|
||||
if affectedItem['downloadId'] not in deleted_downloads.dict:
|
||||
logger.info('>>> Removing %s download: %s', failType, affectedItem['title'])
|
||||
if not settings_dict['TEST_RUN']:
|
||||
if not settingsDict['TEST_RUN']:
|
||||
await rest_delete(f'{BASE_URL}/queue/{affectedItem["id"]}', API_KEY, {'removeFromClient': True, 'blocklist': addToBlocklist})
|
||||
deleted_downloads.dict.append(affectedItem['downloadId'])
|
||||
|
||||
|
||||
Reference in New Issue
Block a user