Added "Ignore download clients" feature

This commit is contained in:
Benjamin Harder
2024-12-03 20:24:29 +01:00
parent 07c4f6f2f8
commit 8a606891d6
15 changed files with 59 additions and 18 deletions

View File

@@ -58,7 +58,7 @@ async def queueCleaner(
logger.verbose("Cleaning queue on %s:", NAME)
# Refresh queue:
try:
full_queue = await get_queue(BASE_URL, API_KEY, params={full_queue_param: True})
full_queue = await get_queue(BASE_URL, API_KEY, settingsDict, params={full_queue_param: True})
if full_queue:
logger.debug("queueCleaner/full_queue at start:")
logger.debug(full_queue)

View File

@@ -28,7 +28,7 @@ async def remove_failed(
# Detects failed and triggers delete. Does not add to blocklist
try:
failType = "failed"
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_failed/queue IN: %s", formattedQueueInfo(queue))
if not queue:

View File

@@ -18,7 +18,7 @@ async def remove_failed_imports(
# Detects downloads that failed to import and triggers repeat check and subsequent delete. Adds to blocklist
try:
failType = "failed import"
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_failed_imports/queue IN: %s", formattedQueueInfo(queue))
if not queue:
return 0

View File

@@ -28,7 +28,7 @@ async def remove_metadata_missing(
# Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist
try:
failType = "missing metadata"
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_metadata_missing/queue IN: %s", formattedQueueInfo(queue))
if not queue:
return 0

View File

@@ -28,7 +28,7 @@ async def remove_missing_files(
# Detects downloads broken because of missing files. Does not add to blocklist
try:
failType = "missing files"
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_missing_files/queue IN: %s", formattedQueueInfo(queue))
if not queue:
return 0

View File

@@ -28,8 +28,10 @@ async def remove_orphans(
# Removes downloads belonging to movies/tv shows that have been deleted in the meantime. Does not add to blocklist
try:
failType = "orphan"
full_queue = await get_queue(BASE_URL, API_KEY, params={full_queue_param: True})
queue = await get_queue(BASE_URL, API_KEY)
full_queue = await get_queue(
BASE_URL, API_KEY, settingsDict, params={full_queue_param: True}
)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_orphans/full queue IN: %s", formattedQueueInfo(full_queue))
if not full_queue:
return 0 # By now the queue may be empty
@@ -63,7 +65,9 @@ async def remove_orphans(
logger.debug(
"remove_orphans/full queue OUT: %s",
formattedQueueInfo(
await get_queue(BASE_URL, API_KEY, params={full_queue_param: True})
await get_queue(
BASE_URL, API_KEY, settingsDict, params={full_queue_param: True}
)
),
)
return len(affectedItems)

View File

@@ -30,7 +30,7 @@ async def remove_slow(
# Detects slow downloads and triggers delete. Adds to blocklist
try:
failType = "slow"
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_slow/queue IN: %s", formattedQueueInfo(queue))
if not queue:
return 0

View File

@@ -28,7 +28,7 @@ async def remove_stalled(
# Detects stalled and triggers repeat check and subsequent delete. Adds to blocklist
try:
failType = "stalled"
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_stalled/queue IN: %s", formattedQueueInfo(queue))
if not queue:
return 0

View File

@@ -29,7 +29,7 @@ async def remove_unmonitored(
# Removes downloads belonging to movies/tv shows that are not monitored. Does not add to blocklist
try:
failType = "unmonitored"
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_unmonitored/queue IN: %s", formattedQueueInfo(queue))
if not queue:
return 0

View File

@@ -23,7 +23,7 @@ async def run_periodic_rescans(
if not arr_type in settingsDict["RUN_PERIODIC_RESCANS"]:
return
try:
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
check_on_endpoint = []
RESCAN_SETTINGS = settingsDict["RUN_PERIODIC_RESCANS"][arr_type]
if RESCAN_SETTINGS["MISSING"]:

View File

@@ -28,6 +28,7 @@ async def getArrInstanceName(settingsDict, arrApp):
settingsDict[arrApp + '_NAME'] = arrApp.title()
return settingsDict
async def getProtectedAndPrivateFromQbit(settingsDict):
# Returns two lists containing the hashes of Qbit that are either protected by tag, or are private trackers (if IGNORE_PRIVATE_TRACKERS is true)
protectedDownloadIDs = []
@@ -101,7 +102,8 @@ def showSettings(settingsDict):
if settingsDict['QBITTORRENT_URL']:
logger.info('Downloads with this tag will be skipped: \"%s\"', settingsDict['NO_STALLED_REMOVAL_QBIT_TAG'])
logger.info('Private Trackers will be skipped: %s', settingsDict['IGNORE_PRIVATE_TRACKERS'])
if settingsDict['IGNORED_DOWNLOAD_CLIENTS']:
logger.info('Download clients skipped: %s',", ".join(settingsDict['IGNORED_DOWNLOAD_CLIENTS']))
logger.info('')
logger.info('*** Configured Instances ***')

View File

@@ -22,7 +22,7 @@ async def get_arr_records(BASE_URL, API_KEY, params={}, end_point=""):
return records["records"]
async def get_queue(BASE_URL, API_KEY, params={}):
async def get_queue(BASE_URL, API_KEY, settingsDict, params={}):
# Refreshes and retrieves the current queue
await rest_post(
url=BASE_URL + "/command",
@@ -31,6 +31,7 @@ async def get_queue(BASE_URL, API_KEY, params={}):
)
queue = await get_arr_records(BASE_URL, API_KEY, params=params, end_point="queue")
queue = filterOutDelayedQueueItems(queue)
queue = filterOutIgnoredDownloadClients(queue, settingsDict)
return queue
@@ -59,6 +60,28 @@ def filterOutDelayedQueueItems(queue):
return filtered_queue
def filterOutIgnoredDownloadClients(queue, settingsDict):
    """
    Return the queue without items whose download client appears in
    settingsDict["IGNORED_DOWNLOAD_CLIENTS"]; each skipped item is logged at debug level.
    A None queue is passed through unchanged.
    """
    if queue is None:
        return queue
    ignored_clients = settingsDict["IGNORED_DOWNLOAD_CLIENTS"]
    kept_items = []
    for item in queue:
        # Items with no "downloadClient" key fall back to a placeholder name
        client = item.get("downloadClient", "Unknown client")
        if client in ignored_clients:
            logger.debug(
                ">>> Queue item ignored due to ignored download client: %s (Download Client: %s)",
                item["title"],
                client,
            )
            continue
        kept_items.append(item)
    return kept_items
def privateTrackerCheck(settingsDict, affectedItems, failType, privateDowloadIDs):
# Ignores private tracker items (if setting is turned on)
for affectedItem in reversed(affectedItems):
@@ -154,7 +177,7 @@ async def execute_checks(
)
# Exit Logs
if settingsDict["LOG_LEVEL"] == "DEBUG":
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug(
"execute_checks/queue OUT (failType: %s): %s",
failType,
@@ -318,7 +341,7 @@ def errorDetails(NAME, error):
NAME,
fname,
exc_tb.tb_lineno,
traceback.format_exc()
traceback.format_exc(),
)
return
@@ -352,8 +375,10 @@ def formattedQueueInfo(queue):
errorDetails("formattedQueueInfo", error)
logger.debug("formattedQueueInfo/queue for debug: %s", str(queue))
if isinstance(error, KeyError):
logger.debug("formattedQueueInfo/queue_item with error for debug: %s", queue_item)
logger.debug(
"formattedQueueInfo/queue_item with error for debug: %s", queue_item
)
return "error"