Periodic Rescans Added (New Feature)

This commit is contained in:
Benjamin Harder
2024-09-14 13:32:54 +02:00
parent c7e5c0e374
commit 34fcff4de8
21 changed files with 472 additions and 253 deletions

View File

@@ -35,28 +35,10 @@ jobs:
echo "valid_branch_name=true" >> $GITHUB_OUTPUT
fi
lint:
unit-tests:
needs: validate-branch-name
if: needs.validate-branch-name.outputs.valid_branch_name == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.10.13'
- name: Install linting dependencies
run: |
python -m pip install --upgrade pip
pip install pylint
- name: Run lint checks
run: |
echo "here we'll lint"
# pylint my_project
unit-tests:
needs: lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python

View File

@@ -3,9 +3,8 @@ repos:
hooks:
- id: black
name: black
entry: venv/bin/black
entry: venv/bin/black --config pyproject.toml
language: system
types: [python]

View File

@@ -1,5 +1,5 @@
[pytest]
log_cli = true
# log_cli = true
addopts = -q --tb=short -s
log_cli_level = INFO
log_cli_format = %(asctime)s - %(levelname)s - %(name)s - %(message)s

README.md
View File

@@ -55,47 +55,66 @@ services:
container_name: decluttarr
restart: always
environment:
- TZ=Europe/Zurich
- PUID=1000
- PGID=1000
## General
- LOG_LEVEL=INFO
#- TEST_RUN=True
#- SSL_VERIFICATION=False
## Features
- REMOVE_TIMER=10
- REMOVE_FAILED=True
- REMOVE_FAILED_IMPORTS=True
- REMOVE_METADATA_MISSING=True
- REMOVE_MISSING_FILES=True
- REMOVE_ORPHANS=True
- REMOVE_SLOW=True
- REMOVE_STALLED=True
- REMOVE_UNMONITORED=True
- MIN_DOWNLOAD_SPEED=100
- PERMITTED_ATTEMPTS=3
- NO_STALLED_REMOVAL_QBIT_TAG=Don't Kill
- IGNORE_PRIVATE_TRACKERS=True
- FAILED_IMPORT_MESSAGE_PATTERNS=["Not an upgrade for existing", "Not a Custom Format upgrade for existing"]
## Radarr
- RADARR_URL=http://radarr:7878
- RADARR_KEY=$RADARR_API_KEY
## Sonarr
- SONARR_URL=http://sonarr:8989
- SONARR_KEY=$SONARR_API_KEY
## Lidarr
- LIDARR_URL=http://lidarr:8686
- LIDARR_KEY=$LIDARR_API_KEY
## Readarr
- READARR_URL=http://readarr:8787
- READARR_KEY=$READARR_API_KEY
## Whisparr
- WHISPARR_URL=http://whisparr:6969
- WHISPARR_KEY=$WHISPARR_API_KEY
## qBittorrent
- QBITTORRENT_URL=http://qbittorrent:8080
#- QBITTORRENT_USERNAME=Your name
#- QBITTORRENT_PASSWORD=Your password
TZ: Europe/Zurich
PUID: 1000
PGID: 1000
## General
# TEST_RUN: True
# SSL_VERIFICATION: False
LOG_LEVEL: INFO
## Features
REMOVE_TIMER: 10
REMOVE_FAILED: True
REMOVE_FAILED_IMPORTS: True
REMOVE_METADATA_MISSING: True
REMOVE_MISSING_FILES: True
REMOVE_ORPHANS: True
REMOVE_SLOW: True
REMOVE_STALLED: True
REMOVE_UNMONITORED: True
RUN_PERIODIC_RESCANS: '
{
"SONARR": {"MISSING": true, "CUTOFF_UNMET": true, "MAX_CONCURRENT_SCANS": 3, "MIN_DAYS_BEFORE_RESCAN": 7},
"RADARR": {"MISSING": true, "CUTOFF_UNMET": true, "MAX_CONCURRENT_SCANS": 3, "MIN_DAYS_BEFORE_RESCAN": 7}
}'
## Feature Settings
PERMITTED_ATTEMPTS: 3
NO_STALLED_REMOVAL_QBIT_TAG: Don't Kill
MIN_DOWNLOAD_SPEED: 100
FAILED_IMPORT_MESSAGE_PATTERNS: '
[
"Not a Custom Format upgrade for existing",
"Not an upgrade for existing"
]'
## Radarr
RADARR_URL: http://radarr:7878
RADARR_KEY: $RADARR_API_KEY
## Sonarr
SONARR_URL: http://sonarr:8989
SONARR_KEY: $SONARR_API_KEY
## Lidarr
LIDARR_URL: http://lidarr:8686
LIDARR_KEY: $LIDARR_API_KEY
## Readarr
READARR_URL: http://readarr:8787
READARR_KEY: $READARR_API_KEY
## Whisparr
WHISPARR_URL: http://whisparr:6969
WHISPARR_KEY: $WHISPARR_API_KEY
## qBittorrent
QBITTORRENT_URL: http://qbittorrent:8080
# QBITTORRENT_USERNAME: Your name
# QBITTORRENT_PASSWORD: Your password
```
3) Run `docker-compose up -d` in the directory where the file is located to create the Docker container
Note: Always pull the "**latest**" version. The "dev" version is for testing only and should only be pulled when contributing code or helping with bug fixes
@@ -212,6 +231,26 @@ Steers which type of cleaning is applied to the downloads queue
- Permissible Values: True, False
- Is Mandatory: No (Defaults to False)
**RUN_PERIODIC_RESCANS**
- Steers whether searches are automatically triggered for items that are missing or have not yet met the cutoff
- Note: Only supports Radarr/Sonarr currently (Lidarr depending on: https://github.com/Lidarr/Lidarr/pull/5084 / Readarr depending on: https://github.com/Readarr/Readarr/pull/3724)
- Type: Dictionary
- Is Mandatory: No (Defaults to no searches being triggered automatically)
- "SONARR"/"RADARR" turns on the automatic searches for the respective instances
- "MISSING"/"CUTOFF_UNMET" turns on the automatic search for those wanted items (defaults to True)
- "MAX_CONCURRENT_SCANS" specifies the maximum number of items to be searched in each scan. This value dictates how many items are processed per search operation, which occurs according to the interval set by the REMOVE_TIMER
- Note: The limit applies per wanted list. Thus, if both Radarr and Sonarr are set up for automatic searches of both missing and cutoff-unmet items, the actual count may be four times MAX_CONCURRENT_SCANS (2 apps x 2 lists x 3 = up to 12 searches per run)
- "MIN_DAYS_BEFORE_RESCAN" sets how many days must pass before an item is considered again for a scan
- Note: RUN_PERIODIC_RESCANS always searches the items that have not been searched for the longest
```
RUN_PERIODIC_RESCANS: '
{
"SONARR": {"MISSING": true, "CUTOFF_UNMET": true, "MAX_CONCURRENT_SCANS": 3, "MIN_DAYS_BEFORE_RESCAN": 7},
"RADARR": {"MISSING": true, "CUTOFF_UNMET": true, "MAX_CONCURRENT_SCANS": 3, "MIN_DAYS_BEFORE_RESCAN": 7}
}'
```
**MIN_DOWNLOAD_SPEED**
- Sets the minimum download speed for active downloads
- If the downloaded file size of a download increases by less than this value between two consecutive checks, the download is considered slow and is removed if this happens more often than the permitted attempts (see the sketch below)
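
A minimal sketch of this check (hypothetical helper name; the actual logic lives in src/jobs/remove_slow.py):
```python
# Sketch only, not the actual implementation: a download counts as slow if the
# size gained between two consecutive checks (spaced REMOVE_TIMER minutes apart)
# implies an average speed below MIN_DOWNLOAD_SPEED (KB/s)
def is_slow(previous_size_bytes, current_size_bytes, remove_timer_minutes, min_download_speed_kbps):
    elapsed_seconds = remove_timer_minutes * 60
    speed_kbps = (current_size_bytes - previous_size_bytes) / 1024 / elapsed_seconds
    return speed_kbps < min_download_speed_kbps
```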

View File

@@ -12,6 +12,9 @@ REMOVE_ORPHANS = True
REMOVE_SLOW = True
REMOVE_STALLED = True
REMOVE_UNMONITORED = True
RUN_PERIODIC_RESCANS = {"SONARR": {"MISSING": true, "CUTOFF_UNMET": true, "MAX_CONCURRENT_SCANS": 3, "MIN_DAYS_BEFORE_RESCAN": 7}, "RADARR": {"MISSING": true, "CUTOFF_UNMET": true, "MAX_CONCURRENT_SCANS": 3, "MIN_DAYS_BEFORE_RESCAN": 7}}
[feature_settings]
MIN_DOWNLOAD_SPEED = 100
PERMITTED_ATTEMPTS = 3
NO_STALLED_REMOVAL_QBIT_TAG = Don't Kill

View File

@@ -12,17 +12,20 @@ REMOVE_TIMER = get_config_value('REMOVE_TIMER',
REMOVE_FAILED = get_config_value('REMOVE_FAILED', 'features', False, bool, False)
REMOVE_FAILED_IMPORTS = get_config_value('REMOVE_FAILED_IMPORTS' , 'features', False, bool, False)
REMOVE_METADATA_MISSING = get_config_value('REMOVE_METADATA_MISSING', 'features', False, bool, False)
REMOVE_MISSING_FILES = get_config_value('REMOVE_MISSING_FILES' , 'features', False, bool, False)
REMOVE_NO_FORMAT_UPGRADE = get_config_value('REMOVE_NO_FORMAT_UPGRADE' , 'features', False, bool, False) # OUTDATED - WILL RETURN WARNING
REMOVE_ORPHANS = get_config_value('REMOVE_ORPHANS' , 'features', False, bool, False)
REMOVE_SLOW = get_config_value('REMOVE_SLOW' , 'features', False, bool, False)
REMOVE_MISSING_FILES = get_config_value('REMOVE_MISSING_FILES', 'features', False, bool, False)
REMOVE_NO_FORMAT_UPGRADE = get_config_value('REMOVE_NO_FORMAT_UPGRADE', 'features', False, bool, False) # OUTDATED - WILL RETURN WARNING
REMOVE_ORPHANS = get_config_value('REMOVE_ORPHANS', 'features', False, bool, False)
REMOVE_SLOW = get_config_value('REMOVE_SLOW', 'features', False, bool, False)
REMOVE_STALLED = get_config_value('REMOVE_STALLED', 'features', False, bool, False)
REMOVE_UNMONITORED = get_config_value('REMOVE_UNMONITORED' , 'features', False, bool, False)
MIN_DOWNLOAD_SPEED = get_config_value('MIN_DOWNLOAD_SPEED', 'features', False, int, 0)
PERMITTED_ATTEMPTS = get_config_value('PERMITTED_ATTEMPTS', 'features', False, int, 3)
NO_STALLED_REMOVAL_QBIT_TAG = get_config_value('NO_STALLED_REMOVAL_QBIT_TAG', 'features', False, str, 'Don\'t Kill')
IGNORE_PRIVATE_TRACKERS = get_config_value('IGNORE_PRIVATE_TRACKERS', 'features', False, bool, True)
FAILED_IMPORT_MESSAGE_PATTERNS = get_config_value('FAILED_IMPORT_MESSAGE_PATTERNS','features', False, list, [])
REMOVE_UNMONITORED = get_config_value('REMOVE_UNMONITORED', 'features', False, bool, False)
RUN_PERIODIC_RESCANS = get_config_value('RUN_PERIODIC_RESCANS', 'features', False, dict, {})
# Feature Settings
MIN_DOWNLOAD_SPEED = get_config_value('MIN_DOWNLOAD_SPEED', 'feature_settings', False, int, 0)
PERMITTED_ATTEMPTS = get_config_value('PERMITTED_ATTEMPTS', 'feature_settings', False, int, 3)
NO_STALLED_REMOVAL_QBIT_TAG = get_config_value('NO_STALLED_REMOVAL_QBIT_TAG', 'feature_settings', False, str, 'Don\'t Kill')
IGNORE_PRIVATE_TRACKERS = get_config_value('IGNORE_PRIVATE_TRACKERS', 'feature_settings', False, bool, True)
FAILED_IMPORT_MESSAGE_PATTERNS = get_config_value('FAILED_IMPORT_MESSAGE_PATTERNS','feature_settings', False, list, [])
# Radarr
RADARR_URL = get_config_value('RADARR_URL', 'radarr', False, str)
@@ -60,6 +63,41 @@ if not (IS_IN_PYTEST or RADARR_URL or SONARR_URL or LIDARR_URL or READARR_URL or
print(f'[ ERROR ]: No Radarr/Sonarr/Lidarr/Readarr/Whisparr URLs specified (nothing to monitor)')
exit()
#### Validate rescan settings
rescan_supported_apps = ["SONARR", "RADARR"]
rescan_default_values = {
"MISSING": (True, bool),
"CUTOFF_UNMET": (True, bool),
"MAX_CONCURRENT_SCANS": (3, int),
"MIN_DAYS_BEFORE_RESCAN": (7, int),
}
# Remove rescan apps that are not supported
for key in list(RUN_PERIODIC_RESCANS.keys()):
if key not in rescan_supported_apps:
print(f"[ WARNING ]: Removed '{key}' from RUN_PERIODIC_RESCANS since only {rescan_supported_apps} are supported.")
RUN_PERIODIC_RESCANS.pop(key)
# Ensure SONARR and RADARR have the required parameters with default values if they are present
for app in rescan_supported_apps:
if app in RUN_PERIODIC_RESCANS:
for param, (default, expected_type) in rescan_default_values.items():
if param not in RUN_PERIODIC_RESCANS[app]:
print(f"[ INFO ]: Adding missing parameter '{param}' to '{app}' with default value '{default}'.")
RUN_PERIODIC_RESCANS[app][param] = default
else:
# Check the type and correct if necessary
current_value = RUN_PERIODIC_RESCANS[app][param]
if not isinstance(current_value, expected_type):
print(
f"[ INFO ]: Parameter '{param}' for '{app}' must be of type {expected_type.__name__} and found value '{current_value}' (type '{type(current_value).__name__}'). Defaulting to '{default}'."
)
RUN_PERIODIC_RESCANS[app][param] = default
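# For illustration (hypothetical input, not part of the code): a partial setting such as
#   RUN_PERIODIC_RESCANS = {"SONARR": {"MISSING": True, "MAX_CONCURRENT_SCANS": "3"}}
# would be normalized by the checks above to
#   {"SONARR": {"MISSING": True, "CUTOFF_UNMET": True, "MAX_CONCURRENT_SCANS": 3, "MIN_DAYS_BEFORE_RESCAN": 7}}
# (missing parameters get their defaults, and the mistyped string "3" falls back to the default 3)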
########### Enrich setting variables
if RADARR_URL: RADARR_URL = RADARR_URL.rstrip('/') + '/api/v3'
if SONARR_URL: SONARR_URL = SONARR_URL.rstrip('/') + '/api/v3'
@@ -68,8 +106,14 @@ if READARR_URL: READARR_URL = READARR_URL.rstrip('/') + '/api/v1'
if WHISPARR_URL: WHISPARR_URL = WHISPARR_URL.rstrip('/') + '/api/v3'
if QBITTORRENT_URL: QBITTORRENT_URL = QBITTORRENT_URL.rstrip('/') + '/api/v2'
RADARR_MIN_VERSION = '5.3.6.8608'
SONARR_MIN_VERSION = '4.0.1.1131'
RADARR_MIN_VERSION = "5.3.6.8608"
if "RADARR" in PERIODIC_RESCANS:
RADARR_MIN_VERSION = "5.10.3.9171"
SONARR_MIN_VERSION = "4.0.1.1131"
if "SONARR" in PERIODIC_RESCANS:
SONARR_MIN_VERSION = "4.0.9.2332"
LIDARR_MIN_VERSION = None
READARR_MIN_VERSION = None
WHISPARR_MIN_VERSION = '2.0.0.548'
@@ -82,4 +126,3 @@ settingsDict = {}
for var_name in dir():
if var_name.isupper():
settingsDict[var_name] = locals()[var_name]

View File

@@ -22,9 +22,14 @@ def config_section_map(section):
options = config.options(section)
for option in options:
try:
dict1[option] = config.get(section, option)
except:
print("exception on %s!" % option)
value = config.get(section, option)
# Attempt to parse JSON for dictionary-like values
try:
dict1[option] = json.loads(value)
except json.JSONDecodeError:
dict1[option] = value
except Exception as e:
print(f"Exception on {option}: {e}")
dict1[option] = None
return dict1
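# For illustration (hypothetical values): with the JSON fallback above, an option read as the
# string '{"SONARR": {"MISSING": true}}' becomes the dict {'SONARR': {'MISSING': True}},
# while a plain value such as "Don't Kill" fails json.loads and is kept as the string itself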
@@ -38,44 +43,33 @@ def get_config_value(key, config_section, is_mandatory, datatype, default_value=
if IS_IN_DOCKER:
config_value = os.environ.get(key)
if config_value is None:
    if is_mandatory:
        print(f"[ ERROR ]: Variable not specified in Docker environment: {key}")
        sys.exit(0)
    config_value = default_value
else:
try:
config_value = config_section_map(config_section).get(key)
except configparser.NoSectionError:
config_value = None
if config_value is None:
    if is_mandatory:
        print(
            f"[ ERROR ]: Mandatory variable not specified in config file, section [{config_section}]: {key} (data type: {datatype.__name__})"
        )
        sys.exit(0)
    config_value = default_value
# Apply data type
try:
if datatype == bool:
config_value = eval(str(config_value).capitalize())
elif datatype == list:
if (
type(config_value) != list
): # Default value is already a list, doesn't need to be pushed through json.loads
elif datatype == list or datatype == dict:
if not isinstance(config_value, datatype):
config_value = json.loads(config_value)
elif config_value is not None:
config_value = cast(config_value, datatype)

View File

@@ -11,6 +11,7 @@ from src.jobs.remove_orphans import remove_orphans
from src.jobs.remove_slow import remove_slow
from src.jobs.remove_stalled import remove_stalled
from src.jobs.remove_unmonitored import remove_unmonitored
from src.jobs.run_periodic_rescans import run_periodic_rescans
from src.utils.trackers import Deleted_Downloads
@@ -56,119 +57,127 @@ async def queueCleaner(
# Cleans up the downloads queue
logger.verbose("Cleaning queue on %s:", NAME)
# Refresh queue:
full_queue = await get_queue(BASE_URL, API_KEY, params={full_queue_param: True})
if not full_queue:
logger.verbose(">>> Queue is empty.")
return
else:
logger.debug("queueCleaner/full_queue at start:")
logger.debug(full_queue)
deleted_downloads = Deleted_Downloads([])
items_detected = 0
try:
if settingsDict["REMOVE_FAILED"]:
items_detected += await remove_failed(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
)
full_queue = await get_queue(BASE_URL, API_KEY, params={full_queue_param: True})
if full_queue:
logger.debug("queueCleaner/full_queue at start:")
logger.debug(full_queue)
if settingsDict["REMOVE_FAILED_IMPORTS"]:
items_detected += await remove_failed_imports(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
)
deleted_downloads = Deleted_Downloads([])
items_detected = 0
if settingsDict["REMOVE_METADATA_MISSING"]:
items_detected += await remove_metadata_missing(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
)
if settingsDict["REMOVE_FAILED"]:
items_detected += await remove_failed(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
)
if settingsDict["REMOVE_MISSING_FILES"]:
items_detected += await remove_missing_files(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
)
if settingsDict["REMOVE_FAILED_IMPORTS"]:
items_detected += await remove_failed_imports(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
)
if settingsDict["REMOVE_ORPHANS"]:
items_detected += await remove_orphans(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
full_queue_param,
)
if settingsDict["REMOVE_METADATA_MISSING"]:
items_detected += await remove_metadata_missing(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
)
if settingsDict["REMOVE_SLOW"]:
items_detected += await remove_slow(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
download_sizes_tracker,
)
if settingsDict["REMOVE_MISSING_FILES"]:
items_detected += await remove_missing_files(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
)
if settingsDict["REMOVE_STALLED"]:
items_detected += await remove_stalled(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
)
if settingsDict["REMOVE_ORPHANS"]:
items_detected += await remove_orphans(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
full_queue_param,
)
if settingsDict["REMOVE_UNMONITORED"]:
items_detected += await remove_unmonitored(
if settingsDict["REMOVE_SLOW"]:
items_detected += await remove_slow(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
download_sizes_tracker,
)
if settingsDict["REMOVE_STALLED"]:
items_detected += await remove_stalled(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
)
if settingsDict["REMOVE_UNMONITORED"]:
items_detected += await remove_unmonitored(
settingsDict,
BASE_URL,
API_KEY,
NAME,
deleted_downloads,
defective_tracker,
protectedDownloadIDs,
privateDowloadIDs,
arr_type,
)
if items_detected == 0:
logger.verbose(">>> Queue is clean.")
else:
logger.verbose(">>> Queue is empty.")
if settingsDict["RUN_PERIODIC_RESCANS"]:
await run_periodic_rescans(
    settingsDict,
    BASE_URL,
    API_KEY,
    NAME,
    arr_type,
)
if items_detected == 0:
logger.verbose(">>> Queue is clean.")
except Exception as error:
errorDetails(NAME, error)
return

View File

@@ -30,13 +30,15 @@ async def remove_failed(
failType = "failed"
queue = await get_queue(BASE_URL, API_KEY)
logger.debug("remove_failed/queue IN: %s", formattedQueueInfo(queue))
if not queue:
return 0
if await qBitOffline(settingsDict, failType, NAME):
return 0
# Find items affected
affectedItems = []
for queueItem in queue["records"]:
for queueItem in queue:
if "errorMessage" in queueItem and "status" in queueItem:
if queueItem["status"] == "failed":
affectedItems.append(queueItem)

View File

@@ -30,8 +30,7 @@ async def remove_failed_imports(
patterns = settingsDict.get("FAILED_IMPORT_MESSAGE_PATTERNS", [])
if not patterns: # If patterns is empty or not present
patterns = None
for queueItem in queue["records"]:
for queueItem in queue:
if (
"status" in queueItem
and "trackedDownloadStatus" in queueItem

View File

@@ -36,7 +36,7 @@ async def remove_metadata_missing(
return 0
# Find items affected
affectedItems = []
for queueItem in queue["records"]:
for queueItem in queue:
if "errorMessage" in queueItem and "status" in queueItem:
if (
queueItem["status"] == "queued"

View File

@@ -36,7 +36,7 @@ async def remove_missing_files(
return 0
# Find items affected
affectedItems = []
for queueItem in queue["records"]:
for queueItem in queue:
if "status" in queueItem:
# case to check for failed torrents
if (

View File

@@ -37,10 +37,10 @@ async def remove_orphans(
# Find items affected
# 1. create a list of the "known" queue items
queueIDs = [queueItem["id"] for queueItem in queue["records"]] if queue else []
queueIDs = [queueItem["id"] for queueItem in queue] if queue else []
affectedItems = []
# 2. compare all queue items against the known ones, and those that are not found are the "unknown" or "orphan" ones
for queueItem in full_queue["records"]:
for queueItem in full_queue:
if queueItem["id"] not in queueIDs:
affectedItems.append(queueItem)

View File

@@ -39,7 +39,7 @@ async def remove_slow(
# Find items affected
affectedItems = []
alreadyCheckedDownloadIDs = []
for queueItem in queue["records"]:
for queueItem in queue:
if (
"downloadId" in queueItem
and "size" in queueItem

View File

@@ -36,7 +36,7 @@ async def remove_stalled(
return 0
# Find items affected
affectedItems = []
for queueItem in queue["records"]:
for queueItem in queue:
if "errorMessage" in queueItem and "status" in queueItem:
if (
queueItem["status"] == "warning"

View File

@@ -35,7 +35,7 @@ async def remove_unmonitored(
return 0
# Find items affected
monitoredDownloadIDs = []
for queueItem in queue["records"]:
for queueItem in queue:
if arr_type == "SONARR":
isMonitored = (
await rest_get(
@@ -70,7 +70,7 @@ async def remove_unmonitored(
monitoredDownloadIDs.append(queueItem["downloadId"])
affectedItems = []
for queueItem in queue["records"]:
for queueItem in queue:
if queueItem["downloadId"] not in monitoredDownloadIDs:
affectedItems.append(
queueItem

View File

@@ -0,0 +1,129 @@
from src.utils.shared import (
errorDetails,
rest_get,
rest_post,
get_queue,
get_arr_records,
)
import logging, verboselogs
from datetime import datetime, timedelta, timezone
import dateutil.parser
logger = verboselogs.VerboseLogger(__name__)
async def run_periodic_rescans(
settingsDict,
BASE_URL,
API_KEY,
NAME,
arr_type,
):
# Checks the wanted items and runs scans
if arr_type not in settingsDict["RUN_PERIODIC_RESCANS"]:
return
try:
queue = await get_queue(BASE_URL, API_KEY)
check_on_endpoint = []
RESCAN_SETTINGS = settingsDict["RUN_PERIODIC_RESCANS"][arr_type]
if RESCAN_SETTINGS["MISSING"]:
check_on_endpoint.append("missing")
if RESCAN_SETTINGS["CUTOFF_UNMET"]:
check_on_endpoint.append("cutoff")
params = {"sortDirection": "ascending"}
if arr_type == "SONARR":
params["sortKey"] = "episodes.lastSearchTime"
queue_ids = [r["seriesId"] for r in queue if "seriesId" in r]
series = await rest_get(f"{BASE_URL}/series", API_KEY)
series_dict = {s["id"]: s for s in series}
elif arr_type == "RADARR":
params["sortKey"] = "movies.lastSearchTime"
queue_ids = [r["movieId"] for r in queue if "movieId" in r]
for end_point in check_on_endpoint:
records = await get_arr_records(
BASE_URL, API_KEY, params=params, end_point=f"wanted/{end_point}"
)
if not records:
logger.verbose(
f">>> Rescan: No {end_point} items, thus nothing to rescan."
)
continue
# Filter out items that are already being downloaded (are in queue)
records = [r for r in records if r["id"] not in queue_ids]
if not records:
logger.verbose(
f">>> Rescan: All {end_point} items are already being downloaded, thus nothing to rescan."
)
continue
# Remove records that have recently been searched already
for record in reversed(records):  # reverse iteration so removal is safe while looping
    if "lastSearchTime" in record and (
        dateutil.parser.isoparse(record["lastSearchTime"])
        + timedelta(days=RESCAN_SETTINGS["MIN_DAYS_BEFORE_RESCAN"])
        >= datetime.now(timezone.utc)
    ):
        records.remove(record)
# Select oldest records
records = records[: RESCAN_SETTINGS["MAX_CONCURRENT_SCANS"]]
if not records:
logger.verbose(
f">>> Rescan: All {end_point} items have recently been scanned for, thus nothing to rescan."
)
continue
if arr_type == "SONARR":
for record in records:
series_id = record.get("seriesId")
if series_id and series_id in series_dict:
record["series"] = series_dict[series_id]
else:
record["series"] = (
None # Or handle missing series info as needed
)
logger.verbose(
f">>> Running a scan for {len(records)} {end_point} items:\n"
+ "\n".join(
[
f"{episode['series']['title']} (Season {episode['seasonNumber']} / Episode {episode['episodeNumber']} / Aired: {episode['airDate']}): {episode['title']}"
for episode in records
]
)
)
payload = {
    "name": "EpisodeSearch",
    "episodeIds": [r["id"] for r in records],
}
elif arr_type == "RADARR":
logger.verbose(
f">>> Running a scan for {len(records)} {end_point} items:\n"
+ "\n".join(
[f"{movie['title']} ({movie['year']})" for movie in records]
)
)
json = {"name": "MoviesSearch", "movieIds": [r["id"] for r in records]}
if not settingsDict["TEST_RUN"]:
await rest_post(
url=BASE_URL + "/command",
json=payload,
headers={"X-Api-Key": API_KEY},
)
except Exception as error:
errorDetails(NAME, error)
return 0
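# For reference (illustrative IDs): the payloads posted to the /command endpoint above look like
#   {"name": "EpisodeSearch", "episodeIds": [101, 102, 103]}  # Sonarr
#   {"name": "MoviesSearch", "movieIds": [11, 12, 13]}        # Radarr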

View File

@@ -82,7 +82,10 @@ def showSettings(settingsDict):
logger.info('%s | Removing slow downloads (%s)', str(settingsDict['REMOVE_SLOW']), 'REMOVE_SLOW')
logger.info('%s | Removing stalled downloads (%s)', str(settingsDict['REMOVE_STALLED']), 'REMOVE_STALLED')
logger.info('%s | Removing downloads belonging to unmonitored items (%s)', str(settingsDict['REMOVE_UNMONITORED']), 'REMOVE_UNMONITORED')
logger.info('')
for arr_type, RESCAN_SETTINGS in settingsDict['RUN_PERIODIC_RESCANS'].items():
logger.info('%s/%s (%s) | Search missing/cutoff-unmet items. Max queries/list: %s. Min. days to re-search: %s (%s)', RESCAN_SETTINGS['MISSING'], RESCAN_SETTINGS['CUTOFF_UNMET'], arr_type, RESCAN_SETTINGS['MAX_CONCURRENT_SCANS'], RESCAN_SETTINGS['MIN_DAYS_BEFORE_RESCAN'], 'RUN_PERIODIC_RESCANS')
logger.info('')
logger.info('Running every: %s', fmt.format(rd(minutes=settingsDict['REMOVE_TIMER'])))
if settingsDict['REMOVE_SLOW']:
logger.info('Minimum speed enforced: %s KB/s', str(settingsDict['MIN_DOWNLOAD_SPEED']))

View File

@@ -1,47 +1,51 @@
# Import Libraries
import asyncio
import asyncio
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
import json
# Import Functions
from config.definitions import settingsDict
from src.utils.loadScripts import *
from src.decluttarr import queueCleaner
from src.utils.rest import rest_get, rest_post
from src.utils.trackers import Defective_Tracker, Download_Sizes_Tracker
from src.utils.rest import rest_get, rest_post
from src.utils.trackers import Defective_Tracker, Download_Sizes_Tracker
# Hide SSL Verification Warnings
if settingsDict['SSL_VERIFICATION']==False:
if settingsDict["SSL_VERIFICATION"] == False:
import warnings
warnings.filterwarnings("ignore", message="Unverified HTTPS request")
# Set up logging
setLoggingFormat(settingsDict)
# Main function
async def main(settingsDict):
# Adds to settings Dict the instances that are actually configured
settingsDict['INSTANCES'] = []
for arrApplication in settingsDict['SUPPORTED_ARR_APPS']:
if settingsDict[arrApplication + '_URL']:
settingsDict['INSTANCES'].append(arrApplication)
# Adds to settings Dict the instances that are actually configured
settingsDict["INSTANCES"] = []
for arrApplication in settingsDict["SUPPORTED_ARR_APPS"]:
if settingsDict[arrApplication + "_URL"]:
settingsDict["INSTANCES"].append(arrApplication)
# Pre-populates the dictionaries (in classes) that track the items that were already caught as having problems or removed
defectiveTrackingInstances = {}
for instance in settingsDict['INSTANCES']:
defectiveTrackingInstances = {}
for instance in settingsDict["INSTANCES"]:
defectiveTrackingInstances[instance] = {}
defective_tracker = Defective_Tracker(defectiveTrackingInstances)
download_sizes_tracker = Download_Sizes_Tracker({})
# Get name of arr-instances
for instance in settingsDict['INSTANCES']:
for instance in settingsDict["INSTANCES"]:
settingsDict = await getArrInstanceName(settingsDict, instance)
# Check outdated
upgradeChecks(settingsDict)
# Welcome Message
showWelcome()
showWelcome()
# Current Settings
showSettings(settingsDict)
@@ -57,21 +61,29 @@ async def main(settingsDict):
# Start Cleaning
while True:
logger.verbose('-' * 50)
# Cache protected (via Tag) and private torrents
protectedDownloadIDs, privateDowloadIDs = await getProtectedAndPrivateFromQbit(settingsDict)
logger.verbose("-" * 50)
# Cache protected (via Tag) and private torrents
protectedDownloadIDs, privateDowloadIDs = await getProtectedAndPrivateFromQbit(
settingsDict
)
# Run script for each instance
for instance in settingsDict['INSTANCES']:
await queueCleaner(settingsDict, instance, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs)
logger.verbose('')
logger.verbose('Queue clean-up complete!')
for instance in settingsDict["INSTANCES"]:
await queueCleaner(
settingsDict,
instance,
defective_tracker,
download_sizes_tracker,
protectedDownloadIDs,
privateDowloadIDs,
)
logger.verbose("")
logger.verbose("Queue clean-up complete!")
# Wait for the next run
await asyncio.sleep(settingsDict['REMOVE_TIMER']*60)
await asyncio.sleep(settingsDict["REMOVE_TIMER"] * 60)
return
if __name__ == '__main__':
if __name__ == "__main__":
asyncio.run(main(settingsDict))

View File

@@ -7,21 +7,29 @@ from src.utils.nest_functions import add_keys_nested_dict, nested_get
import sys, os, traceback
async def get_arr_records(BASE_URL, API_KEY, params={}, end_point=""):
# All records from a given endpoint
record_count = (await rest_get(f"{BASE_URL}/{end_point}", API_KEY, params))[
"totalRecords"
]
if record_count == 0:
return []
records = await rest_get(
f"{BASE_URL}/{end_point}",
API_KEY,
{"page": "1", "pageSize": record_count} | params,
)
return records["records"]
async def get_queue(BASE_URL, API_KEY, params={}):
# Retrieves the current queue
# Refreshes and retrieves the current queue
await rest_post(
url=BASE_URL + "/command",
json={"name": "RefreshMonitoredDownloads"},
headers={"X-Api-Key": API_KEY},
)
totalRecords = (await rest_get(f"{BASE_URL}/queue", API_KEY, params))[
"totalRecords"
]
if totalRecords == 0:
return None
queue = await rest_get(
f"{BASE_URL}/queue", API_KEY, {"page": "1", "pageSize": totalRecords} | params
)
queue = await get_arr_records(BASE_URL, API_KEY, params=params, end_point="queue")
queue = filterOutDelayedQueueItems(queue)
return queue
@@ -29,29 +37,26 @@ async def get_queue(BASE_URL, API_KEY, params={}):
def filterOutDelayedQueueItems(queue):
# Ignores delayed queue items
if queue is None:
return None
return queue
seen_combinations = set()
filtered_records = []
for record in queue["records"]:
filtered_queue = []
for queue_item in queue:
# Use get() method with default value "No indexer" if 'indexer' key does not exist
indexer = record.get("indexer", "No indexer")
protocol = record.get("protocol", "No protocol")
combination = (record["title"], protocol, indexer)
if record["status"] == "delay":
indexer = queue_item.get("indexer", "No indexer")
protocol = queue_item.get("protocol", "No protocol")
combination = (queue_item["title"], protocol, indexer)
if queue_item["status"] == "delay":
if combination not in seen_combinations:
seen_combinations.add(combination)
logger.debug(
">>> Delayed queue item ignored: %s (Protocol: %s, Indexer: %s)",
record["title"],
queue_item["title"],
protocol,
indexer,
)
else:
filtered_records.append(record)
if not filtered_records:
return None
queue["records"] = filtered_records
return queue
filtered_queue.append(queue_item)
return filtered_queue
def privateTrackerCheck(settingsDict, affectedItems, failType, privateDowloadIDs):
@@ -326,10 +331,10 @@ def formattedQueueInfo(queue):
if not queue:
return "empty"
formatted_list = []
for record in queue["records"]:
download_id = record["downloadId"]
title = record["title"]
item_id = record["id"]
for queue_item in queue:
download_id = queue_item["downloadId"]
title = queue_item["title"]
item_id = queue_item["id"]
# Check if there is an entry with the same download_id and title
existing_entry = next(
(item for item in formatted_list if item["downloadId"] == download_id),

View File

@@ -42,7 +42,7 @@ async def run_test(
execute_checks_mock.side_effect = side_effect
# Create an async mock for get_queue that returns mock_data
mock_get_queue = AsyncMock(return_value=mock_data)
mock_get_queue = AsyncMock(return_value=mock_data["records"])
# Patch the methods
monkeypatch.setattr("src.jobs.remove_failed_imports.get_queue", mock_get_queue)