diff --git a/README.md b/README.md index 09f8ac6..92c1655 100644 --- a/README.md +++ b/README.md @@ -37,8 +37,8 @@ Looking to **upgrade from V1 to V2**? Look [here](#upgrading-from-v1-to-v2) - [REMOVE_SLOW](#remove_slow) - [REMOVE_STALLED](#remove_stalled) - [REMOVE_UNMONITORED](#remove_unmonitored) - - [SEARCH_CUTOFF_UNMET_CONTENT](#search_unmet_cutoff_content) - - [SEARCH_MISSING_CONTENT](#search_missing_content) + - [SEARCH_CUTOFF_UNMET](#search_unmet_cutoff) + - [SEARCH_MISSING](#search_missing) - [Instances](#arr-instances) - [SONARR](#sonarr) - [RADARR](#radarr) @@ -66,8 +66,8 @@ Feature overview: - Removing downloads that are repeatedly have been found to be slow (remove_slow) - Removing downloads that are stalled (remove_stalled) - Removing downloads belonging to movies/series/albums etc. that have been marked as "unmonitored" (remove_unmonitored) -- Periodically searching for better content on movies/series/albums etc. where cutoff has not been reached yet (search_cutoff_unmet_content) -- Periodically searching for missing content that has not yet been found (search_missing_content) +- Periodically searching for better content on movies/series/albums etc. 
where cutoff has not been reached yet (search_unmet_cutoff)
Without wildcard(s), exact match is assumed | @@ -425,7 +425,7 @@ If a job has the same settings configured on job-level, the job-level settings w #### MIN_DAYS_BETWEEN_SEARCHES -- Only relevant together with search_unmet_cutoff_content and search_missing_content +- Only relevant together with search_unmet_cutoff and search_missing - Specified how many days should elapse before decluttarr tries to search for a given wanted item again - Type: Integer - Permissible Values: Any number @@ -433,7 +433,7 @@ If a job has the same settings configured on job-level, the job-level settings w #### MAX_CONCURRENT_SEARCHES -- Only relevant together with search_unmet_cutoff_content and search_missing_content +- Only relevant together with search_unmet_cutoff and search_missing - Specified how many ites concurrently on a single arr should be searched for in a given iteration - Each arr counts separately - Example: If your wanted-list has 100 entries, and you define "3" as your number, after roughly 30 searches you'll have all items on your list searched for. @@ -549,7 +549,7 @@ This is the interesting section. It defines which job you want decluttarr to run - Permissible Values: True, False - Is Mandatory: No (Defaults to False) -#### SEARCH_UNMET_CUTOFF_CONTENT +#### SEARCH_UNMET_CUTOFF - Steers whether searches are automatically triggered for items that are wanted and have not yet met the cutoff - Type: Boolean or Dict @@ -560,7 +560,7 @@ This is the interesting section. 
It defines which job you want decluttarr to run - Note: - You can also specify min_days_between_searches and max_concurrent_searches as job defaults (see above) or simply rely on the system defaults -#### SEARCH_MISSING_CONTENT +#### SEARCH_MISSING - Steers whether searches are automatically triggered for items that are missing - Type: Boolean or Dict diff --git a/config/config_example.yaml b/config/config_example.yaml index 8b03bbc..5688a54 100644 --- a/config/config_example.yaml +++ b/config/config_example.yaml @@ -36,10 +36,10 @@ jobs: remove_stalled: # max_strikes: 3 remove_unmonitored: - search_unmet_cutoff_content: + search_unmet_cutoff: # min_days_between_searches: 7 # max_concurrent_searches: 3 - search_missing_content: + search_missing: # min_days_between_searches: 7 # max_concurrent_searches: 3 diff --git a/main.py b/main.py index bc3b1f8..5a53401 100644 --- a/main.py +++ b/main.py @@ -26,6 +26,16 @@ def terminate(sigterm: signal.SIGTERM, frame: types.FrameType) -> None: # noqa: logger.info(f"Termination signal received at {datetime.datetime.now()}.") # noqa: DTZ005 sys.exit(0) +async def wait_next_run(): + # Calculate next run time dynamically (to display) + next_run = datetime.datetime.now() + datetime.timedelta(minutes=settings.general.timer) + formatted_next_run = next_run.strftime("%Y-%m-%d %H:%M") + + logger.verbose(f"*** Done - Next run at {formatted_next_run} ****") + + # Wait for the next run + await asyncio.sleep(settings.general.timer * 60) + # Main function async def main(): await launch_steps(settings) @@ -41,12 +51,10 @@ async def main(): # Run script for each instance for arr in settings.instances.arrs: await job_manager.run_jobs(arr) - - logger.verbose("") - logger.verbose("Queue clean-up complete!") + logger.verbose("") # Wait for the next run - await asyncio.sleep(settings.general.timer * 60) + await wait_next_run() return diff --git a/src/job_manager.py b/src/job_manager.py index 2ff3b16..dcfcf56 100644 --- a/src/job_manager.py +++ 
b/src/job_manager.py @@ -21,12 +21,11 @@ class JobManager: async def run_jobs(self, arr): self.arr = arr + logger.verbose(f"*** Running jobs on {self.arr.name} ({self.arr.base_url}) ***") await self.removal_jobs() await self.search_jobs() async def removal_jobs(self): - logger.verbose("") - logger.verbose(f"Cleaning queue on {self.arr.name}:") if not await self._queue_has_items(): return @@ -36,27 +35,37 @@ class JobManager: # Refresh trackers await self.arr.tracker.refresh_private_and_protected(self.settings) - # Execute Cleaning + # Run Remval Jobs removal_jobs = self._get_removal_jobs() + if not removal_jobs: + logger.verbose("Removel Jobs: None triggered (No jobs active)") + return + items_detected = 0 for removal_job in removal_jobs: items_detected += await removal_job.run() if items_detected == 0: - logger.verbose(">>> Queue is clean.") + logger.verbose("Removal Jobs: All jobs passed (Queue is clean)") async def search_jobs(self): if ( self.arr.arr_type == "whisparr" ): # Whisparr does not support this endpoint (yet?) 
return - if self.settings.jobs.search_missing_content.enabled: - await SearchHandler(self.arr, self.settings).handle_search("missing") - if self.settings.jobs.search_unmet_cutoff_content.enabled: - await SearchHandler(self.arr, self.settings).handle_search("cutoff") + if self.settings.jobs.search_missing.enabled: + await SearchHandler( + arr=self.arr, settings=self.settings, missing_or_cutoff="missing", job_name="search_missing" + ).handle_search() + if self.settings.jobs.search_unmet_cutoff.enabled: + await SearchHandler( + arr=self.arr, settings=self.settings, missing_or_cutoff="cutoff", job_name="search_cutoff_unmet" + ).handle_search() async def _queue_has_items(self): - logger.debug(f"job_manager.py/_queue_has_items (Before any removal jobs): Checking if any items in full queue") + logger.debug( + f"job_manager.py/_queue_has_items (Before any removal jobs): Checking if any items in full queue" + ) queue_manager = QueueManager(self.arr, self.settings) full_queue = await queue_manager.get_queue_items("full") if full_queue: @@ -67,12 +76,14 @@ class JobManager: return True self.arr.tracker.reset() - logger.verbose(">>> Queue is empty.") + logger.verbose("Removal Jobs: None triggered (Queue is empty)") return False async def _qbit_connected(self): for qbit in self.settings.download_clients.qbittorrent: - logger.debug(f"job_manager.py/_queue_has_items (Before any removal jobs): Checking if qbit is connected to the internet") + logger.debug( + f"job_manager.py/_queue_has_items (Before any removal jobs): Checking if qbit is connected to the internet" + ) # Check if any client is disconnected if not await qbit.check_qbit_connected(): logger.warning( diff --git a/src/jobs/removal_handler.py b/src/jobs/removal_handler.py index d4c26ec..e699da3 100644 --- a/src/jobs/removal_handler.py +++ b/src/jobs/removal_handler.py @@ -33,12 +33,12 @@ class RemovalHandler: async def _remove_download(self, affected_download, download_id, blocklist): queue_id = 
affected_download["queue_ids"][0] - logger.info(f">>> Job '{self.job_name}' triggered removal: {affected_download['title']}") + logger.info(f"Job '{self.job_name}' triggered removal: {affected_download['title']}") logger.debug(f"remove_handler.py/_remove_download: download_id={download_id}") await self.arr.remove_queue_item(queue_id=queue_id, blocklist=blocklist) async def _tag_as_obsolete(self, affected_download, download_id): - logger.info(f">>> Job'{self.job_name}' triggered obsolete-tagging: {affected_download['title']}") + logger.info(f"Job '{self.job_name}' triggered obsolete-tagging: {affected_download['title']}") for qbit in self.settings.download_clients.qbittorrent: await qbit.set_tag(tags=[self.settings.general.obsolete_tag], hashes=[download_id]) diff --git a/src/jobs/remove_failed_imports.py b/src/jobs/remove_failed_imports.py index 4d02481..c69d1b1 100644 --- a/src/jobs/remove_failed_imports.py +++ b/src/jobs/remove_failed_imports.py @@ -46,24 +46,21 @@ class RemoveFailedImports(RemovalJob): if not messages: return [] - return [f">>>>> Tracked Download State: {item['trackedDownloadState']}", *messages] + removal_messages = [ + f"↳ Tracked Download State: {item['trackedDownloadState']}", + f"↳ Status Messages:", + *[f" - {msg}" for msg in messages] + ] + return removal_messages + @staticmethod - def _get_matching_messages(status_messages, patterns) -> list: - """Extract messages matching the provided patterns (or all messages if no pattern).""" - matched_messages = [] - - if not patterns: - # No patterns provided, include all messages - for status_message in status_messages: - matched_messages.extend(f">>>>> - {msg}" for msg in status_message.get("messages", [])) - else: - # Patterns provided, match only those messages that fit the patterns - matched_messages.extend( - f">>>>> - {msg}" - for status_message in status_messages - for msg in status_message.get("messages", []) - if any(fnmatch.fnmatch(msg, pattern) for pattern in patterns) - ) - - return 
matched_messages + def _get_matching_messages(status_messages, patterns) -> list[str]: + """Extract unique messages matching the provided patterns (or all messages if no pattern).""" + messages = [ + msg + for status_message in status_messages + for msg in status_message.get("messages", []) + if not patterns or any(fnmatch.fnmatch(msg, pattern) for pattern in patterns) + ] + return list(dict.fromkeys(messages)) \ No newline at end of file diff --git a/src/jobs/search_handler.py b/src/jobs/search_handler.py index ae2d499..b16be97 100644 --- a/src/jobs/search_handler.py +++ b/src/jobs/search_handler.py @@ -8,22 +8,43 @@ from src.utils.wanted_manager import WantedManager class SearchHandler: - def __init__(self, arr, settings): + def __init__(self, arr, settings, missing_or_cutoff, job_name): self.arr = arr self.settings = settings - self.job = None self.wanted_manager = WantedManager(self.arr, self.settings) + self.missing_or_cutoff = missing_or_cutoff + self._configure_search_target() + self.job_name = job_name - async def handle_search(self, search_type): - logger.debug(f"search_handler.py: Running '{search_type}' search") - self._initialize_job(search_type) + def _configure_search_target(self): + logger.debug( + f"search_handler.py/_configure_search_target: Setting job & search label ({self.missing_or_cutoff})" + ) + if self.missing_or_cutoff == "missing": + self.job = self.settings.jobs.search_missing + self.search_target_label = f"missing {self.arr.detail_item_key}s" + elif self.missing_or_cutoff == "cutoff": + self.job = self.settings.jobs.search_unmet_cutoff + self.search_target_label = f"{self.arr.detail_item_key}s with unmet cutoff" + else: + error = f"Unknown search type: {self.missing_or_cutoff}" + raise ValueError(error) - logger.debug(f"search_handler.py/handle_search: Getting the list of wanted items ({search_type})") - wanted_items = await self._get_initial_wanted_items(search_type) + async def handle_search(self): + logger.debug( + 
f"search_handler.py/handle_search: Running '{self.missing_or_cutoff}' search" + ) + + logger.debug( + f"search_handler.py/handle_search: Getting the list of wanted items ({self.missing_or_cutoff})" + ) + wanted_items = await self._get_initial_wanted_items() if not wanted_items: return - logger.debug(f"search_handler.py/handle_search: Getting list of queue items to only search for items that are not already downloading.") + logger.debug( + f"search_handler.py/handle_search: Getting list of queue items to only search for items that are not already downloading." + ) queue = await QueueManager(self.arr, self.settings).get_queue_items( queue_scope="normal", ) @@ -31,38 +52,32 @@ class SearchHandler: if not wanted_items: return - await self._log_items(wanted_items, search_type) - logger.debug(f"search_handler.py/handle_search: Triggering search for wanted items ({search_type})") + await self._log_items(wanted_items) + logger.debug( + f"search_handler.py/handle_search: Triggering search for wanted items ({self.missing_or_cutoff})" + ) await self._trigger_search(wanted_items) - def _initialize_job(self, search_type): - logger.verbose("") - if search_type == "missing": - logger.verbose(f"Searching for missing content on {self.arr.name}:") - self.job = self.settings.jobs.search_missing_content - elif search_type == "cutoff": - logger.verbose(f"Searching for unmet cutoff content on {self.arr.name}:") - self.job = self.settings.jobs.search_unmet_cutoff_content - else: - error = f"Unknown search type: {search_type}" - raise ValueError(error) - - def _get_initial_wanted_items(self, search_type): - wanted = self.wanted_manager.get_wanted_items(search_type) + def _get_initial_wanted_items(self): + wanted = self.wanted_manager.get_wanted_items(self.missing_or_cutoff) if not wanted: - logger.verbose(f">>> No {search_type} items, thus not triggering a search.") + logger.verbose( + f"Job '{self.job_name}' did not trigger a search: No {self.search_target_label}" + ) return wanted def 
_filter_wanted_items(self, items, queue): items = self._filter_already_downloading(items, queue) if not items: - logger.verbose(">>> All items already downloading, nothing to search for.") + logger.verbose( + f"Job '{self.job_name}' did not trigger a search: All {self.search_target_label} are already in the queue" + ) return [] items = self._filter_recent_searches(items) if not items: logger.verbose( - ">>> All items recently searched for, thus not triggering another search.", + f"Job '{self.job_name}' did not trigger a search: All {self.search_target_label} were searched for in the last {self.job.min_days_between_searches} days" ) return [] @@ -98,15 +113,19 @@ class SearchHandler: return result - async def _log_items(self, items, search_type): - logger.verbose(f">>> Running a scan for {len(items)} {search_type} items:") + async def _log_items(self, items): + logger.info( + f"Job '{self.job_name}' triggered a search for {len(items)} {self.arr.detail_item_key}s" + ) for item in items: if self.arr.arr_type in ["radarr", "readarr", "lidarr"]: title = item.get("title", "Unknown") - logger.verbose(f">>> - {title}") + logger.verbose(f"- {title}") elif self.arr.arr_type == "sonarr": - logger.debug("search_handler.py/_log_items: Getting series information for better display in output") + logger.debug( + "search_handler.py/_log_items: Getting series information for better display in output" + ) series = await self.arr.get_series() series_title = next( (s["title"] for s in series if s["id"] == item.get("seriesId")), @@ -115,7 +134,7 @@ class SearchHandler: episode = item.get("episodeNumber", "00") season = item.get("seasonNumber", "00") season_numbering = f"S{int(season):02}/E{int(episode):02}" - logger.verbose(f">>> - {series_title} ({season_numbering})") + logger.verbose(f"- {series_title} ({season_numbering})") async def _get_series_dict(self): series = await self.arr.rest_get("series") diff --git a/src/settings/_jobs.py b/src/settings/_jobs.py index 5048293..3cd2076 
100644 --- a/src/settings/_jobs.py +++ b/src/settings/_jobs.py @@ -32,6 +32,10 @@ class JobParams: self.max_concurrent_searches = max_concurrent_searches self.min_days_between_searches = min_days_between_searches + # if not self.max_concurrent_searches <= 0: + # logger.warning(f"Job setting 'max_concurrent_searches' must be an integer greater 0. Found: {str(self.max_concurrent_searches)}. Using default: 3") + # self.max_concurrent_searches = 3 + # Remove attributes that are None to keep the object clean self._remove_none_attributes() @@ -55,11 +59,16 @@ class JobDefaults: def __init__(self, config): job_defaults_config = config.get("job_defaults", {}) self.max_strikes = job_defaults_config.get("max_strikes", self.max_strikes) - self.max_concurrent_searches = job_defaults_config.get( - "max_concurrent_searches", self.max_concurrent_searches, - ) + max_concurrent_searches = job_defaults_config.get("max_concurrent_searches") + if isinstance(max_concurrent_searches, int) and max_concurrent_searches > 0: + self.max_concurrent_searches = max_concurrent_searches + else: + logger.warning( + f"Job default 'max_concurrent_searches' must be an integer greater 0. Found: {str(max_concurrent_searches)}. 
Using default: {self.max_concurrent_searches}" + ) self.min_days_between_searches = job_defaults_config.get( - "min_days_between_searches", self.min_days_between_searches, + "min_days_between_searches", + self.min_days_between_searches, ) validate_data_types(self) @@ -74,9 +83,7 @@ class Jobs: del self.job_defaults def _set_job_defaults(self): - self.remove_bad_files = JobParams( - keep_archives=self.job_defaults.keep_archives - ) + self.remove_bad_files = JobParams(keep_archives=self.job_defaults.keep_archives) self.remove_failed_downloads = JobParams() self.remove_failed_imports = JobParams( message_patterns=self.job_defaults.message_patterns, @@ -92,11 +99,11 @@ class Jobs: ) self.remove_stalled = JobParams(max_strikes=self.job_defaults.max_strikes) self.remove_unmonitored = JobParams() - self.search_unmet_cutoff_content = JobParams( + self.search_unmet_cutoff = JobParams( max_concurrent_searches=self.job_defaults.max_concurrent_searches, min_days_between_searches=self.job_defaults.min_days_between_searches, ) - self.search_missing_content = JobParams( + self.search_missing = JobParams( max_concurrent_searches=self.job_defaults.max_concurrent_searches, min_days_between_searches=self.job_defaults.min_days_between_searches, ) @@ -133,7 +140,8 @@ class Jobs: setattr(self, job_name, job) validate_data_types( - job, self.job_defaults, + job, + self.job_defaults, ) # Validates and applies defaults from job_defaults def log_status(self): diff --git a/src/settings/_user_config.py b/src/settings/_user_config.py index a74782c..5c282e7 100644 --- a/src/settings/_user_config.py +++ b/src/settings/_user_config.py @@ -27,8 +27,8 @@ CONFIG_MAPPING = { "REMOVE_SLOW", "REMOVE_STALLED", "REMOVE_UNMONITORED", - "SEARCH_UNMET_CUTOFF_CONTENT", - "SEARCH_MISSING_CONTENT", + "SEARCH_UNMET_CUTOFF", + "SEARCH_MISSING", ], "instances": ["SONARR", "RADARR", "READARR", "LIDARR", "WHISPARR"], "download_clients": ["QBITTORRENT"], diff --git a/src/settings/settings.py 
b/src/settings/settings.py index e605cd9..ce3ab21 100644 --- a/src/settings/settings.py +++ b/src/settings/settings.py @@ -33,13 +33,13 @@ class Settings: messages = ["🛠️ Decluttarr - Settings 🛠️", "-" * 80] for title, attr_name in sections: section = getattr(self, attr_name, None) - section_content = section.config_as_yaml() + section_yaml = section.config_as_yaml() if title == "ACTIVE JOBS": messages.append(self._format_section_title(title)) messages.append(self.jobs.list_job_status()) - elif section_content != "{}": + elif section_yaml != "{}": messages.append(self._format_section_title(title)) - messages.append(section_content) + messages.append(section_yaml) messages.append("") # Extra linebreak after section return "\n".join(messages)