diff --git a/media_manager/indexer/indexers/prowlarr.py b/media_manager/indexer/indexers/prowlarr.py
index b94823b..5d672cb 100644
--- a/media_manager/indexer/indexers/prowlarr.py
+++ b/media_manager/indexer/indexers/prowlarr.py
@@ -1,9 +1,11 @@
 import concurrent
 import logging
 from concurrent.futures import ThreadPoolExecutor
+from contextlib import contextmanager
+from dataclasses import dataclass
-import requests
-from requests.adapters import HTTPAdapter
+from requests import Session
+import prowlarr
 from media_manager.indexer.indexers.generic import GenericIndexer
 from media_manager.config import AllEncompassingConfig
@@ -15,6 +17,21 @@ from media_manager.tv.schemas import Show
 
 log = logging.getLogger(__name__)
 
+@dataclass
+class IndexerInfo:
+    id: int
+    name: str
+
+    supports_tv_search_tmdb: bool
+    supports_tv_search_imdb: bool
+    supports_tv_search_tvdb: bool
+    supports_tv_search_season: bool
+
+    supports_movie_search_tmdb: bool
+    supports_movie_search_imdb: bool
+    supports_movie_search_tvdb: bool
+
+
 class Prowlarr(GenericIndexer):
     def __init__(self, **kwargs):
         """
@@ -25,54 +42,75 @@ class Prowlarr(GenericIndexer):
         """
         super().__init__(name="prowlarr")
         config = AllEncompassingConfig().indexers.prowlarr
-        self.api_key = config.api_key
-        self.url = config.url
+        configuration = prowlarr.Configuration(host=config.url, retries=3)
+        configuration.api_key["X-Api-Key"] = config.api_key
+        self.config = configuration
         self.reject_torrents_on_url_error = config.reject_torrents_on_url_error
         self.timeout_seconds = config.timeout_seconds
         self.follow_redirects = config.follow_redirects
+
+    @contextmanager
+    def __get_api(self):
+        with prowlarr.ApiClient(self.config) as api_instance:
+            yield prowlarr.IndexerApi(api_instance)
+
+    def __get_indexers(self) -> list[IndexerInfo]:
+        with self.__get_api() as client:
+            # __get_api already yields a ready-to-use IndexerApi client
+            indexers = client.list_indexer()
+            indexer_info_list: list[IndexerInfo] = []
+            for indexer in indexers:
+                tv_search_params = (
+
indexer.capabilities.tv_search_params + if indexer.capabilities.tv_search_params + else [] + ) + movie_search_params = ( + indexer.capabilities.movie_search_params + if indexer.capabilities.movie_search_params + else [] + ) + + indexer_info = IndexerInfo( + id=indexer.id, + name=indexer.name, + supports_tv_search_tmdb="tmdbId" in tv_search_params, + supports_tv_search_imdb="imdbId" in tv_search_params, + supports_tv_search_tvdb="tvdbId" in tv_search_params, + supports_tv_search_season="season" in tv_search_params, + supports_movie_search_tmdb="tmdbId" in movie_search_params, + supports_movie_search_imdb="imdbId" in movie_search_params, + supports_movie_search_tvdb="tvdbId" in movie_search_params, + ) + indexer_info_list.append(indexer_info) + return indexer_info_list + def search(self, query: str, is_tv: bool) -> list[IndexerQueryResult]: log.info(f"Searching for: {query}") - url = self.url + "/api/v1/search" + processed_results: list[IndexerQueryResult] = [] + raw_results = None + with self.__get_api() as api: + search_api = prowlarr.SearchApi(api.api_client) - params = { - "query": query, - "apikey": self.api_key, - "categories": "5000" if is_tv else "2000", # TV: 5000, Movies: 2000 - "limit": 10000, - } - with requests.Session() as session: - adapter = HTTPAdapter(pool_connections=100, pool_maxsize=100) - session.mount("http://", adapter) - session.mount("https://", adapter) + try: + raw_results = search_api.list_search( + query=query, categories=[5000] if is_tv else [2000], limit=10000 + ) + except Exception as e: + log.error(f"Prowlarr search error: {e}") + raise RuntimeError(f"Prowlarr search error: {e}") from e - response = session.get(url, params=params) + for result in raw_results: + try: + processed_result = self.__process_result(result=result) + if processed_result: + processed_results.append(processed_result) + except Exception as e: + log.error(f"Failed to process result {result}: {e}") - if response.status_code != 200: - log.error(f"Prowlarr Error: 
{response.status_code}")
-                raise RuntimeError(f"Prowlarr Error: {response.status_code}")
+        return processed_results
-            futures = []
-            result_list: list[IndexerQueryResult] = []
-
-            with ThreadPoolExecutor() as executor:
-                for item in response.json():
-                    future = executor.submit(self.__process_result, item, session)
-                    futures.append(future)
-
-                for future in concurrent.futures.as_completed(futures):
-                    try:
-                        result = future.result()
-                        if result is not None:
-                            result_list.append(result)
-                    except Exception as e:
-                        log.error(f"Processing of one search result failed because: {e}")
-
-            return result_list
-
-    def __process_result(
-        self, result, session: requests.Session
-    ) -> IndexerQueryResult | None:
+    def __process_result(self, result) -> IndexerQueryResult | None:
         # process usenet search result
         if result["protocol"] != "torrent":
             return IndexerQueryResult(
@@ -102,7 +140,10 @@ class Prowlarr(GenericIndexer):
         try:
-            final_download_url = follow_redirects_to_final_torrent_url(
-                initial_url=initial_url,
-                session=session,
-                timeout=self.timeout_seconds,
-            )
+            # Context-manage the Session so its connection pool is closed
+            # instead of leaking one unclosed Session per processed result.
+            with Session() as session:
+                final_download_url = follow_redirects_to_final_torrent_url(
+                    initial_url=initial_url,
+                    session=session,
+                    timeout=self.timeout_seconds,
+                )
         except RuntimeError as e:
@@ -127,8 +168,88 @@ class Prowlarr(GenericIndexer):
             indexer=result["indexer"] if "indexer" in result else None,
         )
 
-    def search_season(self, query: str, show: Show, season_number: int) -> list[IndexerQueryResult]:
-        pass
+    def __process_results(self, results: list) -> list[IndexerQueryResult]:
+        processed_results: list[IndexerQueryResult] = []
+        for result in results:
+            try:
+                processed_result = self.__process_result(result=result)
+                if processed_result:
+                    processed_results.append(processed_result)
+            except Exception as e:
+                log.error(f"Failed to process result {result}: {e}")
+        return processed_results
+
+    def __get_newznab_api(self, searches: list) -> list:
+        results = []
+        with prowlarr.ApiClient(self.config) as client:
+            # NewznabApi is not a context manager and must be built from an
+            # ApiClient instance, not from the bound method self.__get_api.
+            api = prowlarr.NewznabApi(client)
+            futures = []
+            with ThreadPoolExecutor() as executor:
+                for search in searches:
+                    future = executor.submit(api.get_indexer_newznab, **search)
+
futures.append(future) + + for future in concurrent.futures.as_completed(futures): + try: + result = future.result() + if result is not None: + results.extend(result) + except Exception as e: + log.error(f"Querying one indexer failed because: {e}") + return results + + def search_season( + self, query: str, show: Show, season_number: int + ) -> list[IndexerQueryResult]: + indexers = self.__get_indexers() + + searches = [] + for indexer in indexers: + search_params = { + "id": indexer.id, + "cat": "5000", + } + + if indexer.supports_tv_search_tmdb and show.metadata_provider == "tmdb": + search_params["tmdbid"] = show.external_id + if indexer.supports_tv_search_tvdb and show.metadata_provider == "tvdb": + search_params["tvdbid"] = show.external_id + if indexer.supports_tv_search_imdb: + search_params["imdbid"] = show.imdb_id + if indexer.supports_tv_search_season: + search_params["season"] = season_number + + searches.append(search_params) + + raw_results = self.__get_newznab_api(searches=searches) + + search_results = self.__process_results(results=raw_results) + + return search_results def search_movie(self, query: str, movie: Movie) -> list[IndexerQueryResult]: - pass + indexers = self.__get_indexers() + + searches = [] + for indexer in indexers: + search_params = { + "id": indexer.id, + "cat": "2000", + } + + if indexer.supports_movie_search_tmdb and movie.metadata_provider == "tmdb": + search_params["tmdbid"] = movie.external_id + if indexer.supports_movie_search_tvdb and movie.metadata_provider == "tvdb": + search_params["tvdbid"] = movie.external_id + if indexer.supports_movie_search_imdb: + search_params["imdbid"] = movie.imdb_id + + searches.append(search_params) + + raw_results = self.__get_newznab_api(searches=searches) + + search_results = self.__process_results(results=raw_results) + + return search_results diff --git a/pyproject.toml b/pyproject.toml index 8fc4c19..f5628e4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ dependencies = 
[ "sabnzbd-api>=0.1.2", "transmission-rpc>=7.0.11", "libtorrent>=2.0.11", + "prowlarr-py>=1.1.0", ] [tool.setuptools.packages.find] diff --git a/uv.lock b/uv.lock index 55ab212..343e60f 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.13" [[package]] @@ -652,6 +652,7 @@ dependencies = [ { name = "patool" }, { name = "pillow" }, { name = "pillow-avif-plugin" }, + { name = "prowlarr-py" }, { name = "psycopg", extra = ["binary"] }, { name = "pydantic" }, { name = "pydantic-settings", extra = ["toml"] }, @@ -686,6 +687,7 @@ requires-dist = [ { name = "patool", specifier = ">=4.0.1" }, { name = "pillow", specifier = ">=11.2.1" }, { name = "pillow-avif-plugin", specifier = ">=1.5.2" }, + { name = "prowlarr-py", specifier = ">=1.1.0" }, { name = "psycopg", extras = ["binary"], specifier = ">=3.2.9" }, { name = "pydantic", specifier = ">=2.11.5" }, { name = "pydantic-settings", extras = ["toml"], specifier = ">=2.9.1" }, @@ -795,6 +797,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] +[[package]] +name = "prowlarr-py" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dateutil" }, + { name = "typing-extensions" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/90/58f9e8034be18edf79e9305d77b27e32c54fb26be9dbf2bdfce3de279f0e/prowlarr_py-1.1.0.tar.gz", hash = "sha256:9834e1a6a92d543a6d5c1211a3c6c71343e5f0a0784865f2a276f1876783827c", size = 89629, upload-time = "2025-01-24T18:54:02.591Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c6/dd/f9b83cea22627b5b74132a56f9a3715d7981833e245a9d9d442f98a6e6f9/prowlarr_py-1.1.0-py3-none-any.whl", hash = "sha256:d7d7a3cf23bb9d0bc5e4131218ad677fd9c45913c3947596dd14d4dbf5119118", size = 199541, upload-time = "2025-01-24T18:54:00.617Z" }, +] + [[package]] name = "psutil" version = "5.9.8" @@ -971,6 +988,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" }, ] +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + [[package]] name = "python-dotenv" version = "1.1.1" @@ -1141,6 +1170,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, ] +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + [[package]] name = "sniffio" version = "1.3.1"