diff --git a/media_manager/auth/db.py b/media_manager/auth/db.py index 12e961e..d7f1c3a 100644 --- a/media_manager/auth/db.py +++ b/media_manager/auth/db.py @@ -1,5 +1,4 @@ from collections.abc import AsyncGenerator -from typing import Optional from fastapi import Depends from fastapi_users.db import ( @@ -17,7 +16,7 @@ from media_manager.database import Base, build_db_url class OAuthAccount(SQLAlchemyBaseOAuthAccountTableUUID, Base): access_token: Mapped[str] = mapped_column(String(length=4096), nullable=False) - refresh_token: Mapped[Optional[str]] = mapped_column( + refresh_token: Mapped[str | None] = mapped_column( String(length=4096), nullable=True ) @@ -34,12 +33,12 @@ engine = create_async_engine( async_session_maker = async_sessionmaker(engine, expire_on_commit=False) -async def get_async_session() -> AsyncGenerator[AsyncSession, None]: +async def get_async_session() -> AsyncGenerator[AsyncSession]: async with async_session_maker() as session: yield session async def get_user_db( session: AsyncSession = Depends(get_async_session), -) -> AsyncGenerator[SQLAlchemyUserDatabase, None]: +) -> AsyncGenerator[SQLAlchemyUserDatabase]: yield SQLAlchemyUserDatabase(session, User, OAuthAccount) diff --git a/media_manager/auth/router.py b/media_manager/auth/router.py index 16cd6ee..f3fae2b 100644 --- a/media_manager/auth/router.py +++ b/media_manager/auth/router.py @@ -1,5 +1,5 @@ +from collections.abc import AsyncGenerator from contextlib import asynccontextmanager -from typing import AsyncGenerator from fastapi import APIRouter, Depends, FastAPI, status from fastapi_users.router import get_oauth_router diff --git a/media_manager/auth/users.py b/media_manager/auth/users.py index 9081ecc..c820fd2 100644 --- a/media_manager/auth/users.py +++ b/media_manager/auth/users.py @@ -1,7 +1,8 @@ import contextlib import logging import uuid -from typing import Any, AsyncGenerator, Optional, override +from collections.abc import AsyncGenerator +from typing import Any, override 
from fastapi import Depends, Request from fastapi.responses import RedirectResponse, Response @@ -49,7 +50,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]): self, user: models.UP, update_dict: dict[str, Any], - request: Optional[Request] = None, + request: Request | None = None, ) -> None: log.info(f"User {user.id} has been updated.") if update_dict.get("is_superuser"): @@ -60,7 +61,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]): @override async def on_after_register( - self, user: User, request: Optional[Request] = None + self, user: User, request: Request | None = None ) -> None: log.info(f"User {user.id} has registered.") if user.email in config.admin_emails: @@ -69,7 +70,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]): @override async def on_after_forgot_password( - self, user: User, token: str, request: Optional[Request] = None + self, user: User, token: str, request: Request | None = None ) -> None: link = f"{MediaManagerConfig().misc.frontend_url}web/login/reset-password?token={token}" log.info(f"User {user.id} has forgot their password. Reset Link: {link}") @@ -100,28 +101,26 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]): @override async def on_after_reset_password( - self, user: User, request: Optional[Request] = None + self, user: User, request: Request | None = None ) -> None: log.info(f"User {user.id} has reset their password.") @override async def on_after_request_verify( - self, user: User, token: str, request: Optional[Request] = None + self, user: User, token: str, request: Request | None = None ) -> None: log.info( f"Verification requested for user {user.id}. 
Verification token: {token}" ) @override - async def on_after_verify( - self, user: User, request: Optional[Request] = None - ) -> None: + async def on_after_verify(self, user: User, request: Request | None = None) -> None: log.info(f"User {user.id} has been verified") async def get_user_manager( user_db: SQLAlchemyUserDatabase = Depends(get_user_db), -) -> AsyncGenerator[UserManager, None]: +) -> AsyncGenerator[UserManager]: yield UserManager(user_db) @@ -176,8 +175,8 @@ async def create_default_admin_user() -> None: log.info( f"Found {user_count} existing users. Skipping default user creation." ) - except Exception as e: - log.error(f"Failed to create default admin user: {e}") + except Exception: + log.exception("Failed to create default admin user") log.info( "You can create an admin user manually by registering with an email from the admin_emails list in your config." ) diff --git a/media_manager/config.py b/media_manager/config.py index 782cb2a..7e2170b 100644 --- a/media_manager/config.py +++ b/media_manager/config.py @@ -1,7 +1,6 @@ import logging import os from pathlib import Path -from typing import Tuple, Type from pydantic import AnyHttpUrl from pydantic_settings import ( @@ -71,12 +70,12 @@ class MediaManagerConfig(BaseSettings): @classmethod def settings_customise_sources( cls, - settings_cls: Type[BaseSettings], + settings_cls: type[BaseSettings], init_settings: PydanticBaseSettingsSource, env_settings: PydanticBaseSettingsSource, dotenv_settings: PydanticBaseSettingsSource, file_secret_settings: PydanticBaseSettingsSource, - ) -> Tuple[PydanticBaseSettingsSource, ...]: + ) -> tuple[PydanticBaseSettingsSource, ...]: return ( init_settings, env_settings, diff --git a/media_manager/database/__init__.py b/media_manager/database/__init__.py index e34a73e..f76d696 100644 --- a/media_manager/database/__init__.py +++ b/media_manager/database/__init__.py @@ -1,7 +1,8 @@ import logging import os +from collections.abc import Generator from contextvars import 
ContextVar -from typing import Annotated, Any, Generator, Optional +from typing import Annotated from fastapi import Depends from sqlalchemy import create_engine @@ -15,8 +16,8 @@ log = logging.getLogger(__name__) Base = declarative_base() -engine: Optional[Engine] = None -SessionLocal: Optional[sessionmaker] = None +engine: Engine | None = None +SessionLocal: sessionmaker | None = None def build_db_url( @@ -83,7 +84,7 @@ def get_engine() -> Engine: return engine -def get_session() -> Generator[Session, Any, None]: +def get_session() -> Generator[Session]: if SessionLocal is None: msg = "Session factory not initialized. Call init_engine(...) first." raise RuntimeError(msg) @@ -91,9 +92,9 @@ def get_session() -> Generator[Session, Any, None]: try: yield db db.commit() - except Exception as e: + except Exception: db.rollback() - log.critical(f"error occurred: {e}") + log.critical("", exc_info=True) raise finally: db.close() diff --git a/media_manager/exceptions.py b/media_manager/exceptions.py index d6000c3..9dba394 100644 --- a/media_manager/exceptions.py +++ b/media_manager/exceptions.py @@ -4,6 +4,13 @@ from psycopg.errors import UniqueViolation from sqlalchemy.exc import IntegrityError +class RenameError(Exception): + """Error when renaming something""" + + def __init__(self, message: str = "Failed to rename source directory") -> None: + super().__init__(message) + + class MediaManagerError(Exception): """Base exception for MediaManager errors.""" diff --git a/media_manager/filesystem_checks.py b/media_manager/filesystem_checks.py index 5ae2797..12b4196 100644 --- a/media_manager/filesystem_checks.py +++ b/media_manager/filesystem_checks.py @@ -36,10 +36,8 @@ def run_filesystem_checks(config: MediaManagerConfig, log: Logger) -> None: if not test_hardlink.samefile(test_torrent_file): log.critical("Hardlink creation failed!") log.info("Successfully created test hardlink in TV directory") - except OSError as e: - log.error( - f"Hardlink creation failed, falling back 
to copying files. Error: {e}" - ) + except OSError: + log.exception("Hardlink creation failed, falling back to copying files") shutil.copy(src=test_torrent_file, dst=test_hardlink) finally: test_hardlink.unlink() diff --git a/media_manager/indexer/indexers/jackett.py b/media_manager/indexer/indexers/jackett.py index ebb8eb8..f55a53b 100644 --- a/media_manager/indexer/indexers/jackett.py +++ b/media_manager/indexer/indexers/jackett.py @@ -46,8 +46,8 @@ class Jackett(GenericIndexer, TorznabMixin): result = future.result() if result is not None: responses.extend(result) - except Exception as e: - log.error(f"search result failed with: {e}") + except Exception: + log.exception("Searching failed") return responses diff --git a/media_manager/indexer/indexers/torznab_mixin.py b/media_manager/indexer/indexers/torznab_mixin.py index a83a7ec..060e88a 100644 --- a/media_manager/indexer/indexers/torznab_mixin.py +++ b/media_manager/indexer/indexers/torznab_mixin.py @@ -1,6 +1,6 @@ import logging import xml.etree.ElementTree as ET -from datetime import datetime, timezone +from datetime import UTC, datetime from email.utils import parsedate_to_datetime from media_manager.indexer.schemas import IndexerQueryResult @@ -39,7 +39,7 @@ class TorznabMixin: posted_date = parsedate_to_datetime( attribute.attrib["value"] ) - now = datetime.now(timezone.utc) + now = datetime.now(UTC) age = int((now - posted_date).total_seconds()) else: if attribute.attrib["name"] == "seeders": @@ -79,6 +79,6 @@ class TorznabMixin: indexer=indexer_name, ) result_list.append(result) - except Exception as e: - log.error(f"1 Torznab search result errored with error: {e}") + except Exception: + log.exception("1 Torznab search result failed") return result_list diff --git a/media_manager/indexer/schemas.py b/media_manager/indexer/schemas.py index 29a87c9..02af87c 100644 --- a/media_manager/indexer/schemas.py +++ b/media_manager/indexer/schemas.py @@ -13,7 +13,9 @@ IndexerQueryResultId = 
typing.NewType("IndexerQueryResultId", UUID) class IndexerQueryResult(BaseModel): model_config = ConfigDict(from_attributes=True) - id: IndexerQueryResultId = pydantic.Field(default_factory=lambda: IndexerQueryResultId(uuid4())) + id: IndexerQueryResultId = pydantic.Field( + default_factory=lambda: IndexerQueryResultId(uuid4()) + ) title: str download_url: str = pydantic.Field( exclude=True, diff --git a/media_manager/indexer/service.py b/media_manager/indexer/service.py index e9e724d..7b1976b 100644 --- a/media_manager/indexer/service.py +++ b/media_manager/indexer/service.py @@ -45,9 +45,9 @@ class IndexerService: log.debug( f"Indexer {indexer.__class__.__name__} returned {len(indexer_results)} results for query: {query}" ) - except Exception as e: - log.error( - f"Indexer {indexer.__class__.__name__} failed for query '{query}': {e}" + except Exception: + log.exception( + f"Indexer {indexer.__class__.__name__} failed for query '{query}'" ) for result in results: @@ -65,9 +65,9 @@ class IndexerService: indexer_results = indexer.search_movie(query=query, movie=movie) if indexer_results: results.extend(indexer_results) - except Exception as e: - log.error( - f"Indexer {indexer.__class__.__name__} failed for movie search '{query}': {e}" + except Exception: + log.exception( + f"Indexer {indexer.__class__.__name__} failed for movie search '{query}'" ) for result in results: @@ -87,9 +87,9 @@ class IndexerService: ) if indexer_results: results.extend(indexer_results) - except Exception as e: - log.error( - f"Indexer {indexer.__class__.__name__} failed for season search '{query}': {e}" + except Exception: + log.exception( + f"Indexer {indexer.__class__.__name__} failed for season search '{query}'" ) for result in results: diff --git a/media_manager/indexer/utils.py b/media_manager/indexer/utils.py index dcca6b2..5d17e30 100644 --- a/media_manager/indexer/utils.py +++ b/media_manager/indexer/utils.py @@ -149,8 +149,11 @@ def follow_redirects_to_final_torrent_url( raise 
RuntimeError(msg) except requests.exceptions.RequestException as e: - log.debug(f"An error occurred during the request for {initial_url}: {e}") - msg = f"An error occurred during the request: {e}" + log.debug( + f"An error occurred during the request for {initial_url}", + exc_info=True, + ) + msg = "An error occurred during the request" raise RuntimeError(msg) from e return current_url diff --git a/media_manager/logging.py b/media_manager/logging.py index 9fae9a5..af36d8a 100644 --- a/media_manager/logging.py +++ b/media_manager/logging.py @@ -1,7 +1,7 @@ import logging import os import sys -from datetime import datetime, timezone +from datetime import UTC, datetime from logging.config import dictConfig from pathlib import Path from typing import override @@ -12,7 +12,7 @@ from pythonjsonlogger.json import JsonFormatter class ISOJsonFormatter(JsonFormatter): @override def formatTime(self, record: logging.LogRecord, datefmt: str | None = None) -> str: - dt = datetime.fromtimestamp(record.created, tz=timezone.utc) + dt = datetime.fromtimestamp(record.created, tz=UTC) return dt.isoformat(timespec="milliseconds").replace("+00:00", "Z") diff --git a/media_manager/metadataProvider/abstract_metadata_provider.py b/media_manager/metadataProvider/abstract_metadata_provider.py index 28935e5..cde99b1 100644 --- a/media_manager/metadataProvider/abstract_metadata_provider.py +++ b/media_manager/metadataProvider/abstract_metadata_provider.py @@ -18,15 +18,11 @@ class AbstractMetadataProvider(ABC): pass @abstractmethod - def get_show_metadata( - self, show_id: int, language: str | None = None - ) -> Show: + def get_show_metadata(self, show_id: int, language: str | None = None) -> Show: raise NotImplementedError() @abstractmethod - def get_movie_metadata( - self, movie_id: int, language: str | None = None - ) -> Movie: + def get_movie_metadata(self, movie_id: int, language: str | None = None) -> Movie: raise NotImplementedError() @abstractmethod diff --git 
a/media_manager/metadataProvider/tmdb.py b/media_manager/metadataProvider/tmdb.py index 1acb30f..34f8cba 100644 --- a/media_manager/metadataProvider/tmdb.py +++ b/media_manager/metadataProvider/tmdb.py @@ -51,7 +51,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): response.raise_for_status() return response.json() except requests.RequestException as e: - log.error(f"TMDB API error getting show metadata for ID {show_id}: {e}") + log.exception(f"TMDB API error getting show metadata for ID {show_id}") if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", @@ -68,7 +68,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): response.raise_for_status() return response.json() except requests.RequestException as e: - log.error(f"TMDB API error getting show external IDs for ID {show_id}: {e}") + log.exception(f"TMDB API error getting show external IDs for ID {show_id}") if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", @@ -90,8 +90,8 @@ class TmdbMetadataProvider(AbstractMetadataProvider): response.raise_for_status() return response.json() except requests.RequestException as e: - log.error( - f"TMDB API error getting season {season_number} metadata for show ID {show_id}: {e}" + log.exception( + f"TMDB API error getting season {season_number} metadata for show ID {show_id}" ) if notification_manager.is_configured(): notification_manager.send_notification( @@ -113,7 +113,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): response.raise_for_status() return response.json() except requests.RequestException as e: - log.error(f"TMDB API error searching TV shows with query '{query}': {e}") + log.exception(f"TMDB API error searching TV shows with query '{query}'") if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", @@ -131,7 +131,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): 
response.raise_for_status() return response.json() except requests.RequestException as e: - log.error(f"TMDB API error getting trending TV: {e}") + log.exception("TMDB API error getting trending TV") if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", @@ -151,7 +151,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): response.raise_for_status() return response.json() except requests.RequestException as e: - log.error(f"TMDB API error getting movie metadata for ID {movie_id}: {e}") + log.exception(f"TMDB API error getting movie metadata for ID {movie_id}") if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", @@ -167,8 +167,8 @@ class TmdbMetadataProvider(AbstractMetadataProvider): response.raise_for_status() return response.json() except requests.RequestException as e: - log.error( - f"TMDB API error getting movie external IDs for ID {movie_id}: {e}" + log.exception( + f"TMDB API error getting movie external IDs for ID {movie_id}" ) if notification_manager.is_configured(): notification_manager.send_notification( @@ -190,7 +190,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): response.raise_for_status() return response.json() except requests.RequestException as e: - log.error(f"TMDB API error searching movies with query '{query}': {e}") + log.exception(f"TMDB API error searching movies with query '{query}'") if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", @@ -208,7 +208,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): response.raise_for_status() return response.json() except requests.RequestException as e: - log.error(f"TMDB API error getting trending movies: {e}") + log.exception("TMDB API error getting trending movies") if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", @@ -243,9 +243,7 @@ class 
TmdbMetadataProvider(AbstractMetadataProvider): return True @override - def get_show_metadata( - self, show_id: int, language: str | None = None - ) -> Show: + def get_show_metadata(self, show_id: int, language: str | None = None) -> Show: """ :param show_id: the external id of the show @@ -368,14 +366,12 @@ class TmdbMetadataProvider(AbstractMetadataProvider): original_language=original_language, ) ) - except Exception as e: - log.warning(f"Error processing search result: {e}") + except Exception: + log.warning("Error processing search result", exc_info=True) return formatted_results @override - def get_movie_metadata( - self, movie_id: int, language: str | None = None - ) -> Movie: + def get_movie_metadata(self, movie_id: int, language: str | None = None) -> Movie: """ Get movie metadata with language-aware fetching. @@ -470,8 +466,8 @@ class TmdbMetadataProvider(AbstractMetadataProvider): original_language=original_language, ) ) - except Exception as e: - log.warning(f"Error processing search result: {e}") + except Exception: + log.warning("Error processing search result", exc_info=True) return formatted_results @override diff --git a/media_manager/metadataProvider/tvdb.py b/media_manager/metadataProvider/tvdb.py index 616ae74..5f8ccc2 100644 --- a/media_manager/metadataProvider/tvdb.py +++ b/media_manager/metadataProvider/tvdb.py @@ -63,9 +63,7 @@ class TvdbMetadataProvider(AbstractMetadataProvider): return False @override - def get_show_metadata( - self, show_id: int, language: str | None = None - ) -> Show: + def get_show_metadata(self, show_id: int, language: str | None = None) -> Show: """ :param show_id: The external id of the show @@ -150,8 +148,8 @@ class TvdbMetadataProvider(AbstractMetadataProvider): vote_average=None, ) ) - except Exception as e: - log.warning(f"Error processing search result: {e}") + except Exception: + log.warning("Error processing search result", exc_info=True) return formatted_results results = self.__get_trending_tv() 
formatted_results = [] @@ -178,8 +176,8 @@ class TvdbMetadataProvider(AbstractMetadataProvider): vote_average=None, ) ) - except Exception as e: - log.warning(f"Error processing search result: {e}") + except Exception: + log.warning("Error processing search result", exc_info=True) return formatted_results @override @@ -215,8 +213,8 @@ class TvdbMetadataProvider(AbstractMetadataProvider): vote_average=None, ) ) - except Exception as e: - log.warning(f"Error processing search result: {e}") + except Exception: + log.warning("Error processing search result", exc_info=True) return formatted_results results = self.__get_trending_movies() results = results[0:20] @@ -231,15 +229,15 @@ class TvdbMetadataProvider(AbstractMetadataProvider): year = None if result.get("image"): - poster_path = "https://artworks.thetvdb.com" + str(result.get("image")) + poster_path = "https://artworks.thetvdb.com" + str( + result.get("image") + ) else: poster_path = None formatted_results.append( MetaDataProviderSearchResult( - poster_path= poster_path - if result.get("image") - else None, + poster_path=poster_path if result.get("image") else None, overview=result.get("overview"), name=result["name"], external_id=result["id"], @@ -249,8 +247,8 @@ class TvdbMetadataProvider(AbstractMetadataProvider): vote_average=None, ) ) - except Exception as e: - log.warning(f"Error processing search result: {e}") + except Exception: + log.warning("Error processing search result", exc_info=True) return formatted_results @override @@ -269,9 +267,7 @@ class TvdbMetadataProvider(AbstractMetadataProvider): return False @override - def get_movie_metadata( - self, movie_id: int, language: str | None = None - ) -> Movie: + def get_movie_metadata(self, movie_id: int, language: str | None = None) -> Movie: """ :param movie_id: the external id of the movie diff --git a/media_manager/movies/repository.py b/media_manager/movies/repository.py index 06b8058..1e70af6 100644 --- a/media_manager/movies/repository.py +++ 
b/media_manager/movies/repository.py @@ -59,8 +59,8 @@ class MovieRepository: msg = f"Movie with id {movie_id} not found." raise NotFoundError(msg) return MovieSchema.model_validate(result) - except SQLAlchemyError as e: - log.error(f"Database error while retrieving movie {movie_id}: {e}") + except SQLAlchemyError: + log.exception(f"Database error while retrieving movie {movie_id}") raise def get_movie_by_external_id( @@ -86,9 +86,9 @@ class MovieRepository: msg = f"Movie with external_id {external_id} and provider {metadata_provider} not found." raise NotFoundError(msg) return MovieSchema.model_validate(result) - except SQLAlchemyError as e: - log.error( - f"Database error while retrieving movie by external_id {external_id}: {e}" + except SQLAlchemyError: + log.exception( + f"Database error while retrieving movie by external_id {external_id}" ) raise @@ -103,8 +103,8 @@ class MovieRepository: stmt = select(Movie) results = self.db.execute(stmt).scalars().unique().all() return [MovieSchema.model_validate(movie) for movie in results] - except SQLAlchemyError as e: - log.error(f"Database error while retrieving all movies: {e}") + except SQLAlchemyError: + log.exception("Database error while retrieving all movies") raise def save_movie(self, movie: MovieSchema) -> MovieSchema: @@ -140,14 +140,14 @@ class MovieRepository: return MovieSchema.model_validate(db_movie) except IntegrityError as e: self.db.rollback() - log.error(f"Integrity error while saving movie {movie.name}: {e}") + log.exception(f"Integrity error while saving movie {movie.name}") msg = ( f"Movie with this primary key or unique constraint violation: {e.orig}" ) raise ConflictError(msg) from e - except SQLAlchemyError as e: + except SQLAlchemyError: self.db.rollback() - log.error(f"Database error while saving movie {movie.name}: {e}") + log.exception(f"Database error while saving movie {movie.name}") raise def delete_movie(self, movie_id: MovieId) -> None: @@ -168,9 +168,9 @@ class MovieRepository: 
self.db.delete(movie) self.db.commit() log.info(f"Successfully deleted movie with id: {movie_id}") - except SQLAlchemyError as e: + except SQLAlchemyError: self.db.rollback() - log.error(f"Database error while deleting movie {movie_id}: {e}") + log.exception(f"Database error while deleting movie {movie_id}") raise def add_movie_request( @@ -204,13 +204,13 @@ class MovieRepository: self.db.refresh(db_model) log.info(f"Successfully added movie request with id: {db_model.id}") return MovieRequestSchema.model_validate(db_model) - except IntegrityError as e: + except IntegrityError: self.db.rollback() - log.error(f"Integrity error while adding movie request: {e}") + log.exception("Integrity error while adding movie request") raise - except SQLAlchemyError as e: + except SQLAlchemyError: self.db.rollback() - log.error(f"Database error while adding movie request: {e}") + log.exception("Database error while adding movie request") raise def set_movie_library(self, movie_id: MovieId, library: str) -> None: @@ -229,9 +229,9 @@ class MovieRepository: raise NotFoundError(msg) movie.library = library self.db.commit() - except SQLAlchemyError as e: + except SQLAlchemyError: self.db.rollback() - log.error(f"Database error setting library for movie {movie_id}: {e}") + log.exception(f"Database error setting library for movie {movie_id}") raise def delete_movie_request(self, movie_request_id: MovieRequestId) -> None: @@ -251,10 +251,10 @@ class MovieRepository: raise NotFoundError(msg) self.db.commit() # Successfully deleted movie request with id: {movie_request_id} - except SQLAlchemyError as e: + except SQLAlchemyError: self.db.rollback() - log.error( - f"Database error while deleting movie request {movie_request_id}: {e}" + log.exception( + f"Database error while deleting movie request {movie_request_id}" ) raise @@ -273,8 +273,8 @@ class MovieRepository: ) results = self.db.execute(stmt).scalars().unique().all() return [RichMovieRequestSchema.model_validate(x) for x in results] - 
except SQLAlchemyError as e: - log.error(f"Database error while retrieving movie requests: {e}") + except SQLAlchemyError: + log.exception("Database error while retrieving movie requests") raise def add_movie_file(self, movie_file: MovieFileSchema) -> MovieFileSchema: @@ -292,13 +292,13 @@ class MovieRepository: self.db.commit() self.db.refresh(db_model) return MovieFileSchema.model_validate(db_model) - except IntegrityError as e: + except IntegrityError: self.db.rollback() - log.error(f"Integrity error while adding movie file: {e}") + log.exception("Integrity error while adding movie file") raise - except SQLAlchemyError as e: + except SQLAlchemyError: self.db.rollback() - log.error(f"Database error while adding movie file: {e}") + log.exception("Database error while adding movie file") raise def remove_movie_files_by_torrent_id(self, torrent_id: TorrentId) -> int: @@ -313,14 +313,15 @@ class MovieRepository: stmt = delete(MovieFile).where(MovieFile.torrent_id == torrent_id) result = self.db.execute(stmt) self.db.commit() - return result.rowcount - except SQLAlchemyError as e: + except SQLAlchemyError: self.db.rollback() - log.error( - f"Database error removing movie files for torrent_id {torrent_id}: {e}" + log.exception( + f"Database error removing movie files for torrent_id {torrent_id}" ) raise + return result.rowcount + def get_movie_files_by_movie_id(self, movie_id: MovieId) -> list[MovieFileSchema]: """ Retrieve all movie files for a given movie ID. 
@@ -333,9 +334,9 @@ class MovieRepository: stmt = select(MovieFile).where(MovieFile.movie_id == movie_id) results = self.db.execute(stmt).scalars().all() return [MovieFileSchema.model_validate(sf) for sf in results] - except SQLAlchemyError as e: - log.error( - f"Database error retrieving movie files for movie_id {movie_id}: {e}" + except SQLAlchemyError: + log.exception( + f"Database error retrieving movie files for movie_id {movie_id}" ) raise @@ -367,13 +368,13 @@ class MovieRepository: usenet=torrent.usenet, ) formatted_results.append(movie_torrent) - return formatted_results - except SQLAlchemyError as e: - log.error( - f"Database error retrieving torrents for movie_id {movie_id}: {e}" - ) + + except SQLAlchemyError: + log.exception(f"Database error retrieving torrents for movie_id {movie_id}") raise + return formatted_results + def get_all_movies_with_torrents(self) -> list[MovieSchema]: """ Retrieve all movies that are associated with a torrent, ordered alphabetically by movie name. @@ -391,8 +392,8 @@ class MovieRepository: ) results = self.db.execute(stmt).scalars().unique().all() return [MovieSchema.model_validate(movie) for movie in results] - except SQLAlchemyError as e: - log.error(f"Database error retrieving all movies with torrents: {e}") + except SQLAlchemyError: + log.exception("Database error retrieving all movies with torrents") raise def get_movie_request(self, movie_request_id: MovieRequestId) -> MovieRequestSchema: @@ -410,10 +411,8 @@ class MovieRepository: msg = f"Movie request with id {movie_request_id} not found." 
raise NotFoundError(msg) return MovieRequestSchema.model_validate(request) - except SQLAlchemyError as e: - log.error( - f"Database error retrieving movie request {movie_request_id}: {e}" - ) + except SQLAlchemyError: + log.exception(f"Database error retrieving movie request {movie_request_id}") raise def get_movie_by_torrent_id(self, torrent_id: TorrentId) -> MovieSchema: @@ -436,10 +435,8 @@ class MovieRepository: msg = f"Movie for torrent_id {torrent_id} not found." raise NotFoundError(msg) return MovieSchema.model_validate(result) - except SQLAlchemyError as e: - log.error( - f"Database error retrieving movie by torrent_id {torrent_id}: {e}" - ) + except SQLAlchemyError: + log.exception(f"Database error retrieving movie by torrent_id {torrent_id}") raise def update_movie_attributes( diff --git a/media_manager/movies/service.py b/media_manager/movies/service.py index 3dd12b6..c6cd3b4 100644 --- a/media_manager/movies/service.py +++ b/media_manager/movies/service.py @@ -8,7 +8,7 @@ from sqlalchemy.orm import Session from media_manager.config import MediaManagerConfig from media_manager.database import SessionLocal, get_session -from media_manager.exceptions import InvalidConfigError, NotFoundError +from media_manager.exceptions import InvalidConfigError, NotFoundError, RenameError from media_manager.indexer.repository import IndexerRepository from media_manager.indexer.schemas import IndexerQueryResult, IndexerQueryResultId from media_manager.indexer.service import IndexerService @@ -98,9 +98,7 @@ class MovieService: """ return self.movie_repository.add_movie_request(movie_request=movie_request) - def get_movie_request_by_id( - self, movie_request_id: MovieRequestId - ) -> MovieRequest: + def get_movie_request_by_id(self, movie_request_id: MovieRequestId) -> MovieRequest: """ Get a movie request by its ID. 
@@ -151,10 +149,8 @@ class MovieService: try: shutil.rmtree(movie_dir) log.info(f"Deleted movie directory: {movie_dir}") - except OSError as e: - log.error( - f"Deleting movie directory: {movie_dir} : {e.strerror}" - ) + except OSError: + log.exception(f"Deleting movie directory: {movie_dir}") if delete_torrents: # Get all torrents associated with this movie @@ -171,8 +167,10 @@ class MovieService: torrent=torrent, delete_files=True ) log.info(f"Deleted torrent: {torrent.torrent_title}") - except Exception as e: - log.warning(f"Failed to delete torrent {torrent.hash}: {e}") + except Exception: + log.warning( + f"Failed to delete torrent {torrent.hash}", exc_info=True + ) # Delete from database self.movie_repository.delete_movie(movie_id=movie.id) @@ -237,19 +235,19 @@ class MovieService: self.movie_repository.get_movie_by_external_id( external_id=external_id, metadata_provider=metadata_provider ) - return True except NotFoundError: return False elif movie_id is not None: try: self.movie_repository.get_movie_by_id(movie_id=movie_id) - return True except NotFoundError: return False else: msg = "Use one of the provided overloads for this function!" 
raise ValueError(msg) + return True + def get_all_available_torrents_for_movie( self, movie: Movie, search_query_override: str | None = None ) -> list[IndexerQueryResult]: @@ -570,8 +568,8 @@ class MovieService: try: movie_root_path.mkdir(parents=True, exist_ok=True) - except Exception as e: - log.error(f"Failed to create directory {movie_root_path}: {e}") + except Exception: + log.exception(f"Failed to create directory {movie_root_path}") return False # import movie video @@ -682,9 +680,8 @@ class MovieService: try: source_directory.rename(new_source_path) except Exception as e: - log.error(f"Failed to rename {source_directory} to {new_source_path}: {e}") - msg = "Failed to rename directory" - raise Exception(msg) from e + log.exception(f"Failed to rename {source_directory} to {new_source_path}") + raise RenameError from e video_files, subtitle_files, _all_files = get_files_for_import( directory=new_source_path ) @@ -786,12 +783,14 @@ def auto_download_all_approved_movie_requests() -> None: movie_repository = MovieRepository(db=db) torrent_service = TorrentService(torrent_repository=TorrentRepository(db=db)) indexer_service = IndexerService(indexer_repository=IndexerRepository(db=db)) - notification_service = NotificationService(notification_repository=NotificationRepository(db=db)) + notification_service = NotificationService( + notification_repository=NotificationRepository(db=db) + ) movie_service = MovieService( movie_repository=movie_repository, torrent_service=torrent_service, indexer_service=indexer_service, - notification_service=notification_service + notification_service=notification_service, ) log.info("Auto downloading all approved movie requests") @@ -821,7 +820,9 @@ def import_all_movie_torrents() -> None: movie_repository = MovieRepository(db=db) torrent_service = TorrentService(torrent_repository=TorrentRepository(db=db)) indexer_service = IndexerService(indexer_repository=IndexerRepository(db=db)) - notification_service = 
NotificationService(notification_repository=NotificationRepository(db=db)) + notification_service = NotificationService( + notification_repository=NotificationRepository(db=db) + ) movie_service = MovieService( movie_repository=movie_repository, torrent_service=torrent_service, @@ -841,11 +842,8 @@ def import_all_movie_torrents() -> None: ) continue movie_service.import_torrent_files(torrent=t, movie=movie) - except RuntimeError as e: - log.error( - f"Failed to import torrent {t.title}: {e}", - exc_info=True, - ) + except RuntimeError: + log.exception(f"Failed to import torrent {t.title}") log.info("Finished importing all torrents") db.commit() @@ -860,7 +858,9 @@ def update_all_movies_metadata() -> None: movie_repository=movie_repository, torrent_service=TorrentService(torrent_repository=TorrentRepository(db=db)), indexer_service=IndexerService(indexer_repository=IndexerRepository(db=db)), - notification_service=NotificationService(notification_repository=NotificationRepository(db=db)) + notification_service=NotificationService( + notification_repository=NotificationRepository(db=db) + ), ) log.info("Updating metadata for all movies") @@ -880,9 +880,9 @@ def update_all_movies_metadata() -> None: f"Unsupported metadata provider {movie.metadata_provider} for movie {movie.name}, skipping update." 
) continue - except InvalidConfigError as e: - log.error( - f"Error initializing metadata provider {movie.metadata_provider} for movie {movie.name}: {e}" + except InvalidConfigError: + log.exception( + f"Error initializing metadata provider {movie.metadata_provider} for movie {movie.name}", ) continue movie_service.update_movie_metadata( diff --git a/media_manager/notification/manager.py b/media_manager/notification/manager.py index 2e7558c..f72bd24 100644 --- a/media_manager/notification/manager.py +++ b/media_manager/notification/manager.py @@ -3,7 +3,6 @@ Notification Manager - Orchestrates sending notifications through all configured """ import logging -from typing import List from media_manager.config import MediaManagerConfig from media_manager.notification.schemas import MessageNotification @@ -33,7 +32,7 @@ class NotificationManager: def __init__(self) -> None: self.config = MediaManagerConfig().notifications - self.providers: List[AbstractNotificationServiceProvider] = [] + self.providers: list[AbstractNotificationServiceProvider] = [] self._initialize_providers() def _initialize_providers(self) -> None: @@ -42,32 +41,32 @@ class NotificationManager: try: self.providers.append(EmailNotificationServiceProvider()) logger.info("Email notification provider initialized") - except Exception as e: - logger.error(f"Failed to initialize Email provider: {e}") + except Exception: + logger.exception("Failed to initialize Email provider") # Gotify provider if self.config.gotify.enabled: try: self.providers.append(GotifyNotificationServiceProvider()) logger.info("Gotify notification provider initialized") - except Exception as e: - logger.error(f"Failed to initialize Gotify provider: {e}") + except Exception: + logger.exception("Failed to initialize Gotify provider") # Ntfy provider if self.config.ntfy.enabled: try: self.providers.append(NtfyNotificationServiceProvider()) logger.info("Ntfy notification provider initialized") - except Exception as e: - 
logger.error(f"Failed to initialize Ntfy provider: {e}") + except Exception: + logger.exception("Failed to initialize Ntfy provider") # Pushover provider if self.config.pushover.enabled: try: self.providers.append(PushoverNotificationServiceProvider()) logger.info("Pushover notification provider initialized") - except Exception as e: - logger.error(f"Failed to initialize Pushover provider: {e}") + except Exception: + logger.exception("Failed to initialize Pushover provider") logger.info(f"Initialized {len(self.providers)} notification providers") @@ -86,10 +85,10 @@ class NotificationManager: else: logger.warning(f"Failed to send notification via {provider_name}") - except Exception as e: - logger.error(f"Error sending notification via {provider_name}: {e}") + except Exception: + logger.exception(f"Error sending notification via {provider_name}") - def get_configured_providers(self) -> List[str]: + def get_configured_providers(self) -> list[str]: return [provider.__class__.__name__ for provider in self.providers] def is_configured(self) -> bool: diff --git a/media_manager/notification/repository.py b/media_manager/notification/repository.py index 943f953..8f81d45 100644 --- a/media_manager/notification/repository.py +++ b/media_manager/notification/repository.py @@ -6,6 +6,7 @@ from sqlalchemy.exc import ( SQLAlchemyError, ) from sqlalchemy.orm import Session +from sqlalchemy.sql.expression import false from media_manager.exceptions import ConflictError, NotFoundError from media_manager.notification.models import Notification @@ -36,7 +37,7 @@ class NotificationRepository: try: stmt = ( select(Notification) - .where(Notification.read == False) # noqa: E712 + .where(Notification.read == false()) .order_by(Notification.timestamp.desc()) ) results = self.db.execute(stmt).scalars().all() @@ -44,8 +45,8 @@ class NotificationRepository: NotificationSchema.model_validate(notification) for notification in results ] - except SQLAlchemyError as e: - log.error(f"Database 
error while retrieving unread notifications: {e}") + except SQLAlchemyError: + log.exception("Database error while retrieving unread notifications") raise def get_all_notifications(self) -> list[NotificationSchema]: @@ -56,8 +57,8 @@ class NotificationRepository: NotificationSchema.model_validate(notification) for notification in results ] - except SQLAlchemyError as e: - log.error(f"Database error while retrieving notifications: {e}") + except SQLAlchemyError: + log.exception("Database error while retrieving notifications") raise def save_notification(self, notification: NotificationSchema) -> None: @@ -71,8 +72,8 @@ class NotificationRepository: ) ) self.db.commit() - except IntegrityError as e: - log.error(f"Could not save notification, Error: {e}") + except IntegrityError: + log.exception("Could not save notification") msg = f"Notification with id {notification.id} already exists." raise ConflictError(msg) from None return diff --git a/media_manager/notification/schemas.py b/media_manager/notification/schemas.py index ecdcf81..521bf46 100644 --- a/media_manager/notification/schemas.py +++ b/media_manager/notification/schemas.py @@ -12,7 +12,8 @@ class Notification(BaseModel): model_config = ConfigDict(from_attributes=True) id: NotificationId = Field( - default_factory=lambda: NotificationId(uuid.uuid4()), description="Unique identifier for the notification" + default_factory=lambda: NotificationId(uuid.uuid4()), + description="Unique identifier for the notification", ) read: bool = Field(False, description="Whether the notification has been read") message: str = Field(description="The content of the notification") diff --git a/media_manager/torrent/download_clients/qbittorrent.py b/media_manager/torrent/download_clients/qbittorrent.py index d20a078..d371337 100644 --- a/media_manager/torrent/download_clients/qbittorrent.py +++ b/media_manager/torrent/download_clients/qbittorrent.py @@ -53,8 +53,8 @@ class QbittorrentDownloadClient(AbstractDownloadClient): ) 
try: self.api_client.auth_log_in() - except Exception as e: - log.error(f"Failed to log into qbittorrent: {e}") + except Exception: + log.exception("Failed to log into qbittorrent") raise try: @@ -72,11 +72,8 @@ class QbittorrentDownloadClient(AbstractDownloadClient): if self.config.category_save_path != "" else None, ) - except Exception as e: - if str(e) != "": - log.error( - f"Error on updating MediaManager category in qBittorrent, error: {e}" - ) + except Exception: + log.exception("Error on updating MediaManager category in qBittorrent") def download_torrent(self, indexer_result: IndexerQueryResult) -> Torrent: """ diff --git a/media_manager/torrent/download_clients/sabnzbd.py b/media_manager/torrent/download_clients/sabnzbd.py index 38247e8..c2b5161 100644 --- a/media_manager/torrent/download_clients/sabnzbd.py +++ b/media_manager/torrent/download_clients/sabnzbd.py @@ -38,8 +38,8 @@ class SabnzbdDownloadClient(AbstractDownloadClient): try: # Test connection self.client.version() - except Exception as e: - log.error(f"Failed to connect to SABnzbd: {e}") + except Exception: + log.exception("Failed to connect to SABnzbd") raise def download_torrent(self, indexer_result: IndexerQueryResult) -> Torrent: @@ -55,10 +55,7 @@ class SabnzbdDownloadClient(AbstractDownloadClient): url=str(indexer_result.download_url), nzbname=indexer_result.title ) if not response["status"]: - error_msg = response - log.error(f"Failed to add NZB to SABnzbd: {error_msg}") - msg = f"Failed to add NZB to SABnzbd: {error_msg}" - raise RuntimeError(msg) + raise RuntimeError(f"Failed to add NZB to SABnzbd: {response}") # noqa: EM102, TRY003, TRY301 # Generate a hash for the NZB (using title and download URL) nzo_id = response["nzo_ids"][0] @@ -75,13 +72,12 @@ class SabnzbdDownloadClient(AbstractDownloadClient): # Get initial status from SABnzbd torrent.status = self.get_torrent_status(torrent) - - return torrent - - except Exception as e: - log.error(f"Failed to download NZB 
{indexer_result.title}: {e}") + except Exception: + log.exception(f"Failed to download NZB {indexer_result.title}") raise + return torrent + def remove_torrent(self, torrent: Torrent, delete_data: bool = False) -> None: """ Remove a torrent from SABnzbd. @@ -91,8 +87,8 @@ class SabnzbdDownloadClient(AbstractDownloadClient): """ try: self.client.delete_job(nzo_id=torrent.hash, delete_files=delete_data) - except Exception as e: - log.error(f"Failed to remove torrent {torrent.title}: {e}") + except Exception: + log.exception(f"Failed to remove torrent {torrent.title}") raise def pause_torrent(self, torrent: Torrent) -> None: @@ -103,8 +99,8 @@ class SabnzbdDownloadClient(AbstractDownloadClient): """ try: self.client.pause_job(nzo_id=torrent.hash) - except Exception as e: - log.error(f"Failed to pause torrent {torrent.title}: {e}") + except Exception: + log.exception(f"Failed to pause torrent {torrent.title}") raise def resume_torrent(self, torrent: Torrent) -> None: @@ -115,8 +111,8 @@ class SabnzbdDownloadClient(AbstractDownloadClient): """ try: self.client.resume_job(nzo_id=torrent.hash) - except Exception as e: - log.error(f"Failed to resume torrent {torrent.title}: {e}") + except Exception: + log.exception(f"Failed to resume torrent {torrent.title}") raise def get_torrent_status(self, torrent: Torrent) -> TorrentStatus: diff --git a/media_manager/torrent/download_clients/transmission.py b/media_manager/torrent/download_clients/transmission.py index eaba85c..fedaf9f 100644 --- a/media_manager/torrent/download_clients/transmission.py +++ b/media_manager/torrent/download_clients/transmission.py @@ -43,8 +43,8 @@ class TransmissionDownloadClient(AbstractDownloadClient): ) # Test connection self._client.session_stats() - except Exception as e: - log.error(f"Failed to connect to Transmission: {e}") + except Exception: + log.exception("Failed to connect to Transmission") raise def download_torrent(self, indexer_result: IndexerQueryResult) -> Torrent: @@ -68,8 +68,8 @@ 
class TransmissionDownloadClient(AbstractDownloadClient): f"Successfully added torrent to Transmission: {indexer_result.title}" ) - except Exception as e: - log.error(f"Failed to add torrent to Transmission: {e}") + except Exception: + log.exception("Failed to add torrent to Transmission") raise torrent = Torrent( @@ -95,8 +95,8 @@ class TransmissionDownloadClient(AbstractDownloadClient): try: self._client.remove_torrent(torrent.hash, delete_data=delete_data) - except Exception as e: - log.error(f"Failed to remove torrent: {e}") + except Exception: + log.exception("Failed to remove torrent") raise def get_torrent_status(self, torrent: Torrent) -> TorrentStatus: @@ -123,13 +123,12 @@ class TransmissionDownloadClient(AbstractDownloadClient): log.warning( f"Torrent {torrent.title} has error status: {transmission_torrent.error_string}" ) - - return status - - except Exception as e: - log.error(f"Failed to get torrent status: {e}") + except Exception: + log.exception("Failed to get torrent status") return TorrentStatus.error + return status + def pause_torrent(self, torrent: Torrent) -> None: """ Pause a torrent download. 
@@ -140,8 +139,8 @@ class TransmissionDownloadClient(AbstractDownloadClient): self._client.stop_torrent(torrent.hash) log.debug(f"Successfully paused torrent: {torrent.title}") - except Exception as e: - log.error(f"Failed to pause torrent: {e}") + except Exception: + log.exception("Failed to pause torrent") raise def resume_torrent(self, torrent: Torrent) -> None: @@ -154,6 +153,6 @@ class TransmissionDownloadClient(AbstractDownloadClient): self._client.start_torrent(torrent.hash) log.debug(f"Successfully resumed torrent: {torrent.title}") - except Exception as e: - log.error(f"Failed to resume torrent: {e}") + except Exception: + log.exception("Failed to resume torrent") raise diff --git a/media_manager/torrent/manager.py b/media_manager/torrent/manager.py index 65b12a4..11fe78b 100644 --- a/media_manager/torrent/manager.py +++ b/media_manager/torrent/manager.py @@ -43,22 +43,22 @@ class DownloadManager: if self.config.qbittorrent.enabled: try: self._torrent_client = QbittorrentDownloadClient() - except Exception as e: - log.error(f"Failed to initialize qBittorrent client: {e}") + except Exception: + log.exception("Failed to initialize qBittorrent client") # If qBittorrent is not available or failed, try Transmission if self._torrent_client is None and self.config.transmission.enabled: try: self._torrent_client = TransmissionDownloadClient() - except Exception as e: - log.error(f"Failed to initialize Transmission client: {e}") + except Exception: + log.exception("Failed to initialize Transmission client") # Initialize SABnzbd client for usenet if self.config.sabnzbd.enabled: try: self._usenet_client = SabnzbdDownloadClient() - except Exception as e: - log.error(f"Failed to initialize SABnzbd client: {e}") + except Exception: + log.exception("Failed to initialize SABnzbd client") active_clients = [] if self._torrent_client: diff --git a/media_manager/torrent/repository.py b/media_manager/torrent/repository.py index 382e0df..6e20ef4 100644 --- 
a/media_manager/torrent/repository.py +++ b/media_manager/torrent/repository.py @@ -87,7 +87,9 @@ class TorrentRepository: return None return MovieSchema.model_validate(result) - def get_movie_files_of_torrent(self, torrent_id: TorrentId) -> list[MovieFileSchema]: + def get_movie_files_of_torrent( + self, torrent_id: TorrentId + ) -> list[MovieFileSchema]: stmt = select(MovieFile).where(MovieFile.torrent_id == torrent_id) result = self.db.execute(stmt).scalars().all() return [MovieFileSchema.model_validate(movie_file) for movie_file in result] diff --git a/media_manager/torrent/service.py b/media_manager/torrent/service.py index c5bcaaf..d8c1bee 100644 --- a/media_manager/torrent/service.py +++ b/media_manager/torrent/service.py @@ -92,8 +92,8 @@ class TorrentService: for x in self.torrent_repository.get_all_torrents(): try: torrents.append(self.get_torrent_status(x)) - except RuntimeError as e: - log.error(f"Error fetching status for torrent {x.title}: {e}") + except RuntimeError: + log.exception(f"Error fetching status for torrent {x.title}") return torrents def get_torrent_by_id(self, torrent_id: TorrentId) -> Torrent: diff --git a/media_manager/torrent/utils.py b/media_manager/torrent/utils.py index 48223f1..2fb57d9 100644 --- a/media_manager/torrent/utils.py +++ b/media_manager/torrent/utils.py @@ -57,8 +57,8 @@ def extract_archives(files: list) -> None: ) try: patoolib.extract_archive(str(file), outdir=str(file.parent)) - except patoolib.util.PatoolError as e: - log.error(f"Failed to extract archive {file}. 
Error: {e}") + except patoolib.util.PatoolError: + log.exception(f"Failed to extract archive {file}") def get_torrent_filepath(torrent: Torrent) -> Path: @@ -72,10 +72,10 @@ def import_file(target_file: Path, source_file: Path) -> None: try: target_file.hardlink_to(source_file) except FileExistsError: - log.error(f"File already exists at {target_file}.") - except (OSError, UnsupportedOperation, NotImplementedError) as e: - log.error( - f"Failed to create hardlink from {source_file} to {target_file}: {e}. Falling back to copying the file." + log.exception(f"File already exists at {target_file}.") + except (OSError, UnsupportedOperation, NotImplementedError): + log.exception( + f"Failed to create hardlink from {source_file} to {target_file}. Falling back to copying the file." ) shutil.copy(src=source_file, dst=target_file) @@ -148,16 +148,16 @@ def get_torrent_hash(torrent: IndexerQueryResult) -> str: response = requests.get(str(torrent.download_url), timeout=30) response.raise_for_status() torrent_content = response.content - except InvalidSchema as e: - log.debug(f"Invalid schema for URL {torrent.download_url}: {e}") + except InvalidSchema: + log.debug(f"Invalid schema for URL {torrent.download_url}", exc_info=True) final_url = follow_redirects_to_final_torrent_url( initial_url=torrent.download_url, session=requests.Session(), timeout=MediaManagerConfig().indexers.prowlarr.timeout_seconds, ) return str(libtorrent.parse_magnet_uri(final_url).info_hash) - except Exception as e: - log.error(f"Failed to download torrent file: {e}") + except Exception: + log.exception("Failed to download torrent file") raise # saving the torrent file @@ -170,9 +170,10 @@ def get_torrent_hash(torrent: IndexerQueryResult) -> str: torrent_hash = hashlib.sha1( # noqa: S324 bencoder.encode(decoded_content[b"info"]) ).hexdigest() - except Exception as e: - log.error(f"Failed to decode torrent file: {e}") + except Exception: + log.exception("Failed to decode torrent file") raise + return 
torrent_hash diff --git a/media_manager/tv/repository.py b/media_manager/tv/repository.py index e89ae57..6aad169 100644 --- a/media_manager/tv/repository.py +++ b/media_manager/tv/repository.py @@ -67,8 +67,8 @@ class TvRepository: msg = f"Show with id {show_id} not found." raise NotFoundError(msg) return ShowSchema.model_validate(result) - except SQLAlchemyError as e: - log.error(f"Database error while retrieving show {show_id}: {e}") + except SQLAlchemyError: + log.exception(f"Database error while retrieving show {show_id}") raise def get_show_by_external_id( @@ -95,9 +95,9 @@ class TvRepository: msg = f"Show with external_id {external_id} and provider {metadata_provider} not found." raise NotFoundError(msg) return ShowSchema.model_validate(result) - except SQLAlchemyError as e: - log.error( - f"Database error while retrieving show by external_id {external_id}: {e}" + except SQLAlchemyError: + log.exception( + f"Database error while retrieving show by external_id {external_id}", ) raise @@ -114,8 +114,8 @@ class TvRepository: ) results = self.db.execute(stmt).scalars().unique().all() return [ShowSchema.model_validate(show) for show in results] - except SQLAlchemyError as e: - log.error(f"Database error while retrieving all shows: {e}") + except SQLAlchemyError: + log.exception("Database error while retrieving all shows") raise def get_total_downloaded_episodes_count(self) -> int: @@ -124,11 +124,9 @@ class TvRepository: select(func.count()).select_from(Episode).join(Season).join(SeasonFile) ) return self.db.execute(stmt).scalar_one_or_none() - except SQLAlchemyError as e: - log.error( - f"Database error while calculating downloaded episodes count: {e}" - ) - raise e + except SQLAlchemyError: + log.exception("Database error while calculating downloaded episodes count") + raise def save_show(self, show: ShowSchema) -> ShowSchema: """ @@ -192,9 +190,9 @@ class TvRepository: self.db.rollback() msg = f"Show with this primary key or unique constraint violation: 
{e.orig}" raise ConflictError(msg) from e - except SQLAlchemyError as e: + except SQLAlchemyError: self.db.rollback() - log.error(f"Database error while saving show {show.name}: {e}") + log.exception(f"Database error while saving show {show.name}") raise def delete_show(self, show_id: ShowId) -> None: @@ -212,9 +210,9 @@ class TvRepository: raise NotFoundError(msg) self.db.delete(show) self.db.commit() - except SQLAlchemyError as e: + except SQLAlchemyError: self.db.rollback() - log.error(f"Database error while deleting show {show_id}: {e}") + log.exception(f"Database error while deleting show {show_id}") raise def get_season(self, season_id: SeasonId) -> SeasonSchema: @@ -232,8 +230,8 @@ class TvRepository: msg = f"Season with id {season_id} not found." raise NotFoundError(msg) return SeasonSchema.model_validate(season) - except SQLAlchemyError as e: - log.error(f"Database error while retrieving season {season_id}: {e}") + except SQLAlchemyError: + log.exception(f"Database error while retrieving season {season_id}") raise def add_season_request( @@ -265,13 +263,13 @@ class TvRepository: self.db.commit() self.db.refresh(db_model) return SeasonRequestSchema.model_validate(db_model) - except IntegrityError as e: + except IntegrityError: self.db.rollback() - log.error(f"Integrity error while adding season request: {e}") + log.exception("Integrity error while adding season request") raise - except SQLAlchemyError as e: + except SQLAlchemyError: self.db.rollback() - log.error(f"Database error while adding season request: {e}") + log.exception("Database error while adding season request") raise def delete_season_request(self, season_request_id: SeasonRequestId) -> None: @@ -290,10 +288,10 @@ class TvRepository: msg = f"SeasonRequest with id {season_request_id} not found." 
raise NotFoundError(msg) self.db.commit() - except SQLAlchemyError as e: + except SQLAlchemyError: self.db.rollback() - log.error( - f"Database error while deleting season request {season_request_id}: {e}" + log.exception( + f"Database error while deleting season request {season_request_id}" ) raise @@ -319,9 +317,9 @@ class TvRepository: msg = f"Season number {season_number} for show_id {show_id} not found." raise NotFoundError(msg) return SeasonSchema.model_validate(result) - except SQLAlchemyError as e: - log.error( - f"Database error retrieving season {season_number} for show {show_id}: {e}" + except SQLAlchemyError: + log.exception( + f"Database error retrieving season {season_number} for show {show_id}" ) raise @@ -353,8 +351,8 @@ class TvRepository: ) for x in results ] - except SQLAlchemyError as e: - log.error(f"Database error while retrieving season requests: {e}") + except SQLAlchemyError: + log.exception("Database error while retrieving season requests") raise def add_season_file(self, season_file: SeasonFileSchema) -> SeasonFileSchema: @@ -372,13 +370,13 @@ class TvRepository: self.db.commit() self.db.refresh(db_model) return SeasonFileSchema.model_validate(db_model) - except IntegrityError as e: + except IntegrityError: self.db.rollback() - log.error(f"Integrity error while adding season file: {e}") + log.exception("Integrity error while adding season file") raise - except SQLAlchemyError as e: + except SQLAlchemyError: self.db.rollback() - log.error(f"Database error while adding season file: {e}") + log.exception("Database error while adding season file") raise def remove_season_files_by_torrent_id(self, torrent_id: TorrentId) -> int: @@ -393,13 +391,13 @@ class TvRepository: stmt = delete(SeasonFile).where(SeasonFile.torrent_id == torrent_id) result = self.db.execute(stmt) self.db.commit() - return result.rowcount - except SQLAlchemyError as e: + except SQLAlchemyError: self.db.rollback() - log.error( - f"Database error removing season files for 
torrent_id {torrent_id}: {e}" + log.exception( + f"Database error removing season files for torrent_id {torrent_id}" ) raise + return result.rowcount def set_show_library(self, show_id: ShowId, library: str) -> None: """ @@ -417,9 +415,9 @@ class TvRepository: raise NotFoundError(msg) show.library = library self.db.commit() - except SQLAlchemyError as e: + except SQLAlchemyError: self.db.rollback() - log.error(f"Database error setting library for show {show_id}: {e}") + log.exception(f"Database error setting library for show {show_id}") raise def get_season_files_by_season_id( @@ -436,9 +434,9 @@ class TvRepository: stmt = select(SeasonFile).where(SeasonFile.season_id == season_id) results = self.db.execute(stmt).scalars().all() return [SeasonFileSchema.model_validate(sf) for sf in results] - except SQLAlchemyError as e: - log.error( - f"Database error retrieving season files for season_id {season_id}: {e}" + except SQLAlchemyError: + log.exception( + f"Database error retrieving season files for season_id {season_id}" ) raise @@ -460,8 +458,8 @@ class TvRepository: ) results = self.db.execute(stmt).scalars().unique().all() return [TorrentSchema.model_validate(torrent) for torrent in results] - except SQLAlchemyError as e: - log.error(f"Database error retrieving torrents for show_id {show_id}: {e}") + except SQLAlchemyError: + log.exception(f"Database error retrieving torrents for show_id {show_id}") raise def get_all_shows_with_torrents(self) -> list[ShowSchema]: @@ -483,8 +481,8 @@ class TvRepository: ) results = self.db.execute(stmt).scalars().unique().all() return [ShowSchema.model_validate(show) for show in results] - except SQLAlchemyError as e: - log.error(f"Database error retrieving all shows with torrents: {e}") + except SQLAlchemyError: + log.exception("Database error retrieving all shows with torrents") raise def get_seasons_by_torrent_id(self, torrent_id: TorrentId) -> list[SeasonNumber]: @@ -504,9 +502,9 @@ class TvRepository: ) results = 
self.db.execute(stmt).scalars().unique().all() return [SeasonNumber(x) for x in results] - except SQLAlchemyError as e: - log.error( - f"Database error retrieving season numbers for torrent_id {torrent_id}: {e}" + except SQLAlchemyError: + log.exception( + f"Database error retrieving season numbers for torrent_id {torrent_id}" ) raise @@ -528,9 +526,9 @@ class TvRepository: msg = f"Season request with id {season_request_id} not found." raise NotFoundError(msg) return SeasonRequestSchema.model_validate(request) - except SQLAlchemyError as e: - log.error( - f"Database error retrieving season request {season_request_id}: {e}" + except SQLAlchemyError: + log.exception( + f"Database error retrieving season request {season_request_id}" ) raise @@ -555,8 +553,8 @@ class TvRepository: msg = f"Show for season_id {season_id} not found." raise NotFoundError(msg) return ShowSchema.model_validate(result) - except SQLAlchemyError as e: - log.error(f"Database error retrieving show by season_id {season_id}: {e}") + except SQLAlchemyError: + log.exception(f"Database error retrieving show by season_id {season_id}") raise def add_season_to_show( diff --git a/media_manager/tv/router.py b/media_manager/tv/router.py index 7acf25c..46f05d9 100644 --- a/media_manager/tv/router.py +++ b/media_manager/tv/router.py @@ -94,7 +94,9 @@ def get_all_importable_shows( dependencies=[Depends(current_superuser)], status_code=status.HTTP_204_NO_CONTENT, ) -def import_detected_show(tv_service: tv_service_dep, tv_show: show_dep, directory: str) -> None: +def import_detected_show( + tv_service: tv_service_dep, tv_show: show_dep, directory: str +) -> None: """ Import a detected show from the specified directory into the library. """ @@ -145,7 +147,7 @@ def add_a_show( Add a new show to the library. 
""" try: - show = tv_service.add_show( + show = tv_service.add_show( external_id=show_id, metadata_provider=metadata_provider, language=language, diff --git a/media_manager/tv/service.py b/media_manager/tv/service.py index 9ccc350..39f4355 100644 --- a/media_manager/tv/service.py +++ b/media_manager/tv/service.py @@ -8,7 +8,7 @@ from sqlalchemy.exc import IntegrityError from media_manager.config import MediaManagerConfig from media_manager.database import get_session -from media_manager.exceptions import InvalidConfigError, NotFoundError +from media_manager.exceptions import InvalidConfigError, NotFoundError, RenameError from media_manager.indexer.repository import IndexerRepository from media_manager.indexer.schemas import IndexerQueryResult, IndexerQueryResultId from media_manager.indexer.service import IndexerService @@ -174,8 +174,10 @@ class TvService: try: self.torrent_service.cancel_download(torrent, delete_files=True) log.info(f"Deleted torrent: {torrent.hash}") - except Exception as e: - log.warning(f"Failed to delete torrent {torrent.hash}: {e}") + except Exception: + log.warning( + f"Failed to delete torrent {torrent.hash}", exc_info=True + ) self.tv_repository.delete_show(show_id=show.id) @@ -226,19 +228,19 @@ class TvService: self.tv_repository.get_show_by_external_id( external_id=external_id, metadata_provider=metadata_provider ) - return True except NotFoundError: return False elif show_id is not None: try: self.tv_repository.get_show_by_id(show_id=show_id) - return True except NotFoundError: return False else: msg = "Use one of the provided overloads for this function!" 
raise ValueError(msg) + return True + def get_all_available_torrents_for_a_season( self, season_number: int, @@ -379,8 +381,9 @@ class TvService: if torrent_file.imported: return True - except RuntimeError as e: - log.error(f"Error retrieving torrent, error: {e}") + except RuntimeError: + log.exception("Error retrieving torrent") + return False def get_show_by_external_id( @@ -641,7 +644,7 @@ class TvService: return True else: msg = f"Could not find any video file for episode {episode_number} of show {show.name} S{season.number}" - raise Exception(msg) + raise Exception(msg) # noqa: TRY002 # TODO: resolve this def import_season( self, @@ -659,9 +662,9 @@ class TvService: try: season_path.mkdir(parents=True, exist_ok=True) except Exception as e: - log.warning(f"Could not create path {season_path}: {e}") + log.exception(f"Could not create path {season_path}") msg = f"Could not create path {season_path}" - raise Exception(msg) from e + raise Exception(msg) from e # noqa: TRY002 # TODO: resolve this for episode in season.episodes: try: @@ -901,9 +904,8 @@ class TvService: try: source_directory.rename(new_source_path) except Exception as e: - log.error(f"Failed to rename {source_directory} to {new_source_path}: {e}") - msg = "Failed to rename source directory" - raise Exception(msg) from e + log.exception(f"Failed to rename {source_directory} to {new_source_path}") + raise RenameError from e video_files, subtitle_files, _all_files = get_files_for_import( directory=new_source_path @@ -967,12 +969,14 @@ def auto_download_all_approved_season_requests() -> None: tv_repository = TvRepository(db=db) torrent_service = TorrentService(torrent_repository=TorrentRepository(db=db)) indexer_service = IndexerService(indexer_repository=IndexerRepository(db=db)) - notification_service = NotificationService(notification_repository=NotificationRepository(db=db)) + notification_service = NotificationService( + notification_repository=NotificationRepository(db=db) + ) tv_service = 
TvService( tv_repository=tv_repository, torrent_service=torrent_service, indexer_service=indexer_service, - notification_service=notification_service + notification_service=notification_service, ) log.info("Auto downloading all approved season requests") @@ -1004,12 +1008,14 @@ def import_all_show_torrents() -> None: tv_repository = TvRepository(db=db) torrent_service = TorrentService(torrent_repository=TorrentRepository(db=db)) indexer_service = IndexerService(indexer_repository=IndexerRepository(db=db)) - notification_service = NotificationService(notification_repository=NotificationRepository(db=db)) + notification_service = NotificationService( + notification_repository=NotificationRepository(db=db) + ) tv_service = TvService( tv_repository=tv_repository, torrent_service=torrent_service, indexer_service=indexer_service, - notification_service=notification_service + notification_service=notification_service, ) log.info("Importing all torrents") torrents = torrent_service.get_all_torrents() @@ -1024,10 +1030,8 @@ def import_all_show_torrents() -> None: ) continue tv_service.import_torrent_files(torrent=t, show=show) - except RuntimeError as e: - log.error( - f"Error importing torrent {t.title} for show {show.name}: {e}" - ) + except RuntimeError: + log.exception(f"Error importing torrent {t.title} for show {show.name}") log.info("Finished importing all torrents") db.commit() @@ -1042,7 +1046,9 @@ def update_all_non_ended_shows_metadata() -> None: tv_repository=tv_repository, torrent_service=TorrentService(torrent_repository=TorrentRepository(db=db)), indexer_service=IndexerService(indexer_repository=IndexerRepository(db=db)), - notification_service=NotificationService(notification_repository=NotificationRepository(db=db)) + notification_service=NotificationService( + notification_repository=NotificationRepository(db=db) + ), ) log.info("Updating metadata for all non-ended shows") @@ -1062,9 +1068,9 @@ def update_all_non_ended_shows_metadata() -> None: 
f"Unsupported metadata provider {show.metadata_provider} for show {show.name}, skipping update." ) continue - except InvalidConfigError as e: - log.error( - f"Error initializing metadata provider {show.metadata_provider} for show {show.name}: {e}" + except InvalidConfigError: + log.exception( + f"Error initializing metadata provider {show.metadata_provider} for show {show.name}" ) continue updated_show = tv_service.update_show_metadata( diff --git a/metadata_relay/app/tmdb.py b/metadata_relay/app/tmdb.py index 39a9744..b358fca 100644 --- a/metadata_relay/app/tmdb.py +++ b/metadata_relay/app/tmdb.py @@ -32,7 +32,9 @@ else: return TV(show_id).external_ids() @router.get("/tv/shows/{show_id}/{season_number}") - async def get_tmdb_season(season_number: int, show_id: int, language: str = "en") -> dict: + async def get_tmdb_season( + season_number: int, show_id: int, language: str = "en" + ) -> dict: return TV_Seasons(season_number=season_number, tv_id=show_id).info( language=language ) @@ -42,7 +44,9 @@ else: return Trending(media_type="movie").info(language=language) @router.get("/movies/search") - async def search_tmdb_movies(query: str, page: int = 1, language: str = "en") -> dict: + async def search_tmdb_movies( + query: str, page: int = 1, language: str = "en" + ) -> dict: return Search().movie(page=page, query=query, language=language) @router.get("/movies/{movie_id}") diff --git a/ruff.toml b/ruff.toml index 45c97e9..ba09100 100644 --- a/ruff.toml +++ b/ruff.toml @@ -1,3 +1,4 @@ +exclude = ["alembic/versions"] namespace-packages = ["alembic", "metadata_relay"] [format] @@ -5,7 +6,7 @@ line-ending = "lf" quote-style = "double" [lint] -# to be enabled: BLE, C90, CPY, D, DOC, DTZ, FBT, G, PL, RSE, SLF, SIM, TC, TRY, UP +# to be enabled: BLE, C90, CPY, D, DOC, DTZ, FBT, G, PL, RSE, SLF, SIM, TC extend-select = [ "A", "ARG", "ASYNC", "ANN", "B", @@ -20,7 +21,8 @@ extend-select = [ "Q", "RET", "RUF", "S", "SLOT", - "T10", "T20", "TD", "TID", + "T10", "T20", "TD", "TID", 
"TRY", + "UP", "W", "YTT" ] @@ -32,6 +34,10 @@ ignore = [ "E501", # currently a bug?! with providers and depends "FAST003", + # I'm not sure if we want to lint them + "FIX002", + # let's decide if we want this + "TD002", "TD003", ] [lint.flake8-bugbear]