diff --git a/alembic/env.py b/alembic/env.py index 00d5ea9..b09aad9 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -1,13 +1,16 @@ import sys -sys.path = ["", ".."] + sys.path[1:] +sys.path = ["", "..", *sys.path[1:]] from logging.config import fileConfig # noqa: E402 +from sqlalchemy import ( # noqa: E402 + engine_from_config, + pool, +) + from alembic import context # noqa: E402 -from sqlalchemy import engine_from_config # noqa: E402 -from sqlalchemy import pool # noqa: E402 # this is the Alembic Config object, which provides # access to the values within the .ini file in use. @@ -23,34 +26,40 @@ if config.config_file_name is not None: # from myapp import mymodel # target_metadata = mymodel.Base.metadata -from media_manager.auth.db import User, OAuthAccount # noqa: E402 +from media_manager.auth.db import OAuthAccount, User # noqa: E402 +from media_manager.config import MediaManagerConfig # noqa: E402 +from media_manager.database import Base # noqa: E402 from media_manager.indexer.models import IndexerQueryResult # noqa: E402 -from media_manager.torrent.models import Torrent # noqa: E402 -from media_manager.tv.models import Show, Season, Episode, SeasonFile, SeasonRequest # noqa: E402 from media_manager.movies.models import Movie, MovieFile, MovieRequest # noqa: E402 from media_manager.notification.models import Notification # noqa: E402 -from media_manager.database import Base # noqa: E402 -from media_manager.config import MediaManagerConfig # noqa: E402 +from media_manager.torrent.models import Torrent # noqa: E402 +from media_manager.tv.models import ( # noqa: E402 + Episode, + Season, + SeasonFile, + SeasonRequest, + Show, +) target_metadata = Base.metadata # this is to keep pycharm from complaining about/optimizing unused imports # noinspection PyStatementEffect -( - User, - OAuthAccount, - IndexerQueryResult, - Torrent, - Show, - Season, - Episode, - SeasonFile, - SeasonRequest, - Movie, - MovieFile, - MovieRequest, - Notification, -) +__all__ = [ + 
"Episode", + "IndexerQueryResult", + "Movie", + "MovieFile", + "MovieRequest", + "Notification", + "OAuthAccount", + "Season", + "SeasonFile", + "SeasonRequest", + "Show", + "Torrent", + "User", +] # other values from the config, defined by the needs of env.py, @@ -60,19 +69,7 @@ target_metadata = Base.metadata db_config = MediaManagerConfig().database -db_url = ( - "postgresql+psycopg" - + "://" - + db_config.user - + ":" - + db_config.password - + "@" - + db_config.host - + ":" - + str(db_config.port) - + "/" - + db_config.dbname -) +db_url = f"postgresql+psycopg://{db_config.user}:{db_config.password}@{db_config.host}:{db_config.port}/{db_config.dbname}" config.set_main_option("sqlalchemy.url", db_url) @@ -109,7 +106,7 @@ def run_migrations_online() -> None: """ - def include_object(object, name, type_, reflected, compare_to): + def include_object(_object, name, type_, _reflected, _compare_to): if type_ == "table" and name == "apscheduler_jobs": return False return True diff --git a/alembic/versions/16e78af9e5bf_add_original_language_columns_to_show_.py b/alembic/versions/16e78af9e5bf_add_original_language_columns_to_show_.py index 6cba8ae..d3283f3 100644 --- a/alembic/versions/16e78af9e5bf_add_original_language_columns_to_show_.py +++ b/alembic/versions/16e78af9e5bf_add_original_language_columns_to_show_.py @@ -8,9 +8,9 @@ Create Date: 2025-12-13 18:47:02.146038 from typing import Sequence, Union -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
revision: str = "16e78af9e5bf" diff --git a/alembic/versions/1801d9f5a275_add_library_field_to_movie_and_show_.py b/alembic/versions/1801d9f5a275_add_library_field_to_movie_and_show_.py index 4b67e68..a24fb40 100644 --- a/alembic/versions/1801d9f5a275_add_library_field_to_movie_and_show_.py +++ b/alembic/versions/1801d9f5a275_add_library_field_to_movie_and_show_.py @@ -8,9 +8,9 @@ Create Date: 2025-07-16 01:09:44.045395 from typing import Sequence, Union -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. revision: str = "1801d9f5a275" diff --git a/alembic/versions/1f340754640a_add_continuous_download_column_to_show_.py b/alembic/versions/1f340754640a_add_continuous_download_column_to_show_.py index 44eab02..ac10ba5 100644 --- a/alembic/versions/1f340754640a_add_continuous_download_column_to_show_.py +++ b/alembic/versions/1f340754640a_add_continuous_download_column_to_show_.py @@ -8,9 +8,9 @@ Create Date: 2025-06-22 13:46:01.973406 from typing import Sequence, Union -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. revision: str = "1f340754640a" diff --git a/alembic/versions/21a19f0675f9_increase_access_token_length.py b/alembic/versions/21a19f0675f9_increase_access_token_length.py index 0c07a18..b2832cc 100644 --- a/alembic/versions/21a19f0675f9_increase_access_token_length.py +++ b/alembic/versions/21a19f0675f9_increase_access_token_length.py @@ -8,9 +8,9 @@ Create Date: 2025-07-06 10:49:08.814496 from typing import Sequence, Union -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
revision: str = "21a19f0675f9" diff --git a/alembic/versions/2c61f662ca9e_add_imdb_id_fields.py b/alembic/versions/2c61f662ca9e_add_imdb_id_fields.py index 48a61a1..9c4710b 100644 --- a/alembic/versions/2c61f662ca9e_add_imdb_id_fields.py +++ b/alembic/versions/2c61f662ca9e_add_imdb_id_fields.py @@ -8,9 +8,9 @@ Create Date: 2025-12-23 19:42:09.593945 from typing import Sequence, Union -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. revision: str = "2c61f662ca9e" diff --git a/alembic/versions/333866afcd2c_add_usenet_columns.py b/alembic/versions/333866afcd2c_add_usenet_columns.py index 7728d52..73bb318 100644 --- a/alembic/versions/333866afcd2c_add_usenet_columns.py +++ b/alembic/versions/333866afcd2c_add_usenet_columns.py @@ -8,9 +8,10 @@ Create Date: 2025-07-09 20:55:42.338629 from typing import Sequence, Union -from alembic import op import sqlalchemy as sa +from alembic import op + # revision identifiers, used by Alembic. revision: str = "333866afcd2c" down_revision: Union[str, None] = "aa4689f80796" diff --git a/alembic/versions/5299dfed220b_add_score_field_to_indexerqueryresult_.py b/alembic/versions/5299dfed220b_add_score_field_to_indexerqueryresult_.py index 8a01fcc..1387d0c 100644 --- a/alembic/versions/5299dfed220b_add_score_field_to_indexerqueryresult_.py +++ b/alembic/versions/5299dfed220b_add_score_field_to_indexerqueryresult_.py @@ -8,9 +8,9 @@ Create Date: 2025-07-16 23:24:37.931188 from typing import Sequence, Union -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
revision: str = "5299dfed220b" diff --git a/alembic/versions/7508237d5bc2_add_ended_column_to_show_table.py b/alembic/versions/7508237d5bc2_add_ended_column_to_show_table.py index 730a9d7..b5c1dfd 100644 --- a/alembic/versions/7508237d5bc2_add_ended_column_to_show_table.py +++ b/alembic/versions/7508237d5bc2_add_ended_column_to_show_table.py @@ -8,9 +8,9 @@ Create Date: 2025-06-10 21:25:27.871064 from typing import Sequence, Union -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. revision: str = "7508237d5bc2" diff --git a/alembic/versions/93fb07842385_initial_migration.py b/alembic/versions/93fb07842385_initial_migration.py index daea886..7994e77 100644 --- a/alembic/versions/93fb07842385_initial_migration.py +++ b/alembic/versions/93fb07842385_initial_migration.py @@ -8,10 +8,11 @@ Create Date: 2025-05-27 21:36:18.532068 from typing import Sequence, Union -from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql +from alembic import op + # revision identifiers, used by Alembic. revision: str = "93fb07842385" down_revision: Union[str, None] = None diff --git a/alembic/versions/aa4689f80796_increase_refresh_token_length.py b/alembic/versions/aa4689f80796_increase_refresh_token_length.py index bc928a2..a3f86db 100644 --- a/alembic/versions/aa4689f80796_increase_refresh_token_length.py +++ b/alembic/versions/aa4689f80796_increase_refresh_token_length.py @@ -8,9 +8,9 @@ Create Date: 2025-07-06 10:54:19.714809 from typing import Sequence, Union -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
revision: str = "aa4689f80796" diff --git a/alembic/versions/eb0bd3cc1852_add_indexer_column_to_indexerqueryresult.py b/alembic/versions/eb0bd3cc1852_add_indexer_column_to_indexerqueryresult.py index 0b1bc82..7d6b1e7 100644 --- a/alembic/versions/eb0bd3cc1852_add_indexer_column_to_indexerqueryresult.py +++ b/alembic/versions/eb0bd3cc1852_add_indexer_column_to_indexerqueryresult.py @@ -8,9 +8,9 @@ Create Date: 2025-10-28 21:39:24.480466 from typing import Sequence, Union -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. revision: str = "eb0bd3cc1852" diff --git a/media_manager/auth/__init__.py b/media_manager/auth/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/media_manager/auth/config.py b/media_manager/auth/config.py index f0ef38d..a664c2b 100644 --- a/media_manager/auth/config.py +++ b/media_manager/auth/config.py @@ -1,7 +1,8 @@ -from pydantic_settings import BaseSettings -from pydantic import Field import secrets +from pydantic import Field +from pydantic_settings import BaseSettings + class OpenIdConfig(BaseSettings): client_id: str = "" diff --git a/media_manager/auth/db.py b/media_manager/auth/db.py index e6bec02..145e8e4 100644 --- a/media_manager/auth/db.py +++ b/media_manager/auth/db.py @@ -3,16 +3,16 @@ from typing import Optional from fastapi import Depends from fastapi_users.db import ( + SQLAlchemyBaseOAuthAccountTableUUID, SQLAlchemyBaseUserTableUUID, SQLAlchemyUserDatabase, - SQLAlchemyBaseOAuthAccountTableUUID, ) from sqlalchemy import String from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine -from sqlalchemy.orm import Mapped, relationship, mapped_column +from sqlalchemy.orm import Mapped, mapped_column, relationship -from media_manager.database import Base, build_db_url from media_manager.config import MediaManagerConfig +from media_manager.database import Base, build_db_url class 
OAuthAccount(SQLAlchemyBaseOAuthAccountTableUUID, Base): @@ -20,7 +20,6 @@ class OAuthAccount(SQLAlchemyBaseOAuthAccountTableUUID, Base): refresh_token: Mapped[Optional[str]] = mapped_column( String(length=4096), nullable=True ) - pass class User(SQLAlchemyBaseUserTableUUID, Base): diff --git a/media_manager/auth/router.py b/media_manager/auth/router.py index f6beb28..2e1e410 100644 --- a/media_manager/auth/router.py +++ b/media_manager/auth/router.py @@ -1,19 +1,18 @@ -from fastapi import APIRouter, Depends -from fastapi import status +from fastapi import APIRouter, Depends, status from fastapi_users.router import get_oauth_router from httpx_oauth.oauth2 import OAuth2 from sqlalchemy import select -from media_manager.config import MediaManagerConfig from media_manager.auth.db import User -from media_manager.auth.schemas import UserRead, AuthMetadata +from media_manager.auth.schemas import AuthMetadata, UserRead from media_manager.auth.users import ( + SECRET, current_superuser, + fastapi_users, openid_client, openid_cookie_auth_backend, - SECRET, - fastapi_users, ) +from media_manager.config import MediaManagerConfig from media_manager.database import DbSessionDependency users_router = APIRouter() @@ -31,23 +30,22 @@ def get_openid_router(): is_verified_by_default=True, redirect_url=None, ) - else: - # this is there, so that the appropriate routes are created even if OIDC is not configured, - # e.g. for generating the frontend's openapi client - return get_oauth_router( - oauth_client=OAuth2( - client_id="mock", - client_secret="mock", - authorize_endpoint="https://example.com/authorize", - access_token_endpoint="https://example.com/token", - ), - backend=openid_cookie_auth_backend, - get_user_manager=fastapi_users.get_user_manager, - state_secret=SECRET, - associate_by_email=False, - is_verified_by_default=False, - redirect_url=None, - ) + # this is there, so that the appropriate routes are created even if OIDC is not configured, + # e.g. 
for generating the frontend's openapi client + return get_oauth_router( + oauth_client=OAuth2( + client_id="mock", + client_secret="mock", # noqa: S106 + authorize_endpoint="https://example.com/authorize", + access_token_endpoint="https://example.com/token", # noqa: S106 + ), + backend=openid_cookie_auth_backend, + get_user_manager=fastapi_users.get_user_manager, + state_secret=SECRET, + associate_by_email=False, + is_verified_by_default=False, + redirect_url=None, + ) openid_config = MediaManagerConfig().auth.openid_connect @@ -68,5 +66,4 @@ def get_all_users(db: DbSessionDependency) -> list[UserRead]: def get_auth_metadata() -> AuthMetadata: if openid_config.enabled: return AuthMetadata(oauth_providers=[openid_config.name]) - else: - return AuthMetadata(oauth_providers=[]) + return AuthMetadata(oauth_providers=[]) diff --git a/media_manager/auth/users.py b/media_manager/auth/users.py index f3e6d0d..e5549fb 100644 --- a/media_manager/auth/users.py +++ b/media_manager/auth/users.py @@ -1,9 +1,10 @@ import contextlib import logging import uuid -from typing import Optional, Any +from typing import Any, Optional, override from fastapi import Depends, Request +from fastapi.responses import RedirectResponse, Response from fastapi_users import BaseUserManager, FastAPIUsers, UUIDIDMixin, models from fastapi_users.authentication import ( AuthenticationBackend, @@ -13,13 +14,12 @@ from fastapi_users.authentication import ( ) from fastapi_users.db import SQLAlchemyUserDatabase from httpx_oauth.clients.openid import OpenID -from fastapi.responses import RedirectResponse, Response +from sqlalchemy import func, select from starlette import status -from sqlalchemy import select, func import media_manager.notification.utils -from media_manager.auth.db import User, get_user_db, get_async_session -from media_manager.auth.schemas import UserUpdate, UserCreate +from media_manager.auth.db import User, get_async_session, get_user_db +from media_manager.auth.schemas import UserCreate, 
UserUpdate from media_manager.config import MediaManagerConfig log = logging.getLogger(__name__) @@ -44,6 +44,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]): reset_password_token_secret = SECRET verification_token_secret = SECRET + @override async def on_after_update( self, user: models.UP, @@ -51,18 +52,20 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]): request: Optional[Request] = None, ) -> None: log.info(f"User {user.id} has been updated.") - if "is_superuser" in update_dict and update_dict["is_superuser"]: + if update_dict.get("is_superuser"): log.info(f"User {user.id} has been granted superuser privileges.") if "email" in update_dict: updated_user = UserUpdate(is_verified=True) await self.update(user=user, user_update=updated_user) + @override async def on_after_register(self, user: User, request: Optional[Request] = None): log.info(f"User {user.id} has registered.") if user.email in config.admin_emails: updated_user = UserUpdate(is_superuser=True, is_verified=True) await self.update(user=user, user_update=updated_user) + @override async def on_after_forgot_password( self, user: User, token: str, request: Optional[Request] = None ): @@ -93,11 +96,13 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]): ) log.info(f"Sent password reset email to {user.email}") + @override async def on_after_reset_password( self, user: User, request: Optional[Request] = None ): log.info(f"User {user.id} has reset their password.") + @override async def on_after_request_verify( self, user: User, token: str, request: Optional[Request] = None ): @@ -105,6 +110,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]): f"Verification requested for user {user.id}. 
Verification token: {token}" ) + @override async def on_after_verify(self, user: User, request: Optional[Request] = None): log.info(f"User {user.id} has been verified") @@ -140,7 +146,7 @@ async def create_default_admin_user(): if config.auth.admin_emails else "admin@example.com" ) - default_password = "admin" # Simple default password + default_password = "admin" # noqa: S105 # Simple default password user_create = UserCreate( email=admin_email, diff --git a/media_manager/config.py b/media_manager/config.py index 9457ba4..5c86acf 100644 --- a/media_manager/config.py +++ b/media_manager/config.py @@ -1,13 +1,13 @@ import logging import os from pathlib import Path -from typing import Type, Tuple +from typing import Tuple, Type from pydantic import AnyHttpUrl from pydantic_settings import ( BaseSettings, - SettingsConfigDict, PydanticBaseSettingsSource, + SettingsConfigDict, TomlConfigSettingsSource, ) diff --git a/media_manager/database/__init__.py b/media_manager/database/__init__.py index b4392fe..e69ea7e 100644 --- a/media_manager/database/__init__.py +++ b/media_manager/database/__init__.py @@ -23,21 +23,20 @@ def build_db_url( host: str, port: int | str, dbname: str, -) -> str: - db_url = URL.create( +) -> URL: + return URL.create( "postgresql+psycopg", user, password, host, - port, + int(port), dbname, ) - return db_url def init_engine( db_config: Any | None = None, - url: str | None = None, + url: str | URL | None = None, ) -> Engine: """ Initialize the global SQLAlchemy engine and session factory. @@ -51,7 +50,8 @@ def init_engine( if db_config is None: url = os.getenv("DATABASE_URL") if not url: - raise RuntimeError("DB config or `DATABASE_URL` must be provided") + msg = "DB config or `DATABASE_URL` must be provided" + raise RuntimeError(msg) else: url = build_db_url( db_config.user, @@ -76,15 +76,15 @@ def init_engine( def get_engine() -> Engine: if engine is None: - raise RuntimeError("Engine not initialized. Call init_engine(...) 
first.") + msg = "Engine not initialized. Call init_engine(...) first." + raise RuntimeError(msg) return engine def get_session() -> Generator[Session, Any, None]: if SessionLocal is None: - raise RuntimeError( - "Session factory not initialized. Call init_engine(...) first." - ) + msg = "Session factory not initialized. Call init_engine(...) first." + raise RuntimeError(msg) db = SessionLocal() try: yield db diff --git a/media_manager/database/config.py b/media_manager/database/config.py index e183890..1d4f58b 100644 --- a/media_manager/database/config.py +++ b/media_manager/database/config.py @@ -5,5 +5,5 @@ class DbConfig(BaseSettings): host: str = "localhost" port: int = 5432 user: str = "MediaManager" - password: str = "MediaManager" + password: str = "MediaManager" # noqa: S105 dbname: str = "MediaManager" diff --git a/media_manager/exceptions.py b/media_manager/exceptions.py index 87d9e15..c1ee8b7 100644 --- a/media_manager/exceptions.py +++ b/media_manager/exceptions.py @@ -1,10 +1,10 @@ -from fastapi import Request +from fastapi import FastAPI, Request from fastapi.responses import JSONResponse -from sqlalchemy.exc import IntegrityError from psycopg.errors import UniqueViolation +from sqlalchemy.exc import IntegrityError -class MediaManagerException(Exception): +class MediaManagerError(Exception): """Base exception for MediaManager errors.""" def __init__(self, message: str = "An error occurred."): @@ -12,7 +12,7 @@ class MediaManagerException(Exception): self.message = message -class MediaAlreadyExists(MediaManagerException): +class MediaAlreadyExistsError(MediaManagerError): """Raised when a media entity already exists (HTTP 409).""" def __init__( @@ -21,49 +21,49 @@ class MediaAlreadyExists(MediaManagerException): super().__init__(message) -class NotFoundError(MediaManagerException): +class NotFoundError(MediaManagerError): """Raised when an entity is not found (HTTP 404).""" def __init__(self, message: str = "The requested entity was not found."): 
super().__init__(message) -class InvalidConfigError(MediaManagerException): +class InvalidConfigError(MediaManagerError): """Raised when the server is improperly configured (HTTP 500).""" def __init__(self, message: str = "The server is improperly configured."): super().__init__(message) -class BadRequestError(MediaManagerException): +class BadRequestError(MediaManagerError): """Raised for invalid client requests (HTTP 400).""" def __init__(self, message: str = "Bad request."): super().__init__(message) -class UnauthorizedError(MediaManagerException): +class UnauthorizedError(MediaManagerError): """Raised for authentication failures (HTTP 401).""" def __init__(self, message: str = "Unauthorized."): super().__init__(message) -class ForbiddenError(MediaManagerException): +class ForbiddenError(MediaManagerError): """Raised for forbidden actions (HTTP 403).""" def __init__(self, message: str = "Forbidden."): super().__init__(message) -class ConflictError(MediaManagerException): +class ConflictError(MediaManagerError): """Raised for resource conflicts (HTTP 409).""" def __init__(self, message: str = "Conflict."): super().__init__(message) -class UnprocessableEntityError(MediaManagerException): +class UnprocessableEntityError(MediaManagerError): """Raised for validation errors (HTTP 422).""" def __init__(self, message: str = "Unprocessable entity."): @@ -72,53 +72,53 @@ class UnprocessableEntityError(MediaManagerException): # Exception handlers async def media_already_exists_exception_handler( - request: Request, exc: MediaAlreadyExists + _request: Request, exc: MediaAlreadyExistsError ) -> JSONResponse: return JSONResponse(status_code=409, content={"detail": exc.message}) async def not_found_error_exception_handler( - request: Request, exc: NotFoundError + _request: Request, exc: NotFoundError ) -> JSONResponse: return JSONResponse(status_code=404, content={"detail": exc.message}) async def invalid_config_error_exception_handler( - request: Request, exc: 
InvalidConfigError + _request: Request, exc: InvalidConfigError ) -> JSONResponse: return JSONResponse(status_code=500, content={"detail": exc.message}) async def bad_request_error_handler( - request: Request, exc: BadRequestError + _request: Request, exc: BadRequestError ) -> JSONResponse: return JSONResponse(status_code=400, content={"detail": exc.message}) async def unauthorized_error_handler( - request: Request, exc: UnauthorizedError + _request: Request, exc: UnauthorizedError ) -> JSONResponse: return JSONResponse(status_code=401, content={"detail": exc.message}) async def forbidden_error_handler( - request: Request, exc: ForbiddenError + _request: Request, exc: ForbiddenError ) -> JSONResponse: return JSONResponse(status_code=403, content={"detail": exc.message}) -async def conflict_error_handler(request: Request, exc: ConflictError) -> JSONResponse: +async def conflict_error_handler(_request: Request, exc: ConflictError) -> JSONResponse: return JSONResponse(status_code=409, content={"detail": exc.message}) async def unprocessable_entity_error_handler( - request: Request, exc: UnprocessableEntityError + _request: Request, exc: UnprocessableEntityError ) -> JSONResponse: return JSONResponse(status_code=422, content={"detail": exc.message}) async def sqlalchemy_integrity_error_handler( - request: Request, exc: Exception + _request: Request, _exc: Exception ) -> JSONResponse: return JSONResponse( status_code=409, @@ -128,10 +128,10 @@ async def sqlalchemy_integrity_error_handler( ) -def register_exception_handlers(app): +def register_exception_handlers(app: FastAPI): app.add_exception_handler(NotFoundError, not_found_error_exception_handler) app.add_exception_handler( - MediaAlreadyExists, media_already_exists_exception_handler + MediaAlreadyExistsError, media_already_exists_exception_handler ) app.add_exception_handler( InvalidConfigError, invalid_config_error_exception_handler diff --git a/media_manager/indexer/dependencies.py 
b/media_manager/indexer/dependencies.py index 29b763a..b1ae1c1 100644 --- a/media_manager/indexer/dependencies.py +++ b/media_manager/indexer/dependencies.py @@ -2,9 +2,9 @@ from typing import Annotated from fastapi import Depends +from media_manager.database import DbSessionDependency from media_manager.indexer.repository import IndexerRepository from media_manager.indexer.service import IndexerService -from media_manager.database import DbSessionDependency from media_manager.tv.service import TvService diff --git a/media_manager/indexer/indexers/generic.py b/media_manager/indexer/indexers/generic.py index 4a0c0a7..d70e700 100644 --- a/media_manager/indexer/indexers/generic.py +++ b/media_manager/indexer/indexers/generic.py @@ -1,4 +1,4 @@ -from abc import abstractmethod, ABC +from abc import ABC, abstractmethod from media_manager.indexer.schemas import IndexerQueryResult from media_manager.movies.schemas import Movie @@ -8,11 +8,8 @@ from media_manager.tv.schemas import Show class GenericIndexer(ABC): name: str - def __init__(self, name: str = None): - if name: - self.name = name - else: - raise ValueError("indexer name must not be None") + def __init__(self, name: str): + self.name = name @abstractmethod def search(self, query: str, is_tv: bool) -> list[IndexerQueryResult]: diff --git a/media_manager/indexer/indexers/jackett.py b/media_manager/indexer/indexers/jackett.py index 1e01702..2527176 100644 --- a/media_manager/indexer/indexers/jackett.py +++ b/media_manager/indexer/indexers/jackett.py @@ -4,10 +4,10 @@ from concurrent.futures.thread import ThreadPoolExecutor import requests +from media_manager.config import MediaManagerConfig from media_manager.indexer.indexers.generic import GenericIndexer from media_manager.indexer.indexers.torznab_mixin import TorznabMixin from media_manager.indexer.schemas import IndexerQueryResult -from media_manager.config import MediaManagerConfig from media_manager.movies.schemas import Movie from media_manager.tv.schemas 
import Show diff --git a/media_manager/indexer/indexers/prowlarr.py b/media_manager/indexer/indexers/prowlarr.py index 82e68b7..38b48ff 100644 --- a/media_manager/indexer/indexers/prowlarr.py +++ b/media_manager/indexer/indexers/prowlarr.py @@ -3,8 +3,8 @@ from dataclasses import dataclass from requests import Session -from media_manager.indexer.indexers.generic import GenericIndexer from media_manager.config import MediaManagerConfig +from media_manager.indexer.indexers.generic import GenericIndexer from media_manager.indexer.indexers.torznab_mixin import TorznabMixin from media_manager.indexer.schemas import IndexerQueryResult from media_manager.movies.schemas import Movie @@ -38,7 +38,7 @@ class Prowlarr(GenericIndexer, TorznabMixin): super().__init__(name="prowlarr") self.config = MediaManagerConfig().indexers.prowlarr - def _call_prowlarr_api(self, path: str, parameters: dict = None): + def _call_prowlarr_api(self, path: str, parameters: dict | None = None): url = f"{self.config.url}/api/v1{path}" headers = {"X-Api-Key": self.config.api_key} with Session() as session: @@ -50,7 +50,7 @@ class Prowlarr(GenericIndexer, TorznabMixin): ) def _newznab_search( - self, indexer: IndexerInfo, parameters: dict = None + self, indexer: IndexerInfo, parameters: dict | None = None ) -> list[IndexerQueryResult]: if parameters is None: parameters = {} diff --git a/media_manager/indexer/indexers/torznab_mixin.py b/media_manager/indexer/indexers/torznab_mixin.py index a5e1916..a43eebc 100644 --- a/media_manager/indexer/indexers/torznab_mixin.py +++ b/media_manager/indexer/indexers/torznab_mixin.py @@ -1,10 +1,9 @@ import logging +import xml.etree.ElementTree as ET +from datetime import datetime, timezone +from email.utils import parsedate_to_datetime from media_manager.indexer.schemas import IndexerQueryResult -import xml.etree.ElementTree as ET -from xml.etree.ElementTree import Element -from email.utils import parsedate_to_datetime -from datetime import datetime, timezone log 
= logging.getLogger(__name__) @@ -12,7 +11,7 @@ log = logging.getLogger(__name__) class TorznabMixin: def process_search_result(self, xml: str) -> list[IndexerQueryResult]: result_list: list[IndexerQueryResult] = [] - xml_tree = ET.fromstring(xml) + xml_tree = ET.fromstring(xml) # noqa: S314 # treated as trusted: the XML comes from an indexer endpoint the user configured xmlns = { "torznab": "http://torznab.com/schemas/2015/feed", "atom": "http://www.w3.org/2005/Atom", @@ -33,9 +32,7 @@ class TorznabMixin: item.find("enclosure").attrib["type"] != "application/x-bittorrent" ) - attributes: list[Element] = [ - x for x in item.findall("torznab:attr", xmlns) - ] + attributes = list(item.findall("torznab:attr", xmlns)) for attribute in attributes: if is_usenet: if attribute.attrib["name"] == "usenetdate": diff --git a/media_manager/indexer/models.py b/media_manager/indexer/models.py index 5c50c75..fea717b 100644 --- a/media_manager/indexer/models.py +++ b/media_manager/indexer/models.py @@ -1,6 +1,6 @@ from uuid import UUID -from sqlalchemy import String, Integer +from sqlalchemy import Integer, String from sqlalchemy.dialects.postgresql import ARRAY from sqlalchemy.orm import Mapped, mapped_column from sqlalchemy.sql.sqltypes import BigInteger diff --git a/media_manager/indexer/repository.py b/media_manager/indexer/repository.py index e10a876..343a6d5 100644 --- a/media_manager/indexer/repository.py +++ b/media_manager/indexer/repository.py @@ -4,9 +4,11 @@ from sqlalchemy.orm import Session from media_manager.indexer.models import IndexerQueryResult from media_manager.indexer.schemas import ( - IndexerQueryResultId, IndexerQueryResult as IndexerQueryResultSchema, ) +from media_manager.indexer.schemas import ( + IndexerQueryResultId, +) log = logging.getLogger(__name__) diff --git a/media_manager/indexer/schemas.py b/media_manager/indexer/schemas.py index fd509fe..0d28c55 100644 --- a/media_manager/indexer/schemas.py +++ b/media_manager/indexer/schemas.py @@ -3,7 +3,7 @@ import typing from uuid import
UUID, uuid4 import pydantic -from pydantic import BaseModel, computed_field, ConfigDict +from pydantic import BaseModel, ConfigDict, computed_field from media_manager.torrent.models import Quality @@ -30,7 +30,7 @@ class IndexerQueryResult(BaseModel): indexer: str | None - @computed_field(return_type=Quality) + @computed_field @property def quality(self) -> Quality: high_quality_pattern = r"\b(4k)\b" @@ -40,24 +40,22 @@ class IndexerQueryResult(BaseModel): if re.search(high_quality_pattern, self.title, re.IGNORECASE): return Quality.uhd - elif re.search(medium_quality_pattern, self.title, re.IGNORECASE): + if re.search(medium_quality_pattern, self.title, re.IGNORECASE): return Quality.fullhd - elif re.search(low_quality_pattern, self.title, re.IGNORECASE): + if re.search(low_quality_pattern, self.title, re.IGNORECASE): return Quality.hd - elif re.search(very_low_quality_pattern, self.title, re.IGNORECASE): + if re.search(very_low_quality_pattern, self.title, re.IGNORECASE): return Quality.sd return Quality.unknown - @computed_field(return_type=list[int]) + @computed_field @property def season(self) -> list[int]: - pattern = r"\b[sS](\d+)\b" + pattern = r"\bS(\d+)\b" matches = re.findall(pattern, self.title, re.IGNORECASE) if matches.__len__() == 2: - result = [] - for i in range(int(matches[0]), int(matches[1]) + 1): - result.append(i) + result = list(range(int(matches[0]), int(matches[1]) + 1)) elif matches.__len__() == 1: result = [int(matches[0])] else: diff --git a/media_manager/indexer/service.py b/media_manager/indexer/service.py index d99c8f9..5100851 100644 --- a/media_manager/indexer/service.py +++ b/media_manager/indexer/service.py @@ -4,8 +4,8 @@ from media_manager.config import MediaManagerConfig from media_manager.indexer.indexers.generic import GenericIndexer from media_manager.indexer.indexers.jackett import Jackett from media_manager.indexer.indexers.prowlarr import Prowlarr -from media_manager.indexer.schemas import IndexerQueryResultId, 
IndexerQueryResult from media_manager.indexer.repository import IndexerRepository +from media_manager.indexer.schemas import IndexerQueryResult, IndexerQueryResultId from media_manager.movies.schemas import Movie from media_manager.torrent.utils import remove_special_chars_and_parentheses from media_manager.tv.schemas import Show diff --git a/media_manager/indexer/utils.py b/media_manager/indexer/utils.py index b068b1b..a4337aa 100644 --- a/media_manager/indexer/utils.py +++ b/media_manager/indexer/utils.py @@ -132,7 +132,8 @@ def follow_redirects_to_final_torrent_url( if 300 <= response.status_code < 400: redirect_url = response.headers.get("Location") if not redirect_url: - raise RuntimeError("Redirect response without Location header") + msg = "Redirect response without Location header" + raise RuntimeError(msg) # Resolve relative redirects against the last URL current_url = urljoin(current_url, redirect_url) @@ -144,10 +145,12 @@ def follow_redirects_to_final_torrent_url( response.raise_for_status() # Raise an exception for bad status codes return current_url else: - raise RuntimeError("Exceeded maximum number of redirects") + msg = "Exceeded maximum number of redirects" + raise RuntimeError(msg) except requests.exceptions.RequestException as e: log.debug(f"An error occurred during the request for {initial_url}: {e}") - raise RuntimeError(f"An error occurred during the request: {e}") from e + msg = f"An error occurred during the request: {e}" + raise RuntimeError(msg) from e return current_url diff --git a/media_manager/logging.py b/media_manager/logging.py index 0b7abc6..2d63833 100644 --- a/media_manager/logging.py +++ b/media_manager/logging.py @@ -1,13 +1,16 @@ import logging import os import sys -from logging.config import dictConfig -from pythonjsonlogger.json import JsonFormatter -from pathlib import Path from datetime import datetime, timezone +from logging.config import dictConfig +from pathlib import Path +from typing import override + +from 
pythonjsonlogger.json import JsonFormatter class ISOJsonFormatter(JsonFormatter): + @override def formatTime(self, record, datefmt=None): dt = datetime.fromtimestamp(record.created, tz=timezone.utc) return dt.isoformat(timespec="milliseconds").replace("+00:00", "Z") diff --git a/media_manager/main.py b/media_manager/main.py index 283b5ee..8586e42 100644 --- a/media_manager/main.py +++ b/media_manager/main.py @@ -1,43 +1,47 @@ -from media_manager.logging import setup_logging, LOGGING_CONFIG -from media_manager.scheduler import setup_scheduler -from media_manager.filesystem_checks import run_filesystem_checks -from media_manager.config import MediaManagerConfig -import uvicorn +import logging import os -from fastapi import FastAPI, APIRouter + +import uvicorn +from fastapi import APIRouter, FastAPI from fastapi.middleware.cors import CORSMiddleware -from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware from fastapi.staticfiles import StaticFiles -from starlette.responses import RedirectResponse, FileResponse, Response -from media_manager.auth.users import ( - bearer_auth_backend, - fastapi_users, - cookie_auth_backend, -) +from psycopg.errors import UniqueViolation +from sqlalchemy.exc import IntegrityError +from starlette.responses import FileResponse, RedirectResponse, Response +from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware + +import media_manager.movies.router as movies_router +import media_manager.torrent.router as torrent_router +import media_manager.tv.router as tv_router from media_manager.auth.router import ( - users_router as custom_users_router, auth_metadata_router, get_openid_router, ) -from media_manager.auth.schemas import UserCreate, UserRead, UserUpdate -from media_manager.exceptions import ( - NotFoundError, - not_found_error_exception_handler, - MediaAlreadyExists, - media_already_exists_exception_handler, - InvalidConfigError, - invalid_config_error_exception_handler, - sqlalchemy_integrity_error_handler, - 
ConflictError, - conflict_error_handler, +from media_manager.auth.router import ( + users_router as custom_users_router, ) -from sqlalchemy.exc import IntegrityError -from psycopg.errors import UniqueViolation -import media_manager.torrent.router as torrent_router -import media_manager.movies.router as movies_router -import media_manager.tv.router as tv_router +from media_manager.auth.schemas import UserCreate, UserRead, UserUpdate +from media_manager.auth.users import ( + bearer_auth_backend, + cookie_auth_backend, + fastapi_users, +) +from media_manager.config import MediaManagerConfig +from media_manager.exceptions import ( + ConflictError, + InvalidConfigError, + MediaAlreadyExistsError, + NotFoundError, + conflict_error_handler, + invalid_config_error_exception_handler, + media_already_exists_exception_handler, + not_found_error_exception_handler, + sqlalchemy_integrity_error_handler, +) +from media_manager.filesystem_checks import run_filesystem_checks +from media_manager.logging import LOGGING_CONFIG, setup_logging from media_manager.notification.router import router as notification_router -import logging +from media_manager.scheduler import setup_scheduler setup_logging() @@ -47,7 +51,7 @@ log = logging.getLogger(__name__) if config.misc.development: log.warning("Development Mode activated!") -scheduler = setup_scheduler(config, log) +scheduler = setup_scheduler(config) run_filesystem_checks(config, log) @@ -56,7 +60,7 @@ FRONTEND_FILES_DIR = os.getenv("FRONTEND_FILES_DIR") DISABLE_FRONTEND_MOUNT = os.getenv("DISABLE_FRONTEND_MOUNT", "").lower() == "true" FRONTEND_FOLLOW_SYMLINKS = os.getenv("FRONTEND_FOLLOW_SYMLINKS", "").lower() == "true" - +log.info("Hello World!") app = FastAPI(root_path=BASE_PATH) app.add_middleware(ProxyHeadersMiddleware, trusted_hosts="*") origins = config.misc.cors_urls @@ -155,7 +159,7 @@ async def login(): # this will serve the custom 404 page for frontend routes, so SvelteKit can handle routing @app.exception_handler(404) -async 
def not_found_handler(request, exc): +async def not_found_handler(request, _exc): if not DISABLE_FRONTEND_MOUNT and any( base_path in ["/web", "/dashboard", "/login"] for base_path in request.url.path ): @@ -165,7 +169,9 @@ async def not_found_handler(request, exc): # Register exception handlers for custom exceptions app.add_exception_handler(NotFoundError, not_found_error_exception_handler) -app.add_exception_handler(MediaAlreadyExists, media_already_exists_exception_handler) +app.add_exception_handler( + MediaAlreadyExistsError, media_already_exists_exception_handler +) app.add_exception_handler(InvalidConfigError, invalid_config_error_exception_handler) app.add_exception_handler(IntegrityError, sqlalchemy_integrity_error_handler) app.add_exception_handler(UniqueViolation, sqlalchemy_integrity_error_handler) diff --git a/media_manager/metadataProvider/abstractMetaDataProvider.py b/media_manager/metadataProvider/abstract_metadata_provider.py similarity index 87% rename from media_manager/metadataProvider/abstractMetaDataProvider.py rename to media_manager/metadataProvider/abstract_metadata_provider.py index 471d7b7..301f06f 100644 --- a/media_manager/metadataProvider/abstractMetaDataProvider.py +++ b/media_manager/metadataProvider/abstract_metadata_provider.py @@ -1,10 +1,10 @@ import logging from abc import ABC, abstractmethod -from media_manager.metadataProvider.schemas import MetaDataProviderSearchResult -from media_manager.tv.schemas import Show -from media_manager.movies.schemas import Movie from media_manager.config import MediaManagerConfig +from media_manager.metadataProvider.schemas import MetaDataProviderSearchResult +from media_manager.movies.schemas import Movie +from media_manager.tv.schemas import Show log = logging.getLogger(__name__) @@ -18,11 +18,15 @@ class AbstractMetadataProvider(ABC): pass @abstractmethod - def get_show_metadata(self, id: int = None, language: str | None = None) -> Show: + def get_show_metadata( + self, show_id: int | None = 
None, language: str | None = None + ) -> Show: raise NotImplementedError() @abstractmethod - def get_movie_metadata(self, id: int = None, language: str | None = None) -> Movie: + def get_movie_metadata( + self, movie_id: int | None = None, language: str | None = None + ) -> Movie: raise NotImplementedError() @abstractmethod diff --git a/media_manager/metadataProvider/dependencies.py b/media_manager/metadataProvider/dependencies.py index bf4cd7b..502d4a6 100644 --- a/media_manager/metadataProvider/dependencies.py +++ b/media_manager/metadataProvider/dependencies.py @@ -1,12 +1,12 @@ from typing import Annotated, Literal from fastapi import Depends - from fastapi.exceptions import HTTPException -from media_manager.metadataProvider.tmdb import TmdbMetadataProvider -from media_manager.metadataProvider.abstractMetaDataProvider import ( + +from media_manager.metadataProvider.abstract_metadata_provider import ( AbstractMetadataProvider, ) +from media_manager.metadataProvider.tmdb import TmdbMetadataProvider from media_manager.metadataProvider.tvdb import TvdbMetadataProvider @@ -15,13 +15,12 @@ def get_metadata_provider( ) -> AbstractMetadataProvider: if metadata_provider == "tmdb": return TmdbMetadataProvider() - elif metadata_provider == "tvdb": + if metadata_provider == "tvdb": return TvdbMetadataProvider() - else: - raise HTTPException( - status_code=400, - detail=f"Invalid metadata provider: {metadata_provider}. Supported providers are 'tmdb' and 'tvdb'.", - ) + raise HTTPException( + status_code=400, + detail=f"Invalid metadata provider: {metadata_provider}. 
Supported providers are 'tmdb' and 'tvdb'.", + ) metadata_provider_dep = Annotated[ diff --git a/media_manager/metadataProvider/schemas.py b/media_manager/metadataProvider/schemas.py index 7720df0..98fff7d 100644 --- a/media_manager/metadataProvider/schemas.py +++ b/media_manager/metadataProvider/schemas.py @@ -1,4 +1,5 @@ from pydantic import BaseModel + from media_manager.movies.schemas import MovieId from media_manager.tv.schemas import ShowId diff --git a/media_manager/metadataProvider/tmdb.py b/media_manager/metadataProvider/tmdb.py index 2e59977..0fe3477 100644 --- a/media_manager/metadataProvider/tmdb.py +++ b/media_manager/metadataProvider/tmdb.py @@ -1,17 +1,17 @@ import logging +from typing import override import requests import media_manager.metadataProvider.utils from media_manager.config import MediaManagerConfig -from media_manager.metadataProvider.abstractMetaDataProvider import ( +from media_manager.metadataProvider.abstract_metadata_provider import ( AbstractMetadataProvider, ) from media_manager.metadataProvider.schemas import MetaDataProviderSearchResult -from media_manager.tv.schemas import Episode, Season, Show, SeasonNumber, EpisodeNumber from media_manager.movies.schemas import Movie from media_manager.notification.manager import notification_manager - +from media_manager.tv.schemas import Episode, EpisodeNumber, Season, SeasonNumber, Show ENDED_STATUS = {"Ended", "Canceled"} @@ -39,35 +39,40 @@ class TmdbMetadataProvider(AbstractMetadataProvider): return original_language return self.default_language - def __get_show_metadata(self, id: int, language: str | None = None) -> dict: + def __get_show_metadata(self, show_id: int, language: str | None = None) -> dict: if language is None: language = self.default_language try: response = requests.get( - url=f"{self.url}/tv/shows/{id}", params={"language": language} + url=f"{self.url}/tv/shows/{show_id}", + params={"language": language}, + timeout=60, ) response.raise_for_status() return 
response.json() except requests.RequestException as e: - log.error(f"TMDB API error getting show metadata for ID {id}: {e}") + log.error(f"TMDB API error getting show metadata for ID {show_id}: {e}") if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", - message=f"Failed to fetch show metadata for ID {id} from TMDB. Error: {str(e)}", + message=f"Failed to fetch show metadata for ID {show_id} from TMDB. Error: {e}", ) raise - def __get_show_external_ids(self, id: int) -> dict: + def __get_show_external_ids(self, show_id: int) -> dict: try: - response = requests.get(url=f"{self.url}/tv/shows/{id}/external_ids") + response = requests.get( + url=f"{self.url}/tv/shows/{show_id}/external_ids", + timeout=60, + ) response.raise_for_status() return response.json() except requests.RequestException as e: - log.error(f"TMDB API error getting show external IDs for ID {id}: {e}") + log.error(f"TMDB API error getting show external IDs for ID {show_id}: {e}") if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", - message=f"Failed to fetch show external IDs for ID {id} from TMDB. Error: {str(e)}", + message=f"Failed to fetch show external IDs for ID {show_id} from TMDB. Error: {e}", ) raise @@ -80,6 +85,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): response = requests.get( url=f"{self.url}/tv/shows/{show_id}/{season_number}", params={"language": language}, + timeout=60, ) response.raise_for_status() return response.json() @@ -90,7 +96,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", - message=f"Failed to fetch season {season_number} metadata for show ID {show_id} from TMDB. Error: {str(e)}", + message=f"Failed to fetch season {season_number} metadata for show ID {show_id} from TMDB. 
Error: {e}", ) raise @@ -102,6 +108,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): "query": query, "page": page, }, + timeout=60, ) response.raise_for_status() return response.json() @@ -110,7 +117,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", - message=f"Failed to search TV shows with query '{query}' on TMDB. Error: {str(e)}", + message=f"Failed to search TV shows with query '{query}' on TMDB. Error: {e}", ) raise @@ -119,6 +126,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): response = requests.get( url=f"{self.url}/tv/trending", params={"language": self.default_language}, + timeout=60, ) response.raise_for_status() return response.json() @@ -127,39 +135,45 @@ class TmdbMetadataProvider(AbstractMetadataProvider): if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", - message=f"Failed to fetch trending TV shows from TMDB. Error: {str(e)}", + message=f"Failed to fetch trending TV shows from TMDB. Error: {e}", ) raise - def __get_movie_metadata(self, id: int, language: str | None = None) -> dict: + def __get_movie_metadata(self, movie_id: int, language: str | None = None) -> dict: if language is None: language = self.default_language try: response = requests.get( - url=f"{self.url}/movies/{id}", params={"language": language} + url=f"{self.url}/movies/{movie_id}", + params={"language": language}, + timeout=60, ) response.raise_for_status() return response.json() except requests.RequestException as e: - log.error(f"TMDB API error getting movie metadata for ID {id}: {e}") + log.error(f"TMDB API error getting movie metadata for ID {movie_id}: {e}") if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", - message=f"Failed to fetch movie metadata for ID {id} from TMDB. 
Error: {str(e)}", + message=f"Failed to fetch movie metadata for ID {movie_id} from TMDB. Error: {e}", ) raise - def __get_movie_external_ids(self, id: int) -> dict: + def __get_movie_external_ids(self, movie_id: int) -> dict: try: - response = requests.get(url=f"{self.url}/movies/{id}/external_ids") + response = requests.get( + url=f"{self.url}/movies/{movie_id}/external_ids", timeout=60 + ) response.raise_for_status() return response.json() except requests.RequestException as e: - log.error(f"TMDB API error getting movie external IDs for ID {id}: {e}") + log.error( + f"TMDB API error getting movie external IDs for ID {movie_id}: {e}" + ) if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", - message=f"Failed to fetch movie external IDs for ID {id} from TMDB. Error: {str(e)}", + message=f"Failed to fetch movie external IDs for ID {movie_id} from TMDB. Error: {e}", ) raise @@ -171,6 +185,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): "query": query, "page": page, }, + timeout=60, ) response.raise_for_status() return response.json() @@ -179,7 +194,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", - message=f"Failed to search movies with query '{query}' on TMDB. Error: {str(e)}", + message=f"Failed to search movies with query '{query}' on TMDB. Error: {e}", ) raise @@ -188,6 +203,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): response = requests.get( url=f"{self.url}/movies/trending", params={"language": self.default_language}, + timeout=60, ) response.raise_for_status() return response.json() @@ -196,10 +212,11 @@ class TmdbMetadataProvider(AbstractMetadataProvider): if notification_manager.is_configured(): notification_manager.send_notification( title="TMDB API Error", - message=f"Failed to fetch trending movies from TMDB. 
Error: {str(e)}", + message=f"Failed to fetch trending movies from TMDB. Error: {e}", ) raise + @override def download_show_poster_image(self, show: Show) -> bool: # Determine which language to use based on show's original_language language = self.__get_language_param(show.original_language) @@ -214,7 +231,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): "https://image.tmdb.org/t/p/original" + show_metadata["poster_path"] ) if media_manager.metadataProvider.utils.download_poster_image( - storage_path=self.storage_path, poster_url=poster_url, id=show.id + storage_path=self.storage_path, poster_url=poster_url, uuid=show.id ): log.info("Successfully downloaded poster image for show " + show.name) else: @@ -225,7 +242,10 @@ class TmdbMetadataProvider(AbstractMetadataProvider): return False return True - def get_show_metadata(self, id: int = None, language: str | None = None) -> Show: + @override + def get_show_metadata( + self, show_id: int | None = None, language: str | None = None + ) -> Show: """ :param id: the external id of the show @@ -237,17 +257,17 @@ class TmdbMetadataProvider(AbstractMetadataProvider): """ # If language not provided, fetch once to determine original language if language is None: - show_metadata = self.__get_show_metadata(id) + show_metadata = self.__get_show_metadata(show_id) language = show_metadata.get("original_language") # Determine which language to use for metadata language = self.__get_language_param(language) # Fetch show metadata in the appropriate language - show_metadata = self.__get_show_metadata(id, language=language) + show_metadata = self.__get_show_metadata(show_id, language=language) # get imdb id - external_ids = self.__get_show_external_ids(id=id) + external_ids = self.__get_show_external_ids(show_id=show_id) imdb_id = external_ids.get("imdb_id") season_list = [] @@ -258,16 +278,14 @@ class TmdbMetadataProvider(AbstractMetadataProvider): season_number=season["season_number"], language=language, ) - episode_list = 
[] - - for episode in season_metadata["episodes"]: - episode_list.append( - Episode( - external_id=int(episode["id"]), - title=episode["name"], - number=EpisodeNumber(episode["episode_number"]), - ) + episode_list = [ + Episode( + external_id=int(episode["id"]), + title=episode["name"], + number=EpisodeNumber(episode["episode_number"]), ) + for episode in season_metadata["episodes"] + ] season_list.append( Season( @@ -283,8 +301,8 @@ class TmdbMetadataProvider(AbstractMetadataProvider): show_metadata["first_air_date"] ) - show = Show( - external_id=id, + return Show( + external_id=show_id, name=show_metadata["name"], overview=show_metadata["overview"], year=year, @@ -295,8 +313,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): imdb_id=imdb_id, ) - return show - + @override def search_show( self, query: str | None = None, max_pages: int = 5 ) -> list[MetaDataProviderSearchResult]: @@ -313,8 +330,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): if not result_page["results"]: break - else: - results.extend(result_page["results"]) + results.extend(result_page["results"]) formatted_results = [] for result in results: @@ -356,7 +372,10 @@ class TmdbMetadataProvider(AbstractMetadataProvider): log.warning(f"Error processing search result: {e}") return formatted_results - def get_movie_metadata(self, id: int = None, language: str | None = None) -> Movie: + @override + def get_movie_metadata( + self, movie_id: int | None = None, language: str | None = None + ) -> Movie: """ Get movie metadata with language-aware fetching. 
@@ -369,25 +388,25 @@ class TmdbMetadataProvider(AbstractMetadataProvider): """ # If language not provided, fetch once to determine original language if language is None: - movie_metadata = self.__get_movie_metadata(id=id) + movie_metadata = self.__get_movie_metadata(movie_id=movie_id) language = movie_metadata.get("original_language") # Determine which language to use for metadata language = self.__get_language_param(language) # Fetch movie metadata in the appropriate language - movie_metadata = self.__get_movie_metadata(id=id, language=language) + movie_metadata = self.__get_movie_metadata(movie_id=movie_id, language=language) # get imdb id - external_ids = self.__get_movie_external_ids(id=id) + external_ids = self.__get_movie_external_ids(movie_id=movie_id) imdb_id = external_ids.get("imdb_id") year = media_manager.metadataProvider.utils.get_year_from_date( movie_metadata["release_date"] ) - movie = Movie( - external_id=id, + return Movie( + external_id=movie_id, name=movie_metadata["title"], overview=movie_metadata["overview"], year=year, @@ -396,8 +415,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): imdb_id=imdb_id, ) - return movie - + @override def search_movie( self, query: str | None = None, max_pages: int = 5 ) -> list[MetaDataProviderSearchResult]: @@ -414,8 +432,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): if not result_page["results"]: break - else: - results.extend(result_page["results"]) + results.extend(result_page["results"]) formatted_results = [] for result in results: @@ -457,13 +474,14 @@ class TmdbMetadataProvider(AbstractMetadataProvider): log.warning(f"Error processing search result: {e}") return formatted_results + @override def download_movie_poster_image(self, movie: Movie) -> bool: # Determine which language to use based on movie's original_language language = self.__get_language_param(movie.original_language) # Fetch metadata in the appropriate language to get localized poster movie_metadata = 
self.__get_movie_metadata( - id=movie.external_id, language=language + movie_id=movie.external_id, language=language ) # downloading the poster @@ -473,7 +491,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): "https://image.tmdb.org/t/p/original" + movie_metadata["poster_path"] ) if media_manager.metadataProvider.utils.download_poster_image( - storage_path=self.storage_path, poster_url=poster_url, id=movie.id + storage_path=self.storage_path, poster_url=poster_url, uuid=movie.id ): log.info("Successfully downloaded poster image for movie " + movie.name) else: diff --git a/media_manager/metadataProvider/tvdb.py b/media_manager/metadataProvider/tvdb.py index 5198121..a3d100b 100644 --- a/media_manager/metadataProvider/tvdb.py +++ b/media_manager/metadataProvider/tvdb.py @@ -1,14 +1,16 @@ -import requests import logging +from typing import override + +import requests import media_manager.metadataProvider.utils from media_manager.config import MediaManagerConfig -from media_manager.metadataProvider.abstractMetaDataProvider import ( +from media_manager.metadataProvider.abstract_metadata_provider import ( AbstractMetadataProvider, ) from media_manager.metadataProvider.schemas import MetaDataProviderSearchResult -from media_manager.tv.schemas import Episode, Season, Show, SeasonNumber from media_manager.movies.schemas import Movie +from media_manager.tv.schemas import Episode, Season, SeasonNumber, Show log = logging.getLogger(__name__) @@ -20,59 +22,56 @@ class TvdbMetadataProvider(AbstractMetadataProvider): config = MediaManagerConfig().metadata.tvdb self.url = config.tvdb_relay_url - def __get_show(self, id: int) -> dict: - return requests.get(f"{self.url}/tv/shows/{id}").json() + def __get_show(self, show_id: int) -> dict: + return requests.get(url=f"{self.url}/tv/shows/{show_id}", timeout=60).json() - def __get_season(self, id: int) -> dict: - return requests.get(f"{self.url}/tv/seasons/{id}").json() + def __get_season(self, show_id: int) -> dict: + return 
requests.get(url=f"{self.url}/tv/seasons/{show_id}", timeout=60).json() def __search_tv(self, query: str) -> dict: return requests.get( - f"{self.url}/tv/search", - params={"query": query}, + url=f"{self.url}/tv/search", params={"query": query}, timeout=60 ).json() def __get_trending_tv(self) -> dict: - return requests.get(f"{self.url}/tv/trending").json() + return requests.get(url=f"{self.url}/tv/trending", timeout=60).json() - def __get_movie(self, id: int) -> dict: - return requests.get(f"{self.url}/movies/{id}").json() + def __get_movie(self, movie_id: int) -> dict: + return requests.get(url=f"{self.url}/movies/{movie_id}", timeout=60).json() def __search_movie(self, query: str) -> dict: return requests.get( - f"{self.url}/movies/search", - params={"query": query}, + url=f"{self.url}/movies/search", params={"query": query}, timeout=60 ).json() def __get_trending_movies(self) -> dict: - return requests.get(f"{self.url}/movies/trending").json() + return requests.get(url=f"{self.url}/movies/trending", timeout=60).json() + @override def download_show_poster_image(self, show: Show) -> bool: - show_metadata = self.__get_show(id=show.external_id) + show_metadata = self.__get_show(show_id=show.external_id) if show_metadata["image"] is not None: media_manager.metadataProvider.utils.download_poster_image( storage_path=self.storage_path, poster_url=show_metadata["image"], - id=show.id, + uuid=show.id, ) log.debug("Successfully downloaded poster image for show " + show.name) return True - else: - log.warning(f"image for show {show.name} could not be downloaded") - return False + log.warning(f"image for show {show.name} could not be downloaded") + return False - def get_show_metadata(self, id: int = None, language: str | None = None) -> Show: + @override + def get_show_metadata( + self, show_id: int | None = None, language: str | None = None + ) -> Show: """ :param id: the external id of the show - :type id: int :param language: does nothing, TVDB does not support multiple 
languages - :type language: str | None - :return: returns a ShowMetadata object - :rtype: ShowMetadata """ - series = self.__get_show(id=id) + series = self.__get_show(show_id) seasons = [] seasons_ids = [season["id"] for season in series["seasons"]] @@ -85,7 +84,7 @@ class TvdbMetadataProvider(AbstractMetadataProvider): imdb_id = remote_id.get("id") for season in seasons_ids: - s = self.__get_season(id=season) + s = self.__get_season(show_id=season) # the seasons need to be filtered to a certain type, # otherwise the same season will be imported in aired and dvd order, # which causes duplicate season number + show ids which in turn violates a unique constraint of the season table @@ -112,15 +111,11 @@ class TvdbMetadataProvider(AbstractMetadataProvider): episodes=episodes, ) ) - try: - year = series["year"] - except KeyError: - year = None - show = Show( + return Show( name=series["name"], overview=series["overview"], - year=year, + year=series.get("year"), external_id=series["id"], metadata_provider=self.name, seasons=seasons, @@ -128,8 +123,7 @@ class TvdbMetadataProvider(AbstractMetadataProvider): imdb_id=imdb_id, ) - return show - + @override def search_show( self, query: str | None = None ) -> list[MetaDataProviderSearchResult]: @@ -159,36 +153,36 @@ class TvdbMetadataProvider(AbstractMetadataProvider): except Exception as e: log.warning(f"Error processing search result: {e}") return formatted_results - else: - results = self.__get_trending_tv() - formatted_results = [] - for result in results: - try: - if result["type"] == "series": - try: - year = result["year"] - except KeyError: - year = None + results = self.__get_trending_tv() + formatted_results = [] + for result in results: + try: + if result["type"] == "series": + try: + year = result["year"] + except KeyError: + year = None - formatted_results.append( - MetaDataProviderSearchResult( - poster_path="https://artworks.thetvdb.com" - + result.get("image") - if result.get("image") - else None, - 
overview=result.get("overview"), - name=result["name"], - external_id=result["id"], - year=year, - metadata_provider=self.name, - added=False, - vote_average=None, - ) + formatted_results.append( + MetaDataProviderSearchResult( + poster_path="https://artworks.thetvdb.com" + + result.get("image") + if result.get("image") + else None, + overview=result.get("overview"), + name=result["name"], + external_id=result["id"], + year=year, + metadata_provider=self.name, + added=False, + vote_average=None, ) - except Exception as e: - log.warning(f"Error processing search result: {e}") - return formatted_results + ) + except Exception as e: + log.warning(f"Error processing search result: {e}") + return formatted_results + @override def search_movie( self, query: str | None = None ) -> list[MetaDataProviderSearchResult]: @@ -224,38 +218,37 @@ class TvdbMetadataProvider(AbstractMetadataProvider): except Exception as e: log.warning(f"Error processing search result: {e}") return formatted_results - else: - results = self.__get_trending_movies() - results = results[0:20] - log.debug(f"got {len(results)} results from TVDB search") - formatted_results = [] - for result in results: - result = self.__get_movie(result["id"]) + results = self.__get_trending_movies() + results = results[0:20] + log.debug(f"got {len(results)} results from TVDB search") + formatted_results = [] + for result in results: + result = self.__get_movie(result["id"]) + try: try: - try: - year = result["year"] - except KeyError: - year = None + year = result["year"] + except KeyError: + year = None - formatted_results.append( - MetaDataProviderSearchResult( - poster_path="https://artworks.thetvdb.com" - + result.get("image") - if result.get("image") - else None, - overview=result.get("overview"), - name=result["name"], - external_id=result["id"], - year=year, - metadata_provider=self.name, - added=False, - vote_average=None, - ) + formatted_results.append( + MetaDataProviderSearchResult( + 
poster_path="https://artworks.thetvdb.com" + result.get("image") + if result.get("image") + else None, + overview=result.get("overview"), + name=result["name"], + external_id=result["id"], + year=year, + metadata_provider=self.name, + added=False, + vote_average=None, ) - except Exception as e: - log.warning(f"Error processing search result: {e}") - return formatted_results + ) + except Exception as e: + log.warning(f"Error processing search result: {e}") + return formatted_results + @override def download_movie_poster_image(self, movie: Movie) -> bool: movie_metadata = self.__get_movie(movie.external_id) @@ -263,25 +256,24 @@ class TvdbMetadataProvider(AbstractMetadataProvider): media_manager.metadataProvider.utils.download_poster_image( storage_path=self.storage_path, poster_url=movie_metadata["image"], - id=movie.id, + uuid=movie.id, ) log.info("Successfully downloaded poster image for show " + movie.name) return True - else: - log.warning(f"image for show {movie.name} could not be downloaded") - return False + log.warning(f"image for show {movie.name} could not be downloaded") + return False - def get_movie_metadata(self, id: int = None, language: str | None = None) -> Movie: + @override + def get_movie_metadata( + self, movie_id: int | None = None, language: str | None = None + ) -> Movie: """ - :param id: the external id of the movie - :type id: int + :param movie_id: the external id of the movie :param language: does nothing, TVDB does not support multiple languages - :type language: str | None :return: returns a Movie object - :rtype: Movie """ - movie = self.__get_movie(id) + movie = self.__get_movie(movie_id) # get imdb id from remote ids imdb_id = None @@ -291,7 +283,7 @@ class TvdbMetadataProvider(AbstractMetadataProvider): if remote_id.get("type") == 2: imdb_id = remote_id.get("id") - movie = Movie( + return Movie( name=movie["name"], overview="Overviews are not supported with TVDB", year=movie.get("year"), @@ -299,5 +291,3 @@ class 
TvdbMetadataProvider(AbstractMetadataProvider): metadata_provider=self.name, imdb_id=imdb_id, ) - - return movie diff --git a/media_manager/metadataProvider/utils.py b/media_manager/metadataProvider/utils.py index 8d12690..bff3cbd 100644 --- a/media_manager/metadataProvider/utils.py +++ b/media_manager/metadataProvider/utils.py @@ -1,29 +1,25 @@ +from pathlib import Path from uuid import UUID -from PIL import Image import requests -import pillow_avif - -pillow_avif +from PIL import Image def get_year_from_date(first_air_date: str | None) -> int | None: if first_air_date: return int(first_air_date.split("-")[0]) - else: - return None + return None -def download_poster_image(storage_path=None, poster_url=None, id: UUID = None) -> bool: - res = requests.get(poster_url, stream=True) +def download_poster_image(storage_path: Path, poster_url: str, uuid: UUID) -> bool: + res = requests.get(poster_url, stream=True, timeout=60) + if res.status_code == 200: - image_file_path = storage_path.joinpath(str(id)) - with open(str(image_file_path) + ".jpg", "wb") as f: - f.write(res.content) + image_file_path = storage_path.joinpath(str(uuid)).with_suffix("jpg") + image_file_path.write_bytes(res.content) - original_image = Image.open(str(image_file_path) + ".jpg") - original_image.save(str(image_file_path) + ".avif", quality=50) - original_image.save(str(image_file_path) + ".webp", quality=50) + original_image = Image.open(image_file_path) + original_image.save(image_file_path.with_suffix(".avif"), quality=50) + original_image.save(image_file_path.with_suffix(".webp"), quality=50) return True - else: - return False + return False diff --git a/media_manager/movies/dependencies.py b/media_manager/movies/dependencies.py index b6b3bca..33bb786 100644 --- a/media_manager/movies/dependencies.py +++ b/media_manager/movies/dependencies.py @@ -1,16 +1,15 @@ from typing import Annotated -from fastapi import Depends, Path +from fastapi import Depends, HTTPException, Path from 
media_manager.database import DbSessionDependency +from media_manager.exceptions import NotFoundError +from media_manager.indexer.dependencies import indexer_service_dep from media_manager.movies.repository import MovieRepository from media_manager.movies.schemas import Movie, MovieId from media_manager.movies.service import MovieService -from media_manager.exceptions import NotFoundError -from fastapi import HTTPException -from media_manager.indexer.dependencies import indexer_service_dep -from media_manager.torrent.dependencies import torrent_service_dep from media_manager.notification.dependencies import notification_service_dep +from media_manager.torrent.dependencies import torrent_service_dep def get_movie_repository(db_session: DbSessionDependency) -> MovieRepository: @@ -47,7 +46,7 @@ def get_movie_by_id( raise HTTPException( status_code=404, detail=f"Movie with ID {movie_id} not found.", - ) + ) from None return movie diff --git a/media_manager/movies/repository.py b/media_manager/movies/repository.py index cbcf497..f78e0cf 100644 --- a/media_manager/movies/repository.py +++ b/media_manager/movies/repository.py @@ -1,22 +1,33 @@ -from sqlalchemy import select, delete +import logging + +from sqlalchemy import delete, select from sqlalchemy.exc import ( IntegrityError, SQLAlchemyError, ) from sqlalchemy.orm import Session, joinedload -import logging -from media_manager.exceptions import NotFoundError, ConflictError -from media_manager.movies.models import Movie, MovieRequest, MovieFile +from media_manager.exceptions import ConflictError, NotFoundError +from media_manager.movies.models import Movie, MovieFile, MovieRequest from media_manager.movies.schemas import ( Movie as MovieSchema, - MovieId, - MovieRequest as MovieRequestSchema, - MovieRequestId, +) +from media_manager.movies.schemas import ( MovieFile as MovieFileSchema, - RichMovieRequest as RichMovieRequestSchema, +) +from media_manager.movies.schemas import ( + MovieId, + MovieRequestId, +) +from 
media_manager.movies.schemas import ( + MovieRequest as MovieRequestSchema, +) +from media_manager.movies.schemas import ( MovieTorrent as MovieTorrentSchema, ) +from media_manager.movies.schemas import ( + RichMovieRequest as RichMovieRequestSchema, +) from media_manager.torrent.models import Torrent from media_manager.torrent.schemas import TorrentId @@ -45,7 +56,8 @@ class MovieRepository: stmt = select(Movie).where(Movie.id == movie_id) result = self.db.execute(stmt).unique().scalar_one_or_none() if not result: - raise NotFoundError(f"Movie with id {movie_id} not found.") + msg = f"Movie with id {movie_id} not found." + raise NotFoundError(msg) return MovieSchema.model_validate(result) except SQLAlchemyError as e: log.error(f"Database error while retrieving movie {movie_id}: {e}") @@ -71,9 +83,8 @@ class MovieRepository: ) result = self.db.execute(stmt).unique().scalar_one_or_none() if not result: - raise NotFoundError( - f"Movie with external_id {external_id} and provider {metadata_provider} not found." - ) + msg = f"Movie with external_id {external_id} and provider {metadata_provider} not found." + raise NotFoundError(msg) return MovieSchema.model_validate(result) except SQLAlchemyError as e: log.error( @@ -130,9 +141,10 @@ class MovieRepository: except IntegrityError as e: self.db.rollback() log.error(f"Integrity error while saving movie {movie.name}: {e}") - raise ConflictError( + msg = ( f"Movie with this primary key or unique constraint violation: {e.orig}" ) + raise ConflictError(msg) from e except SQLAlchemyError as e: self.db.rollback() log.error(f"Database error while saving movie {movie.name}: {e}") @@ -151,7 +163,8 @@ class MovieRepository: movie = self.db.get(Movie, movie_id) if not movie: log.warning(f"Movie with id {movie_id} not found for deletion.") - raise NotFoundError(f"Movie with id {movie_id} not found.") + msg = f"Movie with id {movie_id} not found." 
+ raise NotFoundError(msg) self.db.delete(movie) self.db.commit() log.info(f"Successfully deleted movie with id: {movie_id}") @@ -212,7 +225,8 @@ class MovieRepository: try: movie = self.db.get(Movie, movie_id) if not movie: - raise NotFoundError(f"movie with id {movie_id} not found.") + msg = f"movie with id {movie_id} not found." + raise NotFoundError(msg) movie.library = library self.db.commit() except SQLAlchemyError as e: @@ -233,9 +247,8 @@ class MovieRepository: result = self.db.execute(stmt) if result.rowcount == 0: self.db.rollback() - raise NotFoundError( - f"movie request with id {movie_request_id} not found." - ) + msg = f"movie request with id {movie_request_id} not found." + raise NotFoundError(msg) self.db.commit() # Successfully deleted movie request with id: {movie_request_id} except SQLAlchemyError as e: @@ -300,8 +313,7 @@ class MovieRepository: stmt = delete(MovieFile).where(MovieFile.torrent_id == torrent_id) result = self.db.execute(stmt) self.db.commit() - deleted_count = result.rowcount - return deleted_count + return result.rowcount except SQLAlchemyError as e: self.db.rollback() log.error( @@ -395,9 +407,8 @@ class MovieRepository: try: request = self.db.get(MovieRequest, movie_request_id) if not request: - raise NotFoundError( - f"Movie request with id {movie_request_id} not found." - ) + msg = f"Movie request with id {movie_request_id} not found." + raise NotFoundError(msg) return MovieRequestSchema.model_validate(request) except SQLAlchemyError as e: log.error( @@ -422,7 +433,8 @@ class MovieRepository: ) result = self.db.execute(stmt).unique().scalar_one_or_none() if not result: - raise NotFoundError(f"Movie for torrent_id {torrent_id} not found.") + msg = f"Movie for torrent_id {torrent_id} not found." 
+ raise NotFoundError(msg) return MovieSchema.model_validate(result) except SQLAlchemyError as e: log.error( @@ -450,7 +462,8 @@ class MovieRepository: """ db_movie = self.db.get(Movie, movie_id) if not db_movie: - raise NotFoundError(f"Movie with id {movie_id} not found.") + msg = f"Movie with id {movie_id} not found." + raise NotFoundError(msg) updated = False if name is not None and db_movie.name != name: diff --git a/media_manager/movies/router.py b/media_manager/movies/router.py index 41d8113..1ffe82e 100644 --- a/media_manager/movies/router.py +++ b/media_manager/movies/router.py @@ -1,33 +1,33 @@ from pathlib import Path from typing import Annotated -from fastapi import APIRouter, Depends, status, HTTPException +from fastapi import APIRouter, Depends, HTTPException, status from media_manager.auth.schemas import UserRead from media_manager.auth.users import current_active_user, current_superuser from media_manager.config import LibraryItem, MediaManagerConfig from media_manager.exceptions import ConflictError from media_manager.indexer.schemas import ( - IndexerQueryResultId, IndexerQueryResult, + IndexerQueryResultId, ) from media_manager.metadataProvider.dependencies import metadata_provider_dep from media_manager.metadataProvider.schemas import MetaDataProviderSearchResult from media_manager.movies import log from media_manager.movies.dependencies import ( - movie_service_dep, movie_dep, + movie_service_dep, ) from media_manager.movies.schemas import ( + CreateMovieRequest, Movie, MovieRequest, - RichMovieTorrent, + MovieRequestBase, + MovieRequestId, PublicMovie, PublicMovieFile, - CreateMovieRequest, - MovieRequestId, RichMovieRequest, - MovieRequestBase, + RichMovieTorrent, ) from media_manager.schemas import MediaImportSuggestion from media_manager.torrent.schemas import Torrent @@ -43,13 +43,12 @@ router = APIRouter() @router.get( "/search", dependencies=[Depends(current_active_user)], - response_model=list[MetaDataProviderSearchResult], ) def 
search_for_movie( query: str, movie_service: movie_service_dep, metadata_provider: metadata_provider_dep, -): +) -> list[MetaDataProviderSearchResult]: """ Search for a movie on the configured metadata provider. """ @@ -61,12 +60,11 @@ def search_for_movie( @router.get( "/recommended", dependencies=[Depends(current_active_user)], - response_model=list[MetaDataProviderSearchResult], ) def get_popular_movies( movie_service: movie_service_dep, metadata_provider: metadata_provider_dep, -): +) -> list[MetaDataProviderSearchResult]: """ Get a list of recommended/popular movies from the metadata provider. """ @@ -82,11 +80,10 @@ def get_popular_movies( "/importable", status_code=status.HTTP_200_OK, dependencies=[Depends(current_superuser)], - response_model=list[MediaImportSuggestion], ) def get_all_importable_movies( movie_service: movie_service_dep, metadata_provider: metadata_provider_dep -): +) -> list[MediaImportSuggestion]: """ Get a list of unknown movies that were detected in the movie directory and are importable. """ @@ -124,9 +121,8 @@ def import_detected_movie( @router.get( "", dependencies=[Depends(current_active_user)], - response_model=list[PublicMovie], ) -def get_all_movies(movie_service: movie_service_dep): +def get_all_movies(movie_service: movie_service_dep) -> list[Movie]: """ Get all movies in the library. """ @@ -169,9 +165,10 @@ def add_a_movie( @router.get( "/torrents", dependencies=[Depends(current_active_user)], - response_model=list[RichMovieTorrent], ) -def get_all_movies_with_torrents(movie_service: movie_service_dep): +def get_all_movies_with_torrents( + movie_service: movie_service_dep, +) -> list[RichMovieTorrent]: """ Get all movies that are associated with torrents. 
""" @@ -181,9 +178,8 @@ def get_all_movies_with_torrents(movie_service: movie_service_dep): @router.get( "/libraries", dependencies=[Depends(current_active_user)], - response_model=list[LibraryItem], ) -def get_available_libraries(): +def get_available_libraries() -> list[LibraryItem]: """ Get available Movie libraries from configuration. """ @@ -198,9 +194,8 @@ def get_available_libraries(): @router.get( "/requests", dependencies=[Depends(current_active_user)], - response_model=list[RichMovieRequest], ) -def get_all_movie_requests(movie_service: movie_service_dep): +def get_all_movie_requests(movie_service: movie_service_dep) -> list[RichMovieRequest]: """ Get all movie requests. """ @@ -210,13 +205,12 @@ def get_all_movie_requests(movie_service: movie_service_dep): @router.post( "/requests", status_code=status.HTTP_201_CREATED, - response_model=MovieRequest, ) def create_movie_request( movie_service: movie_service_dep, movie_request: CreateMovieRequest, user: Annotated[UserRead, Depends(current_active_user)], -): +) -> MovieRequest: """ Create a new movie request. """ @@ -234,14 +228,13 @@ def create_movie_request( @router.put( "/requests/{movie_request_id}", - response_model=MovieRequest, ) def update_movie_request( movie_service: movie_service_dep, movie_request_id: MovieRequestId, update_movie_request: MovieRequestBase, user: Annotated[UserRead, Depends(current_active_user)], -): +) -> MovieRequest: """ Update an existing movie request. """ @@ -298,9 +291,8 @@ def delete_movie_request( @router.get( "/{movie_id}", dependencies=[Depends(current_active_user)], - response_model=PublicMovie, ) -def get_movie_by_id(movie_service: movie_service_dep, movie: movie_dep): +def get_movie_by_id(movie_service: movie_service_dep, movie: movie_dep) -> PublicMovie: """ Get details for a specific movie. 
""" @@ -331,7 +323,6 @@ def delete_a_movie( @router.post( "/{movie_id}/library", dependencies=[Depends(current_superuser)], - response_model=None, status_code=status.HTTP_204_NO_CONTENT, ) def set_library( @@ -349,9 +340,10 @@ def set_library( @router.get( "/{movie_id}/files", dependencies=[Depends(current_active_user)], - response_model=list[PublicMovieFile], ) -def get_movie_files_by_movie_id(movie_service: movie_service_dep, movie: movie_dep): +def get_movie_files_by_movie_id( + movie_service: movie_service_dep, movie: movie_dep +) -> list[PublicMovieFile]: """ Get files associated with a specific movie. """ @@ -361,13 +353,12 @@ def get_movie_files_by_movie_id(movie_service: movie_service_dep, movie: movie_d @router.get( "/{movie_id}/torrents", dependencies=[Depends(current_active_user)], - response_model=list[IndexerQueryResult], ) def search_for_torrents_for_movie( movie_service: movie_service_dep, movie: movie_dep, search_query_override: str | None = None, -): +) -> list[IndexerQueryResult]: """ Search for torrents for a specific movie. """ @@ -380,14 +371,13 @@ def search_for_torrents_for_movie( "/{movie_id}/torrents", status_code=status.HTTP_201_CREATED, dependencies=[Depends(current_active_user)], - response_model=Torrent, ) def download_torrent_for_movie( movie_service: movie_service_dep, movie: movie_dep, public_indexer_result_id: IndexerQueryResultId, override_file_path_suffix: str = "", -): +) -> Torrent: """ Trigger a download for a specific torrent for a movie. 
""" diff --git a/media_manager/movies/schemas.py b/media_manager/movies/schemas.py index 6061df9..d176be7 100644 --- a/media_manager/movies/schemas.py +++ b/media_manager/movies/schemas.py @@ -2,7 +2,7 @@ import typing import uuid from uuid import UUID -from pydantic import BaseModel, Field, ConfigDict, model_validator +from pydantic import BaseModel, ConfigDict, Field, model_validator from media_manager.auth.schemas import UserRead from media_manager.torrent.models import Quality @@ -47,9 +47,8 @@ class MovieRequestBase(BaseModel): @model_validator(mode="after") def ensure_wanted_quality_is_eq_or_gt_min_quality(self) -> "MovieRequestBase": if self.min_quality.value < self.wanted_quality.value: - raise ValueError( - "wanted_quality must be equal to or lower than minimum_quality." - ) + msg = "wanted_quality must be equal to or lower than minimum_quality." + raise ValueError(msg) return self diff --git a/media_manager/movies/service.py b/media_manager/movies/service.py index bc81945..a309ed8 100644 --- a/media_manager/movies/service.py +++ b/media_manager/movies/service.py @@ -1,51 +1,55 @@ import re import shutil from pathlib import Path +from typing import overload from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session from media_manager.config import MediaManagerConfig +from media_manager.database import SessionLocal, get_session from media_manager.exceptions import InvalidConfigError, NotFoundError from media_manager.indexer.repository import IndexerRepository -from media_manager.database import SessionLocal, get_session -from media_manager.indexer.schemas import IndexerQueryResult -from media_manager.indexer.schemas import IndexerQueryResultId -from media_manager.indexer.utils import evaluate_indexer_query_results -from media_manager.metadataProvider.schemas import MetaDataProviderSearchResult -from media_manager.notification.service import NotificationService -from media_manager.schemas import MediaImportSuggestion -from 
media_manager.torrent.schemas import Torrent, TorrentStatus, Quality -from media_manager.torrent.service import TorrentService -from media_manager.movies import log -from media_manager.movies.schemas import ( - Movie, - MovieId, - MovieRequest, - MovieFile, - RichMovieTorrent, - PublicMovie, - PublicMovieFile, - MovieRequestId, - RichMovieRequest, -) -from media_manager.torrent.schemas import QualityStrings -from media_manager.movies.repository import MovieRepository -from media_manager.torrent.repository import TorrentRepository -from media_manager.torrent.utils import ( - import_file, - get_files_for_import, - remove_special_characters, - get_importable_media_directories, - remove_special_chars_and_parentheses, - extract_external_id_from_string, -) +from media_manager.indexer.schemas import IndexerQueryResult, IndexerQueryResultId from media_manager.indexer.service import IndexerService -from media_manager.metadataProvider.abstractMetaDataProvider import ( +from media_manager.indexer.utils import evaluate_indexer_query_results +from media_manager.metadataProvider.abstract_metadata_provider import ( AbstractMetadataProvider, ) +from media_manager.metadataProvider.schemas import MetaDataProviderSearchResult from media_manager.metadataProvider.tmdb import TmdbMetadataProvider from media_manager.metadataProvider.tvdb import TvdbMetadataProvider +from media_manager.movies import log +from media_manager.movies.repository import MovieRepository +from media_manager.movies.schemas import ( + Movie, + MovieFile, + MovieId, + MovieRequest, + MovieRequestId, + PublicMovie, + PublicMovieFile, + RichMovieRequest, + RichMovieTorrent, +) +from media_manager.notification.service import NotificationService +from media_manager.schemas import MediaImportSuggestion +from media_manager.torrent.repository import TorrentRepository +from media_manager.torrent.schemas import ( + Quality, + QualityStrings, + Torrent, + TorrentStatus, +) +from media_manager.torrent.service import 
TorrentService +from media_manager.torrent.utils import ( + extract_external_id_from_string, + get_files_for_import, + get_importable_media_directories, + import_file, + remove_special_characters, + remove_special_chars_and_parentheses, +) class MovieService: @@ -75,7 +79,7 @@ class MovieService: :param language: Optional language code (ISO 639-1) to fetch metadata in. """ movie_with_metadata = metadata_provider.get_movie_metadata( - id=external_id, language=language + movie_id=external_id, language=language ) if not movie_with_metadata: return None @@ -158,7 +162,9 @@ class MovieService: ) for movie_torrent in movie_torrents: - torrent = self.torrent_service.get_torrent_by_id(torrent_id=movie_torrent.torrent_id) + torrent = self.torrent_service.get_torrent_by_id( + torrent_id=movie_torrent.torrent_id + ) try: self.torrent_service.cancel_download( torrent=torrent, delete_files=True @@ -187,11 +193,33 @@ class MovieService: result.append(movie_file) return result + @overload + def check_if_movie_exists( + self, *, external_id: int, metadata_provider: str + ) -> bool: + """ + Check if a movie exists in the database. + + :param external_id: The external ID of the movie. + :param metadata_provider: The metadata provider. + :return: True if the movie exists, False otherwise. + """ + + @overload + def check_if_movie_exists(self, *, movie_id: MovieId) -> bool: + """ + Check if a movie exists in the database. + + :param movie_id: The ID of the movie. + :return: True if the movie exists, False otherwise. + """ + def check_if_movie_exists( self, - external_id: int = None, - metadata_provider: str = None, - movie_id: MovieId = None, + *, + external_id=None, + metadata_provider=None, + movie_id=None, ) -> bool: """ Check if a movie exists in the database. @@ -202,7 +230,8 @@ class MovieService: :return: True if the movie exists, False otherwise. :raises ValueError: If neither external ID and metadata provider nor movie ID are provided. 
""" - if external_id and metadata_provider: + + if not (external_id is None or metadata_provider is None): try: self.movie_repository.get_movie_by_external_id( external_id=external_id, metadata_provider=metadata_provider @@ -210,20 +239,18 @@ class MovieService: return True except NotFoundError: return False - elif movie_id: + elif movie_id is not None: try: self.movie_repository.get_movie_by_id(movie_id=movie_id) return True except NotFoundError: return False - else: - raise ValueError( - "Either external_id and metadata_provider or movie_id must be provided" - ) + msg = "Use one of the provided overloads for this function!" + raise ValueError(msg) def get_all_available_torrents_for_movie( - self, movie: Movie, search_query_override: str = None + self, movie: Movie, search_query_override: str | None = None ) -> list[IndexerQueryResult]: """ Get all available torrents for a given movie. @@ -233,16 +260,13 @@ class MovieService: :return: A list of indexer query results. """ if search_query_override: - torrents = self.indexer_service.search( - query=search_query_override, is_tv=False - ) - return torrents - else: - torrents = self.indexer_service.search_movie(movie=movie) + return self.indexer_service.search(query=search_query_override, is_tv=False) - return evaluate_indexer_query_results( - is_tv=False, query_results=torrents, media=movie - ) + torrents = self.indexer_service.search_movie(movie=movie) + + return evaluate_indexer_query_results( + is_tv=False, query_results=torrents, media=movie + ) def get_all_movies(self) -> list[Movie]: """ @@ -291,16 +315,15 @@ class MovieService: :param metadata_provider: The metadata provider to use. :return: A list of metadata provider movie search results. 
""" - results: list[MetaDataProviderSearchResult] = metadata_provider.search_movie() + results = metadata_provider.search_movie() - filtered_results = [] - for result in results: + return [ + result + for result in results if not self.check_if_movie_exists( external_id=result.external_id, metadata_provider=metadata_provider.name - ): - filtered_results.append(result) - - return filtered_results + ) + ] def get_public_movie_by_id(self, movie: Movie) -> PublicMovie: """ @@ -348,12 +371,11 @@ class MovieService: """ if movie_file.torrent_id is None: return True - else: - torrent_file = self.torrent_service.get_torrent_by_id( - torrent_id=movie_file.torrent_id - ) - if torrent_file.imported: - return True + torrent_file = self.torrent_service.get_torrent_by_id( + torrent_id=movie_file.torrent_id + ) + if torrent_file.imported: + return True return False def get_movie_by_external_id( @@ -462,7 +484,8 @@ class MovieService: :raises ValueError: If the movie request is not authorized. """ if not movie_request.authorized: - raise ValueError("Movie request is not authorized") + msg = "Movie request is not authorized" + raise ValueError(msg) log.info(f"Downloading approved movie request {movie_request.id}") @@ -583,8 +606,7 @@ class MovieService: :param movie: The Movie object """ - video_files, subtitle_files, all_files = get_files_for_import(torrent=torrent) - success: list[bool] = [] + video_files, subtitle_files, _all_files = get_files_for_import(torrent=torrent) if len(video_files) != 1: # Send notification about multiple video files found @@ -609,15 +631,12 @@ class MovieService: f"Found {len(movie_files)} movie files associated with torrent {torrent.title}" ) - for movie_file in movie_files: - success.append( - self.import_movie( - movie=movie, - video_files=video_files, - subtitle_files=subtitle_files, - file_path_suffix=movie_file.file_path_suffix, - ) + success = [ + self.import_movie( + movie, video_files, subtitle_files, movie_file.file_path_suffix ) + for 
movie_file in movie_files + ] if all(success): torrent.imported = True @@ -663,9 +682,10 @@ class MovieService: source_directory.rename(new_source_path) except Exception as e: log.error(f"Failed to rename {source_directory} to {new_source_path}: {e}") - raise Exception("Failed to rename directory") from e + msg = "Failed to rename directory" + raise Exception(msg) from e - video_files, subtitle_files, all_files = get_files_for_import( + video_files, subtitle_files, _all_files = get_files_for_import( directory=new_source_path ) @@ -681,7 +701,7 @@ class MovieService: movie_id=movie.id, file_path_suffix="IMPORTED", torrent_id=None, - quality=Quality.unknown + quality=Quality.unknown, ) ) @@ -701,7 +721,7 @@ class MovieService: # Use stored original_language preference for metadata fetching fresh_movie_data = metadata_provider.get_movie_metadata( - id=db_movie.external_id, language=db_movie.original_language + movie_id=db_movie.external_id, language=db_movie.original_language ) if not fresh_movie_data: log.warning( @@ -856,7 +876,7 @@ def update_all_movies_metadata() -> None: continue except InvalidConfigError as e: log.error( - f"Error initializing metadata provider {movie.metadata_provider} for movie {movie.name}: {str(e)}" + f"Error initializing metadata provider {movie.metadata_provider} for movie {movie.name}: {e}" ) continue movie_service.update_movie_metadata( diff --git a/media_manager/notification/manager.py b/media_manager/notification/manager.py index 0453297..a1e95ef 100644 --- a/media_manager/notification/manager.py +++ b/media_manager/notification/manager.py @@ -4,8 +4,10 @@ Notification Manager - Orchestrates sending notifications through all configured import logging from typing import List + +from media_manager.config import MediaManagerConfig from media_manager.notification.schemas import MessageNotification -from media_manager.notification.service_providers.abstractNotificationServiceProvider import ( +from 
media_manager.notification.service_providers.abstract_notification_service_provider import ( AbstractNotificationServiceProvider, ) from media_manager.notification.service_providers.email import ( @@ -20,7 +22,6 @@ from media_manager.notification.service_providers.ntfy import ( from media_manager.notification.service_providers.pushover import ( PushoverNotificationServiceProvider, ) -from media_manager.config import MediaManagerConfig logger = logging.getLogger(__name__) diff --git a/media_manager/notification/repository.py b/media_manager/notification/repository.py index 577baad..a080176 100644 --- a/media_manager/notification/repository.py +++ b/media_manager/notification/repository.py @@ -1,17 +1,20 @@ -from sqlalchemy import select, delete, update +import logging + +from sqlalchemy import delete, select, update from sqlalchemy.exc import ( IntegrityError, SQLAlchemyError, ) from sqlalchemy.orm import Session -import logging -from media_manager.exceptions import NotFoundError, ConflictError +from media_manager.exceptions import ConflictError, NotFoundError from media_manager.notification.models import Notification from media_manager.notification.schemas import ( - NotificationId, Notification as NotificationSchema, ) +from media_manager.notification.schemas import ( + NotificationId, +) log = logging.getLogger(__name__) @@ -20,11 +23,12 @@ class NotificationRepository: def __init__(self, db: Session): self.db = db - def get_notification(self, id: NotificationId) -> NotificationSchema: - result = self.db.get(Notification, id) + def get_notification(self, nid: NotificationId) -> NotificationSchema: + result = self.db.get(Notification, nid) if not result: - raise NotFoundError(f"Notification with id {id} not found.") + msg = f"Notification with id {nid} not found." 
+ raise NotFoundError(msg) return NotificationSchema.model_validate(result) @@ -69,25 +73,25 @@ class NotificationRepository: self.db.commit() except IntegrityError as e: log.error(f"Could not save notification, Error: {e}") - raise ConflictError( - f"Notification with id {notification.id} already exists." - ) + msg = f"Notification with id {notification.id} already exists." + raise ConflictError(msg) from None return - def mark_notification_as_read(self, id: NotificationId) -> None: - stmt = update(Notification).where(Notification.id == id).values(read=True) + def mark_notification_as_read(self, nid: NotificationId) -> None: + stmt = update(Notification).where(Notification.id == nid).values(read=True) self.db.execute(stmt) return - def mark_notification_as_unread(self, id: NotificationId) -> None: - stmt = update(Notification).where(Notification.id == id).values(read=False) + def mark_notification_as_unread(self, nid: NotificationId) -> None: + stmt = update(Notification).where(Notification.id == nid).values(read=False) self.db.execute(stmt) return - def delete_notification(self, id: NotificationId) -> None: - stmt = delete(Notification).where(Notification.id == id) + def delete_notification(self, nid: NotificationId) -> None: + stmt = delete(Notification).where(Notification.id == nid) result = self.db.execute(stmt) if result.rowcount == 0: - raise NotFoundError(f"Notification with id {id} not found.") + msg = f"Notification with id {nid} not found." 
+ raise NotFoundError(msg) self.db.commit() return diff --git a/media_manager/notification/router.py b/media_manager/notification/router.py index bdc88d6..227723e 100644 --- a/media_manager/notification/router.py +++ b/media_manager/notification/router.py @@ -1,8 +1,8 @@ from fastapi import APIRouter, Depends, status from media_manager.auth.users import current_active_user -from media_manager.notification.schemas import Notification, NotificationId from media_manager.notification.dependencies import notification_service_dep +from media_manager.notification.schemas import Notification, NotificationId router = APIRouter() @@ -15,9 +15,10 @@ router = APIRouter() @router.get( "", dependencies=[Depends(current_active_user)], - response_model=list[Notification], ) -def get_all_notifications(notification_service: notification_service_dep): +def get_all_notifications( + notification_service: notification_service_dep, +) -> list[Notification]: """ Get all notifications. """ @@ -27,9 +28,10 @@ def get_all_notifications(notification_service: notification_service_dep): @router.get( "/unread", dependencies=[Depends(current_active_user)], - response_model=list[Notification], ) -def get_unread_notifications(notification_service: notification_service_dep): +def get_unread_notifications( + notification_service: notification_service_dep, +) -> list[Notification]: """ Get all unread notifications. """ @@ -39,18 +41,17 @@ def get_unread_notifications(notification_service: notification_service_dep): @router.get( "/{notification_id}", dependencies=[Depends(current_active_user)], - response_model=Notification, responses={ status.HTTP_404_NOT_FOUND: {"description": "Notification not found"}, }, ) def get_notification( notification_id: NotificationId, notification_service: notification_service_dep -): +) -> Notification: """ Get a specific notification by ID. 
""" - return notification_service.get_notification(id=notification_id) + return notification_service.get_notification(nid=notification_id) # -------------------------------- @@ -72,7 +73,7 @@ def mark_notification_as_read( """ Mark a notification as read. """ - notification_service.mark_notification_as_read(id=notification_id) + notification_service.mark_notification_as_read(nid=notification_id) @router.patch( @@ -89,7 +90,7 @@ def mark_notification_as_unread( """ Mark a notification as unread. """ - notification_service.mark_notification_as_unread(id=notification_id) + notification_service.mark_notification_as_unread(nid=notification_id) @router.delete( @@ -106,4 +107,4 @@ def delete_notification( """ Delete a notification. """ - notification_service.delete_notification(id=notification_id) + notification_service.delete_notification(nid=notification_id) diff --git a/media_manager/notification/schemas.py b/media_manager/notification/schemas.py index d3fe06c..1e3bf8a 100644 --- a/media_manager/notification/schemas.py +++ b/media_manager/notification/schemas.py @@ -3,8 +3,7 @@ import uuid from datetime import datetime from uuid import UUID -from pydantic import BaseModel, Field, ConfigDict - +from pydantic import BaseModel, ConfigDict, Field NotificationId = typing.NewType("NotificationId", UUID) diff --git a/media_manager/notification/service.py b/media_manager/notification/service.py index 211b4af..a97515e 100644 --- a/media_manager/notification/service.py +++ b/media_manager/notification/service.py @@ -1,6 +1,6 @@ -from media_manager.notification.repository import NotificationRepository -from media_manager.notification.schemas import NotificationId, Notification from media_manager.notification.manager import notification_manager +from media_manager.notification.repository import NotificationRepository +from media_manager.notification.schemas import Notification, NotificationId class NotificationService: @@ -11,8 +11,8 @@ class NotificationService: 
self.notification_repository = notification_repository self.notification_manager = notification_manager - def get_notification(self, id: NotificationId) -> Notification: - return self.notification_repository.get_notification(id=id) + def get_notification(self, nid: NotificationId) -> Notification: + return self.notification_repository.get_notification(nid=nid) def get_unread_notifications(self) -> list[Notification]: return self.notification_repository.get_unread_notifications() @@ -23,14 +23,14 @@ class NotificationService: def save_notification(self, notification: Notification) -> None: return self.notification_repository.save_notification(notification) - def mark_notification_as_read(self, id: NotificationId) -> None: - return self.notification_repository.mark_notification_as_read(id=id) + def mark_notification_as_read(self, nid: NotificationId) -> None: + return self.notification_repository.mark_notification_as_read(nid=nid) - def mark_notification_as_unread(self, id: NotificationId) -> None: - return self.notification_repository.mark_notification_as_unread(id=id) + def mark_notification_as_unread(self, nid: NotificationId) -> None: + return self.notification_repository.mark_notification_as_unread(nid=nid) - def delete_notification(self, id: NotificationId) -> None: - return self.notification_repository.delete_notification(id=id) + def delete_notification(self, nid: NotificationId) -> None: + return self.notification_repository.delete_notification(nid=nid) def send_notification_to_all_providers(self, title: str, message: str) -> None: self.notification_manager.send_notification(title, message) diff --git a/media_manager/notification/service_providers/abstractNotificationServiceProvider.py b/media_manager/notification/service_providers/abstract_notification_service_provider.py similarity index 77% rename from media_manager/notification/service_providers/abstractNotificationServiceProvider.py rename to 
media_manager/notification/service_providers/abstract_notification_service_provider.py index 1bc3ec9..a4a33e9 100644 --- a/media_manager/notification/service_providers/abstractNotificationServiceProvider.py +++ b/media_manager/notification/service_providers/abstract_notification_service_provider.py @@ -1,9 +1,10 @@ -import abc +from abc import ABC, abstractmethod + from media_manager.notification.schemas import MessageNotification -class AbstractNotificationServiceProvider(abc.ABC): - @abc.abstractmethod +class AbstractNotificationServiceProvider(ABC): + @abstractmethod def send_notification(self, message: MessageNotification) -> bool: """ Sends a notification with the given message. @@ -11,4 +12,3 @@ class AbstractNotificationServiceProvider(abc.ABC): :param message: The message to send in the notification. :return: True if the notification was sent successfully, False otherwise. """ - pass diff --git a/media_manager/notification/service_providers/email.py b/media_manager/notification/service_providers/email.py index c52453c..9339252 100644 --- a/media_manager/notification/service_providers/email.py +++ b/media_manager/notification/service_providers/email.py @@ -1,9 +1,9 @@ import media_manager.notification.utils +from media_manager.config import MediaManagerConfig from media_manager.notification.schemas import MessageNotification -from media_manager.notification.service_providers.abstractNotificationServiceProvider import ( +from media_manager.notification.service_providers.abstract_notification_service_provider import ( AbstractNotificationServiceProvider, ) -from media_manager.config import MediaManagerConfig class EmailNotificationServiceProvider(AbstractNotificationServiceProvider): diff --git a/media_manager/notification/service_providers/gotify.py b/media_manager/notification/service_providers/gotify.py index fd04807..13610c7 100644 --- a/media_manager/notification/service_providers/gotify.py +++ b/media_manager/notification/service_providers/gotify.py @@ 
-2,7 +2,7 @@ import requests from media_manager.config import MediaManagerConfig from media_manager.notification.schemas import MessageNotification -from media_manager.notification.service_providers.abstractNotificationServiceProvider import ( +from media_manager.notification.service_providers.abstract_notification_service_provider import ( AbstractNotificationServiceProvider, ) @@ -22,6 +22,7 @@ class GotifyNotificationServiceProvider(AbstractNotificationServiceProvider): "message": message.message, "title": message.title, }, + timeout=60, ) if response.status_code not in range(200, 300): return False diff --git a/media_manager/notification/service_providers/ntfy.py b/media_manager/notification/service_providers/ntfy.py index 6de4601..a5e8439 100644 --- a/media_manager/notification/service_providers/ntfy.py +++ b/media_manager/notification/service_providers/ntfy.py @@ -2,7 +2,7 @@ import requests from media_manager.config import MediaManagerConfig from media_manager.notification.schemas import MessageNotification -from media_manager.notification.service_providers.abstractNotificationServiceProvider import ( +from media_manager.notification.service_providers.abstract_notification_service_provider import ( AbstractNotificationServiceProvider, ) @@ -22,6 +22,7 @@ class NtfyNotificationServiceProvider(AbstractNotificationServiceProvider): headers={ "Title": "MediaManager - " + message.title, }, + timeout=60, ) if response.status_code not in range(200, 300): return False diff --git a/media_manager/notification/service_providers/pushover.py b/media_manager/notification/service_providers/pushover.py index b3e1235..acee023 100644 --- a/media_manager/notification/service_providers/pushover.py +++ b/media_manager/notification/service_providers/pushover.py @@ -2,7 +2,7 @@ import requests from media_manager.config import MediaManagerConfig from media_manager.notification.schemas import MessageNotification -from 
media_manager.notification.service_providers.abstractNotificationServiceProvider import ( +from media_manager.notification.service_providers.abstract_notification_service_provider import ( AbstractNotificationServiceProvider, ) @@ -20,6 +20,7 @@ class PushoverNotificationServiceProvider(AbstractNotificationServiceProvider): "message": message.message, "title": "MediaManager - " + message.title, }, + timeout=60, ) if response.status_code not in range(200, 300): return False diff --git a/media_manager/scheduler.py b/media_manager/scheduler.py index 52a601f..5d2e152 100644 --- a/media_manager/scheduler.py +++ b/media_manager/scheduler.py @@ -1,20 +1,21 @@ +from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore from apscheduler.schedulers.background import BackgroundScheduler from apscheduler.triggers.cron import CronTrigger -from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore + import media_manager.database +from media_manager.movies.service import ( + auto_download_all_approved_movie_requests, + import_all_movie_torrents, + update_all_movies_metadata, +) from media_manager.tv.service import ( auto_download_all_approved_season_requests, import_all_show_torrents, update_all_non_ended_shows_metadata, ) -from media_manager.movies.service import ( - import_all_movie_torrents, - update_all_movies_metadata, - auto_download_all_approved_movie_requests, -) -def setup_scheduler(config, log): +def setup_scheduler(config): from media_manager.database import init_engine init_engine(config.database) diff --git a/media_manager/torrent/config.py b/media_manager/torrent/config.py index 7f6e359..30339a0 100644 --- a/media_manager/torrent/config.py +++ b/media_manager/torrent/config.py @@ -6,7 +6,7 @@ class QbittorrentConfig(BaseSettings): host: str = "localhost" port: int = 8080 username: str = "admin" - password: str = "admin" + password: str = "admin" # noqa: S105 enabled: bool = False category_name: str = "MediaManager" diff --git 
a/media_manager/torrent/dependencies.py b/media_manager/torrent/dependencies.py index e9673b4..e8509ce 100644 --- a/media_manager/torrent/dependencies.py +++ b/media_manager/torrent/dependencies.py @@ -1,14 +1,14 @@ from typing import Annotated from fastapi import Depends - -from media_manager.exceptions import NotFoundError -from media_manager.database import DbSessionDependency -from media_manager.torrent.service import TorrentService -from media_manager.torrent.repository import TorrentRepository -from media_manager.torrent.schemas import TorrentId, Torrent from fastapi.exceptions import HTTPException +from media_manager.database import DbSessionDependency +from media_manager.exceptions import NotFoundError +from media_manager.torrent.repository import TorrentRepository +from media_manager.torrent.schemas import Torrent, TorrentId +from media_manager.torrent.service import TorrentService + def get_torrent_repository(db: DbSessionDependency) -> TorrentRepository: return TorrentRepository(db=db) @@ -39,7 +39,7 @@ def get_torrent_by_id( except NotFoundError: raise HTTPException( status_code=404, detail=f"Torrent with ID {torrent_id} not found" - ) + ) from None return torrent diff --git a/media_manager/torrent/download_clients/abstractDownloadClient.py b/media_manager/torrent/download_clients/abstract_download_client.py similarity index 92% rename from media_manager/torrent/download_clients/abstractDownloadClient.py rename to media_manager/torrent/download_clients/abstract_download_client.py index d2d521e..eecba94 100644 --- a/media_manager/torrent/download_clients/abstractDownloadClient.py +++ b/media_manager/torrent/download_clients/abstract_download_client.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod from media_manager.indexer.schemas import IndexerQueryResult -from media_manager.torrent.schemas import TorrentStatus, Torrent +from media_manager.torrent.schemas import Torrent, TorrentStatus class AbstractDownloadClient(ABC): @@ -23,7 +23,6 @@ class 
AbstractDownloadClient(ABC): :param torrent: The indexer query result of the torrent file to download. :return: The torrent object with calculated hash and initial status. """ - pass @abstractmethod def remove_torrent(self, torrent: Torrent, delete_data: bool = False) -> None: @@ -33,7 +32,6 @@ class AbstractDownloadClient(ABC): :param torrent: The torrent to remove. :param delete_data: Whether to delete the downloaded data. """ - pass @abstractmethod def get_torrent_status(self, torrent: Torrent) -> TorrentStatus: @@ -43,7 +41,6 @@ class AbstractDownloadClient(ABC): :param torrent: The torrent to get the status of. :return: The status of the torrent. """ - pass @abstractmethod def pause_torrent(self, torrent: Torrent) -> None: @@ -52,7 +49,6 @@ class AbstractDownloadClient(ABC): :param torrent: The torrent to pause. """ - pass @abstractmethod def resume_torrent(self, torrent: Torrent) -> None: @@ -61,4 +57,3 @@ class AbstractDownloadClient(ABC): :param torrent: The torrent to resume. """ - pass diff --git a/media_manager/torrent/download_clients/qbittorrent.py b/media_manager/torrent/download_clients/qbittorrent.py index afef952..538c3c5 100644 --- a/media_manager/torrent/download_clients/qbittorrent.py +++ b/media_manager/torrent/download_clients/qbittorrent.py @@ -5,10 +5,10 @@ from qbittorrentapi import Conflict409Error from media_manager.config import MediaManagerConfig from media_manager.indexer.schemas import IndexerQueryResult -from media_manager.torrent.download_clients.abstractDownloadClient import ( +from media_manager.torrent.download_clients.abstract_download_client import ( AbstractDownloadClient, ) -from media_manager.torrent.schemas import TorrentStatus, Torrent +from media_manager.torrent.schemas import Torrent, TorrentStatus from media_manager.torrent.utils import get_torrent_hash log = logging.getLogger(__name__) @@ -102,9 +102,8 @@ class QbittorrentDownloadClient(AbstractDownloadClient): log.error( f"Failed to download torrent, API-Answer isn't 
'Ok.'; API Answer: {answer}" ) - raise RuntimeError( - f"Failed to download torrent, API-Answer isn't 'Ok.'; API Answer: {answer}" - ) + msg = f"Failed to download torrent, API-Answer isn't 'Ok.'; API Answer: {answer}" + raise RuntimeError(msg) log.info(f"Successfully processed torrent: {indexer_result.title}") @@ -154,19 +153,17 @@ class QbittorrentDownloadClient(AbstractDownloadClient): if not info: log.warning(f"No information found for torrent: {torrent.id}") return TorrentStatus.unknown - else: - state: str = info[0]["state"] + state: str = info[0]["state"] - if state in self.DOWNLOADING_STATE: - return TorrentStatus.downloading - elif state in self.FINISHED_STATE: - return TorrentStatus.finished - elif state in self.ERROR_STATE: - return TorrentStatus.error - elif state in self.UNKNOWN_STATE: - return TorrentStatus.unknown - else: - return TorrentStatus.error + if state in self.DOWNLOADING_STATE: + return TorrentStatus.downloading + if state in self.FINISHED_STATE: + return TorrentStatus.finished + if state in self.ERROR_STATE: + return TorrentStatus.error + if state in self.UNKNOWN_STATE: + return TorrentStatus.unknown + return TorrentStatus.error def pause_torrent(self, torrent: Torrent) -> None: """ diff --git a/media_manager/torrent/download_clients/sabnzbd.py b/media_manager/torrent/download_clients/sabnzbd.py index 70ed67d..af83c6f 100644 --- a/media_manager/torrent/download_clients/sabnzbd.py +++ b/media_manager/torrent/download_clients/sabnzbd.py @@ -1,12 +1,13 @@ import logging +import sabnzbd_api + from media_manager.config import MediaManagerConfig from media_manager.indexer.schemas import IndexerQueryResult -from media_manager.torrent.download_clients.abstractDownloadClient import ( +from media_manager.torrent.download_clients.abstract_download_client import ( AbstractDownloadClient, ) from media_manager.torrent.schemas import Torrent, TorrentStatus -import sabnzbd_api log = logging.getLogger(__name__) @@ -56,7 +57,8 @@ class 
SabnzbdDownloadClient(AbstractDownloadClient): if not response["status"]: error_msg = response log.error(f"Failed to add NZB to SABnzbd: {error_msg}") - raise RuntimeError(f"Failed to add NZB to SABnzbd: {error_msg}") + msg = f"Failed to add NZB to SABnzbd: {error_msg}" + raise RuntimeError(msg) # Generate a hash for the NZB (using title and download URL) nzo_id = response["nzo_ids"][0] @@ -137,9 +139,8 @@ class SabnzbdDownloadClient(AbstractDownloadClient): """ if sabnzbd_status in self.DOWNLOADING_STATE: return TorrentStatus.downloading - elif sabnzbd_status in self.FINISHED_STATE: + if sabnzbd_status in self.FINISHED_STATE: return TorrentStatus.finished - elif sabnzbd_status in self.ERROR_STATE: + if sabnzbd_status in self.ERROR_STATE: return TorrentStatus.error - else: - return TorrentStatus.unknown + return TorrentStatus.unknown diff --git a/media_manager/torrent/download_clients/transmission.py b/media_manager/torrent/download_clients/transmission.py index 7216231..509c2e0 100644 --- a/media_manager/torrent/download_clients/transmission.py +++ b/media_manager/torrent/download_clients/transmission.py @@ -1,12 +1,14 @@ import logging +from types import MappingProxyType import transmission_rpc + from media_manager.config import MediaManagerConfig from media_manager.indexer.schemas import IndexerQueryResult -from media_manager.torrent.download_clients.abstractDownloadClient import ( +from media_manager.torrent.download_clients.abstract_download_client import ( AbstractDownloadClient, ) -from media_manager.torrent.schemas import TorrentStatus, Torrent +from media_manager.torrent.schemas import Torrent, TorrentStatus from media_manager.torrent.utils import get_torrent_hash log = logging.getLogger(__name__) @@ -16,15 +18,17 @@ class TransmissionDownloadClient(AbstractDownloadClient): name = "transmission" # Transmission status mappings - STATUS_MAPPING = { - "stopped": TorrentStatus.unknown, - "check pending": TorrentStatus.downloading, - "checking": 
TorrentStatus.downloading, - "download pending": TorrentStatus.downloading, - "downloading": TorrentStatus.downloading, - "seed pending": TorrentStatus.finished, - "seeding": TorrentStatus.finished, - } + STATUS_MAPPING = MappingProxyType( + { + "stopped": TorrentStatus.unknown, + "check pending": TorrentStatus.downloading, + "checking": TorrentStatus.downloading, + "download pending": TorrentStatus.downloading, + "downloading": TorrentStatus.downloading, + "seed pending": TorrentStatus.finished, + "seeding": TorrentStatus.finished, + } + ) def __init__(self): self.config = MediaManagerConfig().torrents.transmission diff --git a/media_manager/torrent/manager.py b/media_manager/torrent/manager.py index 053ddcd..e76dad2 100644 --- a/media_manager/torrent/manager.py +++ b/media_manager/torrent/manager.py @@ -3,14 +3,14 @@ from enum import Enum from media_manager.config import MediaManagerConfig from media_manager.indexer.schemas import IndexerQueryResult -from media_manager.torrent.download_clients.abstractDownloadClient import ( +from media_manager.torrent.download_clients.abstract_download_client import ( AbstractDownloadClient, ) from media_manager.torrent.download_clients.qbittorrent import QbittorrentDownloadClient +from media_manager.torrent.download_clients.sabnzbd import SabnzbdDownloadClient from media_manager.torrent.download_clients.transmission import ( TransmissionDownloadClient, ) -from media_manager.torrent.download_clients.sabnzbd import SabnzbdDownloadClient from media_manager.torrent.schemas import Torrent, TorrentStatus log = logging.getLogger(__name__) @@ -79,12 +79,13 @@ class DownloadManager: # Use the usenet flag from the indexer result to determine the client type if indexer_result.usenet: if not self._usenet_client: - raise RuntimeError("No usenet download client configured") + msg = "No usenet download client configured" + raise RuntimeError(msg) return self._usenet_client - else: - if not self._torrent_client: - raise RuntimeError("No 
torrent download client configured") - return self._torrent_client + if not self._torrent_client: + msg = "No torrent download client configured" + raise RuntimeError(msg) + return self._torrent_client def download(self, indexer_result: IndexerQueryResult) -> Torrent: """ diff --git a/media_manager/torrent/repository.py b/media_manager/torrent/repository.py index a09451a..aba9fc0 100644 --- a/media_manager/torrent/repository.py +++ b/media_manager/torrent/repository.py @@ -1,16 +1,20 @@ -from sqlalchemy import select, delete +from sqlalchemy import delete, select from media_manager.database import DbSessionDependency -from media_manager.torrent.models import Torrent -from media_manager.torrent.schemas import TorrentId, Torrent as TorrentSchema -from media_manager.tv.models import SeasonFile, Show, Season -from media_manager.tv.schemas import SeasonFile as SeasonFileSchema, Show as ShowSchema from media_manager.exceptions import NotFoundError from media_manager.movies.models import Movie, MovieFile from media_manager.movies.schemas import ( Movie as MovieSchema, +) +from media_manager.movies.schemas import ( MovieFile as MovieFileSchema, ) +from media_manager.torrent.models import Torrent +from media_manager.torrent.schemas import Torrent as TorrentSchema +from media_manager.torrent.schemas import TorrentId +from media_manager.tv.models import Season, SeasonFile, Show +from media_manager.tv.schemas import SeasonFile as SeasonFileSchema +from media_manager.tv.schemas import Show as ShowSchema class TorrentRepository: @@ -52,7 +56,8 @@ class TorrentRepository: def get_torrent_by_id(self, torrent_id: TorrentId) -> TorrentSchema: result = self.db.get(Torrent, torrent_id) if result is None: - raise NotFoundError(f"Torrent with ID {torrent_id} not found.") + msg = f"Torrent with ID {torrent_id} not found." 
+ raise NotFoundError(msg) return TorrentSchema.model_validate(result) def delete_torrent( diff --git a/media_manager/torrent/router.py b/media_manager/torrent/router.py index fa0b900..abb03c3 100644 --- a/media_manager/torrent/router.py +++ b/media_manager/torrent/router.py @@ -1,14 +1,12 @@ +from fastapi import APIRouter, status from fastapi.exceptions import HTTPException - -from fastapi import APIRouter -from fastapi import status from fastapi.params import Depends from media_manager.auth.users import current_active_user, current_superuser from media_manager.torrent.dependencies import ( - torrent_service_dep, torrent_dep, torrent_repository_dep, + torrent_service_dep, ) from media_manager.torrent.schemas import Torrent, TorrentStatus @@ -19,14 +17,13 @@ router = APIRouter() "", status_code=status.HTTP_200_OK, dependencies=[Depends(current_active_user)], - response_model=list[Torrent], ) -def get_all_torrents(service: torrent_service_dep): +def get_all_torrents(service: torrent_service_dep) -> list[Torrent]: return service.get_all_torrents() -@router.get("/{torrent_id}", status_code=status.HTTP_200_OK, response_model=Torrent) -def get_torrent(service: torrent_service_dep, torrent: torrent_dep): +@router.get("/{torrent_id}", status_code=status.HTTP_200_OK) +def get_torrent(service: torrent_service_dep, torrent: torrent_dep) -> Torrent: return service.get_torrent_by_id(torrent_id=torrent.id) @@ -66,14 +63,13 @@ def retry_torrent_download( "/{torrent_id}/status", status_code=status.HTTP_200_OK, dependencies=[Depends(current_superuser)], - response_model=Torrent, ) def update_torrent_status( rep: torrent_repository_dep, torrent: torrent_dep, state: TorrentStatus | None = None, imported: bool | None = None, -): +) -> Torrent: if imported is not None: torrent.imported = imported if state is not None: diff --git a/media_manager/torrent/schemas.py b/media_manager/torrent/schemas.py index dfcca71..a481602 100644 --- a/media_manager/torrent/schemas.py +++ 
b/media_manager/torrent/schemas.py @@ -2,7 +2,7 @@ import typing import uuid from enum import Enum -from pydantic import ConfigDict, BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field TorrentId = typing.NewType("TorrentId", uuid.UUID) diff --git a/media_manager/torrent/service.py b/media_manager/torrent/service.py index 6181fdf..f27eaf9 100644 --- a/media_manager/torrent/service.py +++ b/media_manager/torrent/service.py @@ -1,11 +1,11 @@ import logging from media_manager.indexer.schemas import IndexerQueryResult +from media_manager.movies.schemas import Movie from media_manager.torrent.manager import DownloadManager from media_manager.torrent.repository import TorrentRepository from media_manager.torrent.schemas import Torrent, TorrentId from media_manager.tv.schemas import SeasonFile, Show -from media_manager.movies.schemas import Movie log = logging.getLogger(__name__) diff --git a/media_manager/torrent/utils.py b/media_manager/torrent/utils.py index a3a614b..5ef54d5 100644 --- a/media_manager/torrent/utils.py +++ b/media_manager/torrent/utils.py @@ -2,13 +2,13 @@ import hashlib import logging import mimetypes import re -from pathlib import Path, UnsupportedOperation import shutil +from pathlib import Path, UnsupportedOperation import bencoder +import libtorrent import patoolib import requests -import libtorrent from requests.exceptions import InvalidSchema from media_manager.config import MediaManagerConfig @@ -19,7 +19,7 @@ from media_manager.torrent.schemas import Torrent log = logging.getLogger(__name__) -def list_files_recursively(path: Path = Path(".")) -> list[Path]: +def list_files_recursively(path: Path = Path()) -> list[Path]: files = list(path.glob("**/*")) log.debug(f"Found {len(files)} entries via glob") valid_files = [] @@ -151,21 +151,19 @@ def get_torrent_hash(torrent: IndexerQueryResult) -> str: session=requests.Session(), timeout=MediaManagerConfig().indexers.prowlarr.timeout_seconds, ) - torrent_hash = 
str(libtorrent.parse_magnet_uri(final_url).info_hash) - return torrent_hash + return str(libtorrent.parse_magnet_uri(final_url).info_hash) except Exception as e: log.error(f"Failed to download torrent file: {e}") raise # saving the torrent file - with open(torrent_filepath, "wb") as file: - file.write(torrent_content) + torrent_filepath.write_bytes(torrent_content) # parsing info hash log.debug(f"parsing torrent file: {torrent.download_url}") try: decoded_content = bencoder.decode(torrent_content) - torrent_hash = hashlib.sha1( + torrent_hash = hashlib.sha1( # noqa: S324 bencoder.encode(decoded_content[b"info"]) ).hexdigest() except Exception as e: @@ -185,9 +183,7 @@ def remove_special_characters(filename: str) -> str: sanitized = re.sub(r"([<>:\"/\\|?*])", "", filename) # Remove leading and trailing dots or spaces - sanitized = sanitized.strip(" .") - - return sanitized + return sanitized.strip(" .") def remove_special_chars_and_parentheses(title: str) -> str: @@ -211,26 +207,25 @@ def remove_special_chars_and_parentheses(title: str) -> str: sanitized = remove_special_characters(sanitized) # Collapse multiple whitespace characters and trim the result - sanitized = re.sub(r"\s+", " ", sanitized).strip() - return sanitized + return re.sub(r"\s+", " ", sanitized).strip() def get_importable_media_directories(path: Path) -> list[Path]: - libraries = [] - libraries.extend(MediaManagerConfig().misc.movie_libraries) - libraries.extend(MediaManagerConfig().misc.tv_libraries) + libraries = [ + *MediaManagerConfig().misc.movie_libraries, + *MediaManagerConfig().misc.tv_libraries, + ] library_paths = {Path(library.path).absolute() for library in libraries} unfiltered_dirs = [d for d in path.glob("*") if d.is_dir()] - media_dirs = [] - for media_dir in unfiltered_dirs: - if media_dir.absolute() not in library_paths and not media_dir.name.startswith( - "." 
- ): - media_dirs.append(media_dir) - return media_dirs + return [ + media_dir + for media_dir in unfiltered_dirs + if media_dir.absolute() not in library_paths + and not media_dir.name.startswith(".") + ] def extract_external_id_from_string(input_string: str) -> tuple[str | None, int | None]: diff --git a/media_manager/tv/dependencies.py b/media_manager/tv/dependencies.py index 527aff3..421bf24 100644 --- a/media_manager/tv/dependencies.py +++ b/media_manager/tv/dependencies.py @@ -1,16 +1,15 @@ from typing import Annotated -from fastapi import Depends, Path +from fastapi import Depends, HTTPException, Path from media_manager.database import DbSessionDependency -from media_manager.tv.repository import TvRepository -from media_manager.tv.schemas import Show, ShowId, SeasonId, Season -from media_manager.tv.service import TvService from media_manager.exceptions import NotFoundError -from fastapi import HTTPException from media_manager.indexer.dependencies import indexer_service_dep -from media_manager.torrent.dependencies import torrent_service_dep from media_manager.notification.dependencies import notification_service_dep +from media_manager.torrent.dependencies import torrent_service_dep +from media_manager.tv.repository import TvRepository +from media_manager.tv.schemas import Season, SeasonId, Show, ShowId +from media_manager.tv.service import TvService def get_tv_repository(db_session: DbSessionDependency) -> TvRepository: @@ -47,7 +46,7 @@ def get_show_by_id( raise HTTPException( status_code=404, detail=f"Show with ID {show_id} not found.", - ) + ) from None return show @@ -64,7 +63,7 @@ def get_season_by_id( raise HTTPException( status_code=404, detail=f"Season with ID {season_id} not found.", - ) + ) from None return season diff --git a/media_manager/tv/repository.py b/media_manager/tv/repository.py index a5390b6..c0529b3 100644 --- a/media_manager/tv/repository.py +++ b/media_manager/tv/repository.py @@ -1,27 +1,40 @@ -from sqlalchemy import select, delete, 
func +from sqlalchemy import delete, func, select from sqlalchemy.exc import ( IntegrityError, SQLAlchemyError, ) from sqlalchemy.orm import Session, joinedload +from media_manager.exceptions import ConflictError, NotFoundError from media_manager.torrent.models import Torrent -from media_manager.torrent.schemas import TorrentId, Torrent as TorrentSchema +from media_manager.torrent.schemas import Torrent as TorrentSchema +from media_manager.torrent.schemas import TorrentId from media_manager.tv import log -from media_manager.tv.models import Season, Show, Episode, SeasonRequest, SeasonFile -from media_manager.exceptions import NotFoundError, ConflictError +from media_manager.tv.models import Episode, Season, SeasonFile, SeasonRequest, Show from media_manager.tv.schemas import ( - Season as SeasonSchema, - SeasonId, - Show as ShowSchema, - ShowId, Episode as EpisodeSchema, - SeasonRequest as SeasonRequestSchema, - SeasonFile as SeasonFileSchema, +) +from media_manager.tv.schemas import ( + EpisodeId, + SeasonId, SeasonNumber, SeasonRequestId, + ShowId, +) +from media_manager.tv.schemas import ( RichSeasonRequest as RichSeasonRequestSchema, - EpisodeId, +) +from media_manager.tv.schemas import ( + Season as SeasonSchema, +) +from media_manager.tv.schemas import ( + SeasonFile as SeasonFileSchema, +) +from media_manager.tv.schemas import ( + SeasonRequest as SeasonRequestSchema, +) +from media_manager.tv.schemas import ( + Show as ShowSchema, ) @@ -51,7 +64,8 @@ class TvRepository: ) result = self.db.execute(stmt).unique().scalar_one_or_none() if not result: - raise NotFoundError(f"Show with id {show_id} not found.") + msg = f"Show with id {show_id} not found." 
+ raise NotFoundError(msg) return ShowSchema.model_validate(result) except SQLAlchemyError as e: log.error(f"Database error while retrieving show {show_id}: {e}") @@ -78,9 +92,8 @@ class TvRepository: ) result = self.db.execute(stmt).unique().scalar_one_or_none() if not result: - raise NotFoundError( - f"Show with external_id {external_id} and provider {metadata_provider} not found." - ) + msg = f"Show with external_id {external_id} and provider {metadata_provider} not found." + raise NotFoundError(msg) return ShowSchema.model_validate(result) except SQLAlchemyError as e: log.error( @@ -110,8 +123,7 @@ class TvRepository: stmt = ( select(func.count()).select_from(Episode).join(Season).join(SeasonFile) ) - total_count = self.db.execute(stmt).scalar_one_or_none() - return total_count + return self.db.execute(stmt).scalar_one_or_none() except SQLAlchemyError as e: log.error( f"Database error while calculating downloaded episodes count: {e}" @@ -178,9 +190,8 @@ class TvRepository: return ShowSchema.model_validate(db_show) except IntegrityError as e: self.db.rollback() - raise ConflictError( - f"Show with this primary key or unique constraint violation: {e.orig}" - ) from e + msg = f"Show with this primary key or unique constraint violation: {e.orig}" + raise ConflictError(msg) from e except SQLAlchemyError as e: self.db.rollback() log.error(f"Database error while saving show {show.name}: {e}") @@ -197,7 +208,8 @@ class TvRepository: try: show = self.db.get(Show, show_id) if not show: - raise NotFoundError(f"Show with id {show_id} not found.") + msg = f"Show with id {show_id} not found." + raise NotFoundError(msg) self.db.delete(show) self.db.commit() except SQLAlchemyError as e: @@ -217,7 +229,8 @@ class TvRepository: try: season = self.db.get(Season, season_id) if not season: - raise NotFoundError(f"Season with id {season_id} not found.") + msg = f"Season with id {season_id} not found." 
+ raise NotFoundError(msg) return SeasonSchema.model_validate(season) except SQLAlchemyError as e: log.error(f"Database error while retrieving season {season_id}: {e}") @@ -274,9 +287,8 @@ class TvRepository: result = self.db.execute(stmt) if result.rowcount == 0: self.db.rollback() - raise NotFoundError( - f"SeasonRequest with id {season_request_id} not found." - ) + msg = f"SeasonRequest with id {season_request_id} not found." + raise NotFoundError(msg) self.db.commit() except SQLAlchemyError as e: self.db.rollback() @@ -304,9 +316,8 @@ class TvRepository: ) result = self.db.execute(stmt).unique().scalar_one_or_none() if not result: - raise NotFoundError( - f"Season number {season_number} for show_id {show_id} not found." - ) + msg = f"Season number {season_number} for show_id {show_id} not found." + raise NotFoundError(msg) return SeasonSchema.model_validate(result) except SQLAlchemyError as e: log.error( @@ -382,8 +393,7 @@ class TvRepository: stmt = delete(SeasonFile).where(SeasonFile.torrent_id == torrent_id) result = self.db.execute(stmt) self.db.commit() - deleted_count = result.rowcount - return deleted_count + return result.rowcount except SQLAlchemyError as e: self.db.rollback() log.error( @@ -403,7 +413,8 @@ class TvRepository: try: show = self.db.get(Show, show_id) if not show: - raise NotFoundError(f"Show with id {show_id} not found.") + msg = f"Show with id {show_id} not found." + raise NotFoundError(msg) show.library = library self.db.commit() except SQLAlchemyError as e: @@ -514,9 +525,8 @@ class TvRepository: request = self.db.get(SeasonRequest, season_request_id) if not request: log.warning(f"Season request with id {season_request_id} not found.") - raise NotFoundError( - f"Season request with id {season_request_id} not found." - ) + msg = f"Season request with id {season_request_id} not found." 
+ raise NotFoundError(msg) return SeasonRequestSchema.model_validate(request) except SQLAlchemyError as e: log.error( @@ -542,7 +552,8 @@ class TvRepository: ) result = self.db.execute(stmt).unique().scalar_one_or_none() if not result: - raise NotFoundError(f"Show for season_id {season_id} not found.") + msg = f"Show for season_id {season_id} not found." + raise NotFoundError(msg) return ShowSchema.model_validate(result) except SQLAlchemyError as e: log.error(f"Database error retrieving show by season_id {season_id}: {e}") @@ -563,7 +574,8 @@ class TvRepository: """ db_show = self.db.get(Show, show_id) if not db_show: - raise NotFoundError(f"Show with id {show_id} not found.") + msg = f"Show with id {show_id} not found." + raise NotFoundError(msg) stmt = ( select(Season) @@ -612,7 +624,8 @@ class TvRepository: """ db_season = self.db.get(Season, season_id) if not db_season: - raise NotFoundError(f"Season with id {season_id} not found.") + msg = f"Season with id {season_id} not found." + raise NotFoundError(msg) stmt = ( select(Episode) @@ -660,7 +673,8 @@ class TvRepository: """ db_show = self.db.get(Show, show_id) if not db_show: - raise NotFoundError(f"Show with id {show_id} not found.") + msg = f"Show with id {show_id} not found." + raise NotFoundError(msg) updated = False if name is not None and db_show.name != name: @@ -705,7 +719,8 @@ class TvRepository: """ db_season = self.db.get(Season, season_id) if not db_season: - raise NotFoundError(f"Season with id {season_id} not found.") + msg = f"Season with id {season_id} not found." + raise NotFoundError(msg) updated = False if name is not None and db_season.name != name: @@ -735,7 +750,8 @@ class TvRepository: """ db_episode = self.db.get(Episode, episode_id) if not db_episode: - raise NotFoundError(f"Episode with id {episode_id} not found.") + msg = f"Episode with id {episode_id} not found." 
+ raise NotFoundError(msg) updated = False if title is not None and db_episode.title != title: diff --git a/media_manager/tv/router.py b/media_manager/tv/router.py index bf56e48..914cf16 100644 --- a/media_manager/tv/router.py +++ b/media_manager/tv/router.py @@ -1,16 +1,16 @@ from pathlib import Path from typing import Annotated -from fastapi import APIRouter, Depends, status, HTTPException +from fastapi import APIRouter, Depends, HTTPException, status from media_manager.auth.db import User from media_manager.auth.schemas import UserRead from media_manager.auth.users import current_active_user, current_superuser -from media_manager.config import MediaManagerConfig, LibraryItem -from media_manager.exceptions import MediaAlreadyExists +from media_manager.config import LibraryItem, MediaManagerConfig +from media_manager.exceptions import MediaAlreadyExistsError from media_manager.indexer.schemas import ( - IndexerQueryResultId, IndexerQueryResult, + IndexerQueryResultId, ) from media_manager.metadataProvider.dependencies import metadata_provider_dep from media_manager.metadataProvider.schemas import MetaDataProviderSearchResult @@ -24,17 +24,17 @@ from media_manager.tv.dependencies import ( tv_service_dep, ) from media_manager.tv.schemas import ( - Show, - SeasonRequest, - ShowId, - RichShowTorrent, - PublicShow, - PublicSeasonFile, CreateSeasonRequest, - SeasonRequestId, - UpdateSeasonRequest, + PublicSeasonFile, + PublicShow, RichSeasonRequest, + RichShowTorrent, Season, + SeasonRequest, + SeasonRequestId, + Show, + ShowId, + UpdateSeasonRequest, ) router = APIRouter() @@ -47,11 +47,10 @@ router = APIRouter() @router.get( "/search", dependencies=[Depends(current_active_user)], - response_model=list[MetaDataProviderSearchResult], ) def search_metadata_providers_for_a_show( tv_service: tv_service_dep, query: str, metadata_provider: metadata_provider_dep -): +) -> list[MetaDataProviderSearchResult]: """ Search for a show on the configured metadata provider. 
""" @@ -61,11 +60,10 @@ def search_metadata_providers_for_a_show( @router.get( "/recommended", dependencies=[Depends(current_active_user)], - response_model=list[MetaDataProviderSearchResult], ) def get_recommended_shows( tv_service: tv_service_dep, metadata_provider: metadata_provider_dep -): +) -> list[MetaDataProviderSearchResult]: """ Get a list of recommended/popular shows from the metadata provider. """ @@ -81,11 +79,10 @@ def get_recommended_shows( "/importable", status_code=status.HTTP_200_OK, dependencies=[Depends(current_superuser)], - response_model=list[MediaImportSuggestion], ) def get_all_importable_shows( tv_service: tv_service_dep, metadata_provider: metadata_provider_dep -): +) -> list[MediaImportSuggestion]: """ Get a list of unknown shows that were detected in the TV directory and are importable. """ @@ -117,9 +114,10 @@ def import_detected_show(tv_service: tv_service_dep, tv_show: show_dep, director @router.get( - "/shows", dependencies=[Depends(current_active_user)], response_model=list[Show] + "/shows", + dependencies=[Depends(current_active_user)], ) -def get_all_shows(tv_service: tv_service_dep): +def get_all_shows(tv_service: tv_service_dep) -> list[Show]: """ Get all shows in the library. """ @@ -152,7 +150,7 @@ def add_a_show( metadata_provider=metadata_provider, language=language, ) - except MediaAlreadyExists: + except MediaAlreadyExistsError: show = tv_service.get_show_by_external_id( show_id, metadata_provider=metadata_provider.name ) @@ -162,22 +160,19 @@ def add_a_show( @router.get( "/shows/torrents", dependencies=[Depends(current_active_user)], - response_model=list[RichShowTorrent], ) -def get_shows_with_torrents(tv_service: tv_service_dep): +def get_shows_with_torrents(tv_service: tv_service_dep) -> list[RichShowTorrent]: """ Get all shows that are associated with torrents. 
""" - result = tv_service.get_all_shows_with_torrents() - return result + return tv_service.get_all_shows_with_torrents() @router.get( "/shows/libraries", dependencies=[Depends(current_active_user)], - response_model=list[LibraryItem], ) -def get_available_libraries(): +def get_available_libraries() -> list[LibraryItem]: """ Get available TV libraries from configuration. """ @@ -192,7 +187,6 @@ def get_available_libraries(): @router.get( "/shows/{show_id}", dependencies=[Depends(current_active_user)], - response_model=PublicShow, ) def get_a_show(show: show_dep, tv_service: tv_service_dep) -> PublicShow: """ @@ -225,7 +219,6 @@ def delete_a_show( @router.post( "/shows/{show_id}/metadata", dependencies=[Depends(current_active_user)], - response_model=PublicShow, ) def update_shows_metadata( show: show_dep, tv_service: tv_service_dep, metadata_provider: metadata_provider_dep @@ -240,7 +233,6 @@ def update_shows_metadata( @router.post( "/shows/{show_id}/continuousDownload", dependencies=[Depends(current_superuser)], - response_model=PublicShow, ) def set_continuous_download( show: show_dep, tv_service: tv_service_dep, continuous_download: bool @@ -257,7 +249,6 @@ def set_continuous_download( @router.post( "/shows/{show_id}/library", dependencies=[Depends(current_superuser)], - response_model=None, status_code=status.HTTP_204_NO_CONTENT, ) def set_library( @@ -275,9 +266,8 @@ def set_library( @router.get( "/shows/{show_id}/torrents", dependencies=[Depends(current_active_user)], - response_model=RichShowTorrent, ) -def get_a_shows_torrents(show: show_dep, tv_service: tv_service_dep): +def get_a_shows_torrents(show: show_dep, tv_service: tv_service_dep) -> RichShowTorrent: """ Get torrents associated with a specific show. 
""" @@ -293,7 +283,6 @@ def get_a_shows_torrents(show: show_dep, tv_service: tv_service_dep): "/seasons/requests", status_code=status.HTTP_200_OK, dependencies=[Depends(current_active_user)], - response_model=list[RichSeasonRequest], ) def get_season_requests(tv_service: tv_service_dep) -> list[RichSeasonRequest]: """ @@ -379,14 +368,13 @@ def delete_season_request( tv_service.delete_season_request(season_request_id=request_id) log.info(f"User {user.id} deleted season request {request_id}.") return None - else: - log.warning( - f"User {user.id} tried to delete season request {request_id} but is not authorized." - ) - return HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="Not authorized to delete this request", - ) + log.warning( + f"User {user.id} tried to delete season request {request_id} but is not authorized." + ) + return HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Not authorized to delete this request", + ) # ----------------------------------------------------------------------------- @@ -397,7 +385,6 @@ def delete_season_request( @router.get( "/seasons/{season_id}", dependencies=[Depends(current_active_user)], - response_model=Season, ) def get_season(season: season_dep) -> Season: """ @@ -409,7 +396,6 @@ def get_season(season: season_dep) -> Season: @router.get( "/seasons/{season_id}/files", dependencies=[Depends(current_active_user)], - response_model=list[PublicSeasonFile], ) def get_season_files( season: season_dep, tv_service: tv_service_dep @@ -429,14 +415,13 @@ def get_season_files( "/torrents", status_code=status.HTTP_200_OK, dependencies=[Depends(current_superuser)], - response_model=list[IndexerQueryResult], ) def get_torrents_for_a_season( tv_service: tv_service_dep, show_id: ShowId, season_number: int = 1, - search_query_override: str = None, -): + search_query_override: str | None = None, +) -> list[IndexerQueryResult]: """ Search for torrents for a specific season of a show. 
Default season_number is 1 because it often returns multi-season torrents. @@ -451,7 +436,6 @@ def get_torrents_for_a_season( @router.post( "/torrents", status_code=status.HTTP_200_OK, - response_model=Torrent, dependencies=[Depends(current_superuser)], ) def download_a_torrent( @@ -459,7 +443,7 @@ def download_a_torrent( public_indexer_result_id: IndexerQueryResultId, show_id: ShowId, override_file_path_suffix: str = "", -): +) -> Torrent: """ Trigger a download for a specific torrent. """ @@ -478,11 +462,10 @@ def download_a_torrent( @router.get( "/episodes/count", status_code=status.HTTP_200_OK, - response_model=int, description="Total number of episodes downloaded", dependencies=[Depends(current_active_user)], ) -def get_total_count_of_downloaded_episodes(tv_service: tv_service_dep): +def get_total_count_of_downloaded_episodes(tv_service: tv_service_dep) -> int: """ Get the total count of downloaded episodes across all shows. """ diff --git a/media_manager/tv/schemas.py b/media_manager/tv/schemas.py index 6419b58..1e9b3be 100644 --- a/media_manager/tv/schemas.py +++ b/media_manager/tv/schemas.py @@ -2,7 +2,7 @@ import typing import uuid from uuid import UUID -from pydantic import BaseModel, Field, ConfigDict, model_validator +from pydantic import BaseModel, ConfigDict, Field, model_validator from media_manager.auth.schemas import UserRead from media_manager.torrent.models import Quality @@ -69,15 +69,13 @@ class SeasonRequestBase(BaseModel): @model_validator(mode="after") def ensure_wanted_quality_is_eq_or_gt_min_quality(self) -> "SeasonRequestBase": if self.min_quality.value < self.wanted_quality.value: - raise ValueError( - "wanted_quality must be equal to or lower than minimum_quality." - ) + msg = "wanted_quality must be equal to or lower than minimum_quality." 
+ raise ValueError(msg) return self class CreateSeasonRequest(SeasonRequestBase): season_id: SeasonId - pass class UpdateSeasonRequest(SeasonRequestBase): diff --git a/media_manager/tv/service.py b/media_manager/tv/service.py index 3252686..6f7978f 100644 --- a/media_manager/tv/service.py +++ b/media_manager/tv/service.py @@ -1,58 +1,63 @@ +import pprint import re import shutil +from pathlib import Path +from typing import overload from sqlalchemy.exc import IntegrityError from media_manager.config import MediaManagerConfig from media_manager.database import get_session -from media_manager.exceptions import InvalidConfigError +from media_manager.exceptions import InvalidConfigError, NotFoundError from media_manager.indexer.repository import IndexerRepository -from media_manager.indexer.schemas import IndexerQueryResult -from media_manager.indexer.schemas import IndexerQueryResultId -from media_manager.indexer.utils import evaluate_indexer_query_results -from media_manager.metadataProvider.schemas import MetaDataProviderSearchResult -from media_manager.notification.service import NotificationService -from media_manager.torrent.schemas import Torrent, TorrentStatus, Quality -from media_manager.torrent.service import TorrentService -from media_manager.tv import log -from media_manager.tv.schemas import ( - Show, - ShowId, - SeasonRequest, - SeasonFile, - SeasonId, - Season, - RichShowTorrent, - RichSeasonTorrent, - PublicSeason, - PublicShow, - PublicSeasonFile, - SeasonRequestId, - RichSeasonRequest, - EpisodeId, - Episode as EpisodeSchema, -) -from media_manager.torrent.schemas import QualityStrings -from media_manager.tv.repository import TvRepository -from media_manager.exceptions import NotFoundError -import pprint -from pathlib import Path -from media_manager.torrent.repository import TorrentRepository -from media_manager.torrent.utils import ( - import_file, - get_files_for_import, - remove_special_characters, - get_importable_media_directories, - 
extract_external_id_from_string, - remove_special_chars_and_parentheses, -) +from media_manager.indexer.schemas import IndexerQueryResult, IndexerQueryResultId from media_manager.indexer.service import IndexerService -from media_manager.metadataProvider.abstractMetaDataProvider import ( +from media_manager.indexer.utils import evaluate_indexer_query_results +from media_manager.metadataProvider.abstract_metadata_provider import ( AbstractMetadataProvider, ) +from media_manager.metadataProvider.schemas import MetaDataProviderSearchResult from media_manager.metadataProvider.tmdb import TmdbMetadataProvider from media_manager.metadataProvider.tvdb import TvdbMetadataProvider +from media_manager.notification.service import NotificationService from media_manager.schemas import MediaImportSuggestion +from media_manager.torrent.repository import TorrentRepository +from media_manager.torrent.schemas import ( + Quality, + QualityStrings, + Torrent, + TorrentStatus, +) +from media_manager.torrent.service import TorrentService +from media_manager.torrent.utils import ( + extract_external_id_from_string, + get_files_for_import, + get_importable_media_directories, + import_file, + remove_special_characters, + remove_special_chars_and_parentheses, +) +from media_manager.tv import log +from media_manager.tv.repository import TvRepository +from media_manager.tv.schemas import ( + Episode as EpisodeSchema, +) +from media_manager.tv.schemas import ( + EpisodeId, + PublicSeason, + PublicSeasonFile, + PublicShow, + RichSeasonRequest, + RichSeasonTorrent, + RichShowTorrent, + Season, + SeasonFile, + SeasonId, + SeasonRequest, + SeasonRequestId, + Show, + ShowId, +) class TvService: @@ -82,7 +87,7 @@ class TvService: :param language: Optional language code (ISO 639-1) to fetch metadata in. 
""" show_with_metadata = metadata_provider.get_show_metadata( - id=external_id, language=language + show_id=external_id, language=language ) saved_show = self.tv_repository.save_show(show=show_with_metadata) metadata_provider.download_show_poster_image(show=saved_show) @@ -193,22 +198,29 @@ class TvService: result.append(season_file) return result - def check_if_show_exists( - self, - external_id: int = None, - metadata_provider: str = None, - show_id: ShowId = None, - ) -> bool: + @overload + def check_if_show_exists(self, *, external_id: int, metadata_provider: str) -> bool: """ Check if a show exists in the database. :param external_id: The external ID of the show. :param metadata_provider: The metadata provider. + :return: True if the show exists, False otherwise. + """ + + @overload + def check_if_show_exists(self, *, show_id: ShowId) -> bool: + """ + Check if a show exists in the database. + :param show_id: The ID of the show. :return: True if the show exists, False otherwise. - :raises ValueError: If neither external ID and metadata provider nor show ID are provided. """ - if external_id and metadata_provider: + + def check_if_show_exists( + self, *, external_id=None, metadata_provider=None, show_id=None + ) -> bool: + if not (external_id is None or metadata_provider is None): try: self.tv_repository.get_show_by_external_id( external_id=external_id, metadata_provider=metadata_provider @@ -216,19 +228,21 @@ class TvService: return True except NotFoundError: return False - elif show_id: + elif show_id is not None: try: self.tv_repository.get_show_by_id(show_id=show_id) return True except NotFoundError: return False else: - raise ValueError( - "External ID and metadata provider or Show ID must be provided" - ) + msg = "Use one of the provided overloads for this function!" 
+ raise ValueError(msg) def get_all_available_torrents_for_a_season( - self, season_number: int, show_id: ShowId, search_query_override: str = None + self, + season_number: int, + show_id: ShowId, + search_query_override: str | None = None, ) -> list[IndexerQueryResult]: """ Get all available torrents for a given season. @@ -238,26 +252,21 @@ class TvService: :param search_query_override: Optional override for the search query. :return: A list of indexer query results. """ - show = self.tv_repository.get_show_by_id(show_id=show_id) if search_query_override: - torrents = self.indexer_service.search( - query=search_query_override, is_tv=True - ) - return torrents - else: - torrents = self.indexer_service.search_season( - show=show, season_number=season_number - ) + return self.indexer_service.search(query=search_query_override, is_tv=True) - results: list[IndexerQueryResult] = [] - for torrent in torrents: - if season_number in torrent.season: - results.append(torrent) + show = self.tv_repository.get_show_by_id(show_id=show_id) - return evaluate_indexer_query_results( - is_tv=True, query_results=results, media=show - ) + torrents = self.indexer_service.search_season( + show=show, season_number=season_number + ) + + results = [torrent for torrent in torrents if season_number in torrent.season] + + return evaluate_indexer_query_results( + is_tv=True, query_results=results, media=show + ) def get_all_shows(self) -> list[Show]: """ @@ -305,16 +314,15 @@ class TvService: :param metadata_provider: The metadata provider to use. :return: A list of metadata provider show search results. 
""" - results: list[MetaDataProviderSearchResult] = metadata_provider.search_show() + results = metadata_provider.search_show() - filtered_results = [] - for result in results: + return [ + result + for result in results if not self.check_if_show_exists( external_id=result.external_id, metadata_provider=metadata_provider.name - ): - filtered_results.append(result) - - return filtered_results + ) + ] def get_public_show_by_id(self, show: Show) -> PublicShow: """ @@ -363,16 +371,15 @@ class TvService: """ if season_file.torrent_id is None: return True - else: - try: - torrent_file = self.torrent_service.get_torrent_by_id( - torrent_id=season_file.torrent_id - ) + try: + torrent_file = self.torrent_service.get_torrent_by_id( + torrent_id=season_file.torrent_id + ) - if torrent_file.imported: - return True - except RuntimeError as e: - log.error(f"Error retrieving torrent, error: {e}") + if torrent_file.imported: + return True + except RuntimeError as e: + log.error(f"Error retrieving torrent, error: {e}") return False def get_show_by_external_id( @@ -511,9 +518,8 @@ class TvService: :raises ValueError: If the season request is not authorized. 
""" if not season_request.authorized: - raise ValueError( - f"Season request {season_request.id} is not authorized for download" - ) + msg = f"Season request {season_request.id} is not authorized for download" + raise ValueError(msg) log.info(f"Downloading approved season request {season_request.id}") @@ -633,9 +639,8 @@ class TvService: import_file(target_file=target_video_file, source_file=file) return True else: - raise Exception( - f"Could not find any video file for episode {episode_number} of show {show.name} S{season.number}" - ) + msg = f"Could not find any video file for episode {episode_number} of show {show.name} S{season.number}" + raise Exception(msg) def import_season( self, @@ -654,7 +659,8 @@ class TvService: season_path.mkdir(parents=True, exist_ok=True) except Exception as e: log.warning(f"Could not create path {season_path}: {e}") - raise Exception(f"Could not create path {season_path}") from e + msg = f"Could not create path {season_path}" + raise Exception(msg) from e for episode in season.episodes: try: @@ -689,7 +695,7 @@ class TvService: :param show: The Show object """ - video_files, subtitle_files, all_files = get_files_for_import(torrent=torrent) + video_files, subtitle_files, _all_files = get_files_for_import(torrent=torrent) success: list[bool] = [] @@ -704,7 +710,7 @@ class TvService: for season_file in season_files: season = self.get_season(season_id=season_file.season_id) - season_import_success, imported_episodes_count = self.import_season( + season_import_success, _imported_episodes_count = self.import_season( show=show, season=season, video_files=video_files, @@ -758,7 +764,7 @@ class TvService: # Use stored original_language preference for metadata fetching fresh_show_data = metadata_provider.get_show_metadata( - id=db_show.external_id, language=db_show.original_language + show_id=db_show.external_id, language=db_show.original_language ) if not fresh_show_data: log.warning( @@ -833,16 +839,15 @@ class TvService: log.debug( 
f"Adding new season {fresh_season_data.number} to show {db_show.name}" ) - episodes_for_schema = [] - for ep_data in fresh_season_data.episodes: - episodes_for_schema.append( - EpisodeSchema( - id=EpisodeId(ep_data.id), - number=ep_data.number, - external_id=ep_data.external_id, - title=ep_data.title, - ) + episodes_for_schema = [ + EpisodeSchema( + id=EpisodeId(ep_data.id), + number=ep_data.number, + external_id=ep_data.external_id, + title=ep_data.title, ) + for ep_data in fresh_season_data.episodes + ] season_schema = Season( id=SeasonId(fresh_season_data.id), @@ -896,9 +901,10 @@ class TvService: source_directory.rename(new_source_path) except Exception as e: log.error(f"Failed to rename {source_directory} to {new_source_path}: {e}") - raise Exception("Failed to rename source directory") from e + msg = "Failed to rename source directory" + raise Exception(msg) from e - video_files, subtitle_files, all_files = get_files_for_import( + video_files, subtitle_files, _all_files = get_files_for_import( directory=new_source_path ) for season in tv_show.seasons: @@ -1052,7 +1058,7 @@ def update_all_non_ended_shows_metadata() -> None: continue except InvalidConfigError as e: log.error( - f"Error initializing metadata provider {show.metadata_provider} for show {show.name}: {str(e)}" + f"Error initializing metadata provider {show.metadata_provider} for show {show.name}: {e}" ) continue updated_show = tv_service.update_show_metadata( diff --git a/metadata_relay/app/tmdb.py b/metadata_relay/app/tmdb.py index 22288be..45660f5 100644 --- a/metadata_relay/app/tmdb.py +++ b/metadata_relay/app/tmdb.py @@ -2,8 +2,8 @@ import logging import os import tmdbsimple -from tmdbsimple import TV, TV_Seasons, Movies, Trending, Search from fastapi import APIRouter +from tmdbsimple import TV, Movies, Search, Trending, TV_Seasons log = logging.getLogger(__name__) diff --git a/metadata_relay/app/tvdb.py b/metadata_relay/app/tvdb.py index 0014786..4f62b82 100644 --- a/metadata_relay/app/tvdb.py 
+++ b/metadata_relay/app/tvdb.py @@ -1,7 +1,7 @@ +import logging import os import tvdb_v4_official -import logging from fastapi import APIRouter log = logging.getLogger(__name__) diff --git a/metadata_relay/main.py b/metadata_relay/main.py index 6d42ae1..5cc0616 100644 --- a/metadata_relay/main.py +++ b/metadata_relay/main.py @@ -1,11 +1,10 @@ import os -from fastapi import FastAPI -from starlette_exporter import PrometheusMiddleware, handle_metrics from app.tmdb import router as tmdb_router from app.tvdb import router as tvdb_router +from fastapi import FastAPI +from starlette_exporter import PrometheusMiddleware, handle_metrics -print("Hello world!") app = FastAPI(root_path=os.getenv("BASE_PATH")) app.add_middleware(PrometheusMiddleware) diff --git a/pyproject.toml b/pyproject.toml index 668a755..308dd50 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,12 +30,14 @@ dependencies = [ "alembic>=1.16.1", "pytest>=8.4.0", "pillow>=11.3.0", - "pillow-avif-plugin>=1.5.2", "sabnzbd-api>=0.1.2", "transmission-rpc>=7.0.11", "libtorrent>=2.0.11", ] +[dependency-groups] +dev = ["ruff"] + [tool.setuptools.packages.find] include = ["media_manager*"] exclude = ["web*", "Writerside*", "metadata_relay*", "tests*"] diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 0000000..848c14f --- /dev/null +++ b/ruff.toml @@ -0,0 +1,38 @@ +namespace-packages = ["alembic", "metadata_relay"] + +[format] +line-ending = "lf" +quote-style = "double" + +[lint] +# to be enabled: ANN, BLE, C90, CPY, D, DOC, DTZ, FBT, G, PL, RSE, SLF, SIM, TC, TRY, UP +extend-select = [ + "A", "ARG", "ASYNC", + "B", + "C4", "COM", + "DTZ", + "E", "EM", "EXE", + "F", "FA", "FAST", "FIX", "FLY", "FURB", + "I", "ICN", "INP", "INT", "ISC", + "LOG", + "N", + "PERF", "PGH", "PIE", "PT", "PTH", "PYI", + "Q", + "RET", "RUF", + "S", "SLOT", + "T10", "T20", "TD", "TID", + "W", + "YTT" +] + +ignore = [ + # incompatible with formatter + "COM812", + # lines too long + "E501", + # currently a bug?! 
with providers and depends + "FAST003", +] + +[lint.flake8-bugbear] +extend-immutable-calls = ["fastapi.Depends", "fastapi.Path"] diff --git a/uv.lock b/uv.lock index cafdc67..f5714bc 100644 --- a/uv.lock +++ b/uv.lock @@ -197,11 +197,11 @@ wheels = [ [[package]] name = "certifi" -version = "2025.11.12" +version = "2026.1.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, ] [[package]] @@ -866,7 +866,6 @@ dependencies = [ { name = "libtorrent" }, { name = "patool" }, { name = "pillow" }, - { name = "pillow-avif-plugin" }, { name = "psycopg", extra = ["binary"] }, { name = "pydantic" }, { name = "pydantic-settings", extra = ["toml"] }, @@ -884,6 +883,11 @@ dependencies = [ { name = "uvicorn" }, ] +[package.dev-dependencies] +dev = [ + { name = "ruff" }, +] + [package.metadata] requires-dist = [ { name 
= "alembic", specifier = ">=1.16.1" }, @@ -900,7 +904,6 @@ requires-dist = [ { name = "libtorrent", specifier = ">=2.0.11" }, { name = "patool", specifier = ">=4.0.1" }, { name = "pillow", specifier = ">=11.3.0" }, - { name = "pillow-avif-plugin", specifier = ">=1.5.2" }, { name = "psycopg", extras = ["binary"], specifier = ">=3.2.9" }, { name = "pydantic", specifier = ">=2.11.5" }, { name = "pydantic-settings", extras = ["toml"], specifier = ">=2.9.1" }, @@ -918,6 +921,9 @@ requires-dist = [ { name = "uvicorn", specifier = ">=0.34.2" }, ] +[package.metadata.requires-dev] +dev = [{ name = "ruff" }] + [[package]] name = "mypy-extensions" version = "1.1.0" @@ -947,86 +953,60 @@ wheels = [ [[package]] name = "pillow" -version = "12.0.0" +version = "12.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/cace85a1b0c9775a9f8f5d5423c8261c858760e2466c79b2dd184638b056/pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353", size = 47008828, upload-time = "2025-10-15T18:24:14.008Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/02/d52c733a2452ef1ffcc123b68e6606d07276b0e358db70eabad7e40042b7/pillow-12.1.0.tar.gz", hash = "sha256:5c5ae0a06e9ea030ab786b0251b32c7e4ce10e58d983c0d5c56029455180b5b9", size = 46977283, upload-time = "2026-01-02T09:13:29.892Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/62/f2/de993bb2d21b33a98d031ecf6a978e4b61da207bef02f7b43093774c480d/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:0869154a2d0546545cde61d1789a6524319fc1897d9ee31218eae7a60ccc5643", size = 4045493, upload-time = "2025-10-15T18:22:25.758Z" }, - { url = "https://files.pythonhosted.org/packages/0e/b6/bc8d0c4c9f6f111a783d045310945deb769b806d7574764234ffd50bc5ea/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:a7921c5a6d31b3d756ec980f2f47c0cfdbce0fc48c22a39347a895f41f4a6ea4", size = 4120461, 
upload-time = "2025-10-15T18:22:27.286Z" }, - { url = "https://files.pythonhosted.org/packages/5d/57/d60d343709366a353dc56adb4ee1e7d8a2cc34e3fbc22905f4167cfec119/pillow-12.0.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:1ee80a59f6ce048ae13cda1abf7fbd2a34ab9ee7d401c46be3ca685d1999a399", size = 3576912, upload-time = "2025-10-15T18:22:28.751Z" }, - { url = "https://files.pythonhosted.org/packages/a4/a4/a0a31467e3f83b94d37568294b01d22b43ae3c5d85f2811769b9c66389dd/pillow-12.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c50f36a62a22d350c96e49ad02d0da41dbd17ddc2e29750dbdba4323f85eb4a5", size = 5249132, upload-time = "2025-10-15T18:22:30.641Z" }, - { url = "https://files.pythonhosted.org/packages/83/06/48eab21dd561de2914242711434c0c0eb992ed08ff3f6107a5f44527f5e9/pillow-12.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5193fde9a5f23c331ea26d0cf171fbf67e3f247585f50c08b3e205c7aeb4589b", size = 4650099, upload-time = "2025-10-15T18:22:32.73Z" }, - { url = "https://files.pythonhosted.org/packages/fc/bd/69ed99fd46a8dba7c1887156d3572fe4484e3f031405fcc5a92e31c04035/pillow-12.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bde737cff1a975b70652b62d626f7785e0480918dece11e8fef3c0cf057351c3", size = 6230808, upload-time = "2025-10-15T18:22:34.337Z" }, - { url = "https://files.pythonhosted.org/packages/ea/94/8fad659bcdbf86ed70099cb60ae40be6acca434bbc8c4c0d4ef356d7e0de/pillow-12.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6597ff2b61d121172f5844b53f21467f7082f5fb385a9a29c01414463f93b07", size = 8037804, upload-time = "2025-10-15T18:22:36.402Z" }, - { url = "https://files.pythonhosted.org/packages/20/39/c685d05c06deecfd4e2d1950e9a908aa2ca8bc4e6c3b12d93b9cafbd7837/pillow-12.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b817e7035ea7f6b942c13aa03bb554fc44fea70838ea21f8eb31c638326584e", size = 6345553, upload-time = "2025-10-15T18:22:38.066Z" }, 
- { url = "https://files.pythonhosted.org/packages/38/57/755dbd06530a27a5ed74f8cb0a7a44a21722ebf318edbe67ddbd7fb28f88/pillow-12.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f4f1231b7dec408e8670264ce63e9c71409d9583dd21d32c163e25213ee2a344", size = 7037729, upload-time = "2025-10-15T18:22:39.769Z" }, - { url = "https://files.pythonhosted.org/packages/ca/b6/7e94f4c41d238615674d06ed677c14883103dce1c52e4af16f000338cfd7/pillow-12.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e51b71417049ad6ab14c49608b4a24d8fb3fe605e5dfabfe523b58064dc3d27", size = 6459789, upload-time = "2025-10-15T18:22:41.437Z" }, - { url = "https://files.pythonhosted.org/packages/9c/14/4448bb0b5e0f22dd865290536d20ec8a23b64e2d04280b89139f09a36bb6/pillow-12.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d120c38a42c234dc9a8c5de7ceaaf899cf33561956acb4941653f8bdc657aa79", size = 7130917, upload-time = "2025-10-15T18:22:43.152Z" }, - { url = "https://files.pythonhosted.org/packages/dd/ca/16c6926cc1c015845745d5c16c9358e24282f1e588237a4c36d2b30f182f/pillow-12.0.0-cp313-cp313-win32.whl", hash = "sha256:4cc6b3b2efff105c6a1656cfe59da4fdde2cda9af1c5e0b58529b24525d0a098", size = 6302391, upload-time = "2025-10-15T18:22:44.753Z" }, - { url = "https://files.pythonhosted.org/packages/6d/2a/dd43dcfd6dae9b6a49ee28a8eedb98c7d5ff2de94a5d834565164667b97b/pillow-12.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:4cf7fed4b4580601c4345ceb5d4cbf5a980d030fd5ad07c4d2ec589f95f09905", size = 7007477, upload-time = "2025-10-15T18:22:46.838Z" }, - { url = "https://files.pythonhosted.org/packages/77/f0/72ea067f4b5ae5ead653053212af05ce3705807906ba3f3e8f58ddf617e6/pillow-12.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:9f0b04c6b8584c2c193babcccc908b38ed29524b29dd464bc8801bf10d746a3a", size = 2435918, upload-time = "2025-10-15T18:22:48.399Z" }, - { url = 
"https://files.pythonhosted.org/packages/f5/5e/9046b423735c21f0487ea6cb5b10f89ea8f8dfbe32576fe052b5ba9d4e5b/pillow-12.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7fa22993bac7b77b78cae22bad1e2a987ddf0d9015c63358032f84a53f23cdc3", size = 5251406, upload-time = "2025-10-15T18:22:49.905Z" }, - { url = "https://files.pythonhosted.org/packages/12/66/982ceebcdb13c97270ef7a56c3969635b4ee7cd45227fa707c94719229c5/pillow-12.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f135c702ac42262573fe9714dfe99c944b4ba307af5eb507abef1667e2cbbced", size = 4653218, upload-time = "2025-10-15T18:22:51.587Z" }, - { url = "https://files.pythonhosted.org/packages/16/b3/81e625524688c31859450119bf12674619429cab3119eec0e30a7a1029cb/pillow-12.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c85de1136429c524e55cfa4e033b4a7940ac5c8ee4d9401cc2d1bf48154bbc7b", size = 6266564, upload-time = "2025-10-15T18:22:53.215Z" }, - { url = "https://files.pythonhosted.org/packages/98/59/dfb38f2a41240d2408096e1a76c671d0a105a4a8471b1871c6902719450c/pillow-12.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38df9b4bfd3db902c9c2bd369bcacaf9d935b2fff73709429d95cc41554f7b3d", size = 8069260, upload-time = "2025-10-15T18:22:54.933Z" }, - { url = "https://files.pythonhosted.org/packages/dc/3d/378dbea5cd1874b94c312425ca77b0f47776c78e0df2df751b820c8c1d6c/pillow-12.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d87ef5795da03d742bf49439f9ca4d027cde49c82c5371ba52464aee266699a", size = 6379248, upload-time = "2025-10-15T18:22:56.605Z" }, - { url = "https://files.pythonhosted.org/packages/84/b0/d525ef47d71590f1621510327acec75ae58c721dc071b17d8d652ca494d8/pillow-12.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aff9e4d82d082ff9513bdd6acd4f5bd359f5b2c870907d2b0a9c5e10d40c88fe", size = 7066043, upload-time = "2025-10-15T18:22:58.53Z" }, - { url = 
"https://files.pythonhosted.org/packages/61/2c/aced60e9cf9d0cde341d54bf7932c9ffc33ddb4a1595798b3a5150c7ec4e/pillow-12.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8d8ca2b210ada074d57fcee40c30446c9562e542fc46aedc19baf758a93532ee", size = 6490915, upload-time = "2025-10-15T18:23:00.582Z" }, - { url = "https://files.pythonhosted.org/packages/ef/26/69dcb9b91f4e59f8f34b2332a4a0a951b44f547c4ed39d3e4dcfcff48f89/pillow-12.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:99a7f72fb6249302aa62245680754862a44179b545ded638cf1fef59befb57ef", size = 7157998, upload-time = "2025-10-15T18:23:02.627Z" }, - { url = "https://files.pythonhosted.org/packages/61/2b/726235842220ca95fa441ddf55dd2382b52ab5b8d9c0596fe6b3f23dafe8/pillow-12.0.0-cp313-cp313t-win32.whl", hash = "sha256:4078242472387600b2ce8d93ade8899c12bf33fa89e55ec89fe126e9d6d5d9e9", size = 6306201, upload-time = "2025-10-15T18:23:04.709Z" }, - { url = "https://files.pythonhosted.org/packages/c0/3d/2afaf4e840b2df71344ababf2f8edd75a705ce500e5dc1e7227808312ae1/pillow-12.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2c54c1a783d6d60595d3514f0efe9b37c8808746a66920315bfd34a938d7994b", size = 7013165, upload-time = "2025-10-15T18:23:06.46Z" }, - { url = "https://files.pythonhosted.org/packages/6f/75/3fa09aa5cf6ed04bee3fa575798ddf1ce0bace8edb47249c798077a81f7f/pillow-12.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:26d9f7d2b604cd23aba3e9faf795787456ac25634d82cd060556998e39c6fa47", size = 2437834, upload-time = "2025-10-15T18:23:08.194Z" }, - { url = "https://files.pythonhosted.org/packages/54/2a/9a8c6ba2c2c07b71bec92cf63e03370ca5e5f5c5b119b742bcc0cde3f9c5/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:beeae3f27f62308f1ddbcfb0690bf44b10732f2ef43758f169d5e9303165d3f9", size = 4045531, upload-time = "2025-10-15T18:23:10.121Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/54/836fdbf1bfb3d66a59f0189ff0b9f5f666cee09c6188309300df04ad71fa/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:d4827615da15cd59784ce39d3388275ec093ae3ee8d7f0c089b76fa87af756c2", size = 4120554, upload-time = "2025-10-15T18:23:12.14Z" }, - { url = "https://files.pythonhosted.org/packages/0d/cd/16aec9f0da4793e98e6b54778a5fbce4f375c6646fe662e80600b8797379/pillow-12.0.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:3e42edad50b6909089750e65c91aa09aaf1e0a71310d383f11321b27c224ed8a", size = 3576812, upload-time = "2025-10-15T18:23:13.962Z" }, - { url = "https://files.pythonhosted.org/packages/f6/b7/13957fda356dc46339298b351cae0d327704986337c3c69bb54628c88155/pillow-12.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e5d8efac84c9afcb40914ab49ba063d94f5dbdf5066db4482c66a992f47a3a3b", size = 5252689, upload-time = "2025-10-15T18:23:15.562Z" }, - { url = "https://files.pythonhosted.org/packages/fc/f5/eae31a306341d8f331f43edb2e9122c7661b975433de5e447939ae61c5da/pillow-12.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:266cd5f2b63ff316d5a1bba46268e603c9caf5606d44f38c2873c380950576ad", size = 4650186, upload-time = "2025-10-15T18:23:17.379Z" }, - { url = "https://files.pythonhosted.org/packages/86/62/2a88339aa40c4c77e79108facbd307d6091e2c0eb5b8d3cf4977cfca2fe6/pillow-12.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:58eea5ebe51504057dd95c5b77d21700b77615ab0243d8152793dc00eb4faf01", size = 6230308, upload-time = "2025-10-15T18:23:18.971Z" }, - { url = "https://files.pythonhosted.org/packages/c7/33/5425a8992bcb32d1cb9fa3dd39a89e613d09a22f2c8083b7bf43c455f760/pillow-12.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f13711b1a5ba512d647a0e4ba79280d3a9a045aaf7e0cc6fbe96b91d4cdf6b0c", size = 8039222, upload-time = "2025-10-15T18:23:20.909Z" }, - { url = 
"https://files.pythonhosted.org/packages/d8/61/3f5d3b35c5728f37953d3eec5b5f3e77111949523bd2dd7f31a851e50690/pillow-12.0.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6846bd2d116ff42cba6b646edf5bf61d37e5cbd256425fa089fee4ff5c07a99e", size = 6346657, upload-time = "2025-10-15T18:23:23.077Z" }, - { url = "https://files.pythonhosted.org/packages/3a/be/ee90a3d79271227e0f0a33c453531efd6ed14b2e708596ba5dd9be948da3/pillow-12.0.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c98fa880d695de164b4135a52fd2e9cd7b7c90a9d8ac5e9e443a24a95ef9248e", size = 7038482, upload-time = "2025-10-15T18:23:25.005Z" }, - { url = "https://files.pythonhosted.org/packages/44/34/a16b6a4d1ad727de390e9bd9f19f5f669e079e5826ec0f329010ddea492f/pillow-12.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa3ed2a29a9e9d2d488b4da81dcb54720ac3104a20bf0bd273f1e4648aff5af9", size = 6461416, upload-time = "2025-10-15T18:23:27.009Z" }, - { url = "https://files.pythonhosted.org/packages/b6/39/1aa5850d2ade7d7ba9f54e4e4c17077244ff7a2d9e25998c38a29749eb3f/pillow-12.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d034140032870024e6b9892c692fe2968493790dd57208b2c37e3fb35f6df3ab", size = 7131584, upload-time = "2025-10-15T18:23:29.752Z" }, - { url = "https://files.pythonhosted.org/packages/bf/db/4fae862f8fad0167073a7733973bfa955f47e2cac3dc3e3e6257d10fab4a/pillow-12.0.0-cp314-cp314-win32.whl", hash = "sha256:1b1b133e6e16105f524a8dec491e0586d072948ce15c9b914e41cdadd209052b", size = 6400621, upload-time = "2025-10-15T18:23:32.06Z" }, - { url = "https://files.pythonhosted.org/packages/2b/24/b350c31543fb0107ab2599464d7e28e6f856027aadda995022e695313d94/pillow-12.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:8dc232e39d409036af549c86f24aed8273a40ffa459981146829a324e0848b4b", size = 7142916, upload-time = "2025-10-15T18:23:34.71Z" }, - { url = 
"https://files.pythonhosted.org/packages/0f/9b/0ba5a6fd9351793996ef7487c4fdbde8d3f5f75dbedc093bb598648fddf0/pillow-12.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:d52610d51e265a51518692045e372a4c363056130d922a7351429ac9f27e70b0", size = 2523836, upload-time = "2025-10-15T18:23:36.967Z" }, - { url = "https://files.pythonhosted.org/packages/f5/7a/ceee0840aebc579af529b523d530840338ecf63992395842e54edc805987/pillow-12.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1979f4566bb96c1e50a62d9831e2ea2d1211761e5662afc545fa766f996632f6", size = 5255092, upload-time = "2025-10-15T18:23:38.573Z" }, - { url = "https://files.pythonhosted.org/packages/44/76/20776057b4bfd1aef4eeca992ebde0f53a4dce874f3ae693d0ec90a4f79b/pillow-12.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b2e4b27a6e15b04832fe9bf292b94b5ca156016bbc1ea9c2c20098a0320d6cf6", size = 4653158, upload-time = "2025-10-15T18:23:40.238Z" }, - { url = "https://files.pythonhosted.org/packages/82/3f/d9ff92ace07be8836b4e7e87e6a4c7a8318d47c2f1463ffcf121fc57d9cb/pillow-12.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fb3096c30df99fd01c7bf8e544f392103d0795b9f98ba71a8054bcbf56b255f1", size = 6267882, upload-time = "2025-10-15T18:23:42.434Z" }, - { url = "https://files.pythonhosted.org/packages/9f/7a/4f7ff87f00d3ad33ba21af78bfcd2f032107710baf8280e3722ceec28cda/pillow-12.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7438839e9e053ef79f7112c881cef684013855016f928b168b81ed5835f3e75e", size = 8071001, upload-time = "2025-10-15T18:23:44.29Z" }, - { url = "https://files.pythonhosted.org/packages/75/87/fcea108944a52dad8cca0715ae6247e271eb80459364a98518f1e4f480c1/pillow-12.0.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d5c411a8eaa2299322b647cd932586b1427367fd3184ffbb8f7a219ea2041ca", size = 6380146, upload-time = "2025-10-15T18:23:46.065Z" }, - { url = 
"https://files.pythonhosted.org/packages/91/52/0d31b5e571ef5fd111d2978b84603fce26aba1b6092f28e941cb46570745/pillow-12.0.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7e091d464ac59d2c7ad8e7e08105eaf9dafbc3883fd7265ffccc2baad6ac925", size = 7067344, upload-time = "2025-10-15T18:23:47.898Z" }, - { url = "https://files.pythonhosted.org/packages/7b/f4/2dd3d721f875f928d48e83bb30a434dee75a2531bca839bb996bb0aa5a91/pillow-12.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:792a2c0be4dcc18af9d4a2dfd8a11a17d5e25274a1062b0ec1c2d79c76f3e7f8", size = 6491864, upload-time = "2025-10-15T18:23:49.607Z" }, - { url = "https://files.pythonhosted.org/packages/30/4b/667dfcf3d61fc309ba5a15b141845cece5915e39b99c1ceab0f34bf1d124/pillow-12.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:afbefa430092f71a9593a99ab6a4e7538bc9eabbf7bf94f91510d3503943edc4", size = 7158911, upload-time = "2025-10-15T18:23:51.351Z" }, - { url = "https://files.pythonhosted.org/packages/a2/2f/16cabcc6426c32218ace36bf0d55955e813f2958afddbf1d391849fee9d1/pillow-12.0.0-cp314-cp314t-win32.whl", hash = "sha256:3830c769decf88f1289680a59d4f4c46c72573446352e2befec9a8512104fa52", size = 6408045, upload-time = "2025-10-15T18:23:53.177Z" }, - { url = "https://files.pythonhosted.org/packages/35/73/e29aa0c9c666cf787628d3f0dcf379f4791fba79f4936d02f8b37165bdf8/pillow-12.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:905b0365b210c73afb0ebe9101a32572152dfd1c144c7e28968a331b9217b94a", size = 7148282, upload-time = "2025-10-15T18:23:55.316Z" }, - { url = "https://files.pythonhosted.org/packages/c1/70/6b41bdcddf541b437bbb9f47f94d2db5d9ddef6c37ccab8c9107743748a4/pillow-12.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:99353a06902c2e43b43e8ff74ee65a7d90307d82370604746738a1e0661ccca7", size = 2525630, upload-time = "2025-10-15T18:23:57.149Z" }, -] - -[[package]] -name = "pillow-avif-plugin" -version = "1.5.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/f9/32/a3bfad0537ba6f2accc6a8a2e53e09b266418347f58898f811ca2fb70bd9/pillow_avif_plugin-1.5.2.tar.gz", hash = "sha256:811e0dc8be1e44393d2e3865ec330a8a8a1194b94eb8cfca6fa778e3f476d649", size = 20571, upload-time = "2025-04-24T14:11:49.163Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/6a/7a03fd17fe33f07025776d77ee59a67007854a477da89c4d68ab2d8bcc77/pillow_avif_plugin-1.5.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5be8435d2ebf0972f6c2ef69d8869e5875f5cd77c69c9606e0145595b81f7790", size = 3900271, upload-time = "2025-04-24T14:10:45.33Z" }, - { url = "https://files.pythonhosted.org/packages/6c/10/e4daa1c0ad14492724305b4786bad560b8ffd5643e4817d6684f463b7cf0/pillow_avif_plugin-1.5.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d2e571b845da4ea4fc16d647e64656bc754840a10ad5ef9fd020d389ea664c9d", size = 2805824, upload-time = "2025-04-24T14:10:46.489Z" }, - { url = "https://files.pythonhosted.org/packages/58/34/5443fe48d8923c35742e5b11e0ecd2378e4275d6310b6bf7c1533e24cafc/pillow_avif_plugin-1.5.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35d61a9b586793c93e439b9181c201bc34c67cc4817f8f5625b8df37f0a25efe", size = 2991320, upload-time = "2025-04-24T14:10:47.731Z" }, - { url = "https://files.pythonhosted.org/packages/93/9f/a85b96e545a7306743eebae6216a9f5af4869ffa40fd285adead78ebed50/pillow_avif_plugin-1.5.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84fce3d0ef7b5c8ecf5a1a29da487c824b93bf33680de31829e8179bd4511548", size = 6365646, upload-time = "2025-04-24T14:10:48.958Z" }, - { url = "https://files.pythonhosted.org/packages/ec/7e/126fce54561f11d3d4d9ad21a4a14eab2a2cf08ee8668253005c551a15ca/pillow_avif_plugin-1.5.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e0ac45d820bcb895d0667d18a61bf0c940ea4659fcbe991d7ce767ec062c175c", size = 3003952, upload-time = "2025-04-24T14:10:50.218Z" }, - { url = 
"https://files.pythonhosted.org/packages/f4/62/a7b296c207ff702dcab8508c65c3c0b3a72f61b1967df9a4273cbe68dc76/pillow_avif_plugin-1.5.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:8b0c86ade32600b832f2323409239fadd46d17a05380d11836e7fc24e54c170a", size = 4173898, upload-time = "2025-04-24T14:10:51.689Z" }, - { url = "https://files.pythonhosted.org/packages/8f/de/6405c66ce3d3af44b5e0091f26321eb09d8ec75cab4d3d843a316c1686cd/pillow_avif_plugin-1.5.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d639ef381496e0306e43cccd9bf384e0eccba979b6758c2aa4986fb25b920cab", size = 3101313, upload-time = "2025-04-24T14:10:53.143Z" }, - { url = "https://files.pythonhosted.org/packages/8a/de/bee7bd4539d45df2328fb5c52326b72770471aa3441c8b6d91b33e54f7c3/pillow_avif_plugin-1.5.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3b168757ee646a0b53f58da978b73dc1c5f98da9d36b52908055d54d0a48c845", size = 4195538, upload-time = "2025-04-24T14:10:55.854Z" }, - { url = "https://files.pythonhosted.org/packages/4d/40/f11c63a815bae18b4792df5435e61351d90cc4795da5cb4e19b1d4bf532e/pillow_avif_plugin-1.5.2-cp313-cp313-win_amd64.whl", hash = "sha256:b840a431dbb04f60a18bbd2e28c2b26d845efcddd28f98e9386eb5a5938a3b38", size = 9867834, upload-time = "2025-04-24T14:10:57.26Z" }, - { url = "https://files.pythonhosted.org/packages/f5/ce/36c401834c477fb3034dee6962adacc368f944d72b96f298a2e4787ae44d/pillow_avif_plugin-1.5.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4fb6744377634b28b0ed54fd12a1e6fa6cf09633dc7608958595d2659e1186a8", size = 3900637, upload-time = "2025-04-24T14:10:58.939Z" }, - { url = "https://files.pythonhosted.org/packages/57/e1/faacc26a8da3b1a965860affcc107bc555929bf96a9e7810815aa09ab17f/pillow_avif_plugin-1.5.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ea00363749cd6c359c054b920fef0cd1f4864af7920c86886339128d704298a3", size = 2806181, upload-time = "2025-04-24T14:11:00.517Z" }, - { url = 
"https://files.pythonhosted.org/packages/e4/7f/4d92ec39a00308c81b101ad3c8a948aaa3f24d03246e74d68361975f8231/pillow_avif_plugin-1.5.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4b36cce231d6e0fa09972a1558eac18e94a29d83342129488f0a1fcb4509cb8", size = 2996403, upload-time = "2025-04-24T14:11:02.162Z" }, - { url = "https://files.pythonhosted.org/packages/b9/24/a5299390c810daa51932a7c8ba00e14e005199e7fe2474f6701a3890079b/pillow_avif_plugin-1.5.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4053ec0a6767229703d05fd4d47c39b92b6044e14489ed8e79f485c8d96a9ad3", size = 6369881, upload-time = "2025-04-24T14:11:03.55Z" }, - { url = "https://files.pythonhosted.org/packages/53/d7/8ffb730e2321d6493f2facc5e50d4e1364e45ed7315fc711e99ac98ddbd1/pillow_avif_plugin-1.5.2-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:6830b27e41669b9008dc4d9d69f3d66d2ee3d26367b4aa59c229cc5ffc702748", size = 3009137, upload-time = "2025-04-24T14:11:04.841Z" }, - { url = "https://files.pythonhosted.org/packages/8d/df/3df3c4dcb1b30759e27591182afc8ed7533c87591be685fa3fc5ca32bd68/pillow_avif_plugin-1.5.2-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:cf0e32a6a6635de41aabc196a7d888431553e78aae51e25d6602d384ef91be8b", size = 4178359, upload-time = "2025-04-24T14:11:06.05Z" }, - { url = "https://files.pythonhosted.org/packages/36/94/25659c3bc8ae80e856ac012842186200a3a00fb99214dab8ee8049244425/pillow_avif_plugin-1.5.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:bc6bb9a7103b57dcfdea910330323d330220c2e89d71f8182f1dff773f10a12d", size = 3106107, upload-time = "2025-04-24T14:11:07.284Z" }, - { url = "https://files.pythonhosted.org/packages/e9/3a/84bebb05143c7715760c9e51c9a7a579106334167c41f4d6aebfb2966042/pillow_avif_plugin-1.5.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:b54e18be1cf28bacf4da3c132fcf9ae9ff7b5dd6c8f5576dd66f0a099b0d1f7a", size = 4199692, upload-time = "2025-04-24T14:11:08.458Z" }, - { url = 
"https://files.pythonhosted.org/packages/ca/df/93748424d89bddff2192cf82f34b497dbf73ac2ffd022a481ee74e0aeae8/pillow_avif_plugin-1.5.2-cp313-cp313t-win_amd64.whl", hash = "sha256:981b3205d8389f88e7fccacb25f2c75668a371b8346771cc36bd35cae05560dd", size = 9868519, upload-time = "2025-04-24T14:11:09.852Z" }, + { url = "https://files.pythonhosted.org/packages/dd/c7/2530a4aa28248623e9d7f27316b42e27c32ec410f695929696f2e0e4a778/pillow-12.1.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:7b5dd7cbae20285cdb597b10eb5a2c13aa9de6cde9bb64a3c1317427b1db1ae1", size = 4062543, upload-time = "2026-01-02T09:11:31.566Z" }, + { url = "https://files.pythonhosted.org/packages/8f/1f/40b8eae823dc1519b87d53c30ed9ef085506b05281d313031755c1705f73/pillow-12.1.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:29a4cef9cb672363926f0470afc516dbf7305a14d8c54f7abbb5c199cd8f8179", size = 4138373, upload-time = "2026-01-02T09:11:33.367Z" }, + { url = "https://files.pythonhosted.org/packages/d4/77/6fa60634cf06e52139fd0e89e5bbf055e8166c691c42fb162818b7fda31d/pillow-12.1.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:681088909d7e8fa9e31b9799aaa59ba5234c58e5e4f1951b4c4d1082a2e980e0", size = 3601241, upload-time = "2026-01-02T09:11:35.011Z" }, + { url = "https://files.pythonhosted.org/packages/4f/bf/28ab865de622e14b747f0cd7877510848252d950e43002e224fb1c9ababf/pillow-12.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:983976c2ab753166dc66d36af6e8ec15bb511e4a25856e2227e5f7e00a160587", size = 5262410, upload-time = "2026-01-02T09:11:36.682Z" }, + { url = "https://files.pythonhosted.org/packages/1c/34/583420a1b55e715937a85bd48c5c0991598247a1fd2eb5423188e765ea02/pillow-12.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:db44d5c160a90df2d24a24760bbd37607d53da0b34fb546c4c232af7192298ac", size = 4657312, upload-time = "2026-01-02T09:11:38.535Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/fd/f5a0896839762885b3376ff04878f86ab2b097c2f9a9cdccf4eda8ba8dc0/pillow-12.1.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6b7a9d1db5dad90e2991645874f708e87d9a3c370c243c2d7684d28f7e133e6b", size = 6232605, upload-time = "2026-01-02T09:11:40.602Z" }, + { url = "https://files.pythonhosted.org/packages/98/aa/938a09d127ac1e70e6ed467bd03834350b33ef646b31edb7452d5de43792/pillow-12.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6258f3260986990ba2fa8a874f8b6e808cf5abb51a94015ca3dc3c68aa4f30ea", size = 8041617, upload-time = "2026-01-02T09:11:42.721Z" }, + { url = "https://files.pythonhosted.org/packages/17/e8/538b24cb426ac0186e03f80f78bc8dc7246c667f58b540bdd57c71c9f79d/pillow-12.1.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e115c15e3bc727b1ca3e641a909f77f8ca72a64fff150f666fcc85e57701c26c", size = 6346509, upload-time = "2026-01-02T09:11:44.955Z" }, + { url = "https://files.pythonhosted.org/packages/01/9a/632e58ec89a32738cabfd9ec418f0e9898a2b4719afc581f07c04a05e3c9/pillow-12.1.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6741e6f3074a35e47c77b23a4e4f2d90db3ed905cb1c5e6e0d49bff2045632bc", size = 7038117, upload-time = "2026-01-02T09:11:46.736Z" }, + { url = "https://files.pythonhosted.org/packages/c7/a2/d40308cf86eada842ca1f3ffa45d0ca0df7e4ab33c83f81e73f5eaed136d/pillow-12.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:935b9d1aed48fcfb3f838caac506f38e29621b44ccc4f8a64d575cb1b2a88644", size = 6460151, upload-time = "2026-01-02T09:11:48.625Z" }, + { url = "https://files.pythonhosted.org/packages/f1/88/f5b058ad6453a085c5266660a1417bdad590199da1b32fb4efcff9d33b05/pillow-12.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5fee4c04aad8932da9f8f710af2c1a15a83582cfb884152a9caa79d4efcdbf9c", size = 7164534, upload-time = "2026-01-02T09:11:50.445Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/ce/c17334caea1db789163b5d855a5735e47995b0b5dc8745e9a3605d5f24c0/pillow-12.1.0-cp313-cp313-win32.whl", hash = "sha256:a786bf667724d84aa29b5db1c61b7bfdde380202aaca12c3461afd6b71743171", size = 6332551, upload-time = "2026-01-02T09:11:52.234Z" }, + { url = "https://files.pythonhosted.org/packages/e5/07/74a9d941fa45c90a0d9465098fe1ec85de3e2afbdc15cc4766622d516056/pillow-12.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:461f9dfdafa394c59cd6d818bdfdbab4028b83b02caadaff0ffd433faf4c9a7a", size = 7040087, upload-time = "2026-01-02T09:11:54.822Z" }, + { url = "https://files.pythonhosted.org/packages/88/09/c99950c075a0e9053d8e880595926302575bc742b1b47fe1bbcc8d388d50/pillow-12.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:9212d6b86917a2300669511ed094a9406888362e085f2431a7da985a6b124f45", size = 2452470, upload-time = "2026-01-02T09:11:56.522Z" }, + { url = "https://files.pythonhosted.org/packages/b5/ba/970b7d85ba01f348dee4d65412476321d40ee04dcb51cd3735b9dc94eb58/pillow-12.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:00162e9ca6d22b7c3ee8e61faa3c3253cd19b6a37f126cad04f2f88b306f557d", size = 5264816, upload-time = "2026-01-02T09:11:58.227Z" }, + { url = "https://files.pythonhosted.org/packages/10/60/650f2fb55fdba7a510d836202aa52f0baac633e50ab1cf18415d332188fb/pillow-12.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7d6daa89a00b58c37cb1747ec9fb7ac3bc5ffd5949f5888657dfddde6d1312e0", size = 4660472, upload-time = "2026-01-02T09:12:00.798Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/5273a99478956a099d533c4f46cbaa19fd69d606624f4334b85e50987a08/pillow-12.1.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e2479c7f02f9d505682dc47df8c0ea1fc5e264c4d1629a5d63fe3e2334b89554", size = 6268974, upload-time = "2026-01-02T09:12:02.572Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/26/0bf714bc2e73d5267887d47931d53c4ceeceea6978148ed2ab2a4e6463c4/pillow-12.1.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f188d580bd870cda1e15183790d1cc2fa78f666e76077d103edf048eed9c356e", size = 8073070, upload-time = "2026-01-02T09:12:04.75Z" }, + { url = "https://files.pythonhosted.org/packages/43/cf/1ea826200de111a9d65724c54f927f3111dc5ae297f294b370a670c17786/pillow-12.1.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0fde7ec5538ab5095cc02df38ee99b0443ff0e1c847a045554cf5f9af1f4aa82", size = 6380176, upload-time = "2026-01-02T09:12:06.626Z" }, + { url = "https://files.pythonhosted.org/packages/03/e0/7938dd2b2013373fd85d96e0f38d62b7a5a262af21ac274250c7ca7847c9/pillow-12.1.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ed07dca4a8464bada6139ab38f5382f83e5f111698caf3191cb8dbf27d908b4", size = 7067061, upload-time = "2026-01-02T09:12:08.624Z" }, + { url = "https://files.pythonhosted.org/packages/86/ad/a2aa97d37272a929a98437a8c0ac37b3cf012f4f8721e1bd5154699b2518/pillow-12.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f45bd71d1fa5e5749587613037b172e0b3b23159d1c00ef2fc920da6f470e6f0", size = 6491824, upload-time = "2026-01-02T09:12:10.488Z" }, + { url = "https://files.pythonhosted.org/packages/a4/44/80e46611b288d51b115826f136fb3465653c28f491068a72d3da49b54cd4/pillow-12.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:277518bf4fe74aa91489e1b20577473b19ee70fb97c374aa50830b279f25841b", size = 7190911, upload-time = "2026-01-02T09:12:12.772Z" }, + { url = "https://files.pythonhosted.org/packages/86/77/eacc62356b4cf81abe99ff9dbc7402750044aed02cfd6a503f7c6fc11f3e/pillow-12.1.0-cp313-cp313t-win32.whl", hash = "sha256:7315f9137087c4e0ee73a761b163fc9aa3b19f5f606a7fc08d83fd3e4379af65", size = 6336445, upload-time = "2026-01-02T09:12:14.775Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/3c/57d81d0b74d218706dafccb87a87ea44262c43eef98eb3b164fd000e0491/pillow-12.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:0ddedfaa8b5f0b4ffbc2fa87b556dc59f6bb4ecb14a53b33f9189713ae8053c0", size = 7045354, upload-time = "2026-01-02T09:12:16.599Z" }, + { url = "https://files.pythonhosted.org/packages/ac/82/8b9b97bba2e3576a340f93b044a3a3a09841170ab4c1eb0d5c93469fd32f/pillow-12.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:80941e6d573197a0c28f394753de529bb436b1ca990ed6e765cf42426abc39f8", size = 2454547, upload-time = "2026-01-02T09:12:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/8c/87/bdf971d8bbcf80a348cc3bacfcb239f5882100fe80534b0ce67a784181d8/pillow-12.1.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:5cb7bc1966d031aec37ddb9dcf15c2da5b2e9f7cc3ca7c54473a20a927e1eb91", size = 4062533, upload-time = "2026-01-02T09:12:20.791Z" }, + { url = "https://files.pythonhosted.org/packages/ff/4f/5eb37a681c68d605eb7034c004875c81f86ec9ef51f5be4a63eadd58859a/pillow-12.1.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:97e9993d5ed946aba26baf9c1e8cf18adbab584b99f452ee72f7ee8acb882796", size = 4138546, upload-time = "2026-01-02T09:12:23.664Z" }, + { url = "https://files.pythonhosted.org/packages/11/6d/19a95acb2edbace40dcd582d077b991646b7083c41b98da4ed7555b59733/pillow-12.1.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:414b9a78e14ffeb98128863314e62c3f24b8a86081066625700b7985b3f529bd", size = 3601163, upload-time = "2026-01-02T09:12:26.338Z" }, + { url = "https://files.pythonhosted.org/packages/fc/36/2b8138e51cb42e4cc39c3297713455548be855a50558c3ac2beebdc251dd/pillow-12.1.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e6bdb408f7c9dd2a5ff2b14a3b0bb6d4deb29fb9961e6eb3ae2031ae9a5cec13", size = 5266086, upload-time = "2026-01-02T09:12:28.782Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/4b/649056e4d22e1caa90816bf99cef0884aed607ed38075bd75f091a607a38/pillow-12.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3413c2ae377550f5487991d444428f1a8ae92784aac79caa8b1e3b89b175f77e", size = 4657344, upload-time = "2026-01-02T09:12:31.117Z" }, + { url = "https://files.pythonhosted.org/packages/6c/6b/c5742cea0f1ade0cd61485dc3d81f05261fc2276f537fbdc00802de56779/pillow-12.1.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e5dcbe95016e88437ecf33544ba5db21ef1b8dd6e1b434a2cb2a3d605299e643", size = 6232114, upload-time = "2026-01-02T09:12:32.936Z" }, + { url = "https://files.pythonhosted.org/packages/bf/8f/9f521268ce22d63991601aafd3d48d5ff7280a246a1ef62d626d67b44064/pillow-12.1.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d0a7735df32ccbcc98b98a1ac785cc4b19b580be1bdf0aeb5c03223220ea09d5", size = 8042708, upload-time = "2026-01-02T09:12:34.78Z" }, + { url = "https://files.pythonhosted.org/packages/1a/eb/257f38542893f021502a1bbe0c2e883c90b5cff26cc33b1584a841a06d30/pillow-12.1.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c27407a2d1b96774cbc4a7594129cc027339fd800cd081e44497722ea1179de", size = 6347762, upload-time = "2026-01-02T09:12:36.748Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5a/8ba375025701c09b309e8d5163c5a4ce0102fa86bbf8800eb0d7ac87bc51/pillow-12.1.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15c794d74303828eaa957ff8070846d0efe8c630901a1c753fdc63850e19ecd9", size = 7039265, upload-time = "2026-01-02T09:12:39.082Z" }, + { url = "https://files.pythonhosted.org/packages/cf/dc/cf5e4cdb3db533f539e88a7bbf9f190c64ab8a08a9bc7a4ccf55067872e4/pillow-12.1.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c990547452ee2800d8506c4150280757f88532f3de2a58e3022e9b179107862a", size = 6462341, upload-time = "2026-01-02T09:12:40.946Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/47/0291a25ac9550677e22eda48510cfc4fa4b2ef0396448b7fbdc0a6946309/pillow-12.1.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b63e13dd27da389ed9475b3d28510f0f954bca0041e8e551b2a4eb1eab56a39a", size = 7165395, upload-time = "2026-01-02T09:12:42.706Z" }, + { url = "https://files.pythonhosted.org/packages/4f/4c/e005a59393ec4d9416be06e6b45820403bb946a778e39ecec62f5b2b991e/pillow-12.1.0-cp314-cp314-win32.whl", hash = "sha256:1a949604f73eb07a8adab38c4fe50791f9919344398bdc8ac6b307f755fc7030", size = 6431413, upload-time = "2026-01-02T09:12:44.944Z" }, + { url = "https://files.pythonhosted.org/packages/1c/af/f23697f587ac5f9095d67e31b81c95c0249cd461a9798a061ed6709b09b5/pillow-12.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:4f9f6a650743f0ddee5593ac9e954ba1bdbc5e150bc066586d4f26127853ab94", size = 7176779, upload-time = "2026-01-02T09:12:46.727Z" }, + { url = "https://files.pythonhosted.org/packages/b3/36/6a51abf8599232f3e9afbd16d52829376a68909fe14efe29084445db4b73/pillow-12.1.0-cp314-cp314-win_arm64.whl", hash = "sha256:808b99604f7873c800c4840f55ff389936ef1948e4e87645eaf3fccbc8477ac4", size = 2543105, upload-time = "2026-01-02T09:12:49.243Z" }, + { url = "https://files.pythonhosted.org/packages/82/54/2e1dd20c8749ff225080d6ba465a0cab4387f5db0d1c5fb1439e2d99923f/pillow-12.1.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bc11908616c8a283cf7d664f77411a5ed2a02009b0097ff8abbba5e79128ccf2", size = 5268571, upload-time = "2026-01-02T09:12:51.11Z" }, + { url = "https://files.pythonhosted.org/packages/57/61/571163a5ef86ec0cf30d265ac2a70ae6fc9e28413d1dc94fa37fae6bda89/pillow-12.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:896866d2d436563fa2a43a9d72f417874f16b5545955c54a64941e87c1376c61", size = 4660426, upload-time = "2026-01-02T09:12:52.865Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/e1/53ee5163f794aef1bf84243f755ee6897a92c708505350dd1923f4afec48/pillow-12.1.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8e178e3e99d3c0ea8fc64b88447f7cac8ccf058af422a6cedc690d0eadd98c51", size = 6269908, upload-time = "2026-01-02T09:12:54.884Z" }, + { url = "https://files.pythonhosted.org/packages/bc/0b/b4b4106ff0ee1afa1dc599fde6ab230417f800279745124f6c50bcffed8e/pillow-12.1.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:079af2fb0c599c2ec144ba2c02766d1b55498e373b3ac64687e43849fbbef5bc", size = 8074733, upload-time = "2026-01-02T09:12:56.802Z" }, + { url = "https://files.pythonhosted.org/packages/19/9f/80b411cbac4a732439e629a26ad3ef11907a8c7fc5377b7602f04f6fe4e7/pillow-12.1.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdec5e43377761c5dbca620efb69a77f6855c5a379e32ac5b158f54c84212b14", size = 6381431, upload-time = "2026-01-02T09:12:58.823Z" }, + { url = "https://files.pythonhosted.org/packages/8f/b7/d65c45db463b66ecb6abc17c6ba6917a911202a07662247e1355ce1789e7/pillow-12.1.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:565c986f4b45c020f5421a4cea13ef294dde9509a8577f29b2fc5edc7587fff8", size = 7068529, upload-time = "2026-01-02T09:13:00.885Z" }, + { url = "https://files.pythonhosted.org/packages/50/96/dfd4cd726b4a45ae6e3c669fc9e49deb2241312605d33aba50499e9d9bd1/pillow-12.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:43aca0a55ce1eefc0aefa6253661cb54571857b1a7b2964bd8a1e3ef4b729924", size = 6492981, upload-time = "2026-01-02T09:13:03.314Z" }, + { url = "https://files.pythonhosted.org/packages/4d/1c/b5dc52cf713ae46033359c5ca920444f18a6359ce1020dd3e9c553ea5bc6/pillow-12.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0deedf2ea233722476b3a81e8cdfbad786f7adbed5d848469fa59fe52396e4ef", size = 7191878, upload-time = "2026-01-02T09:13:05.276Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/26/c4188248bd5edaf543864fe4834aebe9c9cb4968b6f573ce014cc42d0720/pillow-12.1.0-cp314-cp314t-win32.whl", hash = "sha256:b17fbdbe01c196e7e159aacb889e091f28e61020a8abeac07b68079b6e626988", size = 6438703, upload-time = "2026-01-02T09:13:07.491Z" }, + { url = "https://files.pythonhosted.org/packages/b8/0e/69ed296de8ea05cb03ee139cee600f424ca166e632567b2d66727f08c7ed/pillow-12.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27b9baecb428899db6c0de572d6d305cfaf38ca1596b5c0542a5182e3e74e8c6", size = 7182927, upload-time = "2026-01-02T09:13:09.841Z" }, + { url = "https://files.pythonhosted.org/packages/fc/f5/68334c015eed9b5cff77814258717dec591ded209ab5b6fb70e2ae873d1d/pillow-12.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:f61333d817698bdcdd0f9d7793e365ac3d2a21c1f1eb02b32ad6aefb8d8ea831", size = 2545104, upload-time = "2026-01-02T09:13:12.068Z" }, ] [[package]] @@ -1199,15 +1179,15 @@ wheels = [ [[package]] name = "pydantic-extra-types" -version = "2.10.6" +version = "2.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3a/10/fb64987804cde41bcc39d9cd757cd5f2bb5d97b389d81aa70238b14b8a7e/pydantic_extra_types-2.10.6.tar.gz", hash = "sha256:c63d70bf684366e6bbe1f4ee3957952ebe6973d41e7802aea0b770d06b116aeb", size = 141858, upload-time = "2025-10-08T13:47:49.483Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/35/2fee58b1316a73e025728583d3b1447218a97e621933fc776fb8c0f2ebdd/pydantic_extra_types-2.11.0.tar.gz", hash = "sha256:4e9991959d045b75feb775683437a97991d02c138e00b59176571db9ce634f0e", size = 157226, upload-time = "2025-12-31T16:18:27.944Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/04/5c918669096da8d1c9ec7bb716bd72e755526103a61bc5e76a3e4fb23b53/pydantic_extra_types-2.10.6-py3-none-any.whl", hash = 
"sha256:6106c448316d30abf721b5b9fecc65e983ef2614399a24142d689c7546cc246a", size = 40949, upload-time = "2025-10-08T13:47:48.268Z" }, + { url = "https://files.pythonhosted.org/packages/fe/17/fabd56da47096d240dd45ba627bead0333b0cf0ee8ada9bec579287dadf3/pydantic_extra_types-2.11.0-py3-none-any.whl", hash = "sha256:84b864d250a0fc62535b7ec591e36f2c5b4d1325fa0017eb8cda9aeb63b374a6", size = 74296, upload-time = "2025-12-31T16:18:26.38Z" }, ] [[package]] @@ -1519,6 +1499,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" }, ] +[[package]] +name = "ruff" +version = "0.14.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/08/52232a877978dd8f9cf2aeddce3e611b40a63287dfca29b6b8da791f5e8d/ruff-0.14.10.tar.gz", hash = "sha256:9a2e830f075d1a42cd28420d7809ace390832a490ed0966fe373ba288e77aaf4", size = 5859763, upload-time = "2025-12-18T19:28:57.98Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/01/933704d69f3f05ee16ef11406b78881733c186fe14b6a46b05cfcaf6d3b2/ruff-0.14.10-py3-none-linux_armv6l.whl", hash = "sha256:7a3ce585f2ade3e1f29ec1b92df13e3da262178df8c8bdf876f48fa0e8316c49", size = 13527080, upload-time = "2025-12-18T19:29:25.642Z" }, + { url = "https://files.pythonhosted.org/packages/df/58/a0349197a7dfa603ffb7f5b0470391efa79ddc327c1e29c4851e85b09cc5/ruff-0.14.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:674f9be9372907f7257c51f1d4fc902cb7cf014b9980152b802794317941f08f", size = 13797320, upload-time = "2025-12-18T19:29:02.571Z" }, + { url = "https://files.pythonhosted.org/packages/7b/82/36be59f00a6082e38c23536df4e71cdbc6af8d7c707eade97fcad5c98235/ruff-0.14.10-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:d85713d522348837ef9df8efca33ccb8bd6fcfc86a2cde3ccb4bc9d28a18003d", size = 12918434, upload-time = "2025-12-18T19:28:51.202Z" }, + { url = "https://files.pythonhosted.org/packages/a6/00/45c62a7f7e34da92a25804f813ebe05c88aa9e0c25e5cb5a7d23dd7450e3/ruff-0.14.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6987ebe0501ae4f4308d7d24e2d0fe3d7a98430f5adfd0f1fead050a740a3a77", size = 13371961, upload-time = "2025-12-18T19:29:04.991Z" }, + { url = "https://files.pythonhosted.org/packages/40/31/a5906d60f0405f7e57045a70f2d57084a93ca7425f22e1d66904769d1628/ruff-0.14.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16a01dfb7b9e4eee556fbfd5392806b1b8550c9b4a9f6acd3dbe6812b193c70a", size = 13275629, upload-time = "2025-12-18T19:29:21.381Z" }, + { url = "https://files.pythonhosted.org/packages/3e/60/61c0087df21894cf9d928dc04bcd4fb10e8b2e8dca7b1a276ba2155b2002/ruff-0.14.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7165d31a925b7a294465fa81be8c12a0e9b60fb02bf177e79067c867e71f8b1f", size = 14029234, upload-time = "2025-12-18T19:29:00.132Z" }, + { url = "https://files.pythonhosted.org/packages/44/84/77d911bee3b92348b6e5dab5a0c898d87084ea03ac5dc708f46d88407def/ruff-0.14.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c561695675b972effb0c0a45db233f2c816ff3da8dcfbe7dfc7eed625f218935", size = 15449890, upload-time = "2025-12-18T19:28:53.573Z" }, + { url = "https://files.pythonhosted.org/packages/e9/36/480206eaefa24a7ec321582dda580443a8f0671fdbf6b1c80e9c3e93a16a/ruff-0.14.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bb98fcbbc61725968893682fd4df8966a34611239c9fd07a1f6a07e7103d08e", size = 15123172, upload-time = "2025-12-18T19:29:23.453Z" }, + { url = "https://files.pythonhosted.org/packages/5c/38/68e414156015ba80cef5473d57919d27dfb62ec804b96180bafdeaf0e090/ruff-0.14.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f24b47993a9d8cb858429e97bdf8544c78029f09b520af615c1d261bf827001d", size = 14460260, upload-time = "2025-12-18T19:29:27.808Z" }, + { url = "https://files.pythonhosted.org/packages/b3/19/9e050c0dca8aba824d67cc0db69fb459c28d8cd3f6855b1405b3f29cc91d/ruff-0.14.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59aabd2e2c4fd614d2862e7939c34a532c04f1084476d6833dddef4afab87e9f", size = 14229978, upload-time = "2025-12-18T19:29:11.32Z" }, + { url = "https://files.pythonhosted.org/packages/51/eb/e8dd1dd6e05b9e695aa9dd420f4577debdd0f87a5ff2fedda33c09e9be8c/ruff-0.14.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:213db2b2e44be8625002dbea33bb9c60c66ea2c07c084a00d55732689d697a7f", size = 14338036, upload-time = "2025-12-18T19:29:09.184Z" }, + { url = "https://files.pythonhosted.org/packages/6a/12/f3e3a505db7c19303b70af370d137795fcfec136d670d5de5391e295c134/ruff-0.14.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b914c40ab64865a17a9a5b67911d14df72346a634527240039eb3bd650e5979d", size = 13264051, upload-time = "2025-12-18T19:29:13.431Z" }, + { url = "https://files.pythonhosted.org/packages/08/64/8c3a47eaccfef8ac20e0484e68e0772013eb85802f8a9f7603ca751eb166/ruff-0.14.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1484983559f026788e3a5c07c81ef7d1e97c1c78ed03041a18f75df104c45405", size = 13283998, upload-time = "2025-12-18T19:29:06.994Z" }, + { url = "https://files.pythonhosted.org/packages/12/84/534a5506f4074e5cc0529e5cd96cfc01bb480e460c7edf5af70d2bcae55e/ruff-0.14.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c70427132db492d25f982fffc8d6c7535cc2fd2c83fc8888f05caaa248521e60", size = 13601891, upload-time = "2025-12-18T19:28:55.811Z" }, + { url = "https://files.pythonhosted.org/packages/0d/1e/14c916087d8598917dbad9b2921d340f7884824ad6e9c55de948a93b106d/ruff-0.14.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5bcf45b681e9f1ee6445d317ce1fa9d6cba9a6049542d1c3d5b5958986be8830", size = 14336660, upload-time = 
"2025-12-18T19:29:16.531Z" }, + { url = "https://files.pythonhosted.org/packages/f2/1c/d7b67ab43f30013b47c12b42d1acd354c195351a3f7a1d67f59e54227ede/ruff-0.14.10-py3-none-win32.whl", hash = "sha256:104c49fc7ab73f3f3a758039adea978869a918f31b73280db175b43a2d9b51d6", size = 13196187, upload-time = "2025-12-18T19:29:19.006Z" }, + { url = "https://files.pythonhosted.org/packages/fb/9c/896c862e13886fae2af961bef3e6312db9ebc6adc2b156fe95e615dee8c1/ruff-0.14.10-py3-none-win_amd64.whl", hash = "sha256:466297bd73638c6bdf06485683e812db1c00c7ac96d4ddd0294a338c62fdc154", size = 14661283, upload-time = "2025-12-18T19:29:30.16Z" }, + { url = "https://files.pythonhosted.org/packages/74/31/b0e29d572670dca3674eeee78e418f20bdf97fa8aa9ea71380885e175ca0/ruff-0.14.10-py3-none-win_arm64.whl", hash = "sha256:e51d046cf6dda98a4633b8a8a771451107413b0f07183b2bef03f075599e44e6", size = 13729839, upload-time = "2025-12-18T19:28:48.636Z" }, +] + [[package]] name = "sabnzbd-api" version = "0.1.2"