Adding some more new lints (#393)

Enable `UP` and `TRY` lint
This commit is contained in:
Marcel Hellwig
2026-02-01 18:04:15 +01:00
committed by GitHub
parent 311e625eee
commit 96b84d45db
33 changed files with 345 additions and 340 deletions

View File

@@ -1,5 +1,4 @@
from collections.abc import AsyncGenerator from collections.abc import AsyncGenerator
from typing import Optional
from fastapi import Depends from fastapi import Depends
from fastapi_users.db import ( from fastapi_users.db import (
@@ -17,7 +16,7 @@ from media_manager.database import Base, build_db_url
class OAuthAccount(SQLAlchemyBaseOAuthAccountTableUUID, Base): class OAuthAccount(SQLAlchemyBaseOAuthAccountTableUUID, Base):
access_token: Mapped[str] = mapped_column(String(length=4096), nullable=False) access_token: Mapped[str] = mapped_column(String(length=4096), nullable=False)
refresh_token: Mapped[Optional[str]] = mapped_column( refresh_token: Mapped[str | None] = mapped_column(
String(length=4096), nullable=True String(length=4096), nullable=True
) )
@@ -34,12 +33,12 @@ engine = create_async_engine(
async_session_maker = async_sessionmaker(engine, expire_on_commit=False) async_session_maker = async_sessionmaker(engine, expire_on_commit=False)
async def get_async_session() -> AsyncGenerator[AsyncSession, None]: async def get_async_session() -> AsyncGenerator[AsyncSession]:
async with async_session_maker() as session: async with async_session_maker() as session:
yield session yield session
async def get_user_db( async def get_user_db(
session: AsyncSession = Depends(get_async_session), session: AsyncSession = Depends(get_async_session),
) -> AsyncGenerator[SQLAlchemyUserDatabase, None]: ) -> AsyncGenerator[SQLAlchemyUserDatabase]:
yield SQLAlchemyUserDatabase(session, User, OAuthAccount) yield SQLAlchemyUserDatabase(session, User, OAuthAccount)

View File

@@ -1,5 +1,5 @@
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
from typing import AsyncGenerator
from fastapi import APIRouter, Depends, FastAPI, status from fastapi import APIRouter, Depends, FastAPI, status
from fastapi_users.router import get_oauth_router from fastapi_users.router import get_oauth_router

View File

@@ -1,7 +1,8 @@
import contextlib import contextlib
import logging import logging
import uuid import uuid
from typing import Any, AsyncGenerator, Optional, override from collections.abc import AsyncGenerator
from typing import Any, override
from fastapi import Depends, Request from fastapi import Depends, Request
from fastapi.responses import RedirectResponse, Response from fastapi.responses import RedirectResponse, Response
@@ -49,7 +50,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
self, self,
user: models.UP, user: models.UP,
update_dict: dict[str, Any], update_dict: dict[str, Any],
request: Optional[Request] = None, request: Request | None = None,
) -> None: ) -> None:
log.info(f"User {user.id} has been updated.") log.info(f"User {user.id} has been updated.")
if update_dict.get("is_superuser"): if update_dict.get("is_superuser"):
@@ -60,7 +61,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
@override @override
async def on_after_register( async def on_after_register(
self, user: User, request: Optional[Request] = None self, user: User, request: Request | None = None
) -> None: ) -> None:
log.info(f"User {user.id} has registered.") log.info(f"User {user.id} has registered.")
if user.email in config.admin_emails: if user.email in config.admin_emails:
@@ -69,7 +70,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
@override @override
async def on_after_forgot_password( async def on_after_forgot_password(
self, user: User, token: str, request: Optional[Request] = None self, user: User, token: str, request: Request | None = None
) -> None: ) -> None:
link = f"{MediaManagerConfig().misc.frontend_url}web/login/reset-password?token={token}" link = f"{MediaManagerConfig().misc.frontend_url}web/login/reset-password?token={token}"
log.info(f"User {user.id} has forgot their password. Reset Link: {link}") log.info(f"User {user.id} has forgot their password. Reset Link: {link}")
@@ -100,28 +101,26 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
@override @override
async def on_after_reset_password( async def on_after_reset_password(
self, user: User, request: Optional[Request] = None self, user: User, request: Request | None = None
) -> None: ) -> None:
log.info(f"User {user.id} has reset their password.") log.info(f"User {user.id} has reset their password.")
@override @override
async def on_after_request_verify( async def on_after_request_verify(
self, user: User, token: str, request: Optional[Request] = None self, user: User, token: str, request: Request | None = None
) -> None: ) -> None:
log.info( log.info(
f"Verification requested for user {user.id}. Verification token: {token}" f"Verification requested for user {user.id}. Verification token: {token}"
) )
@override @override
async def on_after_verify( async def on_after_verify(self, user: User, request: Request | None = None) -> None:
self, user: User, request: Optional[Request] = None
) -> None:
log.info(f"User {user.id} has been verified") log.info(f"User {user.id} has been verified")
async def get_user_manager( async def get_user_manager(
user_db: SQLAlchemyUserDatabase = Depends(get_user_db), user_db: SQLAlchemyUserDatabase = Depends(get_user_db),
) -> AsyncGenerator[UserManager, None]: ) -> AsyncGenerator[UserManager]:
yield UserManager(user_db) yield UserManager(user_db)
@@ -176,8 +175,8 @@ async def create_default_admin_user() -> None:
log.info( log.info(
f"Found {user_count} existing users. Skipping default user creation." f"Found {user_count} existing users. Skipping default user creation."
) )
except Exception as e: except Exception:
log.error(f"Failed to create default admin user: {e}") log.exception("Failed to create default admin user")
log.info( log.info(
"You can create an admin user manually by registering with an email from the admin_emails list in your config." "You can create an admin user manually by registering with an email from the admin_emails list in your config."
) )

View File

@@ -1,7 +1,6 @@
import logging import logging
import os import os
from pathlib import Path from pathlib import Path
from typing import Tuple, Type
from pydantic import AnyHttpUrl from pydantic import AnyHttpUrl
from pydantic_settings import ( from pydantic_settings import (
@@ -71,12 +70,12 @@ class MediaManagerConfig(BaseSettings):
@classmethod @classmethod
def settings_customise_sources( def settings_customise_sources(
cls, cls,
settings_cls: Type[BaseSettings], settings_cls: type[BaseSettings],
init_settings: PydanticBaseSettingsSource, init_settings: PydanticBaseSettingsSource,
env_settings: PydanticBaseSettingsSource, env_settings: PydanticBaseSettingsSource,
dotenv_settings: PydanticBaseSettingsSource, dotenv_settings: PydanticBaseSettingsSource,
file_secret_settings: PydanticBaseSettingsSource, file_secret_settings: PydanticBaseSettingsSource,
) -> Tuple[PydanticBaseSettingsSource, ...]: ) -> tuple[PydanticBaseSettingsSource, ...]:
return ( return (
init_settings, init_settings,
env_settings, env_settings,

View File

@@ -1,7 +1,8 @@
import logging import logging
import os import os
from collections.abc import Generator
from contextvars import ContextVar from contextvars import ContextVar
from typing import Annotated, Any, Generator, Optional from typing import Annotated
from fastapi import Depends from fastapi import Depends
from sqlalchemy import create_engine from sqlalchemy import create_engine
@@ -15,8 +16,8 @@ log = logging.getLogger(__name__)
Base = declarative_base() Base = declarative_base()
engine: Optional[Engine] = None engine: Engine | None = None
SessionLocal: Optional[sessionmaker] = None SessionLocal: sessionmaker | None = None
def build_db_url( def build_db_url(
@@ -83,7 +84,7 @@ def get_engine() -> Engine:
return engine return engine
def get_session() -> Generator[Session, Any, None]: def get_session() -> Generator[Session]:
if SessionLocal is None: if SessionLocal is None:
msg = "Session factory not initialized. Call init_engine(...) first." msg = "Session factory not initialized. Call init_engine(...) first."
raise RuntimeError(msg) raise RuntimeError(msg)
@@ -91,9 +92,9 @@ def get_session() -> Generator[Session, Any, None]:
try: try:
yield db yield db
db.commit() db.commit()
except Exception as e: except Exception:
db.rollback() db.rollback()
log.critical(f"error occurred: {e}") log.critical("error occurred", exc_info=True)
raise raise
finally: finally:
db.close() db.close()

View File

@@ -4,6 +4,13 @@ from psycopg.errors import UniqueViolation
from sqlalchemy.exc import IntegrityError from sqlalchemy.exc import IntegrityError
class RenameError(Exception):
"""Error when renaming something"""
def __init__(self, message: str = "Failed to rename source directory") -> None:
super().__init__(message)
class MediaManagerError(Exception): class MediaManagerError(Exception):
"""Base exception for MediaManager errors.""" """Base exception for MediaManager errors."""

View File

@@ -36,10 +36,8 @@ def run_filesystem_checks(config: MediaManagerConfig, log: Logger) -> None:
if not test_hardlink.samefile(test_torrent_file): if not test_hardlink.samefile(test_torrent_file):
log.critical("Hardlink creation failed!") log.critical("Hardlink creation failed!")
log.info("Successfully created test hardlink in TV directory") log.info("Successfully created test hardlink in TV directory")
except OSError as e: except OSError:
log.error( log.exception("Hardlink creation failed, falling back to copying files")
f"Hardlink creation failed, falling back to copying files. Error: {e}"
)
shutil.copy(src=test_torrent_file, dst=test_hardlink) shutil.copy(src=test_torrent_file, dst=test_hardlink)
finally: finally:
test_hardlink.unlink() test_hardlink.unlink()

View File

@@ -46,8 +46,8 @@ class Jackett(GenericIndexer, TorznabMixin):
result = future.result() result = future.result()
if result is not None: if result is not None:
responses.extend(result) responses.extend(result)
except Exception as e: except Exception:
log.error(f"search result failed with: {e}") log.exception("Searching failed")
return responses return responses

View File

@@ -1,6 +1,6 @@
import logging import logging
import xml.etree.ElementTree as ET import xml.etree.ElementTree as ET
from datetime import datetime, timezone from datetime import UTC, datetime
from email.utils import parsedate_to_datetime from email.utils import parsedate_to_datetime
from media_manager.indexer.schemas import IndexerQueryResult from media_manager.indexer.schemas import IndexerQueryResult
@@ -39,7 +39,7 @@ class TorznabMixin:
posted_date = parsedate_to_datetime( posted_date = parsedate_to_datetime(
attribute.attrib["value"] attribute.attrib["value"]
) )
now = datetime.now(timezone.utc) now = datetime.now(UTC)
age = int((now - posted_date).total_seconds()) age = int((now - posted_date).total_seconds())
else: else:
if attribute.attrib["name"] == "seeders": if attribute.attrib["name"] == "seeders":
@@ -79,6 +79,6 @@ class TorznabMixin:
indexer=indexer_name, indexer=indexer_name,
) )
result_list.append(result) result_list.append(result)
except Exception as e: except Exception:
log.error(f"1 Torznab search result errored with error: {e}") log.exception("1 Torznab search result failed")
return result_list return result_list

View File

@@ -13,7 +13,9 @@ IndexerQueryResultId = typing.NewType("IndexerQueryResultId", UUID)
class IndexerQueryResult(BaseModel): class IndexerQueryResult(BaseModel):
model_config = ConfigDict(from_attributes=True) model_config = ConfigDict(from_attributes=True)
id: IndexerQueryResultId = pydantic.Field(default_factory=lambda: IndexerQueryResultId(uuid4())) id: IndexerQueryResultId = pydantic.Field(
default_factory=lambda: IndexerQueryResultId(uuid4())
)
title: str title: str
download_url: str = pydantic.Field( download_url: str = pydantic.Field(
exclude=True, exclude=True,

View File

@@ -45,9 +45,9 @@ class IndexerService:
log.debug( log.debug(
f"Indexer {indexer.__class__.__name__} returned {len(indexer_results)} results for query: {query}" f"Indexer {indexer.__class__.__name__} returned {len(indexer_results)} results for query: {query}"
) )
except Exception as e: except Exception:
log.error( log.exception(
f"Indexer {indexer.__class__.__name__} failed for query '{query}': {e}" f"Indexer {indexer.__class__.__name__} failed for query '{query}'"
) )
for result in results: for result in results:
@@ -65,9 +65,9 @@ class IndexerService:
indexer_results = indexer.search_movie(query=query, movie=movie) indexer_results = indexer.search_movie(query=query, movie=movie)
if indexer_results: if indexer_results:
results.extend(indexer_results) results.extend(indexer_results)
except Exception as e: except Exception:
log.error( log.exception(
f"Indexer {indexer.__class__.__name__} failed for movie search '{query}': {e}" f"Indexer {indexer.__class__.__name__} failed for movie search '{query}'"
) )
for result in results: for result in results:
@@ -87,9 +87,9 @@ class IndexerService:
) )
if indexer_results: if indexer_results:
results.extend(indexer_results) results.extend(indexer_results)
except Exception as e: except Exception:
log.error( log.exception(
f"Indexer {indexer.__class__.__name__} failed for season search '{query}': {e}" f"Indexer {indexer.__class__.__name__} failed for season search '{query}'"
) )
for result in results: for result in results:

View File

@@ -149,8 +149,11 @@ def follow_redirects_to_final_torrent_url(
raise RuntimeError(msg) raise RuntimeError(msg)
except requests.exceptions.RequestException as e: except requests.exceptions.RequestException as e:
log.debug(f"An error occurred during the request for {initial_url}: {e}") log.debug(
msg = f"An error occurred during the request: {e}" f"An error occurred during the request for {initial_url}",
exc_info=True,
)
msg = "An error occurred during the request"
raise RuntimeError(msg) from e raise RuntimeError(msg) from e
return current_url return current_url

View File

@@ -1,7 +1,7 @@
import logging import logging
import os import os
import sys import sys
from datetime import datetime, timezone from datetime import UTC, datetime
from logging.config import dictConfig from logging.config import dictConfig
from pathlib import Path from pathlib import Path
from typing import override from typing import override
@@ -12,7 +12,7 @@ from pythonjsonlogger.json import JsonFormatter
class ISOJsonFormatter(JsonFormatter): class ISOJsonFormatter(JsonFormatter):
@override @override
def formatTime(self, record: logging.LogRecord, datefmt: str | None = None) -> str: def formatTime(self, record: logging.LogRecord, datefmt: str | None = None) -> str:
dt = datetime.fromtimestamp(record.created, tz=timezone.utc) dt = datetime.fromtimestamp(record.created, tz=UTC)
return dt.isoformat(timespec="milliseconds").replace("+00:00", "Z") return dt.isoformat(timespec="milliseconds").replace("+00:00", "Z")

View File

@@ -18,15 +18,11 @@ class AbstractMetadataProvider(ABC):
pass pass
@abstractmethod @abstractmethod
def get_show_metadata( def get_show_metadata(self, show_id: int, language: str | None = None) -> Show:
self, show_id: int, language: str | None = None
) -> Show:
raise NotImplementedError() raise NotImplementedError()
@abstractmethod @abstractmethod
def get_movie_metadata( def get_movie_metadata(self, movie_id: int, language: str | None = None) -> Movie:
self, movie_id: int, language: str | None = None
) -> Movie:
raise NotImplementedError() raise NotImplementedError()
@abstractmethod @abstractmethod

View File

@@ -51,7 +51,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
response.raise_for_status() response.raise_for_status()
return response.json() return response.json()
except requests.RequestException as e: except requests.RequestException as e:
log.error(f"TMDB API error getting show metadata for ID {show_id}: {e}") log.exception(f"TMDB API error getting show metadata for ID {show_id}")
if notification_manager.is_configured(): if notification_manager.is_configured():
notification_manager.send_notification( notification_manager.send_notification(
title="TMDB API Error", title="TMDB API Error",
@@ -68,7 +68,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
response.raise_for_status() response.raise_for_status()
return response.json() return response.json()
except requests.RequestException as e: except requests.RequestException as e:
log.error(f"TMDB API error getting show external IDs for ID {show_id}: {e}") log.exception(f"TMDB API error getting show external IDs for ID {show_id}")
if notification_manager.is_configured(): if notification_manager.is_configured():
notification_manager.send_notification( notification_manager.send_notification(
title="TMDB API Error", title="TMDB API Error",
@@ -90,8 +90,8 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
response.raise_for_status() response.raise_for_status()
return response.json() return response.json()
except requests.RequestException as e: except requests.RequestException as e:
log.error( log.exception(
f"TMDB API error getting season {season_number} metadata for show ID {show_id}: {e}" f"TMDB API error getting season {season_number} metadata for show ID {show_id}"
) )
if notification_manager.is_configured(): if notification_manager.is_configured():
notification_manager.send_notification( notification_manager.send_notification(
@@ -113,7 +113,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
response.raise_for_status() response.raise_for_status()
return response.json() return response.json()
except requests.RequestException as e: except requests.RequestException as e:
log.error(f"TMDB API error searching TV shows with query '{query}': {e}") log.exception(f"TMDB API error searching TV shows with query '{query}'")
if notification_manager.is_configured(): if notification_manager.is_configured():
notification_manager.send_notification( notification_manager.send_notification(
title="TMDB API Error", title="TMDB API Error",
@@ -131,7 +131,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
response.raise_for_status() response.raise_for_status()
return response.json() return response.json()
except requests.RequestException as e: except requests.RequestException as e:
log.error(f"TMDB API error getting trending TV: {e}") log.exception("TMDB API error getting trending TV")
if notification_manager.is_configured(): if notification_manager.is_configured():
notification_manager.send_notification( notification_manager.send_notification(
title="TMDB API Error", title="TMDB API Error",
@@ -151,7 +151,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
response.raise_for_status() response.raise_for_status()
return response.json() return response.json()
except requests.RequestException as e: except requests.RequestException as e:
log.error(f"TMDB API error getting movie metadata for ID {movie_id}: {e}") log.exception(f"TMDB API error getting movie metadata for ID {movie_id}")
if notification_manager.is_configured(): if notification_manager.is_configured():
notification_manager.send_notification( notification_manager.send_notification(
title="TMDB API Error", title="TMDB API Error",
@@ -167,8 +167,8 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
response.raise_for_status() response.raise_for_status()
return response.json() return response.json()
except requests.RequestException as e: except requests.RequestException as e:
log.error( log.exception(
f"TMDB API error getting movie external IDs for ID {movie_id}: {e}" f"TMDB API error getting movie external IDs for ID {movie_id}"
) )
if notification_manager.is_configured(): if notification_manager.is_configured():
notification_manager.send_notification( notification_manager.send_notification(
@@ -190,7 +190,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
response.raise_for_status() response.raise_for_status()
return response.json() return response.json()
except requests.RequestException as e: except requests.RequestException as e:
log.error(f"TMDB API error searching movies with query '{query}': {e}") log.exception(f"TMDB API error searching movies with query '{query}'")
if notification_manager.is_configured(): if notification_manager.is_configured():
notification_manager.send_notification( notification_manager.send_notification(
title="TMDB API Error", title="TMDB API Error",
@@ -208,7 +208,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
response.raise_for_status() response.raise_for_status()
return response.json() return response.json()
except requests.RequestException as e: except requests.RequestException as e:
log.error(f"TMDB API error getting trending movies: {e}") log.exception("TMDB API error getting trending movies")
if notification_manager.is_configured(): if notification_manager.is_configured():
notification_manager.send_notification( notification_manager.send_notification(
title="TMDB API Error", title="TMDB API Error",
@@ -243,9 +243,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
return True return True
@override @override
def get_show_metadata( def get_show_metadata(self, show_id: int, language: str | None = None) -> Show:
self, show_id: int, language: str | None = None
) -> Show:
""" """
:param show_id: the external id of the show :param show_id: the external id of the show
@@ -368,14 +366,12 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
original_language=original_language, original_language=original_language,
) )
) )
except Exception as e: except Exception:
log.warning(f"Error processing search result: {e}") log.warning("Error processing search result", exc_info=True)
return formatted_results return formatted_results
@override @override
def get_movie_metadata( def get_movie_metadata(self, movie_id: int, language: str | None = None) -> Movie:
self, movie_id: int, language: str | None = None
) -> Movie:
""" """
Get movie metadata with language-aware fetching. Get movie metadata with language-aware fetching.
@@ -470,8 +466,8 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
original_language=original_language, original_language=original_language,
) )
) )
except Exception as e: except Exception:
log.warning(f"Error processing search result: {e}") log.warning("Error processing search result", exc_info=True)
return formatted_results return formatted_results
@override @override

View File

@@ -63,9 +63,7 @@ class TvdbMetadataProvider(AbstractMetadataProvider):
return False return False
@override @override
def get_show_metadata( def get_show_metadata(self, show_id: int, language: str | None = None) -> Show:
self, show_id: int, language: str | None = None
) -> Show:
""" """
:param show_id: The external id of the show :param show_id: The external id of the show
@@ -150,8 +148,8 @@ class TvdbMetadataProvider(AbstractMetadataProvider):
vote_average=None, vote_average=None,
) )
) )
except Exception as e: except Exception:
log.warning(f"Error processing search result: {e}") log.warning("Error processing search result", exc_info=True)
return formatted_results return formatted_results
results = self.__get_trending_tv() results = self.__get_trending_tv()
formatted_results = [] formatted_results = []
@@ -178,8 +176,8 @@ class TvdbMetadataProvider(AbstractMetadataProvider):
vote_average=None, vote_average=None,
) )
) )
except Exception as e: except Exception:
log.warning(f"Error processing search result: {e}") log.warning("Error processing search result", exc_info=True)
return formatted_results return formatted_results
@override @override
@@ -215,8 +213,8 @@ class TvdbMetadataProvider(AbstractMetadataProvider):
vote_average=None, vote_average=None,
) )
) )
except Exception as e: except Exception:
log.warning(f"Error processing search result: {e}") log.warning("Error processing search result", exc_info=True)
return formatted_results return formatted_results
results = self.__get_trending_movies() results = self.__get_trending_movies()
results = results[0:20] results = results[0:20]
@@ -231,15 +229,15 @@ class TvdbMetadataProvider(AbstractMetadataProvider):
year = None year = None
if result.get("image"): if result.get("image"):
poster_path = "https://artworks.thetvdb.com" + str(result.get("image")) poster_path = "https://artworks.thetvdb.com" + str(
result.get("image")
)
else: else:
poster_path = None poster_path = None
formatted_results.append( formatted_results.append(
MetaDataProviderSearchResult( MetaDataProviderSearchResult(
poster_path= poster_path poster_path=poster_path if result.get("image") else None,
if result.get("image")
else None,
overview=result.get("overview"), overview=result.get("overview"),
name=result["name"], name=result["name"],
external_id=result["id"], external_id=result["id"],
@@ -249,8 +247,8 @@ class TvdbMetadataProvider(AbstractMetadataProvider):
vote_average=None, vote_average=None,
) )
) )
except Exception as e: except Exception:
log.warning(f"Error processing search result: {e}") log.warning("Error processing search result", exc_info=True)
return formatted_results return formatted_results
@override @override
@@ -269,9 +267,7 @@ class TvdbMetadataProvider(AbstractMetadataProvider):
return False return False
@override @override
def get_movie_metadata( def get_movie_metadata(self, movie_id: int, language: str | None = None) -> Movie:
self, movie_id: int, language: str | None = None
) -> Movie:
""" """
:param movie_id: the external id of the movie :param movie_id: the external id of the movie

View File

@@ -59,8 +59,8 @@ class MovieRepository:
msg = f"Movie with id {movie_id} not found." msg = f"Movie with id {movie_id} not found."
raise NotFoundError(msg) raise NotFoundError(msg)
return MovieSchema.model_validate(result) return MovieSchema.model_validate(result)
except SQLAlchemyError as e: except SQLAlchemyError:
log.error(f"Database error while retrieving movie {movie_id}: {e}") log.exception(f"Database error while retrieving movie {movie_id}")
raise raise
def get_movie_by_external_id( def get_movie_by_external_id(
@@ -86,9 +86,9 @@ class MovieRepository:
msg = f"Movie with external_id {external_id} and provider {metadata_provider} not found." msg = f"Movie with external_id {external_id} and provider {metadata_provider} not found."
raise NotFoundError(msg) raise NotFoundError(msg)
return MovieSchema.model_validate(result) return MovieSchema.model_validate(result)
except SQLAlchemyError as e: except SQLAlchemyError:
log.error( log.exception(
f"Database error while retrieving movie by external_id {external_id}: {e}" f"Database error while retrieving movie by external_id {external_id}"
) )
raise raise
@@ -103,8 +103,8 @@ class MovieRepository:
stmt = select(Movie) stmt = select(Movie)
results = self.db.execute(stmt).scalars().unique().all() results = self.db.execute(stmt).scalars().unique().all()
return [MovieSchema.model_validate(movie) for movie in results] return [MovieSchema.model_validate(movie) for movie in results]
except SQLAlchemyError as e: except SQLAlchemyError:
log.error(f"Database error while retrieving all movies: {e}") log.exception("Database error while retrieving all movies")
raise raise
def save_movie(self, movie: MovieSchema) -> MovieSchema: def save_movie(self, movie: MovieSchema) -> MovieSchema:
@@ -140,14 +140,14 @@ class MovieRepository:
return MovieSchema.model_validate(db_movie) return MovieSchema.model_validate(db_movie)
except IntegrityError as e: except IntegrityError as e:
self.db.rollback() self.db.rollback()
log.error(f"Integrity error while saving movie {movie.name}: {e}") log.exception(f"Integrity error while saving movie {movie.name}")
msg = ( msg = (
f"Movie with this primary key or unique constraint violation: {e.orig}" f"Movie with this primary key or unique constraint violation: {e.orig}"
) )
raise ConflictError(msg) from e raise ConflictError(msg) from e
except SQLAlchemyError as e: except SQLAlchemyError:
self.db.rollback() self.db.rollback()
log.error(f"Database error while saving movie {movie.name}: {e}") log.exception(f"Database error while saving movie {movie.name}")
raise raise
def delete_movie(self, movie_id: MovieId) -> None: def delete_movie(self, movie_id: MovieId) -> None:
@@ -168,9 +168,9 @@ class MovieRepository:
self.db.delete(movie) self.db.delete(movie)
self.db.commit() self.db.commit()
log.info(f"Successfully deleted movie with id: {movie_id}") log.info(f"Successfully deleted movie with id: {movie_id}")
except SQLAlchemyError as e: except SQLAlchemyError:
self.db.rollback() self.db.rollback()
log.error(f"Database error while deleting movie {movie_id}: {e}") log.exception(f"Database error while deleting movie {movie_id}")
raise raise
def add_movie_request( def add_movie_request(
@@ -204,13 +204,13 @@ class MovieRepository:
self.db.refresh(db_model) self.db.refresh(db_model)
log.info(f"Successfully added movie request with id: {db_model.id}") log.info(f"Successfully added movie request with id: {db_model.id}")
return MovieRequestSchema.model_validate(db_model) return MovieRequestSchema.model_validate(db_model)
except IntegrityError as e: except IntegrityError:
self.db.rollback() self.db.rollback()
log.error(f"Integrity error while adding movie request: {e}") log.exception("Integrity error while adding movie request")
raise raise
except SQLAlchemyError as e: except SQLAlchemyError:
self.db.rollback() self.db.rollback()
log.error(f"Database error while adding movie request: {e}") log.exception("Database error while adding movie request")
raise raise
def set_movie_library(self, movie_id: MovieId, library: str) -> None: def set_movie_library(self, movie_id: MovieId, library: str) -> None:
@@ -229,9 +229,9 @@ class MovieRepository:
raise NotFoundError(msg) raise NotFoundError(msg)
movie.library = library movie.library = library
self.db.commit() self.db.commit()
except SQLAlchemyError as e: except SQLAlchemyError:
self.db.rollback() self.db.rollback()
log.error(f"Database error setting library for movie {movie_id}: {e}") log.exception(f"Database error setting library for movie {movie_id}")
raise raise
def delete_movie_request(self, movie_request_id: MovieRequestId) -> None: def delete_movie_request(self, movie_request_id: MovieRequestId) -> None:
@@ -251,10 +251,10 @@ class MovieRepository:
raise NotFoundError(msg) raise NotFoundError(msg)
self.db.commit() self.db.commit()
# Successfully deleted movie request with id: {movie_request_id} # Successfully deleted movie request with id: {movie_request_id}
except SQLAlchemyError as e: except SQLAlchemyError:
self.db.rollback() self.db.rollback()
log.error( log.exception(
f"Database error while deleting movie request {movie_request_id}: {e}" f"Database error while deleting movie request {movie_request_id}"
) )
raise raise
@@ -273,8 +273,8 @@ class MovieRepository:
) )
results = self.db.execute(stmt).scalars().unique().all() results = self.db.execute(stmt).scalars().unique().all()
return [RichMovieRequestSchema.model_validate(x) for x in results] return [RichMovieRequestSchema.model_validate(x) for x in results]
except SQLAlchemyError as e: except SQLAlchemyError:
log.error(f"Database error while retrieving movie requests: {e}") log.exception("Database error while retrieving movie requests")
raise raise
def add_movie_file(self, movie_file: MovieFileSchema) -> MovieFileSchema: def add_movie_file(self, movie_file: MovieFileSchema) -> MovieFileSchema:
@@ -292,13 +292,13 @@ class MovieRepository:
self.db.commit() self.db.commit()
self.db.refresh(db_model) self.db.refresh(db_model)
return MovieFileSchema.model_validate(db_model) return MovieFileSchema.model_validate(db_model)
except IntegrityError as e: except IntegrityError:
self.db.rollback() self.db.rollback()
log.error(f"Integrity error while adding movie file: {e}") log.exception("Integrity error while adding movie file")
raise raise
except SQLAlchemyError as e: except SQLAlchemyError:
self.db.rollback() self.db.rollback()
log.error(f"Database error while adding movie file: {e}") log.exception("Database error while adding movie file")
raise raise
def remove_movie_files_by_torrent_id(self, torrent_id: TorrentId) -> int: def remove_movie_files_by_torrent_id(self, torrent_id: TorrentId) -> int:
@@ -313,14 +313,15 @@ class MovieRepository:
stmt = delete(MovieFile).where(MovieFile.torrent_id == torrent_id) stmt = delete(MovieFile).where(MovieFile.torrent_id == torrent_id)
result = self.db.execute(stmt) result = self.db.execute(stmt)
self.db.commit() self.db.commit()
return result.rowcount except SQLAlchemyError:
except SQLAlchemyError as e:
self.db.rollback() self.db.rollback()
log.error( log.exception(
f"Database error removing movie files for torrent_id {torrent_id}: {e}" f"Database error removing movie files for torrent_id {torrent_id}"
) )
raise raise
return result.rowcount
def get_movie_files_by_movie_id(self, movie_id: MovieId) -> list[MovieFileSchema]: def get_movie_files_by_movie_id(self, movie_id: MovieId) -> list[MovieFileSchema]:
""" """
Retrieve all movie files for a given movie ID. Retrieve all movie files for a given movie ID.
@@ -333,9 +334,9 @@ class MovieRepository:
stmt = select(MovieFile).where(MovieFile.movie_id == movie_id) stmt = select(MovieFile).where(MovieFile.movie_id == movie_id)
results = self.db.execute(stmt).scalars().all() results = self.db.execute(stmt).scalars().all()
return [MovieFileSchema.model_validate(sf) for sf in results] return [MovieFileSchema.model_validate(sf) for sf in results]
except SQLAlchemyError as e: except SQLAlchemyError:
log.error( log.exception(
f"Database error retrieving movie files for movie_id {movie_id}: {e}" f"Database error retrieving movie files for movie_id {movie_id}"
) )
raise raise
@@ -367,13 +368,13 @@ class MovieRepository:
usenet=torrent.usenet, usenet=torrent.usenet,
) )
formatted_results.append(movie_torrent) formatted_results.append(movie_torrent)
return formatted_results
except SQLAlchemyError as e: except SQLAlchemyError:
log.error( log.exception(f"Database error retrieving torrents for movie_id {movie_id}")
f"Database error retrieving torrents for movie_id {movie_id}: {e}"
)
raise raise
return formatted_results
def get_all_movies_with_torrents(self) -> list[MovieSchema]: def get_all_movies_with_torrents(self) -> list[MovieSchema]:
""" """
Retrieve all movies that are associated with a torrent, ordered alphabetically by movie name. Retrieve all movies that are associated with a torrent, ordered alphabetically by movie name.
@@ -391,8 +392,8 @@ class MovieRepository:
) )
results = self.db.execute(stmt).scalars().unique().all() results = self.db.execute(stmt).scalars().unique().all()
return [MovieSchema.model_validate(movie) for movie in results] return [MovieSchema.model_validate(movie) for movie in results]
except SQLAlchemyError as e: except SQLAlchemyError:
log.error(f"Database error retrieving all movies with torrents: {e}") log.exception("Database error retrieving all movies with torrents")
raise raise
def get_movie_request(self, movie_request_id: MovieRequestId) -> MovieRequestSchema: def get_movie_request(self, movie_request_id: MovieRequestId) -> MovieRequestSchema:
@@ -410,10 +411,8 @@ class MovieRepository:
msg = f"Movie request with id {movie_request_id} not found." msg = f"Movie request with id {movie_request_id} not found."
raise NotFoundError(msg) raise NotFoundError(msg)
return MovieRequestSchema.model_validate(request) return MovieRequestSchema.model_validate(request)
except SQLAlchemyError as e: except SQLAlchemyError:
log.error( log.exception(f"Database error retrieving movie request {movie_request_id}")
f"Database error retrieving movie request {movie_request_id}: {e}"
)
raise raise
def get_movie_by_torrent_id(self, torrent_id: TorrentId) -> MovieSchema: def get_movie_by_torrent_id(self, torrent_id: TorrentId) -> MovieSchema:
@@ -436,10 +435,8 @@ class MovieRepository:
msg = f"Movie for torrent_id {torrent_id} not found." msg = f"Movie for torrent_id {torrent_id} not found."
raise NotFoundError(msg) raise NotFoundError(msg)
return MovieSchema.model_validate(result) return MovieSchema.model_validate(result)
except SQLAlchemyError as e: except SQLAlchemyError:
log.error( log.exception(f"Database error retrieving movie by torrent_id {torrent_id}")
f"Database error retrieving movie by torrent_id {torrent_id}: {e}"
)
raise raise
def update_movie_attributes( def update_movie_attributes(

View File

@@ -8,7 +8,7 @@ from sqlalchemy.orm import Session
from media_manager.config import MediaManagerConfig from media_manager.config import MediaManagerConfig
from media_manager.database import SessionLocal, get_session from media_manager.database import SessionLocal, get_session
from media_manager.exceptions import InvalidConfigError, NotFoundError from media_manager.exceptions import InvalidConfigError, NotFoundError, RenameError
from media_manager.indexer.repository import IndexerRepository from media_manager.indexer.repository import IndexerRepository
from media_manager.indexer.schemas import IndexerQueryResult, IndexerQueryResultId from media_manager.indexer.schemas import IndexerQueryResult, IndexerQueryResultId
from media_manager.indexer.service import IndexerService from media_manager.indexer.service import IndexerService
@@ -98,9 +98,7 @@ class MovieService:
""" """
return self.movie_repository.add_movie_request(movie_request=movie_request) return self.movie_repository.add_movie_request(movie_request=movie_request)
def get_movie_request_by_id( def get_movie_request_by_id(self, movie_request_id: MovieRequestId) -> MovieRequest:
self, movie_request_id: MovieRequestId
) -> MovieRequest:
""" """
Get a movie request by its ID. Get a movie request by its ID.
@@ -151,10 +149,8 @@ class MovieService:
try: try:
shutil.rmtree(movie_dir) shutil.rmtree(movie_dir)
log.info(f"Deleted movie directory: {movie_dir}") log.info(f"Deleted movie directory: {movie_dir}")
except OSError as e: except OSError:
log.error( log.exception(f"Deleting movie directory: {movie_dir}")
f"Deleting movie directory: {movie_dir} : {e.strerror}"
)
if delete_torrents: if delete_torrents:
# Get all torrents associated with this movie # Get all torrents associated with this movie
@@ -171,8 +167,10 @@ class MovieService:
torrent=torrent, delete_files=True torrent=torrent, delete_files=True
) )
log.info(f"Deleted torrent: {torrent.torrent_title}") log.info(f"Deleted torrent: {torrent.torrent_title}")
except Exception as e: except Exception:
log.warning(f"Failed to delete torrent {torrent.hash}: {e}") log.warning(
f"Failed to delete torrent {torrent.hash}", exc_info=True
)
# Delete from database # Delete from database
self.movie_repository.delete_movie(movie_id=movie.id) self.movie_repository.delete_movie(movie_id=movie.id)
@@ -237,19 +235,19 @@ class MovieService:
self.movie_repository.get_movie_by_external_id( self.movie_repository.get_movie_by_external_id(
external_id=external_id, metadata_provider=metadata_provider external_id=external_id, metadata_provider=metadata_provider
) )
return True
except NotFoundError: except NotFoundError:
return False return False
elif movie_id is not None: elif movie_id is not None:
try: try:
self.movie_repository.get_movie_by_id(movie_id=movie_id) self.movie_repository.get_movie_by_id(movie_id=movie_id)
return True
except NotFoundError: except NotFoundError:
return False return False
else: else:
msg = "Use one of the provided overloads for this function!" msg = "Use one of the provided overloads for this function!"
raise ValueError(msg) raise ValueError(msg)
return True
def get_all_available_torrents_for_movie( def get_all_available_torrents_for_movie(
self, movie: Movie, search_query_override: str | None = None self, movie: Movie, search_query_override: str | None = None
) -> list[IndexerQueryResult]: ) -> list[IndexerQueryResult]:
@@ -570,8 +568,8 @@ class MovieService:
try: try:
movie_root_path.mkdir(parents=True, exist_ok=True) movie_root_path.mkdir(parents=True, exist_ok=True)
except Exception as e: except Exception:
log.error(f"Failed to create directory {movie_root_path}: {e}") log.exception("Failed to create directory {movie_root_path}")
return False return False
# import movie video # import movie video
@@ -682,9 +680,8 @@ class MovieService:
try: try:
source_directory.rename(new_source_path) source_directory.rename(new_source_path)
except Exception as e: except Exception as e:
log.error(f"Failed to rename {source_directory} to {new_source_path}: {e}") log.exception(f"Failed to rename {source_directory} to {new_source_path}")
msg = "Failed to rename directory" raise RenameError from e
raise Exception(msg) from e
video_files, subtitle_files, _all_files = get_files_for_import( video_files, subtitle_files, _all_files = get_files_for_import(
directory=new_source_path directory=new_source_path
@@ -786,12 +783,14 @@ def auto_download_all_approved_movie_requests() -> None:
movie_repository = MovieRepository(db=db) movie_repository = MovieRepository(db=db)
torrent_service = TorrentService(torrent_repository=TorrentRepository(db=db)) torrent_service = TorrentService(torrent_repository=TorrentRepository(db=db))
indexer_service = IndexerService(indexer_repository=IndexerRepository(db=db)) indexer_service = IndexerService(indexer_repository=IndexerRepository(db=db))
notification_service = NotificationService(notification_repository=NotificationRepository(db=db)) notification_service = NotificationService(
notification_repository=NotificationRepository(db=db)
)
movie_service = MovieService( movie_service = MovieService(
movie_repository=movie_repository, movie_repository=movie_repository,
torrent_service=torrent_service, torrent_service=torrent_service,
indexer_service=indexer_service, indexer_service=indexer_service,
notification_service=notification_service notification_service=notification_service,
) )
log.info("Auto downloading all approved movie requests") log.info("Auto downloading all approved movie requests")
@@ -821,7 +820,9 @@ def import_all_movie_torrents() -> None:
movie_repository = MovieRepository(db=db) movie_repository = MovieRepository(db=db)
torrent_service = TorrentService(torrent_repository=TorrentRepository(db=db)) torrent_service = TorrentService(torrent_repository=TorrentRepository(db=db))
indexer_service = IndexerService(indexer_repository=IndexerRepository(db=db)) indexer_service = IndexerService(indexer_repository=IndexerRepository(db=db))
notification_service = NotificationService(notification_repository=NotificationRepository(db=db)) notification_service = NotificationService(
notification_repository=NotificationRepository(db=db)
)
movie_service = MovieService( movie_service = MovieService(
movie_repository=movie_repository, movie_repository=movie_repository,
torrent_service=torrent_service, torrent_service=torrent_service,
@@ -841,11 +842,8 @@ def import_all_movie_torrents() -> None:
) )
continue continue
movie_service.import_torrent_files(torrent=t, movie=movie) movie_service.import_torrent_files(torrent=t, movie=movie)
except RuntimeError as e: except RuntimeError:
log.error( log.exception(f"Failed to import torrent {t.title}")
f"Failed to import torrent {t.title}: {e}",
exc_info=True,
)
log.info("Finished importing all torrents") log.info("Finished importing all torrents")
db.commit() db.commit()
@@ -860,7 +858,9 @@ def update_all_movies_metadata() -> None:
movie_repository=movie_repository, movie_repository=movie_repository,
torrent_service=TorrentService(torrent_repository=TorrentRepository(db=db)), torrent_service=TorrentService(torrent_repository=TorrentRepository(db=db)),
indexer_service=IndexerService(indexer_repository=IndexerRepository(db=db)), indexer_service=IndexerService(indexer_repository=IndexerRepository(db=db)),
notification_service=NotificationService(notification_repository=NotificationRepository(db=db)) notification_service=NotificationService(
notification_repository=NotificationRepository(db=db)
),
) )
log.info("Updating metadata for all movies") log.info("Updating metadata for all movies")
@@ -880,9 +880,9 @@ def update_all_movies_metadata() -> None:
f"Unsupported metadata provider {movie.metadata_provider} for movie {movie.name}, skipping update." f"Unsupported metadata provider {movie.metadata_provider} for movie {movie.name}, skipping update."
) )
continue continue
except InvalidConfigError as e: except InvalidConfigError:
log.error( log.exception(
f"Error initializing metadata provider {movie.metadata_provider} for movie {movie.name}: {e}" f"Error initializing metadata provider {movie.metadata_provider} for movie {movie.name}",
) )
continue continue
movie_service.update_movie_metadata( movie_service.update_movie_metadata(

View File

@@ -3,7 +3,6 @@ Notification Manager - Orchestrates sending notifications through all configured
""" """
import logging import logging
from typing import List
from media_manager.config import MediaManagerConfig from media_manager.config import MediaManagerConfig
from media_manager.notification.schemas import MessageNotification from media_manager.notification.schemas import MessageNotification
@@ -33,7 +32,7 @@ class NotificationManager:
def __init__(self) -> None: def __init__(self) -> None:
self.config = MediaManagerConfig().notifications self.config = MediaManagerConfig().notifications
self.providers: List[AbstractNotificationServiceProvider] = [] self.providers: list[AbstractNotificationServiceProvider] = []
self._initialize_providers() self._initialize_providers()
def _initialize_providers(self) -> None: def _initialize_providers(self) -> None:
@@ -42,32 +41,32 @@ class NotificationManager:
try: try:
self.providers.append(EmailNotificationServiceProvider()) self.providers.append(EmailNotificationServiceProvider())
logger.info("Email notification provider initialized") logger.info("Email notification provider initialized")
except Exception as e: except Exception:
logger.error(f"Failed to initialize Email provider: {e}") logger.exception("Failed to initialize Email provider")
# Gotify provider # Gotify provider
if self.config.gotify.enabled: if self.config.gotify.enabled:
try: try:
self.providers.append(GotifyNotificationServiceProvider()) self.providers.append(GotifyNotificationServiceProvider())
logger.info("Gotify notification provider initialized") logger.info("Gotify notification provider initialized")
except Exception as e: except Exception:
logger.error(f"Failed to initialize Gotify provider: {e}") logger.exception("Failed to initialize Gotify provider")
# Ntfy provider # Ntfy provider
if self.config.ntfy.enabled: if self.config.ntfy.enabled:
try: try:
self.providers.append(NtfyNotificationServiceProvider()) self.providers.append(NtfyNotificationServiceProvider())
logger.info("Ntfy notification provider initialized") logger.info("Ntfy notification provider initialized")
except Exception as e: except Exception:
logger.error(f"Failed to initialize Ntfy provider: {e}") logger.exception("Failed to initialize Ntfy provider")
# Pushover provider # Pushover provider
if self.config.pushover.enabled: if self.config.pushover.enabled:
try: try:
self.providers.append(PushoverNotificationServiceProvider()) self.providers.append(PushoverNotificationServiceProvider())
logger.info("Pushover notification provider initialized") logger.info("Pushover notification provider initialized")
except Exception as e: except Exception:
logger.error(f"Failed to initialize Pushover provider: {e}") logger.exception("Failed to initialize Pushover provider")
logger.info(f"Initialized {len(self.providers)} notification providers") logger.info(f"Initialized {len(self.providers)} notification providers")
@@ -86,10 +85,10 @@ class NotificationManager:
else: else:
logger.warning(f"Failed to send notification via {provider_name}") logger.warning(f"Failed to send notification via {provider_name}")
except Exception as e: except Exception:
logger.error(f"Error sending notification via {provider_name}: {e}") logger.exception(f"Error sending notification via {provider_name}")
def get_configured_providers(self) -> List[str]: def get_configured_providers(self) -> list[str]:
return [provider.__class__.__name__ for provider in self.providers] return [provider.__class__.__name__ for provider in self.providers]
def is_configured(self) -> bool: def is_configured(self) -> bool:

View File

@@ -6,6 +6,7 @@ from sqlalchemy.exc import (
SQLAlchemyError, SQLAlchemyError,
) )
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from sqlalchemy.sql.expression import false
from media_manager.exceptions import ConflictError, NotFoundError from media_manager.exceptions import ConflictError, NotFoundError
from media_manager.notification.models import Notification from media_manager.notification.models import Notification
@@ -36,7 +37,7 @@ class NotificationRepository:
try: try:
stmt = ( stmt = (
select(Notification) select(Notification)
.where(Notification.read == False) # noqa: E712 .where(Notification.read == false())
.order_by(Notification.timestamp.desc()) .order_by(Notification.timestamp.desc())
) )
results = self.db.execute(stmt).scalars().all() results = self.db.execute(stmt).scalars().all()
@@ -44,8 +45,8 @@ class NotificationRepository:
NotificationSchema.model_validate(notification) NotificationSchema.model_validate(notification)
for notification in results for notification in results
] ]
except SQLAlchemyError as e: except SQLAlchemyError:
log.error(f"Database error while retrieving unread notifications: {e}") log.exception("Database error while retrieving unread notifications")
raise raise
def get_all_notifications(self) -> list[NotificationSchema]: def get_all_notifications(self) -> list[NotificationSchema]:
@@ -56,8 +57,8 @@ class NotificationRepository:
NotificationSchema.model_validate(notification) NotificationSchema.model_validate(notification)
for notification in results for notification in results
] ]
except SQLAlchemyError as e: except SQLAlchemyError:
log.error(f"Database error while retrieving notifications: {e}") log.exception("Database error while retrieving notifications")
raise raise
def save_notification(self, notification: NotificationSchema) -> None: def save_notification(self, notification: NotificationSchema) -> None:
@@ -71,8 +72,8 @@ class NotificationRepository:
) )
) )
self.db.commit() self.db.commit()
except IntegrityError as e: except IntegrityError:
log.error(f"Could not save notification, Error: {e}") log.exception("Could not save notification")
msg = f"Notification with id {notification.id} already exists." msg = f"Notification with id {notification.id} already exists."
raise ConflictError(msg) from None raise ConflictError(msg) from None
return return

View File

@@ -12,7 +12,8 @@ class Notification(BaseModel):
model_config = ConfigDict(from_attributes=True) model_config = ConfigDict(from_attributes=True)
id: NotificationId = Field( id: NotificationId = Field(
default_factory=lambda: NotificationId(uuid.uuid4()), description="Unique identifier for the notification" default_factory=lambda: NotificationId(uuid.uuid4()),
description="Unique identifier for the notification",
) )
read: bool = Field(False, description="Whether the notification has been read") read: bool = Field(False, description="Whether the notification has been read")
message: str = Field(description="The content of the notification") message: str = Field(description="The content of the notification")

View File

@@ -53,8 +53,8 @@ class QbittorrentDownloadClient(AbstractDownloadClient):
) )
try: try:
self.api_client.auth_log_in() self.api_client.auth_log_in()
except Exception as e: except Exception:
log.error(f"Failed to log into qbittorrent: {e}") log.exception("Failed to log into qbittorrent")
raise raise
try: try:
@@ -72,11 +72,8 @@ class QbittorrentDownloadClient(AbstractDownloadClient):
if self.config.category_save_path != "" if self.config.category_save_path != ""
else None, else None,
) )
except Exception as e: except Exception:
if str(e) != "": log.exception("Error on updating MediaManager category in qBittorrent")
log.error(
f"Error on updating MediaManager category in qBittorrent, error: {e}"
)
def download_torrent(self, indexer_result: IndexerQueryResult) -> Torrent: def download_torrent(self, indexer_result: IndexerQueryResult) -> Torrent:
""" """

View File

@@ -38,8 +38,8 @@ class SabnzbdDownloadClient(AbstractDownloadClient):
try: try:
# Test connection # Test connection
self.client.version() self.client.version()
except Exception as e: except Exception:
log.error(f"Failed to connect to SABnzbd: {e}") log.exception("Failed to connect to SABnzbd")
raise raise
def download_torrent(self, indexer_result: IndexerQueryResult) -> Torrent: def download_torrent(self, indexer_result: IndexerQueryResult) -> Torrent:
@@ -55,10 +55,7 @@ class SabnzbdDownloadClient(AbstractDownloadClient):
url=str(indexer_result.download_url), nzbname=indexer_result.title url=str(indexer_result.download_url), nzbname=indexer_result.title
) )
if not response["status"]: if not response["status"]:
error_msg = response raise RuntimeError(f"Failed to add NZB to SABnzbd: {response}") # noqa: EM102, TRY003, TRY301
log.error(f"Failed to add NZB to SABnzbd: {error_msg}")
msg = f"Failed to add NZB to SABnzbd: {error_msg}"
raise RuntimeError(msg)
# Generate a hash for the NZB (using title and download URL) # Generate a hash for the NZB (using title and download URL)
nzo_id = response["nzo_ids"][0] nzo_id = response["nzo_ids"][0]
@@ -75,13 +72,12 @@ class SabnzbdDownloadClient(AbstractDownloadClient):
# Get initial status from SABnzbd # Get initial status from SABnzbd
torrent.status = self.get_torrent_status(torrent) torrent.status = self.get_torrent_status(torrent)
except Exception:
return torrent log.exception(f"Failed to download NZB {indexer_result.title}")
except Exception as e:
log.error(f"Failed to download NZB {indexer_result.title}: {e}")
raise raise
return torrent
def remove_torrent(self, torrent: Torrent, delete_data: bool = False) -> None: def remove_torrent(self, torrent: Torrent, delete_data: bool = False) -> None:
""" """
Remove a torrent from SABnzbd. Remove a torrent from SABnzbd.
@@ -91,8 +87,8 @@ class SabnzbdDownloadClient(AbstractDownloadClient):
""" """
try: try:
self.client.delete_job(nzo_id=torrent.hash, delete_files=delete_data) self.client.delete_job(nzo_id=torrent.hash, delete_files=delete_data)
except Exception as e: except Exception:
log.error(f"Failed to remove torrent {torrent.title}: {e}") log.exception(f"Failed to remove torrent {torrent.title}")
raise raise
def pause_torrent(self, torrent: Torrent) -> None: def pause_torrent(self, torrent: Torrent) -> None:
@@ -103,8 +99,8 @@ class SabnzbdDownloadClient(AbstractDownloadClient):
""" """
try: try:
self.client.pause_job(nzo_id=torrent.hash) self.client.pause_job(nzo_id=torrent.hash)
except Exception as e: except Exception:
log.error(f"Failed to pause torrent {torrent.title}: {e}") log.exception(f"Failed to pause torrent {torrent.title}")
raise raise
def resume_torrent(self, torrent: Torrent) -> None: def resume_torrent(self, torrent: Torrent) -> None:
@@ -115,8 +111,8 @@ class SabnzbdDownloadClient(AbstractDownloadClient):
""" """
try: try:
self.client.resume_job(nzo_id=torrent.hash) self.client.resume_job(nzo_id=torrent.hash)
except Exception as e: except Exception:
log.error(f"Failed to resume torrent {torrent.title}: {e}") log.exception(f"Failed to resume torrent {torrent.title}")
raise raise
def get_torrent_status(self, torrent: Torrent) -> TorrentStatus: def get_torrent_status(self, torrent: Torrent) -> TorrentStatus:

View File

@@ -43,8 +43,8 @@ class TransmissionDownloadClient(AbstractDownloadClient):
) )
# Test connection # Test connection
self._client.session_stats() self._client.session_stats()
except Exception as e: except Exception:
log.error(f"Failed to connect to Transmission: {e}") log.exception("Failed to connect to Transmission")
raise raise
def download_torrent(self, indexer_result: IndexerQueryResult) -> Torrent: def download_torrent(self, indexer_result: IndexerQueryResult) -> Torrent:
@@ -68,8 +68,8 @@ class TransmissionDownloadClient(AbstractDownloadClient):
f"Successfully added torrent to Transmission: {indexer_result.title}" f"Successfully added torrent to Transmission: {indexer_result.title}"
) )
except Exception as e: except Exception:
log.error(f"Failed to add torrent to Transmission: {e}") log.exception("Failed to add torrent to Transmission")
raise raise
torrent = Torrent( torrent = Torrent(
@@ -95,8 +95,8 @@ class TransmissionDownloadClient(AbstractDownloadClient):
try: try:
self._client.remove_torrent(torrent.hash, delete_data=delete_data) self._client.remove_torrent(torrent.hash, delete_data=delete_data)
except Exception as e: except Exception:
log.error(f"Failed to remove torrent: {e}") log.exception("Failed to remove torrent")
raise raise
def get_torrent_status(self, torrent: Torrent) -> TorrentStatus: def get_torrent_status(self, torrent: Torrent) -> TorrentStatus:
@@ -123,13 +123,12 @@ class TransmissionDownloadClient(AbstractDownloadClient):
log.warning( log.warning(
f"Torrent {torrent.title} has error status: {transmission_torrent.error_string}" f"Torrent {torrent.title} has error status: {transmission_torrent.error_string}"
) )
except Exception:
return status log.exception("Failed to get torrent status")
except Exception as e:
log.error(f"Failed to get torrent status: {e}")
return TorrentStatus.error return TorrentStatus.error
return status
def pause_torrent(self, torrent: Torrent) -> None: def pause_torrent(self, torrent: Torrent) -> None:
""" """
Pause a torrent download. Pause a torrent download.
@@ -140,8 +139,8 @@ class TransmissionDownloadClient(AbstractDownloadClient):
self._client.stop_torrent(torrent.hash) self._client.stop_torrent(torrent.hash)
log.debug(f"Successfully paused torrent: {torrent.title}") log.debug(f"Successfully paused torrent: {torrent.title}")
except Exception as e: except Exception:
log.error(f"Failed to pause torrent: {e}") log.exception("Failed to pause torrent")
raise raise
def resume_torrent(self, torrent: Torrent) -> None: def resume_torrent(self, torrent: Torrent) -> None:
@@ -154,6 +153,6 @@ class TransmissionDownloadClient(AbstractDownloadClient):
self._client.start_torrent(torrent.hash) self._client.start_torrent(torrent.hash)
log.debug(f"Successfully resumed torrent: {torrent.title}") log.debug(f"Successfully resumed torrent: {torrent.title}")
except Exception as e: except Exception:
log.error(f"Failed to resume torrent: {e}") log.exception("Failed to resume torrent")
raise raise

View File

@@ -43,22 +43,22 @@ class DownloadManager:
if self.config.qbittorrent.enabled: if self.config.qbittorrent.enabled:
try: try:
self._torrent_client = QbittorrentDownloadClient() self._torrent_client = QbittorrentDownloadClient()
except Exception as e: except Exception:
log.error(f"Failed to initialize qBittorrent client: {e}") log.exception("Failed to initialize qBittorrent client")
# If qBittorrent is not available or failed, try Transmission # If qBittorrent is not available or failed, try Transmission
if self._torrent_client is None and self.config.transmission.enabled: if self._torrent_client is None and self.config.transmission.enabled:
try: try:
self._torrent_client = TransmissionDownloadClient() self._torrent_client = TransmissionDownloadClient()
except Exception as e: except Exception:
log.error(f"Failed to initialize Transmission client: {e}") log.exception("Failed to initialize Transmission client")
# Initialize SABnzbd client for usenet # Initialize SABnzbd client for usenet
if self.config.sabnzbd.enabled: if self.config.sabnzbd.enabled:
try: try:
self._usenet_client = SabnzbdDownloadClient() self._usenet_client = SabnzbdDownloadClient()
except Exception as e: except Exception:
log.error(f"Failed to initialize SABnzbd client: {e}") log.exception("Failed to initialize SABnzbd client")
active_clients = [] active_clients = []
if self._torrent_client: if self._torrent_client:

View File

@@ -87,7 +87,9 @@ class TorrentRepository:
return None return None
return MovieSchema.model_validate(result) return MovieSchema.model_validate(result)
def get_movie_files_of_torrent(self, torrent_id: TorrentId) -> list[MovieFileSchema]: def get_movie_files_of_torrent(
self, torrent_id: TorrentId
) -> list[MovieFileSchema]:
stmt = select(MovieFile).where(MovieFile.torrent_id == torrent_id) stmt = select(MovieFile).where(MovieFile.torrent_id == torrent_id)
result = self.db.execute(stmt).scalars().all() result = self.db.execute(stmt).scalars().all()
return [MovieFileSchema.model_validate(movie_file) for movie_file in result] return [MovieFileSchema.model_validate(movie_file) for movie_file in result]

View File

@@ -92,8 +92,8 @@ class TorrentService:
for x in self.torrent_repository.get_all_torrents(): for x in self.torrent_repository.get_all_torrents():
try: try:
torrents.append(self.get_torrent_status(x)) torrents.append(self.get_torrent_status(x))
except RuntimeError as e: except RuntimeError:
log.error(f"Error fetching status for torrent {x.title}: {e}") log.exception(f"Error fetching status for torrent {x.title}")
return torrents return torrents
def get_torrent_by_id(self, torrent_id: TorrentId) -> Torrent: def get_torrent_by_id(self, torrent_id: TorrentId) -> Torrent:

View File

@@ -57,8 +57,8 @@ def extract_archives(files: list) -> None:
) )
try: try:
patoolib.extract_archive(str(file), outdir=str(file.parent)) patoolib.extract_archive(str(file), outdir=str(file.parent))
except patoolib.util.PatoolError as e: except patoolib.util.PatoolError:
log.error(f"Failed to extract archive {file}. Error: {e}") log.exception(f"Failed to extract archive {file}")
def get_torrent_filepath(torrent: Torrent) -> Path: def get_torrent_filepath(torrent: Torrent) -> Path:
@@ -72,10 +72,10 @@ def import_file(target_file: Path, source_file: Path) -> None:
try: try:
target_file.hardlink_to(source_file) target_file.hardlink_to(source_file)
except FileExistsError: except FileExistsError:
log.error(f"File already exists at {target_file}.") log.exception(f"File already exists at {target_file}.")
except (OSError, UnsupportedOperation, NotImplementedError) as e: except (OSError, UnsupportedOperation, NotImplementedError):
log.error( log.exception(
f"Failed to create hardlink from {source_file} to {target_file}: {e}. Falling back to copying the file." f"Failed to create hardlink from {source_file} to {target_file}. Falling back to copying the file."
) )
shutil.copy(src=source_file, dst=target_file) shutil.copy(src=source_file, dst=target_file)
@@ -148,16 +148,16 @@ def get_torrent_hash(torrent: IndexerQueryResult) -> str:
response = requests.get(str(torrent.download_url), timeout=30) response = requests.get(str(torrent.download_url), timeout=30)
response.raise_for_status() response.raise_for_status()
torrent_content = response.content torrent_content = response.content
except InvalidSchema as e: except InvalidSchema:
log.debug(f"Invalid schema for URL {torrent.download_url}: {e}") log.debug(f"Invalid schema for URL {torrent.download_url}", exc_info=True)
final_url = follow_redirects_to_final_torrent_url( final_url = follow_redirects_to_final_torrent_url(
initial_url=torrent.download_url, initial_url=torrent.download_url,
session=requests.Session(), session=requests.Session(),
timeout=MediaManagerConfig().indexers.prowlarr.timeout_seconds, timeout=MediaManagerConfig().indexers.prowlarr.timeout_seconds,
) )
return str(libtorrent.parse_magnet_uri(final_url).info_hash) return str(libtorrent.parse_magnet_uri(final_url).info_hash)
except Exception as e: except Exception:
log.error(f"Failed to download torrent file: {e}") log.exception("Failed to download torrent file")
raise raise
# saving the torrent file # saving the torrent file
@@ -170,9 +170,10 @@ def get_torrent_hash(torrent: IndexerQueryResult) -> str:
torrent_hash = hashlib.sha1( # noqa: S324 torrent_hash = hashlib.sha1( # noqa: S324
bencoder.encode(decoded_content[b"info"]) bencoder.encode(decoded_content[b"info"])
).hexdigest() ).hexdigest()
except Exception as e: except Exception:
log.error(f"Failed to decode torrent file: {e}") log.exception("Failed to decode torrent file")
raise raise
return torrent_hash return torrent_hash

View File

@@ -67,8 +67,8 @@ class TvRepository:
msg = f"Show with id {show_id} not found." msg = f"Show with id {show_id} not found."
raise NotFoundError(msg) raise NotFoundError(msg)
return ShowSchema.model_validate(result) return ShowSchema.model_validate(result)
except SQLAlchemyError as e: except SQLAlchemyError:
log.error(f"Database error while retrieving show {show_id}: {e}") log.exception(f"Database error while retrieving show {show_id}")
raise raise
def get_show_by_external_id( def get_show_by_external_id(
@@ -95,9 +95,9 @@ class TvRepository:
msg = f"Show with external_id {external_id} and provider {metadata_provider} not found." msg = f"Show with external_id {external_id} and provider {metadata_provider} not found."
raise NotFoundError(msg) raise NotFoundError(msg)
return ShowSchema.model_validate(result) return ShowSchema.model_validate(result)
except SQLAlchemyError as e: except SQLAlchemyError:
log.error( log.exception(
f"Database error while retrieving show by external_id {external_id}: {e}" f"Database error while retrieving show by external_id {external_id}",
) )
raise raise
@@ -114,8 +114,8 @@ class TvRepository:
) )
results = self.db.execute(stmt).scalars().unique().all() results = self.db.execute(stmt).scalars().unique().all()
return [ShowSchema.model_validate(show) for show in results] return [ShowSchema.model_validate(show) for show in results]
except SQLAlchemyError as e: except SQLAlchemyError:
log.error(f"Database error while retrieving all shows: {e}") log.exception("Database error while retrieving all shows")
raise raise
def get_total_downloaded_episodes_count(self) -> int: def get_total_downloaded_episodes_count(self) -> int:
@@ -124,11 +124,9 @@ class TvRepository:
select(func.count()).select_from(Episode).join(Season).join(SeasonFile) select(func.count()).select_from(Episode).join(Season).join(SeasonFile)
) )
return self.db.execute(stmt).scalar_one_or_none() return self.db.execute(stmt).scalar_one_or_none()
except SQLAlchemyError as e: except SQLAlchemyError:
log.error( log.exception("Database error while calculating downloaded episodes count")
f"Database error while calculating downloaded episodes count: {e}" raise
)
raise e
def save_show(self, show: ShowSchema) -> ShowSchema: def save_show(self, show: ShowSchema) -> ShowSchema:
""" """
@@ -192,9 +190,9 @@ class TvRepository:
self.db.rollback() self.db.rollback()
msg = f"Show with this primary key or unique constraint violation: {e.orig}" msg = f"Show with this primary key or unique constraint violation: {e.orig}"
raise ConflictError(msg) from e raise ConflictError(msg) from e
except SQLAlchemyError as e: except SQLAlchemyError:
self.db.rollback() self.db.rollback()
log.error(f"Database error while saving show {show.name}: {e}") log.exception(f"Database error while saving show {show.name}")
raise raise
def delete_show(self, show_id: ShowId) -> None: def delete_show(self, show_id: ShowId) -> None:
@@ -212,9 +210,9 @@ class TvRepository:
raise NotFoundError(msg) raise NotFoundError(msg)
self.db.delete(show) self.db.delete(show)
self.db.commit() self.db.commit()
except SQLAlchemyError as e: except SQLAlchemyError:
self.db.rollback() self.db.rollback()
log.error(f"Database error while deleting show {show_id}: {e}") log.exception(f"Database error while deleting show {show_id}")
raise raise
def get_season(self, season_id: SeasonId) -> SeasonSchema: def get_season(self, season_id: SeasonId) -> SeasonSchema:
@@ -232,8 +230,8 @@ class TvRepository:
msg = f"Season with id {season_id} not found." msg = f"Season with id {season_id} not found."
raise NotFoundError(msg) raise NotFoundError(msg)
return SeasonSchema.model_validate(season) return SeasonSchema.model_validate(season)
except SQLAlchemyError as e: except SQLAlchemyError:
log.error(f"Database error while retrieving season {season_id}: {e}") log.exception(f"Database error while retrieving season {season_id}")
raise raise
def add_season_request( def add_season_request(
@@ -265,13 +263,13 @@ class TvRepository:
self.db.commit() self.db.commit()
self.db.refresh(db_model) self.db.refresh(db_model)
return SeasonRequestSchema.model_validate(db_model) return SeasonRequestSchema.model_validate(db_model)
except IntegrityError as e: except IntegrityError:
self.db.rollback() self.db.rollback()
log.error(f"Integrity error while adding season request: {e}") log.exception("Integrity error while adding season request")
raise raise
except SQLAlchemyError as e: except SQLAlchemyError:
self.db.rollback() self.db.rollback()
log.error(f"Database error while adding season request: {e}") log.exception("Database error while adding season request")
raise raise
def delete_season_request(self, season_request_id: SeasonRequestId) -> None: def delete_season_request(self, season_request_id: SeasonRequestId) -> None:
@@ -290,10 +288,10 @@ class TvRepository:
msg = f"SeasonRequest with id {season_request_id} not found." msg = f"SeasonRequest with id {season_request_id} not found."
raise NotFoundError(msg) raise NotFoundError(msg)
self.db.commit() self.db.commit()
except SQLAlchemyError as e: except SQLAlchemyError:
self.db.rollback() self.db.rollback()
log.error( log.exception(
f"Database error while deleting season request {season_request_id}: {e}" f"Database error while deleting season request {season_request_id}"
) )
raise raise
@@ -319,9 +317,9 @@ class TvRepository:
msg = f"Season number {season_number} for show_id {show_id} not found." msg = f"Season number {season_number} for show_id {show_id} not found."
raise NotFoundError(msg) raise NotFoundError(msg)
return SeasonSchema.model_validate(result) return SeasonSchema.model_validate(result)
except SQLAlchemyError as e: except SQLAlchemyError:
log.error( log.exception(
f"Database error retrieving season {season_number} for show {show_id}: {e}" f"Database error retrieving season {season_number} for show {show_id}"
) )
raise raise
@@ -353,8 +351,8 @@ class TvRepository:
) )
for x in results for x in results
] ]
except SQLAlchemyError as e: except SQLAlchemyError:
log.error(f"Database error while retrieving season requests: {e}") log.exception("Database error while retrieving season requests")
raise raise
def add_season_file(self, season_file: SeasonFileSchema) -> SeasonFileSchema: def add_season_file(self, season_file: SeasonFileSchema) -> SeasonFileSchema:
@@ -372,13 +370,13 @@ class TvRepository:
self.db.commit() self.db.commit()
self.db.refresh(db_model) self.db.refresh(db_model)
return SeasonFileSchema.model_validate(db_model) return SeasonFileSchema.model_validate(db_model)
except IntegrityError as e: except IntegrityError:
self.db.rollback() self.db.rollback()
log.error(f"Integrity error while adding season file: {e}") log.exception("Integrity error while adding season file")
raise raise
except SQLAlchemyError as e: except SQLAlchemyError:
self.db.rollback() self.db.rollback()
log.error(f"Database error while adding season file: {e}") log.exception("Database error while adding season file")
raise raise
def remove_season_files_by_torrent_id(self, torrent_id: TorrentId) -> int: def remove_season_files_by_torrent_id(self, torrent_id: TorrentId) -> int:
@@ -393,13 +391,13 @@ class TvRepository:
stmt = delete(SeasonFile).where(SeasonFile.torrent_id == torrent_id) stmt = delete(SeasonFile).where(SeasonFile.torrent_id == torrent_id)
result = self.db.execute(stmt) result = self.db.execute(stmt)
self.db.commit() self.db.commit()
return result.rowcount except SQLAlchemyError:
except SQLAlchemyError as e:
self.db.rollback() self.db.rollback()
log.error( log.exception(
f"Database error removing season files for torrent_id {torrent_id}: {e}" f"Database error removing season files for torrent_id {torrent_id}"
) )
raise raise
return result.rowcount
def set_show_library(self, show_id: ShowId, library: str) -> None: def set_show_library(self, show_id: ShowId, library: str) -> None:
""" """
@@ -417,9 +415,9 @@ class TvRepository:
raise NotFoundError(msg) raise NotFoundError(msg)
show.library = library show.library = library
self.db.commit() self.db.commit()
except SQLAlchemyError as e: except SQLAlchemyError:
self.db.rollback() self.db.rollback()
log.error(f"Database error setting library for show {show_id}: {e}") log.exception(f"Database error setting library for show {show_id}")
raise raise
def get_season_files_by_season_id( def get_season_files_by_season_id(
@@ -436,9 +434,9 @@ class TvRepository:
stmt = select(SeasonFile).where(SeasonFile.season_id == season_id) stmt = select(SeasonFile).where(SeasonFile.season_id == season_id)
results = self.db.execute(stmt).scalars().all() results = self.db.execute(stmt).scalars().all()
return [SeasonFileSchema.model_validate(sf) for sf in results] return [SeasonFileSchema.model_validate(sf) for sf in results]
except SQLAlchemyError as e: except SQLAlchemyError:
log.error( log.exception(
f"Database error retrieving season files for season_id {season_id}: {e}" f"Database error retrieving season files for season_id {season_id}"
) )
raise raise
@@ -460,8 +458,8 @@ class TvRepository:
) )
results = self.db.execute(stmt).scalars().unique().all() results = self.db.execute(stmt).scalars().unique().all()
return [TorrentSchema.model_validate(torrent) for torrent in results] return [TorrentSchema.model_validate(torrent) for torrent in results]
except SQLAlchemyError as e: except SQLAlchemyError:
log.error(f"Database error retrieving torrents for show_id {show_id}: {e}") log.exception(f"Database error retrieving torrents for show_id {show_id}")
raise raise
def get_all_shows_with_torrents(self) -> list[ShowSchema]: def get_all_shows_with_torrents(self) -> list[ShowSchema]:
@@ -483,8 +481,8 @@ class TvRepository:
) )
results = self.db.execute(stmt).scalars().unique().all() results = self.db.execute(stmt).scalars().unique().all()
return [ShowSchema.model_validate(show) for show in results] return [ShowSchema.model_validate(show) for show in results]
except SQLAlchemyError as e: except SQLAlchemyError:
log.error(f"Database error retrieving all shows with torrents: {e}") log.exception("Database error retrieving all shows with torrents")
raise raise
def get_seasons_by_torrent_id(self, torrent_id: TorrentId) -> list[SeasonNumber]: def get_seasons_by_torrent_id(self, torrent_id: TorrentId) -> list[SeasonNumber]:
@@ -504,9 +502,9 @@ class TvRepository:
) )
results = self.db.execute(stmt).scalars().unique().all() results = self.db.execute(stmt).scalars().unique().all()
return [SeasonNumber(x) for x in results] return [SeasonNumber(x) for x in results]
except SQLAlchemyError as e: except SQLAlchemyError:
log.error( log.exception(
f"Database error retrieving season numbers for torrent_id {torrent_id}: {e}" f"Database error retrieving season numbers for torrent_id {torrent_id}"
) )
raise raise
@@ -528,9 +526,9 @@ class TvRepository:
msg = f"Season request with id {season_request_id} not found." msg = f"Season request with id {season_request_id} not found."
raise NotFoundError(msg) raise NotFoundError(msg)
return SeasonRequestSchema.model_validate(request) return SeasonRequestSchema.model_validate(request)
except SQLAlchemyError as e: except SQLAlchemyError:
log.error( log.exception(
f"Database error retrieving season request {season_request_id}: {e}" f"Database error retrieving season request {season_request_id}"
) )
raise raise
@@ -555,8 +553,8 @@ class TvRepository:
msg = f"Show for season_id {season_id} not found." msg = f"Show for season_id {season_id} not found."
raise NotFoundError(msg) raise NotFoundError(msg)
return ShowSchema.model_validate(result) return ShowSchema.model_validate(result)
except SQLAlchemyError as e: except SQLAlchemyError:
log.error(f"Database error retrieving show by season_id {season_id}: {e}") log.exception(f"Database error retrieving show by season_id {season_id}")
raise raise
def add_season_to_show( def add_season_to_show(

View File

@@ -94,7 +94,9 @@ def get_all_importable_shows(
dependencies=[Depends(current_superuser)], dependencies=[Depends(current_superuser)],
status_code=status.HTTP_204_NO_CONTENT, status_code=status.HTTP_204_NO_CONTENT,
) )
def import_detected_show(tv_service: tv_service_dep, tv_show: show_dep, directory: str) -> None: def import_detected_show(
tv_service: tv_service_dep, tv_show: show_dep, directory: str
) -> None:
""" """
Import a detected show from the specified directory into the library. Import a detected show from the specified directory into the library.
""" """
@@ -145,7 +147,7 @@ def add_a_show(
Add a new show to the library. Add a new show to the library.
""" """
try: try:
show = tv_service.add_show( show = tv_service.add_show(
external_id=show_id, external_id=show_id,
metadata_provider=metadata_provider, metadata_provider=metadata_provider,
language=language, language=language,

View File

@@ -8,7 +8,7 @@ from sqlalchemy.exc import IntegrityError
from media_manager.config import MediaManagerConfig from media_manager.config import MediaManagerConfig
from media_manager.database import get_session from media_manager.database import get_session
from media_manager.exceptions import InvalidConfigError, NotFoundError from media_manager.exceptions import InvalidConfigError, NotFoundError, RenameError
from media_manager.indexer.repository import IndexerRepository from media_manager.indexer.repository import IndexerRepository
from media_manager.indexer.schemas import IndexerQueryResult, IndexerQueryResultId from media_manager.indexer.schemas import IndexerQueryResult, IndexerQueryResultId
from media_manager.indexer.service import IndexerService from media_manager.indexer.service import IndexerService
@@ -174,8 +174,10 @@ class TvService:
try: try:
self.torrent_service.cancel_download(torrent, delete_files=True) self.torrent_service.cancel_download(torrent, delete_files=True)
log.info(f"Deleted torrent: {torrent.hash}") log.info(f"Deleted torrent: {torrent.hash}")
except Exception as e: except Exception:
log.warning(f"Failed to delete torrent {torrent.hash}: {e}") log.warning(
f"Failed to delete torrent {torrent.hash}", exc_info=True
)
self.tv_repository.delete_show(show_id=show.id) self.tv_repository.delete_show(show_id=show.id)
@@ -226,19 +228,19 @@ class TvService:
self.tv_repository.get_show_by_external_id( self.tv_repository.get_show_by_external_id(
external_id=external_id, metadata_provider=metadata_provider external_id=external_id, metadata_provider=metadata_provider
) )
return True
except NotFoundError: except NotFoundError:
return False return False
elif show_id is not None: elif show_id is not None:
try: try:
self.tv_repository.get_show_by_id(show_id=show_id) self.tv_repository.get_show_by_id(show_id=show_id)
return True
except NotFoundError: except NotFoundError:
return False return False
else: else:
msg = "Use one of the provided overloads for this function!" msg = "Use one of the provided overloads for this function!"
raise ValueError(msg) raise ValueError(msg)
return True
def get_all_available_torrents_for_a_season( def get_all_available_torrents_for_a_season(
self, self,
season_number: int, season_number: int,
@@ -379,8 +381,9 @@ class TvService:
if torrent_file.imported: if torrent_file.imported:
return True return True
except RuntimeError as e: except RuntimeError:
log.error(f"Error retrieving torrent, error: {e}") log.exception("Error retrieving torrent")
return False return False
def get_show_by_external_id( def get_show_by_external_id(
@@ -641,7 +644,7 @@ class TvService:
return True return True
else: else:
msg = f"Could not find any video file for episode {episode_number} of show {show.name} S{season.number}" msg = f"Could not find any video file for episode {episode_number} of show {show.name} S{season.number}"
raise Exception(msg) raise Exception(msg) # noqa: TRY002 # TODO: resolve this
def import_season( def import_season(
self, self,
@@ -659,9 +662,9 @@ class TvService:
try: try:
season_path.mkdir(parents=True, exist_ok=True) season_path.mkdir(parents=True, exist_ok=True)
except Exception as e: except Exception as e:
log.warning(f"Could not create path {season_path}: {e}") log.exception(f"Could not create path {season_path}")
msg = f"Could not create path {season_path}" msg = f"Could not create path {season_path}"
raise Exception(msg) from e raise Exception(msg) from e # noqa: TRY002 # TODO: resolve this
for episode in season.episodes: for episode in season.episodes:
try: try:
@@ -901,9 +904,8 @@ class TvService:
try: try:
source_directory.rename(new_source_path) source_directory.rename(new_source_path)
except Exception as e: except Exception as e:
log.error(f"Failed to rename {source_directory} to {new_source_path}: {e}") log.exception(f"Failed to rename {source_directory} to {new_source_path}")
msg = "Failed to rename source directory" raise RenameError from e
raise Exception(msg) from e
video_files, subtitle_files, _all_files = get_files_for_import( video_files, subtitle_files, _all_files = get_files_for_import(
directory=new_source_path directory=new_source_path
@@ -967,12 +969,14 @@ def auto_download_all_approved_season_requests() -> None:
tv_repository = TvRepository(db=db) tv_repository = TvRepository(db=db)
torrent_service = TorrentService(torrent_repository=TorrentRepository(db=db)) torrent_service = TorrentService(torrent_repository=TorrentRepository(db=db))
indexer_service = IndexerService(indexer_repository=IndexerRepository(db=db)) indexer_service = IndexerService(indexer_repository=IndexerRepository(db=db))
notification_service = NotificationService(notification_repository=NotificationRepository(db=db)) notification_service = NotificationService(
notification_repository=NotificationRepository(db=db)
)
tv_service = TvService( tv_service = TvService(
tv_repository=tv_repository, tv_repository=tv_repository,
torrent_service=torrent_service, torrent_service=torrent_service,
indexer_service=indexer_service, indexer_service=indexer_service,
notification_service=notification_service notification_service=notification_service,
) )
log.info("Auto downloading all approved season requests") log.info("Auto downloading all approved season requests")
@@ -1004,12 +1008,14 @@ def import_all_show_torrents() -> None:
tv_repository = TvRepository(db=db) tv_repository = TvRepository(db=db)
torrent_service = TorrentService(torrent_repository=TorrentRepository(db=db)) torrent_service = TorrentService(torrent_repository=TorrentRepository(db=db))
indexer_service = IndexerService(indexer_repository=IndexerRepository(db=db)) indexer_service = IndexerService(indexer_repository=IndexerRepository(db=db))
notification_service = NotificationService(notification_repository=NotificationRepository(db=db)) notification_service = NotificationService(
notification_repository=NotificationRepository(db=db)
)
tv_service = TvService( tv_service = TvService(
tv_repository=tv_repository, tv_repository=tv_repository,
torrent_service=torrent_service, torrent_service=torrent_service,
indexer_service=indexer_service, indexer_service=indexer_service,
notification_service=notification_service notification_service=notification_service,
) )
log.info("Importing all torrents") log.info("Importing all torrents")
torrents = torrent_service.get_all_torrents() torrents = torrent_service.get_all_torrents()
@@ -1024,10 +1030,8 @@ def import_all_show_torrents() -> None:
) )
continue continue
tv_service.import_torrent_files(torrent=t, show=show) tv_service.import_torrent_files(torrent=t, show=show)
except RuntimeError as e: except RuntimeError:
log.error( log.exception(f"Error importing torrent {t.title} for show {show.name}")
f"Error importing torrent {t.title} for show {show.name}: {e}"
)
log.info("Finished importing all torrents") log.info("Finished importing all torrents")
db.commit() db.commit()
@@ -1042,7 +1046,9 @@ def update_all_non_ended_shows_metadata() -> None:
tv_repository=tv_repository, tv_repository=tv_repository,
torrent_service=TorrentService(torrent_repository=TorrentRepository(db=db)), torrent_service=TorrentService(torrent_repository=TorrentRepository(db=db)),
indexer_service=IndexerService(indexer_repository=IndexerRepository(db=db)), indexer_service=IndexerService(indexer_repository=IndexerRepository(db=db)),
notification_service=NotificationService(notification_repository=NotificationRepository(db=db)) notification_service=NotificationService(
notification_repository=NotificationRepository(db=db)
),
) )
log.info("Updating metadata for all non-ended shows") log.info("Updating metadata for all non-ended shows")
@@ -1062,9 +1068,9 @@ def update_all_non_ended_shows_metadata() -> None:
f"Unsupported metadata provider {show.metadata_provider} for show {show.name}, skipping update." f"Unsupported metadata provider {show.metadata_provider} for show {show.name}, skipping update."
) )
continue continue
except InvalidConfigError as e: except InvalidConfigError:
log.error( log.exception(
f"Error initializing metadata provider {show.metadata_provider} for show {show.name}: {e}" f"Error initializing metadata provider {show.metadata_provider} for show {show.name}"
) )
continue continue
updated_show = tv_service.update_show_metadata( updated_show = tv_service.update_show_metadata(

View File

@@ -32,7 +32,9 @@ else:
return TV(show_id).external_ids() return TV(show_id).external_ids()
@router.get("/tv/shows/{show_id}/{season_number}") @router.get("/tv/shows/{show_id}/{season_number}")
async def get_tmdb_season(season_number: int, show_id: int, language: str = "en") -> dict: async def get_tmdb_season(
season_number: int, show_id: int, language: str = "en"
) -> dict:
return TV_Seasons(season_number=season_number, tv_id=show_id).info( return TV_Seasons(season_number=season_number, tv_id=show_id).info(
language=language language=language
) )
@@ -42,7 +44,9 @@ else:
return Trending(media_type="movie").info(language=language) return Trending(media_type="movie").info(language=language)
@router.get("/movies/search") @router.get("/movies/search")
async def search_tmdb_movies(query: str, page: int = 1, language: str = "en") -> dict: async def search_tmdb_movies(
query: str, page: int = 1, language: str = "en"
) -> dict:
return Search().movie(page=page, query=query, language=language) return Search().movie(page=page, query=query, language=language)
@router.get("/movies/{movie_id}") @router.get("/movies/{movie_id}")

View File

@@ -1,3 +1,4 @@
exclude = ["alembic/versions"]
namespace-packages = ["alembic", "metadata_relay"] namespace-packages = ["alembic", "metadata_relay"]
[format] [format]
@@ -5,7 +6,7 @@ line-ending = "lf"
quote-style = "double" quote-style = "double"
[lint] [lint]
# to be enabled: BLE, C90, CPY, D, DOC, DTZ, FBT, G, PL, RSE, SLF, SIM, TC, TRY, UP # to be enabled: BLE, C90, CPY, D, DOC, DTZ, FBT, G, PL, RSE, SLF, SIM, TC
extend-select = [ extend-select = [
"A", "ARG", "ASYNC", "ANN", "A", "ARG", "ASYNC", "ANN",
"B", "B",
@@ -20,7 +21,8 @@ extend-select = [
"Q", "Q",
"RET", "RUF", "RET", "RUF",
"S", "SLOT", "S", "SLOT",
"T10", "T20", "TD", "TID", "T10", "T20", "TD", "TID", "TRY",
"UP",
"W", "W",
"YTT" "YTT"
] ]
@@ -32,6 +34,10 @@ ignore = [
"E501", "E501",
# currently a bug?! with providers and depends # currently a bug?! with providers and depends
"FAST003", "FAST003",
# I'm not sure if we want to lint them
"FIX002",
# let's decide if we want this
"TD002", "TD003",
] ]
[lint.flake8-bugbear] [lint.flake8-bugbear]