Mirror of https://github.com/maxdorninger/MediaManager.git, synced 2026-04-17 15:13:24 +02:00
switch from APScheduler to Taskiq (#461)
This PR replaces the APScheduler library with the Taskiq task-queuing library.

## Why

APScheduler doesn't support FastAPI's dependency injection in tasks, which makes them cumbersome to read and write: the DB session, repositories, and services all have to be instantiated manually. Moreover, Taskiq makes it easier to start background tasks from FastAPI requests, which lets MediaManager move toward a more event-based architecture.

## Summary by CodeRabbit

* **New Features**
  * The app now uses an orchestrated async startup/shutdown and runs background scheduling via a database-backed task queue; startup enqueues pre-load/import/update tasks.
* **Bug Fixes**
  * Improved torrent client handling with clearer conflict messages and guidance for manual resolution.
  * Enhanced logging around season, episode, and metadata update operations; minor regex/behaviour formatting preserved.
* **Chores**
  * Updated dependencies to support the new task queue and connection pooling.
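For readers unfamiliar with Taskiq, the pattern this PR adopts looks roughly like the sketch below: a task declares the service it needs via `TaskiqDepends`, and any FastAPI handler can enqueue it with `.kiq()`. This is a minimal illustration only; the broker, task, and dependency names mirror the diff further down, while the DSN string and the `/movies/refresh-metadata` endpoint are made up for the example.

```python
# Minimal sketch of the Taskiq pattern adopted in this PR (illustrative only).
# The broker/task/dependency names mirror the diff below; the DSN string and
# the example endpoint are placeholders, not part of the actual change.
import asyncio

from fastapi import FastAPI
from taskiq import TaskiqDepends
from taskiq_postgresql import PostgresqlBroker

from media_manager.movies.dependencies import get_movie_service
from media_manager.movies.service import MovieService

# Postgres-backed broker; the real code builds the DSN from MediaManagerConfig
# and also calls taskiq_fastapi.init(broker, "media_manager.main:app") so that
# FastAPI dependencies resolve inside worker processes.
broker = PostgresqlBroker(dsn="postgresql://user:pass@localhost:5432/mediamanager")

app = FastAPI()


@broker.task
async def update_all_movies_metadata_task(
    # Resolved through FastAPI's dependency injection, so the task does not
    # need to build the DB session, repositories, and services by hand.
    movie_service: MovieService = TaskiqDepends(get_movie_service),
) -> None:
    # The service method is synchronous, so it is pushed onto a thread.
    await asyncio.to_thread(movie_service.update_all_metadata)


@app.post("/movies/refresh-metadata")  # hypothetical endpoint, not in this PR
async def refresh_metadata() -> dict[str, str]:
    # .kiq() enqueues the task on the broker (started during app lifespan) and
    # returns immediately; a worker or the in-process Receiver runs it later.
    await update_all_movies_metadata_task.kiq()
    return {"status": "queued"}
```

Compare this with the old APScheduler path in the scheduler hunk below, where each job function had to open a session and construct every repository and service by hand.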
Committed by GitHub
Parent: e529e0c0a3
Commit: f5253990e0
@@ -24,7 +24,11 @@ def evaluate_indexer_query_result(
log.debug(f"Applying rule {rule.name} to {query_result.title}")
if (
any(
re.search(rf"\b{re.escape(keyword)}\b", query_result.title, re.IGNORECASE)
re.search(
rf"\b{re.escape(keyword)}\b",
query_result.title,
re.IGNORECASE,
)
for keyword in rule.keywords
)
and not rule.negate

@@ -35,7 +39,11 @@ def evaluate_indexer_query_result(
query_result.score += rule.score_modifier
elif (
not any(
re.search(rf"\b{re.escape(keyword)}\b", query_result.title, re.IGNORECASE)
re.search(
rf"\b{re.escape(keyword)}\b",
query_result.title,
re.IGNORECASE,
)
for keyword in rule.keywords
)
and rule.negate

@@ -83,3 +83,4 @@ def setup_logging() -> None:
logging.getLogger("transmission_rpc").setLevel(logging.WARNING)
logging.getLogger("qbittorrentapi").setLevel(logging.WARNING)
logging.getLogger("sabnzbd_api").setLevel(logging.WARNING)
logging.getLogger("taskiq").setLevel(logging.WARNING)
@@ -1,5 +1,8 @@
import asyncio
import logging
import os
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager

import uvicorn
from asgi_correlation_id import CorrelationIdMiddleware

@@ -9,6 +12,8 @@ from fastapi.staticfiles import StaticFiles
from psycopg.errors import UniqueViolation
from sqlalchemy.exc import IntegrityError
from starlette.responses import FileResponse, RedirectResponse
from taskiq.receiver import Receiver
from taskiq_fastapi import populate_dependency_context
from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware

import media_manager.movies.router as movies_router

@@ -28,6 +33,7 @@ from media_manager.auth.users import (
fastapi_users,
)
from media_manager.config import MediaManagerConfig
from media_manager.database import init_engine
from media_manager.exceptions import (
ConflictError,
InvalidConfigError,

@@ -42,18 +48,24 @@ from media_manager.exceptions import (
from media_manager.filesystem_checks import run_filesystem_checks
from media_manager.logging import LOGGING_CONFIG, setup_logging
from media_manager.notification.router import router as notification_router
from media_manager.scheduler import setup_scheduler
from media_manager.scheduler import (
broker,
build_scheduler_loop,
import_all_movie_torrents_task,
import_all_show_torrents_task,
update_all_movies_metadata_task,
update_all_non_ended_shows_metadata_task,
)

setup_logging()

config = MediaManagerConfig()
log = logging.getLogger(__name__)

if config.misc.development:
log.warning("Development Mode activated!")

scheduler = setup_scheduler(config)

run_filesystem_checks(config, log)

BASE_PATH = os.getenv("BASE_PATH", "")

@@ -62,7 +74,57 @@ DISABLE_FRONTEND_MOUNT = os.getenv("DISABLE_FRONTEND_MOUNT", "").lower() == "tru
FRONTEND_FOLLOW_SYMLINKS = os.getenv("FRONTEND_FOLLOW_SYMLINKS", "").lower() == "true"

log.info("Hello World!")
app = FastAPI(root_path=BASE_PATH)

@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncGenerator:
init_engine(config.database)
broker_started = False
started_sources: list = []
finish_event: asyncio.Event | None = None
receiver_task: asyncio.Task | None = None
loop_task: asyncio.Task | None = None
try:
if not broker.is_worker_process:
await broker.startup()
broker_started = True
populate_dependency_context(broker, app)
scheduler_loop = build_scheduler_loop()
for source in scheduler_loop.scheduler.sources:
await source.startup()
started_sources.append(source)
finish_event = asyncio.Event()
receiver = Receiver(broker, run_startup=False, max_async_tasks=10)
receiver_task = asyncio.create_task(receiver.listen(finish_event))
loop_task = asyncio.create_task(scheduler_loop.run(skip_first_run=True))
try:
await asyncio.gather(
import_all_movie_torrents_task.kiq(),
import_all_show_torrents_task.kiq(),
update_all_movies_metadata_task.kiq(),
update_all_non_ended_shows_metadata_task.kiq(),
)
except Exception:
log.exception("Failed to submit initial background tasks during startup.")
raise
yield
finally:
if loop_task is not None:
loop_task.cancel()
try:
await loop_task
except asyncio.CancelledError:
pass
if finish_event is not None and receiver_task is not None:
finish_event.set()
await receiver_task
for source in started_sources:
await source.shutdown()
if broker_started:
await broker.shutdown()

app = FastAPI(root_path=BASE_PATH, lifespan=lifespan)
app.add_middleware(ProxyHeadersMiddleware, trusted_hosts="*")
origins = config.misc.cors_urls
log.info(f"CORS URLs activated for following origins: {origins}")
@@ -6,9 +6,7 @@ from typing import overload
from sqlalchemy.exc import IntegrityError

from media_manager.config import MediaManagerConfig
from media_manager.database import get_session
from media_manager.exceptions import InvalidConfigError, NotFoundError, RenameError
from media_manager.indexer.repository import IndexerRepository
from media_manager.indexer.schemas import IndexerQueryResult, IndexerQueryResultId
from media_manager.indexer.service import IndexerService
from media_manager.indexer.utils import evaluate_indexer_query_results

@@ -28,10 +26,8 @@ from media_manager.movies.schemas import (
PublicMovieFile,
RichMovieTorrent,
)
from media_manager.notification.repository import NotificationRepository
from media_manager.notification.service import NotificationService
from media_manager.schemas import MediaImportSuggestion
from media_manager.torrent.repository import TorrentRepository
from media_manager.torrent.schemas import (
Quality,
Torrent,

@@ -613,7 +609,7 @@ class MovieService:
)
if not fresh_movie_data:
log.warning(
f"Could not fetch fresh metadata for movie: {db_movie.name} (ID: {db_movie.external_id})"
f"Could not fetch fresh metadata for movie: {db_movie.name} ({db_movie.year})"
)
return None
log.debug(f"Fetched fresh metadata for movie: {fresh_movie_data.name}")

@@ -628,7 +624,9 @@ class MovieService:

updated_movie = self.movie_repository.get_movie_by_id(movie_id=db_movie.id)

log.info(f"Successfully updated metadata for movie ID: {db_movie.id}")
log.info(
f"Successfully updated metadata for movie: {db_movie.name} ({db_movie.year})"
)
metadata_provider.download_movie_poster_image(movie=updated_movie)
return updated_movie

@@ -663,61 +661,29 @@ class MovieService:
log.debug(f"Found {len(importable_movies)} importable movies.")
return importable_movies

def import_all_movie_torrents() -> None:
with next(get_session()) as db:
movie_repository = MovieRepository(db=db)
torrent_service = TorrentService(torrent_repository=TorrentRepository(db=db))
indexer_service = IndexerService(indexer_repository=IndexerRepository(db=db))
notification_service = NotificationService(
notification_repository=NotificationRepository(db=db)
)
movie_service = MovieService(
movie_repository=movie_repository,
torrent_service=torrent_service,
indexer_service=indexer_service,
notification_service=notification_service,
)
def import_all_torrents(self) -> None:
log.info("Importing all torrents")
torrents = torrent_service.get_all_torrents()
torrents = self.torrent_service.get_all_torrents()
log.info("Found %d torrents to import", len(torrents))
for t in torrents:
try:
if not t.imported and t.status == TorrentStatus.finished:
movie = torrent_service.get_movie_of_torrent(torrent=t)
movie = self.torrent_service.get_movie_of_torrent(torrent=t)
if movie is None:
log.warning(
f"torrent {t.title} is not a movie torrent, skipping import."
)
continue
movie_service.import_torrent_files(torrent=t, movie=movie)
self.import_torrent_files(torrent=t, movie=movie)
except RuntimeError:
log.exception(f"Failed to import torrent {t.title}")
log.info("Finished importing all torrents")
db.commit()

def update_all_movies_metadata() -> None:
"""
Updates the metadata of all movies.
"""
with next(get_session()) as db:
movie_repository = MovieRepository(db=db)
movie_service = MovieService(
movie_repository=movie_repository,
torrent_service=TorrentService(torrent_repository=TorrentRepository(db=db)),
indexer_service=IndexerService(indexer_repository=IndexerRepository(db=db)),
notification_service=NotificationService(
notification_repository=NotificationRepository(db=db)
),
)

def update_all_metadata(self) -> None:
"""Updates the metadata of all movies."""
log.info("Updating metadata for all movies")

movies = movie_repository.get_movies()

movies = self.movie_repository.get_movies()
log.info(f"Found {len(movies)} movies to update")

for movie in movies:
try:
if movie.metadata_provider == "tmdb":

@@ -734,7 +700,6 @@ def update_all_movies_metadata() -> None:
f"Error initializing metadata provider {movie.metadata_provider} for movie {movie.name}",
)
continue
movie_service.update_movie_metadata(
self.update_movie_metadata(
db_movie=movie, metadata_provider=metadata_provider
)
db.commit()
@@ -1,52 +1,91 @@
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger
import asyncio
import logging
from urllib.parse import quote

import media_manager.database
from media_manager.config import MediaManagerConfig
from media_manager.movies.service import (
import_all_movie_torrents,
update_all_movies_metadata,
)
from media_manager.tv.service import (
import_all_show_torrents,
update_all_non_ended_shows_metadata,
import taskiq_fastapi
from taskiq import TaskiqDepends, TaskiqScheduler
from taskiq.cli.scheduler.run import SchedulerLoop
from taskiq_postgresql import PostgresqlBroker
from taskiq_postgresql.scheduler_source import PostgresqlSchedulerSource

from media_manager.movies.dependencies import get_movie_service
from media_manager.movies.service import MovieService
from media_manager.tv.dependencies import get_tv_service
from media_manager.tv.service import TvService

def _build_db_connection_string_for_taskiq() -> str:
from media_manager.config import MediaManagerConfig

db_config = MediaManagerConfig().database
user = quote(db_config.user, safe="")
password = quote(db_config.password, safe="")
dbname = quote(db_config.dbname, safe="")
host = quote(str(db_config.host), safe="")
port = quote(str(db_config.port), safe="")
return f"postgresql://{user}:{password}@{host}:{port}/{dbname}"

broker = PostgresqlBroker(
dsn=_build_db_connection_string_for_taskiq,
driver="psycopg",
run_migrations=True,
)

# Register FastAPI app with the broker so worker processes can resolve FastAPI
# dependencies. Using a string reference avoids circular imports.
taskiq_fastapi.init(broker, "media_manager.main:app")

def setup_scheduler(config: MediaManagerConfig) -> BackgroundScheduler:
from media_manager.database import init_engine
log = logging.getLogger(__name__)

init_engine(config.database)
jobstores = {"default": SQLAlchemyJobStore(engine=media_manager.database.engine)}
scheduler = BackgroundScheduler(jobstores=jobstores)
every_15_minutes_trigger = CronTrigger(minute="*/15", hour="*")
weekly_trigger = CronTrigger(
day_of_week="mon", hour=0, minute=0, jitter=60 * 60 * 24 * 2

@broker.task
async def import_all_movie_torrents_task(
movie_service: MovieService = TaskiqDepends(get_movie_service),
) -> None:
log.info("Importing all Movie torrents")
await asyncio.to_thread(movie_service.import_all_torrents)

@broker.task
async def import_all_show_torrents_task(
tv_service: TvService = TaskiqDepends(get_tv_service),
) -> None:
log.info("Importing all Show torrents")
await asyncio.to_thread(tv_service.import_all_torrents)

@broker.task
async def update_all_movies_metadata_task(
movie_service: MovieService = TaskiqDepends(get_movie_service),
) -> None:
await asyncio.to_thread(movie_service.update_all_metadata)

@broker.task
async def update_all_non_ended_shows_metadata_task(
tv_service: TvService = TaskiqDepends(get_tv_service),
) -> None:
await asyncio.to_thread(tv_service.update_all_non_ended_shows_metadata)

# Maps each task to its cron schedule so PostgresqlSchedulerSource can seed
# the taskiq_schedulers table on first startup.
_STARTUP_SCHEDULES: dict[str, list[dict[str, str]]] = {
import_all_movie_torrents_task.task_name: [{"cron": "*/2 * * * *"}],
import_all_show_torrents_task.task_name: [{"cron": "*/2 * * * *"}],
update_all_movies_metadata_task.task_name: [{"cron": "0 0 * * 1"}],
update_all_non_ended_shows_metadata_task.task_name: [{"cron": "0 0 * * 1"}],
}

def build_scheduler_loop() -> SchedulerLoop:
source = PostgresqlSchedulerSource(
dsn=_build_db_connection_string_for_taskiq,
driver="psycopg",
broker=broker,
run_migrations=True,
startup_schedule=_STARTUP_SCHEDULES,
)
scheduler.add_job(
import_all_movie_torrents,
every_15_minutes_trigger,
id="import_all_movie_torrents",
replace_existing=True,
)
scheduler.add_job(
import_all_show_torrents,
every_15_minutes_trigger,
id="import_all_show_torrents",
replace_existing=True,
)
scheduler.add_job(
update_all_movies_metadata,
weekly_trigger,
id="update_all_movies_metadata",
replace_existing=True,
)
scheduler.add_job(
update_all_non_ended_shows_metadata,
weekly_trigger,
id="update_all_non_ended_shows_metadata",
replace_existing=True,
)
scheduler.start()
return scheduler
scheduler = TaskiqScheduler(broker=broker, sources=[source])
return SchedulerLoop(scheduler)
@@ -57,21 +57,45 @@ class QbittorrentDownloadClient(AbstractDownloadClient):
log.exception("Failed to log into qbittorrent")
raise

try:
self.api_client.torrents_create_category(
name=self.config.category_name,
save_path=self.config.category_save_path,
categories = self.api_client.torrents_categories()
log.debug(f"Found following categories in qBittorrent: {categories}")
if self.config.category_name in categories:
category = categories.get(self.config.category_name)
if category.get("savePath") == self.config.category_save_path:
log.debug(
f"Category '{self.config.category_name}' already exists in qBittorrent with the correct save path."
)
return
# category exists but with a different save path, attempt to update it
log.debug(
f"Category '{self.config.category_name}' already exists in qBittorrent but with a different save path. Attempting to update it."
)
except Conflict409Error:
try:
self.api_client.torrents_edit_category(
name=self.config.category_name,
save_path=self.config.category_save_path
if self.config.category_save_path != ""
else None,
save_path=self.config.category_save_path,
)
except Conflict409Error:
log.exception(
f"Attempt to update category '{self.config.category_name}' in qBittorrent with a different save"
f" path failed. The configured save path and the save path saved in Qbittorrent differ,"
f" manually update it in the qBittorrent WebUI or change the save path in the MediaManager"
f" config to match the one in qBittorrent."
)
else:
# create category if it doesn't exist
log.debug(
f"Category '{self.config.category_name}' does not exist in qBittorrent. Attempting to create it."
)
try:
self.api_client.torrents_create_category(
name=self.config.category_name,
save_path=self.config.category_save_path,
)
except Conflict409Error:
log.exception(
f"Attempt to create category '{self.config.category_name}' in qBittorrent failed. The category already exists but was not found in the initial category list, manually check if the category exists in the qBittorrent WebUI or change the category name in the MediaManager config."
)
except Exception:
log.exception("Error on updating MediaManager category in qBittorrent")

def download_torrent(self, indexer_result: IndexerQueryResult) -> Torrent:
"""
@@ -684,6 +684,9 @@ class TvRepository:
if updated:
self.db.commit()
self.db.refresh(db_season)
log.debug(
f"Updating existing season {db_season.number} for show {db_season.show.name}"
)
return SeasonSchema.model_validate(db_season)

def update_episode_attributes(

@@ -719,4 +722,5 @@ class TvRepository:
if updated:
self.db.commit()
self.db.refresh(db_episode)
log.info(f"Updating existing episode {db_episode.number}")
return EpisodeSchema.model_validate(db_episode)
@@ -7,9 +7,7 @@ from typing import overload
from sqlalchemy.exc import IntegrityError

from media_manager.config import MediaManagerConfig
from media_manager.database import get_session
from media_manager.exceptions import InvalidConfigError, NotFoundError, RenameError
from media_manager.indexer.repository import IndexerRepository
from media_manager.indexer.schemas import IndexerQueryResult, IndexerQueryResultId
from media_manager.indexer.service import IndexerService
from media_manager.indexer.utils import evaluate_indexer_query_results

@@ -19,10 +17,8 @@ from media_manager.metadataProvider.abstract_metadata_provider import (
from media_manager.metadataProvider.schemas import MetaDataProviderSearchResult
from media_manager.metadataProvider.tmdb import TmdbMetadataProvider
from media_manager.metadataProvider.tvdb import TvdbMetadataProvider
from media_manager.notification.repository import NotificationRepository
from media_manager.notification.service import NotificationService
from media_manager.schemas import MediaImportSuggestion
from media_manager.torrent.repository import TorrentRepository
from media_manager.torrent.schemas import (
Quality,
Torrent,

@@ -905,9 +901,7 @@ class TvService:
existing_season = existing_season_external_ids[
fresh_season_data.external_id
]
log.debug(
f"Updating existing season {existing_season.number} for show {db_show.name}"
)

self.tv_repository.update_season_attributes(
season_id=existing_season.id,
name=fresh_season_data.name,

@@ -919,14 +913,12 @@ class TvService:
ep.external_id: ep for ep in existing_season.episodes
}
for fresh_episode_data in fresh_season_data.episodes:
if fresh_episode_data.number in existing_episode_external_ids:
if fresh_episode_data.external_id in existing_episode_external_ids:
# Update existing episode
existing_episode = existing_episode_external_ids[
fresh_episode_data.external_id
]
log.debug(
f"Updating existing episode {existing_episode.number} for season {existing_season.number}"
)

self.tv_repository.update_episode_attributes(
episode_id=existing_episode.id,
title=fresh_episode_data.title,

@@ -977,7 +969,7 @@ class TvService:

updated_show = self.tv_repository.get_show_by_id(show_id=db_show.id)

log.info(f"Successfully updated metadata for show ID: {db_show.id}")
log.info(f"Successfully updated metadata for show: {updated_show.name}")
metadata_provider.download_show_poster_image(show=updated_show)
return updated_show

@@ -1070,64 +1062,34 @@ class TvService:
log.debug(f"Detected {len(import_suggestions)} importable TV shows.")
return import_suggestions

def import_all_show_torrents() -> None:
with next(get_session()) as db:
tv_repository = TvRepository(db=db)
torrent_service = TorrentService(torrent_repository=TorrentRepository(db=db))
indexer_service = IndexerService(indexer_repository=IndexerRepository(db=db))
notification_service = NotificationService(
notification_repository=NotificationRepository(db=db)
)
tv_service = TvService(
tv_repository=tv_repository,
torrent_service=torrent_service,
indexer_service=indexer_service,
notification_service=notification_service,
)
def import_all_torrents(self) -> None:
log.info("Importing all torrents")
torrents = torrent_service.get_all_torrents()
torrents = self.torrent_service.get_all_torrents()
log.info("Found %d torrents to import", len(torrents))
for t in torrents:
show = None
try:
if not t.imported and t.status == TorrentStatus.finished:
show = torrent_service.get_show_of_torrent(torrent=t)
show = self.torrent_service.get_show_of_torrent(torrent=t)
if show is None:
log.warning(
f"torrent {t.title} is not a tv torrent, skipping import."
)
continue
tv_service.import_episode_files_from_torrent(torrent=t, show=show)
self.import_episode_files_from_torrent(torrent=t, show=show)
except RuntimeError as e:
show_name = show.name if show is not None else "<unknown>"
log.error(
f"Error importing torrent {t.title} for show {show.name}: {e}",
f"Error importing torrent {t.title} for show {show_name}: {e}",
exc_info=True,
)
log.info("Finished importing all torrents")
db.commit()

def update_all_non_ended_shows_metadata() -> None:
"""
Updates the metadata of all non-ended shows.
"""
with next(get_session()) as db:
tv_repository = TvRepository(db=db)
tv_service = TvService(
tv_repository=tv_repository,
torrent_service=TorrentService(torrent_repository=TorrentRepository(db=db)),
indexer_service=IndexerService(indexer_repository=IndexerRepository(db=db)),
notification_service=NotificationService(
notification_repository=NotificationRepository(db=db)
),
)

def update_all_non_ended_shows_metadata(self) -> None:
"""Updates the metadata of all non-ended shows."""
log.info("Updating metadata for all non-ended shows")

shows = [show for show in tv_repository.get_shows() if not show.ended]

shows = [show for show in self.tv_repository.get_shows() if not show.ended]
log.info(f"Found {len(shows)} non-ended shows to update")

for show in shows:
try:
if show.metadata_provider == "tmdb":

@@ -1144,12 +1106,10 @@ def update_all_non_ended_shows_metadata() -> None:
f"Error initializing metadata provider {show.metadata_provider} for show {show.name}"
)
continue
updated_show = tv_service.update_show_metadata(
updated_show = self.update_show_metadata(
db_show=show, metadata_provider=metadata_provider
)

if updated_show:
log.debug("Updated show metadata", extra={"show": updated_show.name})
else:
log.warning(f"Failed to update metadata for show: {show.name}")
db.commit()