diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 0000000..829c896 --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,119 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +# Use forward slashes (/) also on windows to provide an os agnostic path +script_location = alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library. +# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. 
+# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +# version_path_separator = newline +# +# Use os.pathsep. Default configuration used for new projects. +version_path_separator = os + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/backend/alembic/README b/backend/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/backend/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/backend/alembic/env.py b/backend/alembic/env.py new file mode 100644 index 0000000..523b54f --- /dev/null +++ b/backend/alembic/env.py @@ -0,0 +1,103 @@ +import os +import sys +from logging.config import fileConfig + +from alembic import context +from pydantic_settings import BaseSettings, SettingsConfigDict +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) +sys.path.insert(0, project_root) + +# SQLAlchemy Base +from ..src.database import Base +# All Tables + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. 
+config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = Base.metadata + + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + +class DbConfig(BaseSettings): + model_config = SettingsConfigDict(env_prefix='DB_') + HOST: str = "localhost" + PORT: int = 5432 + USER: str = "MediaManager" + PASSWORD: str = "MediaManager" + DBNAME: str = "MediaManager" + + +db_config = DbConfig() +db_url = "postgresql+psycopg" + "://" + db_config.USER + ":" + db_config.PASSWORD + "@" + db_config.HOST + ":" + str( + db_config.PORT) + "/" + db_config.DBNAME + +config.set_main_option("sqlalchemy.url", db_url) + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/backend/alembic/script.py.mako b/backend/alembic/script.py.mako new file mode 100644 index 0000000..480b130 --- /dev/null +++ b/backend/alembic/script.py.mako @@ -0,0 +1,28 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Upgrade schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Downgrade schema.""" + ${downgrades if downgrades else "pass"} diff --git a/backend/src/requirements.txt b/backend/requirements.txt similarity index 100% rename from backend/src/requirements.txt rename to backend/requirements.txt diff --git a/backend/src/auth/config.py b/backend/src/auth/config.py index ffabe72..597e732 100644 --- a/backend/src/auth/config.py +++ b/backend/src/auth/config.py @@ -10,3 +10,12 @@ class AuthConfig(BaseSettings): @property def jwt_signing_key(self): return self._jwt_signing_key + + +class OAuth2Config(BaseSettings): + model_config = SettingsConfigDict(env_prefix='OAUTH_') + client_id: str + client_secret: str + authorize_endpoint: str + access_token_endpoint: str + name: str = "OAuth2" diff 
--git a/backend/src/auth/db.py b/backend/src/auth/db.py index 1c333b2..2672054 100644 --- a/backend/src/auth/db.py +++ b/backend/src/auth/db.py @@ -1,16 +1,24 @@ from collections.abc import AsyncGenerator from fastapi import Depends -from fastapi_users.db import SQLAlchemyBaseUserTableUUID, SQLAlchemyUserDatabase +from fastapi_users.db import SQLAlchemyBaseUserTableUUID, SQLAlchemyUserDatabase, SQLAlchemyBaseOAuthAccountTableUUID from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine +from sqlalchemy.orm import Mapped, relationship -import database +import backend.src.database as database +from backend.src.database import Base -class User(SQLAlchemyBaseUserTableUUID, database.Base): +class OAuthAccount(SQLAlchemyBaseOAuthAccountTableUUID, Base): pass +class User(SQLAlchemyBaseUserTableUUID, Base): + oauth_accounts: Mapped[list[OAuthAccount]] = relationship( + "OAuthAccount", lazy="joined" + ) + + engine = create_async_engine(database.db_url, echo=False) async_session_maker = async_sessionmaker(engine, expire_on_commit=False) @@ -21,4 +29,4 @@ async def get_async_session() -> AsyncGenerator[AsyncSession, None]: async def get_user_db(session: AsyncSession = Depends(get_async_session)): - yield SQLAlchemyUserDatabase(session, User) + yield SQLAlchemyUserDatabase(session, User, OAuthAccount) diff --git a/backend/src/auth/users.py b/backend/src/auth/users.py index 26ed566..eee3754 100644 --- a/backend/src/auth/users.py +++ b/backend/src/auth/users.py @@ -1,3 +1,4 @@ +import os import uuid from typing import Optional @@ -9,6 +10,7 @@ from fastapi_users.authentication import ( CookieTransport, JWTStrategy, ) from fastapi_users.db import SQLAlchemyUserDatabase +from httpx_oauth.oauth2 import OAuth2 import auth.config from auth.db import User, get_user_db @@ -17,6 +19,19 @@ config = auth.config.AuthConfig() SECRET = config.token_secret LIFETIME = config.session_lifetime +if os.getenv("OAUTH_ENABLED") == "True": + oauth2_config = 
auth.config.OAuth2Config() + + oauth_client = OAuth2( + client_id=oauth2_config.client_id, + client_secret=oauth2_config.client_secret, + name=oauth2_config.name, + authorize_endpoint=oauth2_config.authorize_endpoint, + access_token_endpoint=oauth2_config.access_token_endpoint, + ) +else: + oauth_client = None + # TODO: implement on_xxx methods class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]): @@ -31,11 +46,19 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]): ): print(f"User {user.id} has forgot their password. Reset token: {token}") + async def on_after_reset_password(self, user: User, request: Optional[Request] = None): + print(f"User {user.id} has reset their password.") + async def on_after_request_verify( self, user: User, token: str, request: Optional[Request] = None ): print(f"Verification requested for user {user.id}. Verification token: {token}") + async def on_after_verify( + self, user: User, request: Optional[Request] = None + ): + print(f"User {user.id} has been verified") + async def get_user_manager(user_db: SQLAlchemyUserDatabase = Depends(get_user_db)): yield UserManager(user_db) diff --git a/backend/src/config.py b/backend/src/config.py index 405ed01..55d301e 100644 --- a/backend/src/config.py +++ b/backend/src/config.py @@ -1,5 +1,11 @@ +from pathlib import Path + from pydantic_settings import BaseSettings class BasicConfig(BaseSettings): - storage_directory: str = "." 
+ storage_directory: Path = "./data" + tv_directory: Path = "./tv" + movie_directory: Path = "./movie" + torrent_directory: Path = "./torrent" + DEVELOPMENT: bool = False diff --git a/backend/src/database/__init__.py b/backend/src/database/__init__.py index aa82af5..2d7f2a5 100644 --- a/backend/src/database/__init__.py +++ b/backend/src/database/__init__.py @@ -6,7 +6,7 @@ from fastapi import Depends from sqlalchemy import create_engine from sqlalchemy.orm import Session, declarative_base, sessionmaker -from database.config import DbConfig +from .config import DbConfig log = logging.getLogger(__name__) config = DbConfig() @@ -32,7 +32,6 @@ def get_session() -> Generator[Session, Any, None]: except Exception as e: db.rollback() log.critical(f"error occurred: {e}") - print("OIDA OIDA OIDA OIDA OIDA", e) finally: db.close() diff --git a/backend/src/indexer/service.py b/backend/src/indexer/service.py index 00057b9..1b572df 100644 --- a/backend/src/indexer/service.py +++ b/backend/src/indexer/service.py @@ -1,5 +1,6 @@ from sqlalchemy.orm import Session +import indexer.repository from indexer import IndexerQueryResult, log, indexers from indexer.repository import save_result from indexer.schemas import IndexerQueryResultId @@ -18,4 +19,4 @@ def search(query: str, db: Session) -> list[IndexerQueryResult]: def get_indexer_query_result(result_id: IndexerQueryResultId, db: Session) -> IndexerQueryResult: - return get_indexer_query_result(result_id=result_id, db=db) + return indexer.repository.get_result(result_id=result_id, db=db) diff --git a/backend/src/main.py b/backend/src/main.py index 47fc35b..40af85b 100644 --- a/backend/src/main.py +++ b/backend/src/main.py @@ -2,28 +2,21 @@ import logging import sys from logging.config import dictConfig -import database -from auth.schemas import UserCreate, UserRead, UserUpdate -from auth.users import bearer_auth_backend, fastapi_users +from pythonjsonlogger.json import JsonFormatter -logging.basicConfig(level=logging.DEBUG, - 
format="%(asctime)s - %(levelname)s - %(name)s - %(funcName)s(): %(message)s", - stream=sys.stdout, - ) -log = logging.getLogger(__name__) - -import uvicorn -from fastapi import FastAPI - -import tv.router +import auth.users LOGGING_CONFIG = { "version": 1, - "disable_existing_loggers": True, + "disable_existing_loggers": False, "formatters": { "default": { "format": "%(asctime)s - %(levelname)s - %(name)s - %(funcName)s(): %(message)s" + }, + "json": { + "()": JsonFormatter, } + }, "handlers": { "console": { @@ -31,28 +24,60 @@ LOGGING_CONFIG = { "formatter": "default", "stream": sys.stdout, }, + "file": { + "class": "logging.handlers.RotatingFileHandler", + "formatter": "json", + "filename": "./log.txt", + "maxBytes": 10485760, + "backupCount": 5, + } }, "loggers": { - "uvicorn": {"handlers": ["console"], "level": "DEBUG"}, - "uvicorn.access": {"handlers": ["console"], "level": "DEBUG"}, - "fastapi": {"handlers": ["console"], "level": "DEBUG"}, - "__main__": {"handlers": ["console"], "level": "DEBUG"}, + "uvicorn": {"handlers": ["console", "file"], "level": "DEBUG"}, + "uvicorn.access": {"handlers": ["console", "file"], "level": "DEBUG"}, + "fastapi": {"handlers": ["console", "file"], "level": "DEBUG"}, }, } - -# Apply logging config dictConfig(LOGGING_CONFIG) +logging.basicConfig(level=logging.DEBUG, + format="%(asctime)s - %(levelname)s - %(name)s - %(funcName)s(): %(message)s", + stream=sys.stdout, + ) +log = logging.getLogger(__name__) + +import database +from auth.schemas import UserCreate, UserRead, UserUpdate +from auth.users import bearer_auth_backend, fastapi_users, cookie_auth_backend +from config import BasicConfig +from auth.users import oauth_client +import uvicorn +from fastapi import FastAPI + +import tv.router +import torrent.router + +basic_config = BasicConfig() +if basic_config.DEVELOPMENT: + basic_config.torrent_directory.mkdir(parents=True, exist_ok=True) + basic_config.tv_directory.mkdir(parents=True, exist_ok=True) + 
basic_config.movie_directory.mkdir(parents=True, exist_ok=True) + basic_config.storage_directory.mkdir(parents=True, exist_ok=True) + log.warning("Development Mode activated!") +else: + log.info("Development Mode not activated!") + database.init_db() app = FastAPI(root_path="/api/v1") +# Standard Auth Routers app.include_router( fastapi_users.get_auth_router(bearer_auth_backend), prefix="/auth/jwt", tags=["auth"] ) app.include_router( - fastapi_users.get_auth_router(bearer_auth_backend), + fastapi_users.get_auth_router(cookie_auth_backend), prefix="/auth/cookie", tags=["auth"] ) @@ -71,17 +96,45 @@ app.include_router( prefix="/auth", tags=["auth"], ) +# User Router app.include_router( fastapi_users.get_users_router(UserRead, UserUpdate), prefix="/users", tags=["users"], ) +# OAuth2 Routers +if oauth_client is not None: + app.include_router( + fastapi_users.get_oauth_router(oauth_client, + bearer_auth_backend, + auth.users.SECRET, + associate_by_email=True, + is_verified_by_default=True + ), + prefix=f"/auth/jwt/{oauth_client.name}", + tags=["oauth"], + ) + app.include_router( + fastapi_users.get_oauth_router(oauth_client, + cookie_auth_backend, + auth.users.SECRET, + associate_by_email=True, + is_verified_by_default=True + ), + prefix=f"/auth/cookie/{oauth_client.name}", + tags=["oauth"], + + ) app.include_router( tv.router.router, prefix="/tv", tags=["tv"] ) - +app.include_router( + torrent.router.router, + prefix="/torrent", + tags=["torrent"] +) if __name__ == "__main__": uvicorn.run(app, host="127.0.0.1", port=5049, log_config=LOGGING_CONFIG) diff --git a/backend/src/metadataProvider/tmdb.py b/backend/src/metadataProvider/tmdb.py index 2a3e23d..66e7346 100644 --- a/backend/src/metadataProvider/tmdb.py +++ b/backend/src/metadataProvider/tmdb.py @@ -7,7 +7,7 @@ from pydantic_settings import BaseSettings from tmdbsimple import TV, TV_Seasons from metadataProvider.abstractMetaDataProvider import AbstractMetadataProvider, register_metadata_provider -from tv.schemas 
import Episode, Season, Show +from tv.schemas import Episode, Season, Show, SeasonNumber, EpisodeNumber class TmdbConfig(BaseSettings): @@ -41,7 +41,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): Episode( external_id=int(episode["id"]), title=episode["name"], - number=int(episode["episode_number"]) + number=EpisodeNumber(episode["episode_number"]) ) ) @@ -50,7 +50,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): external_id=int(season_metadata["id"]), name=season_metadata["name"], overview=season_metadata["overview"], - number=int(season_metadata["season_number"]), + number=SeasonNumber(season_metadata["season_number"]), episodes=episode_list, ) @@ -77,7 +77,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider): content_type = res.headers["content-type"] file_extension = mimetypes.guess_extension(content_type) if res.status_code == 200: - with open(f"{self.storage_path}/{show.id}{file_extension}", 'wb') as f: + with open(self.storage_path.joinpath(str(show.id) + file_extension), 'wb') as f: f.write(res.content) log.info(f"image for show {show.name} successfully downloaded") diff --git a/backend/src/ml/__init__.py b/backend/src/ml/__init__.py deleted file mode 100644 index 5cc938c..0000000 --- a/backend/src/ml/__init__.py +++ /dev/null @@ -1,84 +0,0 @@ -import json -import logging -from collections import Counter -from typing import List - -from ollama import ChatResponse, chat -from pydantic import BaseModel - -from ml.config import MachineLearningConfig - - -class NFO(BaseModel): - season: int - - -class Contains(BaseModel): - contains: bool - -def get_season(nfo: str) -> int | None: - responses: List[ChatResponse] = [] - parsed_responses: List[int] = [] - - for i in range(0, 5): - responses.append(chat( - model=config.ollama_model_name, - format=NFO.model_json_schema(), - messages=[ - { - 'role': 'USER', - 'content': - "Tell me which season the torrent with this description contains?" 
+ - " output a season number in json format, the season number is an integer" + - nfo - }, - ])) - - for response in responses: - season_number: int - try: - season_number: int = json.loads(response.message.content)['season'] - except Exception as e: - log.warning(f"failed to parse season number: {e}") - break - parsed_responses.append(season_number) - - most_common = Counter(parsed_responses).most_common(1) - log.debug(f"extracted season number: {most_common} from nfo: {nfo}") - return most_common[0][0] - - -def contains_season(season_number: int, string_to_analyze: str) -> bool: - responses: List[ChatResponse] = [] - parsed_responses: List[bool] = [] - - for i in range(0, 3): - responses.append(chat( - model=config.ollama_model_name, - format=Contains.model_json_schema(), - messages=[ - { - 'role': 'USER', - 'content': - "Does this torrent contain the season " + season_number.__str__() + " ?" + - " output a boolean json format" + - string_to_analyze - }, - ])) - - for response in responses: - try: - answer: bool = json.loads(response.message.content)['contains'] - log.debug(f"extracted contains: {answer}") - except Exception as e: - log.warning(f"failed to parse season number: {e}") - break - parsed_responses.append(answer) - - most_common = Counter(parsed_responses).most_common(1) - log.debug(f"according to AI {string_to_analyze} contains season {season_number} {most_common[0][0]}") - return most_common[0][0] - - -config = MachineLearningConfig -log = logging.getLogger(__name__) diff --git a/backend/src/ml/_testing.py b/backend/src/ml/_testing.py deleted file mode 100644 index b1e012d..0000000 --- a/backend/src/ml/_testing.py +++ /dev/null @@ -1,45 +0,0 @@ -import json -from datetime import datetime, timedelta - -from ollama import ChatResponse, chat -from pydantic import BaseModel - - -class NFO(BaseModel): - season: int - - -# or access fields directly from the response object -start_time = datetime.now() + timedelta(seconds=300) -i = 0 -failed_prompts = 0 
-while start_time > datetime.now(): - response: ChatResponse = chat(model='qwen2.5:0.5b', - format=NFO.model_json_schema() - , messages=[ - { - 'role': 'USER', - 'content': - "which season does a torrent with the following NFO contain? output the season number, which is an integer in json please\n" + - "The.Big.Bang.Theory.(2007).Season.9.S09.(1080p.BluRay.x265.HEVC.10bit.AAC.5.1.Vyndros)" - }, - ]) - i += 1 - print("prompt #", i) - print("remaining time: ", start_time - datetime.now()) - try: - json2 = json.loads(response.message.content) - print(json2) - except Exception as e: - print("prompt failed", e) - print(response.message.content) - failed_prompts += 1 - - if json2['season'] != 9: - failed_prompts += 1 - -print("prompts: ", i, " total time: 120s") -print("failed prompts: ", failed_prompts) -print("average time per prompt: ", 300 / i) -print("average time per successful prompt: ", 300 / (i - failed_prompts)) -print("ratio successful/failed prompts: ", failed_prompts / (i - failed_prompts)) diff --git a/backend/src/ml/config.py b/backend/src/ml/config.py deleted file mode 100644 index 804cfea..0000000 --- a/backend/src/ml/config.py +++ /dev/null @@ -1,5 +0,0 @@ -from pydantic_settings import BaseSettings - - -class MachineLearningConfig(BaseSettings): - ollama_model_name: str = "qwen2.5:0.5b" diff --git a/backend/src/ml/test__init__.py b/backend/src/ml/test__init__.py deleted file mode 100644 index 13a2758..0000000 --- a/backend/src/ml/test__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from ml import get_season - - -def test_get_season(): - for i in range(0, 50): - assert get_season("The.Big.Bang.Theory.(2007).Season.9.S09.(1080p.BluRay.x265.HEVC.10bit.AAC.5.1.Vyndros)") == 9 diff --git a/backend/src/torrent/dependencies.py b/backend/src/torrent/dependencies.py new file mode 100644 index 0000000..b6ae237 --- /dev/null +++ b/backend/src/torrent/dependencies.py @@ -0,0 +1,13 @@ +from typing import Annotated + +from fastapi import Depends + +from database import 
DbSessionDependency +from torrent.service import TorrentService + + +def get_torrent_service(db: DbSessionDependency) -> TorrentService: + return TorrentService(db=db) + + +TorrentServiceDependency = Annotated[TorrentService, Depends(get_torrent_service)] diff --git a/backend/src/torrent/dowloadClients/__init__.py b/backend/src/torrent/dowloadClients/__init__.py deleted file mode 100644 index 8461751..0000000 --- a/backend/src/torrent/dowloadClients/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from torrent.dowloadClients.config import DownloadClientConfig -from torrent.dowloadClients.qbittorrent import QbittorrentClient - -config = DownloadClientConfig() - -if config.download_client == "qbit": - client = QbittorrentClient() -else: - raise ValueError("Unknown download client") diff --git a/backend/src/torrent/dowloadClients/config.py b/backend/src/torrent/dowloadClients/config.py deleted file mode 100644 index b3f3a51..0000000 --- a/backend/src/torrent/dowloadClients/config.py +++ /dev/null @@ -1,5 +0,0 @@ -from pydantic_settings import BaseSettings - - -class DownloadClientConfig(BaseSettings): - download_client: str = "qbit" diff --git a/backend/src/torrent/dowloadClients/genericDownloadClient.py b/backend/src/torrent/dowloadClients/genericDownloadClient.py deleted file mode 100644 index dd93689..0000000 --- a/backend/src/torrent/dowloadClients/genericDownloadClient.py +++ /dev/null @@ -1,50 +0,0 @@ -from abc import ABCMeta, abstractmethod - -from torrent.schemas import TorrentBase - - -class GenericDownloadClient(metaclass=ABCMeta): - name: str - - @abstractmethod - def __init__(cls, name: str = None, **kwargs): - super().__init__(**kwargs) - if name is None: - raise ValueError('name cannot be None') - cls.name = name - - @abstractmethod - def download(self, torrent: TorrentBase) -> TorrentBase: - """ - downloads a torrent - - :param torrent: id of the torrent to download - """ - raise NotImplementedError() - - @abstractmethod - def get_torrent_status(self, 
torrent: TorrentBase) -> TorrentBase: - """ - updates a torrents 'status' field - - :param torrent: id of the media to update - """ - raise NotImplementedError() - - @abstractmethod - def cancel_download(self, torrent: TorrentBase) -> TorrentBase: - """ - cancels download of a torrent - - :param torrent: id of the torrent to download - """ - raise NotImplementedError() - - @abstractmethod - def pause_download(self, torrent: TorrentBase) -> TorrentBase: - """ - pauses download of a torrent - - :param torrent: id of the torrent to download - """ - raise NotImplementedError() diff --git a/backend/src/torrent/dowloadClients/qbittorrent.py b/backend/src/torrent/dowloadClients/qbittorrent.py deleted file mode 100644 index 0e49cd9..0000000 --- a/backend/src/torrent/dowloadClients/qbittorrent.py +++ /dev/null @@ -1,75 +0,0 @@ -import logging - -import qbittorrentapi -from database.torrents import Torrent -from dowloadClients.genericDownloadClient import GenericDownloadClient -from pydantic_settings import BaseSettings, SettingsConfigDict - -from indexer import IndexerQueryResult - -log = logging.getLogger(__name__) - - -class QbittorrentConfig(BaseSettings): - model_config = SettingsConfigDict(env_prefix='QBITTORRENT_') - host: str = "localhost" - port: int = 8080 - username: str = "admin" - password: str = "admin" - - -class QbittorrentClient(GenericDownloadClient): - DOWNLOADING_STATE = ("allocating", "downloading", "metaDL", "pausedDL", "queuedDL", "stalledDL", "checkingDL", - "forcedDL", "moving") - FINISHED_STATE = ("uploading", "pausedUP", "queuedUP", "stalledUP", "checkingUP", "forcedUP") - ERROR_STATE = ("missingFiles", "error", "checkingResumeData") - UNKNOWN_STATE = ("unknown",) - api_client = qbittorrentapi.Client(**QbittorrentConfig().model_dump()) - - def __init__(self): - super().__init__(name="qBittorrent") - try: - self.api_client.auth_log_in() - log.info("Successfully logged into qbittorrent") - except Exception as e: - log.error(f"Failed to log into 
qbittorrent: {e}") - raise - finally: - self.api_client.auth_log_out() - - def download(self, torrent: IndexerQueryResult) -> Torrent: - log.info(f"Attempting to download torrent: {torrent.title} with tag {torrent.id}") - with open(torrent.torrent_filepath, "rb") as torrent_file: - answer = self.api_client.torrents_add(category="MediaManager", - torrent_files=torrent_file, - tags=[torrent.id.__str__()]) - if answer == "Ok.": - log.info(f"Successfully added torrent: {torrent.torrent_filepath}") - return self.get_torrent_status(torrent=torrent) - else: - log.error(f"Failed to download torrent. API response: {answer}") - raise RuntimeError(f"Failed to download torrent, API-Answer isn't 'Ok.'; API Answer: {answer}") - - def get_torrent_status(self, torrent: Torrent) -> Torrent: - log.info(f"Fetching status for torrent: {torrent.id}") - info = self.api_client.torrents_info(tag=f"{torrent.id}") - - if not info: - log.warning(f"No information found for torrent: {torrent.id}") - torrent.torrent_status = "error" - else: - state: str = info[0]["state"] - log.info(f"Torrent {torrent.id} is in state: {state}") - - if state in self.DOWNLOADING_STATE: - torrent.torrent_status = "downloading" - elif state in self.FINISHED_STATE: - torrent.torrent_status = "finished" - elif state in self.ERROR_STATE: - torrent.torrent_status = "error" - elif state in self.UNKNOWN_STATE: - torrent.torrent_status = "unknown" - else: - torrent.torrent_status = "error" - - return torrent diff --git a/backend/src/torrent/models.py b/backend/src/torrent/models.py index 0a89b72..3f9c912 100644 --- a/backend/src/torrent/models.py +++ b/backend/src/torrent/models.py @@ -1,16 +1,18 @@ from uuid import UUID -from sqlalchemy.orm import Mapped, mapped_column +from sqlalchemy.orm import Mapped, mapped_column, relationship from database import Base from torrent.schemas import Quality, TorrentStatus -class TorrentBase(Base): - __abstract__ = True - +class Torrent(Base): + __tablename__ = "torrent" id: 
Mapped[UUID] = mapped_column(primary_key=True) - status: Mapped[TorrentStatus | None] + status: Mapped[TorrentStatus] title: Mapped[str] quality: Mapped[Quality] imported: Mapped[bool] + hash: Mapped[str] + + season_files = relationship("SeasonFile", back_populates="torrent") diff --git a/backend/src/torrent/repository.py b/backend/src/torrent/repository.py new file mode 100644 index 0000000..45cdbaa --- /dev/null +++ b/backend/src/torrent/repository.py @@ -0,0 +1,45 @@ +from sqlalchemy import select +from sqlalchemy.orm import Session + +from torrent.models import Torrent +from torrent.schemas import TorrentId, Torrent as TorrentSchema +from tv.models import SeasonFile, Show, Season +from tv.schemas import SeasonFile as SeasonFileSchema, Show as ShowSchema + + +def get_seasons_files_of_torrent(db: Session, torrent_id: TorrentId) -> list[SeasonFileSchema]: + stmt = select(SeasonFile).where(SeasonFile.torrent_id == torrent_id) + result = db.execute(stmt).scalars().all() + return [SeasonFileSchema.model_validate(season_file) for season_file in result] + + +def get_show_of_torrent(db: Session, torrent_id: TorrentId) -> ShowSchema: + stmt = ( + select(Show). + join(SeasonFile.season). + join(Season.show). 
# --- torrent/repository.py (tail) -----------------------------------------

def save_torrent(db: Session, torrent_schema: TorrentSchema) -> TorrentSchema:
    """Insert-or-update a torrent row and echo the schema back."""
    db.merge(Torrent(**torrent_schema.model_dump()))
    db.commit()
    return TorrentSchema.model_validate(torrent_schema)


def get_all_torrents(db: Session) -> list[TorrentSchema]:
    """Return every stored torrent as a validated schema."""
    rows = db.execute(select(Torrent)).scalars().all()
    return [TorrentSchema.model_validate(row) for row in rows]


def get_torrent_by_id(db: Session, torrent_id: TorrentId) -> TorrentSchema:
    """Fetch one torrent by primary key.

    NOTE(review): db.get() returns None for an unknown id, which makes
    model_validate raise -- confirm callers expect that rather than a 404.
    """
    return TorrentSchema.model_validate(db.get(Torrent, torrent_id))


def delete_torrent(db: Session, torrent_id: TorrentId):
    """Delete a torrent row by id.

    BUGFIX: commit the session -- previously the DELETE was never flushed,
    so the row silently survived unless some later call happened to commit
    (save_torrent commits; this writer must too).
    """
    db.delete(db.get(Torrent, torrent_id))
    db.commit()


# --- torrent/router.py ------------------------------------------------------

from fastapi import APIRouter
from fastapi import status
from fastapi.params import Depends

from auth.users import current_active_user
from torrent.dependencies import TorrentServiceDependency
from torrent.schemas import TorrentId, Torrent

router = APIRouter()


@router.get("/", status_code=status.HTTP_200_OK, dependencies=[Depends(current_active_user)],
            response_model=list[Torrent])
def get_all_torrents(service: TorrentServiceDependency):
    """List all known torrents with a freshly polled status."""
    return service.get_all_torrents()


@router.post("/{torrent_id}", status_code=status.HTTP_200_OK, dependencies=[Depends(current_active_user)],
             response_model=Torrent)
def import_torrent(service: TorrentServiceDependency, torrent_id: TorrentId):
    """Hard-link a finished torrent's video files into the library."""
    return service.import_torrent(service.get_torrent_by_id(id=torrent_id))


@router.post("/", status_code=status.HTTP_200_OK, dependencies=[Depends(current_active_user)],
             response_model=list[Torrent])
def import_all_torrents(service: TorrentServiceDependency):
    """Import every finished, not-yet-imported torrent."""
    return service.import_all_torrents()


@router.get("/{torrent_id}", status_code=status.HTTP_200_OK, dependencies=[Depends(current_active_user)],
            response_model=Torrent)
def get_torrent(service: TorrentServiceDependency, torrent_id: TorrentId):
    """Fetch a single torrent.

    SECURITY FIX: this was the only route in the router without the
    current_active_user dependency; it is now protected like its siblings.
    """
    return service.get_torrent_by_id(id=torrent_id)


@router.delete("/{torrent_id}", status_code=status.HTTP_200_OK, dependencies=[Depends(current_active_user)])
def delete_torrent(service: TorrentServiceDependency, torrent_id: TorrentId):
    """Delete a torrent record (the service also drops its season-file links
    when the torrent was never imported).

    BUGFIX: the path was "/torrents", which is inconsistent with every
    sibling route and forced torrent_id into a query parameter; it now
    matches GET/POST "/{torrent_id}".
    """
    service.delete_torrent(torrent_id=torrent_id)


# --- torrent/schemas.py -----------------------------------------------------

class Torrent(BaseModel):
    """API/DB-facing torrent record; from_attributes allows loading from ORM rows."""
    model_config = ConfigDict(from_attributes=True)

    id: TorrentId = Field(default_factory=uuid.uuid4)
    status: TorrentStatus
    title: str
    quality: Quality
    imported: bool
    hash: str  # SHA-1 of the bencoded info dict; "" until computed
# --- torrent/service.py (continued) -----------------------------------------

from torrent.schemas import Torrent, TorrentStatus, TorrentId
from torrent.utils import list_files_recursively, get_torrent_filepath
from tv.schemas import SeasonFile, Show

log = logging.getLogger(__name__)


class TorrentServiceConfig(BaseSettings):
    """qBittorrent connection settings, read from QBITTORRENT_* env vars."""
    model_config = SettingsConfigDict(env_prefix='QBITTORRENT_')
    host: str = "localhost"
    port: int = 8080
    username: str = "admin"
    password: str = "admin"


class TorrentService:
    """Thin wrapper around the qBittorrent Web API plus library-import logic."""

    # qBittorrent "state" strings bucketed into our TorrentStatus values.
    DOWNLOADING_STATE = ("allocating", "downloading", "metaDL", "pausedDL", "queuedDL", "stalledDL", "checkingDL",
                         "forcedDL", "moving")
    FINISHED_STATE = ("uploading", "pausedUP", "queuedUP", "stalledUP", "checkingUP", "forcedUP")
    ERROR_STATE = ("missingFiles", "error", "checkingResumeData")
    UNKNOWN_STATE = ("unknown",)

    # NOTE(review): built once at import time and shared by every instance;
    # later env/config changes are not picked up -- confirm that is intended.
    api_client = qbittorrentapi.Client(**TorrentServiceConfig().model_dump())

    def __init__(self, db: Session):
        # Assign db up front so a login failure cannot leave it unset.
        self.db = db
        try:
            self.api_client.auth_log_in()
            log.info("Successfully logged into qbittorrent")
        except Exception as e:
            log.error(f"Failed to log into qbittorrent: {e}")
            raise
        finally:
            # NOTE(review): this logs out even after a successful login;
            # presumably the client re-authenticates lazily on later calls,
            # but confirm the immediate logout is intentional.
            self.api_client.auth_log_out()

    def download(self, indexer_result: IndexerQueryResult) -> Torrent:
        """Fetch the .torrent file for an indexer result and hand it to qBittorrent.

        Returns the freshly persisted Torrent with its live status.
        Raises RuntimeError if qBittorrent rejects the add request.
        """
        log.info(f"Attempting to download torrent: {indexer_result.title}")
        torrent = Torrent(status=TorrentStatus.unknown,
                          title=indexer_result.title,
                          quality=indexer_result.quality,
                          imported=False,
                          hash="")

        torrent_filepath = BasicConfig().torrent_directory / f"{torrent.title}.torrent"
        # BUGFIX: fail fast on HTTP errors and never hang forever -- previously
        # an error page would have been written to disk as a ".torrent" file.
        response = requests.get(indexer_result.download_url, timeout=30)
        response.raise_for_status()
        torrent_filepath.write_bytes(response.content)

        content = torrent_filepath.read_bytes()
        try:
            decoded_content = bencoder.decode(content)
        except Exception as e:
            log.error(f"Failed to decode torrent file: {e}")
            raise
        # The info-hash is how every later Web API call identifies the torrent.
        torrent.hash = hashlib.sha1(bencoder.encode(decoded_content[b'info'])).hexdigest()
        answer = self.api_client.torrents_add(category="MediaManager",
                                              torrent_files=content,
                                              save_path=torrent.title)

        if answer == "Ok.":
            log.info(f"Successfully added torrent: {torrent.title}")
            return self.get_torrent_status(torrent=torrent)
        log.error(f"Failed to download torrent. API response: {answer}")
        raise RuntimeError(f"Failed to download torrent, API-Answer isn't 'Ok.'; API Answer: {answer}")

    def get_torrent_status(self, torrent: Torrent) -> Torrent:
        """Poll qBittorrent for the torrent's state, map it onto TorrentStatus,
        persist the updated record, and return it."""
        log.info(f"Fetching status for torrent: {torrent.title}")
        info = self.api_client.torrents_info(torrent_hashes=torrent.hash)

        if not info:
            log.warning(f"No information found for torrent: {torrent.id}")
            torrent.status = TorrentStatus.unknown
        else:
            state: str = info[0]["state"]
            log.info(f"Torrent {torrent.id} is in state: {state}")

            if state in self.DOWNLOADING_STATE:
                torrent.status = TorrentStatus.downloading
            elif state in self.FINISHED_STATE:
                torrent.status = TorrentStatus.finished
            elif state in self.ERROR_STATE:
                torrent.status = TorrentStatus.error
            elif state in self.UNKNOWN_STATE:
                torrent.status = TorrentStatus.unknown
            else:
                # Any unrecognised state string is treated as an error.
                torrent.status = TorrentStatus.error
        save_torrent(db=self.db, torrent_schema=torrent)
        return torrent

    def cancel_download(self, torrent: Torrent, delete_files: bool = False) -> Torrent:
        """Cancel (remove) a torrent in qBittorrent.

        :param delete_files: also delete the downloaded payload, off by default
        :param torrent: the torrent to cancel
        """
        log.info(f"Cancelling download for torrent: {torrent.title}")
        # BUGFIX: the hash was never passed, so the API call had no target
        # and nothing was actually removed.
        self.api_client.torrents_delete(delete_files=delete_files, torrent_hashes=torrent.hash)
        return self.get_torrent_status(torrent=torrent)

    def pause_download(self, torrent: Torrent) -> Torrent:
        """Pause the download of a torrent.

        :param torrent: the torrent to pause
        """
        log.info(f"Pausing download for torrent: {torrent.title}")
        self.api_client.torrents_pause(torrent_hashes=torrent.hash)
        return self.get_torrent_status(torrent=torrent)

    def resume_download(self, torrent: Torrent) -> Torrent:
        """Resume the download of a torrent.

        :param torrent: the torrent to resume
        """
        log.info(f"Resuming download for torrent: {torrent.title}")
        self.api_client.torrents_resume(torrent_hashes=torrent.hash)
        return self.get_torrent_status(torrent=torrent)

    # TODO: add function to differentiate between .srt files and stuff
    def import_torrent(self, torrent: Torrent) -> Torrent:
        """Hard-link the torrent's video files into the show's library folder.

        Only files with a video MIME type are considered; episode files are
        matched by an SxxExx pattern in the file name. The torrent is marked
        imported even if some episodes were not found (a warning is logged).
        """
        log.info(f"importing torrent {torrent}")
        all_files = list_files_recursively(path=get_torrent_filepath(torrent=torrent))
        log.debug(f"Found {len(all_files)} files downloaded by the torrent")
        files = []
        for file in all_files:
            # NOTE: mimetypes.guess_file_type requires Python >= 3.13
            file_type = mimetypes.guess_file_type(file)
            if file_type[0] is not None:
                if file_type[0].startswith("video"):
                    files.append(file)
                    log.debug(f"File is a video, it will be imported: {file}")
                else:
                    log.debug(f"File is not a video, will not be imported: {file}")
        log.debug(f"Importing these {len(files)} files:\n" + pprint.pformat(files))

        show: Show = get_show_of_torrent(db=self.db, torrent_id=torrent.id)
        show_file_path = BasicConfig().tv_directory / f"{show.name} ({show.year}) [{show.metadata_provider}id-{show.external_id}]"

        season_files: list[SeasonFile] = get_seasons_files_of_torrent(db=self.db, torrent_id=torrent.id)
        for season_file in season_files:
            season = tv.service.get_season(db=self.db, season_id=season_file.season_id)
            season_path = show_file_path / Path(f"Season {season.number}")
            try:
                season_path.mkdir(parents=True)
            except FileExistsError:
                log.warning(f"Path already exists: {season_path}")
            for episode in season.episodes:
                episode_file_name = f"{show.name} S{season.number:02d}E{episode.number:02d}"
                if season_file.file_path_suffix != "":
                    episode_file_name += f" - {season_file.file_path_suffix}"
                target_file = season_path / episode_file_name
                # Hoisted out of the file loop: the pattern only depends on
                # the season/episode numbers, not on the candidate file.
                pattern = r'.*[.]S0?' + str(season.number) + r'E0?' + str(episode.number) + r"[.].*"
                # NOTE(review): original author flagged (in German) that
                # something seems off with matching against file.name -- verify.
                for file in files:
                    log.debug(f"Searching for pattern {pattern} in file: {file.name}")
                    if re.search(pattern, file.name):
                        log.debug(f"Found matching pattern: {pattern} in file {file.name}")
                        target_file = target_file.with_suffix(file.suffix)
                        if target_file.exists():
                            target_file.unlink()

                        target_file.hardlink_to(file)
                        break
                else:
                    log.warning(f"S{season.number}E{episode.number} in Torrent {torrent.title}'s files not found.")
        torrent.imported = True

        return self.get_torrent_status(torrent=torrent)

    def get_all_torrents(self) -> list[Torrent]:
        """Return every stored torrent with a freshly polled status."""
        return [self.get_torrent_status(stored) for stored in torrent.repository.get_all_torrents(db=self.db)]

    def get_torrent_by_id(self, id: TorrentId) -> Torrent:
        """Fetch one torrent by id with a freshly polled status."""
        return self.get_torrent_status(torrent.repository.get_torrent_by_id(torrent_id=id, db=self.db))

    def delete_torrent(self, torrent_id: TorrentId):
        """Remove a torrent record; if it was never imported, also drop the
        SeasonFile rows that pointed at it."""
        t = torrent.repository.get_torrent_by_id(torrent_id=torrent_id, db=self.db)
        if not t.imported:
            tv.repository.remove_season_files_by_torrent_id(db=self.db, torrent_id=torrent_id)
        torrent.repository.delete_torrent(db=self.db, torrent_id=t.id)

    @repeat_every(seconds=3600)
    def import_all_torrents(self) -> list[Torrent]:
        """Import every finished, not-yet-imported torrent (also scheduled hourly).

        NOTE(review): repeat_every is designed for startup-hook coroutines;
        decorating an instance method that the router also calls directly is
        unusual -- confirm both call paths behave as expected.
        """
        log.info("Importing all torrents")
        imported_torrents = []
        for t in self.get_all_torrents():
            # `== False` replaced with the idiomatic truthiness check.
            if not t.imported and t.status == TorrentStatus.finished:
                imported_torrents.append(self.import_torrent(t))
        log.info("Finished importing all torrents")
        return imported_torrents


# --- torrent/utils.py --------------------------------------------------------

def list_files_recursively(path: Path = Path(".")) -> list[Path]:
    """Return all regular files under *path*, skipping directories and symlinks."""
    files = list(path.glob("**/*"))
    logging.debug(f"Found {len(files)} entries via glob")
    valid_files = []
    for entry in files:
        if entry.is_dir():
            logging.debug(f"'{entry}' is a directory")
        elif entry.is_symlink():
            logging.debug(f"'{entry}' is a symlink")
        else:
            valid_files.append(entry)
    logging.debug(f"Returning {len(valid_files)} files after filtering")
    return valid_files


def get_torrent_filepath(torrent: Torrent) -> Path:
    """Directory the torrent's payload was saved into (save_path = title)."""
    return BasicConfig().torrent_directory / torrent.title
-72,8 +74,3 @@ class SeasonRequest(Base): season_id: Mapped[UUID] = mapped_column(ForeignKey(column="season.id", ondelete="CASCADE"), ) wanted_quality: Mapped[Quality] min_quality: Mapped[Quality] - - -class SeasonTorrent(TorrentBase): - __tablename__ = "season_torrent" - season_files: Mapped[list["SeasonFile"]] = relationship(back_populates="torrent", cascade="all, delete") diff --git a/backend/src/tv/repository.py b/backend/src/tv/repository.py index f37a4e8..3d67ac6 100644 --- a/backend/src/tv/repository.py +++ b/backend/src/tv/repository.py @@ -1,7 +1,8 @@ -from sqlalchemy import select +from sqlalchemy import select, delete from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session, joinedload +from torrent.schemas import TorrentId from tv.models import Season, Show, Episode, SeasonRequest, SeasonFile from tv.schemas import Season as SeasonSchema, SeasonId, Show as ShowSchema, ShowId, \ SeasonRequest as SeasonRequestSchema, SeasonFile as SeasonFileSchema @@ -138,7 +139,7 @@ def get_season(season_id: SeasonId, db: Session) -> SeasonSchema: :param db: The database session. :return: a Season object. """ - return SeasonSchema.model_validate(db.get(Season(), season_id)) + return SeasonSchema.model_validate(db.get(Season, season_id)) def add_season_to_requested_list(season_request: SeasonRequestSchema, db: Session) -> None: @@ -165,12 +166,14 @@ def get_season_by_number(db: Session, season_number: int, show_id: ShowId) -> Se where(Season.show_id == show_id). where(Season.number == season_number). 
def remove_season_files_by_torrent_id(db: Session, torrent_id: TorrentId):
    """Delete every SeasonFile row linked to *torrent_id*.

    BUGFIX: commit the bulk delete -- the sibling writers in this module
    (e.g. add_season_file) commit, but this delete was never flushed, so
    the rows silently survived the call unless someone committed later.
    """
    stmt = (
        delete(SeasonFile).
        where(SeasonFile.torrent_id == torrent_id)
    )
    db.execute(stmt)
    db.commit()
# --- tv/service.py (tail) ----------------------------------------------------

def get_season(db: Session, season_id: SeasonId) -> Season:
    """Look up a single season by its id."""
    return tv.repository.get_season(season_id=season_id, db=db)


def get_all_requested_seasons(db: Session) -> list[SeasonRequest]:
    """Return every pending season request."""
    return tv.repository.get_season_requests(db=db)


def download_torrent(db: Session, public_indexer_result_id: IndexerQueryResultId, show_id: ShowId,
                     override_show_file_path_suffix: str = "") -> Torrent:
    """Start downloading an indexer result and link it to the show's seasons.

    A SeasonFile row is created for every season the indexer result covers,
    carrying the optional file-path suffix used later when naming episode
    files on import.

    :param db: database session
    :param public_indexer_result_id: indexer result to download
    :param show_id: show the torrent belongs to
    :param override_show_file_path_suffix: optional suffix appended to
        episode file names ("" for none)
    :return: the persisted Torrent with its live status
    """
    indexer_result = indexer.service.get_indexer_query_result(db=db, result_id=public_indexer_result_id)
    # Renamed from `torrent`: the local shadowed the `torrent` package that
    # this module also references, which invites subtle NameError-style bugs.
    downloaded = TorrentService(db=db).download(indexer_result=indexer_result)

    for season_number in indexer_result.season:
        season = tv.repository.get_season_by_number(db=db, season_number=season_number, show_id=show_id)
        season_file = SeasonFile(season_id=season.id, quality=indexer_result.quality, torrent_id=downloaded.id,
                                 file_path_suffix=override_show_file_path_suffix)
        add_season_file(db=db, season_file=season_file)
    return downloaded