Merge pull request #43 from maxdorninger/add-usenet-support

Add Usenet support and fix Jackett's use of magnet links
This commit is contained in:
Maximilian Dorninger
2025-07-10 18:19:02 +02:00
committed by GitHub
32 changed files with 1300 additions and 331 deletions

View File

@@ -0,0 +1,95 @@
"""add-usenet-columns
Revision ID: 333866afcd2c
Revises: aa4689f80796
Create Date: 2025-07-09 20:55:42.338629
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = "333866afcd2c"
down_revision: Union[str, None] = "aa4689f80796"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Drops the obsolete ``usenet_download`` table and instead tracks usenet
    results via new columns on ``indexer_query_result`` and ``torrent``.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # The dedicated usenet_download table is replaced by boolean/age columns
    # on the existing result and torrent tables.
    op.drop_table("usenet_download")
    # server_default is required so pre-existing rows satisfy the new
    # NOT NULL constraint during the migration.
    op.add_column(
        "indexer_query_result",
        sa.Column(
            "usenet", sa.Boolean(), nullable=False, server_default=sa.text("false")
        ),
    )
    # Age of the release (relevant for usenet results); defaults to 0.
    op.add_column(
        "indexer_query_result",
        sa.Column("age", sa.Integer(), nullable=False, server_default=sa.text("0")),
    )
    op.add_column(
        "torrent",
        sa.Column(
            "usenet", sa.Boolean(), nullable=False, server_default=sa.text("false")
        ),
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    Removes the usenet-related columns and recreates the old
    ``usenet_download`` table (data previously stored there is not restored).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the columns added in upgrade(), in reverse order.
    op.drop_column("torrent", "usenet")
    op.drop_column("indexer_query_result", "age")
    op.drop_column("indexer_query_result", "usenet")
    # Recreate the legacy table schema exactly as it existed before upgrade().
    op.create_table(
        "usenet_download",
        sa.Column("id", sa.UUID(), autoincrement=False, nullable=False),
        sa.Column(
            "status",
            # Reuses the existing "usenetdownloadstatus" PostgreSQL enum type;
            # this assumes the type still exists in the database.
            postgresql.ENUM(
                "queued",
                "downloading",
                "completed",
                "failed",
                "paused",
                "extracting",
                "verifying",
                "repairing",
                "unknown",
                name="usenetdownloadstatus",
            ),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column("title", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column(
            "quality",
            postgresql.ENUM("uhd", "fullhd", "hd", "sd", "unknown", name="quality"),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column("imported", sa.BOOLEAN(), autoincrement=False, nullable=False),
        sa.Column("nzb_id", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column("category", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column(
            "size_mb",
            sa.DOUBLE_PRECISION(precision=53),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "progress_percent",
            sa.DOUBLE_PRECISION(precision=53),
            autoincrement=False,
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("usenet_download_pkey")),
    )
    # ### end Alembic commands ###

View File

@@ -23,189 +23,250 @@ def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
# Create user table
op.create_table('user',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('email', sa.String(length=320), nullable=False),
sa.Column('hashed_password', sa.String(length=1024), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('is_superuser', sa.Boolean(), nullable=False),
sa.Column('is_verified', sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
op.create_table(
"user",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("email", sa.String(length=320), nullable=False),
sa.Column("hashed_password", sa.String(length=1024), nullable=False),
sa.Column("is_active", sa.Boolean(), nullable=False),
sa.Column("is_superuser", sa.Boolean(), nullable=False),
sa.Column("is_verified", sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_user_email"), "user", ["email"], unique=True)
# Create oauth account table
op.create_table('oauth_account',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('oauth_name', sa.String(length=100), nullable=False),
sa.Column('access_token', sa.String(length=1024), nullable=False),
sa.Column('expires_at', sa.Integer(), nullable=True),
sa.Column('refresh_token', sa.String(length=1024), nullable=True),
sa.Column('account_id', sa.String(length=320), nullable=False),
sa.Column('account_email', sa.String(length=320), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='cascade'),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_oauth_account_account_id'), 'oauth_account', ['account_id'], unique=False)
op.create_index(op.f('ix_oauth_account_oauth_name'), 'oauth_account', ['oauth_name'], unique=False)
op.create_table(
"oauth_account",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("oauth_name", sa.String(length=100), nullable=False),
sa.Column("access_token", sa.String(length=1024), nullable=False),
sa.Column("expires_at", sa.Integer(), nullable=True),
sa.Column("refresh_token", sa.String(length=1024), nullable=True),
sa.Column("account_id", sa.String(length=320), nullable=False),
sa.Column("account_email", sa.String(length=320), nullable=False),
sa.Column("user_id", sa.UUID(), nullable=False),
sa.ForeignKeyConstraint(["user_id"], ["user.id"], ondelete="cascade"),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_oauth_account_account_id"),
"oauth_account",
["account_id"],
unique=False,
)
op.create_index(
op.f("ix_oauth_account_oauth_name"),
"oauth_account",
["oauth_name"],
unique=False,
)
# Create torrent table
op.create_table('torrent',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('status', sa.Enum('finished', 'downloading', 'error', 'unknown', name='torrentstatus'), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('quality', sa.Enum('uhd', 'fullhd', 'hd', 'sd', 'unknown', name='quality'), nullable=False),
sa.Column('imported', sa.Boolean(), nullable=False),
sa.Column('hash', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
"torrent",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column(
"status",
sa.Enum(
"finished", "downloading", "error", "unknown", name="torrentstatus"
),
nullable=False,
),
sa.Column("title", sa.String(), nullable=False),
sa.Column(
"quality",
sa.Enum("uhd", "fullhd", "hd", "sd", "unknown", name="quality"),
nullable=False,
),
sa.Column("imported", sa.Boolean(), nullable=False),
sa.Column("hash", sa.String(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
# Create indexer query result table
op.create_table('indexer_query_result',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('download_url', sa.String(), nullable=False),
sa.Column('seeders', sa.Integer(), nullable=False),
sa.Column('flags', postgresql.ARRAY(sa.String()), nullable=True),
sa.Column('quality', sa.Enum('uhd', 'fullhd', 'hd', 'sd', 'unknown', name='quality'), nullable=False),
sa.Column('season', postgresql.ARRAY(sa.Integer()), nullable=True),
sa.Column('size', sa.BigInteger(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
"indexer_query_result",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("title", sa.String(), nullable=False),
sa.Column("download_url", sa.String(), nullable=False),
sa.Column("seeders", sa.Integer(), nullable=False),
sa.Column("flags", postgresql.ARRAY(sa.String()), nullable=True),
sa.Column(
"quality",
sa.Enum("uhd", "fullhd", "hd", "sd", "unknown", name="quality"),
nullable=False,
),
sa.Column("season", postgresql.ARRAY(sa.Integer()), nullable=True),
sa.Column("size", sa.BigInteger(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
# Create notification table
op.create_table('notification',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('message', sa.String(), nullable=False),
sa.Column('read', sa.Boolean(), nullable=False),
sa.Column('timestamp', sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
"notification",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("message", sa.String(), nullable=False),
sa.Column("read", sa.Boolean(), nullable=False),
sa.Column("timestamp", sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
# Create show table
op.create_table('show',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('external_id', sa.Integer(), nullable=False),
sa.Column('metadata_provider', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('overview', sa.String(), nullable=False),
sa.Column('year', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('external_id', 'metadata_provider')
)
op.create_table(
"show",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("external_id", sa.Integer(), nullable=False),
sa.Column("metadata_provider", sa.String(), nullable=False),
sa.Column("name", sa.String(), nullable=False),
sa.Column("overview", sa.String(), nullable=False),
sa.Column("year", sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("external_id", "metadata_provider"),
)
# Create movie table
op.create_table('movie',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('external_id', sa.Integer(), nullable=False),
sa.Column('metadata_provider', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('overview', sa.String(), nullable=False),
sa.Column('year', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('external_id', 'metadata_provider')
)
op.create_table(
"movie",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("external_id", sa.Integer(), nullable=False),
sa.Column("metadata_provider", sa.String(), nullable=False),
sa.Column("name", sa.String(), nullable=False),
sa.Column("overview", sa.String(), nullable=False),
sa.Column("year", sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("external_id", "metadata_provider"),
)
# Create season table
op.create_table('season',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('show_id', sa.UUID(), nullable=False),
sa.Column('number', sa.Integer(), nullable=False),
sa.Column('external_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('overview', sa.String(), nullable=False),
sa.ForeignKeyConstraint(['show_id'], ['show.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('show_id', 'number')
)
op.create_table(
"season",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("show_id", sa.UUID(), nullable=False),
sa.Column("number", sa.Integer(), nullable=False),
sa.Column("external_id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(), nullable=False),
sa.Column("overview", sa.String(), nullable=False),
sa.ForeignKeyConstraint(["show_id"], ["show.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("show_id", "number"),
)
# Create movie file table
op.create_table('movie_file',
sa.Column('movie_id', sa.UUID(), nullable=False),
sa.Column('file_path_suffix', sa.String(), nullable=False),
sa.Column('quality', sa.Enum('uhd', 'fullhd', 'hd', 'sd', 'unknown', name='quality'), nullable=False),
sa.Column('torrent_id', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['movie_id'], ['movie.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['torrent_id'], ['torrent.id'], ondelete='SET NULL'),
sa.PrimaryKeyConstraint('movie_id', 'file_path_suffix')
)
op.create_table(
"movie_file",
sa.Column("movie_id", sa.UUID(), nullable=False),
sa.Column("file_path_suffix", sa.String(), nullable=False),
sa.Column(
"quality",
sa.Enum("uhd", "fullhd", "hd", "sd", "unknown", name="quality"),
nullable=False,
),
sa.Column("torrent_id", sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(["movie_id"], ["movie.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(["torrent_id"], ["torrent.id"], ondelete="SET NULL"),
sa.PrimaryKeyConstraint("movie_id", "file_path_suffix"),
)
# Create movie request table
op.create_table('movie_request',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('movie_id', sa.UUID(), nullable=False),
sa.Column('wanted_quality', sa.Enum('uhd', 'fullhd', 'hd', 'sd', 'unknown', name='quality'), nullable=False),
sa.Column('min_quality', sa.Enum('uhd', 'fullhd', 'hd', 'sd', 'unknown', name='quality'), nullable=False),
sa.Column('authorized', sa.Boolean(), nullable=False),
sa.Column('requested_by_id', sa.UUID(), nullable=True),
sa.Column('authorized_by_id', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['authorized_by_id'], ['user.id'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['movie_id'], ['movie.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['requested_by_id'], ['user.id'], ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('movie_id', 'wanted_quality')
)
op.create_table(
"movie_request",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("movie_id", sa.UUID(), nullable=False),
sa.Column(
"wanted_quality",
sa.Enum("uhd", "fullhd", "hd", "sd", "unknown", name="quality"),
nullable=False,
),
sa.Column(
"min_quality",
sa.Enum("uhd", "fullhd", "hd", "sd", "unknown", name="quality"),
nullable=False,
),
sa.Column("authorized", sa.Boolean(), nullable=False),
sa.Column("requested_by_id", sa.UUID(), nullable=True),
sa.Column("authorized_by_id", sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(["authorized_by_id"], ["user.id"], ondelete="SET NULL"),
sa.ForeignKeyConstraint(["movie_id"], ["movie.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(["requested_by_id"], ["user.id"], ondelete="SET NULL"),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("movie_id", "wanted_quality"),
)
# Create episode table
op.create_table('episode',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('season_id', sa.UUID(), nullable=False),
sa.Column('number', sa.Integer(), nullable=False),
sa.Column('external_id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.ForeignKeyConstraint(['season_id'], ['season.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('season_id', 'number')
)
op.create_table(
"episode",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("season_id", sa.UUID(), nullable=False),
sa.Column("number", sa.Integer(), nullable=False),
sa.Column("external_id", sa.Integer(), nullable=False),
sa.Column("title", sa.String(), nullable=False),
sa.ForeignKeyConstraint(["season_id"], ["season.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("season_id", "number"),
)
# Create season file table
op.create_table('season_file',
sa.Column('season_id', sa.UUID(), nullable=False),
sa.Column('torrent_id', sa.UUID(), nullable=True),
sa.Column('file_path_suffix', sa.String(), nullable=False),
sa.Column('quality', sa.Enum('uhd', 'fullhd', 'hd', 'sd', 'unknown', name='quality'), nullable=False),
sa.ForeignKeyConstraint(['season_id'], ['season.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['torrent_id'], ['torrent.id'], ondelete='SET NULL'),
sa.PrimaryKeyConstraint('season_id', 'file_path_suffix')
)
op.create_table(
"season_file",
sa.Column("season_id", sa.UUID(), nullable=False),
sa.Column("torrent_id", sa.UUID(), nullable=True),
sa.Column("file_path_suffix", sa.String(), nullable=False),
sa.Column(
"quality",
sa.Enum("uhd", "fullhd", "hd", "sd", "unknown", name="quality"),
nullable=False,
),
sa.ForeignKeyConstraint(["season_id"], ["season.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(["torrent_id"], ["torrent.id"], ondelete="SET NULL"),
sa.PrimaryKeyConstraint("season_id", "file_path_suffix"),
)
# Create season request table
op.create_table('season_request',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('season_id', sa.UUID(), nullable=False),
sa.Column('wanted_quality', sa.Enum('uhd', 'fullhd', 'hd', 'sd', 'unknown', name='quality'), nullable=False),
sa.Column('min_quality', sa.Enum('uhd', 'fullhd', 'hd', 'sd', 'unknown', name='quality'), nullable=False),
sa.Column('requested_by_id', sa.UUID(), nullable=True),
sa.Column('authorized', sa.Boolean(), nullable=False),
sa.Column('authorized_by_id', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['authorized_by_id'], ['user.id'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['requested_by_id'], ['user.id'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['season_id'], ['season.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('season_id', 'wanted_quality')
)
op.create_table(
"season_request",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("season_id", sa.UUID(), nullable=False),
sa.Column(
"wanted_quality",
sa.Enum("uhd", "fullhd", "hd", "sd", "unknown", name="quality"),
nullable=False,
),
sa.Column(
"min_quality",
sa.Enum("uhd", "fullhd", "hd", "sd", "unknown", name="quality"),
nullable=False,
),
sa.Column("requested_by_id", sa.UUID(), nullable=True),
sa.Column("authorized", sa.Boolean(), nullable=False),
sa.Column("authorized_by_id", sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(["authorized_by_id"], ["user.id"], ondelete="SET NULL"),
sa.ForeignKeyConstraint(["requested_by_id"], ["user.id"], ondelete="SET NULL"),
sa.ForeignKeyConstraint(["season_id"], ["season.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("season_id", "wanted_quality"),
)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('season_request')
op.drop_table('season_file')
op.drop_table('episode')
op.drop_table('movie_request')
op.drop_table('movie_file')
op.drop_table('season')
op.drop_table('movie')
op.drop_table('show')
op.drop_table('notification')
op.drop_table('indexer_query_result')
op.drop_table('torrent')
op.drop_index(op.f('ix_oauth_account_oauth_name'), table_name='oauth_account')
op.drop_index(op.f('ix_oauth_account_account_id'), table_name='oauth_account')
op.drop_table('oauth_account')
op.drop_index(op.f('ix_user_email'), table_name='user')
op.drop_table('user')
op.drop_table("season_request")
op.drop_table("season_file")
op.drop_table("episode")
op.drop_table("movie_request")
op.drop_table("movie_file")
op.drop_table("season")
op.drop_table("movie")
op.drop_table("show")
op.drop_table("notification")
op.drop_table("indexer_query_result")
op.drop_table("torrent")
op.drop_index(op.f("ix_oauth_account_oauth_name"), table_name="oauth_account")
op.drop_index(op.f("ix_oauth_account_account_id"), table_name="oauth_account")
op.drop_table("oauth_account")
op.drop_index(op.f("ix_user_email"), table_name="user")
op.drop_table("user")
# ### end Alembic commands ###

View File

@@ -9,6 +9,8 @@ class BasicConfig(BaseSettings):
tv_directory: Path = Path(__file__).parent.parent / "data" / "tv"
movie_directory: Path = Path(__file__).parent.parent / "data" / "movies"
torrent_directory: Path = Path(__file__).parent.parent / "data" / "torrents"
usenet_directory: Path = Path(__file__).parent.parent / "data" / "usenet"
FRONTEND_URL: AnyHttpUrl = "http://localhost:3000/"
CORS_URLS: list[str] = []
DEVELOPMENT: bool = False

View File

@@ -10,10 +10,11 @@ class GenericIndexer(object):
else:
raise ValueError("indexer name must not be None")
def search(self, query: str) -> list[IndexerQueryResult]:
def search(self, query: str, is_tv: bool) -> list[IndexerQueryResult]:
"""
Sends a search request to the Indexer and returns the results.
:param is_tv: Whether to search for TV shows or movies.
:param query: The search query to send to the Indexer.
:return: A list of IndexerQueryResult objects representing the search results.
"""

View File

@@ -3,6 +3,7 @@ import xml.etree.ElementTree as ET
from xml.etree.ElementTree import Element
import requests
from pydantic import HttpUrl
from media_manager.indexer.indexers.generic import GenericIndexer
from media_manager.indexer.config import JackettConfig
@@ -25,7 +26,7 @@ class Jackett(GenericIndexer):
log.debug("Registering Jacket as Indexer")
# NOTE: this could be done in parallel, but if there aren't more than a dozen indexers, it shouldn't matter
def search(self, query: str) -> list[IndexerQueryResult]:
def search(self, query: str, is_tv: bool) -> list[IndexerQueryResult]:
log.debug("Searching for " + query)
responses = []
@@ -33,7 +34,7 @@ class Jackett(GenericIndexer):
log.debug(f"Searching in indexer: {indexer}")
url = (
self.url
+ f"/api/v2.0/indexers/{indexer}/results/torznab/api?apikey={self.api_key}&t=search&q={query}"
+ f"/api/v2.0/indexers/{indexer}/results/torznab/api?apikey={self.api_key}&t={'tvsearch' if is_tv else 'movie'}&q={query}"
)
response = requests.get(url)
responses.append(response)
@@ -62,10 +63,12 @@ class Jackett(GenericIndexer):
result = IndexerQueryResult(
title=item.find("title").text,
download_url=item.find("link").text,
download_url=HttpUrl(item.find("enclosure").attrib["url"]),
seeders=seeders,
flags=[],
size=int(item.find("size").text),
usenet=False, # always False, because Jackett doesn't support usenet
age=0, # always 0 for torrents, as Jackett does not provide age information in a convenient format
)
result_list.append(result)
log.debug(f"Raw result: {result.model_dump()}")

View File

@@ -23,21 +23,23 @@ class Prowlarr(GenericIndexer):
self.url = config.url
log.debug("Registering Prowlarr as Indexer")
def search(self, query: str) -> list[IndexerQueryResult]:
def search(self, query: str, is_tv: bool) -> list[IndexerQueryResult]:
log.debug("Searching for " + query)
url = self.url + "/api/v1/search"
headers = {"accept": "application/json", "X-Api-Key": self.api_key}
params = {
"query": query,
"apikey": self.api_key,
"categories": "5000" if is_tv else "2000", # TV: 5000, Movies: 2000
"limit": 10000,
}
response = requests.get(url, headers=headers, params=params)
response = requests.get(url, params=params)
if response.status_code == 200:
result_list: list[IndexerQueryResult] = []
for result in response.json():
if result["protocol"] == "torrent":
log.debug("torrent result: " + result.__str__())
is_torrent = result["protocol"] == "torrent"
if is_torrent:
result_list.append(
IndexerQueryResult(
download_url=result["downloadUrl"],
@@ -45,8 +47,24 @@ class Prowlarr(GenericIndexer):
seeders=result["seeders"],
flags=result["indexerFlags"],
size=result["size"],
usenet=False,
age=0, # Torrent results do not need age information
)
)
else:
result_list.append(
IndexerQueryResult(
download_url=result["downloadUrl"],
title=result["sortTitle"],
seeders=0, # Usenet results do not have seeders
flags=result["indexerFlags"],
size=result["size"],
usenet=True,
age=int(result["ageMinutes"]) * 60,
)
)
log.debug("torrent result: " + result.__str__())
return result_list
else:
log.error(f"Prowlarr Error: {response.status_code}")

View File

@@ -19,3 +19,5 @@ class IndexerQueryResult(Base):
quality: Mapped[Quality]
season = mapped_column(ARRAY(Integer))
size = mapped_column(BigInteger)
usenet: Mapped[bool]
age: Mapped[int]

View File

@@ -22,6 +22,12 @@ class IndexerRepository:
def save_result(self, result: IndexerQueryResultSchema) -> IndexerQueryResultSchema:
log.debug("Saving indexer query result: %s", result)
self.db.add(IndexerQueryResult(**result.model_dump()))
result_data = result.model_dump()
result_data["download_url"] = str(
result.download_url
) # this is the needful, because sqlalchemy is too dumb to handle the HttpUrl type
self.db.add(IndexerQueryResult(**result_data))
self.db.commit()
return result

View File

@@ -3,7 +3,7 @@ import typing
from uuid import UUID, uuid4
import pydantic
from pydantic import BaseModel, computed_field, ConfigDict
from pydantic import BaseModel, computed_field, ConfigDict, HttpUrl
from media_manager.torrent.models import Quality
@@ -15,11 +15,14 @@ class IndexerQueryResult(BaseModel):
id: IndexerQueryResultId = pydantic.Field(default_factory=uuid4)
title: str
download_url: str
download_url: HttpUrl
seeders: int
flags: list[str]
size: int
usenet: bool
age: int
@computed_field(return_type=Quality)
@property
def quality(self) -> Quality:
@@ -73,3 +76,6 @@ class PublicIndexerQueryResult(BaseModel):
flags: list[str]
season: list[int]
size: int
usenet: bool
age: int

View File

@@ -11,10 +11,11 @@ class IndexerService:
def get_result(self, result_id: IndexerQueryResultId) -> IndexerQueryResult:
return self.repository.get_result(result_id=result_id)
def search(self, query: str) -> list[IndexerQueryResult]:
def search(self, query: str, is_tv: bool) -> list[IndexerQueryResult]:
"""
Search for results using the indexers based on a query.
:param is_tv: Whether the search is for TV shows or movies.
:param query: The search query.
:param db: The database session.
:return: A list of search results.
@@ -25,7 +26,7 @@ class IndexerService:
for indexer in indexers:
try:
indexer_results = indexer.search(query)
indexer_results = indexer.search(query, is_tv=is_tv)
results.extend(indexer_results)
log.debug(
f"Indexer {indexer.__class__.__name__} returned {len(indexer_results)} results for query: {query}"

View File

@@ -362,6 +362,7 @@ class MovieRepository:
quality=torrent.quality,
imported=torrent.imported,
file_path_suffix=file_path_suffix,
usenet=torrent.usenet,
)
formatted_results.append(movie_torrent)
return formatted_results

View File

@@ -83,6 +83,7 @@ class MovieTorrent(BaseModel):
quality: Quality
imported: bool
file_path_suffix: str
usenet: bool
class RichMovieTorrent(BaseModel):

View File

@@ -178,7 +178,7 @@ class MovieService:
search_query = f"{movie.name}"
torrents: list[IndexerQueryResult] = self.indexer_service.search(
query=search_query
query=search_query, is_tv=False
)
if search_query_override:

View File

@@ -0,0 +1,59 @@
from abc import ABC, abstractmethod
from media_manager.indexer.schemas import IndexerQueryResult
from media_manager.torrent.schemas import TorrentStatus, Torrent
class AbstractDownloadClient(ABC):
    """
    Abstract base class for download clients.
    Defines the interface that all download clients must implement.

    Concrete implementations (e.g. qBittorrent, SABnzbd) translate these
    operations into calls against their respective client APIs.
    """

    @abstractmethod
    def download_torrent(self, torrent: IndexerQueryResult) -> Torrent:
        """
        Add a torrent to the download client and return the torrent object.

        :param torrent: The indexer query result of the torrent file to download.
        :return: The torrent object with calculated hash and initial status.
        """
        pass

    @abstractmethod
    def remove_torrent(self, torrent: Torrent, delete_data: bool = False) -> None:
        """
        Remove a torrent from the download client.

        :param torrent: The torrent to remove.
        :param delete_data: Whether to delete the downloaded data.
        """
        pass

    @abstractmethod
    def get_torrent_status(self, torrent: Torrent) -> TorrentStatus:
        """
        Get the status of a specific torrent.

        :param torrent: The torrent to get the status of.
        :return: The status of the torrent.
        """
        pass

    @abstractmethod
    def pause_torrent(self, torrent: Torrent) -> None:
        """
        Pause a torrent download.

        :param torrent: The torrent to pause.
        """
        pass

    @abstractmethod
    def resume_torrent(self, torrent: Torrent) -> None:
        """
        Resume a torrent download.

        :param torrent: The torrent to resume.
        """
        pass

View File

@@ -0,0 +1,207 @@
import hashlib
import logging
import bencoder
import qbittorrentapi
import requests
from pydantic_settings import BaseSettings, SettingsConfigDict
from media_manager.config import BasicConfig
from media_manager.indexer.schemas import IndexerQueryResult
from media_manager.torrent.download_clients.abstractDownloadClient import (
AbstractDownloadClient,
)
from media_manager.torrent.schemas import TorrentStatus, Torrent
log = logging.getLogger(__name__)
class QbittorrentConfig(BaseSettings):
    """Connection settings for qBittorrent, read from QBITTORRENT_* env vars."""

    model_config = SettingsConfigDict(env_prefix="QBITTORRENT_")
    # Defaults match a stock local qBittorrent Web UI installation.
    host: str = "localhost"
    port: int = 8080
    username: str = "admin"
    password: str = "admin"
class QbittorrentDownloadClient(AbstractDownloadClient):
    """Download client implementation backed by the qBittorrent Web API."""

    # qBittorrent torrent state strings, grouped into the coarse
    # TorrentStatus buckets used by get_torrent_status().
    DOWNLOADING_STATE = (
        "allocating",
        "downloading",
        "metaDL",
        "pausedDL",
        "queuedDL",
        "stalledDL",
        "checkingDL",
        "forcedDL",
        "moving",
    )
    FINISHED_STATE = (
        "uploading",
        "pausedUP",
        "queuedUP",
        "stalledUP",
        "checkingUP",
        "forcedUP",
    )
    ERROR_STATE = ("missingFiles", "error", "checkingResumeData")
    UNKNOWN_STATE = ("unknown",)

    def __init__(self):
        """Read config from the environment and verify connectivity once.

        Each API method re-authenticates; the login here only fails fast on
        misconfiguration.
        """
        self.config = QbittorrentConfig()
        self.api_client = qbittorrentapi.Client(**self.config.model_dump())
        try:
            self.api_client.auth_log_in()
            log.info("Successfully logged into qbittorrent")
        except Exception as e:
            log.error(f"Failed to log into qbittorrent: {e}")
            raise
        finally:
            self.api_client.auth_log_out()

    def download_torrent(self, indexer_result: IndexerQueryResult) -> Torrent:
        """
        Add a torrent to the download client and return the torrent object.

        :param indexer_result: The indexer query result of the torrent file to download.
        :return: The torrent object with calculated hash and initial status.
        """
        log.info(f"Attempting to download torrent: {indexer_result.title}")
        torrent_filepath = (
            BasicConfig().torrent_directory / f"{indexer_result.title}.torrent"
        )
        if torrent_filepath.exists():
            log.warning(f"Torrent already exists: {torrent_filepath}")
            # Calculate hash from existing file
            # NOTE(review): this branch only recomputes the info-hash and does
            # not (re-)add the torrent to qBittorrent — presumably it was
            # added on an earlier attempt; confirm this is intended.
            with open(torrent_filepath, "rb") as file:
                content = file.read()
            decoded_content = bencoder.decode(content)
            # BitTorrent info-hash: SHA-1 of the bencoded "info" dict.
            torrent_hash = hashlib.sha1(
                bencoder.encode(decoded_content[b"info"])
            ).hexdigest()
        else:
            # Download the torrent file
            with open(torrent_filepath, "wb") as file:
                content = requests.get(str(indexer_result.download_url)).content
                file.write(content)
            # Calculate hash and add to qBittorrent
            with open(torrent_filepath, "rb") as file:
                content = file.read()
            try:
                decoded_content = bencoder.decode(content)
            except Exception as e:
                log.error(f"Failed to decode torrent file: {e}")
                raise e
            torrent_hash = hashlib.sha1(
                bencoder.encode(decoded_content[b"info"])
            ).hexdigest()
            try:
                self.api_client.auth_log_in()
                answer = self.api_client.torrents_add(
                    category="MediaManager",
                    torrent_files=content,
                    save_path=indexer_result.title,
                )
            finally:
                self.api_client.auth_log_out()
            # torrents_add returns the literal string "Ok." on success.
            if answer != "Ok.":
                log.error(f"Failed to download torrent. API response: {answer}")
                raise RuntimeError(
                    f"Failed to download torrent, API-Answer isn't 'Ok.'; API Answer: {answer}"
                )
        log.info(f"Successfully processed torrent: {indexer_result.title}")
        # Create and return torrent object
        torrent = Torrent(
            status=TorrentStatus.unknown,
            title=indexer_result.title,
            quality=indexer_result.quality,
            imported=False,
            hash=torrent_hash,
        )
        # Get initial status from download client
        torrent.status = self.get_torrent_status(torrent)
        return torrent

    def remove_torrent(self, torrent: Torrent, delete_data: bool = False) -> None:
        """
        Remove a torrent from the download client.

        :param torrent: The torrent to remove.
        :param delete_data: Whether to delete the downloaded data.
        """
        log.info(f"Removing torrent: {torrent.title}")
        try:
            self.api_client.auth_log_in()
            self.api_client.torrents_delete(
                torrent_hashes=torrent.hash, delete_files=delete_data
            )
        finally:
            self.api_client.auth_log_out()

    def get_torrent_status(self, torrent: Torrent) -> TorrentStatus:
        """
        Get the status of a specific torrent.

        :param torrent: The torrent to get the status of.
        :return: The status of the torrent.
        """
        log.info(f"Fetching status for torrent: {torrent.title}")
        try:
            self.api_client.auth_log_in()
            info = self.api_client.torrents_info(torrent_hashes=torrent.hash)
        finally:
            self.api_client.auth_log_out()
        if not info:
            # Hash unknown to qBittorrent (e.g. removed externally).
            log.warning(f"No information found for torrent: {torrent.id}")
            return TorrentStatus.unknown
        else:
            state: str = info[0]["state"]
            log.info(f"Torrent {torrent.id} is in state: {state}")
            if state in self.DOWNLOADING_STATE:
                return TorrentStatus.downloading
            elif state in self.FINISHED_STATE:
                return TorrentStatus.finished
            elif state in self.ERROR_STATE:
                return TorrentStatus.error
            elif state in self.UNKNOWN_STATE:
                return TorrentStatus.unknown
            else:
                # Any unmapped state is treated as an error.
                return TorrentStatus.error

    def pause_torrent(self, torrent: Torrent) -> None:
        """
        Pause a torrent download.

        :param torrent: The torrent to pause.
        """
        log.info(f"Pausing torrent: {torrent.title}")
        try:
            self.api_client.auth_log_in()
            self.api_client.torrents_pause(torrent_hashes=torrent.hash)
        finally:
            self.api_client.auth_log_out()

    def resume_torrent(self, torrent: Torrent) -> None:
        """
        Resume a torrent download.

        :param torrent: The torrent to resume.
        """
        log.info(f"Resuming torrent: {torrent.title}")
        try:
            self.api_client.auth_log_in()
            self.api_client.torrents_resume(torrent_hashes=torrent.hash)
        finally:
            self.api_client.auth_log_out()

View File

@@ -0,0 +1,165 @@
import logging
from pydantic_settings import BaseSettings, SettingsConfigDict
from media_manager.indexer.schemas import IndexerQueryResult
from media_manager.torrent.download_clients.abstractDownloadClient import (
AbstractDownloadClient,
)
from media_manager.torrent.schemas import Torrent, TorrentStatus
import sabnzbd_api
log = logging.getLogger(__name__)
class SabnzbdConfig(BaseSettings):
    """Connection settings for SABnzbd, read from SABNZBD_* environment variables."""

    # pydantic-settings looks up each field with the "SABNZBD_" prefix,
    # e.g. SABNZBD_HOST, SABNZBD_PORT, SABNZBD_API_KEY.
    model_config = SettingsConfigDict(env_prefix="SABNZBD_")
    # SABnzbd host; the client builds "<host>:<port>/api" from this, so it
    # presumably includes a scheme (e.g. "http://localhost") — TODO confirm.
    host: str = "localhost"
    # HTTP port of the SABnzbd web interface/API.
    port: int = 8080
    # SABnzbd API key; empty by default and must be configured for real use.
    api_key: str = ""
class SabnzbdDownloadClient(AbstractDownloadClient):
    """Download client backed by a SABnzbd instance for usenet (NZB) downloads.

    SABnzbd identifies each job by an "nzo_id"; that id is stored in
    ``Torrent.hash`` so the rest of the application can track usenet
    downloads with the same data model as torrents.
    """

    # SABnzbd job states grouped into the coarse TorrentStatus buckets.
    # NOTE(review): "Paused" is grouped with downloading on purpose here —
    # a paused job is still an active, resumable download.
    DOWNLOADING_STATE = (
        "Downloading",
        "Queued",
        "Paused",
        "Extracting",
        "Moving",
        "Running",
    )
    FINISHED_STATE = ("Completed",)
    ERROR_STATE = ("Failed",)
    UNKNOWN_STATE = ("Unknown",)

    def __init__(self):
        """Create the SABnzbd API client and verify connectivity.

        :raises Exception: Whatever the underlying client raises if the
            version probe fails (bad host/port/api-key).
        """
        self.config = SabnzbdConfig()
        self.client = sabnzbd_api.SabnzbdClient(
            host=self.config.host,
            port=str(self.config.port),
            api_key=self.config.api_key,
        )
        # The library builds its base URL with a "/sabnzbd" path prefix;
        # override it so requests go straight to "<host>:<port>/api".
        self.client._base_url = f"{self.config.host.rstrip('/')}:{self.config.port}/api"
        try:
            # Fail fast on misconfiguration by probing the version endpoint.
            version = self.client.version()
            log.info(f"Successfully connected to SABnzbd version: {version}")
        except Exception as e:
            log.error(f"Failed to connect to SABnzbd: {e}")
            raise

    def download_torrent(self, indexer_result: IndexerQueryResult) -> Torrent:
        """
        Add an NZB to SABnzbd and return the torrent object representing it.

        :param indexer_result: The indexer query result of the NZB file to download.
        :return: The torrent object with the SABnzbd nzo_id stored as its hash
            and an initial status fetched from SABnzbd.
        :raises RuntimeError: If SABnzbd rejects the NZB or returns no job id.
        """
        log.info(f"Attempting to download NZB: {indexer_result.title}")
        try:
            # Queue the NZB by URL; SABnzbd fetches it itself.
            response = self.client.add_uri(
                url=str(indexer_result.download_url),
            )
            # .get() avoids a KeyError on malformed responses; falsy status
            # (False/missing) means the NZB was rejected.
            if not response.get("status"):
                log.error(f"Failed to add NZB to SABnzbd: {response}")
                raise RuntimeError(f"Failed to add NZB to SABnzbd: {response}")
            # The nzo_id is SABnzbd's job identifier; it doubles as our "hash".
            nzo_ids = response.get("nzo_ids") or []
            if not nzo_ids:
                log.error(f"SABnzbd returned no nzo_id for NZB: {response}")
                raise RuntimeError(f"SABnzbd returned no nzo_id: {response}")
            nzo_id = nzo_ids[0]
            log.info(f"Successfully added NZB: {indexer_result.title}")
            torrent = Torrent(
                status=TorrentStatus.unknown,
                title=indexer_result.title,
                quality=indexer_result.quality,
                imported=False,
                hash=nzo_id,
                usenet=True,
            )
            # Replace the placeholder status with SABnzbd's current view of the job.
            torrent.status = self.get_torrent_status(torrent)
            return torrent
        except Exception as e:
            log.error(f"Failed to download NZB {indexer_result.title}: {e}")
            raise

    def remove_torrent(self, torrent: Torrent, delete_data: bool = False) -> None:
        """
        Remove a download job from SABnzbd.

        :param torrent: The torrent (NZB job) to remove; its hash is the nzo_id.
        :param delete_data: Whether to delete the downloaded files as well.
        """
        log.info(f"Removing torrent: {torrent.title} (Delete data: {delete_data})")
        try:
            self.client.delete_job(nzo_id=torrent.hash, delete_files=delete_data)
            log.info(f"Successfully removed torrent: {torrent.title}")
        except Exception as e:
            log.error(f"Failed to remove torrent {torrent.title}: {e}")
            raise

    def pause_torrent(self, torrent: Torrent) -> None:
        """
        Pause a download job in SABnzbd.

        :param torrent: The torrent (NZB job) to pause.
        """
        log.info(f"Pausing torrent: {torrent.title}")
        try:
            self.client.pause_job(nzo_id=torrent.hash)
            log.info(f"Successfully paused torrent: {torrent.title}")
        except Exception as e:
            log.error(f"Failed to pause torrent {torrent.title}: {e}")
            raise

    def resume_torrent(self, torrent: Torrent) -> None:
        """
        Resume a paused download job in SABnzbd.

        :param torrent: The torrent (NZB job) to resume.
        """
        log.info(f"Resuming torrent: {torrent.title}")
        try:
            self.client.resume_job(nzo_id=torrent.hash)
            log.info(f"Successfully resumed torrent: {torrent.title}")
        except Exception as e:
            log.error(f"Failed to resume torrent {torrent.title}: {e}")
            raise

    def get_torrent_status(self, torrent: Torrent) -> TorrentStatus:
        """
        Get the status of a specific download from SABnzbd.

        :param torrent: The torrent to get the status of; its hash is the nzo_id.
        :return: The status of the torrent.
        """
        log.info(f"Fetching status for download: {torrent.title}")
        response = self.client.get_downloads(nzo_ids=torrent.hash)
        log.debug("SABnzbd response: %s", response)
        queue = response.get("queue", {})
        # Prefer the per-job status: queue["status"] is the *global* queue
        # state (e.g. "Idle"/"Paused"), not the state of this particular job.
        for slot in queue.get("slots", []):
            if slot.get("nzo_id") == torrent.hash:
                status = slot.get("status", "Unknown")
                log.info(f"Download status for NZB {torrent.title}: {status}")
                return self._map_status(status)
        # Job not in the queue — SABnzbd moves finished jobs to history.
        # Fall back to the queue-wide status (previous behaviour).
        # TODO(review): query SABnzbd history here to report finished/failed
        # jobs accurately instead of mapping the global queue state.
        status = queue.get("status", "Unknown")
        log.info(f"Download status for NZB {torrent.title}: {status}")
        return self._map_status(status)

    def _map_status(self, sabnzbd_status: str) -> TorrentStatus:
        """
        Map a SABnzbd status string to TorrentStatus.

        :param sabnzbd_status: The status from SABnzbd.
        :return: The corresponding TorrentStatus (unknown for unrecognised states).
        """
        if sabnzbd_status in self.DOWNLOADING_STATE:
            return TorrentStatus.downloading
        elif sabnzbd_status in self.FINISHED_STATE:
            return TorrentStatus.finished
        elif sabnzbd_status in self.ERROR_STATE:
            return TorrentStatus.error
        else:
            return TorrentStatus.unknown

View File

@@ -0,0 +1,146 @@
import logging
import os
from enum import Enum
from media_manager.indexer.schemas import IndexerQueryResult
from media_manager.torrent.download_clients.abstractDownloadClient import (
AbstractDownloadClient,
)
from media_manager.torrent.download_clients.qbittorrent import QbittorrentDownloadClient
from media_manager.torrent.download_clients.sabnzbd import SabnzbdDownloadClient
from media_manager.torrent.schemas import Torrent, TorrentStatus
log = logging.getLogger(__name__)
class DownloadClientType(Enum):
    """Types of download clients supported"""

    # NOTE(review): this enum is not referenced elsewhere in this module —
    # confirm it is used by callers or consider removing it.
    TORRENT = "torrent"
    USENET = "usenet"
class DownloadManager:
    """
    Manages download clients and routes downloads to the appropriate client
    based on the content type (torrent vs usenet).

    Only one torrent client and one usenet client are active at a time.
    """

    def __init__(self):
        # One client per transport; None means "not configured".
        self._torrent_client: AbstractDownloadClient | None = None
        self._usenet_client: AbstractDownloadClient | None = None
        self._initialize_clients()

    def _initialize_clients(self) -> None:
        """Initialize and register the default download clients"""
        # Each client is opt-in via its *_ENABLED environment variable and
        # initialization failures are logged, never fatal.
        if os.getenv("QBITTORRENT_ENABLED", "false").lower() == "true":
            try:
                self._torrent_client = QbittorrentDownloadClient()
                log.info(
                    "qBittorrent client initialized and set as active torrent client"
                )
            except Exception as e:
                log.error(f"Failed to initialize qBittorrent client: {e}")
        if os.getenv("SABNZBD_ENABLED", "false").lower() == "true":
            try:
                self._usenet_client = SabnzbdDownloadClient()
                log.info("SABnzbd client initialized and set as active usenet client")
            except Exception as e:
                log.error(f"Failed to initialize SABnzbd client: {e}")

        active_clients = [
            name
            for name, client in (
                ("torrent", self._torrent_client),
                ("usenet", self._usenet_client),
            )
            if client
        ]
        log.info(
            f"Download manager initialized with active download clients: {', '.join(active_clients) if active_clients else 'none'}"
        )

    def _get_appropriate_client(
        self, indexer_result: IndexerQueryResult | Torrent
    ) -> AbstractDownloadClient:
        """
        Select the appropriate download client based on the indexer result

        :param indexer_result: The indexer query result to determine client type
        :return: The appropriate download client
        :raises RuntimeError: If no suitable client is available
        """
        # Both IndexerQueryResult and Torrent expose a `usenet` flag.
        if indexer_result.usenet:
            selected = self._usenet_client
            if selected is None:
                raise RuntimeError("No usenet download client configured")
            log.info(f"Selected usenet client: {selected.__class__.__name__}")
            return selected
        selected = self._torrent_client
        if selected is None:
            raise RuntimeError("No torrent download client configured")
        log.info(f"Selected torrent client: {selected.__class__.__name__}")
        return selected

    def download(self, indexer_result: IndexerQueryResult) -> Torrent:
        """
        Download content using the appropriate client

        :param indexer_result: The indexer query result to download
        :return: The torrent object representing the download
        """
        log.info(f"Processing download request for: {indexer_result.title}")
        return self._get_appropriate_client(indexer_result).download_torrent(
            indexer_result
        )

    def remove_torrent(self, torrent: Torrent, delete_data: bool = False) -> None:
        """
        Remove a torrent using the appropriate client

        :param torrent: The torrent to remove
        :param delete_data: Whether to delete the downloaded data
        """
        log.info(f"Removing torrent: {torrent.title}")
        self._get_appropriate_client(torrent).remove_torrent(torrent, delete_data)

    def get_torrent_status(self, torrent: Torrent) -> TorrentStatus:
        """
        Get the status of a torrent using the appropriate client

        :param torrent: The torrent to get status for
        :return: The current status of the torrent
        """
        return self._get_appropriate_client(torrent).get_torrent_status(torrent)

    def pause_torrent(self, torrent: Torrent) -> None:
        """
        Pause a torrent using the appropriate client

        :param torrent: The torrent to pause
        """
        log.info(f"Pausing torrent: {torrent.title}")
        self._get_appropriate_client(torrent).pause_torrent(torrent)

    def resume_torrent(self, torrent: Torrent) -> None:
        """
        Resume a torrent using the appropriate client

        :param torrent: The torrent to resume
        """
        log.info(f"Resuming torrent: {torrent.title}")
        self._get_appropriate_client(torrent).resume_torrent(torrent)

View File

@@ -14,6 +14,7 @@ class Torrent(Base):
quality: Mapped[Quality]
imported: Mapped[bool]
hash: Mapped[str]
usenet: Mapped[bool]
season_files = relationship("SeasonFile", back_populates="torrent")
movie_files = relationship("MovieFile", back_populates="torrent")

View File

@@ -39,3 +39,4 @@ class Torrent(BaseModel):
quality: Quality
imported: bool
hash: str
usenet: bool = False

View File

@@ -1,63 +1,23 @@
import hashlib
import logging
import bencoder
import qbittorrentapi
import requests
from pydantic_settings import BaseSettings, SettingsConfigDict
from media_manager.config import BasicConfig
from media_manager.indexer.schemas import IndexerQueryResult
from media_manager.torrent.manager import DownloadManager
from media_manager.torrent.repository import TorrentRepository
from media_manager.torrent.schemas import Torrent, TorrentStatus, TorrentId
from media_manager.torrent.schemas import Torrent, TorrentId
from media_manager.tv.schemas import SeasonFile, Show
from media_manager.movies.schemas import Movie
log = logging.getLogger(__name__)
class TorrentServiceConfig(BaseSettings):
model_config = SettingsConfigDict(env_prefix="QBITTORRENT_")
host: str = "localhost"
port: int = 8080
username: str = "admin"
password: str = "admin"
class TorrentService:
DOWNLOADING_STATE = (
"allocating",
"downloading",
"metaDL",
"pausedDL",
"queuedDL",
"stalledDL",
"checkingDL",
"forcedDL",
"moving",
)
FINISHED_STATE = (
"uploading",
"pausedUP",
"queuedUP",
"stalledUP",
"checkingUP",
"forcedUP",
)
ERROR_STATE = ("missingFiles", "error", "checkingResumeData")
UNKNOWN_STATE = ("unknown",)
api_client = qbittorrentapi.Client(**TorrentServiceConfig().model_dump())
def __init__(self, torrent_repository: TorrentRepository):
try:
self.api_client.auth_log_in()
log.info("Successfully logged into qbittorrent")
self.torrent_repository = torrent_repository
except Exception as e:
log.error(f"Failed to log into qbittorrent: {e}")
raise
finally:
self.api_client.auth_log_out()
def __init__(
self,
torrent_repository: TorrentRepository,
download_manager: DownloadManager = None,
):
self.torrent_repository = torrent_repository
self.download_manager = download_manager or DownloadManager()
def get_season_files_of_torrent(self, torrent: Torrent) -> list[SeasonFile]:
"""
@@ -87,69 +47,16 @@ class TorrentService:
def download(self, indexer_result: IndexerQueryResult) -> Torrent:
log.info(f"Attempting to download torrent: {indexer_result.title}")
torrent = Torrent(
status=TorrentStatus.unknown,
title=indexer_result.title,
quality=indexer_result.quality,
imported=False,
hash="",
)
url = indexer_result.download_url
torrent_filepath = BasicConfig().torrent_directory / f"{torrent.title}.torrent"
torrent = self.download_manager.download(indexer_result)
if torrent_filepath.exists():
log.warning(f"Torrent already exists: {torrent_filepath}")
return self.get_torrent_status(torrent=torrent)
with open(torrent_filepath, "wb") as file:
content = requests.get(url).content
file.write(content)
with open(torrent_filepath, "rb") as file:
content = file.read()
try:
decoded_content = bencoder.decode(content)
except Exception as e:
log.error(f"Failed to decode torrent file: {e}")
raise e
torrent.hash = hashlib.sha1(
bencoder.encode(decoded_content[b"info"])
).hexdigest()
answer = self.api_client.torrents_add(
category="MediaManager", torrent_files=content, save_path=torrent.title
)
if answer == "Ok.":
log.info(f"Successfully added torrent: {torrent.title}")
return self.get_torrent_status(torrent=torrent)
else:
log.error(f"Failed to download torrent. API response: {answer}")
raise RuntimeError(
f"Failed to download torrent, API-Answer isn't 'Ok.'; API Answer: {answer}"
)
return self.torrent_repository.save_torrent(torrent=torrent)
def get_torrent_status(self, torrent: Torrent) -> Torrent:
log.info(f"Fetching status for torrent: {torrent.title}")
info = self.api_client.torrents_info(torrent_hashes=torrent.hash)
if not info:
log.warning(f"No information found for torrent: {torrent.id}")
torrent.status = TorrentStatus.unknown
else:
state: str = info[0]["state"]
log.info(f"Torrent {torrent.id} is in state: {state}")
torrent.status = self.download_manager.get_torrent_status(torrent)
if state in self.DOWNLOADING_STATE:
torrent.status = TorrentStatus.downloading
elif state in self.FINISHED_STATE:
torrent.status = TorrentStatus.finished
elif state in self.ERROR_STATE:
torrent.status = TorrentStatus.error
elif state in self.UNKNOWN_STATE:
torrent.status = TorrentStatus.unknown
else:
torrent.status = TorrentStatus.error
self.torrent_repository.save_torrent(torrent=torrent)
return torrent
@@ -161,7 +68,7 @@ class TorrentService:
:param torrent: the torrent to cancel
"""
log.info(f"Cancelling download for torrent: {torrent.title}")
self.api_client.torrents_delete(delete_files=delete_files)
self.download_manager.remove_torrent(torrent, delete_data=delete_files)
return self.get_torrent_status(torrent=torrent)
def pause_download(self, torrent: Torrent) -> Torrent:
@@ -171,7 +78,7 @@ class TorrentService:
:param torrent: the torrent to pause
"""
log.info(f"Pausing download for torrent: {torrent.title}")
self.api_client.torrents_pause(torrent_hashes=torrent.hash)
self.download_manager.pause_torrent(torrent)
return self.get_torrent_status(torrent=torrent)
def resume_download(self, torrent: Torrent) -> Torrent:
@@ -181,7 +88,7 @@ class TorrentService:
:param torrent: the torrent to resume
"""
log.info(f"Resuming download for torrent: {torrent.title}")
self.api_client.torrents_resume(torrent_hashes=torrent.hash)
self.download_manager.resume_torrent(torrent)
return self.get_torrent_status(torrent=torrent)
def get_all_torrents(self) -> list[Torrent]:
@@ -202,5 +109,6 @@ class TorrentService:
# from media_manager.tv.repository import remove_season_files_by_torrent_id
# remove_season_files_by_torrent_id(db=self.db, torrent_id=torrent_id)
# media_manager.torrent.repository.delete_torrent(db=self.db, torrent_id=t.id)
def get_movie_files_of_torrent(self, torrent: Torrent):
return self.torrent_repository.get_movie_files_of_torrent(torrent_id=torrent.id)

View File

@@ -117,6 +117,7 @@ class RichSeasonTorrent(BaseModel):
status: TorrentStatus
quality: Quality
imported: bool
usenet: bool
file_path_suffix: str
seasons: list[SeasonNumber]

View File

@@ -189,7 +189,7 @@ class TvService:
search_query = show.name + " s" + str(season_number).zfill(2)
torrents: list[IndexerQueryResult] = self.indexer_service.search(
query=search_query
query=search_query, is_tv=True
)
if search_query_override:
@@ -364,6 +364,7 @@ class TvService:
imported=show_torrent.imported,
seasons=seasons,
file_path_suffix=file_path_suffix,
usenet=show_torrent.usenet,
)
rich_season_torrents.append(season_torrent)
return RichShowTorrent(

View File

@@ -31,6 +31,7 @@ dependencies = [
"pytest>=8.4.0",
"pillow>=11.2.1",
"pillow-avif-plugin>=1.5.2",
"sabnzbd-api>=0.1.2",
]
[tool.setuptools.packages.find]

View File

@@ -36,10 +36,12 @@ def test_save_and_get_result(repo, dummy_db):
result = IndexerQueryResult(
id=result_id,
title="Test Title",
download_url="http://example.com",
download_url="https://example.com/test1",
seeders=5,
flags=["flag1"],
size=1234,
usenet=False,
age=1,
)
saved = repo.save_result(result)
assert saved == result
@@ -53,10 +55,12 @@ def test_save_result_calls_db_methods(repo, dummy_db):
result = IndexerQueryResult(
id=IndexerQueryResultId(uuid.uuid4()),
title="Another Title",
download_url="http://example.com/2",
download_url="https://example.com/test2",
seeders=2,
flags=[],
size=5678,
usenet=False,
age=1,
)
repo.save_result(result)
assert dummy_db.added[0].title == "Another Title"

View File

@@ -5,31 +5,61 @@ from media_manager.torrent.models import Quality
def test_quality_computed_field():
assert (
IndexerQueryResult(
title="Show S01 4K", download_url="", seeders=1, flags=[], size=1
title="Show S01 4K",
download_url="https://example.com/1",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).quality
== Quality.uhd
)
assert (
IndexerQueryResult(
title="Show S01 1080p", download_url="", seeders=1, flags=[], size=1
title="Show S01 1080p",
download_url="https://example.com/2",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).quality
== Quality.fullhd
)
assert (
IndexerQueryResult(
title="Show S01 720p", download_url="", seeders=1, flags=[], size=1
title="Show S01 720p",
download_url="https://example.com/3",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).quality
== Quality.hd
)
assert (
IndexerQueryResult(
title="Show S01 480p", download_url="", seeders=1, flags=[], size=1
title="Show S01 480p",
download_url="https://example.com/4",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).quality
== Quality.sd
)
assert (
IndexerQueryResult(
title="Show S01", download_url="", seeders=1, flags=[], size=1
title="Show S01",
download_url="https://example.com/5",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).quality
== Quality.unknown
)
@@ -39,52 +69,100 @@ def test_quality_computed_field_edge_cases():
# Case-insensitive
assert (
IndexerQueryResult(
title="Show S01 4k", download_url="", seeders=1, flags=[], size=1
title="Show S01 4k",
download_url="https://example.com/6",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).quality
== Quality.uhd
)
assert (
IndexerQueryResult(
title="Show S01 1080P", download_url="", seeders=1, flags=[], size=1
title="Show S01 1080P",
download_url="https://example.com/7",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).quality
== Quality.fullhd
)
assert (
IndexerQueryResult(
title="Show S01 720P", download_url="", seeders=1, flags=[], size=1
title="Show S01 720P",
download_url="https://example.com/8",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).quality
== Quality.hd
)
assert (
IndexerQueryResult(
title="Show S01 480P", download_url="", seeders=1, flags=[], size=1
title="Show S01 480P",
download_url="https://example.com/9",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).quality
== Quality.sd
)
# Multiple quality tags, prefer highest
assert (
IndexerQueryResult(
title="Show S01 4K 1080p 720p", download_url="", seeders=1, flags=[], size=1
title="Show S01 4K 1080p 720p",
download_url="https://example.com/10",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).quality
== Quality.uhd
)
assert (
IndexerQueryResult(
title="Show S01 1080p 720p", download_url="", seeders=1, flags=[], size=1
title="Show S01 1080p 720p",
download_url="https://example.com/11",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).quality
== Quality.fullhd
)
# No quality tag
assert (
IndexerQueryResult(
title="Show S01", download_url="", seeders=1, flags=[], size=1
title="Show S01",
download_url="https://example.com/12",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).quality
== Quality.unknown
)
# Quality tag in the middle
assert (
IndexerQueryResult(
title="4K Show S01", download_url="", seeders=1, flags=[], size=1
title="4K Show S01",
download_url="https://example.com/13",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).quality
== Quality.uhd
)
@@ -93,16 +171,34 @@ def test_quality_computed_field_edge_cases():
def test_season_computed_field():
# Single season
assert IndexerQueryResult(
title="Show S01", download_url="", seeders=1, flags=[], size=1
title="Show S01",
download_url="https://example.com/14",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).season == [1]
# Range of seasons
assert IndexerQueryResult(
title="Show S01 S03", download_url="", seeders=1, flags=[], size=1
title="Show S01 S03",
download_url="https://example.com/15",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).season == [1, 2, 3]
# No season
assert (
IndexerQueryResult(
title="Show", download_url="", seeders=1, flags=[], size=1
title="Show",
download_url="https://example.com/16",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).season
== []
)
@@ -112,46 +208,106 @@ def test_season_computed_field_edge_cases():
# Multiple seasons, unordered
assert (
IndexerQueryResult(
title="Show S03 S01", download_url="", seeders=1, flags=[], size=1
title="Show S03 S01",
download_url="https://example.com/17",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).season
== []
)
# Season with leading zeros
assert IndexerQueryResult(
title="Show S01 S03", download_url="", seeders=1, flags=[], size=1
title="Show S01 S03",
download_url="https://example.com/18",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).season == [1, 2, 3]
assert IndexerQueryResult(
title="Show S01 S01", download_url="", seeders=1, flags=[], size=1
title="Show S01 S01",
download_url="https://example.com/19",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).season == [1]
# No season at all
assert (
IndexerQueryResult(
title="Show", download_url="", seeders=1, flags=[], size=1
title="Show",
download_url="https://example.com/20",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).season
== []
)
# Season in lower/upper case
assert IndexerQueryResult(
title="Show s02", download_url="", seeders=1, flags=[], size=1
title="Show s02",
download_url="https://example.com/21",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).season == [2]
assert IndexerQueryResult(
title="Show S02", download_url="", seeders=1, flags=[], size=1
title="Show S02",
download_url="https://example.com/22",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).season == [2]
# Season with extra text
assert IndexerQueryResult(
title="Show S01 Complete", download_url="", seeders=1, flags=[], size=1
title="Show S01 Complete",
download_url="https://example.com/23",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
).season == [1]
def test_gt_and_lt_methods():
a = IndexerQueryResult(
title="Show S01 1080p", download_url="", seeders=5, flags=[], size=1
title="Show S01 1080p",
download_url="https://example.com/24",
seeders=5,
flags=[],
size=1,
usenet=False,
age=1,
)
b = IndexerQueryResult(
title="Show S01 720p", download_url="", seeders=10, flags=[], size=1
title="Show S01 720p",
download_url="https://example.com/25",
seeders=10,
flags=[],
size=1,
usenet=False,
age=1,
)
c = IndexerQueryResult(
title="Show S01 1080p", download_url="", seeders=2, flags=[], size=1
title="Show S01 1080p",
download_url="https://example.com/26",
seeders=2,
flags=[],
size=1,
usenet=False,
age=1,
)
# a (fullhd) > b (hd)
assert a > b
@@ -161,7 +317,13 @@ def test_gt_and_lt_methods():
assert a > c
# If quality is equal, but seeders are equal, neither is greater
d = IndexerQueryResult(
title="Show S01 1080p", download_url="", seeders=5, flags=[], size=1
title="Show S01 1080p",
download_url="https://example.com/27",
seeders=5,
flags=[],
size=1,
usenet=False,
age=1,
)
assert not (a < d)
assert not (a > d)
@@ -170,40 +332,88 @@ def test_gt_and_lt_methods():
def test_gt_and_lt_methods_edge_cases():
# Different qualities
a = IndexerQueryResult(
title="Show S01 4K", download_url="", seeders=1, flags=[], size=1
title="Show S01 4K",
download_url="https://example.com/28",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
)
b = IndexerQueryResult(
title="Show S01 1080p", download_url="", seeders=100, flags=[], size=1
title="Show S01 1080p",
download_url="https://example.com/29",
seeders=100,
flags=[],
size=1,
usenet=False,
age=1,
)
assert a > b
assert not (b > a)
# Same quality, different seeders
c = IndexerQueryResult(
title="Show S01 4K", download_url="", seeders=2, flags=[], size=1
title="Show S01 4K",
download_url="https://example.com/30",
seeders=2,
flags=[],
size=1,
usenet=False,
age=1,
)
assert a < c
assert c > a
# Same quality and seeders
d = IndexerQueryResult(
title="Show S01 4K", download_url="", seeders=1, flags=[], size=1
title="Show S01 4K",
download_url="https://example.com/31",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
)
assert not (a < d)
assert not (a > d)
# Unknown quality, should compare by seeders
e = IndexerQueryResult(
title="Show S01", download_url="", seeders=5, flags=[], size=1
title="Show S01",
download_url="https://example.com/32",
seeders=5,
flags=[],
size=1,
usenet=False,
age=1,
)
f = IndexerQueryResult(
title="Show S01", download_url="", seeders=10, flags=[], size=1
title="Show S01",
download_url="https://example.com/33",
seeders=10,
flags=[],
size=1,
usenet=False,
age=1,
)
assert e < f
assert f > e
# Mixed known and unknown quality
g = IndexerQueryResult(
title="Show S01 720p", download_url="", seeders=1, flags=[], size=1
title="Show S01 720p",
download_url="https://example.com/34",
seeders=1,
flags=[],
size=1,
usenet=False,
age=1,
)
h = IndexerQueryResult(
title="Show S01", download_url="", seeders=100, flags=[], size=1
title="Show S01",
download_url="https://example.com/35",
seeders=100,
flags=[],
size=1,
usenet=False,
age=1,
)
assert g > h
assert not (h > g)

View File

@@ -8,15 +8,17 @@ from media_manager.indexer.service import IndexerService
class DummyIndexer:
def search(self, query):
def search(self, query, is_tv=True):
return [
IndexerQueryResult(
id=IndexerQueryResultId(uuid.uuid4()),
title=f"{query} S01 1080p",
download_url="http://example.com/1",
download_url="https://example.com/torrent1",
seeders=10,
flags=["test"],
size=123456,
usenet=False,
age=1,
)
]
@@ -37,7 +39,7 @@ def indexer_service(monkeypatch, mock_indexer_repository):
def test_search_returns_results(indexer_service, mock_indexer_repository):
query = "TestShow"
results = indexer_service.search(query)
results = indexer_service.search(query, is_tv=True)
assert len(results) == 1
assert results[0].title == f"{query} S01 1080p"
mock_indexer_repository.save_result.assert_called_once()
@@ -48,10 +50,12 @@ def test_get_result_returns_result(mock_indexer_repository):
expected_result = IndexerQueryResult(
id=result_id,
title="Test S01 1080p",
download_url="http://example.com/1",
download_url="https://example.com/torrent2",
seeders=10,
flags=["test"],
size=123456,
usenet=False,
age=1,
)
mock_indexer_repository.get_result.return_value = expected_result
service = IndexerService(indexer_repository=mock_indexer_repository)

View File

@@ -414,42 +414,52 @@ def test_get_all_available_torrents_for_a_season_no_override(
torrent1 = IndexerQueryResult(
id=IndexerQueryResultId(uuid.uuid4()),
title="Test Show 1080p S01",
download_url="url1",
download_url="https://example.com/torrent1",
seeders=10,
flags=[],
size=100,
usenet=False,
age=1,
)
torrent2 = IndexerQueryResult(
id=IndexerQueryResultId(uuid.uuid4()),
title="Test Show 720p S01",
download_url="url2",
download_url="https://example.com/torrent2",
seeders=5,
flags=[],
size=100,
usenet=False,
age=1,
)
torrent3 = IndexerQueryResult(
id=IndexerQueryResultId(uuid.uuid4()),
title="Test Show 720p S01",
download_url="url3",
download_url="https://example.com/torrent3",
seeders=20,
flags=[],
size=100,
usenet=False,
age=1,
)
torrent4 = IndexerQueryResult(
id=IndexerQueryResultId(uuid.uuid4()),
title="Test Show S01E02",
download_url="url4",
download_url="https://example.com/torrent4",
seeders=5,
flags=[],
size=100,
usenet=False,
age=1,
) # Episode
torrent5 = IndexerQueryResult(
id=IndexerQueryResultId(uuid.uuid4()),
title="Test Show S02",
download_url="url5",
download_url="https://example.com/torrent5",
seeders=10,
flags=[],
size=100,
usenet=False,
age=1,
) # Different season
mock_indexer_service.search.return_value = [
@@ -466,7 +476,7 @@ def test_get_all_available_torrents_for_a_season_no_override(
mock_tv_repository.get_show_by_id.assert_called_once_with(show_id=show_id)
mock_indexer_service.search.assert_called_once_with(
query=f"{show_name} s{str(season_number).zfill(2)}"
query=f"{show_name} s{str(season_number).zfill(2)}", is_tv=True
)
assert len(results) == 3
assert torrent1 in results
@@ -499,11 +509,12 @@ def test_get_all_available_torrents_for_a_season_with_override(
torrent1 = IndexerQueryResult(
id=IndexerQueryResultId(uuid.uuid4()),
title="Custom Query S01E01",
download_url="url1",
download_url="https://example.com/torrent1",
seeders=10,
flags=[],
size=100,
# Remove 'season' argument if not supported by IndexerQueryResult
usenet=False,
age=1,
)
mock_indexer_service.search.return_value = [torrent1]
@@ -513,7 +524,9 @@ def test_get_all_available_torrents_for_a_season_with_override(
search_query_override=override_query,
)
mock_indexer_service.search.assert_called_once_with(query=override_query)
mock_indexer_service.search.assert_called_once_with(
query=override_query, is_tv=True
)
assert results == [torrent1]

15
uv.lock generated
View File

@@ -642,6 +642,7 @@ dependencies = [
{ name = "python-json-logger" },
{ name = "qbittorrent-api" },
{ name = "requests" },
{ name = "sabnzbd-api" },
{ name = "sqlalchemy" },
{ name = "starlette" },
{ name = "tmdbsimple" },
@@ -673,6 +674,7 @@ requires-dist = [
{ name = "python-json-logger", specifier = ">=3.3.0" },
{ name = "qbittorrent-api", specifier = ">=2025.5.0" },
{ name = "requests", specifier = ">=2.32.3" },
{ name = "sabnzbd-api", specifier = ">=0.1.2" },
{ name = "sqlalchemy", specifier = ">=2.0.41" },
{ name = "starlette", specifier = ">=0.46.2" },
{ name = "tmdbsimple", specifier = ">=2.9.1" },
@@ -1092,6 +1094,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b6/97/5a4b59697111c89477d20ba8a44df9ca16b41e737fa569d5ae8bff99e650/rpds_py-0.25.1-cp313-cp313t-win_amd64.whl", hash = "sha256:401ca1c4a20cc0510d3435d89c069fe0a9ae2ee6495135ac46bdd49ec0495763", size = 232218, upload-time = "2025-05-21T12:44:40.512Z" },
]
[[package]]
name = "sabnzbd-api"
version = "0.1.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a5/ab/a0cf6a4bc977afd60a7f9846c1805cf709db73feef7705ee0c4397924f48/sabnzbd-api-0.1.2.tar.gz", hash = "sha256:1bb0defcb1aa19333f717a63464fbdc8b8a748a00ca4289be5c7496f045d339f", size = 7529, upload-time = "2025-02-20T19:36:03.397Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/14/06/b9a7135a8fe164fe928265647694c0963a6e6a5439228c915ae866d13586/sabnzbd_api-0.1.2-py3-none-any.whl", hash = "sha256:65ab9aecda300e574c5074b0ae4b971f61346d89d9e3f80393dc72c712ac3797", size = 8300, upload-time = "2025-02-20T19:35:58.948Z" },
]
[[package]]
name = "shellingham"
version = "1.5.4"

View File

@@ -6,7 +6,11 @@
import { toast } from 'svelte-sonner';
import type { PublicIndexerQueryResult } from '$lib/types.js';
import { convertTorrentSeasonRangeToIntegerRange, getFullyQualifiedMediaName } from '$lib/utils';
import {
convertTorrentSeasonRangeToIntegerRange,
formatSecondsToOptimalUnit,
getFullyQualifiedMediaName
} from '$lib/utils';
import { LoaderCircle } from 'lucide-svelte';
import * as Dialog from '$lib/components/ui/dialog/index.js';
import * as Tabs from '$lib/components/ui/tabs/index.js';
@@ -269,7 +273,9 @@
<Table.Row>
<Table.Head>Title</Table.Head>
<Table.Head>Size</Table.Head>
<Table.Head>Usenet</Table.Head>
<Table.Head>Seeders</Table.Head>
<Table.Head>Age</Table.Head>
<Table.Head>Indexer Flags</Table.Head>
<Table.Head>Seasons</Table.Head>
<Table.Head class="text-right">Actions</Table.Head>
@@ -280,7 +286,11 @@
<Table.Row>
<Table.Cell class="max-w-[300px] font-medium">{torrent.title}</Table.Cell>
<Table.Cell>{(torrent.size / 1024 / 1024 / 1024).toFixed(2)}GB</Table.Cell>
<Table.Cell>{torrent.seeders}</Table.Cell>
<Table.Cell>{torrent.usenet}</Table.Cell>
<Table.Cell>{torrent.usenet ? 'N/A' : torrent.seeders}</Table.Cell>
<Table.Cell
>{torrent.usenet ? formatSecondsToOptimalUnit(torrent.age) : 'N/A'}</Table.Cell
>
<Table.Cell>
{#each torrent.flags as flag}
<Badge variant="outline">{flag}</Badge>

View File

@@ -73,6 +73,8 @@ export interface PublicIndexerQueryResult {
flags: string[]; // items: { type: string }, type: array
season: number[]; // items: { type: integer }, type: array
size: number;
usenet: boolean;
age: number;
}
export interface Season {
@@ -152,6 +154,7 @@ export interface Torrent {
imported: boolean;
hash: string;
id?: string; // type: string, format: uuid
usenet: boolean;
}
export interface UserCreate {
@@ -186,6 +189,7 @@ export interface RichSeasonTorrent {
status: TorrentStatus;
quality: Quality;
imported: boolean;
usenet: boolean;
file_path_suffix: string;
seasons: number[];

View File

@@ -74,3 +74,25 @@ export async function handleLogout() {
toast.error('Logout failed: ' + response.status);
}
}
export function formatSecondsToOptimalUnit(seconds: number): string {
	// Negative durations make no sense; clamp to the zero display.
	if (seconds < 0) return '0s';

	// Unit table ordered largest-first; the first unit that fits at least
	// once determines the display (floored to a whole number of units).
	const units: ReadonlyArray<[string, number]> = [
		['y', 365.25 * 24 * 60 * 60], // year (accounting for leap years)
		['mo', 30.44 * 24 * 60 * 60], // month (average)
		['d', 24 * 60 * 60], // day
		['h', 60 * 60], // hour
		['m', 60], // minute
		['s', 1] // second
	];

	const match = units.find(([, unitSeconds]) => seconds / unitSeconds >= 1);
	return match ? `${Math.floor(seconds / match[1])}${match[0]}` : '0s';
}