ruff: add S linter

this mostly adds a timeout=60 to all requests

This mainly adds a timeout to all requests functions, since when
left out they can hang indefinitely.
I added a timeout of 60s, which is probably way too high, but since
before this there was none, I guess it's an improvement?
This commit is contained in:
Marcel Hellwig
2026-01-04 14:49:05 +01:00
parent 1857cf501c
commit 5368cad77a
13 changed files with 35 additions and 22 deletions

View File

@@ -36,9 +36,9 @@ def get_openid_router():
return get_oauth_router(
oauth_client=OAuth2(
client_id="mock",
client_secret="mock",
client_secret="mock", # noqa: S106
authorize_endpoint="https://example.com/authorize",
access_token_endpoint="https://example.com/token",
access_token_endpoint="https://example.com/token", # noqa: S106
),
backend=openid_cookie_auth_backend,
get_user_manager=fastapi_users.get_user_manager,

View File

@@ -140,7 +140,7 @@ async def create_default_admin_user():
if config.auth.admin_emails
else "admin@example.com"
)
default_password = "admin" # Simple default password
default_password = "admin" # noqa: S105 # Simple default password
user_create = UserCreate(
email=admin_email,

View File

@@ -5,5 +5,5 @@ class DbConfig(BaseSettings):
host: str = "localhost"
port: int = 5432
user: str = "MediaManager"
password: str = "MediaManager"
password: str = "MediaManager" # noqa: S105
dbname: str = "MediaManager"

View File

@@ -11,7 +11,7 @@ log = logging.getLogger(__name__)
class TorznabMixin:
def process_search_result(self, xml: str) -> list[IndexerQueryResult]:
result_list: list[IndexerQueryResult] = []
xml_tree = ET.fromstring(xml)
xml_tree = ET.fromstring(xml) # noqa: S314 # treated as trusted: the XML comes from an indexer the user configured
xmlns = {
"torznab": "http://torznab.com/schemas/2015/feed",
"atom": "http://www.w3.org/2005/Atom",

View File

@@ -43,7 +43,9 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
language = self.default_language
try:
response = requests.get(
url=f"{self.url}/tv/shows/{id}", params={"language": language}
url=f"{self.url}/tv/shows/{id}",
params={"language": language},
timeout=60,
)
response.raise_for_status()
return response.json()
@@ -58,7 +60,10 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
def __get_show_external_ids(self, id: int) -> dict:
try:
response = requests.get(url=f"{self.url}/tv/shows/{id}/external_ids")
response = requests.get(
url=f"{self.url}/tv/shows/{id}/external_ids",
timeout=60,
)
response.raise_for_status()
return response.json()
except requests.RequestException as e:
@@ -79,6 +84,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
response = requests.get(
url=f"{self.url}/tv/shows/{show_id}/{season_number}",
params={"language": language},
timeout=60,
)
response.raise_for_status()
return response.json()
@@ -101,6 +107,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
"query": query,
"page": page,
},
timeout=60,
)
response.raise_for_status()
return response.json()
@@ -118,6 +125,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
response = requests.get(
url=f"{self.url}/tv/trending",
params={"language": self.default_language},
timeout=60,
)
response.raise_for_status()
return response.json()
@@ -135,7 +143,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
language = self.default_language
try:
response = requests.get(
url=f"{self.url}/movies/{id}", params={"language": language}
url=f"{self.url}/movies/{id}", params={"language": language}, timeout=60
)
response.raise_for_status()
return response.json()
@@ -150,7 +158,9 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
def __get_movie_external_ids(self, id: int) -> dict:
try:
response = requests.get(url=f"{self.url}/movies/{id}/external_ids")
response = requests.get(
url=f"{self.url}/movies/{id}/external_ids", timeout=60
)
response.raise_for_status()
return response.json()
except requests.RequestException as e:
@@ -170,6 +180,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
"query": query,
"page": page,
},
timeout=60,
)
response.raise_for_status()
return response.json()
@@ -187,6 +198,7 @@ class TmdbMetadataProvider(AbstractMetadataProvider):
response = requests.get(
url=f"{self.url}/movies/trending",
params={"language": self.default_language},
timeout=60,
)
response.raise_for_status()
return response.json()

View File

@@ -22,31 +22,29 @@ class TvdbMetadataProvider(AbstractMetadataProvider):
self.url = config.tvdb_relay_url
def __get_show(self, id: int) -> dict:
return requests.get(f"{self.url}/tv/shows/{id}").json()
return requests.get(url=f"{self.url}/tv/shows/{id}", timeout=60).json()
def __get_season(self, id: int) -> dict:
return requests.get(f"{self.url}/tv/seasons/{id}").json()
return requests.get(url=f"{self.url}/tv/seasons/{id}", timeout=60).json()
def __search_tv(self, query: str) -> dict:
return requests.get(
f"{self.url}/tv/search",
params={"query": query},
url=f"{self.url}/tv/search", params={"query": query}, timeout=60
).json()
def __get_trending_tv(self) -> dict:
return requests.get(f"{self.url}/tv/trending").json()
return requests.get(url=f"{self.url}/tv/trending", timeout=60).json()
def __get_movie(self, id: int) -> dict:
return requests.get(f"{self.url}/movies/{id}").json()
return requests.get(url=f"{self.url}/movies/{id}", timeout=60).json()
def __search_movie(self, query: str) -> dict:
return requests.get(
f"{self.url}/movies/search",
params={"query": query},
url=f"{self.url}/movies/search", params={"query": query}, timeout=60
).json()
def __get_trending_movies(self) -> dict:
return requests.get(f"{self.url}/movies/trending").json()
return requests.get(url=f"{self.url}/movies/trending", timeout=60).json()
def download_show_poster_image(self, show: Show) -> bool:
show_metadata = self.__get_show(id=show.external_id)

View File

@@ -13,7 +13,7 @@ def get_year_from_date(first_air_date: str | None) -> int | None:
def download_poster_image(storage_path: Path, poster_url: str, id: UUID) -> bool:
res = requests.get(poster_url, stream=True)
res = requests.get(poster_url, stream=True, timeout=60)
if res.status_code == 200:
image_file_path = storage_path.joinpath(str(id)).with_suffix("jpg")

View File

@@ -22,6 +22,7 @@ class GotifyNotificationServiceProvider(AbstractNotificationServiceProvider):
"message": message.message,
"title": message.title,
},
timeout=60,
)
if response.status_code not in range(200, 300):
return False

View File

@@ -22,6 +22,7 @@ class NtfyNotificationServiceProvider(AbstractNotificationServiceProvider):
headers={
"Title": "MediaManager - " + message.title,
},
timeout=60,
)
if response.status_code not in range(200, 300):
return False

View File

@@ -20,6 +20,7 @@ class PushoverNotificationServiceProvider(AbstractNotificationServiceProvider):
"message": message.message,
"title": "MediaManager - " + message.title,
},
timeout=60,
)
if response.status_code not in range(200, 300):
return False

View File

@@ -6,7 +6,7 @@ class QbittorrentConfig(BaseSettings):
host: str = "localhost"
port: int = 8080
username: str = "admin"
password: str = "admin"
password: str = "admin" # noqa: S105
enabled: bool = False
category_name: str = "MediaManager"

View File

@@ -165,7 +165,7 @@ def get_torrent_hash(torrent: IndexerQueryResult) -> str:
log.debug(f"parsing torrent file: {torrent.download_url}")
try:
decoded_content = bencoder.decode(torrent_content)
torrent_hash = hashlib.sha1(
torrent_hash = hashlib.sha1( # noqa: S324
bencoder.encode(decoded_content[b"info"])
).hexdigest()
except Exception as e:

View File

@@ -15,7 +15,7 @@ extend-select = [
"PGH", "PT", "PYI",
"Q",
"RUF",
"SLOT",
"S", "SLOT",
"T10", "TD", "TID",
"W",
"YTT"