Ran ruff format

This commit is contained in:
Lucas
2026-01-12 10:08:49 -08:00
parent d04d9bf430
commit 84bab5f5d5
6 changed files with 113 additions and 80 deletions

View File

@@ -1,27 +1,29 @@
import json
def slugify(name):
    """Create a GitHub-anchor-friendly slug: lowercase, spaces to hyphens, parentheses dropped."""
    return name.lower().replace(" ", "-").replace("(", "").replace(")", "")
def extract_repo_path(link):
    """Return the trailing "owner/repo" of a GitHub URL, or "" if the URL is too short.

    A full "https://github.com/owner/repo" URL splits on "/" into at least
    5 parts, so anything shorter is treated as malformed.
    """
    parts = link.rstrip("/").split("/")
    if len(parts) >= 5:
        return f"{parts[-2]}/{parts[-1]}"
    return ""
def format_stars(n):
    """Format a star count compactly: 1_500 -> "1.5k", 2_000_000 -> "2M".

    Counts below 1,000 are returned verbatim; a trailing ".0" is stripped
    so round values read as "1k" / "1M" rather than "1.0k" / "1.0M".
    """
    if n >= 1_000_000:
        formatted = f"{n / 1_000_000:.1f}M"
        return formatted.replace(".0M", "M")
    elif n >= 1_000:
        formatted = f"{n / 1_000:.1f}k"
        return formatted.replace(".0k", "k")
    else:
        return str(n)
'''
"""
def format_stars(n):
if n >= 1_000_000:
value = n / 1_000_000
@@ -37,10 +39,10 @@ def format_stars(n):
return f"{int(value)}{suffix}"
else:
return f"{value:.1f}{suffix}"
'''
"""
def generate_contents(platform="all"):
with open("core/data/static/categories.json", "r", encoding="utf-8") as f:
cat_data = json.load(f)
with open("core/data/dynamic/applications.json", "r", encoding="utf-8") as f:
@@ -48,28 +50,30 @@ def generate_contents(platform="all"):
with open("core/data/static/tags.json", "r", encoding="utf-8") as f:
tags_data = json.load(f)
with open("core/data/static/platforms.json", "r", encoding="utf-8") as f:
platforms_data = json.load(f)
platforms_data = json.load(f)
categories = cat_data.get("categories", [])
subcategories = cat_data.get("subcategories", [])
applications = app_data.get("applications", [])
parent_map = {cat["id"]: cat["name"] for cat in categories}
attribute_map = {attribute["id"]: attribute["emoji"] for attribute in tags_data["attributes"]}
property_map = {property["id"]: property["name"] for property in tags_data["properties"]}
attribute_map = {
attribute["id"]: attribute["emoji"] for attribute in tags_data["attributes"]
}
property_map = {
property["id"]: property["name"] for property in tags_data["properties"]
}
platform_map = {p["id"]: p["name"] for p in platforms_data["platforms"]}
subcat_by_parent = {}
for sub in subcategories:
parent = sub.get("parent", "other")
subcat_by_parent.setdefault(parent, []).append({
"Name": sub["name"],
"id": sub["id"]
})
subcat_by_parent.setdefault(parent, []).append(
{"Name": sub["name"], "id": sub["id"]}
)
for key in subcat_by_parent:
subcat_by_parent[key].sort(key=lambda x: x["Name"].lower())
apps_by_subcat = {}
for app in applications:
@@ -77,7 +81,6 @@ def generate_contents(platform="all"):
if platform == "all":
include = True
else:
app_platforms = [p.lower() for p in app.get("platforms", [])]
target = platform.lower()
if target in app_platforms:
@@ -87,7 +90,7 @@ def generate_contents(platform="all"):
include = True
if not include:
continue
cat_id = app.get("category", "uncategorized")
apps_by_subcat.setdefault(cat_id, []).append(app)
@@ -96,11 +99,13 @@ def generate_contents(platform="all"):
md_output = ""
parent_items = [(pid, parent_map.get(pid, pid)) for pid in subcat_by_parent if pid != "other"]
parent_items = [
(pid, parent_map.get(pid, pid)) for pid in subcat_by_parent if pid != "other"
]
parent_items.sort(key=lambda x: x[1].lower())
if "other" in subcat_by_parent:
parent_items.append(("other", "Other"))
for pid, pname in parent_items:
md_output += f"# {pname} - [Go to top](#table-of-contents)\n\n"
@@ -120,22 +125,35 @@ def generate_contents(platform="all"):
"""
if app.get("tags"):
tags += " " + " ".join(app["tags"])
"""
"""
if app.get("tags"):
# attribute_tags = " " + " ".join(attribute_map.get(tag, tag) for tag in app.get("tags", []))
attribute_tags = " " + " ".join(attribute_map[tag] for tag in app["tags"] if tag in attribute_map)
property_tags = " ".join(f"`{property_map[tag]}`" for tag in app["tags"] if tag in property_map)
attribute_tags = " " + " ".join(
attribute_map[tag]
for tag in app["tags"]
if tag in attribute_map
)
property_tags = " ".join(
f"`{property_map[tag]}`"
for tag in app["tags"]
if tag in property_map
)
# app_platforms = " ".join(f"`{p}`" for p in app.get("platforms", []))
app_platforms = " ".join(f"`{platform_map.get(p, p)}`" for p in app.get("platforms", []))
app_platforms = " ".join(
f"`{platform_map.get(p, p)}`" for p in app.get("platforms", [])
)
stars = app.get("stars")
stars_formatted = f"**{format_stars(stars)}**" if stars is not None else ""
stars_formatted = (
f"**{format_stars(stars)}**" if stars is not None else ""
)
# repo_path = extract_repo_path(link)
# stars_badge = f"![GitHub Repo stars](https://img.shields.io/github/stars/{repo_path}?style=for-the-badge&label=%20&color=white)" if repo_path else ""
md_output += f"| [{name}]({link}){attribute_tags}{property_tags} | {description} | {app_platforms} | {stars_formatted} |\n"
md_output += "\n"
return md_output
if __name__ == "__main__":
# For testing, default to 'all' platforms
print(generate_contents("all"))

View File

@@ -1,12 +1,13 @@
import json
# Generates mainheader with dynamic project count
def generate_mainheader():
with open("core/data/dynamic/applications.json", "r", encoding="utf-8") as f:
data = json.load(f)
project_count = len(data.get("applications", []))
header_content = f"""
<table align="center">
<tr>
@@ -19,8 +20,9 @@ def generate_mainheader():
<p align="center"><code>Status: Active</code> - <code>Projects: {project_count}</code></p>
"""
return header_content
if __name__ == "__main__":
generate_mainheader()

View File

@@ -11,9 +11,10 @@ header_files = {
"windows": "core/components/windowsheader.md",
"macos": "core/components/macosheader.md",
"linux": "core/components/linuxheader.md",
"selfhost": "core/components/selfhostheader.md"
"selfhost": "core/components/selfhostheader.md",
}
def generate_readme_for_platform(platform):
content = ""
header_file = header_files.get(platform, "core/components/header.md")
@@ -21,33 +22,36 @@ def generate_readme_for_platform(platform):
# Inject mainheader with dynamic project count
if platform == "all":
content += generate_mainheader()
# Inject header
with open(header_file, "r", encoding="utf-8") as f:
content += f.read() + "\n"
# Inject tags.md
with open("core/components/tags.md", "r", encoding="utf-8") as f:
content += f.read() + "\n"
# Generate Table of Contents
toc_md = generate_table_of_contents()
content += toc_md + "\n"
# Generate the actual markdown list of contents for the given platform
contents_md = generate_contents(platform)
content += contents_md + "\n"
# Inject footer.md
with open("core/components/footer.md", "r", encoding="utf-8") as f:
content += f.read() + "\n"
# Write output file
output_filename = "README.md" if platform == "all" else f"resources/readmes/{platform}.md"
output_filename = (
"README.md" if platform == "all" else f"resources/readmes/{platform}.md"
)
with open(output_filename, "w", encoding="utf-8") as f:
f.write(content)
print(f"Generated {output_filename}")
if __name__ == "__main__":
for platform in platforms:
generate_readme_for_platform(platform)

View File

@@ -1,23 +1,25 @@
import json
def slugify(name):
    """Create an anchor-friendly slug from a string (lowercase, hyphens, no parentheses)."""
    return name.lower().replace(" ", "-").replace("(", "").replace(")", "")
def generate_table_of_contents():
# Load the categories JSON data
with open("core/data/static/categories.json", "r", encoding="utf-8") as f:
data = json.load(f)
categories = data.get("categories", [])
subcategories = data.get("subcategories", [])
# Build the alphabetical list (ignoring parent categories)
subcat_names = [sub["name"] for sub in subcategories]
subcat_names.sort(key=lambda x: x.lower())
alphabetical_md = ""
for name in subcat_names:
alphabetical_md += f"- [{name}](#{slugify(name)})\n"
# Build the categorized list
# Create a mapping from parent id to parent name
parent_map = {cat["id"]: cat["name"] for cat in categories}
@@ -34,20 +36,24 @@ def generate_table_of_contents():
parents.sort(key=lambda x: x[1].lower())
if "other" in grouped:
parents.append(("other", "Other"))
categorized_md_lines = []
for pid, pname in parents:
categorized_md_lines.append(f"- {pname}")
for subname in grouped[pid]:
categorized_md_lines.append(f" - [{subname}](#{slugify(subname)})")
# Append fixed sections at the end of the categorized TOC
fixed_sections = ["Removed Projects", "FAQ", "Honorable Mentions of Closed-Source Software"]
fixed_sections = [
"Removed Projects",
"FAQ",
"Honorable Mentions of Closed-Source Software",
]
for item in fixed_sections:
categorized_md_lines.append(f"- [{item}](#{slugify(item)})")
categorized_md = "\n".join(categorized_md_lines)
toc = f"""## Table of Contents
<details>
@@ -64,6 +70,7 @@ def generate_table_of_contents():
"""
return toc
if __name__ == "__main__":
# For testing the TOC generator
print(generate_table_of_contents())

View File

@@ -16,7 +16,7 @@ issues_report = []
for app in applications:
app_issues = []
github_url = app.get("repo_url", "").strip()
if not github_url:
app_issues.append("Missing GitHub URL")
@@ -24,13 +24,13 @@ for app in applications:
app_issues.append("Duplicate GitHub URL")
else:
seen_github.add(github_url)
category = app.get("category", "").lower()
if not category:
app_issues.append("Missing category")
elif category not in valid_categories:
app_issues.append(f"Invalid category '{category}'")
platforms = [p.lower() for p in app.get("platforms", [])]
if not platforms:
app_issues.append("Missing platform")
@@ -38,12 +38,11 @@ for app in applications:
invalid_platforms = [p for p in platforms if p not in valid_platforms]
if invalid_platforms:
app_issues.append(f"Invalid platforms: {', '.join(invalid_platforms)}")
if app_issues:
issues_report.append({
"name": app.get("name", "Unnamed Project"),
"issues": app_issues
})
issues_report.append(
{"name": app.get("name", "Unnamed Project"), "issues": app_issues}
)
with open("resources/maintenance/format_maintenance.md", "w") as f:
f.write("# Format Maintenance Report\n\n")

View File

@@ -3,60 +3,63 @@ import requests
import json
from datetime import datetime
with open('core/data/dynamic/applications.json', 'r') as f:
with open("core/data/dynamic/applications.json", "r") as f:
data = json.load(f)
GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
headers = {
'Authorization': f'token {GITHUB_TOKEN}',
'Accept': 'application/vnd.github.v3+json'
"Authorization": f"token {GITHUB_TOKEN}",
"Accept": "application/vnd.github.v3+json",
}
def update_application_data(app):
    """Refresh one application's GitHub-derived fields in place and return it.

    Queries the GitHub REST API for the repo behind ``app["repo_url"]`` and,
    on a 200 response, updates stars, language and last_commit, and — unless
    the app carries a matching "custom-*" flag opting out — homepage,
    description and license. On any non-200 response the app is returned
    unchanged (the error is printed).
    """
    # Everything after "github.com/" is the "owner/repo" API path.
    repo_name = app["repo_url"].split("github.com/")[1]
    repo_url = f"https://api.github.com/repos/{repo_name}"
    print(f"Updating: {repo_name}")
    print(f"API URL: {repo_url}")
    response = requests.get(repo_url, headers=headers)
    if response.status_code != 200:
        print(
            f"Error: Unable to fetch data for {repo_name}. Status Code: {response.status_code}"
        )
        print(f"Response: {response.text}")
        return app
    repo_data = response.json()
    # Fall back to the existing value whenever the API omits a field.
    app["stars"] = repo_data.get("stargazers_count", app["stars"])
    app["language"] = repo_data.get("language", app["language"])
    # "custom-*" flags mark hand-curated fields; never overwrite those.
    if "custom-homepage" not in app.get("flags", []):
        app["homepage_url"] = repo_data.get("homepage", app["homepage_url"])
    if "custom-description" not in app.get("flags", []):
        app["description"] = repo_data.get("description", app.get("description"))
    if "custom-license" not in app.get("flags", []):
        license_data = repo_data.get("license")
        # A repo without license metadata keeps its current value
        # (the original code's else-branch was a no-op self-assignment).
        if license_data is not None:
            app["license"] = license_data.get("spdx_id", app["license"])
    app["last_commit"] = datetime.strptime(
        repo_data["pushed_at"], "%Y-%m-%dT%H:%M:%SZ"
    ).strftime("%m/%d/%Y")
    return app
for app in data['applications']:
for app in data["applications"]:
app = update_application_data(app)
with open('core/data/dynamic/applications.json', 'w') as f:
with open("core/data/dynamic/applications.json", "w") as f:
json.dump(data, f, indent=4)
print("Updated application data successfully!")