mirror of
https://github.com/mustbeperfect/definitive-opensource.git
synced 2026-04-20 12:55:30 +02:00
Switched from shields to star count in text
This commit is contained in:
@@ -1,18 +1,26 @@
|
||||
import json
|
||||
|
||||
def slugify(name):
    """Turn *name* into an anchor-friendly slug.

    Lowercases the string, replaces spaces with hyphens, and strips
    parentheses so the result can be used as a Markdown anchor.
    """
    slug = name.lower()
    for old, new in ((" ", "-"), ("(", ""), (")", "")):
        slug = slug.replace(old, new)
    return slug
|
||||
|
||||
def extract_repo_path(link):
    """Return ``"username/repo"`` from a GitHub URL.

    Expects a link shaped like ``https://github.com/username/repo``
    (a trailing slash is tolerated); returns ``""`` when the URL has
    too few path segments to contain an owner and a repository.
    """
    segments = link.rstrip("/").split("/")
    if len(segments) < 5:
        return ""
    owner, repo = segments[-2], segments[-1]
    return "/".join((owner, repo))
|
||||
|
||||
def format_stars(n):
    """Format a star count compactly: 999 -> "999", 1500 -> "1.5k", 2_000_000 -> "2.0M".

    Fix: values just under one million (e.g. 999_999) used to render as
    "1000.0k" because the .1f rounding happened after the bucket was
    chosen; such values are now promoted to the "M" bucket instead.
    """
    if n >= 1_000_000:
        return f"{n / 1_000_000:.1f}M"
    if n >= 1_000:
        formatted = f"{n / 1_000:.1f}k"
        # Rounding can push e.g. 999_999 up to "1000.0k"; show "1.0M" instead.
        if formatted == "1000.0k":
            return f"{n / 1_000_000:.1f}M"
        return formatted
    return str(n)
|
||||
|
||||
def generate_contents(platform="all"):
|
||||
# Load categories, applications, and tags JSON data
|
||||
|
||||
with open("source/data/categories.json", "r", encoding="utf-8") as f:
|
||||
cat_data = json.load(f)
|
||||
with open("source/data/applications.json", "r", encoding="utf-8") as f:
|
||||
@@ -24,13 +32,13 @@ def generate_contents(platform="all"):
|
||||
subcategories = cat_data.get("subcategories", [])
|
||||
applications = app_data.get("applications", [])
|
||||
|
||||
# Map parent categories id to corresponding name
|
||||
|
||||
parent_map = {cat["id"]: cat["name"] for cat in categories}
|
||||
|
||||
# Map tag id to emoji
|
||||
|
||||
tag_map = {tag["id"]: tag["emoji"] for tag in tags_data["tags"]}
|
||||
|
||||
# Group subcategories by their parent
|
||||
|
||||
subcat_by_parent = {}
|
||||
for sub in subcategories:
|
||||
parent = sub.get("parent", "other")
|
||||
@@ -38,24 +46,24 @@ def generate_contents(platform="all"):
|
||||
"Name": sub["name"],
|
||||
"id": sub["id"]
|
||||
})
|
||||
# Sort subcategories alphabetically in each parent group
|
||||
|
||||
for key in subcat_by_parent:
|
||||
subcat_by_parent[key].sort(key=lambda x: x["Name"].lower())
|
||||
|
||||
# Filter and group applications by their subcategory
|
||||
|
||||
apps_by_subcat = {}
|
||||
for app in applications:
|
||||
include = False
|
||||
if platform == "all":
|
||||
include = True
|
||||
else:
|
||||
# Compare platform tags in lower case
|
||||
|
||||
app_platforms = [p.lower() for p in app.get("platforms", [])]
|
||||
target = platform.lower()
|
||||
if target in app_platforms:
|
||||
include = True
|
||||
|
||||
# Include "Cross" apps for select platforms
|
||||
|
||||
if target in ["macos", "linux", "windows"] and "cross" in app_platforms:
|
||||
include = True
|
||||
if not include:
|
||||
@@ -64,13 +72,13 @@ def generate_contents(platform="all"):
|
||||
cat_id = app.get("category", "uncategorized")
|
||||
apps_by_subcat.setdefault(cat_id, []).append(app)
|
||||
|
||||
# Sort applications within each subcategory alphabetically by name
|
||||
|
||||
for key in apps_by_subcat:
|
||||
apps_by_subcat[key].sort(key=lambda x: x["name"].lower())
|
||||
|
||||
# Build Markdown output.
|
||||
|
||||
md_output = ""
|
||||
# Process parent categories: sort alphabetically (excluding "other", which is added last)
|
||||
|
||||
parent_items = [(pid, parent_map.get(pid, pid)) for pid in subcat_by_parent if pid != "other"]
|
||||
parent_items.sort(key=lambda x: x[1].lower())
|
||||
if "other" in subcat_by_parent:
|
||||
@@ -78,13 +86,13 @@ def generate_contents(platform="all"):
|
||||
|
||||
for pid, pname in parent_items:
|
||||
md_output += f"# {pname} - [Go to top](#contents)\n\n"
|
||||
# For each subcategory under the parent category
|
||||
|
||||
for sub in subcat_by_parent.get(pid, []):
|
||||
subname = sub["Name"]
|
||||
md_output += f"### {subname}\n\n"
|
||||
md_output += "| Name | Description | Platform | Stars |\n"
|
||||
md_output += "| --- | --- | --- | --- |\n"
|
||||
# List all apps for the given subcategory
|
||||
|
||||
apps = apps_by_subcat.get(sub["id"], [])
|
||||
for app in apps:
|
||||
name = app.get("name", "")
|
||||
@@ -98,11 +106,12 @@ def generate_contents(platform="all"):
|
||||
if app.get("tags"):
|
||||
tags = " " + " ".join(tag_map.get(tag, tag) for tag in app.get("tags", []))
|
||||
|
||||
# Join the platform tags as provided
|
||||
app_platforms = " ".join(f"`{p}`" for p in app.get("platforms", []))
|
||||
repo_path = extract_repo_path(link)
|
||||
stars_badge = f"" if repo_path else ""
|
||||
md_output += f"| [{name}]({link}){tags} | {description} | {app_platforms} | {stars_badge} |\n"
|
||||
stars = app.get("stars")
|
||||
stars_formatted = f"**{format_stars(stars)}**" if stars is not None else ""
|
||||
# repo_path = extract_repo_path(link)
|
||||
# stars_badge = f"" if repo_path else ""
|
||||
md_output += f"| [{name}]({link}){tags} | {description} | {app_platforms} | {stars_formatted} |\n"
|
||||
md_output += "\n"
|
||||
return md_output
|
||||
|
||||
|
||||
@@ -3,61 +3,49 @@ import requests
|
||||
import json
|
||||
from datetime import datetime
|
||||
|
||||
# Read the current application catalog from disk.
with open('source/data/applications.json', 'r') as catalog_file:
    data = json.load(catalog_file)

# GitHub API token, expected in the environment (may be None locally).
GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')

# Shared headers for every GitHub API request.
headers = {
    'Authorization': f'token {GITHUB_TOKEN}',
    'Accept': 'application/vnd.github.v3+json'
}
|
||||
|
||||
# Function to get the latest data for each application
|
||||
# Function to get the latest data for each application
def update_application_data(app):
    """Refresh one application entry with live data from the GitHub API.

    Mutates *app* in place (``stars``, ``language``, ``license``,
    ``last_commit``) and returns it. On any non-200 response the entry
    is returned unchanged so a single failure does not abort the run.
    """
    # Extract "owner/repo" from the GitHub URL.
    # Fix: tolerate a trailing slash, which previously produced a bad API URL.
    repo_name = app["link"].rstrip("/").split("github.com/")[1]

    # API URL for the repository
    repo_url = f'https://api.github.com/repos/{repo_name}'

    print(f"Updating: {repo_name}")
    print(f"API URL: {repo_url}")

    # Make the request to the GitHub API
    response = requests.get(repo_url, headers=headers)

    if response.status_code == 200:
        repo_data = response.json()

        # Fall back to the existing values when a field is missing.
        app['stars'] = repo_data.get('stargazers_count', app['stars'])
        app['language'] = repo_data.get('language', app['language'])

        # 'license' can be null in the API payload; only read 'spdx_id'
        # when it is present, otherwise keep the current license as-is.
        license_data = repo_data.get('license')
        if license_data is not None:
            app['license'] = license_data.get('spdx_id', app['license'])

        # Normalize the last push timestamp to MM/DD/YYYY.
        app['last_commit'] = datetime.strptime(
            repo_data['pushed_at'], '%Y-%m-%dT%H:%M:%SZ'
        ).strftime('%m/%d/%Y')

        return app
    else:
        print(f"Error: Unable to fetch data for {repo_name}. Status Code: {response.status_code}")
        print(f"Response: {response.text}")
        return app
|
||||
|
||||
# Refresh every application entry in place via the GitHub API.
for entry in data['applications']:
    update_application_data(entry)

# Persist the refreshed catalog back to the same JSON file.
with open('source/data/applications.json', 'w') as out_file:
    json.dump(data, out_file, indent=4)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user