mirror of
https://github.com/mustbeperfect/definitive-opensource.git
synced 2026-04-28 03:39:42 +02:00
Refactored source/scripts into core/source
This commit is contained in:
59
core/source/maintenance/json_formatter.py
Normal file
59
core/source/maintenance/json_formatter.py
Normal file
@@ -0,0 +1,59 @@
|
||||
import json

# Paths are relative to the repository root (the scripts are run from there).
APPLICATIONS_FILE = "core/data/dynamic/applications.json"
CATEGORIES_FILE = "core/data/static/categories.json"
PLATFORMS_FILE = "core/data/static/platforms.json"
REPORT_FILE = "resources/maintenance/format_maintenance.md"


def collect_app_issues(app, valid_categories, valid_platforms, seen_github):
    """Return a list of formatting issues for a single application entry.

    Checks that the entry has a unique, non-empty GitHub URL, a category from
    ``valid_categories``, and at least one platform from ``valid_platforms``
    (all comparisons lower-cased).  ``seen_github`` is mutated: a new,
    non-empty URL is added so later entries are flagged as duplicates.
    """
    issues = []

    github_url = app.get("repo_url", "").strip()
    if not github_url:
        issues.append("Missing GitHub URL")
    elif github_url in seen_github:
        issues.append("Duplicate GitHub URL")
    else:
        seen_github.add(github_url)

    category = app.get("category", "").lower()
    if not category:
        issues.append("Missing category")
    elif category not in valid_categories:
        issues.append(f"Invalid category '{category}'")

    platforms = [p.lower() for p in app.get("platforms", [])]
    if not platforms:
        issues.append("Missing platform")
    else:
        invalid_platforms = [p for p in platforms if p not in valid_platforms]
        if invalid_platforms:
            issues.append(f"Invalid platforms: {', '.join(invalid_platforms)}")

    return issues


def main():
    """Validate every application entry and write the markdown report."""
    with open(APPLICATIONS_FILE) as f:
        applications = json.load(f)["applications"]

    with open(CATEGORIES_FILE) as f:
        categories_data = json.load(f)["subcategories"]
    valid_categories = {c["id"].lower() for c in categories_data}

    with open(PLATFORMS_FILE) as f:
        platforms_data = json.load(f)["platforms"]
    valid_platforms = {p["id"].lower() for p in platforms_data}

    seen_github = set()
    issues_report = []

    for app in applications:
        app_issues = collect_app_issues(
            app, valid_categories, valid_platforms, seen_github
        )
        if app_issues:
            issues_report.append({
                "name": app.get("name", "Unnamed Project"),
                "issues": app_issues,
            })

    with open(REPORT_FILE, "w") as f:
        f.write("# Format Maintenance Report\n\n")
        if not issues_report:
            f.write("No issues found. All applications are properly formatted.\n")
        else:
            for entry in issues_report:
                f.write(f"## {entry['name']}\n")
                for issue in entry["issues"]:
                    f.write(f"- {issue}\n")
                f.write("\n")

    print("Maintenance report generated: format_maintenance.md")


if __name__ == "__main__":
    main()
|
||||
63
core/source/maintenance/stats_updator.py
Normal file
63
core/source/maintenance/stats_updator.py
Normal file
@@ -0,0 +1,63 @@
|
||||
import json
import os
from datetime import datetime

# Single source of truth for the application catalogue (read and rewritten).
APPLICATIONS_FILE = 'core/data/dynamic/applications.json'

GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')

# Only send an Authorization header when a token is actually configured;
# a literal "token None" header makes GitHub reject every request.
# (Matches the conditional-header style used by status_checker.py.)
headers = {'Accept': 'application/vnd.github.v3+json'}
if GITHUB_TOKEN:
    headers['Authorization'] = f'token {GITHUB_TOKEN}'


def repo_slug(repo_url):
    """Return the ``owner/name`` part of a GitHub repository URL."""
    return repo_url.split("github.com/")[1]


def update_application_data(app):
    """Refresh a single application dict in place from the GitHub REST API.

    Fields guarded by a matching ``custom-*`` flag in ``app['flags']`` are
    left untouched.  On any non-200 response the entry is returned unchanged.
    Returns the (mutated) ``app`` for convenience.
    """
    # Lazy import so this module can be imported without requests installed.
    import requests

    repo_name = repo_slug(app["repo_url"])
    repo_url = f'https://api.github.com/repos/{repo_name}'

    print(f"Updating: {repo_name}")
    print(f"API URL: {repo_url}")

    response = requests.get(repo_url, headers=headers)
    if response.status_code != 200:
        print(f"Error: Unable to fetch data for {repo_name}. Status Code: {response.status_code}")
        print(f"Response: {response.text}")
        return app

    repo_data = response.json()

    # NOTE: dict.get(key, default) only falls back when the key is MISSING.
    # The GitHub API returns explicit nulls (homepage, description, language),
    # so guard against None explicitly to avoid wiping existing values.
    stars = repo_data.get('stargazers_count')
    if stars is not None:
        app['stars'] = stars

    language = repo_data.get('language')
    if language is not None:
        app['language'] = language

    if 'custom-homepage' not in app.get('flags', []):
        homepage = repo_data.get('homepage')
        if homepage is not None:
            app['homepage_url'] = homepage

    if 'custom-description' not in app.get('flags', []):
        description = repo_data.get('description')
        if description is not None:
            app['description'] = description

    if 'custom-license' not in app.get('flags', []):
        license_data = repo_data.get('license')
        if license_data is not None:
            app['license'] = license_data.get('spdx_id', app['license'])

    # pushed_at is an ISO-8601 UTC timestamp; stored as MM/DD/YYYY.
    app['last_commit'] = datetime.strptime(
        repo_data['pushed_at'], '%Y-%m-%dT%H:%M:%SZ'
    ).strftime('%m/%d/%Y')

    return app


def main():
    """Update every application entry and write the catalogue back to disk."""
    with open(APPLICATIONS_FILE, 'r') as f:
        data = json.load(f)

    for app in data['applications']:
        update_application_data(app)  # mutates each entry in place

    with open(APPLICATIONS_FILE, 'w') as f:
        json.dump(data, f, indent=4)

    print("Updated application data successfully!")


if __name__ == "__main__":
    main()
|
||||
83
core/source/maintenance/status_checker.py
Normal file
83
core/source/maintenance/status_checker.py
Normal file
@@ -0,0 +1,83 @@
|
||||
import json
import os
from datetime import datetime, timedelta, timezone

INPUT_FILE = "core/data/dynamic/applications.json"
OUTPUT_FILE = "resources/maintenance/status_maintenance.md"
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")

# Repositories with no push within this window are reported as abandoned.
ABANDONED_AFTER = timedelta(days=365)

headers = {"Accept": "application/vnd.github+json"}
if GITHUB_TOKEN:
    headers["Authorization"] = f"Bearer {GITHUB_TOKEN}"


def parse_owner_repo(repo_url):
    """Return ``(owner, repo)`` parsed from a GitHub URL, or ``None``.

    ``None`` is returned for empty/non-GitHub URLs or URLs too short to
    contain an owner and repository segment.
    """
    if not repo_url or "github.com" not in repo_url:
        return None
    try:
        parts = repo_url.rstrip("/").split("/")
        return parts[-2], parts[-1]
    except Exception:
        return None


def write_section(f, title, names):
    """Write one ``## title:`` report section, or ``_None_`` when empty."""
    f.write(f"## {title}:\n")
    if names:
        for name in names:
            f.write(f"- {name}\n")
    else:
        f.write("_None_\n")


def main():
    """Check every application's GitHub repo and write the status report."""
    # Lazy import so this module can be imported without requests installed.
    import requests

    with open(INPUT_FILE, "r") as f:
        data = json.load(f)

    potentially_abandoned = []
    archived = []
    no_longer_exists = []

    # Timezone-aware cutoff (datetime.utcnow() is deprecated), hoisted out
    # of the loop so every repo is judged against the same moment.
    cutoff = datetime.now(timezone.utc) - ABANDONED_AFTER

    for app in data.get("applications", []):
        repo_url = app.get("repo_url")
        parsed = parse_owner_repo(repo_url)
        if parsed is None:
            continue
        owner, repo = parsed

        api_url = f"https://api.github.com/repos/{owner}/{repo}"

        try:
            r = requests.get(api_url, headers=headers)
            if r.status_code == 404:
                no_longer_exists.append(app["name"])
                continue
            elif r.status_code != 200:
                print(f"Warning: failed to fetch {repo_url} ({r.status_code})")
                continue

            repo_data = r.json()

            if repo_data.get("archived"):
                archived.append(app["name"])
                continue

            pushed_at = repo_data.get("pushed_at")
            if pushed_at:
                last_commit_date = datetime.strptime(
                    pushed_at, "%Y-%m-%dT%H:%M:%SZ"
                ).replace(tzinfo=timezone.utc)  # API timestamps are UTC
                if last_commit_date < cutoff:
                    potentially_abandoned.append(app["name"])

        except Exception as e:
            print(f"Error processing {repo_url}: {e}")

    with open(OUTPUT_FILE, "w") as f:
        f.write("# Repository Activity Maintenance Report\n\n")
        write_section(f, "Potentially Abandoned", potentially_abandoned)
        f.write("\n")
        write_section(f, "Archived", archived)
        f.write("\n")
        write_section(f, "No Longer Exists", no_longer_exists)

    print(f"{OUTPUT_FILE} Complete")


if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user