Merge branch 'main' into patch-1

This commit is contained in:
Mamerto Fabian Jr
2025-01-14 07:32:23 +08:00
committed by GitHub
3 changed files with 471 additions and 0 deletions

159
.github/workflows/release-check.yml vendored Normal file
View File

@@ -0,0 +1,159 @@
name: Release Check

on:
  # Allow manual trigger for testing
  workflow_dispatch:

jobs:
  prepare:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
      last_release: ${{ steps.last-release.outputs.hash }}
    steps:
      - uses: actions/checkout@v4
        with:
          # Full history is required to diff against the last release tag
          fetch-depth: 0

      - name: Find package directories
        id: set-matrix
        run: |
          # Find all package.json and pyproject.toml files, excluding root.
          # Dots are escaped and names anchored so lookalike files can't
          # match; "^\.$" (not "^.$", which matches ANY one char) drops ".".
          DIRS=$(git ls-tree -r HEAD --name-only | grep -E "(^|/)package\.json$|(^|/)pyproject\.toml$" | xargs dirname | grep -v '^\.$' | jq -R -s -c 'split("\n")[:-1]')
          echo "matrix=${DIRS}" >> $GITHUB_OUTPUT
          echo "Found directories: ${DIRS}"

      - name: Get last release hash
        id: last-release
        run: |
          # "git rev-list --tags" exits 0 with EMPTY output when no tags
          # exist, so an "|| echo HEAD~1" fallback never fires; use a
          # parameter-expansion default instead.
          HASH=$(git rev-list --tags --max-count=1)
          HASH="${HASH:-HEAD~1}"
          echo "hash=${HASH}" >> $GITHUB_OUTPUT
          echo "Using last release hash: ${HASH}"

  check-release:
    needs: prepare
    runs-on: ubuntu-latest
    strategy:
      matrix:
        directory: ${{ fromJson(needs.prepare.outputs.matrix) }}
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: astral-sh/setup-uv@v5
      # matrix.directory holds DIRECTORY paths (dirname output), so an
      # endsWith(..., '/package.json') check can never be true; test for
      # the manifest file inside the directory instead.
      - name: Setup Node.js
        if: hashFiles(format('{0}/package.json', matrix.directory)) != ''
        uses: actions/setup-node@v4
        with:
          node-version: '18'
      - name: Setup Python
        if: hashFiles(format('{0}/pyproject.toml', matrix.directory)) != ''
        run: uv python install
      - name: Check release
        id: check
        run: |
          # Create unique hash for this directory
          dir_hash=$(echo "${{ matrix.directory }}" | sha256sum | awk '{print $1}')

          # Run release check script with verbose output
          echo "Running release check against last release: ${{ needs.prepare.outputs.last_release }}"

          # Run git diff first to show changes
          echo "Changes since last release:"
          git diff --name-only "${{ needs.prepare.outputs.last_release }}" -- "${{ matrix.directory }}" || true

          # Run the release check.  GitHub runs `run:` with "bash -e", so a
          # plain `output=$(...); exit_code=$?` would abort the step before
          # the status could be read; capture it via the `if` instead.
          if output=$(uv run --script scripts/release.py --dry-run "${{ matrix.directory }}" "${{ needs.prepare.outputs.last_release }}" 2>&1); then
            exit_code=0
          else
            exit_code=$?
          fi
          echo "Release check output (exit code: $exit_code):"
          echo "$output"

          # Extract package info if successful
          if [ $exit_code -eq 0 ]; then
            pkg_info=$(echo "$output" | grep -o -E "[a-zA-Z0-9\-]+@[0-9]+\.[0-9]+\.[0-9]+" || true)
          else
            echo "Release check failed"
            exit 1
          fi

          if [ ! -z "$pkg_info" ]; then
            echo "Found package that needs release: $pkg_info"
            # Create outputs directory
            mkdir -p ./outputs
            # Save both package info and full changes
            echo "$pkg_info" > "./outputs/${dir_hash}_info"
            echo "dir_hash=${dir_hash}" >> $GITHUB_OUTPUT
            # Log what we're saving
            echo "Saved package info to ./outputs/${dir_hash}_info:"
            cat "./outputs/${dir_hash}_info"
          else
            echo "No release needed for this package"
          fi
      - name: Set artifact name
        if: steps.check.outputs.dir_hash
        id: artifact
        run: |
          # Replace forward slashes with dashes
          SAFE_DIR=$(echo "${{ matrix.directory }}" | tr '/' '-')
          echo "name=release-outputs-${SAFE_DIR}" >> $GITHUB_OUTPUT
      - uses: actions/upload-artifact@v4
        if: steps.check.outputs.dir_hash
        with:
          name: ${{ steps.artifact.outputs.name }}
          path: ./outputs/${{ steps.check.outputs.dir_hash }}*

  check-tag:
    needs: [prepare, check-release]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
        with:
          pattern: release-outputs-src-*
          merge-multiple: true
          path: outputs
      - name: Simulate tag creation
        run: |
          if [ -d outputs ]; then
            # Collect package info
            find outputs -name "*_info" -exec cat {} \; > packages.txt
            if [ -s packages.txt ]; then
              DATE=$(date +%Y.%m.%d)
              echo "🔍 Dry run: Would create tag v${DATE} if this was a real release"
              # Generate comprehensive release notes
              {
                echo "# Release ${DATE}"
                echo ""
                echo "## Updated Packages"
                while IFS= read -r line; do
                  echo "- $line"
                done < packages.txt
              } > notes.md
              echo "🔍 Would create release with following notes:"
              cat notes.md
              echo "🔍 Would create tag v${DATE} with the above release notes"
              echo "🔍 Would create GitHub release from tag v${DATE}"
            else
              echo "No packages need release"
            fi
          else
            echo "No release artifacts found"
          fi

131
.github/workflows/release.yml vendored Normal file
View File

@@ -0,0 +1,131 @@
name: Automatic Release Creation

on:
  workflow_dispatch:

jobs:
  detect-last-release:
    runs-on: ubuntu-latest
    outputs:
      last_release: ${{ steps.last-release.outputs.hash }}
    steps:
      - uses: actions/checkout@v4
        with:
          # Full history is required for git rev-list / git diff
          fetch-depth: 0
      - name: Get last release hash
        id: last-release
        run: |
          # "git rev-list --tags" exits 0 with EMPTY output when no tags
          # exist, so an "|| echo HEAD~1" fallback never fires; use a
          # parameter-expansion default instead.
          HASH=$(git rev-list --tags --max-count=1)
          HASH="${HASH:-HEAD~1}"
          echo "hash=${HASH}" >> $GITHUB_OUTPUT
          echo "Using last release hash: ${HASH}"

  create-tag-name:
    runs-on: ubuntu-latest
    outputs:
      # NOTE: was "${{ steps.last-release.outputs.tag}}" — step renamed to
      # match what it actually does (it generates a date tag, it does not
      # look up the last release).
      tag_name: ${{ steps.tag-name.outputs.tag }}
    steps:
      - name: Generate date-based tag name
        id: tag-name
        run: |
          DATE=$(date +%Y.%m.%d)
          echo "tag=v${DATE}" >> $GITHUB_OUTPUT
          echo "Using tag: v${DATE}"

  detect-packages:
    needs: [detect-last-release]
    runs-on: ubuntu-latest
    outputs:
      packages: ${{ steps.find-packages.outputs.packages }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Install uv
        uses: astral-sh/setup-uv@v5
      - name: Find packages
        id: find-packages
        working-directory: src
        run: |
          cat << 'EOF' > find_packages.py
          import json
          import os
          import subprocess
          from itertools import chain
          from pathlib import Path
          packages = []
          print("Starting package detection...")
          print(f"Using LAST_RELEASE: {os.environ['LAST_RELEASE']}")
          # Find all directories containing package.json or pyproject.toml
          paths = chain(Path('.').glob('*/package.json'), Path('.').glob('*/pyproject.toml'))
          for path in paths:
              print(f"\nChecking path: {path}")
              # Check for changes in .py or .ts files
              # Run git diff from the specific directory
              cmd = ['git', 'diff', '--name-only', f'{os.environ["LAST_RELEASE"]}..HEAD', '--', '.']
              result = subprocess.run(cmd, capture_output=True, text=True, cwd=path.parent)
              # Check if any .py or .ts files were changed
              changed_files = result.stdout.strip().split('\n')
              print(f"Changed files found: {changed_files}")
              has_changes = any(f.endswith(('.py', '.ts')) for f in changed_files if f)
              if has_changes:
                  print(f"Adding package: {path.parent}")
                  packages.append(str(path.parent))
          print(f"\nFinal packages list: {packages}")
          # Write output
          with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
              f.write(f"packages={json.dumps(packages)}\n")
          EOF
          LAST_RELEASE=${{ needs.detect-last-release.outputs.last_release }} uv run --script --python 3.12 find_packages.py

  create-tag:
    needs: [detect-packages, create-tag-name]
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - uses: actions/checkout@v4
      - name: Create release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          # Configure git
          git config --global user.name "GitHub Actions"
          git config --global user.email "actions@github.com"

          # Get packages array
          PACKAGES='${{ needs.detect-packages.outputs.packages }}'

          if [ "$(echo "$PACKAGES" | jq 'length')" -gt 0 ]; then
            # Generate comprehensive release notes
            {
              echo "# Release ${{ needs.create-tag-name.outputs.tag_name }}"
              echo ""
              echo "## Updated Packages"
              echo "$PACKAGES" | jq -r '.[]' | while read -r package; do
                echo "- $package"
              done
            } > notes.md

            # Create and push tag
            git tag -a "${{ needs.create-tag-name.outputs.tag_name }}" -m "Release ${{ needs.create-tag-name.outputs.tag_name }}"
            git push origin "${{ needs.create-tag-name.outputs.tag_name }}"

            # Create GitHub release
            gh release create "${{ needs.create-tag-name.outputs.tag_name }}" \
              --title "Release ${{ needs.create-tag-name.outputs.tag_name }}" \
              --notes-file notes.md
          else
            echo "No packages need release"
          fi

181
scripts/release.py Executable file
View File

@@ -0,0 +1,181 @@
#!/usr/bin/env uv run --script
# /// script
# requires-python = ">=3.11"
# dependencies = [
# "click>=8.1.8",
# "tomlkit>=0.13.2"
# ]
# ///
import sys
import re
import click
from pathlib import Path
import json
import tomlkit
import datetime
import subprocess
from enum import Enum
from typing import Any, NewType
Version = NewType("Version", str)
GitHash = NewType("GitHash", str)
class GitHashParamType(click.ParamType):
    """Click parameter type that validates a (possibly abbreviated) git hash."""

    name = "git_hash"

    def convert(
        self, value: Any, param: click.Parameter | None, ctx: click.Context | None
    ) -> GitHash | None:
        # click convention: pass None straight through for optional params.
        if value is None:
            return None

        length = len(value)
        if not 8 <= length <= 40:
            self.fail(f"Git hash must be between 8 and 40 characters, got {length}")

        if not re.match(r"^[0-9a-fA-F]+$", value):
            self.fail("Git hash must contain only hex digits (0-9, a-f)")

        # Verify the hash actually resolves in the current repository.
        try:
            subprocess.run(
                ["git", "rev-parse", "--verify", value],
                check=True,
                capture_output=True,
            )
        except subprocess.CalledProcessError:
            self.fail(f"Git hash {value} not found in repository")

        # Normalize to lowercase for stable downstream comparisons.
        return GitHash(value.lower())


# Singleton instance used as the click argument type below.
GIT_HASH = GitHashParamType()
class PackageType(Enum):
    """Kind of package manifest present in a directory."""

    NPM = 1
    PYPI = 2

    @classmethod
    def from_path(cls, directory: Path) -> "PackageType":
        """Detect the package type from the manifest file in `directory`.

        package.json wins when both manifests exist; raises when neither does.
        """
        manifests = (
            ("package.json", cls.NPM),
            ("pyproject.toml", cls.PYPI),
        )
        for filename, pkg_type in manifests:
            if (directory / filename).exists():
                return pkg_type
        raise Exception("No package.json or pyproject.toml found")
def get_changes(path: Path, git_hash: str) -> bool:
    """Check if any files changed between current state and git hash.

    Only .py and .ts files count as relevant changes.  A failing git
    invocation is treated as "no changes" (deliberate best-effort for CI).
    """
    try:
        diff = subprocess.run(
            ["git", "diff", "--name-only", git_hash, "--", path],
            cwd=path,
            check=True,
            capture_output=True,
            text=True,
        )
    except subprocess.CalledProcessError:
        return False
    return any(
        Path(name).suffix in (".py", ".ts") for name in diff.stdout.splitlines()
    )
def get_package_name(path: Path, pkg_type: PackageType) -> str:
"""Get package name from package.json or pyproject.toml"""
match pkg_type:
case PackageType.NPM:
with open(path / "package.json", "rb") as f:
return json.load(f)["name"]
case PackageType.PYPI:
with open(path / "pyproject.toml") as f:
toml_data = tomlkit.parse(f.read())
name = toml_data.get("project", {}).get("name")
if not name:
raise Exception("No name in pyproject.toml project section")
return str(name)
def generate_version() -> Version:
    """Generate a CalVer-style version (year.month.day, no zero padding)."""
    today = datetime.datetime.now()
    return Version(f"{today.year}.{today.month}.{today.day}")
def publish_package(
path: Path, pkg_type: PackageType, version: Version, dry_run: bool = False
):
"""Publish package based on type"""
try:
match pkg_type:
case PackageType.NPM:
# Update version in package.json
with open(path / "package.json", "rb+") as f:
data = json.load(f)
data["version"] = version
f.seek(0)
json.dump(data, f, indent=2)
f.truncate()
if not dry_run:
# Publish to npm
subprocess.run(["npm", "publish"], cwd=path, check=True)
case PackageType.PYPI:
# Update version in pyproject.toml
with open(path / "pyproject.toml") as f:
data = tomlkit.parse(f.read())
data["project"]["version"] = version
with open(path / "pyproject.toml", "w") as f:
f.write(tomlkit.dumps(data))
if not dry_run:
# Build and publish to PyPI
subprocess.run(["uv", "build"], cwd=path, check=True)
subprocess.run(
["uv", "publish"],
cwd=path,
check=True,
)
except Exception as e:
raise Exception(f"Failed to publish: {e}") from e
@click.command()
@click.argument("directory", type=click.Path(exists=True, path_type=Path))
@click.argument("git_hash", type=GIT_HASH)
@click.option(
    "--dry-run", is_flag=True, help="Update version numbers but don't publish"
)
def main(directory: Path, git_hash: GitHash, dry_run: bool) -> int:
    """Release package if changes detected.

    Prints "name@version" (or a dry-run message) on success.  Exits
    non-zero on failure: in click's default standalone mode a plain
    `return 1` from the callback is IGNORED and the process exits 0,
    which silently defeated the CI failure check — use sys.exit instead.
    """
    # Detect package type
    try:
        path = directory.resolve(strict=True)
        pkg_type = PackageType.from_path(path)
    except Exception as e:
        # Report why instead of failing silently.
        click.echo(f"Error: {e}", err=True)
        sys.exit(1)

    # Check for changes; no changes means nothing to do (success).
    if not get_changes(path, git_hash):
        return 0

    try:
        # Generate version and publish
        version = generate_version()
        name = get_package_name(path, pkg_type)
        publish_package(path, pkg_type, version, dry_run)

        if not dry_run:
            click.echo(f"{name}@{version}")
        else:
            click.echo(f"Dry run: Would have published {name}@{version}")
        return 0
    except Exception as e:
        click.echo(f"Error: {e}", err=True)
        sys.exit(1)
# Entry point when executed directly (e.g. `uv run --script scripts/release.py`).
if __name__ == "__main__":
    sys.exit(main())