Merge branch 'main' into add-elevenlabs

This commit is contained in:
Mamerto Fabian Jr
2025-01-14 11:21:12 +08:00
committed by GitHub
2 changed files with 126 additions and 256 deletions

View File

@@ -1,159 +0,0 @@
# Dry-run release pipeline: detects packages changed since the last release
# tag and simulates tag/release-note creation without publishing anything.
name: Release Check

on:
  # Allow manual trigger for testing
  workflow_dispatch:

jobs:
  # Builds the job matrix (one entry per package directory) and resolves
  # the git hash of the most recent release tag.
  prepare:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
      last_release: ${{ steps.last-release.outputs.hash }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0  # full history so rev-list/diff can reach old tags
      - name: Find package directories
        id: set-matrix
        run: |
          # Find all package.json and pyproject.toml files, excluding root
          DIRS=$(git ls-tree -r HEAD --name-only | grep -E "package.json|pyproject.toml" | xargs dirname | grep -v "^.$" | jq -R -s -c 'split("\n")[:-1]')
          echo "matrix=${DIRS}" >> $GITHUB_OUTPUT
          echo "Found directories: ${DIRS}"
      - name: Get last release hash
        id: last-release
        run: |
          # Fall back to HEAD~1 when the repository has no tags yet
          HASH=$(git rev-list --tags --max-count=1 || echo "HEAD~1")
          echo "hash=${HASH}" >> $GITHUB_OUTPUT
          echo "Using last release hash: ${HASH}"

  # Runs the release-check script (dry run) against each package directory
  # and uploads an artifact for every package that needs a release.
  check-release:
    needs: prepare
    runs-on: ubuntu-latest
    strategy:
      matrix:
        directory: ${{ fromJson(needs.prepare.outputs.matrix) }}
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: astral-sh/setup-uv@v5
      - name: Setup Node.js
        # BUGFIX: matrix.directory is a directory (output of `dirname`), so
        # the old `endsWith(matrix.directory, '/package.json')` condition
        # could never match and this step never ran. Check for the manifest
        # file inside the directory instead.
        if: hashFiles(format('{0}/package.json', matrix.directory)) != ''
        uses: actions/setup-node@v4
        with:
          node-version: '18'
      - name: Setup Python
        # BUGFIX: same never-true endsWith condition as above — test for the
        # pyproject.toml file inside the matrix directory instead.
        if: hashFiles(format('{0}/pyproject.toml', matrix.directory)) != ''
        run: uv python install
      - name: Check release
        id: check
        run: |
          # Create unique hash for this directory
          dir_hash=$(echo "${{ matrix.directory }}" | sha256sum | awk '{print $1}')
          # Run release check script with verbose output
          echo "Running release check against last release: ${{ needs.prepare.outputs.last_release }}"
          # Run git diff first to show changes
          echo "Changes since last release:"
          git diff --name-only "${{ needs.prepare.outputs.last_release }}" -- "${{ matrix.directory }}" || true
          # Run the release check
          output=$(uv run --script scripts/release.py --dry-run "${{ matrix.directory }}" "${{ needs.prepare.outputs.last_release }}" 2>&1)
          exit_code=$?
          echo "Release check output (exit code: $exit_code):"
          echo "$output"
          # Extract package info if successful
          if [ $exit_code -eq 0 ]; then
            pkg_info=$(echo "$output" | grep -o -E "[a-zA-Z0-9\-]+@[0-9]+\.[0-9]+\.[0-9]+" || true)
          else
            echo "Release check failed"
            exit 1
          fi
          if [ ! -z "$pkg_info" ]; then
            echo "Found package that needs release: $pkg_info"
            # Create outputs directory
            mkdir -p ./outputs
            # Save both package info and full changes
            echo "$pkg_info" > "./outputs/${dir_hash}_info"
            echo "dir_hash=${dir_hash}" >> $GITHUB_OUTPUT
            # Log what we're saving
            echo "Saved package info to ./outputs/${dir_hash}_info:"
            cat "./outputs/${dir_hash}_info"
          else
            echo "No release needed for this package"
          fi
      - name: Set artifact name
        if: steps.check.outputs.dir_hash
        id: artifact
        run: |
          # Replace forward slashes with dashes (artifact names cannot
          # contain '/')
          SAFE_DIR=$(echo "${{ matrix.directory }}" | tr '/' '-')
          echo "name=release-outputs-${SAFE_DIR}" >> $GITHUB_OUTPUT
      - uses: actions/upload-artifact@v4
        if: steps.check.outputs.dir_hash
        with:
          name: ${{ steps.artifact.outputs.name }}
          path: ./outputs/${{ steps.check.outputs.dir_hash }}*

  # Aggregates per-package artifacts and simulates tag + release creation.
  check-tag:
    needs: [prepare, check-release]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
        with:
          # NOTE(review): only matches artifacts for directories under src/
          # (slashes become dashes) — confirm all releasable packages live
          # under src/, otherwise their artifacts are silently ignored here.
          pattern: release-outputs-src-*
          merge-multiple: true
          path: outputs
      - name: Simulate tag creation
        run: |
          if [ -d outputs ]; then
            # Collect package info
            find outputs -name "*_info" -exec cat {} \; > packages.txt
            if [ -s packages.txt ]; then
              DATE=$(date +%Y.%m.%d)
              echo "🔍 Dry run: Would create tag v${DATE} if this was a real release"
              # Generate comprehensive release notes
              {
                echo "# Release ${DATE}"
                echo ""
                echo "## Updated Packages"
                while IFS= read -r line; do
                  echo "- $line"
                done < packages.txt
              } > notes.md
              echo "🔍 Would create release with following notes:"
              cat notes.md
              echo "🔍 Would create tag v${DATE} with the above release notes"
              echo "🔍 Would create GitHub release from tag v${DATE}"
            else
              echo "No packages need release"
            fi
          else
            echo "No release artifacts found"
          fi

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env uv run --script #!/usr/bin/env uv run --script
# /// script # /// script
# requires-python = ">=3.11" # requires-python = ">=3.12"
# dependencies = [ # dependencies = [
# "click>=8.1.8", # "click>=8.1.8",
# "tomlkit>=0.13.2" # "tomlkit>=0.13.2"
@@ -14,8 +14,8 @@ import json
import tomlkit import tomlkit
import datetime import datetime
import subprocess import subprocess
from enum import Enum from dataclasses import dataclass
from typing import Any, NewType from typing import Any, Iterator, NewType, Protocol
Version = NewType("Version", str) Version = NewType("Version", str)
@@ -51,25 +51,58 @@ class GitHashParamType(click.ParamType):
GIT_HASH = GitHashParamType() GIT_HASH = GitHashParamType()
class PackageType(Enum): class Package(Protocol):
NPM = 1 path: Path
PYPI = 2
@classmethod def package_name(self) -> str: ...
def from_path(cls, directory: Path) -> "PackageType":
if (directory / "package.json").exists(): def update_version(self, version: Version) -> None: ...
return cls.NPM
elif (directory / "pyproject.toml").exists():
return cls.PYPI
else:
raise Exception("No package.json or pyproject.toml found")
def get_changes(path: Path, git_hash: str) -> bool: @dataclass
class NpmPackage:
path: Path
def package_name(self) -> str:
with open(self.path / "package.json", "r") as f:
return json.load(f)["name"]
def update_version(self, version: Version):
with open(self.path / "package.json", "r+") as f:
data = json.load(f)
data["version"] = version
f.seek(0)
json.dump(data, f, indent=2)
f.truncate()
@dataclass
class PyPiPackage:
path: Path
def package_name(self) -> str:
with open(self.path / "pyproject.toml") as f:
toml_data = tomlkit.parse(f.read())
name = toml_data.get("project", {}).get("name")
if not name:
raise Exception("No name in pyproject.toml project section")
return str(name)
def update_version(self, version: Version):
# Update version in pyproject.toml
with open(self.path / "pyproject.toml") as f:
data = tomlkit.parse(f.read())
data["project"]["version"] = version
with open(self.path / "pyproject.toml", "w") as f:
f.write(tomlkit.dumps(data))
def has_changes(path: Path, git_hash: GitHash) -> bool:
"""Check if any files changed between current state and git hash""" """Check if any files changed between current state and git hash"""
try: try:
output = subprocess.run( output = subprocess.run(
["git", "diff", "--name-only", git_hash, "--", path], ["git", "diff", "--name-only", git_hash, "--", "."],
cwd=path, cwd=path,
check=True, check=True,
capture_output=True, capture_output=True,
@@ -77,105 +110,101 @@ def get_changes(path: Path, git_hash: str) -> bool:
) )
changed_files = [Path(f) for f in output.stdout.splitlines()] changed_files = [Path(f) for f in output.stdout.splitlines()]
relevant_files = [f for f in changed_files if f.suffix in ['.py', '.ts']] relevant_files = [f for f in changed_files if f.suffix in [".py", ".ts"]]
return len(relevant_files) >= 1 return len(relevant_files) >= 1
except subprocess.CalledProcessError: except subprocess.CalledProcessError:
return False return False
def get_package_name(path: Path, pkg_type: PackageType) -> str: def gen_version() -> Version:
"""Get package name from package.json or pyproject.toml"""
match pkg_type:
case PackageType.NPM:
with open(path / "package.json", "rb") as f:
return json.load(f)["name"]
case PackageType.PYPI:
with open(path / "pyproject.toml") as f:
toml_data = tomlkit.parse(f.read())
name = toml_data.get("project", {}).get("name")
if not name:
raise Exception("No name in pyproject.toml project section")
return str(name)
def generate_version() -> Version:
"""Generate version based on current date""" """Generate version based on current date"""
now = datetime.datetime.now() now = datetime.datetime.now()
return Version(f"{now.year}.{now.month}.{now.day}") return Version(f"{now.year}.{now.month}.{now.day}")
def publish_package( def find_changed_packages(directory: Path, git_hash: GitHash) -> Iterator[Package]:
path: Path, pkg_type: PackageType, version: Version, dry_run: bool = False for path in directory.glob("*/package.json"):
): if has_changes(path.parent, git_hash):
"""Publish package based on type""" yield NpmPackage(path.parent)
try: for path in directory.glob("*/pyproject.toml"):
match pkg_type: if has_changes(path.parent, git_hash):
case PackageType.NPM: yield PyPiPackage(path.parent)
# Update version in package.json
with open(path / "package.json", "rb+") as f:
data = json.load(f)
data["version"] = version
f.seek(0)
json.dump(data, f, indent=2)
f.truncate()
if not dry_run:
# Publish to npm
subprocess.run(["npm", "publish"], cwd=path, check=True)
case PackageType.PYPI:
# Update version in pyproject.toml
with open(path / "pyproject.toml") as f:
data = tomlkit.parse(f.read())
data["project"]["version"] = version
with open(path / "pyproject.toml", "w") as f:
f.write(tomlkit.dumps(data))
if not dry_run:
# Build and publish to PyPI
subprocess.run(["uv", "build"], cwd=path, check=True)
subprocess.run(
["uv", "publish"],
cwd=path,
check=True,
)
except Exception as e:
raise Exception(f"Failed to publish: {e}") from e
@click.command() @click.group()
@click.argument("directory", type=click.Path(exists=True, path_type=Path)) def cli():
@click.argument("git_hash", type=GIT_HASH) pass
@cli.command("update-packages")
@click.option( @click.option(
"--dry-run", is_flag=True, help="Update version numbers but don't publish" "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd()
) )
def main(directory: Path, git_hash: GitHash, dry_run: bool) -> int: @click.argument("git_hash", type=GIT_HASH)
"""Release package if changes detected""" def update_packages(directory: Path, git_hash: GitHash) -> int:
# Detect package type # Detect package type
try: path = directory.resolve(strict=True)
path = directory.resolve(strict=True) version = gen_version()
pkg_type = PackageType.from_path(path)
except Exception as e:
return 1
# Check for changes for package in find_changed_packages(path, git_hash):
if not get_changes(path, git_hash): name = package.package_name()
return 0 package.update_version(version)
try: click.echo(f"{name}@{version}")
# Generate version and publish
version = generate_version()
name = get_package_name(path, pkg_type)
publish_package(path, pkg_type, version, dry_run) return 0
if not dry_run:
click.echo(f"{name}@{version}")
else: @cli.command("generate-notes")
click.echo(f"Dry run: Would have published {name}@{version}") @click.option(
return 0 "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd()
except Exception as e: )
return 1 @click.argument("git_hash", type=GIT_HASH)
def generate_notes(directory: Path, git_hash: GitHash) -> int:
# Detect package type
path = directory.resolve(strict=True)
version = gen_version()
click.echo(f"# Release : v{version}")
click.echo("")
click.echo("## Updated packages")
for package in find_changed_packages(path, git_hash):
name = package.package_name()
click.echo(f"- {name}@{version}")
return 0
@cli.command("generate-version")
def generate_version() -> int:
# Detect package type
click.echo(gen_version())
return 0
@cli.command("generate-matrix")
@click.option(
"--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd()
)
@click.option("--npm", is_flag=True, default=False)
@click.option("--pypi", is_flag=True, default=False)
@click.argument("git_hash", type=GIT_HASH)
def generate_matrix(directory: Path, git_hash: GitHash, pypi: bool, npm: bool) -> int:
# Detect package type
path = directory.resolve(strict=True)
version = gen_version()
changes = []
for package in find_changed_packages(path, git_hash):
pkg = package.path.relative_to(path)
if npm and isinstance(package, NpmPackage):
changes.append(str(pkg))
if pypi and isinstance(package, PyPiPackage):
changes.append(str(pkg))
click.echo(json.dumps(changes))
return 0
if __name__ == "__main__": if __name__ == "__main__":
sys.exit(main()) sys.exit(cli())