|
#!/usr/bin/env python |
|
import json |
|
import shutil |
|
import subprocess |
|
from pathlib import Path |
|
|
|
import typer |
|
from rich.console import Console |
|
|
|
# Single shared Rich console used for all status/progress output below.
console = Console()
|
|
|
def sh(cmd, dry=False, check=True):
    """Run *cmd* (an argv list) and return its stripped stdout.

    In dry mode the command is only echoed and "" is returned.
    When *check* is true, a nonzero exit raises RuntimeError carrying
    stderr (falling back to stdout) as the message.
    """
    if dry:
        console.print(f"[cyan]DRY:[/cyan] {' '.join(cmd)}")
        return ""
    proc = subprocess.run(cmd, capture_output=True, text=True)
    if check and proc.returncode != 0:
        message = proc.stderr.strip() or proc.stdout.strip()
        raise RuntimeError(message)
    return proc.stdout.strip()
|
|
|
def repo_exists(org, name) -> bool:
    """Return True when {org}/{name} is visible through the gh CLI."""
    try:
        sh(["gh","repo","view",f"{org}/{name}","--json","name"])
    except Exception:
        return False
    return True
|
|
|
def ensure_repo(org, name, vis, desc, topics, dry):
    """Create or update repository - idempotent.

    Creates the repo when missing; otherwise reconciles visibility.
    Description and topics are (re)applied in both cases.
    """
    exists = repo_exists(org, name)
    if exists:
        console.print(f"[dim]Repo exists {org}/{name}[/dim]")
        # Only touch visibility when the remote state differs from target.
        try:
            reported = sh(
                ["gh", "repo", "view", f"{org}/{name}", "--json", "visibility", "-q", ".visibility"], check=True
            )
            current_vis = reported.strip().lower()
            target_vis = vis.lower()
            if current_vis != target_vis:
                sh(["gh","repo","edit",f"{org}/{name}",f"--visibility={target_vis}"], dry, check=False)
                console.print(f"[yellow]Visibility changed: {current_vis} => {target_vis}[/yellow]")
        except Exception:
            # Lookup failed — attempt the update blindly (best-effort).
            sh(["gh","repo","edit",f"{org}/{name}",f"--visibility={vis}"], dry, check=False)
    else:
        sh(["gh","repo","create", f"{org}/{name}",
            f"--{vis}", "--disable-issues", "--disable-wiki", "--confirm"], dry)
        console.print(f"[green]Created {org}/{name}[/green]")

    if desc:
        sh(["gh","repo","edit",f"{org}/{name}","--description",desc], dry, check=False)
    if topics:
        sh(["gh","repo","edit",f"{org}/{name}","--add-topic",",".join(topics)], dry, check=False)
|
|
|
def set_default_branch(org, name, default_branch, dry):
    """Set the repo's default branch via the REST API (no-op when empty)."""
    if not default_branch:
        return
    sh(["gh","api","-X","PATCH",f"repos/{org}/{name}","-f",f"default_branch={default_branch}"], dry, check=False)
|
|
|
def ensure_local_clone(base: Path, org, name, dry):
    """Clone {org}/{name} under *base* unless a checkout already exists.

    Returns the local path in either case.
    """
    target = base / name
    if target.exists():
        console.print("Local exists => " + str(target))
    else:
        sh(["gh","repo","clone",f"{org}/{name}",str(target)], dry)
        console.print(f"Cloned => {target}")
    return target
|
|
|
def seed_repo(local: Path, name: str, org: str, template_repo: str, dry: bool):
    """Initialize repo from template repository instead of local seed folder.

    Idempotent: returns immediately when the local clone already has a
    commit. Otherwise clones the template into a temp dir, strips its
    .git, copies the tree into *local*, substitutes placeholders, then
    commits and pushes a bootstrap commit to main.
    """
    if dry:
        console.print(f"[cyan]DRY:[/cyan] seed repo {local} from template {template_repo}")
        return

    # Check if repo already has commits (rev-parse HEAD fails on an empty repo)
    try:
        sh(["git","-C",str(local),"rev-parse","HEAD"], check=True)
        console.print("Repo already initialized")
        return
    except Exception:
        pass  # No commits yet, continue with seeding

    # Clone template repo to temp location
    import tempfile
    temp_dir = Path(tempfile.mkdtemp()) / template_repo
    try:
        sh(["gh","repo","clone",f"{org}/{template_repo}",str(temp_dir)], check=True)

        # Remove .git to avoid conflicts with the target repo's history
        git_dir = temp_dir / ".git"
        if git_dir.exists():
            shutil.rmtree(git_dir)

        # Copy template files to target repo
        rsync_dir(temp_dir, local)

        # Substitute placeholders
        substitute_placeholders(local, name, org)
    finally:
        # BUG FIX: always remove the mkdtemp root. The old code only ran
        # rmtree when temp_dir existed, so a failed clone leaked the
        # (empty) temp directory created by mkdtemp.
        shutil.rmtree(temp_dir.parent, ignore_errors=True)

    # Stage all files and commit (best-effort: check=False keeps going)
    sh(["git","-C",str(local),"add","."], check=False)
    st = sh(["git","-C",str(local),"status","--porcelain"], check=False)
    if st.strip():
        sh(["git","-C",str(local),"commit","-m","bootstrap: initial commit from template"], check=False)
        sh(["git","-C",str(local),"branch","-M","main"], check=False)
        sh(["git","-C",str(local),"push","-u","origin","main"], check=False)
        console.print(f"Initialized repo => {name}")
    else:
        console.print("No changes to commit")
|
|
|
def rsync_dir(src: Path, dst: Path):
    """Copy the tree under *src* into *dst*, respecting a .seedignore file.

    .seedignore lists relative path prefixes, one per line ('#' starts a
    comment line). An entry matches the path itself or anything below it.
    BUG FIX: the old bare startswith() match also ignored unrelated
    siblings sharing a name prefix (entry "dist" ignored "distro.py");
    matching is now on whole path components.
    """
    patterns = set()
    ignore_file = src / ".seedignore"
    if ignore_file.exists():
        patterns = {line.strip() for line in ignore_file.read_text(encoding="utf-8").splitlines()
                    if line.strip() and not line.startswith("#")}

    def _ignored(rel_str: str) -> bool:
        # Match the exact path, or a directory-style prefix (pattern + "/").
        for raw in patterns:
            p = raw.rstrip("/")
            if rel_str == p or rel_str.startswith(p + "/"):
                return True
        return False

    for item in src.rglob("*"):
        rel = item.relative_to(src)
        # as_posix() keeps pattern matching stable across platforms.
        rel_str = rel.as_posix()
        # Skip the .seedignore manifest itself and ignored paths.
        if rel_str == ".seedignore" or _ignored(rel_str):
            continue

        target = dst / rel
        if item.is_dir():
            target.mkdir(parents=True, exist_ok=True)
        else:
            target.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy2(item, target)
|
|
|
def substitute_placeholders(path: Path, name: str, org: str):
    """Replace {{REPO_NAME}}, {{ORG}} and {{YEAR}} tokens in seeded text files.

    Only files with common text suffixes are touched. Unreadable or
    undecodable files are skipped: seeding is best-effort by design.
    """
    from datetime import datetime, timezone
    # datetime.utcnow() is deprecated — use an aware UTC timestamp.
    year = str(datetime.now(timezone.utc).year)

    text_suffixes = {".md", ".txt", ".yml", ".yaml", ".json"}
    for file in path.rglob("*"):
        if not (file.is_file() and file.suffix.lower() in text_suffixes):
            continue
        try:
            text = file.read_text(encoding="utf-8")
        except (OSError, UnicodeDecodeError):
            continue  # binary-ish or unreadable despite the suffix
        new_text = (text.replace("{{REPO_NAME}}", name)
                        .replace("{{ORG}}", org)
                        .replace("{{YEAR}}", year))
        if new_text != text:
            try:
                file.write_text(new_text, encoding="utf-8")
            except OSError:
                pass  # e.g. read-only file: keep going with the rest
|
|
|
|
|
def inspect_fork_delta(org, name, upstream, branch, base_path):
    """Inspect differences between fork and upstream repository.

    Read-only report: clones {org}/{name} under *base_path* if needed,
    wires up an 'upstream' remote, fetches *branch* from both remotes,
    then prints ahead/behind commit counts, sample commits, and the
    files a sync would touch. Never modifies refs.
    """
    if not upstream:
        console.print(f"[dim]No upstream configured for {name}[/dim]")
        return

    console.print(f"\n[bold yellow]Inspecting Delta: {name} <-> {upstream}[/bold yellow]")

    local = base_path / name

    # Clone or update local repo (a clone failure aborts the inspection)
    if not local.exists():
        try:
            sh(["gh", "repo", "clone", f"{org}/{name}", str(local)], check=True)
            console.print(f"[dim]Cloned {name} for inspection[/dim]")
        except Exception as e:
            console.print(f"[red]Failed to clone {name}: {e}[/red]")
            return
    else:
        console.print(f"[dim]Using local copy: {local}[/dim]")

    # Add upstream remote if not already present (substring check on the
    # `git remote` listing); failure here is non-fatal.
    try:
        remotes = sh(["git", "-C", str(local), "remote"], check=True)
        if "upstream" not in remotes:
            upstream_url = f"https://github.com/{upstream}.git"
            sh(["git", "-C", str(local), "remote", "add", "upstream", upstream_url], check=True)
            console.print(f"[dim]Added upstream remote: {upstream}[/dim]")
    except Exception as e:
        console.print(f"[yellow]Warning: Could not add upstream remote: {e}[/yellow]")

    # Fetch from both remotes (check=False: a missing branch on one
    # remote should not abort the whole inspection)
    try:
        console.print("[dim]Fetching from origin and upstream...[/dim]")
        sh(["git", "-C", str(local), "fetch", "origin", branch], check=False)
        sh(["git", "-C", str(local), "fetch", "upstream", branch], check=False)
    except Exception as e:
        console.print(f"[yellow]Warning: Fetch failed: {e}[/yellow]")

    # Get commit comparison via rev-list --count over the two ranges
    try:
        # Commits behind (in upstream but not in fork)
        behind = sh(["git", "-C", str(local), "rev-list", "--count", f"origin/{branch}..upstream/{branch}"], check=True)
        # Commits ahead (in fork but not in upstream)
        ahead = sh(["git", "-C", str(local), "rev-list", "--count", f"upstream/{branch}..origin/{branch}"], check=True)

        behind_count = int(behind.strip()) if behind.strip() else 0
        ahead_count = int(ahead.strip()) if ahead.strip() else 0

        # Status summary — nothing more to report when fully in sync
        if behind_count == 0 and ahead_count == 0:
            console.print("[green]✓ Fork is up-to-date with upstream[/green]")
            return
        else:
            status_parts = []
            if behind_count > 0:
                status_parts.append(f"[red]{behind_count} commits behind[/red]")
            if ahead_count > 0:
                status_parts.append(f"[cyan]{ahead_count} commits ahead[/cyan]")
            console.print(f"Status: {' | '.join(status_parts)}")

        # Show commits behind (what would be synced), capped at 10
        if behind_count > 0:
            console.print(f"\n[yellow]New commits in upstream (would be synced):[/yellow]")
            commits = sh(["git", "-C", str(local), "log", "--oneline", "--no-decorate",
                          f"origin/{branch}..upstream/{branch}", "-10"], check=True)
            for line in commits.strip().split('\n')[:10]:
                if line.strip():
                    console.print(f"  • {line}")
            if behind_count > 10:
                console.print(f"  ... and {behind_count - 10} more commits")

            # Show files that would change (three-dot diff against the
            # merge base); best-effort, capped at 15 entries
            try:
                files_changed = sh(["git", "-C", str(local), "diff", "--name-status",
                                    f"origin/{branch}...upstream/{branch}"], check=True)
                if files_changed.strip():
                    lines = files_changed.strip().split('\n')
                    file_count = len(lines)
                    console.print(f"\n[yellow]Files affected: {file_count}[/yellow]")
                    for line in lines[:15]:
                        if line.strip():
                            console.print(f"  {line}")
                    if file_count > 15:
                        console.print(f"  ... and {file_count - 15} more files")
            except Exception:
                pass

        # Show commits ahead (these would be lost with --force-sync)
        if ahead_count > 0:
            console.print(f"\n[cyan]Commits in fork not in upstream:[/cyan]")
            commits = sh(["git", "-C", str(local), "log", "--oneline", "--no-decorate",
                          f"upstream/{branch}..origin/{branch}", "-5"], check=True)
            for line in commits.strip().split('\n')[:5]:
                if line.strip():
                    console.print(f"  • {line}")
            if ahead_count > 5:
                console.print(f"  ... and {ahead_count - 5} more commits")
            console.print("[red]⚠ These commits would be LOST with --force-sync[/red]")

    except Exception as e:
        console.print(f"[red]Failed to inspect delta: {e}[/red]")
|
|
|
|
|
def sync_fork(org, name, upstream, branch, force_sync, dry):
    """Sync fork with upstream repository - idempotent.

    Verifies the repo really is a fork before calling `gh repo sync`;
    sync failures are reported but never raised to the caller.
    """
    if not upstream:
        return

    # Check if repo is actually a fork
    try:
        raw = sh(["gh", "repo", "view", f"{org}/{name}", "--json", "isFork", "-q", ".isFork"], check=True)
        if raw.strip().lower() != "true":
            console.print(f"[yellow]Skipping sync: {name} is not a fork[/yellow]")
            return
    except Exception:
        console.print(f"[dim]Could not verify fork status for {name}[/dim]")
        return

    # Build sync command
    cmd = ["gh", "repo", "sync", f"{org}/{name}"]
    if branch:
        cmd += ["--branch", branch]
    if upstream:
        cmd += ["--source", upstream]
    if force_sync:
        cmd.append("--force")
        console.print(f"[yellow]Force syncing {name} with {upstream}[/yellow]")

    try:
        sh(cmd, dry, check=True)
        console.print(f"[green]Synced {name} with {upstream}[/green]")
    except Exception as e:
        console.print(f"[yellow]Failed to sync {name}: {str(e)}[/yellow]")
|
|
|
|
|
def apply_org_settings(org, settings, dry):
    """Apply organization-wide settings via PATCH /orgs/{org}.

    Collects string fields (-f) and boolean fields (-F) from *settings*
    into one gh api call. No API call is made when nothing applies.
    """
    if not settings:
        return

    # Build API call with all settings
    cmd = ["gh", "api", "-X", "PATCH", f"orgs/{org}"]
    applied = []

    # String-valued org fields (sent as raw strings)
    for field in ["description", "name", "blog", "location", "email", "company", "twitter_username",
                  "default_repository_permission", "default_repository_branch"]:
        if field in settings and settings[field]:
            cmd.extend(["-f", f"{field}={settings[field]}"])
            applied.append(field)

    # Boolean org fields (sent typed via -F, lowercased for JSON booleans)
    for field in ["members_can_create_repositories", "members_can_create_public_repositories",
                  "members_can_create_private_repositories", "members_can_delete_repositories",
                  "members_can_change_repo_visibility", "members_can_fork_private_repositories",
                  "members_can_delete_issues", "members_can_invite_outside_collaborators",
                  "members_can_create_teams", "members_can_create_pages", "readers_can_create_discussions",
                  "web_commit_signoff_required", "has_organization_projects", "has_repository_projects",
                  "dependabot_alerts_enabled_for_new_repositories",
                  "dependabot_security_updates_enabled_for_new_repositories",
                  "dependency_graph_enabled_for_new_repositories"]:
        if field in settings:
            cmd.extend(["-F", f"{field}={str(settings[field]).lower()}"])
            applied.append(field)

    # BUG FIX: the base command already has 5 elements, so the previous
    # `len(cmd) > 4` guard was always true and issued an empty PATCH
    # even when no field matched. Gate on collected fields instead.
    if applied:
        sh(cmd, dry)
        console.print(f"[green]Org settings:[/green] {len(applied)} settings applied")
|
|
|
def apply_repo_settings(org, name, settings, is_private, is_fork, dry):
    """Apply repository settings via PATCH /repos/{org}/{name}.

    *notable* collects settings that differ from typical defaults so the
    log line highlights the interesting ones. Forks cannot control
    allow_forking (inherited from the parent), and it only applies to
    public repos, so it is conditionally included.
    """
    if not settings:
        return

    # Build API call with repo settings; remember the base length so we
    # can tell later whether any field was actually added.
    cmd = ["gh", "api", "-X", "PATCH", f"repos/{org}/{name}"]
    base_len = len(cmd)
    notable = []

    # String-valued fields
    for field in ["description", "homepage"]:
        if field in settings and settings[field]:
            cmd.extend(["-f", f"{field}={settings[field]}"])

    # Boolean fields; track noteworthy deviations for the log message
    for field in ["has_issues", "has_projects", "has_wiki", "has_discussions",
                  "allow_squash_merge", "allow_merge_commit", "allow_rebase_merge",
                  "allow_auto_merge", "delete_branch_on_merge", "allow_update_branch",
                  "web_commit_signoff_required", "archived", "has_downloads", "is_template"]:
        if field in settings:
            cmd.extend(["-F", f"{field}={str(settings[field]).lower()}"])
            if field in ["has_discussions", "has_wiki", "archived", "is_template"] and settings[field]:
                notable.append(f"{field}=true")
            elif field in ["has_issues", "has_projects", "has_downloads"] and not settings[field]:
                notable.append(f"{field}=false")

    # Only include allow_forking for public, non-fork repos
    # (forks inherit their forking settings from the parent)
    if not is_private and not is_fork and "allow_forking" in settings:
        cmd.extend(["-F", f"allow_forking={str(settings['allow_forking']).lower()}"])

    # BUG FIX: the base command already has 5 elements, so the previous
    # `len(cmd) > 4` guard was always true and issued an empty PATCH
    # when no field matched. Compare against the recorded base length.
    if len(cmd) > base_len:
        sh(cmd, dry, check=False)
        if notable:
            console.print(f"[dim]Repo settings: {', '.join(notable)}[/dim]")
        else:
            console.print("[dim]Repo settings: defaults applied[/dim]")
|
|
|
def apply_pages_settings(org, name, pages_config, is_private, dry):
    """Apply GitHub Pages configuration - idempotent.

    Reads the current Pages config (404 => not configured), then only
    issues write calls when the target differs: create (POST) or update
    (PUT) the site, optionally enforce HTTPS for a custom domain, or
    delete the site when disabled. Enabling is skipped for private repos
    (Pages on private repos requires a paid plan).
    """
    if not pages_config:
        return

    enabled = pages_config.get("enabled", False)

    # Check if Pages currently exists and get current config
    pages_exists = False
    current_pages = None
    try:
        result = sh(["gh", "api", f"repos/{org}/{name}/pages"], check=True)
        if result:
            current_pages = json.loads(result)
            pages_exists = True
    except Exception:
        pass  # API 404s when Pages is not set up — treated as "absent"

    # Handle enabling Pages
    if enabled:
        if is_private:
            console.print(f"[dim]Pages skipped: {name} is private (requires paid plan)[/dim]")
            return

        # Get target configuration (defaults: workflow build from main:/docs)
        source = pages_config.get("source", {})
        target_branch = source.get("branch", "main")
        target_path = source.get("path", "/docs")
        target_build_type = pages_config.get("build_type", "workflow")

        # Get target custom domain
        target_cname = pages_config.get("cname")

        # Check if Pages needs updating: any drift in build type, source
        # branch/path, or custom domain triggers a write
        needs_update = not pages_exists
        if pages_exists and current_pages:
            current_build = current_pages.get("build_type")
            current_source = current_pages.get("source", {})
            current_branch = current_source.get("branch")
            current_path = current_source.get("path")
            current_cname = current_pages.get("cname")

            if (current_build != target_build_type or
                current_branch != target_branch or
                current_path != target_path or
                current_cname != target_cname):
                needs_update = True

        if needs_update:
            # Determine method: POST to create, PUT to update
            method = "PUT" if pages_exists else "POST"
            cmd = ["gh", "api", "-X", method, f"repos/{org}/{name}/pages"]

            cmd.extend(["-F", f"source[branch]={target_branch}"])
            cmd.extend(["-F", f"source[path]={target_path}"])
            cmd.extend(["-F", f"build_type={target_build_type}"])

            # Add custom domain if specified
            if target_cname:
                cmd.extend(["-F", f"cname={target_cname}"])

            sh(cmd, dry, check=False)
            action = "updated" if pages_exists else "enabled"
            console.print(f"[green]Pages {action}: {target_build_type} from {target_branch}{target_path}[/green]")
        else:
            console.print(f"[dim]Pages unchanged: {target_build_type} from {target_branch}{target_path}[/dim]")

        # Handle HTTPS enforcement (separate API call, only if Pages exists and custom domain is set)
        if pages_exists and target_cname:
            target_https = pages_config.get("https_enforced", True)
            current_https = current_pages.get("https_enforced") if current_pages else False

            if target_https != current_https:
                # Note: HTTPS can only be enforced after DNS is configured and certificate is issued
                # This API call may fail if certificate is not ready yet (hence check=False)
                sh(["gh", "api", "-X", "PUT", f"repos/{org}/{name}/pages",
                    "-F", f"https_enforced={str(target_https).lower()}"], dry, check=False)
                console.print(f"[green]HTTPS enforcement: {target_https}[/green]")
            elif target_https:
                console.print("[dim]HTTPS already enforced[/dim]")

    # Handle disabling Pages (only when a site currently exists)
    elif pages_exists and not enabled:
        sh(["gh", "api", "-X", "DELETE", f"repos/{org}/{name}/pages"], dry)
        console.print("[dim]Pages disabled[/dim]")
|
|
|
def ensure_members(org, name, members, dry):
    """Grant each listed collaborator their permission (default: push)."""
    for entry in members or []:
        user = entry["user"]
        perm = entry.get("permission","push")
        sh(["gh","api","-X","PUT", f"repos/{org}/{name}/collaborators/{user}",
            "-f", f"permission={perm}"], dry)
        console.print(f"Member {user} <= {perm}")
|
|
|
def set_deploy_key_secret(org, name, is_private, dry):
    """Set CATALOG_PUBLISH_TOKEN secret for private repos"""
    if not is_private:
        return

    # Nothing to do when the local deploy-key file is absent
    key_file = Path("deploy-key")
    if not key_file.exists():
        console.print("[dim]Deploy key not found, skipping secret setup[/dim]")
        return

    try:
        private_key = key_file.read_text()
        # Push the key material as a repo secret via the gh CLI
        sh(["gh", "secret", "set", "CATALOG_PUBLISH_TOKEN",
            "--repo", f"{org}/{name}", "--body", private_key], dry, check=False)
        console.print("[green]Deploy key secret set for private repo[/green]")
    except Exception as e:
        console.print(f"[yellow]Failed to set deploy key secret: {e}[/yellow]")
|
|
|
def team_exists(org, team_slug) -> bool:
    """Check if team exists by slug"""
    try:
        sh(["gh", "api", f"orgs/{org}/teams/{team_slug}"], check=True)
    except Exception:
        return False
    return True
|
|
|
def ensure_teams(org, teams_config, dry):
    """Create and configure teams - idempotent.

    For each team entry: create (POST) when missing, otherwise update
    (PATCH) name/privacy/description in place, then grant the team its
    per-repo permissions.
    """
    if not teams_config:
        return

    console.print("\n[bold yellow]Teams Management[/bold yellow]")

    # Map manifest permission names to GitHub API permission values.
    # The API accepts: pull, push, admin, maintain, triage.
    # Hoisted out of the loops — the old code rebuilt it per repo.
    permission_map = {
        "read": "pull",
        "write": "push",
        "admin": "admin",
        "maintain": "maintain",
        "triage": "triage"
    }

    for team in teams_config:
        team_name = team["name"]
        team_slug = team_name.lower().replace(" ", "-")
        privacy = team.get("privacy", "closed")
        description = team.get("description", "")
        repos = team.get("repos", {})

        # Check if team exists
        exists = team_exists(org, team_slug)

        # POST creates a new team; PATCH updates an existing one in place.
        if exists:
            cmd = ["gh", "api", "-X", "PATCH", f"orgs/{org}/teams/{team_slug}",
                   "-f", f"name={team_name}",
                   "-f", f"privacy={privacy}"]
        else:
            cmd = ["gh", "api", "-X", "POST", f"orgs/{org}/teams",
                   "-f", f"name={team_name}",
                   "-f", f"privacy={privacy}"]
        if description:
            cmd.extend(["-f", f"description={description}"])

        sh(cmd, dry, check=False)
        if exists:
            console.print(f"[dim]Team exists: {team_name}[/dim]")
        else:
            console.print(f"[green]Created team: {team_name}[/green]")

        # Assign repository permissions
        for repo_name, permission in repos.items():
            # Unknown names pass through lowercased (API rejects invalid ones)
            api_permission = permission_map.get(permission.lower(), permission.lower())

            cmd = ["gh", "api", "-X", "PUT",
                   f"orgs/{org}/teams/{team_slug}/repos/{org}/{repo_name}",
                   "-f", f"permission={api_permission}"]
            sh(cmd, dry, check=False)
            console.print(f"  [dim]+ {repo_name}: {permission}[/dim]")
|
|
|
def main(
    manifest: Path = typer.Option(None, "--manifest", "-m", help="Path to manifest JSON (auto-detected from .org file if not specified)"),
    dry: bool = typer.Option(False, "--dry/--no-dry", help="Plan only (no writes)"),
    sync: bool = typer.Option(True, "--sync/--no-sync", help="Sync forks with upstream (default: True)"),
    sync_only: bool = typer.Option(False, "--sync-only", help="Only sync existing forks, skip repo creation"),
    force_sync: bool = typer.Option(False, "--force-sync", help="Force sync (hard reset to upstream)"),
    inspect_delta: bool = typer.Option(False, "--inspect-delta", help="Show detailed diff between fork and upstream")
):
    """Apply the org/repo manifest (infrastructure-as-code entry point).

    Loads a JSON manifest describing org settings, teams and repos, then
    reconciles GitHub to match: applies org settings, ensures teams,
    creates/seeds repos (or updates fork metadata), applies repo/Pages
    settings, collaborators and secrets, and optionally inspects/syncs
    forks against their upstream.
    """
    # Auto-detect manifest from .org flag file if not specified
    if manifest is None:
        org_file = Path(".org")
        if org_file.exists():
            # .org contains just the org name; manifest is <org>.json
            org_name = org_file.read_text().strip()
            manifest = Path(f"{org_name}.json")
            console.print(f"[dim]Using manifest from .org file: {manifest}[/dim]")
        else:
            console.print(f"[red]No manifest specified and .org file not found[/red]")
            console.print(f"[yellow]Create .org file with org name, or use --manifest flag[/yellow]")
            raise typer.Exit(1)

    if not manifest.exists():
        console.print(f"[red]Missing manifest: {manifest}[/red]")
        raise typer.Exit(1)

    # Manifest sections: org-wide settings plus per-repo defaults that
    # individual repo entries can override.
    cfg = json.loads(manifest.read_text())
    org = cfg["org"]
    org_settings = cfg.get("org_settings", {})
    dfl = cfg.get("defaults", {})
    repo_defaults = cfg.get("repo_defaults", {})
    pages_defaults = cfg.get("pages_defaults", {})
    template_repo = cfg.get("template_repo", "api-template")  # <--- template repository
    base = Path.cwd()

    console.rule(f"[bold yellow]Apply (IaC) {org}[/bold yellow]")

    # Apply org-wide settings first
    if org_settings:
        apply_org_settings(org, org_settings, dry)

    # Create and configure teams
    teams_config = cfg.get("teams", [])
    if teams_config:
        ensure_teams(org, teams_config, dry)

    for r in cfg["repos"]:
        name = r["name"]
        desc = r.get("description","")
        topics = r.get("topics",[])

        # visibility precedence: repo.visibility > repo.private(bool) > defaults.visibility > "private"
        if "visibility" in r:
            vis = str(r["visibility"]).lower()
        elif "private" in r:
            vis = "private" if r["private"] else "public"
        else:
            vis = str(dfl.get("visibility","private")).lower()

        default_branch = r.get("default_branch") or dfl.get("default_branch") or "main"
        init_readme = bool(r.get("init_readme", dfl.get("init_readme", True)))
        members = r.get("members", [])
        upstream = r.get("upstream")
        sync_branch = r.get("sync_branch") or default_branch

        # Merge repo_defaults with per-repo settings (per-repo overrides defaults)
        repo_settings = {**repo_defaults, **{k: v for k, v in r.items()
                         if k in ["homepage", "has_issues", "has_projects", "has_wiki", "has_discussions",
                                  "allow_squash_merge", "allow_merge_commit", "allow_rebase_merge",
                                  "allow_auto_merge", "delete_branch_on_merge", "allow_update_branch",
                                  "allow_forking", "web_commit_signoff_required", "archived",
                                  "has_downloads", "is_template"]}}

        # Merge pages_defaults with per-repo pages config (per-repo overrides defaults)
        pages_config = None
        if "pages" in r:
            pages_config = {**pages_defaults, **r["pages"]}
        elif pages_defaults.get("enabled", False):
            pages_config = pages_defaults

        console.print(f"[bold cyan]=> {name}[/bold cyan]")

        # Detect if this is a fork (has upstream field)
        is_fork = bool(upstream)

        if not sync_only:
            if is_fork:
                # Forks already exist on GitHub - just verify and update metadata
                console.print(f"[dim]Fork of {upstream} - skipping creation[/dim]")
                if desc or topics:
                    if desc:
                        sh(["gh","repo","edit",f"{org}/{name}","--description",desc], dry, check=False)
                    if topics:
                        sh(["gh","repo","edit",f"{org}/{name}","--add-topic",",".join(topics)], dry, check=False)
            else:
                # Regular repos - full creation workflow
                # 1. Create remote repo
                ensure_repo(org, name, vis, desc, topics, dry)
                # 2. Clone locally and seed only if init_readme is true (new repos)
                if init_readme:
                    local = ensure_local_clone(base, org, name, dry)
                    seed_repo(local, name, org, template_repo, dry)

            # Common operations for both forks and regular repos
            # 3. Set default branch
            set_default_branch(org, name, default_branch, dry)
            # 4. Apply repo settings
            is_private = (vis == "private")
            apply_repo_settings(org, name, repo_settings, is_private, is_fork, dry)
            # 5. Apply Pages settings
            apply_pages_settings(org, name, pages_config, is_private, dry)
            # 6. Add collaborators
            ensure_members(org, name, members, dry)
            # 7. Set deploy key secret for private repos
            set_deploy_key_secret(org, name, is_private, dry)

        # 8. Inspect delta if requested (clones locally as needed)
        if inspect_delta and upstream:
            inspect_fork_delta(org, name, upstream, sync_branch, base)

        # 9. Sync fork with upstream if configured
        if sync and upstream:
            sync_fork(org, name, upstream, sync_branch, force_sync, dry)

    console.rule("[bold green]Done[/bold green]")
|
|
|
# CLI entry point: let typer parse arguments and invoke main().
if __name__ == "__main__":
    typer.run(main)