Deploy new apps or push updates to existing deployments via Docker Compose + Caddy + Gitea webhooks. Multi-server profiles, auto-detection of deployment status, full infrastructure provisioning. - SKILL.md: 715-line workflow documentation - scripts/detect_deployment.py: deployment status detection - scripts/validate_compose.py: compose file validation - references/: infrastructure, compose patterns, Caddy patterns - assets/: Makefile and compose templates - config.json: mew server profile
404 lines
13 KiB
Python
#!/usr/bin/env python3
"""
detect_deployment.py — Detect whether an app is already deployed.

Checks three signals:
1. Deploy map — repo entry in /etc/deploy-listener/deploy-map.json
2. Gitea — repo exists on the Gitea instance
3. Caddy — a Caddyfile reverse_proxy maps to the app's container name

Outputs JSON to stdout and uses exit code to indicate status:
0 — deployed (at least deploy_map + gitea match)
1 — not deployed

Usage:
    python3 detect_deployment.py --repo-name darren/my-app [--config path/to/config.json]

Cross-platform: works on Linux and Windows (WSL / Git Bash).
Stdlib only — no pip dependencies.
"""
from __future__ import annotations

import argparse
import json
import os
import platform
import re
import shlex
import socket
import subprocess
import sys
import urllib.error
import urllib.request
from pathlib import Path
from typing import Any
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------

def _load_json(path: str | Path) -> Any:
    """Parse the JSON document at *path* and return the resulting object."""
    with open(path, encoding="utf-8") as handle:
        return json.load(handle)
|
|
|
|
|
|
def _resolve_config_path(explicit: str | None, script_dir: Path) -> Path:
    """Locate config.json: an explicit (truthy) path wins; otherwise fall
    back to the default location one directory above *script_dir*."""
    if not explicit:
        return (script_dir.parent / "config.json").resolve()
    return Path(explicit).resolve()
|
|
|
|
|
|
def _is_local(server_hostname: str) -> bool:
    """Return True when this process appears to be running on the target server."""
    my_name = socket.gethostname().lower()
    if my_name == server_hostname.lower():
        return True
    # Hostname mismatch — fall back to comparing resolved IP addresses so
    # aliases of the same machine still count as local.
    try:
        target_ip = socket.gethostbyname(server_hostname)
        my_ips = socket.gethostbyname_ex(socket.gethostname())[2]
    except (socket.gaierror, socket.herror, OSError):
        return False
    return target_ip in my_ips
|
|
|
|
|
|
def _run_local(cmd: list[str], timeout: int = 15) -> tuple[int, str, str]:
    """Run *cmd* on this machine and return (returncode, stdout, stderr).

    Never raises: a missing executable maps to rc 127 and a timeout to
    rc 124 (mirroring the conventional shell exit codes).
    """
    try:
        completed = subprocess.run(
            cmd, capture_output=True, text=True, timeout=timeout,
        )
    except FileNotFoundError:
        return 127, "", f"Command not found: {cmd[0]}"
    except subprocess.TimeoutExpired:
        return 124, "", f"Command timed out after {timeout}s"
    return completed.returncode, completed.stdout, completed.stderr
|
|
|
|
|
|
def _run_ssh(ssh_user: str, ssh_host: str, remote_cmd: str, timeout: int = 15) -> tuple[int, str, str]:
    """Run *remote_cmd* on the server over SSH; return (returncode, stdout, stderr).

    BatchMode keeps ssh from prompting for a password, so the call fails
    fast instead of hanging in non-interactive use.
    """
    target = ssh_host if not ssh_user else f"{ssh_user}@{ssh_host}"
    invocation = [
        "ssh",
        "-o", "BatchMode=yes",
        "-o", "ConnectTimeout=10",
        "-o", "StrictHostKeyChecking=accept-new",
        target,
        remote_cmd,
    ]
    return _run_local(invocation, timeout=timeout)
|
|
|
|
|
|
def _read_remote_file(ssh_user: str, ssh_host: str, remote_path: str) -> str | None:
    """Read a file on the remote server via SSH.

    Returns the file contents, or None when the remote command fails
    (missing file, permission denied, connection problem, ...).
    """
    # Quote the path so spaces or shell metacharacters cannot break — or
    # inject into — the command line executed by the remote shell.
    rc, stdout, _stderr = _run_ssh(ssh_user, ssh_host, f"cat {shlex.quote(remote_path)}")
    return stdout if rc == 0 else None
|
|
|
|
|
|
def _read_file_local_or_remote(
    path: str,
    is_local: bool,
    ssh_user: str,
    ssh_host: str,
) -> str | None:
    """Fetch *path* — direct filesystem read when running on the server,
    otherwise over SSH. Returns None on any read failure."""
    if not is_local:
        return _read_remote_file(ssh_user, ssh_host, path)
    try:
        return Path(path).read_text(encoding="utf-8")
    except (OSError, UnicodeDecodeError):
        return None
|
|
|
|
|
|
def _read_gitea_token(token_path: str) -> str | None:
    """Return the Gitea API token stored at *token_path* (whitespace
    stripped), or None when the file cannot be read."""
    try:
        raw = Path(token_path).expanduser().read_text(encoding="utf-8")
    except (OSError, UnicodeDecodeError):
        return None
    return raw.strip()
|
|
|
|
|
|
# ---------------------------------------------------------------------------
# Signal 1: Deploy Map
# ---------------------------------------------------------------------------

def check_deploy_map(
    repo_name: str,
    deploy_map_path: str,
    is_local: bool,
    ssh_user: str,
    ssh_host: str,
) -> tuple[bool, dict[str, Any]]:
    """
    Check if *repo_name* exists in the deploy map JSON file.

    Returns (found: bool, details: dict).
    """
    details: dict[str, Any] = {}

    raw = _read_file_local_or_remote(deploy_map_path, is_local, ssh_user, ssh_host)
    if raw is None:
        details["error"] = "Could not read deploy map"
        return False, details

    try:
        deploy_map = json.loads(raw)
    except json.JSONDecodeError as exc:
        details["error"] = f"Invalid JSON in deploy map: {exc}"
        return False, details

    # The deploy map may use different key formats — check common patterns:
    # "owner/repo", "repo", or nested by owner. Match either the full
    # lowercase name or just the short repo name.
    wanted_full = repo_name.lower()
    wanted_short = wanted_full.split("/")[-1]
    candidates = {wanted_full, wanted_short}

    # Shape 1: flat dict keyed by "owner/repo" (or bare repo name).
    if isinstance(deploy_map, dict):
        for key, value in deploy_map.items():
            if key.lower() not in candidates:
                continue
            details["matched_key"] = key
            if isinstance(value, dict):
                details["stack_dir"] = value.get("stack_dir", value.get("path", ""))
            else:
                details["stack_dir"] = str(value)
            return True, details

    # Shape 2: list of entries carrying a "repo"/"repository" field.
    if isinstance(deploy_map, list):
        for entry in deploy_map:
            if not isinstance(entry, dict):
                continue
            entry_name = (entry.get("repo") or entry.get("repository") or "").lower()
            if entry_name in candidates:
                details["matched_key"] = entry_name
                details["stack_dir"] = entry.get("stack_dir", entry.get("path", ""))
                return True, details

    return False, details
|
|
|
|
|
|
# ---------------------------------------------------------------------------
# Signal 2: Gitea
# ---------------------------------------------------------------------------

def check_gitea(
    repo_name: str,
    gitea_url: str,
    gitea_token: str | None,
) -> tuple[bool, dict[str, Any]]:
    """
    Check if *repo_name* (owner/repo) exists on Gitea.

    Returns (exists: bool, details: dict).
    """
    details: dict[str, Any] = {}

    # The API path requires a distinct owner and repo segment.
    if "/" not in repo_name:
        details["error"] = "repo_name must be in owner/repo format for Gitea check"
        return False, details

    owner, repo = repo_name.split("/", 1)
    api_url = f"{gitea_url.rstrip('/')}/api/v1/repos/{owner}/{repo}"
    details["gitea_url"] = api_url

    request = urllib.request.Request(api_url, method="GET")
    request.add_header("Accept", "application/json")
    if gitea_token:
        request.add_header("Authorization", f"token {gitea_token}")

    try:
        with urllib.request.urlopen(request, timeout=15) as resp:
            if resp.status == 200:
                payload = json.loads(resp.read().decode("utf-8"))
                details["full_name"] = payload.get("full_name", "")
                details["html_url"] = payload.get("html_url", "")
                details["description"] = payload.get("description", "")
                return True, details
    except urllib.error.HTTPError as exc:
        # 404 means "repo does not exist" — not an error condition.
        if exc.code == 404:
            details["status"] = 404
        else:
            details["error"] = f"HTTP {exc.code}: {exc.reason}"
        return False, details
    except urllib.error.URLError as exc:
        details["error"] = f"URL error: {exc.reason}"
        return False, details
    except OSError as exc:
        details["error"] = f"Connection error: {exc}"
        return False, details

    # Reached on a non-200 success status — treat as "not found".
    return False, details
|
|
|
|
|
|
# ---------------------------------------------------------------------------
# Signal 3: Caddy
# ---------------------------------------------------------------------------

def check_caddy(
    repo_name: str,
    caddyfile_path: str,
    is_local: bool,
    ssh_user: str,
    ssh_host: str,
) -> tuple[bool, dict[str, Any]]:
    """
    Check if the Caddyfile has a reverse_proxy pointing to this app's container.

    Heuristic: look for the container name (short repo name) in reverse_proxy
    directives or upstream blocks.

    Returns (found: bool, details: dict).
    """
    details: dict[str, Any] = {}
    container_name = repo_name.lower().split("/")[-1]

    raw = _read_file_local_or_remote(caddyfile_path, is_local, ssh_user, ssh_host)
    if raw is None:
        details["error"] = "Could not read Caddyfile"
        return False, details

    # Scan for:
    #   reverse_proxy <container_name>:<port>
    #   reverse_proxy http://<container_name>:<port>
    # while tracking the most recent site-block header so the matching
    # domain can be reported alongside the hit.
    domain_re = re.compile(r"^(\S+\.\S+)\s*\{?\s*$")
    proxy_re = re.compile(
        r"reverse_proxy\s+(?:https?://)?" + re.escape(container_name) + r"[:\s]",
        re.IGNORECASE,
    )

    site_domain = ""
    for raw_line in raw.splitlines():
        text = raw_line.strip()
        header = domain_re.match(text)
        if header:
            site_domain = header.group(1)
        if proxy_re.search(text):
            details["domain"] = site_domain
            details["matched_line"] = text
            details["container_name"] = container_name
            return True, details

    return False, details
|
|
|
|
|
|
# ---------------------------------------------------------------------------
# Main orchestration
# ---------------------------------------------------------------------------

def detect(repo_name: str, config: dict[str, Any]) -> dict[str, Any]:
    """Run all three detection signals and return the combined result dict."""
    server_cfg = config.get("server", {})
    ssh_host = server_cfg.get("ssh_host", "")
    ssh_user = server_cfg.get("ssh_user", "")
    hostname = server_cfg.get("hostname", ssh_host)
    deploy_map_path = server_cfg.get("deploy_map_path", "/etc/deploy-listener/deploy-map.json")
    caddyfile_path = server_cfg.get("caddyfile_path", "/etc/caddy/Caddyfile")

    gitea_url = config.get("gitea", {}).get("url", "")
    token_path = config.get("secrets", {}).get("gitea_token", "")
    token = _read_gitea_token(token_path) if token_path else None

    on_server = _is_local(hostname)

    # Run the three signals in order: deploy map, Gitea, Caddy.
    map_hit, map_info = check_deploy_map(
        repo_name, deploy_map_path, on_server, ssh_user, ssh_host,
    )
    gitea_hit, gitea_info = check_gitea(repo_name, gitea_url, token)
    caddy_hit, caddy_info = check_caddy(
        repo_name, caddyfile_path, on_server, ssh_user, ssh_host,
    )

    # Surface the most useful detail from each signal, when present.
    combined: dict[str, Any] = {}
    if map_info.get("stack_dir"):
        combined["stack_dir"] = map_info["stack_dir"]
    if caddy_info.get("domain"):
        combined["domain"] = caddy_info["domain"]
    if gitea_info.get("html_url"):
        combined["gitea_url"] = gitea_info["html_url"]

    # "Deployed" requires deploy map AND Gitea to agree; Caddy is advisory.
    return {
        "deployed": map_hit and gitea_hit,
        "signals": {
            "deploy_map": map_hit,
            "gitea": gitea_hit,
            "caddy": caddy_hit,
        },
        "details": combined,
    }
|
|
|
|
|
|
# ---------------------------------------------------------------------------
# CLI entry point
# ---------------------------------------------------------------------------

def main() -> None:
    """Parse CLI args, load config, run detection, print JSON, set exit code."""

    def _die(message: str) -> None:
        # Emit a JSON error object on stderr and exit with status 1.
        print(json.dumps({"error": message}), file=sys.stderr)
        sys.exit(1)

    parser = argparse.ArgumentParser(
        description="Detect whether an app is already deployed.",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__,
    )
    parser.add_argument(
        "--repo-name",
        required=True,
        help="Repository name in owner/repo format (e.g. darren/my-app).",
    )
    parser.add_argument(
        "--config",
        default=None,
        help="Path to config.json. Default: <script_dir>/../config.json",
    )
    args = parser.parse_args()

    config_path = _resolve_config_path(args.config, Path(__file__).resolve().parent)
    if not config_path.exists():
        _die(f"Config file not found: {config_path}")

    try:
        config = _load_json(config_path)
    except (json.JSONDecodeError, OSError) as exc:
        _die(f"Failed to load config: {exc}")

    result = detect(args.repo_name, config)

    # Machine-readable result on stdout.
    print(json.dumps(result, indent=2))

    # Exit code: 0 = deployed, 1 = not deployed.
    sys.exit(0 if result["deployed"] else 1)
|
|
|
|
|
|
# Script entry point — run the CLI only when executed directly, not on import.
if __name__ == "__main__":
    main()
|