docs: add automated compose documentation generation pipeline
This commit is contained in:
@@ -0,0 +1,55 @@
|
||||
# Builds the generated documentation set (inventories, route tables, diagrams,
# sanitized public copy) without starting any containers; the heavy lifting is
# delegated to scripts/docs/generate-all.sh.
name: Generate documentation

on:
  push:
    branches: [main]
    # Ignore pushes that only touch generated output, so an auto-commit of
    # regenerated docs cannot retrigger this workflow in a loop.
    paths-ignore:
      - "docs/generated/**"
      - "docs/diagrams/**"
      - "docs/public/**"
  pull_request:
    branches: [main]
    paths-ignore:
      - "docs/generated/**"
      - "docs/diagrams/**"
      - "docs/public/**"
  workflow_dispatch:
    inputs:
      # Opt-in switch: only a manual dispatch may write docs back to the branch.
      commit_generated_docs:
        description: "Commit generated docs back to the branch"
        required: false
        default: "false"
        type: choice
        options: ["false", "true"]

# contents: write is required only by the optional auto-commit step below;
# all other steps are read-only.
permissions:
  contents: write

jobs:
  generate-docs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install tooling
        run: |
          sudo apt-get update
          # NOTE(review): docker-compose-plugin is distributed from Docker's
          # apt repository — presumably the runner image has that repo
          # configured; confirm, since the runner already ships
          # `docker compose` and this install could otherwise fail.
          sudo apt-get install -y docker-compose-plugin graphviz jq python3 python3-pip
          python3 -m pip install --user pyyaml jinja2
      - name: Generate documentation
        run: |
          chmod +x scripts/docs/*.sh
          scripts/docs/generate-all.sh
      - name: Upload generated documentation
        uses: actions/upload-artifact@v4
        with:
          name: generated-documentation
          path: |
            docs/generated
            docs/diagrams
            docs/public
      - name: Commit generated docs
        # Runs only on manual dispatch with the explicit opt-in input set.
        if: github.event_name == 'workflow_dispatch' && inputs.commit_generated_docs == 'true'
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "docs: regenerate environment documentation"
          file_pattern: docs/generated docs/diagrams docs/public
|
||||
@@ -0,0 +1,25 @@
|
||||
# Generated Documentation

## Local generation

```bash
chmod +x scripts/docs/*.sh
scripts/docs/generate-all.sh
```

This pipeline only runs `docker compose config` and static parsing. It does **not** start containers.

## CI behaviour

The GitHub Actions workflow `.github/workflows/generate-docs.yml` runs on pushes/PRs to `main` and on manual dispatch. It generates the docs and uploads them as the `generated-documentation` artifact.

## Outputs

- `docs/generated`: resolved compose config and markdown inventories
- `docs/diagrams`: DOT and SVG architecture diagram
- `docs/public`: sanitized copy for public sharing

## Publication safety

- `docs/public` is intended for public sharing after sanitization.
- `docs/generated` and `docs/diagrams` may include internal details and should be treated as internal by default.
|
||||
@@ -0,0 +1,4 @@
|
||||
# Default environment used by scripts/docs/render-compose-config.sh when the
# repository's own default-environment.env is absent (i.e. in CI).
# Values are neutral placeholders — no real deployment configuration.
PROJECT_ROOT=.
TZ=UTC
DOMAIN=example.internal
# Points compose at placeholder secrets so `docker compose config` resolves.
SECRETS_ENV=scripts/docs/ci-secrets-placeholder.env
|
||||
@@ -0,0 +1,2 @@
|
||||
# Placeholder secrets for docs generation in CI. Never put real values here;
# these exist only so variable interpolation in compose files does not fail.
EXAMPLE_PASSWORD=placeholder
EXAMPLE_TOKEN=placeholder
|
||||
Executable
+11
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env bash
# Run the full documentation pipeline: resolve the compose configuration,
# then derive inventories, Traefik route tables, Prometheus rule summaries,
# diagrams, and a sanitized public copy. Static analysis only — no
# containers are started.
set -euo pipefail

repo_root="$(git rev-parse --show-toplevel 2>/dev/null || pwd)"
cd "$repo_root"

mkdir -p docs/generated docs/diagrams docs/public

# Every generator below consumes the resolved config, so render it first.
scripts/docs/render-compose-config.sh

resolved="docs/generated/docker-compose.resolved.yml"
python3 scripts/docs/generate-compose-inventory.py "$resolved" docs/generated/compose-inventory.md
python3 scripts/docs/generate-traefik-routes.py "$resolved" docs/generated/traefik-routes.md
python3 scripts/docs/generate-prometheus-rules.py docs/generated/prometheus-rules.md
python3 scripts/docs/generate-diagrams.py "$resolved" docs/diagrams/docker-compose.dot docs/diagrams/docker-compose.svg
python3 scripts/docs/sanitize-public-docs.py docs/generated docs/diagrams docs/public
|
||||
@@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env python3
"""Render a markdown inventory of a resolved docker compose configuration.

Usage:
    generate-compose-inventory.py <docker-compose.resolved.yml> <output.md>

Reads the file produced by ``docker compose config`` and writes markdown
tables summarizing services, networks and volumes.
"""
import sys
from datetime import datetime, timezone

import yaml


def md(value):
    """Escape ``|`` so *value* can sit inside a markdown table cell."""
    return str(value).replace('|', '\\|') if value is not None else ''


def _service_row(name, svc):
    """Build one markdown table row for a single service definition."""
    build = svc.get('build', '')
    if isinstance(build, dict):
        # Long-form build sections: report just the context directory.
        build = build.get('context', '')
    ports = ', '.join(str(p) for p in svc.get('ports', []))
    nets = svc.get('networks')
    # Compose allows networks as either a mapping or a plain list.
    networks = ', '.join(nets.keys() if isinstance(nets, dict) else (nets or []))
    profiles = ', '.join(svc.get('profiles', []) or [])
    return (
        f"| {md(name)} | {md(svc.get('container_name', ''))} "
        f"| {md(svc.get('image', ''))} | {md(build)} | {md(profiles)} "
        f"| {md(networks)} | {md(ports)} | {md(svc.get('restart', ''))} |"
    )


def render_inventory(config):
    """Return the complete markdown document for *config* (parsed compose dict)."""
    svcs = config.get('services', {}) or {}
    nets = config.get('networks', {}) or {}
    vols = config.get('volumes', {}) or {}
    stamp = datetime.now(timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
    lines = [
        "# Docker Compose Inventory",
        "",
        f"Generated: {stamp}",
        "",
        "## Summary",
        "",
        "| Item | Count |",
        "|---|---:|",
        f"| Services | {len(svcs)} |",
        f"| Networks | {len(nets)} |",
        f"| Volumes | {len(vols)} |",
        "",
        "## Services",
        "",
        "| Service | Container | Image | Build | Profiles | Networks | Ports | Restart |",
        "|---|---|---|---|---|---|---|---|",
    ]
    lines.extend(_service_row(n, s) for n, s in sorted(svcs.items()))
    lines += ["", "## Networks", "", "| Network | Driver | External |", "|---|---|---|"]
    for name, net in sorted(nets.items()):
        net = net or {}
        lines.append(f"| {md(name)} | {md(net.get('driver', ''))} | {md(net.get('external', False))} |")
    lines += ["", "## Volumes", "", "| Volume | External |", "|---|---|"]
    for name, vol in sorted(vols.items()):
        lines.append(f"| {md(name)} | {md((vol or {}).get('external', False))} |")
    return '\n'.join(lines) + '\n'


def main(argv):
    """CLI entry point: argv = [prog, input_yaml, output_md]."""
    inp, out = argv[1], argv[2]
    with open(inp) as f:
        config = yaml.safe_load(f) or {}
    # Context manager instead of open(...).write(...): the original relied on
    # CPython refcounting to close (and flush) the output handle.
    with open(out, 'w') as f:
        f.write(render_inventory(config))


if __name__ == '__main__':
    main(sys.argv)
|
||||
@@ -0,0 +1,18 @@
|
||||
#!/usr/bin/env python3
"""Render a service/network diagram (DOT + SVG) from a resolved compose file.

Usage:
    generate-diagrams.py <docker-compose.resolved.yml> <out.dot> <out.svg>

Writes the DOT source unconditionally; renders SVG via Graphviz when ``dot``
is on PATH, otherwise writes a placeholder SVG so the pipeline still produces
every expected artifact.
"""
import shutil
import subprocess
import sys

import yaml

# Emitted when Graphviz is unavailable; keeps downstream links from breaking.
_FALLBACK_SVG = (
    '<svg xmlns="http://www.w3.org/2000/svg" width="640" height="80">'
    '<text x="10" y="40">Graphviz dot not found in environment.</text></svg>\n'
)


def build_dot(config):
    """Return DOT source: one box per service, one ellipse per network, and
    an edge from each service to every network it joins."""
    services = config.get('services') or {}
    networks = config.get('networks') or {}
    lines = ["digraph Compose {", " rankdir=LR;", " node [fontname=Helvetica];"]
    for name in services:
        lines.append(f' "svc:{name}" [label="{name}", shape=box, style=filled, fillcolor="#dfefff"];')
    for name in networks:
        lines.append(f' "net:{name}" [label="{name}", shape=ellipse, style=filled, fillcolor="#f4f4f4"];')
    for name, svc in services.items():
        attached = svc.get('networks') or []
        # Compose allows networks as either a mapping or a plain list.
        if isinstance(attached, dict):
            attached = attached.keys()
        for net in attached:
            lines.append(f' "svc:{name}" -> "net:{net}";')
    lines.append("}")
    return '\n'.join(lines) + '\n'


def main(argv):
    """CLI entry point: argv = [prog, input_yaml, out_dot, out_svg]."""
    inp, dot_path, svg_path = argv[1], argv[2], argv[3]
    with open(inp) as f:
        config = yaml.safe_load(f) or {}
    # Context managers instead of open(...).write(...) so handles are closed
    # deterministically (the DOT file must be flushed before `dot` reads it).
    with open(dot_path, 'w') as f:
        f.write(build_dot(config))
    if shutil.which('dot'):
        subprocess.run(['dot', '-Tsvg', dot_path, '-o', svg_path], check=True)
    else:
        with open(svg_path, 'w') as f:
            f.write(_FALLBACK_SVG)


if __name__ == '__main__':
    main(sys.argv)
|
||||
@@ -0,0 +1,15 @@
|
||||
#!/usr/bin/env python3
"""Summarize all Prometheus rule files into one markdown table.

Usage:
    generate-prometheus-rules.py <output.md>

Searches well-known rule directories relative to the current working
directory (the caller cd's to the repo root first). A malformed rule file
aborts the run with a readable error pointing at the offending file.
"""
import glob
import sys

import yaml

# Glob patterns searched for rule files, most specific first.
_PATTERNS = [
    "monitoring/prometheus/rules/**/*.yml",
    "monitoring/prometheus/rules/**/*.yaml",
    "**/prometheus/rules/**/*.yml",
    "**/prometheus/rules/**/*.yaml",
]


def _escape(value):
    """Escape ``|`` so PromQL expressions survive a markdown table cell."""
    # Hoisted out of the f-string: a backslash inside an f-string expression
    # is a SyntaxError on Python < 3.12, so the original one-liner only
    # parsed on very recent interpreters.
    return str(value).replace('|', '\\|')


def _rule_rows(path):
    """Yield one markdown row per rule found in the file at *path*.

    Raises SystemExit with a clear message on malformed YAML so CI logs
    identify the broken file.
    """
    try:
        with open(path) as f:
            data = yaml.safe_load(f) or {}
    except Exception as exc:
        raise SystemExit(f"Malformed YAML in {path}: {exc}")
    for group in data.get('groups', []) or []:
        for rule in group.get('rules', []) or []:
            expr = _escape(rule.get('expr', ''))
            yield (
                f"| {path} | {group.get('name', '')} | {rule.get('alert', '')} "
                f"| {expr} | {rule.get('for', '')} | {rule.get('labels', {})} "
                f"| {rule.get('annotations', {})} |"
            )


def main(argv):
    """CLI entry point: argv = [prog, output_md]."""
    out = argv[1]
    files = sorted({f for p in _PATTERNS for f in glob.glob(p, recursive=True)})
    if not files:
        with open(out, 'w') as f:
            f.write("# Prometheus Rules\n\nNo Prometheus rule files were found.\n")
        return
    lines = [
        "# Prometheus Rules",
        "",
        "| File | Group | Alert | Expr | For | Labels | Annotations |",
        "|---|---|---|---|---|---|---|",
    ]
    for fp in files:
        lines.extend(_rule_rows(fp))
    with open(out, 'w') as f:
        f.write('\n'.join(lines) + '\n')


if __name__ == '__main__':
    main(sys.argv)
|
||||
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env python3
"""Extract Traefik router definitions from compose labels into markdown.

Usage:
    generate-traefik-routes.py <docker-compose.resolved.yml> <output.md>

Scans each service's labels for ``traefik.http.routers.*`` options and
``traefik.http.services.*.loadbalancer.server.port`` targets, and renders
one table row per router.
"""
import re
import sys

import yaml

# Compiled once (the original recompiled both patterns for every label).
_ROUTER_RE = re.compile(
    r'traefik\.http\.routers\.([^.]+)\.(rule|entrypoints|tls|middlewares)$'
)
_PORT_RE = re.compile(
    r'traefik\.http\.services\.([^.]+)\.loadbalancer\.server\.port$'
)


def _labels_as_dict(labels):
    """Normalize compose labels (list of ``k=v`` strings or a mapping) to a dict."""
    if isinstance(labels, list):
        parsed = {}
        for item in labels:
            text = str(item)
            if '=' in text:
                key, _, value = text.partition('=')
                parsed[key] = value
        return parsed
    return labels or {}


def collect_routes(config):
    """Return sorted route tuples for *config*:
    (service, router, rule, entrypoints, tls, middlewares, target_port)."""
    rows = []
    for sname, svc in (config.get('services') or {}).items():
        labels = _labels_as_dict(svc.get('labels'))
        routers, ports = {}, {}
        for key, value in labels.items():
            m = _ROUTER_RE.match(key)
            if m:
                routers.setdefault(m.group(1), {})[m.group(2)] = value
                continue
            m = _PORT_RE.match(key)
            if m:
                ports[m.group(1)] = value
        for router, opts in routers.items():
            rows.append((
                sname, router,
                opts.get('rule', ''), opts.get('entrypoints', ''),
                opts.get('tls', ''), opts.get('middlewares', ''),
                ports.get(router, ''),
            ))
    return sorted(rows)


def render(rows):
    """Render the markdown document; a friendly note when nothing was found."""
    if not rows:
        return "# Traefik Routes\n\nNo Traefik routes were detected.\n"
    lines = [
        "# Traefik Routes",
        "",
        "| Service | Router | Rule | Entrypoints | TLS | Middlewares | Target Port |",
        "|---|---|---|---|---|---|---|",
    ]
    for row in rows:
        cells = (str(cell).replace('|', '\\|') for cell in row)
        lines.append('| ' + ' | '.join(cells) + ' |')
    return '\n'.join(lines) + '\n'


def main(argv):
    """CLI entry point: argv = [prog, input_yaml, output_md]."""
    inp, out = argv[1], argv[2]
    with open(inp) as f:
        config = yaml.safe_load(f) or {}
    with open(out, 'w') as f:
        f.write(render(collect_routes(config)))


if __name__ == '__main__':
    main(sys.argv)
|
||||
Executable
+31
@@ -0,0 +1,31 @@
|
||||
#!/usr/bin/env bash
# Discover compose files for docs tooling and CI without running containers.
#
# Output: one relative compose-file path per line, de-duplicated, in the
# order discovered. Strategy:
#   1. Parse literal -f "$PROJECT_ROOT/..." entries from services-up.sh.
#   2. If services-up.sh also auto-discovers under apps/monitoring/core,
#      replicate that find.
#   3. Otherwise fall back to a repo-wide find.
set -euo pipefail

ROOT="$(git rev-parse --show-toplevel 2>/dev/null || pwd)"
cd "$ROOT"

declare -a files=()

if [ -f services-up.sh ]; then
  # Parse literal FILES array entries (e.g., default-network.yml) from the
  # FILES=( ... ) block. grep replaces the original rg (ripgrep) here: the
  # CI workflow never installs ripgrep, so the script must not depend on it.
  while IFS= read -r line; do
    path=$(sed -E 's#.*\$PROJECT_ROOT/([^" ]+).*#\1#' <<<"$line")
    [ -f "$path" ] && files+=("$path")
  done < <(awk '/^FILES=\(/,/^\)/ {print}' services-up.sh \
            | { grep -E -e '-f[[:space:]]+"\$PROJECT_ROOT/' || true; })

  # Replicate the auto-discovery services-up.sh performs, when present.
  if grep -qF 'find "$PROJECT_ROOT/apps" "$PROJECT_ROOT/monitoring" "$PROJECT_ROOT/core"' services-up.sh; then
    while IFS= read -r f; do files+=("$f"); done < <(
      find apps monitoring core -maxdepth 2 -type f \( -name 'docker-compose.yml' -o -name 'docker-compose.yaml' -o -name 'compose.yml' -o -name 'compose.yaml' \) | sed 's#^\./##' | sort
    )
  fi
fi

# Fallback: scan the whole tree when nothing was discovered above.
if [ "${#files[@]}" -eq 0 ]; then
  while IFS= read -r f; do files+=("$f"); done < <(
    find . -type f \( -name 'docker-compose.yml' -o -name 'docker-compose.yaml' -o -name 'compose.yml' -o -name 'compose.yaml' \) | sed 's#^\./##' | sort
  )
fi

# Drop blank lines and duplicates while preserving first-seen order. Guard
# the expansion so an empty array never trips `set -u` on older bash.
if [ "${#files[@]}" -gt 0 ]; then
  printf '%s\n' "${files[@]}" | awk 'NF' | awk '!seen[$0]++'
fi
|
||||
Executable
+25
@@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env bash
# Resolve every discovered compose file into a single canonical YAML via
# `docker compose config`. Writes:
#   docs/generated/compose-files.txt           - the list of inputs used
#   docs/generated/docker-compose.resolved.yml - the resolved configuration
set -euo pipefail

repo_root="$(git rev-parse --show-toplevel 2>/dev/null || pwd)"
cd "$repo_root"
mkdir -p docs/generated

mapfile -t COMPOSE_FILES < <(scripts/docs/list-compose-files.sh)
if [ "${#COMPOSE_FILES[@]}" -eq 0 ]; then
  echo "No compose files found" >&2
  exit 1
fi

# Record the inputs next to the output for traceability.
printf '%s\n' "${COMPOSE_FILES[@]}" > docs/generated/compose-files.txt

compose_args=()
for compose_file in "${COMPOSE_FILES[@]}"; do
  compose_args+=("-f" "$compose_file")
done

# Prefer the repository's own defaults; fall back to the CI placeholder env.
ENV_FILE="default-environment.env"
[ -f "$ENV_FILE" ] || ENV_FILE="scripts/docs/ci-default.env"
if [ ! -f "$ENV_FILE" ]; then
  echo "Environment file not found: $ENV_FILE" >&2
  exit 1
fi

docker compose -p core --env-file "$ENV_FILE" "${compose_args[@]}" config > docs/generated/docker-compose.resolved.yml
|
||||
@@ -0,0 +1,18 @@
|
||||
#!/usr/bin/env python3
"""Copy generated docs into a public directory with internal details redacted.

Usage:
    sanitize-public-docs.py <generated-dir> <diagrams-dir> <public-dir>

Redactions applied to every file: internal ``*.lan.ddnsgeek.com`` hostnames,
RFC1918 private IPs, and anything that looks like a credential assignment.
(The original also imported shutil, which was never used.)
"""
import re
import sys
from pathlib import Path

# Compiled once and applied in order: domains and IPs first, then generic
# "key = value" secrets, then SCREAMING_SNAKE env-style assignments.
_RULES = [
    (re.compile(r'\b[a-zA-Z0-9.-]+\.lan\.ddnsgeek\.com\b'),
     '<internal-domain>'),
    (re.compile(r'\b(?:10\.\d{1,3}\.\d{1,3}\.\d{1,3}'
                r'|192\.168\.\d{1,3}\.\d{1,3}'
                r'|172\.(?:1[6-9]|2\d|3[01])\.\d{1,3}\.\d{1,3})\b'),
     '<private-ip>'),
    (re.compile(r'(?i)\b(password|token|api_key|secret)\s*[:=]\s*[^\s\n]+'),
     r'\1=<redacted>'),
    (re.compile(r'(?m)^([A-Z0-9_]*(?:PASSWORD|TOKEN|API_KEY|SECRET)[A-Z0-9_]*)\s*[:=]\s*.*$'),
     r'\1=<redacted>'),
]


def sanitize_text(text):
    """Return *text* with internal domains, private IPs and secrets redacted."""
    for pattern, replacement in _RULES:
        text = pattern.sub(replacement, text)
    return text


def sanitize_tree(src, out_root):
    """Copy every file under *src* into *out_root*/<src-name>/, sanitized.

    Missing source directories are skipped so a partially-run pipeline does
    not crash this step (the original raised while globbing them).
    """
    if not src.is_dir():
        return
    for f in src.rglob('*'):
        if not f.is_file():
            continue
        dest = out_root / src.name / f.relative_to(src)
        dest.parent.mkdir(parents=True, exist_ok=True)
        # errors='ignore': diagram files may contain odd bytes; best-effort copy.
        dest.write_text(sanitize_text(f.read_text(errors='ignore')))


def main(argv):
    """CLI entry point: argv = [prog, generated_dir, diagrams_dir, public_dir]."""
    out_root = Path(argv[3])
    out_root.mkdir(parents=True, exist_ok=True)
    for src in (Path(argv[1]), Path(argv[2])):
        sanitize_tree(src, out_root)


if __name__ == '__main__':
    main(sys.argv)
|
||||
Reference in New Issue
Block a user