Compare commits
4 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 85cf56fcaf | |
| | a576dfdaa0 | |
| | cf516ab1f4 | |
| | e7feec9146 | |
@@ -44,20 +44,3 @@ flowchart TB
|
||||
```
|
||||
|
||||
For a request-flow/network view and architecture notes, see [docs/architecture.md](docs/architecture.md).
|
||||
|
||||
## Prometheus Runtime Inventory Export
|
||||
Regenerate derived docs/diagrams from inventory:
|
||||
|
||||
```bash
|
||||
python3 scripts/render_prometheus_docs.py --inventory-file docs/runtime/prometheus-inventory.json
|
||||
```
|
||||
|
||||
|
||||
Use `scripts/export_prometheus_inventory.py` to snapshot Prometheus-observed runtime inventory into versionable artifacts for docs/diagram workflows.
|
||||
|
||||
```bash
|
||||
export PROMETHEUS_URL="https://prometheus.example.com"
|
||||
python3 scripts/export_prometheus_inventory.py --output-dir docs/runtime
|
||||
```
|
||||
|
||||
This writes raw API snapshots and a normalized inventory JSON under `docs/runtime/`, and updates `docs/prometheus-inventory.md`.
|
||||
|
||||
@@ -114,34 +114,8 @@ Unknowns (left intentionally as placeholders):
|
||||
|
||||
If you want, this section can be replaced with a concrete Proxmox topology once you add an inventory source (e.g., Terraform, Ansible inventory, or a diagram export).
|
||||
|
||||
## Runtime visibility from Prometheus
|
||||
|
||||
<!-- BEGIN GENERATED PROMETHEUS SECTION -->
|
||||
|
||||
Prometheus inventory provides **observed runtime coverage** of scrape targets. It complements (but does not replace) declared architecture in Compose files and static docs.
|
||||
|
||||
- Inventory timestamp: `2026-04-13T06:36:45Z`
|
||||
- Observed jobs: `8`
|
||||
- Observed instances: `19`
|
||||
- Observed services (label-derived): `1`
|
||||
|
||||
### Observed monitoring view
|
||||
|
||||
| job | targets | unhealthy |
|
||||
| --- | --- | --- |
|
||||
| container-updates | 2 | 0 |
|
||||
| kuma | 2 | 0 |
|
||||
| node | 7 | 0 |
|
||||
| pihole | 1 | 0 |
|
||||
| prometheus | 1 | 0 |
|
||||
| proxmox-storage | 2 | 0 |
|
||||
| telegraf | 2 | 0 |
|
||||
| traefik | 2 | 0 |
|
||||
|
||||
### Data sources
|
||||
|
||||
- `docs/runtime/prometheus-inventory.json` (normalized runtime export)
|
||||
- Prometheus scrape metadata (`targets` + label sets)
|
||||
- Existing repository architecture docs for declared topology
|
||||
|
||||
### Notes from inventory
|
||||
|
||||
@@ -293,6 +293,18 @@ def get_compose_files_from_script(script_path):
|
||||
if not os.path.exists(script_path):
|
||||
return files
|
||||
base_dir = get_project_root_from_script(script_path)
|
||||
|
||||
def _clean_compose_path(raw_path):
|
||||
cleaned = str(raw_path).strip().strip(",")
|
||||
if (cleaned.startswith('"') and cleaned.endswith('"')) or (
|
||||
cleaned.startswith("'") and cleaned.endswith("'")
|
||||
):
|
||||
cleaned = cleaned[1:-1]
|
||||
expanded = expand_compose_path(cleaned, base_dir)
|
||||
if os.path.isabs(expanded):
|
||||
return os.path.normpath(expanded)
|
||||
return os.path.normpath(os.path.join(base_dir, expanded))
|
||||
|
||||
try:
|
||||
with open(script_path) as f:
|
||||
content = f.read()
|
||||
@@ -303,8 +315,47 @@ def get_compose_files_from_script(script_path):
|
||||
if line.startswith("-f"):
|
||||
path = line[2:].strip()
|
||||
if path:
|
||||
full = os.path.normpath(os.path.join(base_dir, path))
|
||||
full = _clean_compose_path(path)
|
||||
files.append(full)
|
||||
|
||||
# services-up.sh can append many compose files at runtime via:
|
||||
# FILES+=(-f "$file") done < <(find "$PROJECT_ROOT/apps" ...)
|
||||
# Mirror that behavior here so we can map service->compose image.
|
||||
root_dirs = []
|
||||
find_match = re.search(r'find\s+(.*?)\s+\\\s*\n', content)
|
||||
if find_match:
|
||||
for token in re.findall(r'"([^"]+)"|\'([^\']+)\'', find_match.group(1)):
|
||||
candidate = token[0] or token[1]
|
||||
if candidate:
|
||||
root_dirs.append(_clean_compose_path(candidate))
|
||||
else:
|
||||
root_dirs = [
|
||||
os.path.join(base_dir, "apps"),
|
||||
os.path.join(base_dir, "monitoring"),
|
||||
os.path.join(base_dir, "core"),
|
||||
]
|
||||
|
||||
for root_dir in root_dirs:
|
||||
if not os.path.isdir(root_dir):
|
||||
continue
|
||||
for candidate in sorted(os.listdir(root_dir)):
|
||||
svc_dir = os.path.join(root_dir, candidate)
|
||||
if not os.path.isdir(svc_dir):
|
||||
continue
|
||||
for compose_name in ("docker-compose.yml", "docker-compose.yaml"):
|
||||
compose_path = os.path.join(svc_dir, compose_name)
|
||||
if os.path.exists(compose_path):
|
||||
files.append(compose_path)
|
||||
|
||||
# Preserve order while removing duplicates.
|
||||
deduped = []
|
||||
seen = set()
|
||||
for path in files:
|
||||
if path in seen:
|
||||
continue
|
||||
seen.add(path)
|
||||
deduped.append(path)
|
||||
files = deduped
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed parsing services-up.sh: {e}")
|
||||
return files
|
||||
|
||||
Reference in New Issue
Block a user