diff --git a/docker-compose.generated.yml b/docker-compose.generated.yml new file mode 100644 index 0000000..75d8ff3 --- /dev/null +++ b/docker-compose.generated.yml @@ -0,0 +1,189 @@ +# Generated from services-up.sh compose file list +# Note: apps/shift-recorder/docker-compose.yml and apps/stockfill/docker-compose.yml +# are referenced by services-up.sh but do not exist in this checkout. +services: + authelia: + build: + context: ${PROJECT_ROOT}/core/authelia + image: authelia/authelia + profiles: + - core + - all + - traefik + x-source-compose: core/docker-compose.yml + crowdsec: + build: ${PROJECT_ROOT}/core/crowdsec + profiles: + - core + - all + - traefik + x-source-compose: core/docker-compose.yml + docker-update-exporter: + build: + context: ${PROJECT_ROOT}/monitoring/docker-exporter + profiles: + - monitoring + - all + - prometheus-exporters + x-source-compose: monitoring/prometheus/docker-compose.yml + docker-update-exporter-test: + build: + context: ${PROJECT_ROOT}/core/test + profiles: + - test + x-source-compose: core/test/docker-compose.yml + error-pages: + image: tarampampam/error-pages:3 + profiles: + - core + - all + - traefik + x-source-compose: core/docker-compose.yml + gitea: + image: gitea/gitea:latest + profiles: + - apps + - all + - gitea + x-source-compose: apps/gitea/docker-compose.yml + gotify: + image: gotify/server:latest + profiles: + - monitoring + - all + - gotify + x-source-compose: monitoring/gotify/docker-compose.yml + grafana: + image: grafana/grafana:latest + profiles: + - monitoring + - all + - grafana + x-source-compose: monitoring/grafana/docker-compose.yml + gramps-db: + image: postgres:13 + profiles: + - apps + - all + - gramps + x-source-compose: apps/gramps/docker-compose.yml + grampsweb: + image: ghcr.io/gramps-project/grampsweb:latest + profiles: + - apps + - all + - gramps + x-source-compose: apps/gramps/docker-compose.yml + influxdb: + image: influxdb:2.7 + profiles: + - monitoring + - all + - prometheus + 
x-source-compose: monitoring/prometheus/docker-compose.yml + monitor-kuma: + image: louislam/uptime-kuma:2.1.1 + profiles: + - monitoring + - all + - uptime-kuma + x-source-compose: monitoring/uptime-kuma/docker-compose.yml + nextcloud-db: + image: mariadb:11.4 + profiles: + - apps + - all + - nextcloud + x-source-compose: apps/nextcloud/docker-compose.yml + nextcloud-redis: + image: redis + profiles: + - apps + - all + - nextcloud + x-source-compose: apps/nextcloud/docker-compose.yml + nextcloud-webapp: + build: + context: ${PROJECT_ROOT}/apps/nextcloud + profiles: + - apps + - all + - nextcloud + x-source-compose: apps/nextcloud/docker-compose.yml + node-exporter: + image: prom/node-exporter:latest + profiles: + - monitoring + - all + - prometheus-exporters + x-source-compose: monitoring/prometheus/docker-compose.yml + node-red: + build: + context: ${PROJECT_ROOT}/monitoring/node-red + profiles: + - monitoring + - all + x-source-compose: monitoring/node-red/docker-compose.yml + passbolt-db: + image: mariadb:12 + profiles: + - apps + - all + - passbolt + x-source-compose: apps/passbolt/docker-compose.yml + passbolt-webapp: + image: passbolt/passbolt:latest-ce + profiles: + - apps + - all + - passbolt + x-source-compose: apps/passbolt/docker-compose.yml + pihole-exporter: + image: ekofr/pihole-exporter:latest + profiles: + - monitoring + - all + - prometheus-exporters + x-source-compose: monitoring/prometheus/docker-compose.yml + portainer: + image: portainer/portainer-ce:latest + profiles: + - monitoring + - all + - portainer + x-source-compose: monitoring/portainer/docker-compose.yml + prometheus: + image: prom/prometheus:latest + profiles: + - monitoring + - all + - prometheus + x-source-compose: monitoring/prometheus/docker-compose.yml + searxng-webapp: + image: searxng/searxng + profiles: + - apps + - all + - searxng + x-source-compose: apps/searxng/docker-compose.yml + telegraf: + image: telegraf:latest + profiles: + - monitoring + - all + - prometheus + 
x-source-compose: monitoring/prometheus/docker-compose.yml + traefik: + build: + context: ${PROJECT_ROOT}/core + image: traefik:3 + profiles: + - core + - all + - traefik + x-source-compose: core/docker-compose.yml + update-test: + image: nginx:1.27.4 + profiles: + - test + x-source-compose: core/test/docker-compose.yml diff --git a/monitoring/docker-exporter/TEST_EXAMPLE.md b/monitoring/docker-exporter/TEST_EXAMPLE.md new file mode 100644 index 0000000..d6af05b --- /dev/null +++ b/monitoring/docker-exporter/TEST_EXAMPLE.md @@ -0,0 +1,24 @@ +# Exporter image-mapping test example + +Run the exporter in dry-run mode to print the `service -> image:tag` mapping without starting the metrics loop: + +```bash +SERVICES_UP_SCRIPT=/workspace/docker/services-up.sh python monitoring/docker-exporter/exporter.py --dry-run +``` + +Example output excerpt: + +```json +{ + "crowdsec": "crowdsecurity/crowdsec:latest", + "docker-update-exporter": "python:3.11-slim", + "nextcloud-webapp": "nextcloud:production", + "node-red": "nodered/node-red:latest", + "prometheus": "prom/prometheus:latest", + "traefik": "traefik:3" +} +``` + +This confirms the exporter now reports images for both: +- services with explicit `image:` values, and +- services using `build:` contexts. 
diff --git a/monitoring/docker-exporter/exporter.py b/monitoring/docker-exporter/exporter.py index f44787f..27ec3b0 100644 --- a/monitoring/docker-exporter/exporter.py +++ b/monitoring/docker-exporter/exporter.py @@ -1,4 +1,5 @@ #!/usr/bin/env python3 +import argparse import os import re import time @@ -21,10 +22,15 @@ logger = logging.getLogger("docker-update-exporter") EXPORTER_PORT = 9105 CHECK_INTERVAL = 60 CACHE_TTL = 6 * 3600 -SERVICES_UP_SCRIPT = "/compose/services-up.sh" -CACHE_FILE = "/data/remote_digest_cache.json" +SERVICES_UP_SCRIPT = os.getenv("SERVICES_UP_SCRIPT", "/compose/services-up.sh") +CACHE_FILE = os.getenv("CACHE_FILE", "/data/remote_digest_cache.json") +DRY_RUN = os.getenv("DRY_RUN", "false").lower() in ("1", "true", "yes") -client = docker.from_env() +try: + client = docker.from_env() +except Exception as e: + logger.warning(f"Docker client unavailable at startup: {e}") + client = None # --- Metrics --- CONTAINER_UPDATE = Gauge( @@ -79,6 +85,8 @@ def get_project_prefix_from_script(script_path): return prefix def get_local_digest(image_name): + if client is None: + return None try: img = client.images.get(image_name) digests = img.attrs.get("RepoDigests", []) @@ -173,21 +181,37 @@ def parse_dockerfile_for_image(dockerfile_path): logger.debug(f"Found LABEL image={image_name} in {dockerfile_path}") return image_name - # If no LABEL, use the FROM line as fallback + # If no LABEL, use the last FROM line as fallback df.seek(0) + last_from = None for line in df: line = line.strip() if line.upper().startswith("FROM "): parts = line.split() if len(parts) >= 2: - base_image = parts[1] - logger.debug(f"Found base FROM {base_image} in {dockerfile_path}") - return base_image + last_from = parts[1] + if last_from: + logger.debug(f"Found base FROM {last_from} in {dockerfile_path}") + return last_from except Exception as e: logger.debug(f"Error reading Dockerfile {dockerfile_path}: {e}") return image_name +def normalize_image_name(image_name): + if not 
image_name: + return None + if "@" in image_name: + return image_name + if ":" in image_name.rsplit("/", 1)[-1]: + return image_name + return f"{image_name}:latest" + +def expand_compose_path(path_value, project_root): + raw = str(path_value) + raw = raw.replace("${PROJECT_ROOT}", project_root).replace("$PROJECT_ROOT", project_root) + return os.path.expandvars(raw) + # --- Compose parsing --- def get_compose_files_from_script(script_path): files = [] @@ -210,33 +234,83 @@ logger.warning(f"Failed parsing services-up.sh: {e}") return files -def parse_compose_services(compose_files): +def parse_project_name_from_script(script_path): + project = "core" + if not os.path.exists(script_path): + return project + try: + with open(script_path) as f: + for line in f: + m = re.match(r'PROJECT\s*=\s*["\']?([^"\']+)', line) + if m: + project = m.group(1) + break + except Exception as e: + logger.warning(f"Failed reading project name: {e}") + return project + +def resolve_local_build_image(service_name, project_name): + if client is None: + return None + try: + images = client.images.list(filters={"label": f"com.docker.compose.service={service_name}"}) + for image in images: + labels = image.attrs.get("Config", {}).get("Labels", {}) or {} + if labels.get("com.docker.compose.project") != project_name: + continue + for tag in image.tags: + if tag and "<none>" not in tag: + logger.debug(f"Resolved local compose image for {service_name}: {tag}") + return normalize_image_name(tag) + except Exception as e: + logger.debug(f"Could not inspect local build metadata for {service_name}: {e}") + return None + +def parse_compose_services(compose_files, project_name, project_root): svc_map = {} for f in compose_files: + if not os.path.exists(f): + logger.warning(f"Compose file from services-up.sh is missing: {f}") + continue try: with open(f) as stream: data = yaml.safe_load(stream) or {} for svc_name, svc_def in data.get("services", {}).items(): - 
image = svc_def.get("image") - is_built = False - if not image and "build" in svc_def: - is_built = True - build_ctx = svc_def["build"] - dockerfile_path = None + image = normalize_image_name(svc_def.get("image")) + profiles = svc_def.get("profiles", []) + build_ctx = svc_def.get("build") + dockerfile_path = None + from_dockerfile = None + local_built_image = None + if build_ctx: if isinstance(build_ctx, dict): context = build_ctx.get("context", ".") - dockerfile_path = os.path.join(context, build_ctx.get("dockerfile", "Dockerfile")) - elif isinstance(build_ctx, str): - dockerfile_path = os.path.join(build_ctx, "Dockerfile") + dockerfile = build_ctx.get("dockerfile", "Dockerfile") + else: + context = build_ctx + dockerfile = "Dockerfile" - image = parse_dockerfile_for_image(dockerfile_path) + compose_dir = os.path.dirname(f) + context_expanded = expand_compose_path(context, project_root) + if os.path.isabs(context_expanded): + context_path = context_expanded + else: + context_path = os.path.normpath(os.path.join(compose_dir, context_expanded)) + dockerfile_expanded = expand_compose_path(dockerfile, project_root) + dockerfile_path = os.path.normpath(os.path.join(context_path, dockerfile_expanded)) + from_dockerfile = normalize_image_name(parse_dockerfile_for_image(dockerfile_path)) + local_built_image = resolve_local_build_image(svc_name, project_name) - if not image: - logger.info(f"Defaulting build image for {svc_name} to {svc_name}:latest") - image = f"{svc_name}:latest" + resolved_image = image or local_built_image or from_dockerfile or f"{project_name}-{svc_name}:latest" - svc_map[svc_name] = (image, is_built) + svc_map[svc_name] = { + "image": resolved_image, + "profiles": profiles, + "build_context": build_ctx, + "compose_file": f, + "dockerfile": dockerfile_path + } except Exception as e: logger.warning(f"Failed parsing {f}: {e}") @@ -245,11 +319,15 @@ def parse_compose_services(compose_files): # --- Main check --- def check_containers(): + if client is 
None: + logger.error("Docker client is unavailable; skipping check cycle") + return CONTAINER_UPDATE.clear() - prefix = get_project_prefix_from_script(SERVICES_UP_SCRIPT) + project_name = parse_project_name_from_script(SERVICES_UP_SCRIPT) + project_root = os.path.dirname(SERVICES_UP_SCRIPT) compose_files = get_compose_files_from_script(SERVICES_UP_SCRIPT) - svc_map = parse_compose_services(compose_files) + svc_map = parse_compose_services(compose_files, project_name, project_root) containers = client.containers.list() for container in containers: @@ -261,17 +339,14 @@ def check_containers(): running = container.attrs["Config"]["Image"] compose_image = None - is_built = False if svc in svc_map: - compose_image, is_built = svc_map[svc] - if is_built: - name, _, _ = compose_image.partition(":") - compose_image = f"{prefix}{name}" + compose_image = svc_map[svc]["image"] update_flag = 0 local_digest = get_local_digest(running) - remote_digest = get_remote_digest(compose_image if is_built else running) + remote_target = compose_image or running + remote_digest = get_remote_digest(remote_target) if local_digest and remote_digest and local_digest != remote_digest: update_flag = 1 @@ -283,8 +358,26 @@ def check_containers(): com_docker_compose_project=proj ).set(update_flag) +def dump_service_image_mapping(): + project_name = parse_project_name_from_script(SERVICES_UP_SCRIPT) + project_root = os.path.dirname(SERVICES_UP_SCRIPT) + compose_files = get_compose_files_from_script(SERVICES_UP_SCRIPT) + svc_map = parse_compose_services(compose_files, project_name, project_root) + mapping = {name: data["image"] for name, data in sorted(svc_map.items())} + logger.info("Service to image mapping:") + logger.info(json.dumps(mapping, indent=2, sort_keys=True)) + return mapping + # --- Runner --- if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Docker image update exporter") + parser.add_argument("--dry-run", action="store_true", help="Only print service->image 
mapping and exit") + args = parser.parse_args() + + if DRY_RUN or args.dry_run: + dump_service_image_mapping() + raise SystemExit(0) + start_http_server(EXPORTER_PORT) while True: try: