#!/usr/bin/env python3
import os
import re
import time
import json

import docker
import requests
import yaml
from prometheus_client import Gauge, start_http_server

# --- Config ---
EXPORTER_PORT = 9105
CHECK_INTERVAL = 60          # seconds between update checks
CACHE_TTL = 6 * 3600         # seconds a remote digest stays cached
SERVICES_UP_SCRIPT = "/compose/services-up.sh"
CACHE_FILE = "/data/remote_digest_cache.json"

client = docker.from_env()

# --- Metrics ---
CONTAINER_UPDATE = Gauge(
    "docker_container_update_available",
    "1 if container image is out of date (compose drift or registry), 0 otherwise",
    ["container", "compose_image", "running_image", "com_docker_compose_project"]
)
LAST_CHECK = Gauge(
    "docker_image_update_last_check_timestamp",
    "Last time the update check ran (unix timestamp)"
)

# --- Persistent Cache ---
def load_cache():
    if not os.path.exists(CACHE_FILE):
        return {}
    try:
        with open(CACHE_FILE, "r") as f:
            return json.load(f)
    except Exception as e:
        print(f"[cache] Failed to load cache: {e}")
        return {}

def save_cache():
    try:
        os.makedirs(os.path.dirname(CACHE_FILE), exist_ok=True)
        with open(CACHE_FILE, "w") as f:
            json.dump(REMOTE_DIGEST_CACHE, f)
    except Exception as e:
        print(f"[cache] Failed to save cache: {e}")

REMOTE_DIGEST_CACHE = load_cache()

# --- Helpers ---
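# The helpers below pull configuration out of the compose wrapper script.
# Judging by the regexes used, services-up.sh is expected to look roughly
# like this (an illustrative sketch only, not the actual file):
#
#   PROJECT="core"
#   FILES=(
#       -f docker-compose.yml
#       -f docker-compose.media.yml
#   )
#
# PROJECT becomes the image-name prefix for locally built services, and each
# "-f <path>" entry is resolved relative to the script's own directory.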
def get_project_prefix_from_script(script_path):
    """Extract the compose project name and return it as an image prefix."""
    project_prefix = "core-"  # fallback
    if not os.path.exists(script_path):
        return project_prefix
    with open(script_path, "r") as f:
        for line in f:
            m = re.match(r'PROJECT\s*=\s*["\']?([^"\']+)["\']?', line.strip())
            if m:
                project_prefix = m.group(1) + "-"
                break
    return project_prefix

def get_local_digest(image_name):
    """Return the digest the local image was pulled by, if any."""
    try:
        img = client.images.get(image_name)
        digests = img.attrs.get("RepoDigests", [])
        if digests:
            return digests[0].split("@")[1]
    except Exception as e:
        print(f"[local_digest] Error for {image_name}: {e}")
    return None

def get_remote_digest(image_name):
    """Resolve the digest that a tag currently points to on the registry."""
    now = time.time()
    original = image_name

    # Use cached value if still valid
    if original in REMOTE_DIGEST_CACHE:
        digest, ts = REMOTE_DIGEST_CACHE[original]
        if now - ts < CACHE_TTL:
            return digest

    try:
        # Split the reference into registry, repository and tag.
        if "/" not in image_name:
            registry = "docker.io"
            repo = "library/" + image_name
        else:
            parts = image_name.split("/")
            if "." in parts[0] or ":" in parts[0]:
                registry = parts[0]
                repo = "/".join(parts[1:])
            else:
                registry = "docker.io"
                repo = image_name
        if ":" in repo:
            repo, tag = repo.rsplit(":", 1)
        else:
            tag = "latest"

        # Fetch an anonymous pull token for the registries we know about.
        token = None
        manifest_url = None
        if registry in ("docker.io", "registry-1.docker.io"):
            token_res = requests.get(
                "https://auth.docker.io/token",
                params={
                    "service": "registry.docker.io",
                    "scope": f"repository:{repo}:pull",
                },
                timeout=10,
            )
            token = token_res.json().get("token")
            manifest_url = f"https://registry-1.docker.io/v2/{repo}/manifests/{tag}"
        elif registry == "ghcr.io":
            token_res = requests.get(
                "https://ghcr.io/token",
                params={"scope": f"repository:{repo}:pull"},
                timeout=10,
            )
            token = token_res.json().get("token")
            manifest_url = f"https://ghcr.io/v2/{repo}/manifests/{tag}"
        else:
            return None  # unsupported registry

        # Accept manifest lists / OCI indexes as well: for multi-arch images
        # the local RepoDigest is the index digest, so asking only for the
        # single-arch manifest would compare mismatched digests.
        headers = {
            "Accept": ", ".join([
                "application/vnd.docker.distribution.manifest.list.v2+json",
                "application/vnd.oci.image.index.v1+json",
                "application/vnd.docker.distribution.manifest.v2+json",
            ])
        }
        if token:
            headers["Authorization"] = f"Bearer {token}"
        res = requests.get(manifest_url, headers=headers, timeout=10)
        if res.status_code == 200:
            digest = res.headers.get("Docker-Content-Digest")
            if digest:
                # Save to persistent cache
                REMOTE_DIGEST_CACHE[original] = (digest, now)
                save_cache()
            return digest
    except Exception as e:
        print(f"[remote_digest] Error for {image_name}: {e}")
    return None

def get_compose_files_from_script(script_path):
    """Collect the compose files listed in the FILES=( -f ... ) array."""
    files = []
    if not os.path.exists(script_path):
        print(f"[compose_files] Script not found: {script_path}")
        return files
    base_dir = os.path.dirname(script_path)
    with open(script_path, "r") as f:
        content = f.read()
    match = re.search(r'FILES\s*=\s*\((.*?)\)', content, re.DOTALL)
    if match:
        for line in match.group(1).splitlines():
            line = line.strip()
            if line.startswith("-f"):
                rel_path = line[2:].strip()
                if rel_path:
                    full_path = os.path.normpath(os.path.join(base_dir, rel_path))
                    print(f"[compose_files] {rel_path} -> {full_path}")
                    files.append(full_path)
    return files

def parse_compose_files(compose_files):
    """Return mapping service_name -> (expected image, is_built)."""
    service_to_image = {}
    for f in compose_files:
        if not os.path.exists(f):
            continue
        try:
            with open(f, "r") as stream:
                data = yaml.safe_load(stream) or {}
            services = data.get("services", {})
            for service_name, service_def in services.items():
                image = service_def.get("image")
                is_built = False
                if not image:
                    # Built service: look for a "LABEL ... image=..." hint in
                    # its Dockerfile. Build contexts are relative to the
                    # compose file's directory, not the exporter's CWD.
                    compose_dir = os.path.dirname(os.path.abspath(f))
                    build_ctx = service_def.get("build")
                    if isinstance(build_ctx, dict):
                        context_path = build_ctx.get("context", ".")
                        dockerfile_path = os.path.join(
                            compose_dir, context_path,
                            build_ctx.get("dockerfile", "Dockerfile")
                        )
                    elif isinstance(build_ctx, str):
                        dockerfile_path = os.path.join(
                            compose_dir, build_ctx, "Dockerfile"
                        )
                    else:
                        dockerfile_path = None
                    if dockerfile_path and os.path.exists(dockerfile_path):
                        try:
                            with open(dockerfile_path, "r") as df:
                                for line in df:
                                    line = line.strip()
                                    if line.upper().startswith("LABEL") and "image=" in line:
                                        m = re.search(r'image=["\']?([^"\']+)["\']?', line)
                                        if m:
                                            image = m.group(1)
                                            break
                        except Exception as e:
                            print(f"[dockerfile] Error reading {dockerfile_path}: {e}")
                    if not image:
                        image = f"{service_name}:latest"
                    is_built = True
                service_to_image[service_name] = (image, is_built)
        except Exception as e:
            print(f"[compose_parse] Failed {f}: {e}")
    return service_to_image
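# Update detection, per running compose container:
#   1. Drift: the image the container is running differs from what the
#      compose files say it should run (for built services, from the
#      "<project>-<service>" name compose gives the image).
#   2. Registry: the local image's pulled digest no longer matches what the
#      same tag resolves to upstream.
# Either condition sets docker_container_update_available to 1 for that
# container's label set.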
def check_containers():
    CONTAINER_UPDATE.clear()
    PROJECT_PREFIX = get_project_prefix_from_script(SERVICES_UP_SCRIPT)
    compose_files = get_compose_files_from_script(SERVICES_UP_SCRIPT)
    service_to_image = parse_compose_files(compose_files)

    for container in client.containers.list():
        project_label = container.labels.get("com.docker.compose.project")
        if not project_label:
            continue  # skip non-compose containers
        service_label = container.labels.get("com.docker.compose.service")
        running_image = container.attrs["Config"]["Image"]

        compose_image = None
        is_built = False
        if service_label and service_label in service_to_image:
            compose_image, is_built = service_to_image[service_label]
            if is_built:
                # Compose names built images "<project>-<service>", without a tag.
                compose_image_name, _, _ = compose_image.partition(":")
                compose_image = f"{PROJECT_PREFIX}{compose_image_name}"

        update_flag = 0
        if is_built:
            if running_image != compose_image:
                update_flag = 1
            else:
                # Compare against the upstream image recorded for the service
                # (e.g. a Dockerfile LABEL); locally built images usually have
                # no RepoDigest, in which case the flag stays 0.
                local_digest = get_local_digest(running_image)
                remote_digest = get_remote_digest(service_to_image[service_label][0])
                if local_digest and remote_digest and local_digest != remote_digest:
                    update_flag = 1
        else:
            if running_image != compose_image:
                update_flag = 1
            else:
                local_digest = get_local_digest(running_image)
                remote_digest = get_remote_digest(running_image)
                if local_digest and remote_digest and local_digest != remote_digest:
                    update_flag = 1

        CONTAINER_UPDATE.labels(
            container=container.name,
            compose_image=compose_image if compose_image else "unknown",
            running_image=running_image,
            com_docker_compose_project=project_label
        ).set(update_flag)
        print(
            f"{container.name} | "
            f"running={running_image} | "
            f"compose={compose_image} | "
            f"update={update_flag}"
        )
    LAST_CHECK.set(time.time())

if __name__ == "__main__":
    start_http_server(EXPORTER_PORT)
    print(f"Docker update exporter running on :{EXPORTER_PORT}")
    while True:
        try:
            check_containers()
        except Exception as e:
            print(f"[main] Error: {e}")
        time.sleep(CHECK_INTERVAL)
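# Example queries once Prometheus scrapes this exporter on :9105 (the scrape
# config lives outside this script; the job name is whatever you configure):
#
#   docker_container_update_available == 1
#   time() - docker_image_update_last_check_timestamp > 300
#
# The first lists containers with a pending update; the second can alert
# when the checker has stopped running (five missed 60-second intervals).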