modified: .gitignore
new file: .gitignore.gpt new file: .gitignore.old deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/add-environment-setup-in-conftest.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/add-logging-to-geocode.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/add-logging-to-route_metrics.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/add-logging-to-tracking-simulator.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/extend-sqlite-tuning-in-database.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/fix-route-handling-in-routing.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/handle-api-response-errors-in-routing.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/refactor-database-path-handling-in-database.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/update-fcm-message-construction-in-notifications.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/update-role-check-in-ws.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/update-user-seed-in-database.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/add-environment-setup-in-conftest.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/add-logging-to-geocode.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/add-logging-to-route_metrics.py deleted: 
apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/add-logging-to-tracking-simulator.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/extend-sqlite-tuning-in-database.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/fix-route-handling-in-routing.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/handle-api-response-errors-in-routing.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/refactor-database-path-handling-in-database.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/update-fcm-message-construction-in-notifications.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/update-role-check-in-ws.py deleted: apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/update-user-seed-in-database.py deleted: apps/searxng/dockerfiles/docker-entrypoint.sh deleted: apps/searxng/docs/conf.py deleted: apps/searxng/docs/user/.gitignore deleted: apps/searxng/examples/basic_engine.py deleted: apps/searxng/searx/__init__.py deleted: apps/searxng/searx/answerers/__init__.py deleted: apps/searxng/searx/answerers/random/answerer.py deleted: apps/searxng/searx/answerers/statistics/answerer.py deleted: apps/searxng/searx/autocomplete.py deleted: apps/searxng/searx/babel_extract.py deleted: apps/searxng/searx/botdetection/__init__.py deleted: apps/searxng/searx/botdetection/_helpers.py deleted: apps/searxng/searx/botdetection/http_accept.py deleted: apps/searxng/searx/botdetection/http_accept_encoding.py deleted: apps/searxng/searx/botdetection/http_accept_language.py deleted: apps/searxng/searx/botdetection/http_connection.py deleted: apps/searxng/searx/botdetection/http_user_agent.py deleted: apps/searxng/searx/botdetection/ip_limit.py deleted: 
apps/searxng/searx/botdetection/ip_lists.py deleted: apps/searxng/searx/botdetection/limiter.py deleted: apps/searxng/searx/botdetection/link_token.py deleted: apps/searxng/searx/compat.py deleted: apps/searxng/searx/data/__init__.py deleted: apps/searxng/searx/enginelib/__init__.py deleted: apps/searxng/searx/enginelib/traits.py deleted: apps/searxng/searx/engines/1337x.py deleted: apps/searxng/searx/engines/9gag.py deleted: apps/searxng/searx/engines/__init__.py deleted: apps/searxng/searx/engines/ahmia.py deleted: apps/searxng/searx/engines/annas_archive.py deleted: apps/searxng/searx/engines/apkmirror.py deleted: apps/searxng/searx/engines/apple_app_store.py deleted: apps/searxng/searx/engines/apple_maps.py deleted: apps/searxng/searx/engines/archlinux.py deleted: apps/searxng/searx/engines/artic.py deleted: apps/searxng/searx/engines/arxiv.py deleted: apps/searxng/searx/engines/bandcamp.py deleted: apps/searxng/searx/engines/base.py deleted: apps/searxng/searx/engines/bing.py deleted: apps/searxng/searx/engines/bing_images.py deleted: apps/searxng/searx/engines/bing_news.py deleted: apps/searxng/searx/engines/bing_videos.py deleted: apps/searxng/searx/engines/brave.py deleted: apps/searxng/searx/engines/bt4g.py deleted: apps/searxng/searx/engines/btdigg.py deleted: apps/searxng/searx/engines/command.py deleted: apps/searxng/searx/engines/core.py deleted: apps/searxng/searx/engines/crossref.py deleted: apps/searxng/searx/engines/currency_convert.py deleted: apps/searxng/searx/engines/dailymotion.py deleted: apps/searxng/searx/engines/deepl.py deleted: apps/searxng/searx/engines/deezer.py deleted: apps/searxng/searx/engines/demo_offline.py deleted: apps/searxng/searx/engines/demo_online.py deleted: apps/searxng/searx/engines/deviantart.py deleted: apps/searxng/searx/engines/dictzone.py deleted: apps/searxng/searx/engines/digbt.py deleted: apps/searxng/searx/engines/docker_hub.py deleted: apps/searxng/searx/engines/doku.py deleted: 
apps/searxng/searx/engines/duckduckgo.py deleted: apps/searxng/searx/engines/duckduckgo_definitions.py deleted: apps/searxng/searx/engines/duckduckgo_images.py deleted: apps/searxng/searx/engines/duckduckgo_weather.py deleted: apps/searxng/searx/engines/duden.py deleted: apps/searxng/searx/engines/dummy-offline.py deleted: apps/searxng/searx/engines/dummy.py deleted: apps/searxng/searx/engines/ebay.py deleted: apps/searxng/searx/engines/elasticsearch.py deleted: apps/searxng/searx/engines/emojipedia.py deleted: apps/searxng/searx/engines/fdroid.py deleted: apps/searxng/searx/engines/flickr.py deleted: apps/searxng/searx/engines/flickr_noapi.py deleted: apps/searxng/searx/engines/framalibre.py deleted: apps/searxng/searx/engines/freesound.py deleted: apps/searxng/searx/engines/frinkiac.py deleted: apps/searxng/searx/engines/genius.py deleted: apps/searxng/searx/engines/gentoo.py deleted: apps/searxng/searx/engines/github.py deleted: apps/searxng/searx/engines/google.py deleted: apps/searxng/searx/engines/google_images.py deleted: apps/searxng/searx/engines/google_news.py deleted: apps/searxng/searx/engines/google_play.py deleted: apps/searxng/searx/engines/google_scholar.py deleted: apps/searxng/searx/engines/google_videos.py deleted: apps/searxng/searx/engines/imdb.py deleted: apps/searxng/searx/engines/ina.py deleted: apps/searxng/searx/engines/invidious.py deleted: apps/searxng/searx/engines/jisho.py deleted: apps/searxng/searx/engines/json_engine.py deleted: apps/searxng/searx/engines/kickass.py deleted: apps/searxng/searx/engines/lemmy.py deleted: apps/searxng/searx/engines/lingva.py deleted: apps/searxng/searx/engines/loc.py deleted: apps/searxng/searx/engines/mediathekviewweb.py deleted: apps/searxng/searx/engines/mediawiki.py deleted: apps/searxng/searx/engines/meilisearch.py deleted: apps/searxng/searx/engines/metacpan.py deleted: apps/searxng/searx/engines/mixcloud.py deleted: apps/searxng/searx/engines/mongodb.py deleted: 
apps/searxng/searx/engines/mysql_server.py deleted: apps/searxng/searx/engines/nyaa.py deleted: apps/searxng/searx/engines/opensemantic.py deleted: apps/searxng/searx/engines/openstreetmap.py deleted: apps/searxng/searx/engines/openverse.py deleted: apps/searxng/searx/engines/pdbe.py deleted: apps/searxng/searx/engines/peertube.py deleted: apps/searxng/searx/engines/photon.py deleted: apps/searxng/searx/engines/piped.py deleted: apps/searxng/searx/engines/piratebay.py deleted: apps/searxng/searx/engines/postgresql.py deleted: apps/searxng/searx/engines/pubmed.py deleted: apps/searxng/searx/engines/qwant.py deleted: apps/searxng/searx/engines/recoll.py deleted: apps/searxng/searx/engines/reddit.py deleted: apps/searxng/searx/engines/redis_server.py deleted: apps/searxng/searx/engines/rumble.py deleted: apps/searxng/searx/engines/scanr_structures.py deleted: apps/searxng/searx/engines/searchcode_code.py deleted: apps/searxng/searx/engines/searx_engine.py deleted: apps/searxng/searx/engines/semantic_scholar.py deleted: apps/searxng/searx/engines/sepiasearch.py deleted: apps/searxng/searx/engines/seznam.py deleted: apps/searxng/searx/engines/sjp.py deleted: apps/searxng/searx/engines/solidtorrents.py deleted: apps/searxng/searx/engines/solr.py deleted: apps/searxng/searx/engines/soundcloud.py deleted: apps/searxng/searx/engines/spotify.py deleted: apps/searxng/searx/engines/springer.py deleted: apps/searxng/searx/engines/sqlite.py deleted: apps/searxng/searx/engines/stackexchange.py deleted: apps/searxng/searx/engines/startpage.py deleted: apps/searxng/searx/engines/tagesschau.py deleted: apps/searxng/searx/engines/tineye.py deleted: apps/searxng/searx/engines/tokyotoshokan.py deleted: apps/searxng/searx/engines/torznab.py deleted: apps/searxng/searx/engines/translated.py deleted: apps/searxng/searx/engines/twitter.py deleted: apps/searxng/searx/engines/unsplash.py deleted: apps/searxng/searx/engines/vimeo.py deleted: apps/searxng/searx/engines/wikidata.py deleted: 
apps/searxng/searx/engines/wikipedia.py deleted: apps/searxng/searx/engines/wolframalpha_api.py deleted: apps/searxng/searx/engines/wolframalpha_noapi.py deleted: apps/searxng/searx/engines/wordnik.py deleted: apps/searxng/searx/engines/wttr.py deleted: apps/searxng/searx/engines/www1x.py deleted: apps/searxng/searx/engines/xpath.py deleted: apps/searxng/searx/engines/yacy.py deleted: apps/searxng/searx/engines/yahoo.py deleted: apps/searxng/searx/engines/yahoo_news.py deleted: apps/searxng/searx/engines/youtube_api.py deleted: apps/searxng/searx/engines/youtube_noapi.py deleted: apps/searxng/searx/engines/zlibrary.py deleted: apps/searxng/searx/exceptions.py deleted: apps/searxng/searx/external_bang.py deleted: apps/searxng/searx/external_urls.py deleted: apps/searxng/searx/flaskfix.py deleted: apps/searxng/searx/infopage/__init__.py deleted: apps/searxng/searx/locales.py deleted: apps/searxng/searx/metrics/__init__.py deleted: apps/searxng/searx/metrics/error_recorder.py deleted: apps/searxng/searx/metrics/models.py deleted: apps/searxng/searx/network/__init__.py deleted: apps/searxng/searx/network/client.py deleted: apps/searxng/searx/network/network.py deleted: apps/searxng/searx/network/raise_for_httperror.py deleted: apps/searxng/searx/plugins/__init__.py deleted: apps/searxng/searx/plugins/ahmia_filter.py deleted: apps/searxng/searx/plugins/hash_plugin.py deleted: apps/searxng/searx/plugins/hostname_replace.py deleted: apps/searxng/searx/plugins/limiter.py deleted: apps/searxng/searx/plugins/oa_doi_rewrite.py deleted: apps/searxng/searx/plugins/search_on_category_select.py deleted: apps/searxng/searx/plugins/self_info.py deleted: apps/searxng/searx/plugins/tor_check.py deleted: apps/searxng/searx/plugins/tracker_url_remover.py deleted: apps/searxng/searx/plugins/vim_hotkeys.py deleted: apps/searxng/searx/preferences.py deleted: apps/searxng/searx/query.py deleted: apps/searxng/searx/redisdb.py deleted: apps/searxng/searx/redislib.py deleted: 
apps/searxng/searx/results.py deleted: apps/searxng/searx/search/__init__.py deleted: apps/searxng/searx/search/checker/__init__.py deleted: apps/searxng/searx/search/checker/__main__.py deleted: apps/searxng/searx/search/checker/background.py deleted: apps/searxng/searx/search/checker/impl.py deleted: apps/searxng/searx/search/checker/scheduler.py deleted: apps/searxng/searx/search/models.py deleted: apps/searxng/searx/search/processors/__init__.py deleted: apps/searxng/searx/search/processors/abstract.py deleted: apps/searxng/searx/search/processors/offline.py deleted: apps/searxng/searx/search/processors/online.py deleted: apps/searxng/searx/search/processors/online_currency.py deleted: apps/searxng/searx/search/processors/online_dictionary.py deleted: apps/searxng/searx/search/processors/online_url_search.py deleted: apps/searxng/searx/settings.yml deleted: apps/searxng/searx/settings_defaults.py deleted: apps/searxng/searx/settings_loader.py deleted: apps/searxng/searx/static/plugins/external_plugins/.gitignore deleted: apps/searxng/searx/static/themes/simple/.gitattributes deleted: apps/searxng/searx/static/themes/simple/.gitignore deleted: apps/searxng/searx/sxng_locales.py deleted: apps/searxng/searx/tools/__init__.py deleted: apps/searxng/searx/tools/config.py deleted: apps/searxng/searx/unixthreadname.py deleted: apps/searxng/searx/utils.py deleted: apps/searxng/searx/version.py deleted: apps/searxng/searx/webadapter.py deleted: apps/searxng/searx/webapp.py deleted: apps/searxng/searx/webutils.py deleted: apps/searxng/searxng_extra/standalone_searx.py deleted: apps/searxng/searxng_extra/update/__init__.py deleted: apps/searxng/searxng_extra/update/update_ahmia_blacklist.py deleted: apps/searxng/searxng_extra/update/update_currencies.py deleted: apps/searxng/searxng_extra/update/update_engine_descriptions.py deleted: apps/searxng/searxng_extra/update/update_engine_traits.py deleted: apps/searxng/searxng_extra/update/update_external_bangs.py deleted: 
apps/searxng/searxng_extra/update/update_firefox_version.py deleted: apps/searxng/searxng_extra/update/update_osm_keys_tags.py deleted: apps/searxng/searxng_extra/update/update_pygments.py deleted: apps/searxng/searxng_extra/update/update_wikidata_units.py deleted: apps/searxng/setup.py deleted: apps/searxng/tests/__init__.py deleted: apps/searxng/tests/robot/__init__.py deleted: apps/searxng/tests/robot/__main__.py deleted: apps/searxng/tests/robot/settings_robot.yml deleted: apps/searxng/tests/robot/test_webapp.py deleted: apps/searxng/tests/unit/__init__.py deleted: apps/searxng/tests/unit/engines/test_command.py deleted: apps/searxng/tests/unit/engines/test_xpath.py deleted: apps/searxng/tests/unit/network/__init__.py deleted: apps/searxng/tests/unit/network/test_network.py deleted: apps/searxng/tests/unit/settings/empty_settings.yml deleted: apps/searxng/tests/unit/settings/syntaxerror_settings.yml deleted: apps/searxng/tests/unit/settings/test_settings.yml deleted: apps/searxng/tests/unit/settings/user_settings.yml deleted: apps/searxng/tests/unit/settings/user_settings_keep_only.yml deleted: apps/searxng/tests/unit/settings/user_settings_remove.yml deleted: apps/searxng/tests/unit/settings/user_settings_remove2.yml deleted: apps/searxng/tests/unit/settings/user_settings_simple.yml deleted: apps/searxng/tests/unit/test_answerers.py deleted: apps/searxng/tests/unit/test_engines_init.py deleted: apps/searxng/tests/unit/test_exceptions.py deleted: apps/searxng/tests/unit/test_external_bangs.py deleted: apps/searxng/tests/unit/test_locales.py deleted: apps/searxng/tests/unit/test_plugins.py deleted: apps/searxng/tests/unit/test_preferences.py deleted: apps/searxng/tests/unit/test_query.py deleted: apps/searxng/tests/unit/test_results.py deleted: apps/searxng/tests/unit/test_search.py deleted: apps/searxng/tests/unit/test_settings_loader.py deleted: apps/searxng/tests/unit/test_utils.py deleted: apps/searxng/tests/unit/test_webadapter.py deleted: 
apps/searxng/tests/unit/test_webapp.py deleted: apps/searxng/tests/unit/test_webutils.py deleted: apps/searxng/utils/build_env.py deleted: apps/searxng/utils/filtron.sh deleted: apps/searxng/utils/lib.sh deleted: apps/searxng/utils/lib_go.sh deleted: apps/searxng/utils/lib_nvm.sh deleted: apps/searxng/utils/lib_redis.sh deleted: apps/searxng/utils/lib_sxng_data.sh deleted: apps/searxng/utils/lib_sxng_node.sh deleted: apps/searxng/utils/lib_sxng_static.sh deleted: apps/searxng/utils/lib_sxng_test.sh deleted: apps/searxng/utils/lib_sxng_themes.sh deleted: apps/searxng/utils/lib_sxng_weblate.sh deleted: apps/searxng/utils/lxc.sh deleted: apps/searxng/utils/morty.sh deleted: apps/searxng/utils/searx.sh deleted: apps/searxng/utils/searxng.sh deleted: apps/searxng/utils/searxng_check.py deleted: apps/searxng/utils/templates/etc/searxng/settings.yml deleted: apps/shift-recorder deleted: apps/stockfill deleted: archive/esphome/data/.gitignore deleted: archive/esphome/data/esphome-garage.yaml deleted: archive/esphome/data/esphome-waynes-room.yaml deleted: core/crowdsec/data/detect.yaml deleted: core/traefik/data/dynamic.yaml deleted: core/traefik/data/plugins.yaml new file: default-environment.env new file: docker new file: last-ip.ini new file: monitoring/docker-exporter/Dockerfile.old new file: monitoring/docker-exporter/exporter.py.old renamed: apps/searxng/searxng_extra/__init__.py -> monitoring/gotify/docker-health-alert/last_unhealthy.txt new file: monitoring/grafana/output.txt new file: monitoring/influxdb/influxd.bolt new file: monitoring/influxdb/influxd.sqlite deleted: monitoring/node-red/data/test-container.sh new file: monitoring/prometheus/prometheus.yml.old new file: monitoring/telegraf/telegraf.conf new file: monitoring/uptime-kuma/result new file: monitoring/uptime-kuma/uptime-kuma/kuma.db new file: tree.out new file: update-containers.log new file: update-firewall.log
This commit is contained in:
@@ -0,0 +1,8 @@
|
||||
# Runtime image for the docker-update Prometheus exporter.
FROM python:3.11-slim

WORKDIR /app

# Install dependencies BEFORE copying the source so code-only changes do not
# invalidate the pip layer; --no-cache-dir keeps the image small.
RUN pip install --no-cache-dir docker prometheus_client requests pyyaml

COPY exporter.py .

CMD ["python", "exporter.py"]
|
||||
@@ -0,0 +1,514 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import json
|
||||
import logging
|
||||
import docker
|
||||
import requests
|
||||
import yaml
|
||||
from prometheus_client import Gauge, start_http_server
|
||||
|
||||
# --- Logging ---
# Log level is taken from the environment; anything unrecognised falls back
# to INFO via the getattr() default below.
LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper()

logging.basicConfig(
    level=getattr(logging, LOG_LEVEL, logging.INFO),
    format="%(asctime)s [%(levelname)s] %(message)s"
)

logger = logging.getLogger("docker-update-exporter")

# --- Config ---
EXPORTER_PORT = 9105          # port the Prometheus metrics endpoint listens on
CHECK_INTERVAL = 60           # seconds between update-check passes
CACHE_TTL = 6 * 3600          # remote-digest cache lifetime (6 hours, seconds)
SERVICES_UP_SCRIPT = "/compose/services-up.sh"   # parsed for PROJECT and FILES=(...)
CACHE_FILE = "/data/remote_digest_cache.json"    # persisted remote-digest cache

# Docker client from the environment (DOCKER_HOST or the local socket);
# connects at import time.
client = docker.from_env()
|
||||
|
||||
# --- Metrics ---
# Per-container gauge: 1 when the running container differs from its compose
# definition or the registry has a newer digest, 0 otherwise. Cleared and
# re-populated on every check pass.
CONTAINER_UPDATE = Gauge(
    "docker_container_update_available",
    "1 if container image is out of date (compose drift or registry), 0 otherwise",
    ["container", "compose_image", "running_image", "com_docker_compose_project"]
)

# Unix timestamp of the most recently completed check pass.
LAST_CHECK = Gauge(
    "docker_image_update_last_check_timestamp",
    "Last time the update check ran (unix timestamp)"
)
|
||||
|
||||
# --- Persistent Cache ---
|
||||
|
||||
def load_cache():
    """Load the persisted remote-digest cache from CACHE_FILE.

    Returns:
        dict: maps an image reference to its cached ``(digest, timestamp)``
        entry. Any failure — missing file, unreadable file, corrupt JSON, or
        JSON that is not an object — yields an empty dict so the exporter
        starts with a cold cache instead of crashing at import time.
    """
    if not os.path.exists(CACHE_FILE):
        logger.info(f"Cache file does not exist yet: {CACHE_FILE}")
        return {}

    try:
        with open(CACHE_FILE, "r") as f:
            cache = json.load(f)
    # Narrow catch: OSError covers filesystem errors, ValueError covers
    # json.JSONDecodeError (its subclass) for corrupt cache contents.
    except (OSError, ValueError) as e:
        logger.error(f"Failed to load cache from {CACHE_FILE}: {e}")
        return {}

    if not isinstance(cache, dict):
        # Valid JSON but wrong shape (e.g. a list); discard rather than
        # crash later when entries are unpacked as (digest, ts) pairs.
        logger.error(
            f"Failed to load cache from {CACHE_FILE}: "
            f"expected object, got {type(cache).__name__}"
        )
        return {}

    logger.info(f"Loaded {len(cache)} cached remote digests")
    logger.debug(f"Cache contents: {cache}")
    return cache
|
||||
|
||||
def save_cache():
    """Persist REMOTE_DIGEST_CACHE to CACHE_FILE as JSON, best effort.

    Failures are logged and swallowed: losing the cache only costs extra
    registry lookups on the next pass.
    """
    cache_dir = os.path.dirname(CACHE_FILE)
    try:
        os.makedirs(cache_dir, exist_ok=True)
        with open(CACHE_FILE, "w") as handle:
            json.dump(REMOTE_DIGEST_CACHE, handle)
    except Exception as e:
        logger.error(f"Failed to save cache to {CACHE_FILE}: {e}")
    else:
        logger.debug(
            f"Saved {len(REMOTE_DIGEST_CACHE)} entries to cache file {CACHE_FILE}"
        )
|
||||
|
||||
# In-memory digest cache, warmed from disk at import time; entries are
# (digest, fetch_timestamp) tuples keyed by image reference.
REMOTE_DIGEST_CACHE = load_cache()
|
||||
|
||||
# --- Helpers ---
|
||||
|
||||
def get_project_prefix_from_script(script_path):
    """Extract the compose project name from the services-up script.

    Scans the script for a shell-style ``PROJECT=...`` assignment and
    returns the value with a trailing "-" appended. Falls back to "core-"
    when the script is missing, unreadable, or has no such assignment.
    """
    fallback = "core-"
    assignment = re.compile(r'PROJECT\s*=\s*["\']?([^"\']+)["\']?')

    if not os.path.exists(script_path):
        logger.warning(
            f"services-up script not found at {script_path}, "
            f"using fallback project prefix {fallback}"
        )
        return fallback

    try:
        with open(script_path, "r") as handle:
            for raw_line in handle:
                match = assignment.match(raw_line.strip())
                if match:
                    prefix = match.group(1) + "-"
                    logger.debug(
                        f"Detected compose project prefix from script: {prefix}"
                    )
                    return prefix
    except Exception as e:
        logger.error(f"Failed reading project prefix from {script_path}: {e}")

    return fallback
|
||||
|
||||
def get_local_digest(image_name):
    """Return the registry digest of the locally pulled image, or None.

    Reads the first entry of the image's ``RepoDigests`` attribute
    ("repo@sha256:..."). None when the image is absent, has no digest
    (e.g. locally built), or the Docker API call fails.
    """
    try:
        image = client.images.get(image_name)
        repo_digests = image.attrs.get("RepoDigests", [])

        logger.debug(f"Local RepoDigests for {image_name}: {repo_digests}")

        if not repo_digests:
            logger.info(f"No local digest found for image {image_name}")
            return None

        digest = repo_digests[0].split("@")[1]
        logger.debug(f"Local digest for {image_name}: {digest}")
        return digest

    except Exception as e:
        logger.warning(f"Failed to retrieve local digest for {image_name}: {e}")
        return None
|
||||
|
||||
# Accept both single-arch and multi-arch (manifest list / OCI index) media
# types so the registry returns the digest matching the local RepoDigest for
# multi-arch images. GHCR in particular serves OCI manifests; requesting only
# the Docker v2 single-manifest type can yield a digest that never matches
# the locally recorded one, producing false "update available" signals.
_MANIFEST_ACCEPT = ", ".join([
    "application/vnd.docker.distribution.manifest.v2+json",
    "application/vnd.docker.distribution.manifest.list.v2+json",
    "application/vnd.oci.image.manifest.v1+json",
    "application/vnd.oci.image.index.v1+json",
])


def _parse_image_ref(image_name):
    """Split an image reference into (registry, repo, tag).

    Bare names get the Docker Hub "library/" namespace; a first path
    component containing "." or ":" is treated as a registry host; the
    tag defaults to "latest".
    """
    if "/" not in image_name:
        registry = "docker.io"
        repo = "library/" + image_name
    else:
        parts = image_name.split("/")
        if "." in parts[0] or ":" in parts[0]:
            registry = parts[0]
            repo = "/".join(parts[1:])
        else:
            registry = "docker.io"
            repo = image_name

    if ":" in repo:
        repo, tag = repo.rsplit(":", 1)
    else:
        tag = "latest"

    return registry, repo, tag


def _registry_auth(registry, repo, tag, original):
    """Obtain an anonymous pull token and manifest URL for the image.

    ``original`` is the full image reference, used only in log messages.
    Returns (token, manifest_url); (None, None) when the registry is
    unsupported or no token is returned.
    """
    if registry in ["docker.io", "registry-1.docker.io"]:
        logger.debug(f"Requesting Docker Hub token for {repo}")

        token_res = requests.get(
            "https://auth.docker.io/token",
            params={
                "service": "registry.docker.io",
                "scope": f"repository:{repo}:pull"
            },
            timeout=10
        )

        logger.debug(
            f"Docker Hub token response for {repo}: "
            f"status={token_res.status_code}"
        )

        token = token_res.json().get("token")
        manifest_url = f"https://registry-1.docker.io/v2/{repo}/manifests/{tag}"

    elif registry == "ghcr.io":
        logger.debug(f"Requesting GHCR token for {repo}")

        token_res = requests.get(
            "https://ghcr.io/token",
            params={
                "service": "ghcr.io",
                "scope": f"repository:{repo}:pull"
            },
            timeout=10
        )

        logger.debug(
            f"GHCR token response for {repo}: "
            f"status={token_res.status_code}"
        )

        token = token_res.json().get("token")
        manifest_url = f"https://ghcr.io/v2/{repo}/manifests/{tag}"

    else:
        logger.warning(
            f"Unsupported registry '{registry}' for image {original}"
        )
        return None, None

    if not token:
        logger.warning(
            f"No authentication token returned for {original}"
        )
        return None, None

    return token, manifest_url


def get_remote_digest(image_name):
    """Return the registry's current digest for ``image_name``, or None.

    Results are cached in REMOTE_DIGEST_CACHE for CACHE_TTL seconds and
    persisted via save_cache() to limit registry traffic (and Docker Hub
    rate-limit hits) across restarts. Only Docker Hub and GHCR are
    supported; everything else returns None.
    """
    now = time.time()
    original = image_name

    # Serve from cache while the entry is younger than CACHE_TTL.
    if original in REMOTE_DIGEST_CACHE:
        digest, ts = REMOTE_DIGEST_CACHE[original]
        age = now - ts

        if age < CACHE_TTL:
            logger.debug(
                f"Using cached remote digest for {original} "
                f"(age={int(age)}s, ttl={CACHE_TTL}s): {digest}"
            )
            return digest

        logger.info(
            f"Cache entry expired for {original} "
            f"(age={int(age)}s > ttl={CACHE_TTL}s)"
        )

    try:
        registry, repo, tag = _parse_image_ref(image_name)

        logger.debug(
            f"Resolving remote digest for {original}: "
            f"registry={registry}, repo={repo}, tag={tag}"
        )

        token, manifest_url = _registry_auth(registry, repo, tag, original)
        if not token:
            return None

        logger.debug(f"Requesting manifest for {original}: {manifest_url}")

        res = requests.get(
            manifest_url,
            headers={
                "Authorization": f"Bearer {token}",
                "Accept": _MANIFEST_ACCEPT
            },
            timeout=10
        )

        logger.debug(
            f"Manifest response for {original}: "
            f"status={res.status_code}"
        )

        if res.status_code == 200:
            digest = res.headers.get("Docker-Content-Digest")

            logger.info(
                f"Fetched remote digest for {original}: {digest}"
            )

            REMOTE_DIGEST_CACHE[original] = (digest, now)
            save_cache()

            logger.debug(
                f"Cached remote digest for {original}: {digest}"
            )

            return digest

        if res.status_code == 429:
            logger.warning(
                f"Registry rate limit hit while fetching {original}"
            )
        elif res.status_code in [401, 403]:
            logger.warning(
                f"Authentication failed while fetching {original}: "
                f"status={res.status_code}"
            )
        else:
            logger.warning(
                f"Unexpected manifest response for {original}: "
                f"status={res.status_code}, body={res.text[:250]}"
            )

    except Exception as e:
        logger.error(f"Failed to fetch remote digest for {original}: {e}")

    return None
|
||||
|
||||
def get_compose_files_from_script(script_path):
    """Parse the services-up script's ``FILES=( ... )`` array.

    Each ``-f <path>`` entry is resolved relative to the script's own
    directory and normalized. Returns an empty list when the script or
    the FILES block is missing, or on any read/parse failure.
    """
    compose_files = []

    if not os.path.exists(script_path):
        logger.error(f"services-up script not found: {script_path}")
        return compose_files

    base_dir = os.path.dirname(script_path)

    try:
        with open(script_path, "r") as handle:
            script_text = handle.read()

        files_block = re.search(r'FILES\s*=\s*\((.*?)\)', script_text, re.DOTALL)
        if files_block is None:
            logger.warning(
                f"No FILES=(...) block found in {script_path}"
            )
            return compose_files

        for raw_line in files_block.group(1).splitlines():
            entry = raw_line.strip()
            if not entry.startswith("-f"):
                continue

            rel_path = entry[2:].strip()
            if not rel_path:
                continue

            full_path = os.path.normpath(os.path.join(base_dir, rel_path))
            logger.debug(
                f"Resolved compose file: {rel_path} -> {full_path}"
            )
            compose_files.append(full_path)

        logger.info(f"Found {len(compose_files)} compose files")

    except Exception as e:
        logger.error(f"Failed parsing compose files from {script_path}: {e}")

    return compose_files
|
||||
|
||||
def parse_compose_files(compose_files):
    """Map compose service names to their expected images.

    Returns a dict of service_name -> (image, is_built), where is_built is
    True for services defined with a ``build:`` section instead of
    ``image:``. For built services the upstream base image is recovered,
    when possible, from a ``LABEL ... image=...`` line in the service's
    Dockerfile; otherwise it defaults to "<service_name>:latest".

    NOTE(review): a service declaring BOTH image: and build: is treated as
    image-based here (is_built stays False) — confirm that matches how
    services-up actually builds it.
    """
    service_to_image = {}

    for f in compose_files:
        if not os.path.exists(f):
            logger.warning(f"Compose file missing: {f}")
            continue

        try:
            with open(f, "r") as stream:
                data = yaml.safe_load(stream) or {}
            services = data.get("services", {})

            logger.debug(
                f"Parsing {len(services)} services from compose file {f}"
            )

            for service_name, service_def in services.items():
                image = service_def.get("image")
                is_built = False

                if not image:
                    # No image: key — treat as a build-based service and try
                    # to locate its Dockerfile from the build config.
                    is_built = True
                    build_ctx = service_def.get("build")

                    logger.debug(
                        f"Service {service_name} is build-based, build config={build_ctx}"
                    )

                    # build: may be a mapping (context/dockerfile keys), a
                    # bare context string, or absent entirely.
                    if isinstance(build_ctx, dict):
                        context_path = build_ctx.get("context", ".")
                        dockerfile_path = os.path.join(
                            context_path,
                            build_ctx.get("dockerfile", "Dockerfile")
                        )
                    elif isinstance(build_ctx, str):
                        context_path = build_ctx
                        dockerfile_path = os.path.join(
                            context_path, "Dockerfile"
                        )
                    else:
                        dockerfile_path = None

                    # NOTE(review): context paths are resolved relative to the
                    # exporter's CWD, not the compose file's directory —
                    # confirm the container's working directory makes these
                    # paths valid.
                    if dockerfile_path and os.path.exists(dockerfile_path):
                        try:
                            with open(dockerfile_path, "r") as df:
                                for line in df:
                                    line = line.strip()
                                    # First LABEL line carrying image=... wins.
                                    if (
                                        line.upper().startswith("LABEL")
                                        and "image=" in line
                                    ):
                                        m = re.search(
                                            r'image=["\']?([^"\']+)["\']?',
                                            line
                                        )
                                        if m:
                                            image = m.group(1)
                                            logger.debug(
                                                f"Found upstream image label for {service_name}: {image}"
                                            )
                                            break
                        except Exception as e:
                            logger.warning(
                                f"Failed reading Dockerfile {dockerfile_path}: {e}"
                            )

                    if not image:
                        # Last resort: assume the built image is named after
                        # the service.
                        image = f"{service_name}:latest"
                        logger.info(
                            f"No image label found for build service {service_name}, "
                            f"defaulting to {image}"
                        )

                service_to_image[service_name] = (image, is_built)

        except Exception as e:
            logger.error(f"Failed parsing compose file {f}: {e}")

    logger.info(f"Mapped {len(service_to_image)} compose services to images")
    logger.debug(f"Service/image mapping: {service_to_image}")

    return service_to_image
|
||||
|
||||
def check_containers():
    """Run one full update-check pass over all running compose containers.

    For each container managed by docker compose, sets the
    CONTAINER_UPDATE gauge to 1 when either (a) the running image name
    drifts from the compose definition, or (b) the local image digest
    differs from the registry's current digest. Finishes by stamping
    LAST_CHECK.
    """
    logger.info("Starting container update check")

    # Drop all previous label sets so containers that disappeared do not
    # keep exporting stale series.
    CONTAINER_UPDATE.clear()

    project_prefix = get_project_prefix_from_script(SERVICES_UP_SCRIPT)
    compose_files = get_compose_files_from_script(SERVICES_UP_SCRIPT)
    service_to_image = parse_compose_files(compose_files)

    containers = client.containers.list()
    logger.info(f"Checking {len(containers)} running containers")

    for container in containers:
        project_label = container.labels.get("com.docker.compose.project")

        # Only compose-managed containers are checked.
        if not project_label:
            logger.debug(
                f"Skipping non-compose container {container.name}"
            )
            continue

        service_label = container.labels.get("com.docker.compose.service")
        running_image = container.attrs["Config"]["Image"]

        logger.debug(
            f"Evaluating container={container.name}, "
            f"service={service_label}, project={project_label}, "
            f"running_image={running_image}"
        )

        compose_image = None
        is_built = False

        if service_label and service_label in service_to_image:
            compose_image, is_built = service_to_image[service_label]

            if is_built:
                # Built services: compose names the local image
                # "<project_prefix><name>" (tag stripped).
                compose_image_name, _, _ = compose_image.partition(":")
                compose_image = f"{project_prefix}{compose_image_name}"

        update_flag = 0

        if is_built:
            # Build-based: compare the running image name against the
            # expected local build name first...
            if running_image != compose_image:
                logger.info(
                    f"Update detected for build-based container {container.name}: "
                    f"running image {running_image} != expected {compose_image}"
                )
                update_flag = 1
            else:
                # ...then compare digests of the labelled upstream base image.
                local_digest = get_local_digest(running_image)
                remote_digest = get_remote_digest(
                    service_to_image[service_label][0]
                )

                if local_digest and remote_digest and local_digest != remote_digest:
                    logger.info(
                        f"Remote image update available for {container.name}: "
                        f"{local_digest} != {remote_digest}"
                    )
                    update_flag = 1
        else:
            # Image-based: name drift against the compose file, then
            # registry digest drift.
            # NOTE(review): a compose container whose service has no mapping
            # leaves compose_image = None, so this branch always flags it as
            # drift — confirm that is intended.
            if running_image != compose_image:
                logger.info(
                    f"Compose drift detected for {container.name}: "
                    f"running image {running_image} != compose image {compose_image}"
                )
                update_flag = 1
            else:
                local_digest = get_local_digest(running_image)
                remote_digest = get_remote_digest(running_image)

                if local_digest and remote_digest and local_digest != remote_digest:
                    logger.info(
                        f"Registry update available for {container.name}: "
                        f"{local_digest} != {remote_digest}"
                    )
                    update_flag = 1

        CONTAINER_UPDATE.labels(
            container=container.name,
            compose_image=compose_image if compose_image else "unknown",
            running_image=running_image,
            com_docker_compose_project=project_label
        ).set(update_flag)

        logger.info(
            f"Container {container.name}: "
            f"running={running_image}, "
            f"compose={compose_image}, "
            f"update_available={update_flag}"
        )

    LAST_CHECK.set(time.time())
    logger.info("Container update check complete")
|
||||
|
||||
if __name__ == "__main__":
    logger.info(
        f"Starting Docker update exporter on port {EXPORTER_PORT} "
        f"with LOG_LEVEL={LOG_LEVEL}"
    )

    # Expose /metrics, then poll forever.
    start_http_server(EXPORTER_PORT)

    while True:
        try:
            check_containers()
        except Exception as e:
            # Keep the exporter alive across any failure; logger.exception
            # records the full traceback for diagnosis.
            logger.exception(f"Unhandled error during update check: {e}")

        time.sleep(CHECK_INTERVAL)
|
||||
Reference in New Issue
Block a user