modified: .gitignore

new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/add-environment-setup-in-conftest.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/add-logging-to-geocode.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/add-logging-to-route_metrics.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/add-logging-to-tracking-simulator.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/extend-sqlite-tuning-in-database.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/fix-route-handling-in-routing.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/handle-api-response-errors-in-routing.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/refactor-database-path-handling-in-database.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/update-fcm-message-construction-in-notifications.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/update-role-check-in-ws.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/logs/refs/heads/codex/update-user-seed-in-database.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/add-environment-setup-in-conftest.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/add-logging-to-geocode.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/add-logging-to-route_metrics.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/add-logging-to-tracking-simulator.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/extend-sqlite-tuning-in-database.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/fix-route-handling-in-routing.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/handle-api-response-errors-in-routing.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/refactor-database-path-handling-in-database.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/update-fcm-message-construction-in-notifications.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/update-role-check-in-ws.py
	new file:   apps/gitea/data/git/repositories/beatzaplenty/limo-booking-app.git/refs/heads/codex/update-user-seed-in-database.py
	renamed:    gitea/docker-compose.yml -> apps/gitea/docker-compose.yml
	new file:   apps/gramps/docker-compose.yml
	renamed:    nextcloud/Dockerfile -> apps/nextcloud/Dockerfile
	new file:   apps/nextcloud/docker-compose.yml
	renamed:    passbolt/Dockerfile -> apps/passbolt/Dockerfile
	renamed:    passbolt/docker-compose.yml -> apps/passbolt/docker-compose.yml
	renamed:    searxng/Dockerfile -> apps/searxng/Dockerfile
	renamed:    searxng/docker-compose.yml -> apps/searxng/docker-compose.yml
	renamed:    searxng/dockerfiles/docker-entrypoint.sh -> apps/searxng/dockerfiles/docker-entrypoint.sh
	renamed:    searxng/docs/conf.py -> apps/searxng/docs/conf.py
	renamed:    searxng/docs/user/.gitignore -> apps/searxng/docs/user/.gitignore
	renamed:    searxng/examples/basic_engine.py -> apps/searxng/examples/basic_engine.py
	renamed:    searxng/searx/__init__.py -> apps/searxng/searx/__init__.py
	renamed:    searxng/searx/answerers/__init__.py -> apps/searxng/searx/answerers/__init__.py
	renamed:    searxng/searx/answerers/random/answerer.py -> apps/searxng/searx/answerers/random/answerer.py
	renamed:    searxng/searx/answerers/statistics/answerer.py -> apps/searxng/searx/answerers/statistics/answerer.py
	renamed:    searxng/searx/autocomplete.py -> apps/searxng/searx/autocomplete.py
	renamed:    searxng/searx/babel_extract.py -> apps/searxng/searx/babel_extract.py
	renamed:    searxng/searx/botdetection/__init__.py -> apps/searxng/searx/botdetection/__init__.py
	renamed:    searxng/searx/botdetection/_helpers.py -> apps/searxng/searx/botdetection/_helpers.py
	renamed:    searxng/searx/botdetection/http_accept.py -> apps/searxng/searx/botdetection/http_accept.py
	renamed:    searxng/searx/botdetection/http_accept_encoding.py -> apps/searxng/searx/botdetection/http_accept_encoding.py
	renamed:    searxng/searx/botdetection/http_accept_language.py -> apps/searxng/searx/botdetection/http_accept_language.py
	renamed:    searxng/searx/botdetection/http_connection.py -> apps/searxng/searx/botdetection/http_connection.py
	renamed:    searxng/searx/botdetection/http_user_agent.py -> apps/searxng/searx/botdetection/http_user_agent.py
	renamed:    searxng/searx/botdetection/ip_limit.py -> apps/searxng/searx/botdetection/ip_limit.py
	renamed:    searxng/searx/botdetection/ip_lists.py -> apps/searxng/searx/botdetection/ip_lists.py
	renamed:    searxng/searx/botdetection/limiter.py -> apps/searxng/searx/botdetection/limiter.py
	renamed:    searxng/searx/botdetection/link_token.py -> apps/searxng/searx/botdetection/link_token.py
	renamed:    searxng/searx/compat.py -> apps/searxng/searx/compat.py
	renamed:    searxng/searx/data/__init__.py -> apps/searxng/searx/data/__init__.py
	renamed:    searxng/searx/enginelib/__init__.py -> apps/searxng/searx/enginelib/__init__.py
	renamed:    searxng/searx/enginelib/traits.py -> apps/searxng/searx/enginelib/traits.py
	renamed:    searxng/searx/engines/1337x.py -> apps/searxng/searx/engines/1337x.py
	renamed:    searxng/searx/engines/9gag.py -> apps/searxng/searx/engines/9gag.py
	renamed:    searxng/searx/engines/__init__.py -> apps/searxng/searx/engines/__init__.py
	renamed:    searxng/searx/engines/ahmia.py -> apps/searxng/searx/engines/ahmia.py
	renamed:    searxng/searx/engines/annas_archive.py -> apps/searxng/searx/engines/annas_archive.py
	renamed:    searxng/searx/engines/apkmirror.py -> apps/searxng/searx/engines/apkmirror.py
	renamed:    searxng/searx/engines/apple_app_store.py -> apps/searxng/searx/engines/apple_app_store.py
	renamed:    searxng/searx/engines/apple_maps.py -> apps/searxng/searx/engines/apple_maps.py
	renamed:    searxng/searx/engines/archlinux.py -> apps/searxng/searx/engines/archlinux.py
	renamed:    searxng/searx/engines/artic.py -> apps/searxng/searx/engines/artic.py
	renamed:    searxng/searx/engines/arxiv.py -> apps/searxng/searx/engines/arxiv.py
	renamed:    searxng/searx/engines/bandcamp.py -> apps/searxng/searx/engines/bandcamp.py
	renamed:    searxng/searx/engines/base.py -> apps/searxng/searx/engines/base.py
	renamed:    searxng/searx/engines/bing.py -> apps/searxng/searx/engines/bing.py
	renamed:    searxng/searx/engines/bing_images.py -> apps/searxng/searx/engines/bing_images.py
	renamed:    searxng/searx/engines/bing_news.py -> apps/searxng/searx/engines/bing_news.py
	renamed:    searxng/searx/engines/bing_videos.py -> apps/searxng/searx/engines/bing_videos.py
	renamed:    searxng/searx/engines/brave.py -> apps/searxng/searx/engines/brave.py
	renamed:    searxng/searx/engines/bt4g.py -> apps/searxng/searx/engines/bt4g.py
	renamed:    searxng/searx/engines/btdigg.py -> apps/searxng/searx/engines/btdigg.py
	renamed:    searxng/searx/engines/command.py -> apps/searxng/searx/engines/command.py
	renamed:    searxng/searx/engines/core.py -> apps/searxng/searx/engines/core.py
	renamed:    searxng/searx/engines/crossref.py -> apps/searxng/searx/engines/crossref.py
	renamed:    searxng/searx/engines/currency_convert.py -> apps/searxng/searx/engines/currency_convert.py
	renamed:    searxng/searx/engines/dailymotion.py -> apps/searxng/searx/engines/dailymotion.py
	renamed:    searxng/searx/engines/deepl.py -> apps/searxng/searx/engines/deepl.py
	renamed:    searxng/searx/engines/deezer.py -> apps/searxng/searx/engines/deezer.py
	renamed:    searxng/searx/engines/demo_offline.py -> apps/searxng/searx/engines/demo_offline.py
	renamed:    searxng/searx/engines/demo_online.py -> apps/searxng/searx/engines/demo_online.py
	renamed:    searxng/searx/engines/deviantart.py -> apps/searxng/searx/engines/deviantart.py
	renamed:    searxng/searx/engines/dictzone.py -> apps/searxng/searx/engines/dictzone.py
	renamed:    searxng/searx/engines/digbt.py -> apps/searxng/searx/engines/digbt.py
	renamed:    searxng/searx/engines/docker_hub.py -> apps/searxng/searx/engines/docker_hub.py
	renamed:    searxng/searx/engines/doku.py -> apps/searxng/searx/engines/doku.py
	renamed:    searxng/searx/engines/duckduckgo.py -> apps/searxng/searx/engines/duckduckgo.py
	renamed:    searxng/searx/engines/duckduckgo_definitions.py -> apps/searxng/searx/engines/duckduckgo_definitions.py
	renamed:    searxng/searx/engines/duckduckgo_images.py -> apps/searxng/searx/engines/duckduckgo_images.py
	renamed:    searxng/searx/engines/duckduckgo_weather.py -> apps/searxng/searx/engines/duckduckgo_weather.py
	renamed:    searxng/searx/engines/duden.py -> apps/searxng/searx/engines/duden.py
	renamed:    searxng/searx/engines/dummy-offline.py -> apps/searxng/searx/engines/dummy-offline.py
	renamed:    searxng/searx/engines/dummy.py -> apps/searxng/searx/engines/dummy.py
	renamed:    searxng/searx/engines/ebay.py -> apps/searxng/searx/engines/ebay.py
	renamed:    searxng/searx/engines/elasticsearch.py -> apps/searxng/searx/engines/elasticsearch.py
	renamed:    searxng/searx/engines/emojipedia.py -> apps/searxng/searx/engines/emojipedia.py
	renamed:    searxng/searx/engines/fdroid.py -> apps/searxng/searx/engines/fdroid.py
	renamed:    searxng/searx/engines/flickr.py -> apps/searxng/searx/engines/flickr.py
	renamed:    searxng/searx/engines/flickr_noapi.py -> apps/searxng/searx/engines/flickr_noapi.py
	renamed:    searxng/searx/engines/framalibre.py -> apps/searxng/searx/engines/framalibre.py
	renamed:    searxng/searx/engines/freesound.py -> apps/searxng/searx/engines/freesound.py
	renamed:    searxng/searx/engines/frinkiac.py -> apps/searxng/searx/engines/frinkiac.py
	renamed:    searxng/searx/engines/genius.py -> apps/searxng/searx/engines/genius.py
	renamed:    searxng/searx/engines/gentoo.py -> apps/searxng/searx/engines/gentoo.py
	renamed:    searxng/searx/engines/github.py -> apps/searxng/searx/engines/github.py
	renamed:    searxng/searx/engines/google.py -> apps/searxng/searx/engines/google.py
	renamed:    searxng/searx/engines/google_images.py -> apps/searxng/searx/engines/google_images.py
	renamed:    searxng/searx/engines/google_news.py -> apps/searxng/searx/engines/google_news.py
	renamed:    searxng/searx/engines/google_play.py -> apps/searxng/searx/engines/google_play.py
	renamed:    searxng/searx/engines/google_scholar.py -> apps/searxng/searx/engines/google_scholar.py
	renamed:    searxng/searx/engines/google_videos.py -> apps/searxng/searx/engines/google_videos.py
	renamed:    searxng/searx/engines/imdb.py -> apps/searxng/searx/engines/imdb.py
	renamed:    searxng/searx/engines/ina.py -> apps/searxng/searx/engines/ina.py
	renamed:    searxng/searx/engines/invidious.py -> apps/searxng/searx/engines/invidious.py
	renamed:    searxng/searx/engines/jisho.py -> apps/searxng/searx/engines/jisho.py
	renamed:    searxng/searx/engines/json_engine.py -> apps/searxng/searx/engines/json_engine.py
	renamed:    searxng/searx/engines/kickass.py -> apps/searxng/searx/engines/kickass.py
	renamed:    searxng/searx/engines/lemmy.py -> apps/searxng/searx/engines/lemmy.py
	renamed:    searxng/searx/engines/lingva.py -> apps/searxng/searx/engines/lingva.py
	renamed:    searxng/searx/engines/loc.py -> apps/searxng/searx/engines/loc.py
	renamed:    searxng/searx/engines/mediathekviewweb.py -> apps/searxng/searx/engines/mediathekviewweb.py
	renamed:    searxng/searx/engines/mediawiki.py -> apps/searxng/searx/engines/mediawiki.py
	renamed:    searxng/searx/engines/meilisearch.py -> apps/searxng/searx/engines/meilisearch.py
	renamed:    searxng/searx/engines/metacpan.py -> apps/searxng/searx/engines/metacpan.py
	renamed:    searxng/searx/engines/mixcloud.py -> apps/searxng/searx/engines/mixcloud.py
	renamed:    searxng/searx/engines/mongodb.py -> apps/searxng/searx/engines/mongodb.py
	renamed:    searxng/searx/engines/mysql_server.py -> apps/searxng/searx/engines/mysql_server.py
	renamed:    searxng/searx/engines/nyaa.py -> apps/searxng/searx/engines/nyaa.py
	renamed:    searxng/searx/engines/opensemantic.py -> apps/searxng/searx/engines/opensemantic.py
	renamed:    searxng/searx/engines/openstreetmap.py -> apps/searxng/searx/engines/openstreetmap.py
	renamed:    searxng/searx/engines/openverse.py -> apps/searxng/searx/engines/openverse.py
	renamed:    searxng/searx/engines/pdbe.py -> apps/searxng/searx/engines/pdbe.py
	renamed:    searxng/searx/engines/peertube.py -> apps/searxng/searx/engines/peertube.py
	renamed:    searxng/searx/engines/photon.py -> apps/searxng/searx/engines/photon.py
	renamed:    searxng/searx/engines/piped.py -> apps/searxng/searx/engines/piped.py
	renamed:    searxng/searx/engines/piratebay.py -> apps/searxng/searx/engines/piratebay.py
	renamed:    searxng/searx/engines/postgresql.py -> apps/searxng/searx/engines/postgresql.py
	renamed:    searxng/searx/engines/pubmed.py -> apps/searxng/searx/engines/pubmed.py
	renamed:    searxng/searx/engines/qwant.py -> apps/searxng/searx/engines/qwant.py
	renamed:    searxng/searx/engines/recoll.py -> apps/searxng/searx/engines/recoll.py
	renamed:    searxng/searx/engines/reddit.py -> apps/searxng/searx/engines/reddit.py
	renamed:    searxng/searx/engines/redis_server.py -> apps/searxng/searx/engines/redis_server.py
	renamed:    searxng/searx/engines/rumble.py -> apps/searxng/searx/engines/rumble.py
	renamed:    searxng/searx/engines/scanr_structures.py -> apps/searxng/searx/engines/scanr_structures.py
	renamed:    searxng/searx/engines/searchcode_code.py -> apps/searxng/searx/engines/searchcode_code.py
	renamed:    searxng/searx/engines/searx_engine.py -> apps/searxng/searx/engines/searx_engine.py
	renamed:    searxng/searx/engines/semantic_scholar.py -> apps/searxng/searx/engines/semantic_scholar.py
	renamed:    searxng/searx/engines/sepiasearch.py -> apps/searxng/searx/engines/sepiasearch.py
	renamed:    searxng/searx/engines/seznam.py -> apps/searxng/searx/engines/seznam.py
	renamed:    searxng/searx/engines/sjp.py -> apps/searxng/searx/engines/sjp.py
	renamed:    searxng/searx/engines/solidtorrents.py -> apps/searxng/searx/engines/solidtorrents.py
	renamed:    searxng/searx/engines/solr.py -> apps/searxng/searx/engines/solr.py
	renamed:    searxng/searx/engines/soundcloud.py -> apps/searxng/searx/engines/soundcloud.py
	renamed:    searxng/searx/engines/spotify.py -> apps/searxng/searx/engines/spotify.py
	renamed:    searxng/searx/engines/springer.py -> apps/searxng/searx/engines/springer.py
	renamed:    searxng/searx/engines/sqlite.py -> apps/searxng/searx/engines/sqlite.py
	renamed:    searxng/searx/engines/stackexchange.py -> apps/searxng/searx/engines/stackexchange.py
	renamed:    searxng/searx/engines/startpage.py -> apps/searxng/searx/engines/startpage.py
	renamed:    searxng/searx/engines/tagesschau.py -> apps/searxng/searx/engines/tagesschau.py
	renamed:    searxng/searx/engines/tineye.py -> apps/searxng/searx/engines/tineye.py
	renamed:    searxng/searx/engines/tokyotoshokan.py -> apps/searxng/searx/engines/tokyotoshokan.py
	renamed:    searxng/searx/engines/torznab.py -> apps/searxng/searx/engines/torznab.py
	renamed:    searxng/searx/engines/translated.py -> apps/searxng/searx/engines/translated.py
	renamed:    searxng/searx/engines/twitter.py -> apps/searxng/searx/engines/twitter.py
	renamed:    searxng/searx/engines/unsplash.py -> apps/searxng/searx/engines/unsplash.py
	renamed:    searxng/searx/engines/vimeo.py -> apps/searxng/searx/engines/vimeo.py
	renamed:    searxng/searx/engines/wikidata.py -> apps/searxng/searx/engines/wikidata.py
	renamed:    searxng/searx/engines/wikipedia.py -> apps/searxng/searx/engines/wikipedia.py
	renamed:    searxng/searx/engines/wolframalpha_api.py -> apps/searxng/searx/engines/wolframalpha_api.py
	renamed:    searxng/searx/engines/wolframalpha_noapi.py -> apps/searxng/searx/engines/wolframalpha_noapi.py
	renamed:    searxng/searx/engines/wordnik.py -> apps/searxng/searx/engines/wordnik.py
	renamed:    searxng/searx/engines/wttr.py -> apps/searxng/searx/engines/wttr.py
	renamed:    searxng/searx/engines/www1x.py -> apps/searxng/searx/engines/www1x.py
	renamed:    searxng/searx/engines/xpath.py -> apps/searxng/searx/engines/xpath.py
	renamed:    searxng/searx/engines/yacy.py -> apps/searxng/searx/engines/yacy.py
	renamed:    searxng/searx/engines/yahoo.py -> apps/searxng/searx/engines/yahoo.py
	renamed:    searxng/searx/engines/yahoo_news.py -> apps/searxng/searx/engines/yahoo_news.py
	renamed:    searxng/searx/engines/youtube_api.py -> apps/searxng/searx/engines/youtube_api.py
	renamed:    searxng/searx/engines/youtube_noapi.py -> apps/searxng/searx/engines/youtube_noapi.py
	renamed:    searxng/searx/engines/zlibrary.py -> apps/searxng/searx/engines/zlibrary.py
	renamed:    searxng/searx/exceptions.py -> apps/searxng/searx/exceptions.py
	renamed:    searxng/searx/external_bang.py -> apps/searxng/searx/external_bang.py
	renamed:    searxng/searx/external_urls.py -> apps/searxng/searx/external_urls.py
	renamed:    searxng/searx/flaskfix.py -> apps/searxng/searx/flaskfix.py
	renamed:    searxng/searx/infopage/__init__.py -> apps/searxng/searx/infopage/__init__.py
	renamed:    searxng/searx/locales.py -> apps/searxng/searx/locales.py
	renamed:    searxng/searx/metrics/__init__.py -> apps/searxng/searx/metrics/__init__.py
	renamed:    searxng/searx/metrics/error_recorder.py -> apps/searxng/searx/metrics/error_recorder.py
	renamed:    searxng/searx/metrics/models.py -> apps/searxng/searx/metrics/models.py
	renamed:    searxng/searx/network/__init__.py -> apps/searxng/searx/network/__init__.py
	renamed:    searxng/searx/network/client.py -> apps/searxng/searx/network/client.py
	renamed:    searxng/searx/network/network.py -> apps/searxng/searx/network/network.py
	renamed:    searxng/searx/network/raise_for_httperror.py -> apps/searxng/searx/network/raise_for_httperror.py
	renamed:    searxng/searx/plugins/__init__.py -> apps/searxng/searx/plugins/__init__.py
	renamed:    searxng/searx/plugins/ahmia_filter.py -> apps/searxng/searx/plugins/ahmia_filter.py
	renamed:    searxng/searx/plugins/hash_plugin.py -> apps/searxng/searx/plugins/hash_plugin.py
	renamed:    searxng/searx/plugins/hostname_replace.py -> apps/searxng/searx/plugins/hostname_replace.py
	renamed:    searxng/searx/plugins/limiter.py -> apps/searxng/searx/plugins/limiter.py
	renamed:    searxng/searx/plugins/oa_doi_rewrite.py -> apps/searxng/searx/plugins/oa_doi_rewrite.py
	renamed:    searxng/searx/plugins/search_on_category_select.py -> apps/searxng/searx/plugins/search_on_category_select.py
	renamed:    searxng/searx/plugins/self_info.py -> apps/searxng/searx/plugins/self_info.py
	renamed:    searxng/searx/plugins/tor_check.py -> apps/searxng/searx/plugins/tor_check.py
	renamed:    searxng/searx/plugins/tracker_url_remover.py -> apps/searxng/searx/plugins/tracker_url_remover.py
	renamed:    searxng/searx/plugins/vim_hotkeys.py -> apps/searxng/searx/plugins/vim_hotkeys.py
	renamed:    searxng/searx/preferences.py -> apps/searxng/searx/preferences.py
	renamed:    searxng/searx/query.py -> apps/searxng/searx/query.py
	renamed:    searxng/searx/redisdb.py -> apps/searxng/searx/redisdb.py
	renamed:    searxng/searx/redislib.py -> apps/searxng/searx/redislib.py
	renamed:    searxng/searx/results.py -> apps/searxng/searx/results.py
	renamed:    searxng/searx/search/__init__.py -> apps/searxng/searx/search/__init__.py
	renamed:    searxng/searx/search/checker/__init__.py -> apps/searxng/searx/search/checker/__init__.py
	renamed:    searxng/searx/search/checker/__main__.py -> apps/searxng/searx/search/checker/__main__.py
	renamed:    searxng/searx/search/checker/background.py -> apps/searxng/searx/search/checker/background.py
	renamed:    searxng/searx/search/checker/impl.py -> apps/searxng/searx/search/checker/impl.py
	renamed:    searxng/searx/search/checker/scheduler.py -> apps/searxng/searx/search/checker/scheduler.py
	renamed:    searxng/searx/search/models.py -> apps/searxng/searx/search/models.py
	renamed:    searxng/searx/search/processors/__init__.py -> apps/searxng/searx/search/processors/__init__.py
	renamed:    searxng/searx/search/processors/abstract.py -> apps/searxng/searx/search/processors/abstract.py
	renamed:    searxng/searx/search/processors/offline.py -> apps/searxng/searx/search/processors/offline.py
	renamed:    searxng/searx/search/processors/online.py -> apps/searxng/searx/search/processors/online.py
	renamed:    searxng/searx/search/processors/online_currency.py -> apps/searxng/searx/search/processors/online_currency.py
	renamed:    searxng/searx/search/processors/online_dictionary.py -> apps/searxng/searx/search/processors/online_dictionary.py
	renamed:    searxng/searx/search/processors/online_url_search.py -> apps/searxng/searx/search/processors/online_url_search.py
	renamed:    searxng/searx/settings.yml -> apps/searxng/searx/settings.yml
	renamed:    searxng/searx/settings_defaults.py -> apps/searxng/searx/settings_defaults.py
	renamed:    searxng/searx/settings_loader.py -> apps/searxng/searx/settings_loader.py
	renamed:    searxng/searx/static/plugins/external_plugins/.gitignore -> apps/searxng/searx/static/plugins/external_plugins/.gitignore
	renamed:    searxng/searx/static/themes/simple/.gitattributes -> apps/searxng/searx/static/themes/simple/.gitattributes
	renamed:    searxng/searx/static/themes/simple/.gitignore -> apps/searxng/searx/static/themes/simple/.gitignore
	renamed:    searxng/searx/sxng_locales.py -> apps/searxng/searx/sxng_locales.py
	renamed:    searxng/searx/tools/__init__.py -> apps/searxng/searx/tools/__init__.py
	renamed:    searxng/searx/tools/config.py -> apps/searxng/searx/tools/config.py
	renamed:    searxng/searx/unixthreadname.py -> apps/searxng/searx/unixthreadname.py
	renamed:    searxng/searx/utils.py -> apps/searxng/searx/utils.py
	renamed:    searxng/searx/version.py -> apps/searxng/searx/version.py
	renamed:    searxng/searx/webadapter.py -> apps/searxng/searx/webadapter.py
	renamed:    searxng/searx/webapp.py -> apps/searxng/searx/webapp.py
	renamed:    searxng/searx/webutils.py -> apps/searxng/searx/webutils.py
	renamed:    searxng/searxng_extra/__init__.py -> apps/searxng/searxng_extra/__init__.py
	renamed:    searxng/searxng_extra/standalone_searx.py -> apps/searxng/searxng_extra/standalone_searx.py
	renamed:    searxng/searxng_extra/update/__init__.py -> apps/searxng/searxng_extra/update/__init__.py
	renamed:    searxng/searxng_extra/update/update_ahmia_blacklist.py -> apps/searxng/searxng_extra/update/update_ahmia_blacklist.py
	renamed:    searxng/searxng_extra/update/update_currencies.py -> apps/searxng/searxng_extra/update/update_currencies.py
	renamed:    searxng/searxng_extra/update/update_engine_descriptions.py -> apps/searxng/searxng_extra/update/update_engine_descriptions.py
	renamed:    searxng/searxng_extra/update/update_engine_traits.py -> apps/searxng/searxng_extra/update/update_engine_traits.py
	renamed:    searxng/searxng_extra/update/update_external_bangs.py -> apps/searxng/searxng_extra/update/update_external_bangs.py
	renamed:    searxng/searxng_extra/update/update_firefox_version.py -> apps/searxng/searxng_extra/update/update_firefox_version.py
	renamed:    searxng/searxng_extra/update/update_osm_keys_tags.py -> apps/searxng/searxng_extra/update/update_osm_keys_tags.py
	renamed:    searxng/searxng_extra/update/update_pygments.py -> apps/searxng/searxng_extra/update/update_pygments.py
	renamed:    searxng/searxng_extra/update/update_wikidata_units.py -> apps/searxng/searxng_extra/update/update_wikidata_units.py
	renamed:    searxng/setup.py -> apps/searxng/setup.py
	renamed:    searxng/tests/__init__.py -> apps/searxng/tests/__init__.py
	renamed:    searxng/tests/robot/__init__.py -> apps/searxng/tests/robot/__init__.py
	renamed:    searxng/tests/robot/__main__.py -> apps/searxng/tests/robot/__main__.py
	renamed:    searxng/tests/robot/settings_robot.yml -> apps/searxng/tests/robot/settings_robot.yml
	renamed:    searxng/tests/robot/test_webapp.py -> apps/searxng/tests/robot/test_webapp.py
	renamed:    searxng/tests/unit/__init__.py -> apps/searxng/tests/unit/__init__.py
	renamed:    searxng/tests/unit/engines/test_command.py -> apps/searxng/tests/unit/engines/test_command.py
	renamed:    searxng/tests/unit/engines/test_xpath.py -> apps/searxng/tests/unit/engines/test_xpath.py
	renamed:    searxng/tests/unit/network/__init__.py -> apps/searxng/tests/unit/network/__init__.py
	renamed:    searxng/tests/unit/network/test_network.py -> apps/searxng/tests/unit/network/test_network.py
	renamed:    searxng/tests/unit/settings/empty_settings.yml -> apps/searxng/tests/unit/settings/empty_settings.yml
	renamed:    searxng/tests/unit/settings/syntaxerror_settings.yml -> apps/searxng/tests/unit/settings/syntaxerror_settings.yml
	renamed:    searxng/tests/unit/settings/test_settings.yml -> apps/searxng/tests/unit/settings/test_settings.yml
	renamed:    searxng/tests/unit/settings/user_settings.yml -> apps/searxng/tests/unit/settings/user_settings.yml
	renamed:    searxng/tests/unit/settings/user_settings_keep_only.yml -> apps/searxng/tests/unit/settings/user_settings_keep_only.yml
	renamed:    searxng/tests/unit/settings/user_settings_remove.yml -> apps/searxng/tests/unit/settings/user_settings_remove.yml
	renamed:    searxng/tests/unit/settings/user_settings_remove2.yml -> apps/searxng/tests/unit/settings/user_settings_remove2.yml
	renamed:    searxng/tests/unit/settings/user_settings_simple.yml -> apps/searxng/tests/unit/settings/user_settings_simple.yml
	renamed:    searxng/tests/unit/test_answerers.py -> apps/searxng/tests/unit/test_answerers.py
	renamed:    searxng/tests/unit/test_engines_init.py -> apps/searxng/tests/unit/test_engines_init.py
	renamed:    searxng/tests/unit/test_exceptions.py -> apps/searxng/tests/unit/test_exceptions.py
	renamed:    searxng/tests/unit/test_external_bangs.py -> apps/searxng/tests/unit/test_external_bangs.py
	renamed:    searxng/tests/unit/test_locales.py -> apps/searxng/tests/unit/test_locales.py
	renamed:    searxng/tests/unit/test_plugins.py -> apps/searxng/tests/unit/test_plugins.py
	renamed:    searxng/tests/unit/test_preferences.py -> apps/searxng/tests/unit/test_preferences.py
	renamed:    searxng/tests/unit/test_query.py -> apps/searxng/tests/unit/test_query.py
	renamed:    searxng/tests/unit/test_results.py -> apps/searxng/tests/unit/test_results.py
	renamed:    searxng/tests/unit/test_search.py -> apps/searxng/tests/unit/test_search.py
	renamed:    searxng/tests/unit/test_settings_loader.py -> apps/searxng/tests/unit/test_settings_loader.py
	renamed:    searxng/tests/unit/test_utils.py -> apps/searxng/tests/unit/test_utils.py
	renamed:    searxng/tests/unit/test_webadapter.py -> apps/searxng/tests/unit/test_webadapter.py
	renamed:    searxng/tests/unit/test_webapp.py -> apps/searxng/tests/unit/test_webapp.py
	renamed:    searxng/tests/unit/test_webutils.py -> apps/searxng/tests/unit/test_webutils.py
	renamed:    searxng/utils/build_env.py -> apps/searxng/utils/build_env.py
	renamed:    searxng/utils/filtron.sh -> apps/searxng/utils/filtron.sh
	renamed:    searxng/utils/lib.sh -> apps/searxng/utils/lib.sh
	renamed:    searxng/utils/lib_go.sh -> apps/searxng/utils/lib_go.sh
	renamed:    searxng/utils/lib_nvm.sh -> apps/searxng/utils/lib_nvm.sh
	renamed:    searxng/utils/lib_redis.sh -> apps/searxng/utils/lib_redis.sh
	renamed:    searxng/utils/lib_sxng_data.sh -> apps/searxng/utils/lib_sxng_data.sh
	renamed:    searxng/utils/lib_sxng_node.sh -> apps/searxng/utils/lib_sxng_node.sh
	renamed:    searxng/utils/lib_sxng_static.sh -> apps/searxng/utils/lib_sxng_static.sh
	renamed:    searxng/utils/lib_sxng_test.sh -> apps/searxng/utils/lib_sxng_test.sh
	renamed:    searxng/utils/lib_sxng_themes.sh -> apps/searxng/utils/lib_sxng_themes.sh
	renamed:    searxng/utils/lib_sxng_weblate.sh -> apps/searxng/utils/lib_sxng_weblate.sh
	renamed:    searxng/utils/lxc.sh -> apps/searxng/utils/lxc.sh
	renamed:    searxng/utils/morty.sh -> apps/searxng/utils/morty.sh
	renamed:    searxng/utils/searx.sh -> apps/searxng/utils/searx.sh
	renamed:    searxng/utils/searxng.sh -> apps/searxng/utils/searxng.sh
	renamed:    searxng/utils/searxng_check.py -> apps/searxng/utils/searxng_check.py
	renamed:    searxng/utils/templates/etc/searxng/settings.yml -> apps/searxng/utils/templates/etc/searxng/settings.yml
	new file:   apps/shift-recorder
	new file:   apps/stockfill
	new file:   core/authelia/configuration.yml
	new file:   core/authelia/users_database.yml
	new file:   core/crowdsec/Dockerfile
	new file:   core/crowdsec/data/detect.yaml
	new file:   core/docker-compose.yml
	new file:   core/test/Dockerfile
	new file:   core/test/docker-compose.yml
	new file:   core/test/exporter.py
	new file:   core/traefik/data/dynamic.yaml
	renamed:    traefik/data/plugins.yaml -> core/traefik/data/plugins.yaml
	new file:   core/traefik/dynamic.yml
	new file:   core/traefik/traefik.yml
	new file:   default-network.yml
	new file:   monitoring/docker-exporter/Dockerfile
	new file:   monitoring/docker-exporter/exporter.py
	new file:   monitoring/gotify/docker-compose.yml
	new file:   monitoring/gotify/docker-health-to-gotify.sh
	new file:   monitoring/grafana/docker-compose.yml
	new file:   monitoring/node-red/Dockerfile
	new file:   monitoring/node-red/data/test-container.sh
	new file:   monitoring/node-red/docker-compose.yml
	new file:   monitoring/portainer/docker-compose.yml
	new file:   monitoring/prometheus/docker-compose.yml
	new file:   monitoring/prometheus/prometheus.yml
	new file:   monitoring/prometheus/rules/alerts.yml
	new file:   monitoring/uptime-kuma/docker-compose.yml
	deleted:    nextcloud/docker-compose.yml
	new file:   services-up.sh
	deleted:    traefik/docker-compose.yml
	deleted:    traefik/traefik.Dockerfile
	modified:   update-containers.py
	modified:   update-containers.sh

	modified:   apps/shift-recorder (modified content)
	modified:   apps/stockfill (modified content)
This commit is contained in:
git
2026-03-31 19:59:49 +10:00
parent d5b6cb22cd
commit b71cd3fcbb
340 changed files with 2084 additions and 311 deletions
+15
View File
@@ -0,0 +1,15 @@
FROM python:3.11-slim

# Non-root user setup kept for reference; the exporter currently runs as root
# so it can read the mounted Docker socket.
#RUN groupadd -g 1000 appuser || true && \
#    useradd -m -u 1000 -g 1000 -s /bin/bash appuser
#RUN groupadd -g 999 docker || true && usermod -aG docker appuser

WORKDIR /app

# Install dependencies (and create the cache dir) BEFORE copying the source,
# so code-only changes do not invalidate the slow pip layer.
RUN pip install --no-cache-dir docker prometheus_client requests pyyaml \
    && mkdir -p /data

COPY exporter.py .

#USER appuser
CMD ["python", "-u", "exporter.py"]
+294
View File
@@ -0,0 +1,294 @@
#!/usr/bin/env python3
import os
import re
import time
import json
import logging
import docker
import requests
import yaml
from prometheus_client import Gauge, start_http_server
# --- Logging ---
# Chatty by default (DEBUG); override with the LOG_LEVEL env var.
LOG_LEVEL = os.getenv("LOG_LEVEL", "DEBUG").upper()
logging.basicConfig(
    # getattr falls back to DEBUG when LOG_LEVEL names no logging level
    level=getattr(logging, LOG_LEVEL, logging.DEBUG),
    format="%(asctime)s [%(levelname)s] %(message)s"
)
logger = logging.getLogger("docker-update-exporter")

# --- Config ---
EXPORTER_PORT = 9105  # port for the Prometheus scrape endpoint
CHECK_INTERVAL = 60  # seconds between update checks
CACHE_TTL = 6 * 3600  # remote-digest cache lifetime: 6 hours
SERVICES_UP_SCRIPT = "/compose/services-up.sh"  # scanned for the compose project name
CACHE_FILE = "/data/remote_digest_cache.json"  # persisted remote-digest cache

# Docker client configured from the environment (DOCKER_HOST / mounted socket).
client = docker.from_env()

# --- Metrics ---
CONTAINER_UPDATE = Gauge(
    "docker_container_update_available",
    "1 if container image is out of date (compose drift or registry), 0 otherwise",
    ["container", "compose_image", "running_image", "com_docker_compose_project"]
)
LAST_CHECK = Gauge(
    "docker_image_update_last_check_timestamp",
    "Last time the update check ran (unix timestamp)"
)
# --- Persistent Cache ---
def load_cache():
    """Load the persisted remote-digest cache from CACHE_FILE.

    Returns:
        dict: cached ``image_name -> (digest, timestamp)`` entries; empty
        when the file is missing or unreadable (failures are logged, never
        raised).
    """
    if not os.path.exists(CACHE_FILE):
        logger.info(f"Cache file does not exist yet: {CACHE_FILE}")
        return {}
    try:
        with open(CACHE_FILE, "r") as fh:
            data = json.load(fh)
        logger.info(f"Loaded {len(data)} cached remote digests")
        return data
    except Exception as exc:
        logger.error(f"Failed to load cache: {exc}")
        return {}
def save_cache():
    """Persist REMOTE_DIGEST_CACHE to CACHE_FILE (best effort; errors logged)."""
    try:
        os.makedirs(os.path.dirname(CACHE_FILE), exist_ok=True)
        with open(CACHE_FILE, "w") as fh:
            json.dump(REMOTE_DIGEST_CACHE, fh)
        logger.debug(f"Saved {len(REMOTE_DIGEST_CACHE)} remote digests to cache")
    except Exception as exc:
        logger.error(f"Failed to save cache: {exc}")

# In-memory cache, image name -> (digest, fetch timestamp); seeded from disk.
REMOTE_DIGEST_CACHE = load_cache()
# --- Helpers ---
def get_project_prefix_from_script(script_path):
    """Derive the compose project prefix from a ``PROJECT=...`` assignment.

    Scans ``script_path`` (normally services-up.sh) line by line and returns
    the first PROJECT value with a trailing "-" appended. Falls back to
    "core-" when the file is missing, unreadable, or has no assignment.
    """
    default = "core-"
    if not os.path.exists(script_path):
        return default
    pattern = re.compile(r'PROJECT\s*=\s*["\']?([^"\']+)')
    try:
        with open(script_path) as fh:
            for raw in fh:
                found = pattern.match(raw)
                if found:
                    return found.group(1) + "-"
    except Exception as exc:
        logger.warning(f"Failed reading project prefix: {exc}")
    return default
def get_local_digest(image_name):
    """Return the repo digest (``sha256:...``) of a locally pulled image.

    Returns None when the image is absent, has no RepoDigests entry, or the
    docker API call fails (all failures are swallowed silently, matching the
    best-effort contract of the caller).
    """
    try:
        image = client.images.get(image_name)
        repo_digests = image.attrs.get("RepoDigests", [])
        if repo_digests:
            found = repo_digests[0].split("@")[1]
            logger.debug(f"Local digest for {image_name}: {found}")
            return found
        logger.debug(f"No local digest found for {image_name}")
    except Exception:
        # Image not present locally (or unexpected attrs shape): no digest.
        pass
    return None
def get_remote_digest(image_name):
    """Resolve the current registry digest for an image reference.

    Supports Docker Hub and ghcr.io. Results are cached in
    REMOTE_DIGEST_CACHE for CACHE_TTL seconds and persisted via save_cache().

    Returns:
        str | None: the ``Docker-Content-Digest`` header value, or None on
        any failure / unsupported registry.
    """
    now = time.time()
    cached = REMOTE_DIGEST_CACHE.get(image_name)
    if cached:
        digest, ts = cached
        if now - ts < CACHE_TTL:
            return digest
    try:
        # Split the reference into registry / repository / tag.
        if "/" not in image_name:
            registry, repo = "docker.io", "library/" + image_name
        else:
            head, rest = image_name.split("/", 1)
            if "." in head or ":" in head:
                registry, repo = head, rest
            else:
                registry, repo = "docker.io", image_name
        if ":" in repo:
            repo, tag = repo.rsplit(":", 1)
        else:
            tag = "latest"

        if registry in ("docker.io", "registry-1.docker.io"):
            token_res = requests.get(
                "https://auth.docker.io/token",
                params={"service": "registry.docker.io", "scope": f"repository:{repo}:pull"},
                timeout=10,
            )
            token = token_res.json().get("token")
            manifest_url = f"https://registry-1.docker.io/v2/{repo}/manifests/{tag}"
        elif registry == "ghcr.io":
            token_res = requests.get(
                "https://ghcr.io/token",
                params={"service": "ghcr.io", "scope": f"repository:{repo}:pull"},
                timeout=10,
            )
            token = token_res.json().get("token")
            manifest_url = f"https://ghcr.io/v2/{repo}/manifests/{tag}"
        else:
            logger.warning(f"Unsupported registry {registry} for {image_name}")
            return None
        if not token:
            return None

        # Fix: also accept multi-arch manifest lists / OCI indexes. With only
        # the single-arch v2 type, the registry's Docker-Content-Digest does
        # not match the RepoDigests recorded for a multi-arch pull, so every
        # such image was permanently reported as out of date.
        accept = ", ".join([
            "application/vnd.docker.distribution.manifest.list.v2+json",
            "application/vnd.oci.image.index.v1+json",
            "application/vnd.docker.distribution.manifest.v2+json",
            "application/vnd.oci.image.manifest.v1+json",
        ])
        res = requests.get(
            manifest_url,
            headers={"Authorization": f"Bearer {token}", "Accept": accept},
            timeout=10,
        )
        if res.status_code == 200:
            digest = res.headers.get("Docker-Content-Digest")
            REMOTE_DIGEST_CACHE[image_name] = (digest, now)
            save_cache()
            return digest
        # Fix: previously non-200 responses fell through silently.
        logger.debug(f"Manifest request for {image_name} returned {res.status_code}")
    except Exception as e:
        logger.debug(f"Error fetching remote digest for {image_name}: {e}")
    return None
# --- Dockerfile Image Extraction ---
def parse_dockerfile_for_image(dockerfile_path):
    """Determine the image name a Dockerfile describes.

    Preference order: an explicit ``LABEL ... image=<name>`` line, then the
    base image of the first ``FROM`` line. Returns None when the file is
    missing, unreadable, or contains neither.
    """
    if not os.path.exists(dockerfile_path):
        return None
    image_name = None
    try:
        with open(dockerfile_path) as df:
            # First pass: an explicit image= LABEL wins.
            for raw in df:
                stripped = raw.strip()
                if "LABEL" in stripped and "image=" in stripped:
                    found = re.search(r'image=["\']?([^"\']+)["\']?', stripped)
                    if found:
                        image_name = found.group(1)
                        logger.debug(f"Found LABEL image={image_name} in {dockerfile_path}")
                        return image_name
            # Second pass: fall back to the FROM base image.
            df.seek(0)
            for raw in df:
                stripped = raw.strip()
                if stripped.upper().startswith("FROM "):
                    tokens = stripped.split()
                    if len(tokens) >= 2:
                        base_image = tokens[1]
                        logger.debug(f"Found base FROM {base_image} in {dockerfile_path}")
                        return base_image
    except Exception as e:
        logger.debug(f"Error reading Dockerfile {dockerfile_path}: {e}")
    return image_name
# --- Compose parsing ---
def get_compose_files_from_script(script_path):
    """Extract compose file paths from the ``FILES=( ... )`` array in
    services-up.sh.

    Each ``-f <path>`` entry is resolved relative to the script's directory
    and normalised. Returns an empty list when the script is missing or
    unparsable (parse failures are logged).
    """
    resolved = []
    if not os.path.exists(script_path):
        return resolved
    base_dir = os.path.dirname(script_path)
    try:
        with open(script_path) as fh:
            body = fh.read()
        found = re.search(r'FILES\s*=\s*\((.*?)\)', body, re.DOTALL)
        if found:
            for entry in found.group(1).splitlines():
                entry = entry.strip()
                if entry.startswith("-f"):
                    rel = entry[2:].strip()
                    if rel:
                        resolved.append(os.path.normpath(os.path.join(base_dir, rel)))
    except Exception as exc:
        logger.warning(f"Failed parsing services-up.sh: {exc}")
    return resolved
def parse_compose_services(compose_files):
    """Map compose service names to ``(image, is_built)`` pairs.

    Services with an ``image:`` key use it directly (is_built False).
    Build-only services get their image name from the referenced Dockerfile
    via parse_dockerfile_for_image(); failing that the image defaults to
    ``<service>:latest``. Unparsable files are logged and skipped.
    """
    svc_map = {}
    for path in compose_files:
        try:
            with open(path) as stream:
                data = yaml.safe_load(stream) or {}
            for svc_name, svc_def in data.get("services", {}).items():
                image = svc_def.get("image")
                is_built = False
                if not image and "build" in svc_def:
                    is_built = True
                    build_ctx = svc_def["build"]
                    dockerfile_path = None
                    if isinstance(build_ctx, dict):
                        context = build_ctx.get("context", ".")
                        dockerfile_path = os.path.join(context, build_ctx.get("dockerfile", "Dockerfile"))
                    elif isinstance(build_ctx, str):
                        dockerfile_path = os.path.join(build_ctx, "Dockerfile")
                    image = parse_dockerfile_for_image(dockerfile_path)
                    if not image:
                        logger.info(f"Defaulting build image for {svc_name} to {svc_name}:latest")
                        image = f"{svc_name}:latest"
                svc_map[svc_name] = (image, is_built)
        except Exception as e:
            logger.warning(f"Failed parsing {path}: {e}")
    logger.debug(f"Service image mapping: {svc_map}")
    return svc_map
# --- Main check ---
def check_containers():
    """Refresh docker_container_update_available for every running
    compose-managed container, then record the run time in LAST_CHECK.

    A container scores 1 when its local image digest differs from the digest
    currently published by the registry; otherwise 0. Containers without a
    com.docker.compose.project label are skipped.
    """
    CONTAINER_UPDATE.clear()
    prefix = get_project_prefix_from_script(SERVICES_UP_SCRIPT)
    compose_files = get_compose_files_from_script(SERVICES_UP_SCRIPT)
    svc_map = parse_compose_services(compose_files)
    for container in client.containers.list():
        proj = container.labels.get("com.docker.compose.project")
        if not proj:
            # Not managed by compose; nothing to compare against.
            continue
        svc = container.labels.get("com.docker.compose.service")
        running = container.attrs["Config"]["Image"]
        compose_image = None
        is_built = False
        if svc in svc_map:
            compose_image, is_built = svc_map[svc]
            if is_built:
                # Locally built images are tagged "<project-prefix><name>".
                name, _, _ = compose_image.partition(":")
                compose_image = f"{prefix}{name}"
        update_flag = 0
        local_digest = get_local_digest(running)
        remote_digest = get_remote_digest(compose_image if is_built else running)
        if local_digest and remote_digest and local_digest != remote_digest:
            update_flag = 1
        CONTAINER_UPDATE.labels(
            container=container.name,
            compose_image=compose_image or "unknown",
            running_image=running,
            com_docker_compose_project=proj,
        ).set(update_flag)
    # Fix: LAST_CHECK was declared at module level but never updated.
    LAST_CHECK.set(time.time())
# --- Runner ---
if __name__ == "__main__":
    # Expose /metrics, then loop forever: one check per CHECK_INTERVAL.
    # A failed check is logged with traceback and never kills the loop.
    start_http_server(EXPORTER_PORT)
    while True:
        try:
            check_containers()
        except Exception as exc:
            logger.exception(f"update check failed: {exc}")
        time.sleep(CHECK_INTERVAL)
+32
View File
@@ -0,0 +1,32 @@
# Gotify push-notification server, fronted by Traefik.
services:
  gotify:
    profiles: ["monitoring", "all", "gotify"]
    image: gotify/server:latest
    container_name: gotify
    restart: always
    volumes:
      - ${PROJECT_ROOT}/monitoring/gotify/data:/app/data
    environment:
      - TZ=${TZ}
      - GOTIFY_DEFAULTUSER_NAME=admin
      # SECURITY(review): admin password committed in plain text — move it to
      # an env file / secret store and rotate the credential.
      - GOTIFY_DEFAULTUSER_PASS=R1m@dmin
      - GOTIFY_REGISTRATION=false
    networks:
      # - traefik_reverse_proxy
      - traefik
    labels:
      - "traefik.enable=true"
      - "traefik.docker.network=core_traefik"
      - "io.portainer.accesscontrol.public"
      - "traefik.http.routers.gotify.rule=Host(`gotify.lan.ddnsgeek.com`)"
      - "traefik.http.routers.gotify.entrypoints=websecure"
      - "traefik.http.routers.gotify.tls.certresolver=myresolver"
      # Gotify listens on port 80 inside the container.
      - "traefik.http.services.gotify.loadbalancer.server.port=80"

#networks:
#  traefik_reverse_proxy:
#    external: true
+45
View File
@@ -0,0 +1,45 @@
#!/usr/bin/env bash
# Scans running containers for an "unhealthy" health status and pushes a
# Gotify notification whenever the set of unhealthy containers CHANGES
# (one alert on failure, one on recovery — no repeats).
set -euo pipefail
#: "${GOTIFY_URL:?Set GOTIFY_URL (e.g. https://gotify.lan.ddnsgeek.com)}"
#: "${GOTIFY_TOKEN:?Set GOTIFY_TOKEN (AAM..CtNmUGoNIV)}"
# SECURITY(review): application token committed in plain text — prefer the
# commented env-var guards above and rotate this token.
GOTIFY_URL="https://gotify.lan.ddnsgeek.com"
GOTIFY_TOKEN="ADuOnDBG7C27hcf"
# State file remembers the previous unhealthy set between runs.
STATE_DIR="./docker-health-alert"
STATE_FILE="${STATE_DIR}/last_unhealthy.txt"
mkdir -p "$STATE_DIR"
# Collect unhealthy running containers (ignore those with no healthcheck)
unhealthy="$(
docker ps -q | while read -r id; do
status="$(docker inspect --format '{{if .State.Health}}{{.State.Health.Status}}{{else}}no-healthcheck{{end}}' "$id")"
name="$(docker inspect --format '{{.Name}}' "$id" | sed 's|^/||')"
if [[ "$status" == "unhealthy" ]]; then
echo "$name"
fi
done | sort
)"
# Only alert on change
last="$(cat "$STATE_FILE" 2>/dev/null || true)"
if [[ "$unhealthy" != "$last" ]]; then
if [[ -n "$unhealthy" ]]; then
msg="Unhealthy containers: $(echo "$unhealthy" | paste -sd ', ' -)"
title="Docker: UNHEALTHY"
priority=8
else
msg="All containers healthy again."
title="Docker: RECOVERED"
priority=4
fi
# POST to Gotify; with -f and set -e an HTTP error aborts before the state
# file is rewritten, so the alert is retried on the next run.
curl -fsS -X POST "${GOTIFY_URL%/}/message" \
-H "X-Gotify-Key: ${GOTIFY_TOKEN}" \
-F "title=${title}" \
-F "message=${msg}" \
-F "priority=${priority}" >/dev/null
printf "%s" "$unhealthy" > "$STATE_FILE"
fi
+49
View File
@@ -0,0 +1,49 @@
# Grafana dashboard server, fronted by Traefik, scraping via the monitor net.
services:
  grafana:
    profiles: ["monitoring", "all", "grafana"]
    image: grafana/grafana:latest
    container_name: grafana
    restart: unless-stopped
    environment:
      - GF_SERVER_ROOT_URL=https://grafana.lan.ddnsgeek.com/
    volumes:
      - ${PROJECT_ROOT}/monitoring/grafana/data:/var/lib/grafana
    networks:
      # - traefik_reverse_proxy
      # - prometheus_edge
      - traefik
      - monitor
    labels:
      - "traefik.http.routers.grafana.rule=Host(`grafana.lan.ddnsgeek.com`)"
      - "traefik.enable=true"
      - "traefik.http.routers.grafana.entrypoints=websecure"
      - "traefik.http.routers.grafana.tls.certresolver=myresolver"
      - "io.portainer.accesscontrol.public"
      - "traefik.http.services.grafana.loadbalancer.server.port=3000"
      - "traefik.docker.network=core_traefik"
    healthcheck:
      test: ["CMD", "wget", "--spider", "-q", "http://localhost:3000/api/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 30s

# tempo:
#   image: grafana/tempo:latest
#   container_name: tempo
#   command:
#     - "-config.file=/etc/tempo/config.yaml"
#   volumes:
#     - ./tempo/config.yaml:/etc/tempo/config.yaml
#     - ./tempo/data:/var/lib/tempo
#   ports:
#     - "4317:4317" # OTLP gRPC endpoint for Traefik
#     - "3200:3200" # optional: HTTP endpoint
#   networks:
#     - prometheus_edge

#networks:
#  traefik_reverse_proxy:
#    external: true
#  prometheus_edge:
#    external: true
+6
View File
@@ -0,0 +1,6 @@
FROM nodered/node-red:latest

USER root
# docker-cli + the compose plugin let flows drive the host docker daemon via
# the mounted socket. Merged into one RUN to avoid an extra image layer.
# NOTE(review): GID 131 must match the docker socket's group on the host —
# confirm when deploying to a new machine.
RUN apk add --no-cache docker-cli docker-cli-compose \
    && addgroup -g 131 -S docker \
    && addgroup node-red docker
USER node-red
+45
View File
@@ -0,0 +1,45 @@
#!/usr/bin/env bash
# test-container.sh
# Usage: ./test-container.sh container_name
# Builds and starts a throwaway instance of the given compose service, polls
# its healthcheck for up to $timeout seconds, then prints and exits with
# 0 (healthy) or 1 (unhealthy / timed out).

# Fix: fail fast with a usage message instead of running with an empty name.
if [ -z "${1:-}" ]; then
  echo "Usage: $0 container_name" >&2
  exit 2
fi
container="$1"
test_name="testing-${container}"
compose_script="/compose/services-up.sh"

# Run container in detached mode
"$compose_script" --profile all run -d --name "$test_name" --build "$container" >/dev/null 2>&1

# Poll health status
timeout=60   # seconds
interval=2   # seconds
elapsed=0
result=1     # default to failure
while [ "$elapsed" -lt "$timeout" ]; do
  status=$(docker inspect --format='{{.State.Health.Status}}' "$test_name" 2>/dev/null)
  if [ "$status" == "healthy" ]; then
    result=0   # success
    break
  elif [ "$status" == "unhealthy" ]; then
    result=1   # failure
    break
  fi
  sleep "$interval"
  elapsed=$((elapsed + interval))
done

# Timeout case (loop exhausted without a verdict)
if [ "$elapsed" -ge "$timeout" ]; then
  result=1
fi

# Cleanup
docker rm "$test_name" --force >/dev/null 2>&1

echo "$result"
exit "$result"
+50
View File
@@ -0,0 +1,50 @@
# Node-RED automation server. Built locally (see monitoring/node-red) so the
# image carries docker-cli; the long :ro mount list mirrors the compose files
# the flows inspect.
services:
  node-red:
    # image: nodered/node-red:latest
    build:
      context: ${PROJECT_ROOT}/monitoring/node-red
    container_name: node-red
    profiles: ["monitoring", "all"]
    restart: unless-stopped
    privileged: true
    # ports:
    #   - "1880:1880"
    volumes:
      - ${PROJECT_ROOT}/monitoring/node-red/data:/data
      - /var/run/docker.sock:/var/run/docker.sock:rw
      - ${PROJECT_ROOT}:/compose
      - ${PROJECT_ROOT}/default-environment.env:/usr/src/node-red/default-environment.env:ro
      - ${PROJECT_ROOT}/default-network.yml:/usr/src/node-red/default-network.yml:ro
      - ${PROJECT_ROOT}/core/docker-compose.yml:/usr/src/node-red/core/docker-compose.yml:ro
      - ${PROJECT_ROOT}/monitoring/prometheus/docker-compose.yml:/usr/src/node-red/monitoring/prometheus/docker-compose.yml:ro
      - ${PROJECT_ROOT}/monitoring/gotify/docker-compose.yml:/usr/src/node-red/monitoring/gotify/docker-compose.yml:ro
      - ${PROJECT_ROOT}/monitoring/grafana/docker-compose.yml:/usr/src/node-red/monitoring/grafana/docker-compose.yml:ro
      - ${PROJECT_ROOT}/monitoring/portainer/docker-compose.yml:/usr/src/node-red/monitoring/portainer/docker-compose.yml:ro
      - ${PROJECT_ROOT}/monitoring/uptime-kuma/docker-compose.yml:/usr/src/node-red/monitoring/uptime-kuma/docker-compose.yml:ro
      - ${PROJECT_ROOT}/apps/gitea/docker-compose.yml:/usr/src/node-red/apps/gitea/docker-compose.yml:ro
      - ${PROJECT_ROOT}/apps/gramps/docker-compose.yml:/usr/src/node-red/apps/gramps/docker-compose.yml:ro
      - ${PROJECT_ROOT}/apps/nextcloud/docker-compose.yml:/usr/src/node-red/apps/nextcloud/docker-compose.yml:ro
      - ${PROJECT_ROOT}/apps/passbolt/docker-compose.yml:/usr/src/node-red/apps/passbolt/docker-compose.yml:ro
      - ${PROJECT_ROOT}/apps/searxng/docker-compose.yml:/usr/src/node-red/apps/searxng/docker-compose.yml:ro
      - ${PROJECT_ROOT}/apps/shift-recorder/docker-compose.yml:/usr/src/node-red/apps/shift-recorder/docker-compose.yml:ro
      - ${PROJECT_ROOT}/apps/stockfill/docker-compose.yml:/usr/src/node-red/apps/stockfill/docker-compose.yml:ro
      - ${PROJECT_ROOT}/monitoring/node-red/docker-compose.yml:/usr/src/node-red/monitoring/node-red/docker-compose.yml:ro
      - ${PROJECT_ROOT}/core/test/docker-compose.yml:/usr/src/node-red/core/test/docker-compose.yml:ro
      # - /run/current-system/sw/bin/docker:/usr/bin/docker:ro
    # depends_on:
    #   - mosquitto
    #   - influxdb
    networks:
      - monitor
      - traefik
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.node-red.rule=Host(`node-red.lan.ddnsgeek.com`)"
      # - "traefik.http.routers.node-red.service=api@internal"
      - "traefik.http.routers.node-red.entrypoints=websecure"
      - "traefik.http.routers.node-red.tls.certresolver=myresolver"
      - "traefik.http.routers.node-red.middlewares=authelia"
      - "io.portainer.accesscontrol.public"
      - "traefik.docker.network=core_traefik"
      - "traefik.http.services.node-red.loadbalancer.server.port=1880"
+40
View File
@@ -0,0 +1,40 @@
# Portainer CE container manager, fronted by Traefik.
services:
  portainer:
    profiles: ["monitoring", "all", "portainer"]
    image: portainer/portainer-ce:latest
    container_name: portainer
    restart: unless-stopped
    command: -H unix:///var/run/docker.sock
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - ${PROJECT_ROOT}/monitoring/portainer/data:/data
    networks:
      # - traefik_reverse_proxy
      - traefik
    labels:
      - traefik.enable=true
      # Router
      - traefik.http.routers.portainer.rule=Host(`portainer.lan.ddnsgeek.com`)
      - traefik.http.routers.portainer.entrypoints=websecure
      - traefik.http.routers.portainer.tls=true
      - traefik.http.routers.portainer.tls.certresolver=myresolver
      - io.portainer.accesscontrol.public
      # Service -> Portainer listens on 9000 inside the container
      - traefik.http.services.portainer.loadbalancer.server.port=9000
    environment:
      - GODEBUG=netdns=cgo
    # healthcheck:
    #   test: ["CMD", "wget", "--spider", "-q", "https://portainer.lan.ddnsgeek.com/api/status"]
    #   interval: 30s
    #   timeout: 10s
    #   retries: 3
    #   start_period: 30s

#networks:
#  traefik_reverse_proxy:
#    external: true
#  internal:
#    driver: bridge
+230
View File
@@ -0,0 +1,230 @@
#version: "3.8"
# Prometheus monitoring stack: server, exporters, influxdb and telegraf.
services:
  prometheus:
    profiles: ["monitoring", "all", "prometheus"]
    image: prom/prometheus:latest
    container_name: prometheus
    depends_on:
      # - alertmanager
      - telegraf
      - influxdb
      - node-exporter
      - docker-update-exporter
      - pihole-exporter
    command:
      - "--config.file=/etc/prometheus/prometheus.yml"
      - "--storage.tsdb.path=/prometheus"
      - "--storage.tsdb.retention.time=15d"
    # build:
    #   context: ${PROJECT_ROOT}/monitoring/prometheus
    volumes:
      - ${PROJECT_ROOT}/monitoring/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml:ro
      - ${PROJECT_ROOT}/monitoring/prometheus/data:/prometheus
      - ${PROJECT_ROOT}/monitoring/prometheus/rules:/etc/prometheus/rules:ro
    restart: unless-stopped
    labels:
      - "traefik.http.routers.prometheus.rule=Host(`prometheus.lan.ddnsgeek.com`)"
      - "traefik.enable=true"
      - "traefik.http.routers.prometheus.entrypoints=websecure"
      - "traefik.http.routers.prometheus.tls.certresolver=myresolver"
      - "io.portainer.accesscontrol.public"
      - "traefik.http.services.prometheus.loadbalancer.server.port=9090"
      - "traefik.http.routers.prometheus.middlewares=authelia"
      - "traefik.docker.network=core_traefik"
    networks:
      # - edge
      # - traefik_reverse_proxy
      - traefik
      - monitor
    healthcheck:
      test: ["CMD", "wget", "--spider", "-q", "http://localhost:9090/-/healthy"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 30s

  # alertmanager:
  #   image: prom/alertmanager:latest
  #   container_name: alertmanager
  #   command:
  #     - "--config.file=/etc/alertmanager/alertmanager.yml"
  #   volumes:
  #     - ./alertmanager/alertmanager.yml:/etc/alertmanager/alertmanager.yml:ro
  #   restart: unless-stopped
  #   networks:
  #     - edge
  #     - traefik_reverse_proxy
  #   healthcheck:
  #     test: ["CMD", "wget", "--spider", "-q", "http://localhost:9093/-/healthy"]
  #     interval: 30s
  #     timeout: 10s
  #     retries: 3
  #     start_period: 20s
  #   labels:
  #     - "traefik.http.routers.alertmanager.rule=Host(`alertmanager.lan.ddnsgeek.com`)"
  #     - "traefik.enable=true"
  #     - "traefik.http.routers.alertmanager.entrypoints=websecure"
  #     - "traefik.http.routers.alertmanager.tls.certresolver=myresolver"
  #     - "io.portainer.accesscontrol.public"
  #     - "traefik.http.services.alertmanager.loadbalancer.server.port=9093"
  #     - "traefik.http.routers.alertmanager.middlewares=authelia"
  #     - "traefik.docker.network=traefik_reverse_proxy"

  node-exporter:
    profiles: ["monitoring", "all", "prometheus-exporters"]
    image: prom/node-exporter:latest
    container_name: node-exporter
    pid: host
    volumes:
      - /proc:/host/proc:ro
      - /sys:/host/sys:ro
      - /:/rootfs:ro
    command:
      - "--path.procfs=/host/proc"
      - "--path.sysfs=/host/sys"
      - "--path.rootfs=/rootfs"
    restart: unless-stopped
    networks:
      # - edge
      - monitor
    healthcheck:
      test: ["CMD", "wget", "--spider", "-q", "http://localhost:9100/metrics"]
      interval: 30s
      timeout: 10s
      retries: 3

  influxdb:
    profiles: ["monitoring", "all", "prometheus"]
    image: influxdb:2.7
    container_name: influxdb
    restart: unless-stopped
    volumes:
      - ${PROJECT_ROOT}/monitoring/influxdb:/var/lib/influxdb2
    environment:
      DOCKER_INFLUXDB_INIT_MODE: setup
      DOCKER_INFLUXDB_INIT_USERNAME: admin
      # SECURITY(review): initial admin password committed in plain text —
      # move to an env file / secret store and rotate.
      DOCKER_INFLUXDB_INIT_PASSWORD: adminpassword
      DOCKER_INFLUXDB_INIT_ORG: pbs
      DOCKER_INFLUXDB_INIT_BUCKET: telemetry
    networks:
      # - edge
      # - traefik_reverse_proxy
      - traefik
      - monitor
    labels:
      - "traefik.http.routers.influxdb.rule=Host(`influxdb.lan.ddnsgeek.com`)"
      - "traefik.enable=true"
      - "traefik.http.routers.influxdb.entrypoints=websecure"
      - "traefik.http.routers.influxdb.tls.certresolver=myresolver"
      - "io.portainer.accesscontrol.public"
      - "traefik.http.services.influxdb.loadbalancer.server.port=8086"
      - "traefik.http.routers.influxdb.middlewares=authelia"
      - "traefik.docker.network=core_traefik"
    healthcheck:
      test: ["CMD-SHELL", "curl -f http://localhost:8086/health || exit 1"]
      interval: 30s
      timeout: 5s
      retries: 3
      start_period: 10s

  telegraf:
    profiles: ["monitoring", "all", "prometheus"]
    image: telegraf:latest
    group_add:
      - "131"
    privileged: true
    container_name: telegraf
    restart: unless-stopped
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
      - ${PROJECT_ROOT}/monitoring/telegraf/telegraf.conf:/etc/telegraf/telegraf.conf:ro
    networks:
      # - edge
      - monitor
    healthcheck:
      test: ["CMD-SHELL", "curl -f http://localhost:9273/metrics || exit 1"]
      interval: 30s
      timeout: 5s
      retries: 3
      start_period: 10s

  docker-update-exporter:
    profiles: ["monitoring", "all", "prometheus-exporters"]
    build:
      context: ${PROJECT_ROOT}/monitoring/docker-exporter
    container_name: docker-update-exporter
    # volumes:
    #   - /var/run/docker.sock:/var/run/docker.sock
    #   - ${PROJECT_ROOT}/monitoring/docker-exporter/data:/data:rw
    #   - ${PROJECT_ROOT}/services-up.sh:/app/services-up.sh:ro
    environment:
      LOG_LEVEL: DEBUG
    volumes:
      - ~/.docker/config.json:/root/.docker/config.json:ro
      - /var/run/docker.sock:/var/run/docker.sock
      - ${PROJECT_ROOT}/monitoring/docker-exporter/data:/data:rw
      - ${PROJECT_ROOT}:/compose
      - ${PROJECT_ROOT}/default-environment.env:/compose/default-environment.env:ro
      - ${PROJECT_ROOT}/default-network.yml:/compose/default-network.yml:ro
      - ${PROJECT_ROOT}/core/docker-compose.yml:/compose/core/docker-compose.yml:ro
      - ${PROJECT_ROOT}/monitoring/prometheus/docker-compose.yml:/compose/monitoring/prometheus/docker-compose.yml:ro
      - ${PROJECT_ROOT}/monitoring/gotify/docker-compose.yml:/compose/monitoring/gotify/docker-compose.yml:ro
      - ${PROJECT_ROOT}/monitoring/grafana/docker-compose.yml:/compose/monitoring/grafana/docker-compose.yml:ro
      - ${PROJECT_ROOT}/monitoring/portainer/docker-compose.yml:/compose/monitoring/portainer/docker-compose.yml:ro
      # Fix: this line was truncated to ":>" in the committed file; restored
      # the ":ro" suffix to match every other mount in the list.
      - ${PROJECT_ROOT}/monitoring/uptime-kuma/docker-compose.yml:/compose/monitoring/uptime-kuma/docker-compose.yml:ro
      - ${PROJECT_ROOT}/apps/gitea/docker-compose.yml:/compose/apps/gitea/docker-compose.yml:ro
      - ${PROJECT_ROOT}/apps/gramps/docker-compose.yml:/compose/apps/gramps/docker-compose.yml:ro
      - ${PROJECT_ROOT}/apps/nextcloud/docker-compose.yml:/compose/apps/nextcloud/docker-compose.yml:ro
      - ${PROJECT_ROOT}/apps/passbolt/docker-compose.yml:/compose/apps/passbolt/docker-compose.yml:ro
      - ${PROJECT_ROOT}/apps/searxng/docker-compose.yml:/compose/apps/searxng/docker-compose.yml:ro
      - ${PROJECT_ROOT}/apps/shift-recorder/docker-compose.yml:/compose/apps/shift-recorder/docker-compose.yml:ro
      - ${PROJECT_ROOT}/apps/stockfill/docker-compose.yml:/compose/apps/stockfill/docker-compose.yml:ro
      - ${PROJECT_ROOT}/monitoring/node-red/docker-compose.yml:/compose/monitoring/node-red/docker-compose.yml:ro
      - ${PROJECT_ROOT}/core/test/docker-compose.yml:/compose/core/test/docker-compose.yml:ro
    # ports:
    #   - "9105:9105"
    restart: unless-stopped
    networks:
      # - edge
      - monitor
    healthcheck:
      test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:9105/metrics')"]
      interval: 30s
      timeout: 5s
      retries: 3
      start_period: 10s

  pihole-exporter:
    profiles: ["monitoring", "all", "prometheus-exporters"]
    image: ekofr/pihole-exporter:latest
    container_name: pihole-exporter
    environment:
      PIHOLE_HOSTNAME: pihole.sweet.home
      PIHOLE_PASSWORD: ""
      PORT: 9617
    ports:
      - "9617:9617"
    restart: unless-stopped
    networks:
      # - edge
      - monitor

#networks:
#  internal:
#    internal: true
#  edge:
#    internal: false
#  traefik_reverse_proxy:
#    external: true
+160
View File
@@ -0,0 +1,160 @@
global:
  scrape_interval: 15s
  evaluation_interval: 15s

#alerting:
#  alertmanagers:
#    - static_configs:
#        - targets:
#            - alertmanager:9093

scrape_configs:
  # Prometheus itself
  - job_name: "prometheus"
    static_configs:
      - targets: ["prometheus:9090"]
        labels:
          role: prometheus

  # =========================
  # Node Exporters (ALL hosts)
  # =========================
  - job_name: "node"
    static_configs:
      - targets:
          - node-exporter:9100
        labels:
          role: docker
      - targets:
          - raspberrypi.tail13f623.ts.net:9100
        labels:
          role: raspberrypi
      - targets:
          - pve.sweet.home:9100
        labels:
          role: proxmox
      - targets:
          - pbs.sweet.home:9100
        labels:
          role: backup
      - targets:
          - pihole:9100
        labels:
          role: pihole
      - targets:
          - server:9100
        labels:
          role: server
      - targets:
          - nix-cache:9100
        labels:
          role: cache

  # =========================
  # Telegraf (Docker metrics)
  # =========================
  - job_name: "telegraf"
    static_configs:
      - targets:
          - telegraf:9273
        labels:
          role: docker

  # =========================
  # Traefik (all instances)
  # =========================
  - job_name: "traefik"
    static_configs:
      - targets:
          - traefik.lan.ddnsgeek.com:8080
        labels:
          role: docker
      - targets:
          - raspberrypi.tail13f623.ts.net:8080
        labels:
          role: raspberrypi
    metric_relabel_configs:
      # Strip the "@provider" suffix from Traefik service labels.
      - source_labels: [service]
        regex: '(.+)@.+'
        target_label: service
        replacement: '$1'

  # =========================
  # Uptime Kuma (separate due to auth)
  # =========================
  - job_name: "kuma"
    metrics_path: /metrics
    scrape_interval: 30s
    basic_auth:
      # SECURITY(review): plaintext credentials committed to VCS — prefer
      # `password_file` or an externally-provisioned secret, and rotate.
      username: wayne.bennett@live.com
      password: '4vjCco?[%{=+,t`):C'
    static_configs:
      - targets:
          - monitor-kuma:3001
        labels:
          role: docker
      - targets:
          - kuma.lan.ddnsgeek.com
        labels:
          role: raspberrypi

  # =========================
  # Proxmox Storage Exporters
  # =========================
  - job_name: "proxmox-storage"
    metrics_path: /metrics
    static_configs:
      - targets:
          - pve.sweet.home:9101
        labels:
          role: proxmox
          storage: lvm
      - targets:
          - pbs.sweet.home:9102
        labels:
          role: backup
          storage: datastore

  # =========================
  # Docker Updates Exporter
  # =========================
  - job_name: "container-updates"
    static_configs:
      - targets:
          - docker-update-exporter:9105
        labels:
          role: docker
      - targets:
          - raspberrypi.tail13f623.ts.net:9105
        labels:
          role: raspberrypi

  # =========================
  # pihole Exporter
  # =========================
  - job_name: "pihole"
    static_configs:
      - targets:
          - pihole-exporter:9617
        labels:
          role: pihole

# NOTE(review): the rules directory is mounted read-only by compose and
# rules/alerts.yml exists, but rule_files is commented out, so no alert
# rules are ever evaluated — confirm whether this should be enabled.
#rule_files:
#  - /etc/prometheus/rules/*.yml
+28
View File
@@ -0,0 +1,28 @@
groups:
  - name: system
    rules:
      # Fires when average non-idle CPU across a host exceeds 90% for 2m.
      - alert: HostHighCPU
        # expr: rate(node_cpu_seconds_total{mode!="idle"}[2m]) > 0.9
        expr: 100 * (1 - avg by(instance) (rate(node_cpu_seconds_total{mode="idle"}[2m]))) > 90
        for: 2m
        labels:
          severity: warning
        annotations:
          summary: "High CPU usage on host"

      # Fires on more than 3 restarts of a container within 10 minutes.
      - alert: ContainerRestarting
        # expr: increase(container_start_time_seconds[10m]) > 3
        expr: increase(container_restart_count[10m]) > 3
        for: 1m
        labels:
          severity: warning
        annotations:
          summary: "Container restarting frequently"

      # NOTE(review): smoke-test rule — fires permanently at critical
      # severity. Remove (or gate) before relying on alert notifications.
      - alert: AlwaysFiring
        expr: vector(1)
        for: 10s
        labels:
          severity: critical
        annotations:
          summary: "This alert should always fire"
+31
View File
@@ -0,0 +1,31 @@
# Uptime Kuma availability monitor, fronted by Traefik.
services:
  monitor-kuma:
    profiles: ["monitoring", "all", "uptime-kuma"]
    image: louislam/uptime-kuma:2.1.1
    container_name: monitor-kuma
    restart: always
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
      - ${PROJECT_ROOT}/monitoring/uptime-kuma/data:/app/data
    # ports:
    #   - 8888:3001
    labels:
      - traefik.enable=true
      # Router
      - traefik.http.routers.monitor.rule=Host(`monitor-kuma.lan.ddnsgeek.com`)
      - traefik.http.routers.monitor.entrypoints=websecure
      - traefik.http.routers.monitor.tls=true
      - traefik.http.routers.monitor.tls.certresolver=myresolver
      - io.portainer.accesscontrol.public
      - traefik.docker.network=core_traefik
      # Service -> container port
      - traefik.http.services.monitor.loadbalancer.server.port=3001
    networks:
      # - traefik_reverse_proxy
      - traefik
      - monitor

#networks:
#  traefik_reverse_proxy:
#    external: true