Merge pull request #36 from beatz174-bit/codex/investigate-unknown-image-reporting-issue

Fix compose file discovery and path normalization in docker-exporter
This commit is contained in:
beatz174-bit
2026-04-13 17:35:12 +10:00
committed by GitHub
+52 -1
View File
@@ -293,6 +293,18 @@ def get_compose_files_from_script(script_path):
if not os.path.exists(script_path):
return files
base_dir = get_project_root_from_script(script_path)
def _clean_compose_path(raw_path):
    """Normalize a compose-file path token parsed out of services-up.sh.

    Strips surrounding whitespace and stray commas, removes one matching
    pair of surrounding quotes, expands the result via
    ``expand_compose_path``, and returns a normalized absolute path
    (relative paths are resolved against the enclosing ``base_dir``,
    the project root derived from the script's location).
    """
    cleaned = str(raw_path).strip().strip(",")
    # Strip exactly one matching pair of quotes. Require len >= 2 so a
    # lone quote character (where startswith and endswith both match the
    # same char) is not collapsed to an empty string.
    if len(cleaned) >= 2 and cleaned[0] == cleaned[-1] and cleaned[0] in ('"', "'"):
        cleaned = cleaned[1:-1]
    expanded = expand_compose_path(cleaned, base_dir)
    if os.path.isabs(expanded):
        return os.path.normpath(expanded)
    # Relative compose paths are interpreted relative to the project root.
    return os.path.normpath(os.path.join(base_dir, expanded))
try:
with open(script_path) as f:
content = f.read()
@@ -303,8 +315,47 @@ def get_compose_files_from_script(script_path):
if line.startswith("-f"):
path = line[2:].strip()
if path:
full = os.path.normpath(os.path.join(base_dir, path))
full = _clean_compose_path(path)
files.append(full)
# services-up.sh can append many compose files at runtime via:
# FILES+=(-f "$file") done < <(find "$PROJECT_ROOT/apps" ...)
# Mirror that behavior here so we can map service->compose image.
root_dirs = []
find_match = re.search(r'find\s+(.*?)\s+\\\s*\n', content)
if find_match:
for token in re.findall(r'"([^"]+)"|\'([^\']+)\'', find_match.group(1)):
candidate = token[0] or token[1]
if candidate:
root_dirs.append(_clean_compose_path(candidate))
else:
root_dirs = [
os.path.join(base_dir, "apps"),
os.path.join(base_dir, "monitoring"),
os.path.join(base_dir, "core"),
]
for root_dir in root_dirs:
if not os.path.isdir(root_dir):
continue
for candidate in sorted(os.listdir(root_dir)):
svc_dir = os.path.join(root_dir, candidate)
if not os.path.isdir(svc_dir):
continue
for compose_name in ("docker-compose.yml", "docker-compose.yaml"):
compose_path = os.path.join(svc_dir, compose_name)
if os.path.exists(compose_path):
files.append(compose_path)
# Preserve order while removing duplicates.
deduped = []
seen = set()
for path in files:
if path in seen:
continue
seen.add(path)
deduped.append(path)
files = deduped
except Exception as e:
logger.warning(f"Failed parsing services-up.sh: {e}")
return files