Harden Dynu/Traefik DNS correlation and validation
This commit is contained in:
@@ -14,15 +14,19 @@ import sys
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Tuple
|
||||
from typing import Any, Dict, Iterable, List, Set
|
||||
|
||||
import yaml
|
||||
|
||||
# Apex domain all managed hostnames must live under.
BASE_DOMAIN = "lan.ddnsgeek.com"
# DNS-only hostnames that are intentionally not backed by a Traefik router.
ALLOWED_UNMAPPED_HOSTNAMES = ["edge.lan.ddnsgeek.com"]
# Snapshot of live Dynu records (input).
DYN_DATA = Path("data/dns/dynu_live.json")
# Machine-readable correlation report (output).
OUT_JSON = Path("data/dns/dynu_traefik_inventory.json")
# Human-readable correlation report (output).
OUT_MD = Path("docs/generated/dns-inventory.md")

# Legacy patterns used by the line-based label parser (kept for compatibility).
HOST_RULE_RE = re.compile(r"Host\((.*?)\)")
DOMAIN_RE = re.compile(r"[`\"']([^`\"']+)[`\"']")
# Current patterns: Host(...) calls (case-insensitive) and quoted hostnames inside them.
HOST_CALL_RE = re.compile(r"Host\s*\(([^)]*)\)", re.IGNORECASE)
QUOTED_HOST_RE = re.compile(r"[`\"']([^`\"']+)[`\"']")
# Splits a traefik router label into (router name, field path),
# e.g. traefik.http.routers.myapp.rule -> ("myapp", "rule").
ROUTER_LABEL_RE = re.compile(r"^traefik\.http\.routers\.([^.]+)\.(.+)$")
||||
class ReadOnlyError(RuntimeError):
|
||||
@@ -38,95 +42,156 @@ def require_read_only() -> None:
|
||||
|
||||
|
||||
def compose_files(root: Path) -> List[Path]:
    """Collect every docker-compose file the inventory should scan.

    Includes the repo-root default-network.yml (when present) plus any
    docker-compose.yml / docker-compose.yaml found anywhere under the
    apps/, monitoring/ and core/ areas.

    Args:
        root: Repository root directory.

    Returns:
        A sorted, de-duplicated list of compose file paths (deterministic
        ordering so downstream reports are stable).
    """
    files: Set[Path] = set()

    # The shared network definition lives directly at the repo root.
    if (root / "default-network.yml").exists():
        files.add(root / "default-network.yml")

    for area in ("apps", "monitoring", "core"):
        base = root / area
        if not base.exists():
            continue
        # Recursive glob: stacks may nest compose files more than one level deep.
        for pattern in ("**/docker-compose.yml", "**/docker-compose.yaml"):
            files.update(p for p in base.glob(pattern) if p.is_file())

    return sorted(files)
|
||||
|
||||
|
||||
def parse_hosts_from_label(label_value: str) -> List[str]:
    """Extract hostnames from a raw Traefik label value (legacy parser).

    Finds every ``Host(...)`` fragment via HOST_RULE_RE, then pulls the
    quoted hostnames out of each fragment via DOMAIN_RE.  Hostnames are
    lower-cased and stripped of surrounding dots.

    Args:
        label_value: The value of a ``traefik.http.routers.*.rule`` label.

    Returns:
        Sorted, de-duplicated list of hostnames found in the value.
    """
    found = []
    for fragment in HOST_RULE_RE.findall(label_value):
        for host in DOMAIN_RE.findall(fragment):
            h = host.strip().strip(".").lower()
            if h:
                found.append(h)
    # sorted(set(...)) gives deterministic output regardless of label order.
    return sorted(set(found))
|
||||
def parse_hosts_from_rule(rule: str) -> List[str]:
    """Extract hostnames from a (resolved) Traefik router rule string.

    Handles both quoted arguments (``Host(`a.example`)``) and, as a
    fallback, unquoted comma-separated arguments inside ``Host(...)``.

    Args:
        rule: Router rule string, ideally after variable resolution.

    Returns:
        Sorted, de-duplicated list of lower-cased hostnames.
    """
    hosts: Set[str] = set()
    for call_fragment in HOST_CALL_RE.findall(rule):
        quoted_hosts = QUOTED_HOST_RE.findall(call_fragment)
        for host in quoted_hosts:
            clean = host.strip().strip(".").lower()
            if clean:
                hosts.add(clean)

        # Fallback: no quoting at all -- split the argument list on commas
        # and strip any stray quote/backtick/dot characters.
        if not quoted_hosts:
            for token in call_fragment.split(","):
                clean = token.strip().strip(".`\"'").lower()
                if clean:
                    hosts.add(clean)

    return sorted(hosts)
|
||||
|
||||
|
||||
def extract_traefik_hosts(path: Path) -> List[Dict[str, str]]:
|
||||
lines = path.read_text(encoding="utf-8").splitlines()
|
||||
entries: List[Dict[str, str]] = []
|
||||
def load_env_defaults(repo_root: Path) -> Dict[str, str]:
    """Load KEY=VALUE defaults from the repo's env files.

    Reads ``default-environment.env`` then ``.env`` (later files override
    earlier keys).  Blank lines, ``#`` comments and lines without ``=``
    are ignored; surrounding single/double quotes on values are stripped.

    Args:
        repo_root: Repository root containing the env files.

    Returns:
        Mapping of env variable name to its default value.
    """
    env_values: Dict[str, str] = {}
    for candidate in (repo_root / "default-environment.env", repo_root / ".env"):
        if not candidate.exists():
            continue
        for line in candidate.read_text(encoding="utf-8").splitlines():
            stripped = line.strip()
            if not stripped or stripped.startswith("#") or "=" not in stripped:
                continue
            key, value = stripped.split("=", 1)
            env_values[key.strip()] = value.strip().strip("'\"")
    return env_values
|
||||
|
||||
in_services = False
|
||||
current_service = ""
|
||||
current_labels_indent = None
|
||||
|
||||
for raw in lines:
|
||||
line = raw.rstrip("\n")
|
||||
stripped = line.strip()
|
||||
def resolve_rule_variables(rule: str, env_values: Dict[str, str]) -> str:
    """Substitute ``${VAR}`` placeholders in a Traefik rule.

    Resolution order mirrors docker-compose: the process environment wins,
    then the repo env-file defaults; unknown variables are left verbatim
    so they remain visible in reports.

    Args:
        rule: Raw router rule possibly containing ``${VAR}`` placeholders.
        env_values: Defaults loaded from the repo's env files.

    Returns:
        The rule with every resolvable placeholder expanded.
    """
    var_re = re.compile(r"\$\{([A-Za-z_][A-Za-z0-9_]*)\}")

    def replacer(match: re.Match[str]) -> str:
        key = match.group(1)
        # Live environment takes precedence over file-based defaults.
        if key in os.environ:
            return os.environ[key]
        # Fall back to the original "${VAR}" text when the key is unknown.
        return env_values.get(key, match.group(0))

    return var_re.sub(replacer, rule)
|
||||
|
||||
|
||||
def normalize_labels(raw_labels: Any) -> Dict[str, str]:
    """Normalize compose service labels into a flat str->str mapping.

    Compose allows labels as either a mapping or a list of ``key=value``
    strings; this accepts both (and anything else yields an empty dict).

    Args:
        raw_labels: The ``labels`` value of a compose service (dict, list,
            or anything else).

    Returns:
        Mapping of label key to string value (``""`` when no value given).
    """
    labels: Dict[str, str] = {}
    if isinstance(raw_labels, dict):
        for key, value in raw_labels.items():
            # YAML may parse values as ints/bools/None; coerce to str.
            labels[str(key)] = "" if value is None else str(value)
        return labels

    if isinstance(raw_labels, list):
        for item in raw_labels:
            if isinstance(item, str) and "=" in item:
                key, value = item.split("=", 1)
                labels[key.strip()] = value.strip()
            elif isinstance(item, str):
                # Bare flag-style label with no value.
                labels[item.strip()] = ""
        return labels

    # Unsupported shape (None, scalar, ...): treat as no labels.
    return labels
|
||||
|
||||
|
||||
def infer_stack(compose_file: Path) -> str:
    """Derive the stack/area name from a compose file path.

    Assumes *compose_file* is repo-relative (e.g. ``apps/web/docker-compose.yml``
    -> ``"apps"``) -- callers pass paths relative to the repo root.
    """
    parts = compose_file.parts
    return parts[0] if parts else "unknown"
|
||||
|
||||
|
||||
def boolish(value: str) -> bool:
    """Interpret a label string as a boolean (truthy: 1/true/yes/on, any case)."""
    return value.strip().lower() in {"1", "true", "yes", "on"}
|
||||
|
||||
|
||||
def parse_middlewares(raw_value: str) -> List[str]:
    """Split a comma-separated middlewares label into a clean list (empty items dropped)."""
    return [item.strip() for item in raw_value.split(",") if item.strip()]
|
||||
|
||||
|
||||
def extract_traefik_hosts(path: Path, env_values: Dict[str, str]) -> List[Dict[str, Any]]:
    """Extract Traefik-routed hostnames (with route metadata) from a compose file.

    Parses the compose YAML, groups ``traefik.http.routers.<name>.<field>``
    labels per router, resolves ``${VAR}`` placeholders in each rule, and
    emits one entry per (router, hostname) pair.

    Args:
        path: Repo-relative path to a docker-compose file.
        env_values: Env-file defaults used for variable resolution.

    Returns:
        List of entry dicts with fqdn, service/stack provenance, router
        name, raw/resolved rule, and TLS/middleware metadata.

    Raises:
        RuntimeError: When the compose file is not valid YAML.
    """
    try:
        payload = yaml.safe_load(path.read_text(encoding="utf-8")) or {}
    except yaml.YAMLError as exc:
        raise RuntimeError(f"Failed to parse compose YAML in {path}: {exc}") from exc

    services = payload.get("services")
    if not isinstance(services, dict):
        return []

    entries: List[Dict[str, Any]] = []
    stack = infer_stack(path)

    for service_name, service_payload in services.items():
        if not isinstance(service_payload, dict):
            continue

        labels = normalize_labels(service_payload.get("labels"))
        # Group label fields by router name: router -> {field path -> value}.
        router_fields: Dict[str, Dict[str, str]] = defaultdict(dict)

        for label_key, label_value in labels.items():
            match = ROUTER_LABEL_RE.match(label_key)
            if not match:
                continue
            router_name, field_name = match.groups()
            router_fields[router_name][field_name] = label_value

        for router_name, fields in router_fields.items():
            rule = fields.get("rule", "")
            if not rule:
                # A router without a rule cannot route a hostname.
                continue

            router_label_key = f"traefik.http.routers.{router_name}.rule"
            middlewares = parse_middlewares(fields.get("middlewares", ""))
            tls_options = fields.get("tls.options", "")
            # TLS counts as enabled via an explicit flag, named options, or a certresolver.
            tls_enabled = boolish(fields.get("tls", "")) or bool(tls_options) or bool(fields.get("tls.certresolver", ""))

            # Heuristic: mTLS/Authelia usage is detected by name in the
            # TLS options / middleware identifiers.
            lowered_metadata = " ".join([tls_options, ",".join(middlewares)]).lower()
            uses_mtls = "mtls" in lowered_metadata
            uses_authelia = "authelia" in lowered_metadata

            resolved_rule = resolve_rule_variables(rule, env_values)
            for fqdn in parse_hosts_from_rule(resolved_rule):
                entries.append(
                    {
                        "fqdn": fqdn,
                        "service": str(service_name),
                        "stack": stack,
                        "source_compose_file": str(path),
                        "router": router_name,
                        "router_label_keys": [router_label_key],
                        "raw_rule": rule,
                        "resolved_rule": resolved_rule,
                        "uses_tls": tls_enabled,
                        "tls_options": tls_options,
                        "middlewares": middlewares,
                        "uses_mtls": uses_mtls,
                        "uses_authelia": uses_authelia,
                    }
                )

    return entries
|
||||
|
||||
@@ -157,10 +222,36 @@ def load_dynu(path: Path) -> Dict[str, List[Dict[str, str]]]:
|
||||
return index
|
||||
|
||||
|
||||
def write_markdown(data: Dict) -> None:
|
||||
matched = [x for x in data["inventory"] if x["status"] == "matched"]
|
||||
missing = [x for x in data["inventory"] if x["status"] == "missing_in_dynu"]
|
||||
dns_only = [x for x in data["inventory"] if x["status"] == "dns_only"]
|
||||
def is_subdomain_of_base(fqdn: str) -> bool:
    """Return True when *fqdn* is a strict subdomain of BASE_DOMAIN (the base itself is excluded)."""
    return fqdn.endswith(f".{BASE_DOMAIN}")
||||
|
||||
|
||||
def summarize_reasons(
    has_traefik: bool,
    has_dns: bool,
    is_allowed_unmapped: bool,
    is_ambiguous: bool,
    is_enforced_dns_subdomain: bool,
) -> List[str]:
    """Build the list of classification reasons for one hostname.

    Reasons are additive (a hostname can carry several), in a fixed order
    so reports are stable.

    Args:
        has_traefik: Hostname appears in at least one Traefik router rule.
        has_dns: Hostname has at least one Dynu DNS record.
        is_allowed_unmapped: Hostname is on the allow-list for DNS-only entries.
        is_ambiguous: Hostname is claimed by more than one service.
        is_enforced_dns_subdomain: Hostname is a subdomain of the base domain
            (where unmapped records are flagged).

    Returns:
        Ordered list of reason strings.
    """
    reasons: List[str] = []
    if has_traefik and has_dns:
        reasons.append("mapped")
    if has_dns and not has_traefik and is_allowed_unmapped:
        reasons.append("allowed_unmapped")
    if has_dns and not has_traefik and is_enforced_dns_subdomain and not is_allowed_unmapped:
        reasons.append("unexpected_unmapped")
    if has_dns and not has_traefik:
        reasons.append("dns_only")
    if has_traefik and not has_dns:
        reasons.append("traefik_only")
    if is_ambiguous:
        reasons.append("ambiguous_mapping")
    return reasons
|
||||
|
||||
|
||||
def write_markdown(data: Dict[str, Any]) -> None:
|
||||
inventory = data["inventory"]
|
||||
|
||||
lines = [
|
||||
"# DNS Inventory (Dynu + Traefik)",
|
||||
@@ -175,38 +266,60 @@ def write_markdown(data: Dict) -> None:
|
||||
"",
|
||||
f"- Traefik hostnames discovered: **{data['summary']['traefik_hostnames']}**",
|
||||
f"- Dynu hostnames discovered: **{data['summary']['dynu_hostnames']}**",
|
||||
f"- Matched: **{data['summary']['matched']}**",
|
||||
f"- Missing in Dynu: **{data['summary']['missing_in_dynu']}**",
|
||||
f"- Dynu DNS only: **{data['summary']['dns_only']}**",
|
||||
f"- Duplicate Traefik hostnames: **{data['summary']['duplicate_traefik_hostnames']}**",
|
||||
f"- Mapped hostnames: **{data['summary']['mapped_hostnames']}**",
|
||||
f"- DNS-only hostnames: **{data['summary']['dns_only_hostnames']}**",
|
||||
f"- Traefik-only hostnames: **{data['summary']['traefik_only_hostnames']}**",
|
||||
f"- Ambiguous hostnames: **{len(data['validation']['ambiguous_hostnames'])}**",
|
||||
"",
|
||||
"## Dynu Records",
|
||||
"## Validation",
|
||||
"",
|
||||
f"- Validation ok: **{str(data['validation']['validation_ok']).lower()}**",
|
||||
f"- Allowed unmapped hostnames: `{', '.join(data['validation']['allowed_unmapped_hostnames'])}`",
|
||||
f"- Unexpected unmapped hostnames: **{len(data['validation']['unexpected_unmapped_hostnames'])}**",
|
||||
f"- Duplicate hostnames: **{len(data['validation']['duplicate_hostnames'])}**",
|
||||
f"- Ambiguous hostnames: **{len(data['validation']['ambiguous_hostnames'])}**",
|
||||
"",
|
||||
"| Hostname | Type | Value | TTL |",
|
||||
"|---|---|---|---|",
|
||||
]
|
||||
|
||||
for row in data["dynu_records_table"]:
|
||||
lines.append(f"| `{row['hostname']}` | `{row['type']}` | `{row['value']}` | `{row['ttl']}` |")
|
||||
|
||||
lines.extend(["", "## Correlation", "", "| Hostname | Status | Service(s) | Source compose file(s) | DNS records |", "|---|---|---|---|---|"])
|
||||
for row in data["inventory"]:
|
||||
svc = ", ".join(sorted({f"{e['stack']}/{e['service']}" for e in row.get('traefik_entries', [])})) or "-"
|
||||
src = ", ".join(sorted({e['source_compose_file'] for e in row.get('traefik_entries', [])})) or "-"
|
||||
dns = ", ".join([f"{r['type']}:{r['value']}" for r in row.get("dynu_records", [])]) or "-"
|
||||
lines.append(f"| `{row['fqdn']}` | `{row['status']}` | {svc} | {src} | {dns} |")
|
||||
|
||||
def section(title: str, rows: List[Dict]) -> None:
|
||||
lines.extend(["", f"## {title}", ""])
|
||||
def bullet_list(title: str, values: Iterable[str]) -> None:
|
||||
rows = list(values)
|
||||
lines.extend([f"### {title}", ""])
|
||||
if not rows:
|
||||
lines.append("_None._")
|
||||
return
|
||||
for row in rows:
|
||||
lines.append(f"- `{row['fqdn']}`")
|
||||
else:
|
||||
for value in rows:
|
||||
lines.append(f"- `{value}`")
|
||||
lines.append("")
|
||||
|
||||
section("Matched records", matched)
|
||||
section("Traefik hostnames missing in Dynu", missing)
|
||||
section("Dynu DNS records not mapped to known Traefik services", dns_only)
|
||||
bullet_list("Allowed unmapped hostnames", data["validation"]["allowed_unmapped_hostnames"])
|
||||
bullet_list("Unexpected unmapped hostnames", data["validation"]["unexpected_unmapped_hostnames"])
|
||||
bullet_list("Duplicate hostnames", data["validation"]["duplicate_hostnames"])
|
||||
bullet_list("Ambiguous hostnames", data["validation"]["ambiguous_hostnames"])
|
||||
|
||||
lines.extend(
|
||||
[
|
||||
"## Correlation",
|
||||
"",
|
||||
"| Hostname | Status | Reasons | Service(s) | Route metadata | DNS records |",
|
||||
"|---|---|---|---|---|---|",
|
||||
]
|
||||
)
|
||||
|
||||
for row in inventory:
|
||||
services = sorted({f"{entry['stack']}/{entry['service']}" for entry in row["traefik_entries"]})
|
||||
service_cell = ", ".join(services) if services else "-"
|
||||
reason_cell = ", ".join(row["reasons"]) if row["reasons"] else "-"
|
||||
|
||||
route_chunks = []
|
||||
for entry in row["traefik_entries"]:
|
||||
middlewares = ",".join(entry.get("middlewares", [])) or "-"
|
||||
route_chunks.append(
|
||||
f"{entry['router']} [tls={str(entry['uses_tls']).lower()}, mtls={str(entry['uses_mtls']).lower()}, authelia={str(entry['uses_authelia']).lower()}, tls_options={entry.get('tls_options') or '-'}, middlewares={middlewares}]"
|
||||
)
|
||||
route_cell = "<br>".join(route_chunks) if route_chunks else "-"
|
||||
|
||||
dns_cell = ", ".join(f"{item['type']}:{item['value']}" for item in row["dynu_records"]) if row["dynu_records"] else "-"
|
||||
lines.append(f"| `{row['fqdn']}` | `{row['status']}` | `{reason_cell}` | {service_cell} | {route_cell} | {dns_cell} |")
|
||||
|
||||
OUT_MD.parent.mkdir(parents=True, exist_ok=True)
|
||||
OUT_MD.write_text("\n".join(lines) + "\n", encoding="utf-8")
|
||||
@@ -227,43 +340,79 @@ def main() -> int:
|
||||
dynu_index = load_dynu(DYN_DATA)
|
||||
|
||||
repo_root = Path(__file__).resolve().parents[2]
|
||||
hosts = []
|
||||
env_values = load_env_defaults(repo_root)
|
||||
hosts: List[Dict[str, Any]] = []
|
||||
for cf in compose_files(repo_root):
|
||||
hosts.extend(extract_traefik_hosts(cf.relative_to(repo_root)))
|
||||
hosts.extend(extract_traefik_hosts(cf.relative_to(repo_root), env_values))
|
||||
|
||||
by_fqdn: Dict[str, List[Dict[str, str]]] = defaultdict(list)
|
||||
by_fqdn: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
|
||||
for entry in hosts:
|
||||
if entry["fqdn"].endswith(BASE_DOMAIN):
|
||||
if entry["fqdn"] == BASE_DOMAIN or is_subdomain_of_base(entry["fqdn"]):
|
||||
by_fqdn[entry["fqdn"]].append(entry)
|
||||
|
||||
duplicate_hosts = {k for k, v in by_fqdn.items() if len(v) > 1}
|
||||
duplicate_hostnames = sorted(k for k, v in by_fqdn.items() if len(v) > 1)
|
||||
|
||||
combined_fqdns = sorted(set(by_fqdn.keys()) | set(dynu_index.keys()))
|
||||
inventory = []
|
||||
ambiguous_hostnames: List[str] = []
|
||||
|
||||
for fqdn in combined_fqdns:
|
||||
traefik_entries = sorted(
|
||||
by_fqdn.get(fqdn, []),
|
||||
key=lambda x: (x["stack"], x["service"], x["source_compose_file"]),
|
||||
key=lambda x: (x["stack"], x["service"], x["source_compose_file"], x["router"]),
|
||||
)
|
||||
dns_records = dynu_index.get(fqdn, [])
|
||||
|
||||
if traefik_entries and dns_records:
|
||||
status = "matched"
|
||||
elif traefik_entries and not dns_records:
|
||||
status = "missing_in_dynu"
|
||||
else:
|
||||
is_allowed_unmapped = fqdn in ALLOWED_UNMAPPED_HOSTNAMES
|
||||
has_traefik = bool(traefik_entries)
|
||||
has_dns = bool(dns_records)
|
||||
|
||||
service_keys = {f"{item['stack']}/{item['service']}" for item in traefik_entries}
|
||||
is_ambiguous = len(service_keys) > 1
|
||||
if is_ambiguous:
|
||||
ambiguous_hostnames.append(fqdn)
|
||||
|
||||
is_enforced_dns_subdomain = is_subdomain_of_base(fqdn)
|
||||
|
||||
if has_traefik and has_dns:
|
||||
status = "mapped"
|
||||
elif has_dns and is_allowed_unmapped:
|
||||
status = "allowed_unmapped"
|
||||
elif has_dns and not has_traefik and is_enforced_dns_subdomain:
|
||||
status = "unexpected_unmapped"
|
||||
elif has_dns and not has_traefik:
|
||||
status = "dns_only"
|
||||
else:
|
||||
status = "traefik_only"
|
||||
|
||||
reasons = summarize_reasons(
|
||||
has_traefik, has_dns, is_allowed_unmapped, is_ambiguous, is_enforced_dns_subdomain
|
||||
)
|
||||
|
||||
inventory.append(
|
||||
{
|
||||
"fqdn": fqdn,
|
||||
"status": status,
|
||||
"duplicate": fqdn in duplicate_hosts,
|
||||
"reasons": reasons,
|
||||
"duplicate": fqdn in duplicate_hostnames,
|
||||
"traefik_entries": traefik_entries,
|
||||
"dynu_records": dns_records,
|
||||
}
|
||||
)
|
||||
|
||||
subdomain_dns_hosts = sorted(host for host in dynu_index if is_subdomain_of_base(host))
|
||||
unexpected_unmapped_hostnames = sorted(
|
||||
host for host in subdomain_dns_hosts if host not in by_fqdn and host not in ALLOWED_UNMAPPED_HOSTNAMES
|
||||
)
|
||||
|
||||
validation = {
|
||||
"allowed_unmapped_hostnames": sorted(ALLOWED_UNMAPPED_HOSTNAMES),
|
||||
"unexpected_unmapped_hostnames": unexpected_unmapped_hostnames,
|
||||
"duplicate_hostnames": duplicate_hostnames,
|
||||
"ambiguous_hostnames": sorted(set(ambiguous_hostnames)),
|
||||
"validation_ok": len(unexpected_unmapped_hostnames) == 0,
|
||||
}
|
||||
|
||||
dynu_rows = []
|
||||
for fqdn in sorted(dynu_index.keys()):
|
||||
for rec in dynu_index[fqdn]:
|
||||
@@ -285,11 +434,11 @@ def main() -> int:
|
||||
"summary": {
|
||||
"traefik_hostnames": len(by_fqdn),
|
||||
"dynu_hostnames": len(dynu_index),
|
||||
"matched": sum(1 for x in inventory if x["status"] == "matched"),
|
||||
"missing_in_dynu": sum(1 for x in inventory if x["status"] == "missing_in_dynu"),
|
||||
"dns_only": sum(1 for x in inventory if x["status"] == "dns_only"),
|
||||
"duplicate_traefik_hostnames": len(duplicate_hosts),
|
||||
"mapped_hostnames": sum(1 for x in inventory if x["status"] == "mapped"),
|
||||
"dns_only_hostnames": sum(1 for x in inventory if "dns_only" in x["reasons"]),
|
||||
"traefik_only_hostnames": sum(1 for x in inventory if x["status"] == "traefik_only"),
|
||||
},
|
||||
"validation": validation,
|
||||
"inventory": inventory,
|
||||
"dynu_records_table": dynu_rows,
|
||||
}
|
||||
@@ -300,6 +449,15 @@ def main() -> int:
|
||||
|
||||
print(f"Wrote {OUT_JSON}")
|
||||
print(f"Wrote {OUT_MD}")
|
||||
|
||||
if os.environ.get("DYNU_ENFORCE_VALIDATION") == "true" and not validation["validation_ok"]:
|
||||
print(
|
||||
"Validation failed: unexpected unmapped hostnames were found: "
|
||||
+ ", ".join(validation["unexpected_unmapped_hostnames"]),
|
||||
file=sys.stderr,
|
||||
)
|
||||
return 4
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user