Merge pull request #61 from beatz174-bit/codex/update-terraform-for-dynu-dns-reconciliation

Add Dynu brownfield DNS inventory, outputs, and generator
This commit is contained in:
beatz174-bit
2026-05-13 06:23:27 +10:00
committed by GitHub
5 changed files with 276 additions and 38 deletions
+23
View File
@@ -0,0 +1,23 @@
# This file is maintained automatically by "terraform init".
# Manual edits may be lost in future updates.
provider "registry.terraform.io/beatz174-bit/dynu" {
version = "0.3.0"
hashes = [
"h1:yftAEp/lcPmbVRV8YenFaMJElUkt3j79TSt3OcQnwk4=",
"zh:1f7344737dff5b12155e8308b1cb55b0cc6f83a4a5776eb1cc2273bc84bb8fa2",
"zh:212662c4f5b979401f282f7f9856480bad86d9e488110bebc589a4eeb892ff02",
"zh:2ed5294fc7db7639c41f99a9d7bcfff6585f1f372eb23cd8229adfe219cba63a",
"zh:30fc9df00120f309ae969ae107e4dc4a0b04517f2c07a78934a2c479995f77b8",
"zh:3126369f6dc86e8083ec12a2643ea87a13543ed12631b40375b7b9563da41474",
"zh:3c5775a6763608253e2698b85dcee42eafd6ca8e08a8851866123de403a331b0",
"zh:50bedffbee48505604d05181172018143e54c68249761a749fb7c115eec4ce04",
"zh:528549a2763dd2fbf3ffe047fa19c0524eb08addd777c9c0350800394ff16235",
"zh:99049e25d7d3fb26e2a94d6e609c8989efcee1af1568a77110a70bce2c01f1ef",
"zh:9a6490b67aca08b135e5ba7092fc315e67177be1f280f6c0d06b9c64a0892d3a",
"zh:bfaae08fb5a0b10184a7b6e8382038d0a0b9936ea13efa1462c35cee902b0c14",
"zh:c0dbe59b9bfcbd42f3da1732615669a579275acc654c75127612d86f319b38b3",
"zh:f809ab383cca0a5f83072981c64208cbd7fa67e986a86ee02dd2c82333221e32",
"zh:f815fa2f8681477f159eb9a32b78f8988065e5040b7feea42604bac469c2c4eb",
]
}
+43 -38
View File
@@ -1,30 +1,26 @@
# Dynu Terraform Layer (Brownfield DNS Reconciliation)
This Terraform root is for **Dynu DNS brownfield import/reconciliation** and documentation outputs.
This Terraform root is for **Dynu DNS brownfield reconciliation**. The intended pattern is:
Dynu remains the authoritative DNS provider for existing records. Terraform here is used to mirror and reconcile existing DNS state incrementally, not to casually recreate production DNS from scratch.
1. Import the existing root domain object.
2. Read inventory through `data.dynu_dns_records.root`.
3. Generate reviewable `dynu_dns_record` resources and import commands.
4. Import every existing DNS record into matching Terraform resources.
5. Use `terraform plan` as the reconciliation check before any apply.
## Provider
## Provider behavior to keep in mind
- Source: `beatz174-bit/dynu`
- Provider version is intentionally unpinned in this root to use the latest published release.
- Provider block uses `api_key` auth argument from the published provider schema.
- `dynu_domain` import requires a **numeric Dynu domain ID**.
- Importing `dynu_domain` imports only the root domain object.
- It **does not** import DNS records/subdomains.
- `dynu_dns_record` imports require `<domain_id>/<record_id>`.
## Credentials and auth
## Variables
Use local `terraform.tfvars` (or provider-supported environment variables).
- Required variable: `dynu_api_key`
- Domain variable: `dynu_root_domain` (defaults to `lan.ddnsgeek.com`)
- Optional placeholders for future provider auth changes: `dynu_username`, `dynu_password`
- Import helper variable: `dynu_record_import_id` (set per record import operation)
Never commit:
- `terraform.tfvars`
- `.terraform/`
- `*.tfstate*`
- credentials/secrets
- `dynu_root_domain` (default: `lan.ddnsgeek.com`)
- `dynu_api_key` (sensitive)
- `dynu_username` / `dynu_password` (optional)
## Safe validation commands
@@ -33,35 +29,44 @@ cd infrastructure/terraform/dynu
terraform fmt -check -recursive
terraform init -backend=false -input=false
terraform validate
python3 -m py_compile scripts/generate-brownfield-records.py
```
## Local workflow
## Brownfield workflow
```bash
cp terraform.tfvars.example terraform.tfvars
$EDITOR terraform.tfvars
cd infrastructure/terraform/dynu
terraform init
terraform import dynu_domain.lan_ddnsgeek_com '<numeric-dynu-domain-id>'
terraform apply -refresh-only
terraform output -json dynu_dns_records > /tmp/dynu-records.json
python3 scripts/generate-brownfield-records.py --dry-run
python3 scripts/generate-brownfield-records.py --overwrite
# Review generated/dynu_dns_records.generated.tf
# Review generated/import-dynu-dns-records.sh
bash generated/import-dynu-dns-records.sh
terraform plan
```
## Import workflow (one object at a time)
## What each component means
```bash
terraform import dynu_domain.lan_ddnsgeek_com '<provider-specific-domain-import-id-or-domain-name>'
terraform state show dynu_domain.lan_ddnsgeek_com
terraform plan
```
- `data.dynu_dns_records.root`: read-only live inventory from Dynu.
- `generated/dynu_dns_records.generated.tf`: generated management-intent resources; includes `prevent_destroy = true` on each record.
- `generated/import-dynu-dns-records.sh`: imports each discovered record to its generated `dynu_dns_record` address using `<domain_id>/<record_id>`.
- `terraform plan` after imports: reconciliation checkpoint. Any create/update/delete must be reviewed manually before apply.
Or with import blocks:
## Generated artifacts
```bash
cp imports.tf.example imports.tf
$EDITOR imports.tf
terraform plan -generate-config-out=generated-dynu.tf
```
The helper script writes these files under `generated/`:
Notes:
- `generated/dynu_dns_records_inventory.json`
- `generated/dynu_dns_records.generated.tf`
- `generated/import-dynu-dns-records.sh`
1. Confirm exact import ID formats from the provider docs.
2. For DNS records, add one `dynu_dns_record` resource at a time after confirming required arguments (`hostname`, `record_type`) from `terraform providers schema`.
3. Treat generated config as draft input; keep only stable, meaningful attributes in hand-maintained `.tf`.
These are generated outputs meant for operator review before use in production.
@@ -0,0 +1,16 @@
# Read-only live inventory of every DNS record Dynu currently serves for
# the root domain. This is the source of truth the brownfield generator
# and the reconciliation outputs are built from.
data "dynu_dns_records" "root" {
  hostname = var.dynu_root_domain
}
locals {
  # Sanitized "<hostname>_<record_type>" slug per record id, mirroring
  # tf_name() in scripts/generate-brownfield-records.py: lowercase,
  # "*" spelled out as "wildcard", runs of other non-alphanumerics
  # collapsed to "_", then leading/trailing underscores trimmed.
  #
  # NOTE: Terraform has no regexreplace() function; regex replacement is
  # done with replace() and a /slash-delimited/ pattern.
  dynu_dns_record_slug = {
    for record in data.dynu_dns_records.root.records :
    record.id => trim(
      replace(
        replace(lower(format("%s_%s", record.hostname, record.record_type)), "*", "wildcard"),
        "/[^a-z0-9]+/",
        "_"
      ),
      "_"
    )
  }

  # Generated Terraform resource name => record. Resource names must start
  # with a letter, so slugs that begin with a digit get a "record_" prefix
  # (checked on the trimmed slug, matching the Python generator); the
  # record id suffix keeps names unique.
  dynu_dns_record_name_map = {
    for record in data.dynu_dns_records.root.records :
    format(
      "%s_%s",
      can(regex("^[a-z]", local.dynu_dns_record_slug[record.id]))
      ? local.dynu_dns_record_slug[record.id]
      : format("record_%s", local.dynu_dns_record_slug[record.id]),
      record.id
    ) => record
  }
}
+28
View File
@@ -17,3 +17,31 @@ output "dynu_dns_inventory" {
records = local.dynu_dns_records_catalog
}
}
# Numeric Dynu domain ID; required for dynu_domain imports and forms the
# first half of every record import ID.
output "dynu_root_domain_id" {
  description = "Dynu numeric domain ID resolved from dynu_root_domain."
  value = data.dynu_dns_records.root.domain_id
}

# Canonical root domain name as reported back by the Dynu API.
output "dynu_root_domain_name" {
  description = "Dynu root domain name resolved from dynu_root_domain."
  value = data.dynu_dns_records.root.domain_name
}

# Raw record inventory; consumed by scripts/generate-brownfield-records.py
# via `terraform output -json`.
output "dynu_dns_records" {
  description = "Full read-only DNS record inventory returned by Dynu."
  value = data.dynu_dns_records.root.records
}

# De-duplicated, sorted hostname list for quick human review of coverage.
output "dynu_dns_hostnames" {
  description = "Sorted hostname list discovered for dynu_root_domain."
  value = sort(distinct([for record in data.dynu_dns_records.root.records : record.hostname]))
}

# Generated resource name => "<domain_id>/<record_id>" pairs, ready to be
# fed to `terraform import`.
output "dynu_dns_record_import_ids" {
  description = "Map of generated Terraform resource names to provider import IDs in domain_id/record_id format."
  value = {
    for name, record in local.dynu_dns_record_name_map :
    name => format("%s/%s", record.domain_id, record.id)
  }
}
@@ -0,0 +1,166 @@
#!/usr/bin/env python3
"""Generate Terraform dynu_dns_record resources/import commands from Dynu inventory outputs."""
from __future__ import annotations
import argparse
import json
import re
import subprocess
import sys
from pathlib import Path
# Resolve all paths relative to this script so it works from any CWD:
# the Terraform root is the parent of scripts/, and every generated
# artifact lives under generated/.
SCRIPT_PATH = Path(__file__).resolve()
TF_ROOT = SCRIPT_PATH.parents[1]
GENERATED_DIR = TF_ROOT / "generated"
# Output artifacts: generated resource definitions, the import helper
# shell script, and a raw JSON snapshot of the discovered inventory.
TF_FILE = GENERATED_DIR / "dynu_dns_records.generated.tf"
IMPORT_SCRIPT = GENERATED_DIR / "import-dynu-dns-records.sh"
INVENTORY_FILE = GENERATED_DIR / "dynu_dns_records_inventory.json"
# Warning banner prepended to the generated .tf file so reviewers do not
# apply it blindly against production DNS.
HEADER_TF = """# ---------------------------------------------------------------------------
# GENERATED FILE - REVIEW BEFORE USE
#
# Generated from Dynu brownfield DNS inventory.
# Do not blindly apply this file to production DNS.
# Import records into Terraform state before allowing Terraform to manage them.
# ---------------------------------------------------------------------------
"""
# Preamble of the generated import script: strict bash mode, cd into the
# Terraform root, and a note that imports are skipped when already in state.
HEADER_SH = """#!/usr/bin/env bash
# ---------------------------------------------------------------------------
# GENERATED FILE - REVIEW BEFORE USE
#
# Imports existing Dynu DNS records into Terraform state.
# Does not apply changes.
# ---------------------------------------------------------------------------
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
TF_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
cd "${TF_ROOT}"
# Re-running imports will fail for resources already in state.
# This script skips imports when state already contains the resource address.
"""
# Record attributes copied into generated resources only when present
# and non-empty (see generate_resources).
OPTIONAL_FIELDS = ["group", "host", "priority", "weight", "port", "flags", "tag", "value", "node_name"]
def run_terraform_output() -> dict:
    """Return the parsed JSON of ``terraform output -json`` for this root.

    Raises:
        RuntimeError: when the root has not been initialized or the
            terraform invocation exits non-zero.
    """
    # A missing .terraform directory means `terraform init` was never run;
    # fail fast with an actionable message instead of a cryptic CLI error.
    if not (TF_ROOT / ".terraform").exists():
        raise RuntimeError("Terraform is not initialized in infrastructure/terraform/dynu. Run: terraform init")
    command = ["terraform", "output", "-json"]
    result = subprocess.run(command, cwd=TF_ROOT, capture_output=True, text=True)
    if result.returncode:
        raise RuntimeError(f"Failed to run {' '.join(command)}:\n{result.stderr.strip()}")
    return json.loads(result.stdout)
def tf_name(record: dict) -> str:
    """Derive a deterministic Terraform resource name for *record*.

    The name is built from hostname, record_type and id: lowercased,
    wildcards spelled out, squashed to ``[a-z0-9_]``, forced to begin
    with a letter, and guaranteed to end with the record id.
    """
    raw = "_".join(str(record.get(key, "")) for key in ("hostname", "record_type", "id"))
    slug = raw.lower().replace("*", "wildcard")
    slug = re.sub(r"[^a-z0-9_]+", "_", slug)
    slug = re.sub(r"_+", "_", slug).strip("_")
    if not slug:
        slug = "record"
    elif not slug[0].isalpha():
        # Terraform identifiers must start with a letter.
        slug = f"record_{slug}"
    record_id = str(record.get("id", ""))
    if not slug.endswith(record_id):
        slug = f"{slug}_{record_id}"
    return slug
def hcl_value(value):
    """Render a Python value as an HCL literal.

    Bools become ``true``/``false``, numbers are emitted bare, and
    everything else (strings, None, containers) is JSON-encoded, which
    is valid HCL for those cases.
    """
    # bool must be tested before int: bool is a subclass of int.
    if isinstance(value, bool):
        return str(value).lower()
    if isinstance(value, (int, float)):
        return str(value)
    return json.dumps(value)
def generate_resources(records: list[dict]) -> str:
    """Render the generated ``.tf`` file body for every discovered record.

    Each record becomes one ``dynu_dns_record`` resource with
    ``prevent_destroy = true`` so a later apply cannot delete live DNS.
    Returns the full file content ending in exactly one newline.
    """
    # Start with the review-warning banner, then one block per record.
    chunks = [HEADER_TF.rstrip(), ""]
    for rec in records:
        name = tf_name(rec)
        lines = [f'resource "dynu_dns_record" "{name}" {{']
        lines.append(f" hostname = {hcl_value(rec.get('hostname'))}")
        lines.append(f" record_type = {hcl_value(rec.get('record_type'))}")
        # ttl/enabled are emitted only when the API reported them.
        if rec.get("ttl") is not None:
            lines.append(f" ttl = {hcl_value(rec.get('ttl'))}")
        if rec.get("enabled") is not None:
            lines.append(f" enabled = {hcl_value(rec.get('enabled'))}")
        content = rec.get("content")
        rtype = str(rec.get("record_type", "")).upper()
        # An A/AAAA record with empty content is treated as a Dynu
        # dynamic-DNS record; anything else carries its content literally.
        if content in (None, "") and rtype in {"A", "AAAA"}:
            lines.append(" dynamic = true")
        elif content not in (None, ""):
            lines.append(f" content = {hcl_value(content)}")
        # Optional attributes are copied only when present and non-empty.
        for field in OPTIONAL_FIELDS:
            value = rec.get(field)
            if value not in (None, ""):
                lines.append(f" {field.ljust(11)}= {hcl_value(value)}")
        # Guard every generated record against accidental destruction.
        lines.extend([
            "",
            " lifecycle {",
            " prevent_destroy = true",
            " }",
            "}",
            "",
        ])
        chunks.extend(lines)
    return "\n".join(chunks).rstrip() + "\n"
def generate_import_script(records: list[dict]) -> str:
    """Render the bash script that imports every discovered record.

    Returns the full script content. Each import is wrapped in a
    state-show guard so re-running the script is idempotent.
    """
    lines = [HEADER_SH.rstrip(), ""]
    for rec in records:
        name = tf_name(rec)
        # Provider import ID format: <domain_id>/<record_id>.
        import_id = f"{rec['domain_id']}/{rec['id']}"
        addr = f"dynu_dns_record.{name}"
        # Skip addresses already in state: terraform import fails on them.
        lines.append(f"if terraform state show '{addr}' >/dev/null 2>&1; then")
        lines.append(f" echo 'Skipping already imported: {addr}'")
        lines.append("else")
        lines.append(f" terraform import '{addr}' '{import_id}'")
        lines.append("fi")
        lines.append("")
    return "\n".join(lines).rstrip() + "\n"
def write_file(path: Path, content: str, dry_run: bool, overwrite: bool) -> None:
    """Write *content* to *path*, honoring dry-run and overwrite flags.

    Raises:
        RuntimeError: when *path* already exists and *overwrite* is false.
    """
    # The overwrite guard runs even under --dry-run so the operator learns
    # about clobbering before a real run.
    if path.exists() and not overwrite:
        raise RuntimeError(f"Refusing to overwrite existing file: {path}. Re-run with --overwrite.")
    if not dry_run:
        path.write_text(content, encoding="utf-8")
        print(f"Wrote {path}")
    else:
        print(f"[dry-run] Would write {path}")
def main() -> int:
    """CLI entry point: pull the inventory from terraform output and write
    the three generated artifacts.

    Returns 0 on success, 1 on any failure (reported to stderr).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--dry-run", action="store_true", help="Print intended output paths without writing files.")
    parser.add_argument("--overwrite", "--force", action="store_true", dest="overwrite", help="Overwrite existing generated files.")
    args = parser.parse_args()
    try:
        outputs = run_terraform_output()
        records = outputs["dynu_dns_records"]["value"]
        if not isinstance(records, list):
            raise RuntimeError("terraform output dynu_dns_records did not return a list.")
        GENERATED_DIR.mkdir(parents=True, exist_ok=True)
        inventory_json = json.dumps(records, indent=2, sort_keys=True) + "\n"
        # Write the JSON snapshot, the .tf resources, then the import script.
        for target, body in (
            (INVENTORY_FILE, inventory_json),
            (TF_FILE, generate_resources(records)),
            (IMPORT_SCRIPT, generate_import_script(records)),
        ):
            write_file(target, body, args.dry_run, args.overwrite)
        if not args.dry_run:
            # The import helper must be directly executable.
            IMPORT_SCRIPT.chmod(0o755)
    except Exception as exc:  # noqa: BLE001 - top-level CLI boundary
        print(f"Error: {exc}", file=sys.stderr)
        return 1
    return 0
# Script entry point: propagate main()'s return value as the exit status.
if __name__ == "__main__":
    raise SystemExit(main())