Fix the public DNS
This commit is contained in:
parent
d6d9a348b1
commit
3f41b63582
6 changed files with 265 additions and 204 deletions
|
@ -1,11 +1,8 @@
|
|||
zapp_binary(
|
||||
py_project(
|
||||
name = "updater",
|
||||
main = "src/python/arrdem/updater/__main__.py",
|
||||
main = "src/python/updater/__main__.py",
|
||||
shebang = "/usr/bin/env python3",
|
||||
imports = [
|
||||
"src/python",
|
||||
],
|
||||
deps = [
|
||||
lib_deps = [
|
||||
"//projects/gandi",
|
||||
py_requirement("jinja2"),
|
||||
py_requirement("pyyaml"),
|
||||
|
|
|
@ -1,197 +0,0 @@
|
|||
"""
|
||||
A quick and dirty public DNS script, super tightly coupled to my infrastructure.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
from pprint import pprint
|
||||
import re
|
||||
|
||||
from gandi.client import GandiAPI
|
||||
import jinja2
|
||||
import meraki
|
||||
import yaml
|
||||
|
||||
|
||||
# One zone-file record line: "<name> <ttl> IN <type> <value>[ # comment]".
# BUG FIX: the trailing comment group was previously *mandatory*, so any
# record line without a "# ..." comment failed to match and was silently
# dropped by parse_zone_record. The group is now optional.
RECORD_LINE_PATTERN = re.compile(
    r"^(?P<rrset_name>\S+)\s+"
    r"(?P<rrset_ttl>\S+)\s+"
    r"IN\s+"
    r"(?P<rrset_type>\S+)\s+"
    r"(?P<rrset_values>[^\s#]+)"
    r"(?P<comment>\s*#.*?)?$"
)
|
||||
|
||||
|
||||
def update(m, k, f, *args, **kwargs):
    """clojure.core/update for Python's stateful maps.

    When ``k`` is present in ``m``, rebind ``m[k]`` to
    ``f(m[k], *args, **kwargs)``; a missing key is a no-op. The (possibly
    mutated) mapping is returned for chaining.
    """
    try:
        current = m[k]
    except KeyError:
        return m
    m[k] = f(current, *args, **kwargs)
    return m
|
||||
|
||||
|
||||
def parse_zone_record(line):
    """Parse one zone-file line into a Gandi-style record dict.

    Returns None when the line does not look like a record. The TTL is
    coerced to int and the single parsed value is wrapped in a list so
    same-named records can later be merged.
    """
    match = RECORD_LINE_PATTERN.search(line)
    if match is None:
        return None
    record = match.groupdict()
    # groupdict() always carries every named group, so direct assignment
    # is safe here.
    record["rrset_ttl"] = int(record["rrset_ttl"])
    record["rrset_values"] = [record["rrset_values"]]
    return record
|
||||
|
||||
|
||||
def same_record(lr, rr):
    """A test to see if two records name the same zone entry.

    Two records are "the same" entry when their name and type agree;
    TTL and values are deliberately ignored.
    """
    left_key = (lr["rrset_name"], lr["rrset_type"])
    right_key = (rr["rrset_name"], rr["rrset_type"])
    return left_key == right_key
|
||||
|
||||
|
||||
def records_equate(lr, rr):
    """Equality, ignoring rrset_href which is generated by the API.

    Records equate when they name the same entry (name + type), share a
    TTL, and carry the same set of values (order-insensitive).
    """
    # Short-circuit left-to-right, mirroring the original cascade.
    return (
        lr["rrset_name"] == rr["rrset_name"]
        and lr["rrset_type"] == rr["rrset_type"]
        and lr["rrset_ttl"] == rr["rrset_ttl"]
        and set(lr["rrset_values"]) == set(rr["rrset_values"])
    )
|
||||
|
||||
|
||||
def template_and_parse_zone(template_file, template_bindings):
    """Render a Jinja2 zone template and parse it into merged record dicts.

    Parameters:
      template_file: path to a Jinja2 zone-file template.
      template_bindings: mapping of variables available to the template.

    Returns a list of record dicts. Lines that are empty or start with
    "#" are skipped; records naming the same entry are merged by
    concatenating their value lists; the result is sorted by
    (type, name).
    """
    assert template_file is not None
    assert template_bindings is not None

    with open(template_file) as fp:
        dat = jinja2.Template(fp.read()).render(**template_bindings)

    uncommitted_records = []
    for line in dat.splitlines():
        if line and not line[0] == "#":
            record = parse_zone_record(line)
            if record:
                uncommitted_records.append(record)

    records = []

    for uncommitted_r in uncommitted_records:
        flag = False
        for committed_r in records:
            if same_record(uncommitted_r, committed_r):
                # Join the two records
                committed_r["rrset_values"].extend(uncommitted_r["rrset_values"])
                flag = True

        if not flag:
            records.append(uncommitted_r)

    # BUG FIX: the original called sorted(records, ...) and discarded the
    # returned list, so the records were never actually sorted. Sort in
    # place instead.
    records.sort(key=lambda x: (x["rrset_type"], x["rrset_name"]))

    return records
|
||||
|
||||
|
||||
def diff_zones(left_zone, right_zone):
    """Diff two unordered lists of records constituting a zone.

    Returns a pair (in_left_not_right, in_right_not_left): the records on
    each side with no equal counterpart (per records_equate) on the other.

    BUG FIX: the original appended the stale inner-loop variable ``rr`` to
    in_right_not_left whenever a *left* record went unmatched — blaming an
    arbitrary right-side record, and raising NameError when right_zone was
    empty. Each direction is now computed independently.
    """
    in_left_not_right = [
        lr for lr in left_zone
        if not any(records_equate(lr, rr) for rr in right_zone)
    ]
    in_right_not_left = [
        rr for rr in right_zone
        if not any(records_equate(rr, lr) for lr in left_zone)
    ]
    return in_left_not_right, in_right_not_left
|
||||
|
||||
|
||||
# Command-line interface: config file, template directory, and a dry-run
# flag that reports diffs without pushing them.
parser = argparse.ArgumentParser(
    description='"Dynamic" DNS updating for self-hosted services'
)
for _flag, _opts in [
    ("--config", {"dest": "config_file", "required": True}),
    ("--templates", {"dest": "template_dir", "required": True}),
    ("--dry-run", {"dest": "dry", "action": "store_true", "default": False}),
]:
    parser.add_argument(_flag, **_opts)
|
||||
|
||||
|
||||
def main():
    """Entry point: render zone templates and push changed zones to Gandi."""
    args = parser.parse_args()

    # Operator-provided settings: Meraki/Gandi API keys, template bindings
    # and the list of zone tasks.
    with open(args.config_file, "r") as fp:
        config = yaml.safe_load(fp)

    dashboard = meraki.DashboardAPI(config["meraki"]["key"], output_log=False)
    org = config["meraki"]["organization"]
    device = config["meraki"]["router_serial"]

    # Ask the Meraki dashboard for the router's uplink status; the current
    # public IPv4 address(es) are pulled from it below.
    uplinks = dashboard.appliance.getOrganizationApplianceUplinkStatuses(
        organizationId=org, serials=[device]
    )[0]["uplinks"]

    template_bindings = {
        "local": {
            # One of the two
            "public_v4s": [
                link.get("publicIp") for link in uplinks if link.get("publicIp")
            ],
        },
        # Why isn't there a merge method
        **config["bindings"],
    }

    print(f"Using config {template_bindings!r}...")

    api = GandiAPI(config["gandi"]["key"])

    for task in config["tasks"]:
        # A bare string task is shorthand for {template: <t>.j2, zones: [<t>]}.
        if isinstance(task, str):
            task = {"template": task + ".j2", "zones": [task]}

        computed_zone = template_and_parse_zone(
            os.path.join(args.template_dir, task["template"]), template_bindings
        )

        print(f"Running task {task!r}...")

        for zone_name in task["zones"] or []:
            try:
                live_zone = api.domain_records(zone_name)

                # Compare desired (computed) vs. live records in both
                # directions; any difference triggers a report and, unless
                # dry-running, a full zone replacement.
                lr, rl = diff_zones(computed_zone, live_zone)
                if lr or rl:
                    print(f"Zone {zone_name} differs;")
                    print("Computed:")
                    pprint(computed_zone)
                    pprint("Live:")
                    pprint(live_zone)
                    if(rl):
                        print("Live records not recomputed")
                        pprint(rl)
                    if(lr):
                        print("New records not live")
                        pprint(lr)

                    if not args.dry:
                        print(api.replace_domain(zone_name, computed_zone))
                else:
                    print("Zone {} up to date".format(zone_name))

            except Exception as e:
                # NOTE(review): re-raising aborts the remaining zones —
                # presumably intentional fail-fast behavior; confirm.
                print("While processing zone {}".format(zone_name))
                raise e
|
||||
|
||||
|
||||
# BUG FIX: the guard was `__name__ == "__main__" or 1`, which is always
# true and therefore ran main() on *import* as well as on execution.
if __name__ == "__main__":
    main()
|
109
projects/public_dns/src/python/updater/__init__.py
Normal file
109
projects/public_dns/src/python/updater/__init__.py
Normal file
|
@ -0,0 +1,109 @@
|
|||
"""
|
||||
A quick and dirty public DNS script, super tightly coupled to my infrastructure.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
import jinja2
|
||||
|
||||
|
||||
# One zone-file record line: "<name> <ttl> IN <type> <value>[ # comment]".
# Split into concatenated raw strings, one fragment per field.
RECORD_LINE_PATTERN = re.compile(
    r"^(?P<rrset_name>\S+)\s+"
    r"(?P<rrset_ttl>\S+)\s+"
    r"IN\s+"
    r"(?P<rrset_type>\S+)\s+"
    r"(?P<rrset_values>[^\s#]+)"
    r"(?P<comment>\s*#.*?)?$"
)
|
||||
|
||||
|
||||
def update(m, k, f, *args, **kwargs):
    """clojure.core/update for Python's stateful maps.

    Rebinds ``m[k]`` to ``f(m[k], *args, **kwargs)`` when the key exists;
    absent keys are left untouched. Returns the mapping either way.
    """
    # Guard clause: only transform existing entries.
    if k not in m:
        return m
    m[k] = f(m[k], *args, **kwargs)
    return m
|
||||
|
||||
|
||||
def parse_zone_record(line):
    """Parse a single zone-file line into a record dict, or None.

    On a match the TTL is coerced to int and the parsed value wrapped in
    a one-element list so same-named records can be merged later.
    """
    match = RECORD_LINE_PATTERN.match(line)
    if not match:
        return None
    rec = match.groupdict()
    # Every named group is present in groupdict(), so plain assignment
    # replaces the update() helper here.
    rec["rrset_ttl"] = int(rec["rrset_ttl"])
    rec["rrset_values"] = [rec["rrset_values"]]
    return rec
|
||||
|
||||
|
||||
def same_record(lr, rr):
    """A test to see if two records name the same zone entry.

    Only the record name and type participate; TTL and values do not.
    """
    return all(lr[key] == rr[key] for key in ("rrset_name", "rrset_type"))
|
||||
|
||||
|
||||
def records_equate(lr, rr):
    """Equality, ignoring rrset_href which is generated by the API.

    True when name, type and TTL all match and the value lists contain
    the same set of values, regardless of ordering.
    """
    return (
        lr["rrset_name"] == rr["rrset_name"]
        and lr["rrset_type"] == rr["rrset_type"]
        and lr["rrset_ttl"] == rr["rrset_ttl"]
        and set(lr["rrset_values"]) == set(rr["rrset_values"])
    )
|
||||
|
||||
|
||||
def template_and_parse_zone(template_file, template_bindings):
    """Render a Jinja2 zone template and parse it into merged record dicts.

    Parameters:
      template_file: path to a Jinja2 zone-file template.
      template_bindings: mapping of variables available to the template.

    Returns a list of record dicts. Blank lines and "#"-prefixed lines are
    skipped; unparseable lines are reported; records naming the same entry
    are merged by concatenating their value lists; the result is sorted by
    (type, name).
    """
    assert template_file is not None
    assert template_bindings is not None

    with open(template_file) as fp:
        dat = jinja2.Template(fp.read()).render(**template_bindings)

    uncommitted_records = []
    for line in dat.splitlines():
        if line and not line[0] == "#":
            record = parse_zone_record(line)
            if record:
                uncommitted_records.append(record)
            else:
                print("ERROR, could not parse line %r" % line)

    records = []

    for uncommitted_r in uncommitted_records:
        flag = False
        for committed_r in records:
            if same_record(uncommitted_r, committed_r):
                # Join the two records
                committed_r["rrset_values"].extend(uncommitted_r["rrset_values"])
                flag = True

        if not flag:
            records.append(uncommitted_r)

    # BUG FIX: sorted() returns a new list which the original discarded,
    # so the zone was never actually sorted. Sort in place instead.
    records.sort(key=lambda x: (x["rrset_type"], x["rrset_name"]))

    return records
|
||||
|
||||
|
||||
def diff_zones(left_zone, right_zone):
    """Diff two unordered lists of records constituting a zone.

    Returns (in_left_not_right, in_right_not_left): records on each side
    with no equal counterpart (per records_equate) on the other side.

    BUG FIX: the original only iterated left_zone, and on a miss appended
    the stale inner-loop variable ``rr`` to in_right_not_left — blaming an
    arbitrary right-side record and raising NameError when right_zone was
    empty. Both directions are now computed symmetrically.
    """
    def _missing_from(zone, other):
        # Records in `zone` that nothing in `other` equates to.
        return [r for r in zone if not any(records_equate(r, o) for o in other)]

    return _missing_from(left_zone, right_zone), _missing_from(right_zone, left_zone)
|
92
projects/public_dns/src/python/updater/__main__.py
Normal file
92
projects/public_dns/src/python/updater/__main__.py
Normal file
|
@ -0,0 +1,92 @@
|
|||
"""
|
||||
A quick and dirty public DNS script, super tightly coupled to my infrastructure.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
from pprint import pprint
|
||||
|
||||
from gandi.client import GandiAPI
|
||||
import meraki
|
||||
import yaml
|
||||
|
||||
from updater import *
|
||||
|
||||
|
||||
# Command-line interface for the updater.
parser = argparse.ArgumentParser(
    description='"Dynamic" DNS updating for self-hosted services'
)
# Required: path to the YAML config with API keys, bindings and tasks.
parser.add_argument("--config", dest="config_file", required=True)
# Required: directory holding the Jinja2 zone templates.
parser.add_argument("--templates", dest="template_dir", required=True)
# Optional: report zone diffs without pushing them to Gandi.
parser.add_argument("--dry-run", dest="dry", action="store_true", default=False)
|
||||
|
||||
|
||||
def main():
    """Entry point: render zone templates and push changed zones to Gandi."""
    args = parser.parse_args()

    # Operator-provided settings: Meraki/Gandi API keys, template bindings
    # and the list of zone tasks.
    with open(args.config_file, "r") as fp:
        config = yaml.safe_load(fp)

    dashboard = meraki.DashboardAPI(config["meraki"]["key"], output_log=False)
    org = config["meraki"]["organization"]
    device = config["meraki"]["router_serial"]

    # Ask the Meraki dashboard for the router's uplink status; the current
    # public IPv4 address(es) are pulled from it below.
    uplinks = dashboard.appliance.getOrganizationApplianceUplinkStatuses(
        organizationId=org, serials=[device]
    )[0]["uplinks"]

    template_bindings = {
        "local": {
            # One of the two
            "public_v4s": [
                link.get("publicIp") for link in uplinks if link.get("publicIp")
            ],
        },
        # Why isn't there a merge method
        **config["bindings"],
    }

    print(f"Using config {template_bindings!r}...")

    api = GandiAPI(config["gandi"]["key"])

    for task in config["tasks"]:
        # A bare string task is shorthand for {template: <t>.j2, zones: [<t>]}.
        if isinstance(task, str):
            task = {"template": task + ".j2", "zones": [task]}

        computed_zone = template_and_parse_zone(
            os.path.join(args.template_dir, task["template"]), template_bindings
        )

        print(f"Running task {task!r}...")

        for zone_name in task["zones"] or []:
            try:
                live_zone = api.domain_records(zone_name)

                # Compare desired (computed) vs. live records in both
                # directions; any difference triggers a report and, unless
                # dry-running, a full zone replacement.
                lr, rl = diff_zones(computed_zone, live_zone)
                if lr or rl:
                    print(f"Zone {zone_name} differs;")
                    print("Computed:")
                    pprint(computed_zone)
                    pprint("Live:")
                    pprint(live_zone)
                    if rl:
                        print("Live records not recomputed")
                        pprint(rl)
                    if lr:
                        print("New records not live")
                        pprint(lr)

                    if not args.dry:
                        print(api.replace_domain(zone_name, computed_zone))
                else:
                    print("Zone {} up to date".format(zone_name))

            except Exception as e:
                # NOTE(review): re-raising aborts the remaining zones —
                # presumably intentional fail-fast behavior; confirm.
                print("While processing zone {}".format(zone_name))
                raise e
|
||||
|
||||
|
||||
# BUG FIX: the guard was `__name__ == "__main__" or 1`, which is always
# true and therefore ran main() on *import* as well as on execution.
if __name__ == "__main__":
    main()
|
|
@ -16,7 +16,7 @@ www {{ ttl }} IN A {{ link }}
|
|||
registry {{ ttl }} IN A {{ link }}
|
||||
mirror {{ ttl }} IN A {{ link }}
|
||||
buildcache {{ ttl }} IN A {{ link }}
|
||||
feed {{ ttl }} IN A {{ link }}
|
||||
tentacles {{ ttl }} IN A {{ link }}
|
||||
ton {{ ttl }} IN A {{ link }}
|
||||
relay {{ ttl }} IN A {{ link }}
|
||||
pxe {{ ttl }} IN A {{ link }}
|
||||
|
|
60
projects/public_dns/test/python/test_parsing.py
Normal file
60
projects/public_dns/test/python/test_parsing.py
Normal file
|
@ -0,0 +1,60 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import re
|
||||
|
||||
import pytest
|
||||
|
||||
from updater import RECORD_LINE_PATTERN, parse_zone_record, diff_zones
|
||||
|
||||
|
||||
def test_record_pattern():
    """The record regex accepts space- and tab-delimited record lines."""
    for line in ("foo 300 IN A 1.1.1.1", "foo\t 300\t IN \tA\t 1.1.1.1"):
        assert re.match(RECORD_LINE_PATTERN, line)
|
||||
|
||||
|
||||
# Fixture records shaped like parse_zone_record's output: one A record
# apiece, TTL 300, all pointing at the same IPv4 address, no trailing
# zone-file comment.
AT_RECORD = {
    "comment": None,
    "rrset_name": "@",
    "rrset_ttl": 300,
    "rrset_type": "A",
    "rrset_values": ["67.166.27.157"],
}
A_RECORD = {
    "comment": None,
    "rrset_name": "www",
    "rrset_ttl": 300,
    "rrset_type": "A",
    "rrset_values": ["67.166.27.157"],
}
REGISTRY_RECORD = {
    "comment": None,
    "rrset_name": "registry",
    "rrset_ttl": 300,
    "rrset_type": "A",
    "rrset_values": ["67.166.27.157"],
}
MIRROR_RECORD = {
    "comment": None,
    "rrset_name": "mirror",
    "rrset_ttl": 300,
    "rrset_type": "A",
    "rrset_values": ["67.166.27.157"],
}
|
||||
|
||||
|
||||
def test_diff_zones():
    """diff_zones returns a (left-only, right-only) pair of record lists.

    BUG FIX: the original asserts compared the returned *tuple* against a
    bare list and passed `[]` as the assert *message* — e.g.
    `assert diff_zones(z1, z2) == z1, []` — so they could never pass.
    The expected values are now proper (left, right) tuples.
    """
    z1 = [AT_RECORD, A_RECORD]
    z2 = []
    assert diff_zones(z1, z2) == (z1, [])

    z1 = [AT_RECORD, A_RECORD]
    z2 = [AT_RECORD]
    assert diff_zones(z1, z2) == ([A_RECORD], [])

    z1 = [AT_RECORD, A_RECORD]
    z2 = [A_RECORD]
    assert diff_zones(z1, z2) == ([AT_RECORD], [])

    z2 = [AT_RECORD, A_RECORD]
    z1 = [A_RECORD]
    assert diff_zones(z1, z2) == ([], [AT_RECORD])
|
Loading…
Reference in a new issue