And fmt
This commit is contained in:
parent
debc7996ee
commit
bbae5ef63f
34 changed files with 956 additions and 886 deletions
projects/public-dns/src/python/arrdem/updater
|
@ -20,165 +20,171 @@ import meraki
|
|||
|
||||
|
||||
# Matches one record line of a rendered zone file, e.g.
#   "www 300 IN A 1.2.3.4"
# capturing the rrset name, TTL, type and the raw value tail.
# Raw strings are used so `\S`/`\s` are real regex escapes rather than
# invalid Python string escapes (a SyntaxWarning on modern CPython).
RECORD_LINE_PATTERN = re.compile(
    r"^(?P<rrset_name>\S+)\s+"
    r"(?P<rrset_ttl>\S+)\s+"
    r"IN\s+"
    r"(?P<rrset_type>\S+)\s+"
    r"(?P<rrset_values>.+)$"
)
def update(m, k, f, *args, **kwargs):
    """clojure.core/update for Python's stateful maps.

    If key *k* is present in mapping *m*, replace its value with
    ``f(old_value, *args, **kwargs)``; a missing key is a no-op.
    Mutates *m* in place and returns it for chaining.
    """
    try:
        current = m[k]
    except KeyError:
        # Absent key: leave the map untouched.
        return m
    m[k] = f(current, *args, **kwargs)
    return m
def parse_zone_record(line):
    """Parse a single zone-file line into an rrset dict.

    Returns a dict with keys ``rrset_name``, ``rrset_ttl`` (as int),
    ``rrset_type`` and ``rrset_values`` (a singleton list holding the
    raw value text), or ``None`` when the line does not look like a
    record per RECORD_LINE_PATTERN.
    """
    match = RECORD_LINE_PATTERN.search(line)
    if match is None:
        return None

    record = match.groupdict()
    # Both keys are guaranteed present by the pattern's named groups,
    # so plain assignment is equivalent to update()-ing them.
    record["rrset_ttl"] = int(record["rrset_ttl"])
    record["rrset_values"] = [record["rrset_values"]]
    return record
def same_record(lr, rr):
    """
    A test to see if two records name the same zone entry.

    Two records target the same entry when both their name and their
    type agree; TTL and values are deliberately ignored.
    """
    return all(lr[key] == rr[key] for key in ("rrset_name", "rrset_type"))
def records_equate(lr, rr):
    """
    Equality, ignoring rrset_href which is generated by the API.

    True when the records name the same entry, carry the same TTL, and
    hold the same set of values (value order is irrelevant).
    """
    return (
        same_record(lr, rr)
        and lr["rrset_ttl"] == rr["rrset_ttl"]
        and set(lr["rrset_values"]) == set(rr["rrset_values"])
    )
def template_and_parse_zone(template_file, template_bindings):
    """Render a Jinja2 zone template and parse it into rrset dicts.

    Renders *template_file* with *template_bindings*, parses every
    non-comment line via parse_zone_record, merges records that name
    the same (name, type) entry into a single rrset with combined
    values, and returns the merged records sorted by (type, name).
    """
    assert template_file is not None
    assert template_bindings is not None

    with open(template_file) as f:
        dat = jinja2.Template(f.read()).render(**template_bindings)

    uncommitted_records = []
    for line in dat.splitlines():
        # Skip blank lines and comment lines.
        if line and not line[0] == "#":
            record = parse_zone_record(line)
            if record:
                uncommitted_records.append(record)

    # Merge records naming the same zone entry into one rrset.
    records = []
    for uncommitted_r in uncommitted_records:
        merged = False
        for committed_r in records:
            if same_record(uncommitted_r, committed_r):
                # Join the two records
                committed_r["rrset_values"].extend(uncommitted_r["rrset_values"])
                merged = True

        if not merged:
            records.append(uncommitted_r)

    # BUG FIX: the original called sorted(records, ...) and discarded
    # the result, so the returned zone was never actually sorted.
    # Sort in place so callers see the intended (type, name) order.
    records.sort(key=lambda x: (x["rrset_type"], x["rrset_name"]))

    return records
def diff_zones(left_zone, right_zone):
    """
    Equality between unordered lists of records constituting a zone.

    Returns a truthy value (a non-empty list of unmatched records)
    when the zones differ, and a falsey value when every record in
    each zone has an equal counterpart in the other.
    """

    def _missing_from(zone_a, zone_b):
        # Records of zone_a that have no equal counterpart in zone_b.
        missing = []
        for a in zone_a:
            matched = False
            for b in zone_b:
                if same_record(a, b) and records_equate(a, b):
                    matched = True
                    break
            if not matched:
                missing.append(a)
        return missing

    in_left_not_right = _missing_from(left_zone, right_zone)
    # BUG FIX: the original's right-zone pass appended the leaked
    # inner-loop variable `lr` (a LEFT-zone record) instead of the
    # right-zone record under test, so this list held wrong entries.
    in_right_not_left = _missing_from(right_zone, left_zone)

    return in_left_not_right or in_right_not_left
# Command-line interface: config file, template directory, and an
# optional dry-run switch that suppresses writes to the DNS provider.
parser = argparse.ArgumentParser(
    description='"Dynamic" DNS updating for self-hosted services'
)
parser.add_argument("--config", dest="config_file", required=True)
parser.add_argument("--templates", dest="template_dir", required=True)
parser.add_argument("--dry-run", dest="dry", action="store_true", default=False)
def main():
    """Entry point: render each task's zone and push changed zones.

    Loads the YAML config, queries the Meraki dashboard for the
    router's public uplink IPs, renders each task's Jinja2 zone
    template with those bindings, and replaces every Gandi zone whose
    live records differ from the computed ones (printing only, when
    --dry-run is given).
    """
    args = parser.parse_args()
    # BUG FIX: the original passed a bare open() into safe_load and
    # leaked the file handle; use a context manager to close promptly.
    with open(args.config_file, "r") as f:
        config = yaml.safe_load(f)

    dashboard = meraki.DashboardAPI(config["meraki"]["key"], output_log=False)
    org = config["meraki"]["organization"]
    device = config["meraki"]["router_serial"]

    uplinks = dashboard.appliance.getOrganizationApplianceUplinkStatuses(
        organizationId=org, serials=[device]
    )[0]["uplinks"]

    template_bindings = {
        "local": {
            # One of the two
            "public_v4s": [
                link.get("publicIp") for link in uplinks if link.get("publicIp")
            ],
        },
        # Why isn't there a merge method
        **config["bindings"],
    }

    api = GandiAPI(config["gandi"]["key"])

    for task in config["tasks"]:
        # A bare string task is shorthand for one template serving one zone.
        if isinstance(task, str):
            task = {"template": task + ".j2", "zones": [task]}

        computed_zone = template_and_parse_zone(
            os.path.join(args.template_dir, task["template"]), template_bindings
        )

        for zone_name in task["zones"]:
            try:
                live_zone = api.domain_records(zone_name)

                if diff_zones(computed_zone, live_zone):
                    print("Zone {} differs, computed zone:".format(zone_name))
                    pprint(computed_zone)
                    if not args.dry:
                        print(api.replace_domain(zone_name, computed_zone))
                else:
                    print("Zone {} up to date".format(zone_name))

            except Exception as e:
                # Name the failing zone before surfacing the real traceback.
                print("While processing zone {}".format(zone_name))
                raise e
if __name__ == "__main__":
    # BUG FIX: the guard previously read `... or 1`, which is always
    # truthy and ran main() on import (a debug leftover). Run only
    # when executed as a script.
    main()
|
|
Loading…
Add table
Add a link
Reference in a new issue