1
0
Fork 0
mirror of https://github.com/NeoCloud/NeoNetwork synced 2024-12-26 12:59:24 +08:00
NeoNetwork/scripts/roa.py

521 lines
16 KiB
Python
Raw Normal View History

2020-05-01 11:40:05 +08:00
#!/usr/bin/env python3
2020-05-23 19:42:52 +08:00
import argparse
import json
2020-05-25 12:08:09 +08:00
import re
2020-05-23 19:42:52 +08:00
import time
2021-06-08 08:26:26 +08:00
# dnssec
from base64 import b64decode
2020-05-23 19:42:52 +08:00
from collections import defaultdict
from contextlib import redirect_stdout
2021-06-08 08:26:26 +08:00
from functools import wraps
2020-05-23 19:42:52 +08:00
from io import StringIO
from ipaddress import IPv4Network, IPv6Network, ip_network
2020-05-01 11:40:05 +08:00
from itertools import combinations
2020-05-23 19:42:52 +08:00
from pathlib import Path
2020-07-21 11:00:14 +08:00
import netaddr
2020-05-23 19:42:52 +08:00
import toml
2021-04-20 09:23:02 +08:00
from dns.dnssec import make_ds
from dns.rdtypes.ANY.DNSKEY import DNSKEY
2021-06-08 08:26:26 +08:00
from tabulate import tabulate
2020-05-23 19:42:52 +08:00
# Address pools that make up the NeoNetwork address space (one IPv4, one IPv6).
NEO_NETWORK_POOL = [ip_network("10.127.0.0/16"), ip_network("fd10:127::/32")]
def pick(entity: dict, fields: list, **kwargs: str):
    """Project *entity* onto a subset of its keys.

    Args:
        entity: source mapping.
        fields: keys copied under their own name; missing keys become None.
        **kwargs: ``old_field=new_field`` pairs — the value of ``old_field``
            in *entity* is copied under the name ``new_field``.

    Returns:
        A new dict containing only the requested fields.
    """
    # NOTE: the original annotated `fields` as the list literal `[str]` and
    # `**kwargs` as `dict`; both were misleading and are corrected here.
    new_entity = {field: entity.get(field) for field in fields}
    for old_field, new_field in kwargs.items():
        new_entity[new_field] = entity.get(old_field)
    return new_entity
def is_neo_network(address):
    """Return True when *address* lies inside one of the NeoNetwork pools."""
    for pool in NEO_NETWORK_POOL:
        if address.version == pool.version and address.subnet_of(pool):
            return True
    return False
def is_neo_network_asn(asn: int):
    """True when *asn* falls in the NeoNetwork private ASN block."""
    return asn in range(4201270000, 4201280000)
def is_dn42_asn(asn: int):
    """True when *asn* falls in the DN42 private ASN block."""
    return asn in range(4242420000, 4242430000)
def name_to_nic_hdl(name):
    """Derive an RPSL-style NIC handle from *name*.

    Uppercases the name and replaces every character outside [0-9A-Z]
    with '-', then validates the result via assertions.
    """
    handle, substitutions = re.subn(r"[^0-9A-Z]", "-", name.upper())
    effective = len(handle) - handle.count("-")
    assert effective >= 3  # has at least 3 effective chars
    assert not handle.startswith("-")  # starts with [0-9A-Z]
    assert substitutions < effective  # not too many subs
    return handle
2020-05-23 19:42:52 +08:00
def iter_toml_file(path: str):
    """Yield ``(Path, parsed_dict)`` for every ``*.toml`` file directly under *path*."""
    for entry in Path(path).iterdir():
        if entry.is_file() and entry.suffix == ".toml":
            yield entry, toml.loads(entry.read_text())
2021-06-06 20:18:41 +08:00
def _sort_as_iterator(func):
@wraps(func)
def wrapped(*args, **kwargs):
2021-06-08 08:26:26 +08:00
for item in sorted(
list(func(*args, **kwargs)), key=lambda x: x[0], reverse=False
):
2021-06-06 20:18:41 +08:00
yield item
2021-06-08 08:26:26 +08:00
2021-06-06 20:18:41 +08:00
return wrapped
@_sort_as_iterator
def load_entities():
    """Yield ``(entity_id, entity_dict)`` for every toml file under ./entity, sorted by id."""
    yield from ((path.stem, data) for path, data in iter_toml_file("entity"))
2020-05-23 19:42:52 +08:00
2021-06-06 20:18:41 +08:00
@_sort_as_iterator
def load_asn(entities: dict):
    """Yield ``(asn, info)`` for every toml file under ./asn, sorted by ASN.

    The ``source`` field is derived from the ASN number when it falls in
    the NeoNetwork or DN42 private blocks; otherwise the value declared in
    the toml file is kept.  Asserts that the owner is a known entity and
    that the source is one of the recognised registries.
    """
    for item, entity in iter_toml_file("asn"):
        asn = int(item.stem.lstrip("AS"))
        # Explicit conditional instead of the fragile `cond and a or b`
        # chain: the range-derived source always wins over the file value.
        if is_neo_network_asn(asn):
            entity["source"] = "NeoNetwork"
        elif is_dn42_asn(asn):
            entity["source"] = "DN42"
        else:
            entity["source"] = entity.get("source")
        assert entity["owner"] in entities
        assert entity["source"] in ["NeoNetwork", "DN42", "Internet"]
        yield asn, entity
2020-05-23 19:42:52 +08:00
def node_to_asn(orignal_asn_set: set):
    """Build a ``node name -> ASN`` table from ./node.

    Every ASN referenced by a node must already exist in *orignal_asn_set*.
    """
    node_table = {}
    for _, entities in iter_toml_file("node"):
        mapping = {name: info["asn"] for name, info in entities.items()}
        # each node's ASN must be registered in the asn directory
        assert set(mapping.values()).issubset(orignal_asn_set)
        node_table.update(mapping)
    return node_table
2020-05-01 11:40:05 +08:00
2020-05-23 19:42:52 +08:00
def assert_peer(nodes: set):
    """Validate ./peer: each file name and every ``to-peer`` target must be a known node."""
    for item, entities in iter_toml_file("peer"):
        assert item.stem in nodes
        assert set(entities["to-peer"]).issubset(nodes)
def route_to_roa(asn_table: dict):
    """Load and validate all route declarations from ./route.

    Returns a list of route dicts (asn, name, type, prefix, supernet,
    maxLength) sorted by ASN.  Validation enforces that every prefix lies
    inside the NeoNetwork pools and that any two overlapping prefixes have
    a properly declared supernet relationship.  The assertion order below
    is significant — do not reorder the checks.
    """

    def make_route():
        # Parse every per-ASN route file; only loopback/subnet entries count.
        for item, entity in iter_toml_file("route"):
            asn = int(item.stem.lstrip("AS"))
            for prefix, fields in entity.items():
                if fields["type"] not in ("loopback", "subnet"):
                    continue
                fields["asn"] = asn
                # strict=True rejects host bits set below the prefix length
                fields["prefix"] = ip_network(prefix, strict=True)
                # maxLength defaults to the host route length for the family
                fields["maxLength"] = fields.get("max-len", fields["prefix"].max_prefixlen)
                assert fields["prefix"].prefixlen <= fields["maxLength"] <= fields["prefix"].max_prefixlen
                supernet = fields.get("supernet")
                fields["supernet"] = (
                    ip_network(supernet, strict=True) if supernet else None
                )
                assert fields["name"]
                assert is_neo_network(fields["prefix"])
                # a declared supernet must itself be NeoNetwork space and
                # actually contain this prefix
                assert not fields["supernet"] or (
                    is_neo_network(fields["supernet"])
                    and fields["supernet"].supernet_of(fields["prefix"])
                )
                yield pick(fields, ["asn", "name", "type", "prefix", "supernet", "maxLength"])

    entities = sorted(make_route(), key=lambda item: item["asn"])
    prefixes = [item["prefix"] for item in entities]
    # Pairwise overlap check: iterate shortest-prefix-first so net1 is
    # always at least as large as net2.
    for net1, net2 in combinations(
        sorted(entities, key=lambda net: net["prefix"].prefixlen), 2
    ):
        if net1["type"] == net2["type"] == "loopback":
            continue
        if not net1["prefix"].overlaps(net2["prefix"]):
            continue
        entity_from_net = lambda net: asn_table.get(net["asn"])["owner"]
        try:
            assert net1["prefix"] != net2["prefix"]
        except AssertionError:
            # identical prefixes are tolerated only when announced by two
            # different ASNs belonging to the same owner
            assert net1["asn"] != net2["asn"] and entity_from_net(
                net1
            ) == entity_from_net(net2)
            continue
        assert net1["prefix"].supernet_of(net2["prefix"])
        s1net, s2net = (net1["supernet"], net2["supernet"])
        assert s2net  # please include supernet = <cidr> in your route
        # if net1(the bigger net) has a supernet s1net, then s1net and net1
        # will be checked or must have been checked, same for net2
        assert not s1net or s1net in prefixes  # net1.supernet is garbage
        assert s2net == net1["prefix"] or s2net in prefixes  # net2.supernet is garbage
    return entities
2020-05-01 11:40:05 +08:00
2020-05-23 19:42:52 +08:00
def prehandle_roa(asn_table: dict, args):
    """Split the validated route table into IPv4/IPv6 ROA lists.

    Honours the --ipv4/--ipv6 family selection and clamps each record's
    maxLength to the --max/--max6 caps; IPv4 host routes (/32) are always
    kept.  Prefix objects are converted to their CIDR string form so the
    result can be serialised directly.
    """
    routes = route_to_roa(asn_table)
    host_len = IPv4Network(0).max_prefixlen
    roa4 = [entry for entry in routes if isinstance(entry["prefix"], IPv4Network)]
    roa6 = [entry for entry in routes if isinstance(entry["prefix"], IPv6Network)]
    if args.ipv4:
        roa6 = []
    elif args.ipv6:
        roa4 = []
    # drop records longer than the caps, except IPv4 host routes
    roa4 = [
        entry
        for entry in roa4
        if entry["prefix"].prefixlen <= args.max
        or entry["prefix"].prefixlen == host_len
    ]
    roa6 = [entry for entry in roa6 if entry["prefix"].prefixlen <= args.max6]
    for entry in roa4:
        if entry["prefix"].prefixlen == host_len:
            entry["maxLength"] = host_len
        else:
            entry["maxLength"] = min(entry["maxLength"], args.max)
    for entry in roa6:
        entry["maxLength"] = min(entry["maxLength"], args.max6)
    for entry in (*roa4, *roa6):
        entry["prefix"] = entry["prefix"].with_prefixlen
    return roa4, roa6
2021-06-08 08:26:26 +08:00
def export_dnssec_dnskey(include_zsk=False):
    """Collect DNSSEC key material from ``dns/dnssec/*.keys`` files.

    Each ``.keys`` file is expected to hold zone-file style DNSKEY records
    (``<zone> <ttl> IN DNSKEY <flags> <protocol> <algorithm> <key...>``),
    all for the same zone.  By default only KSKs (flags 257) are exported;
    pass include_zsk=True to also include ZSKs (flags 256).

    Returns a list of ``{"zone": ..., "records": [{"dnskey", "ds"}, ...]}``.
    """

    def ds_from_dnskey(zone, flags, protocol, algorithm, *key):
        # Rebuild a dnspython DNSKEY rdata object from the parsed text
        # fields and derive its SHA256 DS record presentation.
        dnspy_dnskey = DNSKEY(
            "IN",
            "DNSKEY",
            int(flags),
            int(protocol),
            int(algorithm),
            b64decode(" ".join(key)),
        )
        return make_ds(zone, dnspy_dnskey, "SHA256").to_text()

    dnskey_path = Path("dns") / "dnssec"
    dnskeys = list()
    for f in dnskey_path.iterdir():
        if f.name.endswith(".keys"):
            zonekey = {"zone": "", "records": list()}
            records = f.read_text().split("\n")
            # skip blank lines and zone-file comments (leading ';')
            records = [r.split() for r in records if r and not r.startswith(';')]
            for zone, _ttl, _in, _dnskey, *dnskey in records:
                int(_ttl)  # TTL must parse as an integer (value unused)
                assert _in == "IN" and _dnskey == "DNSKEY"
                # all records in one file must belong to the same zone
                if not zonekey["zone"]:
                    zonekey["zone"] = zone
                else:
                    assert zonekey["zone"] == zone
                # flags: 256 = ZSK, 257 = KSK
                assert dnskey[0] in ['256', '257']
                if dnskey[0] == '257' or include_zsk:
                    zonekey["records"].append(
                        {
                            "dnskey": " ".join(dnskey),
                            "ds": ds_from_dnskey(zone, *dnskey),
                        }
                    )
            if zonekey["zone"]:
                dnskeys.append(zonekey)
    return dnskeys
2020-05-23 19:42:52 +08:00
2021-06-08 08:26:26 +08:00
2020-05-23 19:42:52 +08:00
def make_export(roa4, roa6):
    """Render the full registry (people, ASNs, routes, DNSSEC keys) as JSON.

    The export is stamped with a generation time and a validity window of
    14 days.
    """

    def with_nic_hdl(entity):
        # annotate each entity in place with its derived NIC handle
        entity["nic_hdl"] = name_to_nic_hdl(entity["name"])
        return entity

    def routes_for(records, asn):
        # routes announced by *asn*, with "name" exported as "netname"
        return [
            pick(record, ["prefix", "maxLength"], name="netname")
            for record in records
            if record["asn"] == asn
        ]

    entities = dict(load_entities())
    asn_list = []
    for asn, asn_info in load_asn(entities):
        asn_list.append(
            {
                "asn": asn,
                "owner": asn_info["owner"],
                "name": asn_info["name"],
                "source": asn_info["source"],
                "description": asn_info.get("description"),
                "routes": {
                    "ipv4": routes_for(roa4, asn),
                    "ipv6": routes_for(roa6, asn),
                },
            }
        )
    current = int(time.time())
    output = {
        "metadata": {"generated": current, "valid": current + 14 * 86400},
        "people": {
            owner: {
                "info": with_nic_hdl(entity),
                "asns": [item for item in asn_list if item["owner"] == owner],
            }
            for owner, entity in entities.items()
        },
        "dnssec": export_dnssec_dnskey(),
    }
    return json.dumps(output, indent=2)
def make_json(roa4, roa6):
    """Serialise the ROA set in the RIPE-style JSON export format."""
    now = int(time.time())
    roas = [
        {"asn": "AS%d" % entry["asn"], **pick(entry, ["prefix", "maxLength"])}
        for entry in (*roa4, *roa6)
    ]
    payload = {
        "metadata": {
            "counts": len(roa4) + len(roa6),
            "generated": now,
            # exports are considered valid for 14 days
            "valid": now + 14 * 86400,
        },
        "roas": roas,
    }
    return json.dumps(payload, indent=2)
2020-05-02 11:29:44 +08:00
2020-05-01 11:40:05 +08:00
2020-05-23 19:42:52 +08:00
def make_rfc8416(roa4, roa6):
    """Serialise the ROA set as an RFC 8416 (SLURM version 1) file."""
    prefix_assertions = [
        pick(
            entry,
            ["asn", "prefix"],
            maxLength="maxPrefixLength",
            name="comment",
        )
        for entry in (*roa4, *roa6)
    ]
    slurm = {
        "slurmVersion": 1,
        "validationOutputFilters": {"prefixFilters": [], "bgpsecFilters": []},
        "locallyAddedAssertions": {
            "bgpsecAssertions": [],
            "prefixAssertions": prefix_assertions,
        },
    }
    return json.dumps(slurm, indent=2)
def make_roa_records(roa4, roa6):
    """Render the ROA set as bird-style ``route ... max ... as ...;`` lines."""
    lines = ["# NeoNetwork ROA tool", ""]
    for entry in (*roa4, *roa6):
        lines.append("route {prefix} max {maxLength} as {asn};".format_map(entry))
    return "\n".join(lines)
def make_summary():
    """Render the registry as a GitHub-flavoured Markdown summary report.

    Builds entity/AS/node/peer/route tables plus used/free CIDR listings
    and an address-space usage table, capturing everything printed via a
    redirected stdout, and returns the accumulated text.
    """
    entities = dict(load_entities())
    asn_table = dict(load_asn(entities))
    node_table = node_to_asn(set(asn_table.keys()))
    stream = StringIO()
    # All print() calls below write into `stream` instead of real stdout.
    with redirect_stdout(stream):
        print("# NeoNetwork Summary")
        print()
        print("## Entity table")
        print()
        entity_table = tabulate(
            (
                (
                    entity["name"],
                    entity.get("contact", {}).get("email"),
                    entity.get("contact", {}).get("telegram"),
                )
                for entity in entities.values()
            ),
            headers=["Name", "Email", "Telegram"],
            tablefmt="github",
        )
        print(entity_table)
        print()
        print("## AS table")
        print()
        as_table = tabulate(
            (
                (entity["source"], "AS{}".format(asn), entity["owner"], entity["name"])
                for asn, entity in sorted(asn_table.items(), key=lambda item: item[0])
            ),
            headers=["Source", "ASN", "Owner", "Name"],
            tablefmt="github",
        )
        print(as_table)
        print()
        print("## Node table")
        print()
        # NOTE: `node_table` is rebound here from the dict to its rendered
        # table string; the original mapping is no longer needed after this.
        node_table = tabulate(
            (
                ("AS{}".format(asn), name)
                for name, asn in sorted(node_table.items(), key=lambda item: item[1])
            ),
            headers=["ASN", "Name"],
            tablefmt="github",
        )
        print(node_table)
        print()
        print("## Peer table")
        print()
        peer_table = tabulate(
            (
                (item.stem, downstream)
                for item, entity in iter_toml_file("peer")
                for downstream in entity["to-peer"]
            ),
            headers=["Upstream", "Downstream"],
            tablefmt="github",
            colalign=("right",),
        )
        print(peer_table)
        print()
        print("## Route table")
        print()
        route_table = tabulate(
            (
                (
                    "AS{asn}".format_map(entity),
                    entity["name"],
                    entity["type"],
                    entity["prefix"] or "",
                    entity["supernet"] or "",
                )
                for entity in route_to_roa(asn_table)
            ),
            headers=["ASN", "Name", "Type", "Prefix", "Supernet"],
            tablefmt="github",
        )
        print(route_table)
        print()
        print("## Used CIDR Range")
        print("<details>\n<summary>Click to expand</summary>")
        print()
        # Merge all announced prefixes into the minimal covering CIDR set.
        prefixes = netaddr.cidr_merge(
            netaddr.IPNetwork(str(entity["prefix"]))
            for entity in route_to_roa(asn_table)
        )
        print("```")
        for prefix in prefixes:
            print(prefix)
        print("```")
        print("</details>")
        # Free space = pool minus everything currently announced.
        free_netset = netaddr.IPSet([str(n) for n in NEO_NETWORK_POOL]) - netaddr.IPSet(prefixes)
        print()
        print("## Free CIDR Range")
        print("<details>\n<summary>Click to expand</summary>")
        print()
        print("```")
        for prefix in free_netset.iter_cidrs():
            print(prefix)
        print("```")
        print("</details>")
        IP_VRSIONS = {4, 6}
        # Total pool size per IP version.
        total_ip_count = {
            ver: sum(
                [
                    prefix.num_addresses
                    for prefix in NEO_NETWORK_POOL
                    if prefix.version == ver
                ]
            )
            for ver in IP_VRSIONS
        }
        # Announced (used) address count per IP version.
        used_ip_count = {
            ver: sum(
                [
                    ip_network(str(prefix)).num_addresses
                    for prefix in prefixes
                    if prefix.version == ver
                ]
            )
            for ver in IP_VRSIONS
        }
        print()
        print("## Address Space Usage")
        print()
        # disable_numparse keeps the pre-formatted numbers/percentages as-is.
        address_space_usage_table = tabulate(
            (
                (
                    f"IPv{ver}",
                    f"{(t:=total_ip_count.get(ver)):.5g}",
                    f"{(u:=used_ip_count.get(ver)):.5g}",
                    f"{t-u:.5g}",
                    f"{u/t*100:.2f}%",
                    f"{(t-u)/t*100:.2f}%",
                )
                for ver in IP_VRSIONS
            ),
            headers=[
                "IP Version",
                "Total",
                "Used",
                "Free",
                "Percent Used",
                "Percent Free",
            ],
            tablefmt="github",
            disable_numparse=True,
        )
        print(address_space_usage_table)
    return stream.getvalue()
def main(args):
    """Validate the registry, then render the output selected by CLI flags."""
    entities = dict(load_entities())
    asn_table = dict(load_asn(entities))
    node_table = node_to_asn(set(asn_table.keys()))
    assert_peer(set(node_table.keys()))
    roa4, roa6 = prehandle_roa(asn_table, args)
    # dispatch on the (mutually exclusive) output-format flags
    if args.export:
        return make_export(roa4, roa6)
    if args.json:
        return make_json(roa4, roa6)
    if args.rfc8416:
        return make_rfc8416(roa4, roa6)
    if args.summary:
        return make_summary()
    return make_roa_records(roa4, roa6)
if __name__ == "__main__":
    # CLI entry point: parse flags, sanity-check the prefix-length caps,
    # then run main() and deliver its output to stdout or a file.
    parser = argparse.ArgumentParser(description="NeoNetwork ROA tool")
    parser.add_argument(
        "-m", "--max", type=int, default=29, help="set ipv4 max prefix length"
    )
    parser.add_argument(
        "-M", "--max6", type=int, default=64, help="set ipv6 max prefix length"
    )
    parser.add_argument("-j", "--json", action="store_true", help="output json")
    parser.add_argument("-r", "--rfc8416", action="store_true", help="output rfc8416")
    parser.add_argument("-s", "--summary", action="store_true", help="output summary")
    parser.add_argument("-o", "--output", default="", help="write output to file")
    parser.add_argument("-4", "--ipv4", action="store_true", help="print ipv4 only")
    parser.add_argument("-6", "--ipv6", action="store_true", help="print ipv6 only")
    parser.add_argument(
        "-e", "--export", action="store_true", help="export registry to json"
    )
    args = parser.parse_args()
    # Caps must be non-negative and within each family's maximum (32 / 128).
    if (
        args.max < 0
        or args.max6 < 0
        or args.max > IPv4Network(0).max_prefixlen
        or args.max6 > IPv6Network(0).max_prefixlen
    ):
        parser.error("check your max prefix length")
    output = main(args)
    # No -o (or "-") means print to stdout; otherwise write non-empty output
    # to the given path.
    if not args.output or args.output == "-":
        print(output)
    elif output:
        Path(args.output).write_text(output)
        print("written to", args.output)