From 5d7b40b9f2ade9e27b5ca2269e0b9cb787c3bd56 Mon Sep 17 00:00:00 2001
From: Septs
Date: Sun, 26 Jul 2020 20:39:26 +0800
Subject: [PATCH] Regular update: 2020-07-26T12:39:26Z

---
 .scripts/generate-roa.sh |  3 +--
 .scripts/roa.py          | 31 +++++++++++++++++++------------
 2 files changed, 20 insertions(+), 14 deletions(-)

diff --git a/.scripts/generate-roa.sh b/.scripts/generate-roa.sh
index 55ba24b..dd00c3d 100755
--- a/.scripts/generate-roa.sh
+++ b/.scripts/generate-roa.sh
@@ -12,8 +12,7 @@ mkdir -p generated
 mkdir -p generated/dns

 .scripts/dns-reverse-generator.py
-cp dns/db.10.127 generated/dns
-cp dns/db.fd10.127 generated/dns
+cp -R dns/* generated/dns

 .scripts/roa.py -m "$MAX_LEN_4" -M "$MAX_LEN_6" -o generated/roa46_bird2.conf
 .scripts/roa.py -m "$MAX_LEN_4" -M "$MAX_LEN_6" -4 -o generated/roa4_bird2.conf
diff --git a/.scripts/roa.py b/.scripts/roa.py
index e6ff741..4db927e 100755
--- a/.scripts/roa.py
+++ b/.scripts/roa.py
@@ -109,7 +109,10 @@ def route_to_roa(asn_table: dict):
                 )
                 assert fields["name"]
                 assert is_neo_network(fields["prefix"])
-                assert not fields["supernet"] or (is_neo_network(fields["supernet"]) and fields["supernet"].supernet_of(fields["prefix"]))
+                assert not fields["supernet"] or (
+                    is_neo_network(fields["supernet"])
+                    and fields["supernet"].supernet_of(fields["prefix"])
+                )
                 yield pick(fields, ["asn", "name", "type", "prefix", "supernet"])

     entities = sorted(make_route(), key=lambda item: item["asn"])
@@ -245,7 +248,9 @@ def make_summary():
     node_table = node_to_asn(set(asn_table.keys()))
     stream = StringIO()
     with redirect_stdout(stream):
-        print("Entity table:")
+        print("# NeoNetwork Summary")
+        print()
+        print("## Entity table")
         entity_table = tabulate(
             (
                 (
@@ -256,33 +261,33 @@ def make_summary():
                 for entity in entities.values()
             ),
             headers=["Name", "Email", "Telegram"],
-            tablefmt="presto",
+            tablefmt="github",
         )
         print(entity_table)
         print()
-        print("AS table:")
+        print("## AS table")
         as_table = tabulate(
             (
                 (entity["source"], "AS{}".format(asn), entity["owner"], entity["name"])
                 for asn, entity in sorted(asn_table.items(), key=lambda item: item[0])
             ),
             headers=["Source", "ASN", "Owner", "Name"],
-            tablefmt="presto",
+            tablefmt="github",
         )
         print(as_table)
         print()
-        print("Node table:")
+        print("## Node table")
         node_table = tabulate(
             (
                 ("AS{}".format(asn), name)
                 for name, asn in sorted(node_table.items(), key=lambda item: item[1])
             ),
             headers=["ASN", "Name"],
-            tablefmt="presto",
+            tablefmt="github",
         )
         print(node_table)
         print()
-        print("Peer table:")
+        print("## Peer table")
         peer_table = tabulate(
             (
                 (item.stem, downstream)
@@ -290,12 +295,12 @@ def make_summary():
                 for downstream in entity["to-peer"]
             ),
             headers=["Upstream", "Downstream"],
-            tablefmt="presto",
+            tablefmt="github",
             colalign=("right",),
         )
         print(peer_table)
         print()
-        print("Route table:")
+        print("## Route table")
         route_table = tabulate(
             (
                 (
@@ -308,17 +313,19 @@ def make_summary():
                 for entity in route_to_roa(asn_table)
             ),
             headers=["ASN", "Name", "Type", "Prefix", "Supernet"],
-            tablefmt="presto",
+            tablefmt="github",
         )
         print(route_table)
         print()
-        print("Used CIDR Range:")
+        print("## Used CIDR Range")
         prefixes = netaddr.cidr_merge(
             netaddr.IPNetwork(str(entity["prefix"]))
             for entity in route_to_roa(asn_table)
         )
+        print("```")
         for prefix in prefixes:
             print(prefix)
+        print("```")
         return stream.getvalue()
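
The roa.py change above switches `tabulate` from the "presto" style to "github" and wraps the generated summary in Markdown headings and code fences, so the summary renders as a document when viewed on GitHub. Below is a minimal sketch of that formatting difference, using the real `tabulate` API; the sample row is invented for illustration and is not data from this repository.

```python
# Minimal sketch of the tablefmt switch in this patch; the sample row is
# hypothetical. Requires the `tabulate` package (pip install tabulate).
from tabulate import tabulate

rows = [("AS4242420000", "EXAMPLE-NODE")]  # made-up ASN/name pair
headers = ["ASN", "Name"]

# "presto" (old): plain-text columns separated by " | " with a dashed rule;
# GitHub shows this as preformatted text, not a table.
print(tabulate(rows, headers=headers, tablefmt="presto"))
print()

# "github" (new): GitHub Flavored Markdown, i.e. a header row followed by a
# |---|---| separator row, which GitHub renders as an actual table.
print(tabulate(rows, headers=headers, tablefmt="github"))
```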