This reverts commit 44341478233115b26bb27fdb24da5b0a1eedb173.
Signed-off-by: Peter Müller <peter.mueller@ipfire.org>
---
 src/python/importer.py          | 14 ++++----
 src/python/location-importer.in | 63 +++++++++++++++++++++++++++++++++
 2 files changed, 70 insertions(+), 7 deletions(-)
diff --git a/src/python/importer.py b/src/python/importer.py index de20f37..f19db4b 100644 --- a/src/python/importer.py +++ b/src/python/importer.py @@ -30,8 +30,8 @@ WHOIS_SOURCES = ( "https://ftp.afrinic.net/pub/pub/dbase/afrinic.db.gz",
# Asia Pacific Network Information Centre - #"https://ftp.apnic.net/apnic/whois/apnic.db.inet6num.gz", - #"https://ftp.apnic.net/apnic/whois/apnic.db.inetnum.gz", + "https://ftp.apnic.net/apnic/whois/apnic.db.inet6num.gz", + "https://ftp.apnic.net/apnic/whois/apnic.db.inetnum.gz", #"https://ftp.apnic.net/apnic/whois/apnic.db.route6.gz", #"https://ftp.apnic.net/apnic/whois/apnic.db.route.gz", "https://ftp.apnic.net/apnic/whois/apnic.db.aut-num.gz", @@ -45,8 +45,8 @@ WHOIS_SOURCES = ( # XXX ???
# Réseaux IP Européens - #"https://ftp.ripe.net/ripe/dbase/split/ripe.db.inet6num.gz", - #"https://ftp.ripe.net/ripe/dbase/split/ripe.db.inetnum.gz", + "https://ftp.ripe.net/ripe/dbase/split/ripe.db.inet6num.gz", + "https://ftp.ripe.net/ripe/dbase/split/ripe.db.inetnum.gz", #"https://ftp.ripe.net/ripe/dbase/split/ripe.db.route6.gz", #"https://ftp.ripe.net/ripe/dbase/split/ripe.db.route.gz", "https://ftp.ripe.net/ripe/dbase/split/ripe.db.aut-num.gz", @@ -55,10 +55,10 @@ WHOIS_SOURCES = (
EXTENDED_SOURCES = ( # African Network Information Centre - "https://ftp.afrinic.net/pub/stats/afrinic/delegated-afrinic-extended-latest", + #"https://ftp.afrinic.net/pub/stats/afrinic/delegated-afrinic-extended-latest",
# Asia Pacific Network Information Centre - "https://ftp.apnic.net/apnic/stats/apnic/delegated-apnic-extended-latest", + #"https://ftp.apnic.net/apnic/stats/apnic/delegated-apnic-extended-latest",
# American Registry for Internet Numbers "https://ftp.arin.net/pub/stats/arin/delegated-arin-extended-latest", @@ -67,7 +67,7 @@ EXTENDED_SOURCES = ( "http://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-extended-latest",
# Réseaux IP Européens - "https://ftp.ripe.net/pub/stats/ripencc/delegated-ripencc-extended-latest", + #"https://ftp.ripe.net/pub/stats/ripencc/delegated-ripencc-extended-latest", )
class Downloader(object): diff --git a/src/python/location-importer.in b/src/python/location-importer.in index e87d378..b220eaf 100644 --- a/src/python/location-importer.in +++ b/src/python/location-importer.in @@ -395,6 +395,10 @@ class CLI(object): if line.startswith("aut-num:"): return self._parse_autnum_block(block)
+ # inetnum + if line.startswith("inet6num:") or line.startswith("inetnum:"): + return self._parse_inetnum_block(block) + # organisation elif line.startswith("organisation:"): return self._parse_org_block(block) @@ -424,6 +428,65 @@ class CLI(object): autnum.get("asn"), autnum.get("org"), )
+ def _parse_inetnum_block(self, block): + logging.debug("Parsing inetnum block:") + + inetnum = {} + for line in block: + logging.debug(line) + + # Split line + key, val = split_line(line) + + if key == "inetnum": + start_address, delim, end_address = val.partition("-") + + # Strip any excess space + start_address, end_address = start_address.rstrip(), end_address.strip() + + # Convert to IP address + try: + start_address = ipaddress.ip_address(start_address) + end_address = ipaddress.ip_address(end_address) + except ValueError: + logging.warning("Could not parse line: %s" % line) + return + + # Set prefix to default + prefix = 32 + + # Count number of addresses in this subnet + num_addresses = int(end_address) - int(start_address) + if num_addresses: + prefix -= math.log(num_addresses, 2) + + inetnum["inetnum"] = "%s/%.0f" % (start_address, prefix) + + elif key == "inet6num": + inetnum[key] = val + + elif key == "country": + if val == "UNITED STATES": + val = "US" + + inetnum[key] = val.upper() + + # Skip empty objects + if not inetnum: + return + + network = ipaddress.ip_network(inetnum.get("inet6num") or inetnum.get("inetnum"), strict=False) + + # Bail out in case we have processed a non-public IP network + if network.is_private: + logging.warning("Skipping non-globally routable network: %s" % network) + return + + self.db.execute("INSERT INTO networks(network, country) \ + VALUES(%s, %s) ON CONFLICT (network) DO UPDATE SET country = excluded.country", + "%s" % network, inetnum.get("country"), + ) + def _parse_org_block(self, block): org = {} for line in block:
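A side note on the arithmetic reintroduced in _parse_inetnum_block() above: the start/end boundaries of an "inetnum:" object are turned into CIDR notation by starting from a /32 and widening the prefix by log2 of the number of covered addresses. The following is a minimal standalone sketch of that conversion; the function name and sample range are invented for illustration only.

    import ipaddress
    import math

    def inetnum_to_cidr(start, end):
        # Strip any excess space around the range boundaries
        start_address = ipaddress.ip_address(start.strip())
        end_address = ipaddress.ip_address(end.strip())

        # Start from a /32 and widen the prefix by the number of covered
        # addresses, mirroring the logic in _parse_inetnum_block()
        prefix = 32
        num_addresses = int(end_address) - int(start_address)
        if num_addresses:
            prefix -= math.log(num_addresses, 2)

        return "%s/%.0f" % (start_address, prefix)

    print(inetnum_to_cidr("192.0.2.0", "192.0.2.255"))  # -> 192.0.2.0/24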
This reverts commit 13f67f285856e8eabfeff2daf1be3aeaa36a82cc.
Signed-off-by: Peter Müller <peter.mueller@ipfire.org>
---
 src/python/location-importer.in | 89 ++++++++++++++++++++++++++++++++-
 1 file changed, 87 insertions(+), 2 deletions(-)
diff --git a/src/python/location-importer.in b/src/python/location-importer.in index b220eaf..d249a35 100644 --- a/src/python/location-importer.in +++ b/src/python/location-importer.in @@ -165,6 +165,7 @@ class CLI(object): -- networks CREATE TABLE IF NOT EXISTS networks(network inet, country text); CREATE UNIQUE INDEX IF NOT EXISTS networks_network ON networks(network); + CREATE INDEX IF NOT EXISTS networks_family ON networks USING BTREE(family(network)); CREATE INDEX IF NOT EXISTS networks_search ON networks USING GIST(network inet_ops);
-- overrides @@ -365,6 +366,16 @@ class CLI(object): CREATE TEMPORARY TABLE _organizations(handle text, name text NOT NULL) ON COMMIT DROP; CREATE UNIQUE INDEX _organizations_handle ON _organizations(handle); + + CREATE TEMPORARY TABLE _rirdata(network inet NOT NULL, country text NOT NULL) + ON COMMIT DROP; + CREATE INDEX _rirdata_search ON _rirdata USING BTREE(family(network), masklen(network)); + CREATE UNIQUE INDEX _rirdata_network ON _rirdata(network); + """) + + # Remove all previously imported content + self.db.execute(""" + TRUNCATE TABLE networks; """)
for source in location.importer.WHOIS_SOURCES: @@ -372,6 +383,67 @@ class CLI(object): for block in f: self._parse_block(block)
+ # Process all parsed networks from every RIR we happen to have access to, + # insert the largest network chunks into the networks table immediately... + families = self.db.query("SELECT DISTINCT family(network) AS family FROM _rirdata ORDER BY family(network)") + + for family in (row.family for row in families): + smallest = self.db.get("SELECT MIN(masklen(network)) AS prefix FROM _rirdata WHERE family(network) = %s", family) + + self.db.execute("INSERT INTO networks(network, country) \ + SELECT network, country FROM _rirdata WHERE masklen(network) = %s AND family(network) = %s", smallest.prefix, family) + + # ... determine any other prefixes for this network family, ... + prefixes = self.db.query("SELECT DISTINCT masklen(network) AS prefix FROM _rirdata \ + WHERE family(network) = %s ORDER BY masklen(network) ASC OFFSET 1", family) + + # ... and insert networks with this prefix in case they provide additional + # information (i. e. subnet of a larger chunk with a different country) + for prefix in (row.prefix for row in prefixes): + self.db.execute(""" + WITH candidates AS ( + SELECT + _rirdata.network, + _rirdata.country + FROM + _rirdata + WHERE + family(_rirdata.network) = %s + AND + masklen(_rirdata.network) = %s + ), + filtered AS ( + SELECT + DISTINCT ON (c.network) + c.network, + c.country, + masklen(networks.network), + networks.country AS parent_country + FROM + candidates c + LEFT JOIN + networks + ON + c.network << networks.network + ORDER BY + c.network, + masklen(networks.network) DESC NULLS LAST + ) + INSERT INTO + networks(network, country) + SELECT + network, + country + FROM + filtered + WHERE + parent_country IS NULL + OR + country <> parent_country + ON CONFLICT DO NOTHING""", + family, prefix, + ) + self.db.execute(""" INSERT INTO autnums(number, name) SELECT _autnums.number, _organizations.name FROM _autnums @@ -472,17 +544,30 @@ class CLI(object): inetnum[key] = val.upper()
# Skip empty objects - if not inetnum: + if not inetnum or not "country" in inetnum: return
network = ipaddress.ip_network(inetnum.get("inet6num") or inetnum.get("inetnum"), strict=False)
+ # Bail out in case we have processed a network covering the entire IP range, which + # is necessary to work around faulty (?) IPv6 network processing + if network.prefixlen == 0: + logging.warning("Skipping network covering the entire IP adress range: %s" % network) + return + + # Bail out in case we have processed a network whose prefix length indicates it is + # not globally routable (we have decided not to process them at the moment, as they + # significantly enlarge our database without providing very helpful additional information) + if (network.prefixlen > 24 and network.version == 4) or (network.prefixlen > 48 and network.version == 6): + logging.info("Skipping network too small to be publicly announced: %s" % network) + return + # Bail out in case we have processed a non-public IP network if network.is_private: logging.warning("Skipping non-globally routable network: %s" % network) return
- self.db.execute("INSERT INTO networks(network, country) \ + self.db.execute("INSERT INTO _rirdata(network, country) \ VALUES(%s, %s) ON CONFLICT (network) DO UPDATE SET country = excluded.country", "%s" % network, inetnum.get("country"), )
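The layered insertion above can be approximated in plain Python to show the intended effect: the widest chunk per address family is inserted unconditionally, and a smaller prefix only makes it into the networks table when it adds information, i.e. when its country differs from the closest already-inserted parent. The sample networks and country codes below are invented; the importer itself does this in SQL on the _rirdata table.

    import ipaddress

    # Hypothetical RIR data (network, country); invented for illustration only
    rirdata = [
        (ipaddress.ip_network("81.3.0.0/16"), "DE"),
        (ipaddress.ip_network("81.3.27.0/24"), "AT"),
        (ipaddress.ip_network("81.3.28.0/24"), "DE"),
    ]

    networks = {}

    # Insert from the widest to the narrowest prefix, keeping a subnet only if
    # its country differs from the closest already-inserted parent (or it has none)
    for network, country in sorted(rirdata, key=lambda item: item[0].prefixlen):
        parents = [n for n in networks if network != n and network.subnet_of(n)]
        closest = max(parents, key=lambda n: n.prefixlen, default=None)

        if closest is None or networks[closest] != country:
            networks[network] = country

    for network, country in networks.items():
        print(network, country)
    # 81.3.0.0/16 DE
    # 81.3.27.0/24 AT    (the /24 with country DE is skipped)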
In contrast to the location database itself, the xt_geoip module consumes a list of IP networks for each country, and returns after the first match.
We therefore need to...
(a) sort IP networks by their size, to allow matches that are as precise as possible,
(b) export _any_ IP networks - including inverted subnets - to prevent undefined overlaps, and
(c) do the entire thing as fast as possible while consuming as little disk space as possible, which is why we cannot just iterate over all /24 chunks.
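A minimal sketch of (a) and (b), built on the same ipaddress primitives the patch below uses (address_exclude() and collapse_addresses()); the networks and country attributions are invented for illustration.

    import ipaddress

    # A parent network attributed to one country and a subnet attributed to another
    parent = ipaddress.ip_network("81.3.0.0/16")    # e.g. country DE
    subnet = ipaddress.ip_network("81.3.27.0/24")   # e.g. country AT

    # The "inverted" remainder of the parent, i.e. everything except the subnet
    remainder = list(parent.address_exclude(subnet))

    # Collapse the remainder into as few networks as possible before writing it out
    for network in ipaddress.collapse_addresses(remainder):
        print(network)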
Partially fixes: #12499
Signed-off-by: Michael Tremer <michael.tremer@ipfire.org>
Signed-off-by: Peter Müller <peter.mueller@ipfire.org>
---
 src/python/export.py | 69 ++++++++++++++++++++++++++++++++++----------
 1 file changed, 54 insertions(+), 15 deletions(-)
diff --git a/src/python/export.py b/src/python/export.py index d15c6f0..5eaf43f 100644 --- a/src/python/export.py +++ b/src/python/export.py @@ -39,8 +39,8 @@ class OutputWriter(object): suffix = "networks" mode = "w"
- def __init__(self, f, prefix=None, flatten=True): - self.f, self.prefix, self.flatten = f, prefix, flatten + def __init__(self, db, f, prefix=None, flatten=True): + self.db, self.f, self.prefix, self.flatten = db, f, prefix, flatten
# The previously written network self._last_network = None @@ -49,13 +49,13 @@ class OutputWriter(object): self._write_header()
@classmethod - def open(cls, filename, **kwargs): + def open(cls, db, filename, **kwargs): """ Convenience function to open a file """ f = open(filename, cls.mode)
- return cls(f, **kwargs) + return cls(db, f, **kwargs)
def __repr__(self): return "<%s f=%s>" % (self.__class__.__name__, self.f) @@ -87,13 +87,31 @@ class OutputWriter(object): def _write_network(self, network): self.f.write("%s\n" % network)
- def write(self, network): + def write(self, network, subnets): if self.flatten and self._flatten(network): log.debug("Skipping writing network %s" % network) return
- # Write the network to file - self._write_network(network) + # Write the network when it has no subnets + if not subnets: + network = ipaddress.ip_network("%s" % network) + return self._write_network(network) + + # Collect all matching subnets + matching_subnets = [] + + for subnet in sorted(subnets): + # Try to find the subnet in the database + n = self.db.lookup("%s" % subnet.network_address) + + # No entry or matching country means those IP addresses belong here + if not n or n.country_code == network.country_code: + matching_subnets.append(subnet) + + # Write all networks as compact as possible + for network in ipaddress.collapse_addresses(matching_subnets): + log.debug("Writing %s to database" % network) + self._write_network(network)
def finish(self): """ @@ -143,10 +161,10 @@ class XTGeoIPOutputWriter(OutputWriter): mode = "wb"
def _write_network(self, network): - for address in (network.first_address, network.last_address): + for address in (network.network_address, network.broadcast_address): # Convert this into a string of bits bytes = socket.inet_pton( - network.family, address, + socket.AF_INET6 if network.version == 6 else socket.AF_INET, "%s" % address, )
self.f.write(bytes) @@ -175,7 +193,7 @@ class Exporter(object): directory, prefix=country_code, suffix=self.writer.suffix, family=family, )
- writers[country_code] = self.writer.open(filename, prefix="CC_%s" % country_code) + writers[country_code] = self.writer.open(self.db, filename, prefix="CC_%s" % country_code)
# Create writers for ASNs for asn in asns: @@ -183,22 +201,43 @@ class Exporter(object): directory, "AS%s" % asn, suffix=self.writer.suffix, family=family, )
- writers[asn] = self.writer.open(filename, prefix="AS%s" % asn) + writers[asn] = self.writer.open(self.db, filename, prefix="AS%s" % asn)
# Get all networks that match the family networks = self.db.search_networks(family=family)
+ # Materialise the generator into a list (uses quite some memory) + networks = list(networks) + # Walk through all networks - for network in networks: + for i, network in enumerate(networks): + _network = ipaddress.ip_network("%s" % network) + + # Search for all subnets + subnets = set() + + while i < len(networks): + subnet = networks[i+1] + + if subnet.is_subnet_of(network): + _subnet = ipaddress.ip_network("%s" % subnet) + + subnets.add(_subnet) + subnets.update(_network.address_exclude(_subnet)) + + i += 1 + else: + break + # Write matching countries try: - writers[network.country_code].write(network) + writers[network.country_code].write(network, subnets) except KeyError: pass
# Write matching ASNs try: - writers[network.asn].write(network) + writers[network.asn].write(network, subnets) except KeyError: pass
@@ -209,7 +248,7 @@ class Exporter(object): country = flags[flag]
try: - writers[country].write(network) + writers[country].write(network, subnets) except KeyError: pass
Sanity checks for parsed networks have been put into a separate function to avoid boilerplate code for extended sources. This makes the location database less vulnerable to garbage written into RIR databases, whether on purpose or by accident.
Fixes: #12500
Signed-off-by: Peter Müller <peter.mueller@ipfire.org>
---
 src/python/location-importer.in | 83 ++++++++++++++++++++++++++-------
 1 file changed, 67 insertions(+), 16 deletions(-)
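For illustration, a condensed, hedged restatement of the checks introduced in the diff below, together with a few invented sample networks; the thresholds match the patch (IPv4 prefixes /7../24, IPv6 prefixes /10../48, globally routable space only).

    import ipaddress

    def acceptable(network):
        # Condensed restatement of _check_parsed_network(), for illustration only
        if not network.is_global:
            return False
        if network.version == 4:
            return 7 <= network.prefixlen <= 24 and str(network.network_address) != "0.0.0.0"
        return 10 <= network.prefixlen <= 48 and str(network.network_address) != "::"

    # Invented sample networks covering categories (a), (b) and (d) described below
    for s in ("10.0.0.0/8", "16.0.0.0/6", "81.3.27.128/26", "81.3.0.0/16"):
        network = ipaddress.ip_network(s, strict=False)
        print(network, acceptable(network))
    # 10.0.0.0/8 False       (a) not globally routable
    # 16.0.0.0/6 False       (b) prefix shorter than /7
    # 81.3.27.128/26 False   (d) prefix longer than /24
    # 81.3.0.0/16 True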
diff --git a/src/python/location-importer.in b/src/python/location-importer.in index d249a35..20eb052 100644 --- a/src/python/location-importer.in +++ b/src/python/location-importer.in @@ -459,6 +459,69 @@ class CLI(object): for line in f: self._parse_line(line)
+ def _check_parsed_network(self, network): + """ + Assistive function to detect and subsequently sort out parsed + networks from RIR data (both Whois and so-called "extended sources"), + which are or have... + + (a) not globally routable (RFC 1918 space, et al.) + (b) covering a too large chunk of the IP address space (prefix length + is < 7 for IPv4 networks, and < 10 for IPv6) + (c) "0.0.0.0" or "::" as a network address + (d) are too small for being publicly announced (we have decided not to + process them at the moment, as they significantly enlarge our + database without providing very helpful additional information) + + This unfortunately is necessary due to brain-dead clutter across + various RIR databases, causing mismatches and eventually disruptions. + + We will return False in case a network is not suitable for adding + it to our database, and True otherwise. + """ + + if not network or not (isinstance(network, ipaddress.IPv4Network) or isinstance(network, ipaddress.IPv6Network)): + return False + + if not network.is_global: + logging.warning("Skipping non-globally routable network: %s" % network) + return False + + if network.version == 4: + if network.prefixlen < 7: + logging.warning("Skipping too big IP chunk: %s" % network) + return False + + if network.prefixlen > 24: + logging.info("Skipping network too small to be publicly announced: %s" % network) + return False + + if str(network.network_address) == "0.0.0.0": + logging.warning("Skipping network based on 0.0.0.0: %s" % network) + return False + + elif network.version == 6: + if network.prefixlen < 10: + logging.warning("Skipping too big IP chunk: %s" % network) + return False + + if network.prefixlen > 48: + logging.info("Skipping network too small to be publicly announced: %s" % network) + return False + + if str(network.network_address) == "::": + logging.warning("Skipping network based on '::': %s" % network) + return False + + else: + # This should not happen... + logging.warning("Skipping network of unknown family, this should not happen: %s" % network) + return False + + # In case we have made it here, the network is considered to + # be suitable for libloc consumption... + return True + def _parse_block(self, block): # Get first line to find out what type of block this is line = block[0] @@ -549,22 +612,7 @@ class CLI(object):
network = ipaddress.ip_network(inetnum.get("inet6num") or inetnum.get("inetnum"), strict=False)
- # Bail out in case we have processed a network covering the entire IP range, which - # is necessary to work around faulty (?) IPv6 network processing - if network.prefixlen == 0: - logging.warning("Skipping network covering the entire IP adress range: %s" % network) - return - - # Bail out in case we have processed a network whose prefix length indicates it is - # not globally routable (we have decided not to process them at the moment, as they - # significantly enlarge our database without providing very helpful additional information) - if (network.prefixlen > 24 and network.version == 4) or (network.prefixlen > 48 and network.version == 6): - logging.info("Skipping network too small to be publicly announced: %s" % network) - return - - # Bail out in case we have processed a non-public IP network - if network.is_private: - logging.warning("Skipping non-globally routable network: %s" % network) + if not self._check_parsed_network(network): return
self.db.execute("INSERT INTO _rirdata(network, country) \ @@ -648,6 +696,9 @@ class CLI(object): log.warning("Invalid IP address: %s" % address) return
+ if not self._check_parsed_network(network): + return + self.db.execute("INSERT INTO networks(network, country) \ VALUES(%s, %s) ON CONFLICT (network) DO \ UPDATE SET country = excluded.country",
Signed-off-by: Peter Müller <peter.mueller@ipfire.org>
---
 src/python/importer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/python/importer.py b/src/python/importer.py index f19db4b..5f46bc3 100644 --- a/src/python/importer.py +++ b/src/python/importer.py @@ -64,7 +64,7 @@ EXTENDED_SOURCES = ( "https://ftp.arin.net/pub/stats/arin/delegated-arin-extended-latest",
# Latin America and Caribbean Network Information Centre - "http://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-extended-latest", + "https://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-extended-latest",
# Réseaux IP Européens #"https://ftp.ripe.net/pub/stats/ripencc/delegated-ripencc-extended-latest",
Some RIRs include detailed information about networks that are not managed by or allocated to themselves, particularly APNIC. We need to filter out those networks (they usually carry a characteristic network name) in order to prevent operational quirks and returning wrong country codes.
Fixes: #12501
Partially fixes: #12499
Cc: Michael Tremer <michael.tremer@ipfire.org>
Signed-off-by: Peter Müller <peter.mueller@ipfire.org>
---
 src/python/location-importer.in | 38 +++++++++++++++++++++------------
 1 file changed, 24 insertions(+), 14 deletions(-)
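To make the effect of the new netname filter tangible, a small example; the regular expression is taken verbatim from the patch below, while the sample network names are invented.

    import re

    PATTERN = r"(ERX-NETBLOCK|(AFRINIC|ARIN|LACNIC|RIPE)-CIDR-BLOCK|IANA-NETBLOCK-\d{1,3}|NON-RIPE-NCC-MANAGED-ADDRESS-BLOCK)"

    for netname in ("ERX-NETBLOCK", "IANA-NETBLOCK-23", "RIPE-CIDR-BLOCK", "SOME-CUSTOMER-NET"):
        if re.match(PATTERN, netname):
            print("skipped:", netname)
        else:
            print("kept:", netname)
    # skipped: ERX-NETBLOCK
    # skipped: IANA-NETBLOCK-23
    # skipped: RIPE-CIDR-BLOCK
    # kept: SOME-CUSTOMER-NET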
diff --git a/src/python/location-importer.in b/src/python/location-importer.in index 20eb052..4f4a46d 100644 --- a/src/python/location-importer.in +++ b/src/python/location-importer.in @@ -484,38 +484,38 @@ class CLI(object): return False
if not network.is_global: - logging.warning("Skipping non-globally routable network: %s" % network) + log.warning("Skipping non-globally routable network: %s" % network) return False
if network.version == 4: if network.prefixlen < 7: - logging.warning("Skipping too big IP chunk: %s" % network) + log.warning("Skipping too big IP chunk: %s" % network) return False
if network.prefixlen > 24: - logging.info("Skipping network too small to be publicly announced: %s" % network) + log.info("Skipping network too small to be publicly announced: %s" % network) return False
if str(network.network_address) == "0.0.0.0": - logging.warning("Skipping network based on 0.0.0.0: %s" % network) + log.warning("Skipping network based on 0.0.0.0: %s" % network) return False
elif network.version == 6: if network.prefixlen < 10: - logging.warning("Skipping too big IP chunk: %s" % network) + log.warning("Skipping too big IP chunk: %s" % network) return False
if network.prefixlen > 48: - logging.info("Skipping network too small to be publicly announced: %s" % network) + log.info("Skipping network too small to be publicly announced: %s" % network) return False
if str(network.network_address) == "::": - logging.warning("Skipping network based on '::': %s" % network) + log.warning("Skipping network based on '::': %s" % network) return False
else: # This should not happen... - logging.warning("Skipping network of unknown family, this should not happen: %s" % network) + log.warning("Skipping network of unknown family, this should not happen: %s" % network) return False
# In case we have made it here, the network is considered to @@ -564,15 +564,22 @@ class CLI(object): )
def _parse_inetnum_block(self, block): - logging.debug("Parsing inetnum block:") + log.debug("Parsing inetnum block:")
inetnum = {} for line in block: - logging.debug(line) + log.debug(line)
# Split line key, val = split_line(line)
+ # Filter any inetnum records which are only referring to IP space + # not managed by that specific RIR... + if key == "netname": + if re.match(r"(ERX-NETBLOCK|(AFRINIC|ARIN|LACNIC|RIPE)-CIDR-BLOCK|IANA-NETBLOCK-\d{1,3}|NON-RIPE-NCC-MANAGED-ADDRESS-BLOCK)", val.strip()): + log.warning("Skipping record indicating historic/orphaned data: %s" % val.strip()) + return + if key == "inetnum": start_address, delim, end_address = val.partition("-")
@@ -584,7 +591,7 @@ class CLI(object): start_address = ipaddress.ip_address(start_address) end_address = ipaddress.ip_address(end_address) except ValueError: - logging.warning("Could not parse line: %s" % line) + log.warning("Could not parse line: %s" % line) return
# Set prefix to default @@ -601,15 +608,18 @@ class CLI(object): inetnum[key] = val
elif key == "country": - if val == "UNITED STATES": - val = "US" - inetnum[key] = val.upper()
# Skip empty objects if not inetnum or not "country" in inetnum: return
+ # Skip objects with bogus country code 'ZZ' + if inetnum.get("country") == "ZZ": + log.warning("Skipping network with bogus country 'ZZ': %s" % \ + (inetnum.get("inet6num") or inetnum.get("inetnum"))) + return + network = ipaddress.ip_network(inetnum.get("inet6num") or inetnum.get("inetnum"), strict=False)
if not self._check_parsed_network(network):
This speeds up exporting the database, as it avoids a sequential scan.
Signed-off-by: Michael Tremer <michael.tremer@ipfire.org>
Signed-off-by: Peter Müller <peter.mueller@ipfire.org>
---
 src/python/location-importer.in | 1 +
 1 file changed, 1 insertion(+)
diff --git a/src/python/location-importer.in b/src/python/location-importer.in index 4f4a46d..9946e64 100644 --- a/src/python/location-importer.in +++ b/src/python/location-importer.in @@ -152,6 +152,7 @@ class CLI(object): last_seen_at timestamp without time zone DEFAULT CURRENT_TIMESTAMP); CREATE UNIQUE INDEX IF NOT EXISTS announcements_networks ON announcements(network); CREATE INDEX IF NOT EXISTS announcements_family ON announcements(family(network)); + CREATE INDEX IF NOT EXISTS announcements_search ON announcements USING GIST(network inet_ops);
-- autnums CREATE TABLE IF NOT EXISTS autnums(number bigint, name text NOT NULL);
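For context on why the index helps: a GiST index with inet_ops can answer containment operators such as <<= and && directly from the index, which is presumably the kind of lookup the export path performs against the announcements table. The query below is an invented illustration, not the exporter's actual SQL.

    # Invented example query; with the GiST (inet_ops) index on
    # announcements(network), PostgreSQL can answer containment searches
    # like this with an index scan instead of a sequential scan.
    QUERY = """
        SELECT network
          FROM announcements
         WHERE network <<= inet '81.3.0.0/16'
    """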
Signed-off-by: Peter Müller <peter.mueller@ipfire.org>
---
 src/python/location-importer.in | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/python/location-importer.in b/src/python/location-importer.in index 9946e64..036fff8 100644 --- a/src/python/location-importer.in +++ b/src/python/location-importer.in @@ -494,7 +494,7 @@ class CLI(object): return False
if network.prefixlen > 24: - log.info("Skipping network too small to be publicly announced: %s" % network) + log.debug("Skipping network too small to be publicly announced: %s" % network) return False
if str(network.network_address) == "0.0.0.0": @@ -507,7 +507,7 @@ class CLI(object): return False
if network.prefixlen > 48: - log.info("Skipping network too small to be publicly announced: %s" % network) + log.debug("Skipping network too small to be publicly announced: %s" % network) return False
if str(network.network_address) == "::":