Skip to content

Commit

Permalink
Merge pull request #99 from m-appel/41-alice-looking-glass-route-servers
Browse files Browse the repository at this point in the history
Add Alice-LG crawler
  • Loading branch information
romain-fontugne authored Dec 22, 2023
2 parents f7db6ed + f251602 commit 63acd5b
Show file tree
Hide file tree
Showing 12 changed files with 684 additions and 265 deletions.
11 changes: 9 additions & 2 deletions config.json.example
Original file line number Diff line number Diff line change
Expand Up @@ -69,9 +69,16 @@
"iyp.crawlers.pch.daily_routing_snapshots_v6",
"iyp.crawlers.emileaben.as_names",
"iyp.crawlers.ripe.atlas_probes",
"iyp.crawlers.iana.root_zone",
"iyp.crawlers.alice_lg.amsix",
"iyp.crawlers.alice_lg.bcix",
"iyp.crawlers.alice_lg.decix",
"iyp.crawlers.alice_lg.ixbr",
"iyp.crawlers.alice_lg.linx",
"iyp.crawlers.alice_lg.megaport",
"iyp.crawlers.alice_lg.netnod",
"iyp.crawlers.cloudflare.dns_top_locations",
"iyp.crawlers.cloudflare.dns_top_ases"
],

"post": [
Expand Down
47 changes: 47 additions & 0 deletions iyp/crawlers/alice_lg/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
# Alice-LG -- https://github.com/alice-lg/alice-lg

Alice-LG is a BGP looking glass which gets its data from external APIs.

It is used by some large IXPs (e.g., DE-CIX, LINX, AMS-IX) and IYP imports membership
information by reading the route server neighbors.

The crawler *can* also import the received routes of all neighbors; however, testing has
shown that this takes an unreasonable amount of time for most IXPs due to the tiny
pagination size (250 routes per page). Therefore, this functionality is disabled by default.

List of supported IXPs:

- AMS-IX (`amsix.py`)
- BCIX (`bcix.py`)
- DE-CIX (`decix.py`)
- IX.br (`ixbr.py`)
- LINX (`linx.py`)
- Megaport (`megaport.py`)
- Netnod (`netnod.py`)

## Graph representation

```Cypher
(:AS {asn: 2497})-[:MEMBER_OF {address: '80.81.193.136', routeserver_id: 'rs1_fra_ipv4'}]->(:IXP {name: 'DE-CIX Frankfurt'})
// Routes are not crawled by default
(:AS {asn: 3333})-[:ORIGINATE {neighbor_id: 'pb_0280_as20562', routeserver_id: 'rs01-bcix-v4'}]->(:Prefix {prefix: '193.0.0.0/21'})
```

Multiple relationships may exist between the same pair of nodes. However, these
contain different information, e.g., a member is present with multiple interfaces
(`address`) or the information is seen by different route servers (`routeserver_id`).
Similarly, a route can be seen via multiple neighbors (`neighbor_id`) or different route
servers (`routeserver_id`).

## Dependence

This crawler requires peering LAN information to map the neighbor IP to an IXP.
Therefore, it should be run after crawlers that create

```Cypher
(:Prefix)-[:MANAGED_BY]->(:IXP)
```

relationships:

- `iyp.crawlers.peeringdb.ix`
402 changes: 402 additions & 0 deletions iyp/crawlers/alice_lg/__init__.py

Large diffs are not rendered by default.

29 changes: 22 additions & 7 deletions iyp/crawlers/alice_lg/amsix.py
Original file line number Diff line number Diff line change
@@ -1,25 +1,40 @@
import argparse
import logging
import os
import sys

from iyp.crawlers.alice_lg import Crawler

ORG = 'Alice-LG'
URL = 'https://lg.ams-ix.net/api/v1/'
NAME = 'alice_lg.amsix'

def main() -> None:
    """Entry point: configure logging, then run the AMS-IX Alice-LG crawler.

    Supports a ``--unit-test`` flag that runs the crawler's self-test
    instead of a full crawl.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--unit-test', action='store_true')
    args = parser.parse_args()

    # Derive the log file name from the script name, stripping the '.py' suffix.
    scriptname = os.path.basename(sys.argv[0]).replace('/', '_')[0:-3]
    FORMAT = '%(asctime)s %(levelname)s %(message)s'
    logging.basicConfig(
        format=FORMAT,
        filename='log/' + scriptname + '.log',
        level=logging.INFO,
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    logging.info(f'Started: {sys.argv}')

    crawler = Crawler(ORG, URL, NAME)
    if args.unit_test:
        crawler.unit_test(logging)
    else:
        crawler.run()
        crawler.close()
    logging.info(f'Finished: {sys.argv}')


if __name__ == '__main__':
    main()
    sys.exit(0)
40 changes: 40 additions & 0 deletions iyp/crawlers/alice_lg/bcix.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
import argparse
import logging
import os
import sys

from iyp.crawlers.alice_lg import Crawler

ORG = 'Alice-LG'
URL = 'https://lg.bcix.de/api/v1/'
NAME = 'alice_lg.bcix'


def main() -> None:
    """Configure logging and run the BCIX Alice-LG crawler.

    The ``--unit-test`` flag runs the crawler's self-test instead of a
    full crawl.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--unit-test', action='store_true')
    options = arg_parser.parse_args()

    # Log file name is derived from the script name (without '.py').
    script = os.path.basename(sys.argv[0]).replace('/', '_')[0:-3]
    logging.basicConfig(
        format='%(asctime)s %(levelname)s %(message)s',
        filename='log/' + script + '.log',
        level=logging.INFO,
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    logging.info(f'Started: {sys.argv}')

    crawler = Crawler(ORG, URL, NAME)
    if options.unit_test:
        crawler.unit_test(logging)
    else:
        crawler.run()
        crawler.close()
    logging.info(f'Finished: {sys.argv}')


if __name__ == '__main__':
    main()
    sys.exit(0)
29 changes: 22 additions & 7 deletions iyp/crawlers/alice_lg/decix.py
Original file line number Diff line number Diff line change
@@ -1,25 +1,40 @@
import argparse
import logging
import os
import sys

from iyp.crawlers.alice_lg import Crawler

ORG = 'Alice-LG'
URL = 'https://lg.de-cix.net/api/v1/'
NAME = 'alice_lg.decix'

def main() -> None:
    """Entry point: configure logging, then run the DE-CIX Alice-LG crawler.

    Supports a ``--unit-test`` flag that runs the crawler's self-test
    instead of a full crawl.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--unit-test', action='store_true')
    args = parser.parse_args()

    # Derive the log file name from the script name, stripping the '.py' suffix.
    scriptname = os.path.basename(sys.argv[0]).replace('/', '_')[0:-3]
    FORMAT = '%(asctime)s %(levelname)s %(message)s'
    logging.basicConfig(
        format=FORMAT,
        filename='log/' + scriptname + '.log',
        level=logging.INFO,
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    logging.info(f'Started: {sys.argv}')

    crawler = Crawler(ORG, URL, NAME)
    if args.unit_test:
        crawler.unit_test(logging)
    else:
        crawler.run()
        crawler.close()
    logging.info(f'Finished: {sys.argv}')


if __name__ == '__main__':
    main()
    sys.exit(0)
25 changes: 0 additions & 25 deletions iyp/crawlers/alice_lg/ecix.py

This file was deleted.

40 changes: 40 additions & 0 deletions iyp/crawlers/alice_lg/ixbr.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
import argparse
import logging
import os
import sys

from iyp.crawlers.alice_lg import Crawler

ORG = 'Alice-LG'
URL = 'https://lg.ix.br/api/v1/'
NAME = 'alice_lg.ixbr'


def main() -> None:
    """Configure logging and run the IX.br Alice-LG crawler.

    The ``--unit-test`` flag runs the crawler's self-test instead of a
    full crawl.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--unit-test', action='store_true')
    options = arg_parser.parse_args()

    # Log file name is derived from the script name (without '.py').
    script = os.path.basename(sys.argv[0]).replace('/', '_')[0:-3]
    logging.basicConfig(
        format='%(asctime)s %(levelname)s %(message)s',
        filename='log/' + script + '.log',
        level=logging.INFO,
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    logging.info(f'Started: {sys.argv}')

    crawler = Crawler(ORG, URL, NAME)
    if options.unit_test:
        crawler.unit_test(logging)
    else:
        crawler.run()
        crawler.close()
    logging.info(f'Finished: {sys.argv}')


if __name__ == '__main__':
    main()
    sys.exit(0)
29 changes: 22 additions & 7 deletions iyp/crawlers/alice_lg/linx.py
Original file line number Diff line number Diff line change
@@ -1,25 +1,40 @@
import argparse
import logging
import os
import sys

from iyp.crawlers.alice_lg import Crawler

ORG = 'Alice-LG'
URL = 'https://alice-rs.linx.net/api/v1/'
NAME = 'alice_lg.linx'

def main() -> None:
    """Entry point: configure logging, then run the LINX Alice-LG crawler.

    Supports a ``--unit-test`` flag that runs the crawler's self-test
    instead of a full crawl.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--unit-test', action='store_true')
    args = parser.parse_args()

    # Derive the log file name from the script name, stripping the '.py' suffix.
    scriptname = os.path.basename(sys.argv[0]).replace('/', '_')[0:-3]
    FORMAT = '%(asctime)s %(levelname)s %(message)s'
    logging.basicConfig(
        format=FORMAT,
        filename='log/' + scriptname + '.log',
        level=logging.INFO,
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    logging.info(f'Started: {sys.argv}')

    crawler = Crawler(ORG, URL, NAME)
    if args.unit_test:
        crawler.unit_test(logging)
    else:
        crawler.run()
        crawler.close()
    logging.info(f'Finished: {sys.argv}')


if __name__ == '__main__':
    main()
    sys.exit(0)
40 changes: 40 additions & 0 deletions iyp/crawlers/alice_lg/megaport.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
import argparse
import logging
import os
import sys

from iyp.crawlers.alice_lg import Crawler

ORG = 'Alice-LG'
URL = 'https://lg.megaport.com/api/v1/'
NAME = 'alice_lg.megaport'


def main() -> None:
    """Configure logging and run the Megaport Alice-LG crawler.

    The ``--unit-test`` flag runs the crawler's self-test instead of a
    full crawl.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--unit-test', action='store_true')
    options = arg_parser.parse_args()

    # Log file name is derived from the script name (without '.py').
    script = os.path.basename(sys.argv[0]).replace('/', '_')[0:-3]
    logging.basicConfig(
        format='%(asctime)s %(levelname)s %(message)s',
        filename='log/' + script + '.log',
        level=logging.INFO,
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    logging.info(f'Started: {sys.argv}')

    crawler = Crawler(ORG, URL, NAME)
    if options.unit_test:
        crawler.unit_test(logging)
    else:
        crawler.run()
        crawler.close()
    logging.info(f'Finished: {sys.argv}')


if __name__ == '__main__':
    main()
    sys.exit(0)
Loading

0 comments on commit 63acd5b

Please sign in to comment.