diff --git a/docker-compose.yml b/docker-compose.yml
index 283a45e..bf6f0b2 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,3 +1,4 @@
+version: "2.2"
 services:
   gateway:
     image: nginx
@@ -15,13 +16,21 @@ services:
       - ./etc/alto.conf:/opt/alto/etc/alto.conf
     entrypoint: gunicorn
     command: ["-b", "0.0.0.0:8000", "--reload", "alto.server.northbound.wsgi", "--preload", "--capture-output"]
-  alto-lg-agent:
+  # alto-lg-agent-cern:
+  #   image: openalto/alto
+  #   volumes:
+  #     - ./etc/lg-agent-cern.json:/etc/lg-agent-cern.json
+  #     - ./etc/alto.conf:/opt/alto/etc/alto.conf
+  #   entrypoint: python
+  #   command: ["-m", "alto.agent.manage", "--pid", "/tmp", "start", "-c", "/etc/lg-agent-cern.json", "-D", "cernlg"]
+  #   network_mode: "service:alto-frontend"
+  alto-lg-agent-geant:
     image: openalto/alto
     volumes:
-      - ./etc/lg-agent.json:/etc/lg-agent.json
+      - ./etc/lg-agent-geant.json:/etc/lg-agent-geant.json
       - ./etc/alto.conf:/opt/alto/etc/alto.conf
     entrypoint: python
-    command: ["-m", "alto.agent.manage", "--pid", "/tmp", "start", "-c", "/etc/lg-agent.json", "-D", "cernlg"]
+    command: ["-m", "alto.agent.manage", "--pid", "/tmp", "start", "-c", "/etc/lg-agent-geant.json", "-D", "geantlg"]
     network_mode: "service:alto-frontend"
   alto-cric-agent:
     image: openalto/alto
diff --git a/etc/lg-agent.json b/etc/lg-agent-cern.json
similarity index 100%
rename from etc/lg-agent.json
rename to etc/lg-agent-cern.json
diff --git a/etc/lg-agent-geant.json b/etc/lg-agent-geant.json
new file mode 100644
index 0000000..f665908
--- /dev/null
+++ b/etc/lg-agent-geant.json
@@ -0,0 +1,186 @@
+{
+    "namespace": "default",
+    "agent_class": "alto.agent.geantlg.LookingGlassAgent",
+    "uri": "https://lg.geant.org/rest/submit",
+    "listened_routers": [
+        "mx2.bra.sk.geant.net",
+        "mx1.mil2.it.geant.net",
+        "mx1.lon.uk.geant.net",
+        "rt1.bil.es.geant.net",
+        "rt2.rig.lv.geant.net",
+        "rt2.tal.ee.geant.net",
+        "rt1.chi.md.geant.net",
+        "rt1.por.pt.geant.net",
+        "rt2.chi.md.geant.net",
+        "mx1.mad.es.geant.net",
+        "rt1.kau.lt.geant.net",
+        "mx1.buc.ro.geant.net",
+        "mx1.pra.cz.geant.net",
+        "mx1.bud.hu.geant.net",
+        "rt1.kie.ua.geant.net",
+        "rt2.ams.nl.geant.net",
+        "amt1.fra.de.re0.geant.net",
+        "mx2.bru.be.geant.net",
+        "mx1.gen.ch.geant.net",
+        "rt1.tal.ee.geant.net",
+        "rt1.rig.lv.geant.net",
+        "rt1.mar.fr.geant.net",
+        "mx1.fra.de.geant.net",
+        "mx1.dub2.ie.geant.net",
+        "rt1.bra.sk.geant.net",
+        "mx1.poz.pl.geant.net",
+        "rt2.bra.sk.geant.net",
+        "mx1.sof.bg.geant.net",
+        "mx2.zag.hr.geant.net",
+        "mx2.lis.pt.geant.net",
+        "mx1.par.fr.geant.net",
+        "rt1.mil2.it.geant.net",
+        "mx2.lju.si.geant.net",
+        "mx1.lon2.uk.geant.net",
+        "rt2.kie.ua.geant.net",
+        "rt2.kau.lt.geant.net",
+        "mx1.vie.at.geant.net",
+        "mx1.dub.ie.geant.net",
+        "mx1.ath2.gr.geant.net",
+        "mx1.ham.de.geant.net",
+        "mx2.ath.gr.geant.net",
+        "mx1.ams.nl.geant.net",
+        "amt1.lon2.uk.re0.geant.net"
+    ],
+    "refresh_interval": 300,
+    "prefixes": [
+        "14.139.119.64/26",
+        "18.12.0.0/20",
+        "62.40.0.0/16",
+        "64.57.30.128/25",
+        "67.58.50.0/28",
+        "72.36.0.0/16",
+        "81.180.86.0/24",
+        "85.120.46.0/24",
+        "85.122.16.0/20",
+        "85.143.115.32/27",
+        "90.147.0.0/16",
+        "109.105.124.0/22",
+        "117.103.96.0/20",
+        "128.104.227.0/24",
+        "128.111.120.96/28",
+        "128.142.0.0/16",
+        "128.211.0.0/16",
+        "128.227.0.0/16",
+        "128.250.185.224/27",
+        "129.15.40.0/22",
+        "129.59.0.0/16",
+        "129.93.0.0/16",
+        "129.107.255.0/24",
+        "129.114.0.0/16",
+        "130.199.0.0/16",
+        "130.246.0.0/16",
+        "131.154.128.0/17",
+        "131.169.0.0/16",
+        "131.225.0.0/16",
+        "132.195.124.0/23",
+        "132.206.245.224/27",
"133.11.0.0/16", + "134.61.24.0/22", + "134.75.0.0/16", + "134.79.0.0/16", + "134.130.9.64/29", + "134.158.0.0/16", + "137.138.0.0/16", + "137.164.26.136/31", + "137.226.38.108/30", + "138.44.224.6/31", + "140.109.0.0/16", + "140.110.122.96/28", + "140.115.0.0/16", + "140.181.2.0/24", + "140.221.0.0/16", + "141.34.0.0/16", + "141.108.0.0/16", + "142.150.188.0/24", + "142.244.0.0/16", + "144.16.0.0/16", + "144.92.180.0/23", + "144.206.0.0/16", + "145.100.32.0/22", + "146.83.90.0/24", + "146.179.232.0/22", + "147.231.0.0/16", + "149.156.0.0/16", + "150.99.0.0/16", + "150.244.246.0/23", + "150.254.0.0/16", + "152.84.101.0/24", + "153.5.0.0/16", + "155.230.20.0/22", + "156.110.41.0/24", + "157.82.112.0/21", + "157.180.0.0/16", + "159.93.0.0/16", + "161.9.255.0/24", + "161.200.0.0/16", + "169.228.130.0/23", + "185.141.124.192/27", + "185.207.88.0/24", + "188.1.153.0/24", + "188.184.0.0/16", + "188.185.0.0/17", + "192.5.207.0/24", + "192.12.0.0/16", + "192.16.166.0/24", + "192.17.10.72/30", + "192.33.128.0/24", + "192.41.0.0/16", + "192.43.208.0/24", + "192.54.0.0/16", + "192.65.183.0/24", + "192.84.0.0/16", + "192.101.0.0/16", + "192.108.0.0/16", + "192.111.0.0/16", + "192.135.0.0/16", + "192.170.224.0/19", + "192.188.182.0/24", + "192.207.64.0/24", + "192.231.127.0/24", + "193.40.0.0/16", + "193.48.0.0/16", + "193.109.172.0/24", + "193.205.0.0/16", + "193.206.0.0/16", + "193.231.25.224/27", + "193.239.0.0/16", + "194.171.0.0/16", + "194.176.164.0/22", + "194.190.165.0/24", + "194.249.156.0/24", + "195.70.215.128/26", + "195.113.0.0/16", + "195.251.201.128/26", + "198.32.0.0/16", + "198.51.111.0/24", + "198.124.80.0/23", + "199.241.165.64/26", + "200.17.30.0/24", + "200.133.193.160/27", + "200.136.60.0/24", + "200.143.233.0/27", + "202.13.0.0/16", + "202.28.0.0/16", + "202.29.12.0/24", + "202.38.128.0/23", + "202.44.204.0/24", + "202.122.0.0/16", + "202.140.160.0/19", + "202.169.168.0/22", + "202.179.241.216/30", + "202.180.40.0/27", + "203.158.4.0/24", + "203.185.0.0/16", + "205.189.32.79/32", + "206.12.0.0/16", + "210.25.189.248/30", + "212.111.195.192/26", + "213.135.0.0/16" + ] +} diff --git a/setup.cfg b/setup.cfg index a551c0f..242fbcf 100644 --- a/setup.cfg +++ b/setup.cfg @@ -58,6 +58,7 @@ install_requires = lxml service gunicorn + xmltodict [options.packages.find] diff --git a/src/alto/agent/geantlg.py b/src/alto/agent/geantlg.py new file mode 100644 index 0000000..be3db58 --- /dev/null +++ b/src/alto/agent/geantlg.py @@ -0,0 +1,199 @@ +import requests +import json +import logging +import time +import xmltodict + +from pytricia import PyTricia + +from alto.server.components.datasource import DBInfo, DataSourceAgent +from alto.server.components.db import data_broker_manager, ForwardingRule, Match, Action + +class LookingGlassAgent(DataSourceAgent): + """ + Class of data source agent for looking glass server. 
+ """ + + def __init__(self, dbinfo: DBInfo, name: str, namespace='default', **cfg): + super().__init__(dbinfo, name, namespace) + + self.uri = self.ensure_field(cfg, 'uri') + self.router = cfg.get('default_router', None) + self.proxies = cfg.get('proxies', None) + self.refresh_interval = cfg.get('refresh_interval', None) + self.listened_routers = cfg.get('listened_routers', set()) + self.default_router = cfg.get('default_router', None) + self.prefixes = cfg.get('prefixes', set()) + + logging.info("Loading databases") + self.db = [ self.request_db(t) for t in ['forwarding', 'endpoint']] + + if self.default_router: + if len(self.listened_routers) == 0: + self.listened_routers |= { self.default_router } + + eb_trans = self.db[1].new_transaction() + default_sw = {'dpid': self.default_router, 'in_port': '0'} + eb_trans.add_property('0.0.0.0/0', default_sw) + eb_trans.commit() + + def _parse_routes(self, results_dict, width=19, route_dict=PyTricia(128)): + route_dict = dict() + for prefix in results_dict: + route_info = [] + for entry in results_dict[prefix]["rt"]["rt-entry"]: + attribute_dict = dict() + attribute_dict["selected"] = entry["active-tag"] == "*" + attribute_dict["preference"] = int(entry.get("preference", None)) + attribute_dict["peer"] = entry.get("learned-from", None) + attribute_dict["as_path"] = entry.get("as-path", None) + + if isinstance(entry.get("nh", None), list): + for hop in entry.get("nh", []): + if hop.get("selected-next-hop", True) is None: + attribute_dict["next_hop"] = hop.get("to", None) + attribute_dict["outgoing"] = hop.get("via", None) + elif isinstance(entry.get("nh", None), set): + hop = entry.get("nh") + if hop.get("selected-next-hop", True) is None: + attribute_dict["next_hop"] = hop.get("to", None) + attribute_dict["outgoing"] = hop.get("via", None) + + route_info.append(attribute_dict) + route_dict[prefix] = route_info + return route_dict + + def _do_query(self, router=None, args=None): + results = dict() + for prefix in args: + try: + data = requests.post(self.uri, + json={ + 'selectedRouters': [{"name": router}], + 'selectedCommand': { + 'value': f'show route {prefix} | display xml' + }, + }, + headers={ + 'Content-Type': 'application/json' + }, + proxies=self.proxies, timeout=5) + doctree = data.json()['output'][router]['commandResult'] + query_result = json.loads(json.dumps(xmltodict.parse(doctree))) + for table in query_result["rpc-reply"]["route-information"]["route-table"]: + if table["table-name"] == "lhcone-l3vpn.inet.0": + results.update({prefix : table}) + except: + logging.warn(f"Error fetching prefix {prefix} on router {router}; skipping") + return results + + def get_routes(self, ipprefix=None, router=None, selected=False): + """ + Get route entries for a single router. + + Parameters + ---------- + ipprefix : str + Destination IP prefix to lookup. If None, `prefixes` will be used. + router : str + Name of the looking glass router. If None, `default_router` will be + used. + selected : bool + Whether only return selected routes or not. + + Returns + ------- + routes : list + All the route entries for the given destination IP prefix. 
+ """ + logging.info('Loading routes on %s' % (router)) + args = ipprefix if ipprefix else self.prefixes + results_dict = self._do_query(router=router, args=args) + logging.info('Parsing routes on %s' % (router)) + routes = self._parse_routes(results_dict) + if selected: + for p in routes: + routes[p] = [r for r in routes[p] if r.get('selected')] + return routes + + def _do_batch_query(self, router_list=None, args=None): + results = [] + for prefix in args: + try: + data = requests.post(self.uri, + json={ + 'selectedRouters': [{"name": r} for r in router_list], + 'selectedCommand': { + 'value': f'show route {prefix} | display xml' + }, + }, + headers={ + 'Content-Type': 'application/json' + }, + proxies=self.proxies) + for router in router_list: + results_dict = dict() + doctree = data.json()['output'][router]['commandResult'] + query_result = json.loads(json.dumps(xmltodict.parse(doctree))) + for table in query_result["rpc-reply"]["route-information"]["route-table"]: + if table["table-name"] == "lhcone-l3vpn.inet.0": + results_dict.update({prefix : table}) + results.append(results_dict) + except: + logging.warn(f"Error fetching prefix {prefix} for batch; skipping") + return results + + def get_batch_routes(self, ipprefix=None, router_list=None, selected=False): + """ + Get a route entries. + + Parameters + ---------- + ipprefix : str + Destination IP prefix to lookup. If None, `prefixes` will be used. + router : str + Name of the looking glass router. If None, `listened_routers` will be + used. + selected : bool + Whether only return selected routes or not. + + Returns + ------- + routes : list + All the route entries for the given destination IP prefix. + """ + logging.info('Loading routes for batch') + args = ipprefix if ipprefix else self.prefixes + router_list = router_list if router_list else self.listened_routers + list_of_results_dict = self._do_batch_query(router_list=router_list, args=args) + logging.info('Parsing routes for batch') + routes_dict = [] + for i, r in enumerate(router_list): + routes = self._parse_routes(list_of_results_dict[i]) + if selected: + for p in routes: + routes[p] = [r for r in routes[p] if r.get('selected')] + routes_dict[r] = routes + return routes_dict + + def update(self): + fib_trans = self.db[0].new_transaction() + routes_dict = self.get_batch_routes(selected=True) + for _router in self.listened_routers: + for dst_prefix, route in routes_dict[_router].items(): + if route: + route = route[0] + else: + continue + pkt_match = Match(dst_prefix) + action = Action(**route) + rule = ForwardingRule(pkt_match, action) + fib_trans.add_rule(_router, rule) + fib_trans.commit() + + def run(self): + if self.refresh_interval is None: + self.refresh_interval = 60 + while True: + self.update() + time.sleep(self.refresh_interval)