diff --git a/run_tests_locally.sh b/run_tests_locally.sh index 271e06851dd2759309de51cadf6c45642cccba1a..b483de8062cb884cbdda5530176a97c5dd3f8bfa 100755 --- a/run_tests_locally.sh +++ b/run_tests_locally.sh @@ -19,46 +19,46 @@ cat $PROJECTDIR/coverage/.coveragerc.template | sed s+~/teraflow/controller+$PRO #kubectl --namespace tf-dev expose deployment contextservice --port=6379 --type=NodePort --name=redis-tests #echo "Waiting 10 seconds for Redis to start..." #sleep 10 -#export REDIS_SERVICE_HOST=$(kubectl get node kubernetes-master -o 'jsonpath={.status.addresses[?(@.type=="InternalIP")].address}') -#export REDIS_SERVICE_PORT=$(kubectl get service redis-tests --namespace tf-dev -o 'jsonpath={.spec.ports[?(@.port==6379)].nodePort}') +export REDIS_SERVICE_HOST=$(kubectl get node kubernetes-master -o 'jsonpath={.status.addresses[?(@.type=="InternalIP")].address}') +export REDIS_SERVICE_PORT=$(kubectl get service redis-tests --namespace tf-dev -o 'jsonpath={.spec.ports[?(@.port==6379)].nodePort}') # First destroy old coverage file rm -f $COVERAGEFILE -#coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ -# common/orm/tests/test_unitary.py \ -# common/message_broker/tests/test_unitary.py \ -# common/rpc_method_wrapper/tests/test_unitary.py -# -#coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ -# centralizedattackdetector/tests/test_unitary.py -# -#coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ -# context/tests/test_unitary.py -# -#coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ -# device/tests/test_unitary.py -# -#coverage run --rcfile=$RCFILE --append -m pytest -s --log-level=INFO --verbose \ -# l3_centralizedattackdetector/tests/test_unitary.py -# -#coverage run --rcfile=$RCFILE --append -m pytest -s --log-level=INFO --verbose \ -# l3_distributedattackdetector/tests/test_unitary.py -# -#coverage run --rcfile=$RCFILE --append -m pytest -s --log-level=INFO --verbose \ -# l3_attackmitigator/tests/test_unitary.py -# -#coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ -# opticalcentralizedattackdetector/tests/test_unitary.py -# -#coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ -# dbscanserving/tests/test_unitary.py -# -#coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ -# opticalattackmitigator/tests/test_unitary.py -# -#coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ -# service/tests/test_unitary.py +coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ + common/orm/tests/test_unitary.py \ + common/message_broker/tests/test_unitary.py \ + common/rpc_method_wrapper/tests/test_unitary.py + +coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ + centralizedattackdetector/tests/test_unitary.py + +coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ + context/tests/test_unitary.py + +coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ + device/tests/test_unitary.py + +coverage run --rcfile=$RCFILE --append -m pytest -s --log-level=INFO --verbose \ + l3_centralizedattackdetector/tests/test_unitary.py + +coverage run --rcfile=$RCFILE --append -m pytest -s --log-level=INFO --verbose \ + l3_distributedattackdetector/tests/test_unitary.py + +coverage run --rcfile=$RCFILE --append -m pytest -s --log-level=INFO --verbose \ + l3_attackmitigator/tests/test_unitary.py + +coverage run 
--rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ + opticalcentralizedattackdetector/tests/test_unitary.py + +coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ + dbscanserving/tests/test_unitary.py + +coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ + opticalattackmitigator/tests/test_unitary.py + +coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ + service/tests/test_unitary.py coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ compute/tests/test_unitary.py diff --git a/src/common/type_checkers/Checkers.py b/src/common/type_checkers/Checkers.py index d0eddcf213143c4c3d99c9edfafd1305384e777b..f78395c9c5f480bf75b4c9344dfeb3b48f9da062 100644 --- a/src/common/type_checkers/Checkers.py +++ b/src/common/type_checkers/Checkers.py @@ -1,5 +1,5 @@ import re -from typing import Any, Container, List, Optional, Pattern, Set, Sized, Tuple, Union +from typing import Any, Container, Dict, List, Optional, Pattern, Set, Sized, Tuple, Union def chk_none(name : str, value : Any, reason=None) -> Any: if value is None: return value @@ -11,6 +11,11 @@ def chk_not_none(name : str, value : Any, reason=None) -> Any: if reason is None: reason = 'must not be None.' raise ValueError('{}({}) {}'.format(str(name), str(value), str(reason))) +def chk_attribute(name : str, container : Dict, container_name : str, **kwargs): + if name in container: return container[name] + if 'default' in kwargs: return kwargs['default'] + raise AttributeError('Missing object({:s}) in container({:s})'.format(str(name), str(container_name))) + def chk_type(name : str, value : Any, type_or_types : Union[type, Set[type]] = set()) -> Any: if isinstance(value, type_or_types): return value msg = '{}({}) is of a wrong type({}). Accepted type_or_types({}).' 
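A minimal usage sketch of the new chk_attribute helper introduced above; the vpn_service dict and the field names below are illustrative only and are not taken from this patch:

from common.type_checkers.Checkers import chk_attribute

# Illustrative container; chk_attribute works over any dict-like payload.
vpn_service = {'vpn-id': '954b1b53-4a8c-406d-9eff-750ec2c9a258'}

# Attribute present: its value is returned.
vpn_id = chk_attribute('vpn-id', vpn_service, 'vpn-service')

# Attribute missing but a default is provided: the default is returned.
svc_topo = chk_attribute('svc-topo', vpn_service, 'vpn-service', default='any-to-any')

# Attribute missing and no default given: raises
# AttributeError('Missing object(customer-name) in container(vpn-service)')
customer_name = chk_attribute('customer-name', vpn_service, 'vpn-service')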
diff --git a/src/compute/Config.py b/src/compute/Config.py index 761ed10cbe2c3193a9192eb41b5004007bd3616d..b2d3179fab6e55368ba751aac48de507551c4516 100644 --- a/src/compute/Config.py +++ b/src/compute/Config.py @@ -1,4 +1,5 @@ import logging +from werkzeug.security import generate_password_hash # General settings LOG_LEVEL = logging.WARNING @@ -10,7 +11,10 @@ GRPC_GRACE_PERIOD = 60 # REST-API settings RESTAPI_SERVICE_PORT = 8080 -RESTAPI_BASE_URL = '/restconf/data/ietf-l2vpn-svc:l2vpn-svc' +RESTAPI_BASE_URL = '/restconf/data' +RESTAPI_USERS = { # TODO: implement a database of credentials and permissions + 'admin': generate_password_hash('admin'), +} # Prometheus settings METRICS_PORT = 9192 diff --git a/src/compute/requirements.in b/src/compute/requirements.in index 1da334a54b1c42b01eb8f731d8fd5bd975edd2cf..a8029e52398bb54351b43fe5c2ac460b4b7754ed 100644 --- a/src/compute/requirements.in +++ b/src/compute/requirements.in @@ -1,5 +1,9 @@ +Flask +Flask-HTTPAuth +Flask-RESTful grpcio-health-checking grpcio +jsonschema prometheus-client pytest pytest-benchmark diff --git a/src/compute/service/__main__.py b/src/compute/service/__main__.py index f45af374c471222bb4fdb089860418c5895d6321..eacc1f6c464112192194fca5827033aedc57385c 100644 --- a/src/compute/service/__main__.py +++ b/src/compute/service/__main__.py @@ -4,9 +4,9 @@ from common.Settings import get_setting from compute.Config import ( GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, RESTAPI_SERVICE_PORT, RESTAPI_BASE_URL, METRICS_PORT) -from compute.service.ComputeService import ComputeService -from compute.service.rest_server.Server import Server -from compute.service.rest_server.resources.Compute import Compute +from .ComputeService import ComputeService +from .rest_server.RestServer import RestServer +from .rest_server.nbi_plugins.ietf_l2vpn import register_ietf_l2vpn terminate = threading.Event() LOGGER = None @@ -41,9 +41,8 @@ def main(): grpc_service = ComputeService(port=grpc_service_port, max_workers=max_workers, grace_period=grace_period) grpc_service.start() - rest_server = Server(port=restapi_service_port, base_url=restapi_base_url) - rest_server.add_resource( - Compute, '/restconf/config/compute', endpoint='api.compute') + rest_server = RestServer(port=restapi_service_port, base_url=restapi_base_url) + register_ietf_l2vpn(rest_server) rest_server.start() # Wait for Ctrl+C or termination signal diff --git a/src/compute/service/rest_server/Server.py b/src/compute/service/rest_server/RestServer.py similarity index 87% rename from src/compute/service/rest_server/Server.py rename to src/compute/service/rest_server/RestServer.py index 03212d4aadfe65dbf715c43c79420b11ff4677a7..8ed8dbbbf69bc89c9c76fdf31e16b0687d47856e 100644 --- a/src/compute/service/rest_server/Server.py +++ b/src/compute/service/rest_server/RestServer.py @@ -1,10 +1,9 @@ import logging, threading, time from flask import Flask, request -from flask_restful import Api +from flask_restful import Api, Resource from werkzeug.serving import make_server from compute.Config import RESTAPI_BASE_URL, RESTAPI_SERVICE_PORT - logging.getLogger('werkzeug').setLevel(logging.WARNING) BIND_ADDRESS = '0.0.0.0' @@ -15,17 +14,19 @@ def log_request(response): LOGGER.info('%s %s %s %s %s', timestamp, request.remote_addr, request.method, request.full_path, response.status) return response -class Server(threading.Thread): +class RestServer(threading.Thread): def __init__(self, host=BIND_ADDRESS, port=RESTAPI_SERVICE_PORT, base_url=RESTAPI_BASE_URL): 
threading.Thread.__init__(self, daemon=True) self.host = host self.port = port self.base_url = base_url + self.srv = None + self.ctx = None self.app = Flask(__name__) self.app.after_request(log_request) self.api = Api(self.app, prefix=self.base_url) - def add_resource(self, resource, *urls, **kwargs): + def add_resource(self, resource : Resource, *urls, **kwargs): self.api.add_resource(resource, *urls, **kwargs) def run(self): diff --git a/src/compute/service/rest_server/resources/__init__.py b/src/compute/service/rest_server/nbi_plugins/__init__.py similarity index 100% rename from src/compute/service/rest_server/resources/__init__.py rename to src/compute/service/rest_server/nbi_plugins/__init__.py diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/Constants.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/Constants.py new file mode 100644 index 0000000000000000000000000000000000000000..87c32c444d39acb048ede9105c9a0dc2c7e3899e --- /dev/null +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/Constants.py @@ -0,0 +1,3 @@ +DEFAULT_MTU = 1512 +DEFAULT_ADDRESS_FAMILIES = ['IPV4'] +DEFAULT_SUB_INTERFACE_INDEX = 0 diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Service.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Service.py new file mode 100644 index 0000000000000000000000000000000000000000..752a027ad0d41f67f6a2312ee166a51ebcbc23bd --- /dev/null +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Service.py @@ -0,0 +1,69 @@ +import logging +from typing import Dict, List +from flask import request +from flask.json import jsonify +from flask_restful import Resource +from werkzeug.exceptions import UnsupportedMediaType +from common.Constants import DEFAULT_CONTEXT_UUID +from common.Settings import get_setting +from context.client.ContextClient import ContextClient +from context.proto.context_pb2 import ServiceId +from service.client.ServiceClient import ServiceClient +from service.proto.context_pb2 import Service, ServiceStatusEnum, ServiceTypeEnum +from .tools.Authentication import HTTP_AUTH +from .tools.HttpStatusCodes import HTTP_CREATED, HTTP_GATEWAYTIMEOUT, HTTP_NOCONTENT, HTTP_OK, HTTP_SERVERERROR + +LOGGER = logging.getLogger(__name__) + +class L2VPN_Service(Resource): + def __init__(self) -> None: + super().__init__() + self.context_client = ContextClient( + get_setting('CONTEXTSERVICE_SERVICE_HOST'), get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC')) + self.service_client = ServiceClient( + get_setting('SERVICESERVICE_SERVICE_HOST'), get_setting('SERVICESERVICE_SERVICE_PORT_GRPC')) + + @HTTP_AUTH.login_required + def get(self, vpn_id : str): + LOGGER.debug('VPN_Id: {:s}'.format(str(vpn_id))) + LOGGER.debug('Request: {:s}'.format(str(request))) + + # pylint: disable=no-member + service_id_request = ServiceId() + service_id_request.context_id.context_uuid.uuid = DEFAULT_CONTEXT_UUID + service_id_request.service_uuid.uuid = vpn_id + + try: + service_reply = self.context_client.GetService(service_id_request) + if service_reply.service_id != service_id_request: # pylint: disable=no-member + raise Exception('Service retrieval failed. 
Wrong Service Id was returned') + + service_ready_status = ServiceStatusEnum.SERVICESTATUS_ACTIVE + service_status = service_reply.service_status.service_status + response = jsonify({}) + response.status_code = HTTP_OK if service_status == service_ready_status else HTTP_GATEWAYTIMEOUT + except Exception as e: # pylint: disable=broad-except + LOGGER.exception('Something went wrong Retrieving Service {:s}'.format(str(request))) + response = jsonify({'error': str(e)}) + response.status_code = HTTP_SERVERERROR + return response + + @HTTP_AUTH.login_required + def delete(self, vpn_id : str): + LOGGER.debug('VPN_Id: {:s}'.format(str(vpn_id))) + LOGGER.debug('Request: {:s}'.format(str(request))) + + # pylint: disable=no-member + service_id_request = ServiceId() + service_id_request.context_id.context_uuid.uuid = DEFAULT_CONTEXT_UUID + service_id_request.service_uuid.uuid = vpn_id + + try: + self.service_client.DeleteService(service_id_request) + response = jsonify({}) + response.status_code = HTTP_NOCONTENT + except Exception as e: # pylint: disable=broad-except + LOGGER.exception('Something went wrong Deleting Service {:s}'.format(str(request))) + response = jsonify({'error': str(e)}) + response.status_code = HTTP_SERVERERROR + return response diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Services.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Services.py new file mode 100644 index 0000000000000000000000000000000000000000..2ed0293f0729c6d4617a445034702f706a6daa25 --- /dev/null +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Services.py @@ -0,0 +1,55 @@ +import logging +from typing import Dict, List +from flask import request +from flask.json import jsonify +from flask_restful import Resource +from werkzeug.exceptions import UnsupportedMediaType +from common.Constants import DEFAULT_CONTEXT_UUID +from common.Settings import get_setting +from service.client.ServiceClient import ServiceClient +from service.proto.context_pb2 import Service, ServiceStatusEnum, ServiceTypeEnum +from .schemas.vpn_service import SCHEMA_VPN_SERVICE +from .tools.Authentication import HTTP_AUTH +from .tools.HttpStatusCodes import HTTP_CREATED, HTTP_SERVERERROR +from .tools.Validator import validate_message + +LOGGER = logging.getLogger(__name__) + +class L2VPN_Services(Resource): + def __init__(self) -> None: + super().__init__() + self.service_client = ServiceClient( + get_setting('SERVICESERVICE_SERVICE_HOST'), get_setting('SERVICESERVICE_SERVICE_PORT_GRPC')) + + @HTTP_AUTH.login_required + def get(self): + return {} + + @HTTP_AUTH.login_required + def post(self): + if not request.is_json: raise UnsupportedMediaType('JSON payload is required') + request_data : Dict = request.json + LOGGER.debug('Request: {:s}'.format(str(request_data))) + validate_message(SCHEMA_VPN_SERVICE, request_data) + + vpn_services : List[Dict] = request_data['ietf-l2vpn-svc:vpn-service'] + for vpn_service in vpn_services: + # pylint: disable=no-member + service_request = Service() + service_request.service_id.context_id.context_uuid.uuid = DEFAULT_CONTEXT_UUID + service_request.service_id.service_uuid.uuid = vpn_service['vpn-id'] + service_request.service_type = ServiceTypeEnum.SERVICETYPE_L3NM + service_request.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED + + try: + service_reply = self.service_client.CreateService(service_request) + if service_reply != service_request.service_id: # pylint: disable=no-member + raise Exception('Service creation failed. 
Wrong Service Id was returned') + + response = jsonify({}) + response.status_code = HTTP_CREATED + except Exception as e: # pylint: disable=broad-except + LOGGER.exception('Something went wrong Creating Service {:s}'.format(str(request))) + response = jsonify({'error': str(e)}) + response.status_code = HTTP_SERVERERROR + return response diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_SiteNetworkAccesses.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_SiteNetworkAccesses.py new file mode 100644 index 0000000000000000000000000000000000000000..639e8c63f5a2e64b166ab976632fd83437673484 --- /dev/null +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_SiteNetworkAccesses.py @@ -0,0 +1,161 @@ +import json, logging +from typing import Dict +from flask import request +from flask.json import jsonify +from flask.wrappers import Response +from flask_restful import Resource +from werkzeug.exceptions import UnsupportedMediaType +from common.Constants import DEFAULT_CONTEXT_UUID +from common.Settings import get_setting +from context.client.ContextClient import ContextClient +from context.proto.context_pb2 import Service, ServiceId, ServiceStatusEnum +from service.client.ServiceClient import ServiceClient +from .schemas.site_network_access import SCHEMA_SITE_NETWORK_ACCESS +from .tools.Authentication import HTTP_AUTH +from .tools.HttpStatusCodes import HTTP_NOCONTENT, HTTP_SERVERERROR +from .tools.Validator import validate_message +from .Constants import DEFAULT_ADDRESS_FAMILIES, DEFAULT_MTU, DEFAULT_SUB_INTERFACE_INDEX + +LOGGER = logging.getLogger(__name__) + +def process_site_network_access(context_client : ContextClient, site_network_access : Dict) -> Service: + vpn_id = site_network_access['vpn-attachment']['vpn-id'] + cvlan_id = site_network_access['connection']['tagged-interface']['dot1q-vlan-tagged']['cvlan-id'] + bearer_reference = site_network_access['bearer']['bearer-reference'] + + # Assume bearer_reference = '<device_uuid>:<endpoint_uuid>:<router_id>' + # Assume route_distinguisher = 0:<cvlan_id> + device_uuid,endpoint_uuid,router_id = bearer_reference.split(':') + route_distinguisher = '0:{:d}'.format(cvlan_id) + + # pylint: disable=no-member + service_id = ServiceId() + service_id.context_id.context_uuid.uuid = DEFAULT_CONTEXT_UUID + service_id.service_uuid.uuid = vpn_id + + service_readonly = context_client.GetService(service_id) + service = Service() + service.CopyFrom(service_readonly) + + for endpoint_id in service.service_endpoint_ids: # pylint: disable=no-member + if endpoint_id.device_id.device_uuid.uuid != device_uuid: continue + if endpoint_id.endpoint_uuid.uuid != endpoint_uuid: continue + break # found, do nothing + else: + # not found, add it + endpoint_id = service.service_endpoint_ids.add() # pylint: disable=no-member + endpoint_id.device_id.device_uuid.uuid = device_uuid + endpoint_id.endpoint_uuid.uuid = endpoint_uuid + + for config_rule in service.service_config.config_rules: # pylint: disable=no-member + if config_rule.resource_key != 'settings': continue + json_settings = json.loads(config_rule.resource_value) + + if 'route_distinguisher' not in json_settings: # missing, add it + json_settings['route_distinguisher'] = route_distinguisher + elif json_settings['route_distinguisher'] != route_distinguisher: # differs, raise exception + msg = 'Specified RouteDistinguisher({:s}) differs from Service RouteDistinguisher({:s})' + raise Exception(msg.format(str(json_settings['route_distinguisher']), 
str(route_distinguisher))) + + if 'mtu' not in json_settings: # missing, add it + json_settings['mtu'] = DEFAULT_MTU + elif json_settings['mtu'] != DEFAULT_MTU: # differs, raise exception + msg = 'Specified MTU({:s}) differs from Service MTU({:s})' + raise Exception(msg.format(str(json_settings['mtu']), str(DEFAULT_MTU))) + + if 'address_families' not in json_settings: # missing, add it + json_settings['address_families'] = DEFAULT_ADDRESS_FAMILIES + elif json_settings['address_families'] != DEFAULT_ADDRESS_FAMILIES: # differs, raise exception + msg = 'Specified AddressFamilies({:s}) differs from Service AddressFamilies({:s})' + raise Exception(msg.format(str(json_settings['address_families']), str(DEFAULT_ADDRESS_FAMILIES))) + + config_rule.resource_value = json.dumps(json_settings, sort_keys=True) + break + else: + # not found, add it + config_rule = service.service_config.config_rules.add() # pylint: disable=no-member + config_rule.resource_key = 'settings' + config_rule.resource_value = json.dumps({ + 'route_distinguisher': route_distinguisher, + 'mtu': DEFAULT_MTU, + 'address_families': DEFAULT_ADDRESS_FAMILIES, + }, sort_keys=True) + + endpoint_settings_key = 'device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid) + for config_rule in service.service_config.config_rules: # pylint: disable=no-member + if config_rule.resource_key != endpoint_settings_key: continue + json_settings = json.loads(config_rule.resource_value) + + if 'router_id' not in json_settings: # missing, add it + json_settings['router_id'] = router_id + elif json_settings['router_id'] != router_id: # differs, raise exception + msg = 'Specified RouterId({:s}) differs from Service RouterId({:s})' + raise Exception(msg.format(str(json_settings['router_id']), str(router_id))) + + if 'sub_interface_index' not in json_settings: # missing, add it + json_settings['sub_interface_index'] = DEFAULT_SUB_INTERFACE_INDEX + elif json_settings['sub_interface_index'] != DEFAULT_SUB_INTERFACE_INDEX: # differs, raise exception + msg = 'Specified SubInterfaceIndex({:s}) differs from Service SubInterfaceIndex({:s})' + raise Exception(msg.format( + str(json_settings['sub_interface_index']), str(DEFAULT_SUB_INTERFACE_INDEX))) + + config_rule.resource_value = json.dumps(json_settings, sort_keys=True) + break + else: + # not found, add it + config_rule = service.service_config.config_rules.add() # pylint: disable=no-member + config_rule.resource_key = endpoint_settings_key + config_rule.resource_value = json.dumps({ + 'router_id': router_id, + 'sub_interface_index': DEFAULT_SUB_INTERFACE_INDEX, + }, sort_keys=True) + + if len(service.service_endpoint_ids) >= 2: + service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_ACTIVE + + return service + +def process_list_site_network_access( + context_client : ContextClient, service_client : ServiceClient, request_data : Dict) -> Response: + + LOGGER.debug('Request: {:s}'.format(str(request_data))) + validate_message(SCHEMA_SITE_NETWORK_ACCESS, request_data) + + errors = [] + for site_network_access in request_data['ietf-l2vpn-svc:site-network-access']: + try: + service_request = process_site_network_access(context_client, site_network_access) + service_reply = service_client.CreateService(service_request) + if service_reply != service_request.service_id: # pylint: disable=no-member + raise Exception('Service update failed. 
Wrong Service Id was returned') + except Exception as e: # pylint: disable=broad-except + LOGGER.exception('Something went wrong Updating Service {:s}'.format(str(request))) + errors.append({'error': str(e)}) + + response = jsonify(errors) + response.status_code = HTTP_NOCONTENT if len(errors) == 0 else HTTP_SERVERERROR + return response + +class L2VPN_SiteNetworkAccesses(Resource): + def __init__(self) -> None: + super().__init__() + self.context_client = ContextClient( + get_setting('CONTEXTSERVICE_SERVICE_HOST'), get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC')) + self.service_client = ServiceClient( + get_setting('SERVICESERVICE_SERVICE_HOST'), get_setting('SERVICESERVICE_SERVICE_PORT_GRPC')) + + #@HTTP_AUTH.login_required + #def get(self): + # return {} + + @HTTP_AUTH.login_required + def post(self, site_id : str): + if not request.is_json: raise UnsupportedMediaType('JSON payload is required') + LOGGER.debug('Site_Id: {:s}'.format(str(site_id))) + return process_list_site_network_access(self.context_client, self.service_client, request.json) + + @HTTP_AUTH.login_required + def put(self, site_id : str): + if not request.is_json: raise UnsupportedMediaType('JSON payload is required') + LOGGER.debug('Site_Id: {:s}'.format(str(site_id))) + return process_list_site_network_access(self.context_client, self.service_client, request.json) diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/__init__.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..979c8a3bc1903381516bf0f9683bbe4e4f2c3cb3 --- /dev/null +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/__init__.py @@ -0,0 +1,22 @@ +# RFC 8466 - L2VPN Service Model (L2SM) +# Ref: https://datatracker.ietf.org/doc/html/rfc8466 + +from flask_restful import Resource +from compute.service.rest_server.RestServer import RestServer +from .L2VPN_Services import L2VPN_Services +from .L2VPN_Service import L2VPN_Service +from .L2VPN_SiteNetworkAccesses import L2VPN_SiteNetworkAccesses + +URL_PREFIX = '/ietf-l2vpn-svc:l2vpn-svc' + +def _add_resource(rest_server : RestServer, resource : Resource, *urls, **kwargs): + urls = [(URL_PREFIX + url) for url in urls] + rest_server.add_resource(resource, *urls, **kwargs) + +def register_ietf_l2vpn(rest_server : RestServer): + _add_resource(rest_server, L2VPN_Services, + '/vpn-services') + _add_resource(rest_server, L2VPN_Service, + '/vpn-services/vpn-service=<vpn_id>', '/vpn-services/vpn-service=<vpn_id>/') + _add_resource(rest_server, L2VPN_SiteNetworkAccesses, + '/sites/site=<site_id>/site-network-accesses', '/sites/site=<site_id>/site-network-accesses/') diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/Common.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/Common.py new file mode 100644 index 0000000000000000000000000000000000000000..f54da792b526cede52b94892ee9946fb63c6b015 --- /dev/null +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/Common.py @@ -0,0 +1,2 @@ +# String pattern for UUIDs such as '3fd942ee-2dc3-41d1-aeec-65aa85d117b2' +REGEX_UUID = r'[a-fA-F0-9]{8}\-[a-fA-F0-9]{4}\-[a-fA-F0-9]{4}\-[a-fA-F0-9]{4}\-[a-fA-F0-9]{12}' diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/__init__.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/site_network_access.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/site_network_access.py new file mode 100644 index 0000000000000000000000000000000000000000..33ba8cc7fe5be76f82fbd74cd3608703f37e76a0 --- /dev/null +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/site_network_access.py @@ -0,0 +1,66 @@ +# Example request: +# request = {'ietf-l2vpn-svc:site-network-access': [{ +# 'network-access-id': '3fd942ee-2dc3-41d1-aeec-65aa85d117b2', +# 'vpn-attachment': {'vpn-id': '954b1b53-4a8c-406d-9eff-750ec2c9a258', +# 'site-role': 'any-to-any-role'}, +# 'connection': {'encapsulation-type': 'dot1q-vlan-tagged', 'tagged-interface': { +# 'dot1q-vlan-tagged': {'cvlan-id': 1234}}}, +# 'bearer': {'bearer-reference': '1a'} +# }]} + +from .Common import REGEX_UUID + +SCHEMA_SITE_NETWORK_ACCESS = { + '$schema': 'https://json-schema.org/draft/2020-12/schema', + 'type': 'object', + 'required': ['ietf-l2vpn-svc:site-network-access'], + 'properties': { + 'ietf-l2vpn-svc:site-network-access': { + 'type': 'array', + 'minItems': 1, + 'maxItems': 1, # by now we do not support multiple site-network-access in the same message + 'items': { + 'type': 'object', + 'required': ['network-access-id', 'vpn-attachment', 'connection', 'bearer'], + 'properties': { + 'network-access-id': {'type': 'string', 'pattern': REGEX_UUID}, + 'vpn-attachment': { + 'type': 'object', + 'required': ['vpn-id', 'site-role'], + 'properties': { + 'vpn-id': {'type': 'string', 'pattern': REGEX_UUID}, + 'site-role': {'type': 'string', 'minLength': 1}, + }, + }, + 'connection': { + 'type': 'object', + 'required': ['encapsulation-type', 'tagged-interface'], + 'properties': { + 'encapsulation-type': {'enum': ['dot1q-vlan-tagged']}, + 'tagged-interface': { + 'type': 'object', + 'required': ['dot1q-vlan-tagged'], + 'properties': { + 'dot1q-vlan-tagged': { + 'type': 'object', + 'required': ['cvlan-id'], + 'properties': { + 'cvlan-id': {'type': 'integer', 'minimum': 1, 'maximum': 4094}, + }, + }, + }, + }, + }, + }, + 'bearer': { + 'type': 'object', + 'required': ['bearer-reference'], + 'properties': { + 'bearer-reference': {'type': 'string', 'minLength': 1}, + }, + }, + }, + }, + }, + }, +} diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/vpn_service.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/vpn_service.py new file mode 100644 index 0000000000000000000000000000000000000000..54e9c53163b8d764a37b613501f6b427d6e1773d --- /dev/null +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/vpn_service.py @@ -0,0 +1,32 @@ +# Example request: +# request = {'ietf-l2vpn-svc:vpn-service': [{ +# 'vpn-id': 'c6270231-f1de-4687-b2ed-7b58f9105775', +# 'vpn-svc-type': 'vpws', +# 'svc-topo': 'any-to-any', +# 'customer-name': 'osm' +# }]} + +from .Common import REGEX_UUID + +SCHEMA_VPN_SERVICE = { + '$schema': 'https://json-schema.org/draft/2020-12/schema', + 'type': 'object', + 'required': ['ietf-l2vpn-svc:vpn-service'], + 'properties': { + 'ietf-l2vpn-svc:vpn-service': { + 'type': 'array', + 'minItems': 1, + 'maxItems': 1, # by now we do not support multiple vpn-service in the same message + 'items': { + 'type': 'object', + 'required': ['vpn-id', 'vpn-svc-type', 'svc-topo', 'customer-name'], + 'properties': { + 'vpn-id': {'type': 'string', 'pattern': REGEX_UUID}, + 'vpn-svc-type': {'enum': ['vpws']}, + 'svc-topo': {'enum': ['any-to-any']}, + 'customer-name': {'const': 'osm'}, + }, + } + } + }, +} diff 
--git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/Authentication.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/Authentication.py new file mode 100644 index 0000000000000000000000000000000000000000..de7c9eafd7b2d5afdc39b82a4d02bea20127fa4a --- /dev/null +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/Authentication.py @@ -0,0 +1,11 @@ +from flask_httpauth import HTTPBasicAuth +from werkzeug.security import check_password_hash +from compute.Config import RESTAPI_USERS + +HTTP_AUTH = HTTPBasicAuth() + +@HTTP_AUTH.verify_password +def verify_password(username, password): + if username not in RESTAPI_USERS: return None + if not check_password_hash(RESTAPI_USERS[username], password): return None + return username diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/HttpStatusCodes.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/HttpStatusCodes.py new file mode 100644 index 0000000000000000000000000000000000000000..5879670102e861bf1598104ace80f1f0cdb931ca --- /dev/null +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/HttpStatusCodes.py @@ -0,0 +1,6 @@ +HTTP_OK = 200 +HTTP_CREATED = 201 +HTTP_NOCONTENT = 204 +HTTP_BADREQUEST = 400 +HTTP_SERVERERROR = 500 +HTTP_GATEWAYTIMEOUT = 504 \ No newline at end of file diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/Validator.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/Validator.py new file mode 100644 index 0000000000000000000000000000000000000000..9c126d71beba72ebb7b69d9852927cb31ac2a614 --- /dev/null +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/Validator.py @@ -0,0 +1,21 @@ +from typing import List +from flask.json import jsonify +from jsonschema import _utils +from jsonschema.validators import validator_for +from jsonschema.protocols import Validator +from jsonschema.exceptions import ValidationError +from werkzeug.exceptions import BadRequest +from .HttpStatusCodes import HTTP_BADREQUEST + +def validate_message(schema, message): + validator_class = validator_for(schema) + validator : Validator = validator_class(schema) + errors : List[ValidationError] = sorted(validator.iter_errors(message), key=str) + if len(errors) == 0: return + response = jsonify([ + {'message': str(error.message), 'schema': str(error.schema), 'validator': str(error.validator), + 'where': str(_utils.format_as_index(container='message', indices=error.relative_path))} + for error in errors + ]) + response.status_code = HTTP_BADREQUEST + raise BadRequest(response=response) diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/__init__.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/compute/service/rest_server/resources/Compute.py b/src/compute/service/rest_server/resources/Compute.py deleted file mode 100644 index 4b845be2edd20c512bd0669739d402207d71fa94..0000000000000000000000000000000000000000 --- a/src/compute/service/rest_server/resources/Compute.py +++ /dev/null @@ -1,52 +0,0 @@ -import logging -from flask.json import jsonify -from flask_restful import Resource -from common.Settings import get_setting -from common.Constants import DEFAULT_CONTEXT_UUID -from service.client.ServiceClient import ServiceClient -from service.proto.context_pb2 import Service, ServiceStatusEnum, ServiceTypeEnum - -LOGGER = 
logging.getLogger(__name__) - -class Compute(Resource): - def __init__(self) -> None: - super().__init__() - - def get(self): - # Here implement HTTP GET method - raise NotImplementedError() - - def post(self): - # Here implement HTTP POST method - - # Retrieve required data from request - new_service_context_id = DEFAULT_CONTEXT_UUID - new_service_id = 'my-service-id' - - # Find Service address/port from environment and instantiate client - service_host = get_setting('SERVICESERVICE_SERVICE_HOST') - service_port = get_setting('SERVICESERVICE_SERVICE_PORT_GRPC') - service_client = ServiceClient(service_host, service_port) - - # Compose a dummy CreateService request - request = Service() - request.service_id.context_id.context_uuid.uuid = new_service_context_id - request.service_id.service_uuid.uuid = new_service_id - request.service_type = ServiceTypeEnum.SERVICETYPE_L2NM - request.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED - - try: - # Issue gRPC request to Service component - reply = service_client.CreateService(request) - - # Parse CreateService reply, here we check that obtained service Id and context are the expected ones. - reply_context_uuid = reply.context_id.context_uuid.uuid - reply_service_uuid = reply.service_uuid.uuid - #succeeded = (reply_context_uuid == new_service_context_id) and (reply_service_uuid == new_service_id) - succeeded = True - reply = {'succeeded': succeeded} - except Exception as e: - LOGGER.exception('Something went wrong Creating Service {:s}'.format(str(request))) - reply = {'succeeded': False, 'error': str(e)} - - return jsonify(reply) diff --git a/src/compute/tests/MockService.py b/src/compute/tests/MockService.py new file mode 100644 index 0000000000000000000000000000000000000000..54b420f5aa1cf015c90f09b874f9b37225e07328 --- /dev/null +++ b/src/compute/tests/MockService.py @@ -0,0 +1,41 @@ +import grpc, logging +from concurrent import futures + +GRPC_MAX_WORKERS = 10 +GRPC_GRACE_PERIOD = 60 + +class MockService: + def __init__(self, address, port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD, cls_name=__name__): + self.logger = logging.getLogger(cls_name) + self.address = address + self.port = port + self.endpoint = None + self.max_workers = max_workers + self.grace_period = grace_period + self.pool = None + self.server = None + + def install_servicers(self): + pass + + def start(self): + self.endpoint = '{:s}:{:s}'.format(str(self.address), str(self.port)) + self.logger.info('Starting Service (tentative endpoint: {:s}, max_workers: {:s})...'.format( + str(self.endpoint), str(self.max_workers))) + + self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers) + self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,)) + + self.install_servicers() + + port = self.server.add_insecure_port(self.endpoint) + self.endpoint = '{:s}:{:s}'.format(str(self.address), str(port)) + self.logger.info('Listening on {:s}...'.format(str(self.endpoint))) + self.server.start() + + self.logger.debug('Service started') + + def stop(self): + self.logger.debug('Stopping service (grace period {:s} seconds)...'.format(str(self.grace_period))) + self.server.stop(self.grace_period) + self.logger.debug('Service stopped') diff --git a/src/compute/tests/MockServiceService.py b/src/compute/tests/MockServiceService.py deleted file mode 100644 index fe90aa4d895baa772b113ceba81daafade798b6b..0000000000000000000000000000000000000000 --- a/src/compute/tests/MockServiceService.py +++ /dev/null @@ -1,45 +0,0 @@ -import 
grpc, logging -from concurrent import futures -from service.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD -from service.proto.service_pb2_grpc import add_ServiceServiceServicer_to_server -from .MockServiceServiceServicerImpl import MockServiceServiceServicerImpl - -BIND_ADDRESS = '0.0.0.0' -LOGGER = logging.getLogger(__name__) - -class MockServiceService: - def __init__( - self, address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS, - grace_period=GRPC_GRACE_PERIOD): - - self.address = address - self.port = port - self.endpoint = None - self.max_workers = max_workers - self.grace_period = grace_period - self.service_servicer = None - self.pool = None - self.server = None - - def start(self): - self.endpoint = '{:s}:{:s}'.format(str(self.address), str(self.port)) - LOGGER.info('Starting Service (tentative endpoint: {:s}, max_workers: {:s})...'.format( - str(self.endpoint), str(self.max_workers))) - - self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers) - self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,)) - - self.service_servicer = MockServiceServiceServicerImpl() - add_ServiceServiceServicer_to_server(self.service_servicer, self.server) - - port = self.server.add_insecure_port(self.endpoint) - self.endpoint = '{:s}:{:s}'.format(str(self.address), str(port)) - LOGGER.info('Listening on {:s}...'.format(str(self.endpoint))) - self.server.start() - - LOGGER.debug('Service started') - - def stop(self): - LOGGER.debug('Stopping service (grace period {:s} seconds)...'.format(str(self.grace_period))) - self.server.stop(self.grace_period) - LOGGER.debug('Service stopped') diff --git a/src/compute/tests/MockServicerImpl_Context.py b/src/compute/tests/MockServicerImpl_Context.py new file mode 100644 index 0000000000000000000000000000000000000000..d79a755d49773dff4b298abdba6dfa38d9e69d57 --- /dev/null +++ b/src/compute/tests/MockServicerImpl_Context.py @@ -0,0 +1,188 @@ +import grpc, logging +from typing import Any, Dict, Iterator, List +from context.proto.context_pb2 import ( + Context, ContextEvent, ContextId, ContextIdList, ContextList, Device, DeviceEvent, DeviceId, DeviceIdList, + DeviceList, Empty, Link, LinkEvent, LinkId, LinkIdList, LinkList, Service, ServiceEvent, ServiceId, ServiceIdList, + ServiceList, Topology, TopologyEvent, TopologyId, TopologyIdList, TopologyList) +from context.proto.context_pb2_grpc import ContextServiceServicer +from .Tools import grpc_message_to_json_string + +LOGGER = logging.getLogger(__name__) + +def get_container(database : Dict[str, Dict[str, Any]], container_name : str) -> Dict[str, Any]: + return database.setdefault(container_name, {}) + +def get_entries(database : Dict[str, Dict[str, Any]], container_name : str) -> List[Any]: + container = get_container(database, container_name) + return [container[entry_uuid] for entry_uuid in sorted(container.keys())] + +def get_entry( + context : grpc.ServicerContext, database : Dict[str, Dict[str, Any]], container_name : str, entry_uuid : str +) -> Any: + LOGGER.debug('[get_entry] AFTER database={:s}'.format(str(database))) + container = get_container(database, container_name) + if entry_uuid not in container: + context.abort(grpc.StatusCode.INTERNAL, str('{:s}({:s}) not found'.format(container_name, entry_uuid))) + return container[entry_uuid] + +def set_entry(database : Dict[str, Dict[str, Any]], container_name : str, entry_uuid : str, entry : Any) -> Any: + container = get_container(database, container_name) + 
LOGGER.debug('[set_entry] BEFORE database={:s}'.format(str(database))) + container[entry_uuid] = entry + LOGGER.debug('[set_entry] AFTER database={:s}'.format(str(database))) + return entry + +def del_entry( + context : grpc.ServicerContext, database : Dict[str, Dict[str, Any]], container_name : str, entry_uuid : str +) -> Any: + container = get_container(database, container_name) + if entry_uuid not in container: + context.abort(grpc.StatusCode.INTERNAL, str('{:s}({:s}) not found'.format(container_name, entry_uuid))) + del container[entry_uuid] + return Empty() + +class MockServicerImpl_Context(ContextServiceServicer): + def __init__(self): + LOGGER.info('[__init__] Creating Servicer...') + self.database : Dict[str, Any] = {} + LOGGER.info('[__init__] Servicer Created') + + # ----- Context ---------------------------------------------------------------------------------------------------- + + def ListContextIds(self, request: Empty, context : grpc.ServicerContext) -> ContextIdList: + LOGGER.info('[ListContextIds] request={:s}'.format(grpc_message_to_json_string(request))) + return ContextIdList(context_ids=[context.context_id for context in get_entries(self.database, 'context')]) + + def ListContexts(self, request: Empty, context : grpc.ServicerContext) -> ContextList: + LOGGER.info('[ListContexts] request={:s}'.format(grpc_message_to_json_string(request))) + return ContextList(contexts=get_entries(self.database, 'context')) + + def GetContext(self, request: ContextId, context : grpc.ServicerContext) -> Context: + LOGGER.info('[GetContext] request={:s}'.format(grpc_message_to_json_string(request))) + return get_entry(context, self.database, 'context', request.context_uuid.uuid) + + def SetContext(self, request: Context, context : grpc.ServicerContext) -> ContextId: + LOGGER.info('[SetContext] request={:s}'.format(grpc_message_to_json_string(request))) + return set_entry(self.database, 'context', request.context_uuid.uuid, request).context_id + + def RemoveContext(self, request: ContextId, context : grpc.ServicerContext) -> Empty: + LOGGER.info('[RemoveContext] request={:s}'.format(grpc_message_to_json_string(request))) + return del_entry(context, self.database, 'context', request.context_uuid.uuid) + + def GetContextEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[ContextEvent]: + LOGGER.info('[GetContextEvents] request={:s}'.format(grpc_message_to_json_string(request))) + + + # ----- Topology --------------------------------------------------------------------------------------------------- + + def ListTopologyIds(self, request: ContextId, context : grpc.ServicerContext) -> TopologyIdList: + LOGGER.info('[ListTopologyIds] request={:s}'.format(grpc_message_to_json_string(request))) + topologies = get_entries(self.database, 'topology[{:s}]'.format(str(request.context_id.context_uuid.uuid))) + return TopologyIdList(topology_ids=[topology.topology_id for topology in topologies]) + + def ListTopologies(self, request: ContextId, context : grpc.ServicerContext) -> TopologyList: + LOGGER.info('[ListTopologies] request={:s}'.format(grpc_message_to_json_string(request))) + topologies = get_entries(self.database, 'topology[{:s}]'.format(str(request.context_id.context_uuid.uuid))) + return TopologyList(topologies=[topology for topology in topologies]) + + def GetTopology(self, request: TopologyId, context : grpc.ServicerContext) -> Topology: + LOGGER.info('[GetTopology] request={:s}'.format(grpc_message_to_json_string(request))) + container_name = 
'topology[{:s}]'.format(str(request.context_id.context_uuid.uuid)) + return get_entry(context, self.database, container_name, request.topology_uuid.uuid) + + def SetTopology(self, request: Topology, context : grpc.ServicerContext) -> TopologyId: + LOGGER.info('[SetTopology] request={:s}'.format(grpc_message_to_json_string(request))) + container_name = 'topology[{:s}]'.format(str(request.topology_id.context_id.context_uuid.uuid)) + return set_entry(self.database, container_name, request.topology_id.topology_uuid.uuid, request).topology_id + + def RemoveTopology(self, request: TopologyId, context : grpc.ServicerContext) -> Empty: + LOGGER.info('[RemoveTopology] request={:s}'.format(grpc_message_to_json_string(request))) + container_name = 'topology[{:s}]'.format(str(request.context_id.context_uuid.uuid)) + return del_entry(context, self.database, container_name, request.topology_uuid.uuid) + + def GetTopologyEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[TopologyEvent]: + LOGGER.info('[GetTopologyEvents] request={:s}'.format(grpc_message_to_json_string(request))) + + + # ----- Device ----------------------------------------------------------------------------------------------------- + + def ListDeviceIds(self, request: Empty, context : grpc.ServicerContext) -> DeviceIdList: + LOGGER.info('[ListDeviceIds] request={:s}'.format(grpc_message_to_json_string(request))) + return DeviceIdList(device_ids=[device.device_id for device in get_entries(self.database, 'device')]) + + def ListDevices(self, request: Empty, context : grpc.ServicerContext) -> DeviceList: + LOGGER.info('[ListDevices] request={:s}'.format(grpc_message_to_json_string(request))) + return DeviceList(devices=get_entries(self.database, 'device')) + + def GetDevice(self, request: DeviceId, context : grpc.ServicerContext) -> Device: + LOGGER.info('[GetDevice] request={:s}'.format(grpc_message_to_json_string(request))) + return get_entry(context, self.database, 'device', request.device_uuid.uuid) + + def SetDevice(self, request: Context, context : grpc.ServicerContext) -> DeviceId: + LOGGER.info('[SetDevice] request={:s}'.format(grpc_message_to_json_string(request))) + return set_entry(self.database, 'device', request.device_uuid.uuid, request).device_id + + def RemoveDevice(self, request: DeviceId, context : grpc.ServicerContext) -> Empty: + LOGGER.info('[RemoveDevice] request={:s}'.format(grpc_message_to_json_string(request))) + return del_entry(context, self.database, 'device', request.device_uuid.uuid) + + def GetDeviceEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[DeviceEvent]: + LOGGER.info('[GetDeviceEvents] request={:s}'.format(grpc_message_to_json_string(request))) + + + # ----- Link ------------------------------------------------------------------------------------------------------- + + def ListLinkIds(self, request: Empty, context : grpc.ServicerContext) -> LinkIdList: + LOGGER.info('[ListLinkIds] request={:s}'.format(grpc_message_to_json_string(request))) + return LinkIdList(link_ids=[link.link_id for link in get_entries(self.database, 'link')]) + + def ListLinks(self, request: Empty, context : grpc.ServicerContext) -> LinkList: + LOGGER.info('[ListLinks] request={:s}'.format(grpc_message_to_json_string(request))) + return LinkList(links=get_entries(self.database, 'link')) + + def GetLink(self, request: LinkId, context : grpc.ServicerContext) -> Link: + LOGGER.info('[GetLink] request={:s}'.format(grpc_message_to_json_string(request))) + return get_entry(context, 
self.database, 'link', request.link_uuid.uuid) + + def SetLink(self, request: Context, context : grpc.ServicerContext) -> LinkId: + LOGGER.info('[SetLink] request={:s}'.format(grpc_message_to_json_string(request))) + return set_entry(self.database, 'link', request.link_uuid.uuid, request).link_id + + def RemoveLink(self, request: LinkId, context : grpc.ServicerContext) -> Empty: + LOGGER.info('[RemoveLink] request={:s}'.format(grpc_message_to_json_string(request))) + return del_entry(context, self.database, 'link', request.link_uuid.uuid) + + def GetLinkEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[LinkEvent]: + LOGGER.info('[GetLinkEvents] request={:s}'.format(grpc_message_to_json_string(request))) + + + # ----- Service ---------------------------------------------------------------------------------------------------- + + def ListServiceIds(self, request: ContextId, context : grpc.ServicerContext) -> ServiceIdList: + LOGGER.info('[ListServiceIds] request={:s}'.format(grpc_message_to_json_string(request))) + services = get_entries(self.database, 'service[{:s}]'.format(str(request.context_id.context_uuid.uuid))) + return ServiceIdList(service_ids=[service.service_id for service in services]) + + def ListServices(self, request: ContextId, context : grpc.ServicerContext) -> ServiceList: + LOGGER.info('[ListServices] request={:s}'.format(grpc_message_to_json_string(request))) + services = get_entries(self.database, 'service[{:s}]'.format(str(request.context_id.context_uuid.uuid))) + return ServiceList(services=[service for service in services]) + + def GetService(self, request: ServiceId, context : grpc.ServicerContext) -> Service: + LOGGER.info('[GetService] request={:s}'.format(grpc_message_to_json_string(request))) + container_name = 'service[{:s}]'.format(str(request.context_id.context_uuid.uuid)) + return get_entry(context, self.database, container_name, request.service_uuid.uuid) + + def SetService(self, request: Service, context : grpc.ServicerContext) -> ServiceId: + LOGGER.info('[SetService] request={:s}'.format(grpc_message_to_json_string(request))) + return set_entry( + self.database, 'service[{:s}]'.format(str(request.service_id.context_id.context_uuid.uuid)), + request.service_id.service_uuid.uuid, request).service_id + + def RemoveService(self, request: ServiceId, context : grpc.ServicerContext) -> Empty: + LOGGER.info('[RemoveService] request={:s}'.format(grpc_message_to_json_string(request))) + container_name = 'service[{:s}]'.format(str(request.context_id.context_uuid.uuid)) + return del_entry(context, self.database, container_name, request.service_uuid.uuid) + + def GetServiceEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[ServiceEvent]: + LOGGER.info('[GetServiceEvents] request={:s}'.format(grpc_message_to_json_string(request))) diff --git a/src/compute/tests/MockServiceServiceServicerImpl.py b/src/compute/tests/MockServicerImpl_Service.py similarity index 59% rename from src/compute/tests/MockServiceServiceServicerImpl.py rename to src/compute/tests/MockServicerImpl_Service.py index 35da854074ae83e7d89ec0c4b19616b4a5c50fb2..75fdc3073dac1b942c7701f3a0be9feacb60109b 100644 --- a/src/compute/tests/MockServiceServiceServicerImpl.py +++ b/src/compute/tests/MockServicerImpl_Service.py @@ -1,27 +1,31 @@ -import grpc, json, logging -from google.protobuf.json_format import MessageToDict +import grpc, logging +from common.Settings import get_setting +from context.client.ContextClient import ContextClient from 
service.proto.context_pb2 import ConnectionList, Empty, Service, ServiceId from service.proto.service_pb2_grpc import ServiceServiceServicer +from .Tools import grpc_message_to_json_string LOGGER = logging.getLogger(__name__) -def grpc_message_to_json_string(message): - return json.dumps(MessageToDict( - message, including_default_value_fields=True, preserving_proto_field_name=True, use_integers_for_enums=False), - sort_keys=True) +class MockServicerImpl_Service(ServiceServiceServicer): + def __init__(self): + LOGGER.info('[__init__] Creating Servicer...') + self.context_client = ContextClient( + get_setting('CONTEXTSERVICE_SERVICE_HOST'), + get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC')) + LOGGER.info('[__init__] Servicer Created') -class MockServiceServiceServicerImpl(ServiceServiceServicer): def CreateService(self, request : Service, context : grpc.ServicerContext) -> ServiceId: LOGGER.info('[CreateService] request={:s}'.format(grpc_message_to_json_string(request))) - return request.service_id + return self.context_client.SetService(request) def UpdateService(self, request : Service, context : grpc.ServicerContext) -> ServiceId: LOGGER.info('[UpdateService] request={:s}'.format(grpc_message_to_json_string(request))) - return request.service_id + return self.context_client.SetService(request) def DeleteService(self, request : ServiceId, context : grpc.ServicerContext) -> Empty: LOGGER.info('[DeleteService] request={:s}'.format(grpc_message_to_json_string(request))) - return Empty() + return self.context_client.RemoveService(request) def GetConnectionList(self, request : ServiceId, context : grpc.ServicerContext) -> ConnectionList: LOGGER.info('[GetConnectionList] request={:s}'.format(grpc_message_to_json_string(request))) diff --git a/src/compute/tests/Tools.py b/src/compute/tests/Tools.py new file mode 100644 index 0000000000000000000000000000000000000000..a96c38ce546d4062df8229f5506f9dd49af6fc81 --- /dev/null +++ b/src/compute/tests/Tools.py @@ -0,0 +1,7 @@ +import json +from google.protobuf.json_format import MessageToDict + +def grpc_message_to_json_string(message): + return json.dumps(MessageToDict( + message, including_default_value_fields=True, preserving_proto_field_name=True, use_integers_for_enums=False), + sort_keys=True) diff --git a/src/compute/tests/mock_osm/MockOSM.py b/src/compute/tests/mock_osm/MockOSM.py index f98cfed7a266bc6004bdc3596a0158f619f7c3e8..c50ee6c88e75a62a743bba065830ae82827fa7d7 100644 --- a/src/compute/tests/mock_osm/MockOSM.py +++ b/src/compute/tests/mock_osm/MockOSM.py @@ -5,43 +5,90 @@ LOGGER = logging.getLogger(__name__) WIM_USERNAME = 'admin' WIM_PASSWORD = 'admin' -WIM_MAPPING = [] -SERVICE_TYPE = 'ELAN' +# Ref: https://osm.etsi.org/wikipub/index.php/WIM +WIM_MAPPING = [ + { + 'device-id' : 'dev-1', # pop_switch_dpid + #'device_interface_id' : ??, # pop_switch_port + 'service_endpoint_id' : 'ep-1', # wan_service_endpoint_id + 'service_mapping_info': { # wan_service_mapping_info, other extra info + 'bearer': {'bearer-reference': 'dev-1:ep-1:10.0.0.1'}, + 'site-id': '1', + }, + #'switch_dpid' : ??, # wan_switch_dpid + #'switch_port' : ??, # wan_switch_port + #'datacenter_id' : ??, # vim_account + }, + { + 'device-id' : 'dev-2', # pop_switch_dpid + #'device_interface_id' : ??, # pop_switch_port + 'service_endpoint_id' : 'ep-2', # wan_service_endpoint_id + 'service_mapping_info': { # wan_service_mapping_info, other extra info + 'bearer': {'bearer-reference': 'dev-2:ep-2:10.0.0.2'}, + 'site-id': '2', + }, + #'switch_dpid' : ??, # wan_switch_dpid + 
#'switch_port' : ??, # wan_switch_port + #'datacenter_id' : ??, # vim_account + }, + { + 'device-id' : 'dev-3', # pop_switch_dpid + #'device_interface_id' : ??, # pop_switch_port + 'service_endpoint_id' : 'ep-3', # wan_service_endpoint_id + 'service_mapping_info': { # wan_service_mapping_info, other extra info + 'bearer': {'bearer-reference': 'dev-3:ep-3:10.0.0.3'}, + 'site-id': '3', + }, + #'switch_dpid' : ??, # wan_switch_dpid + #'switch_port' : ??, # wan_switch_port + #'datacenter_id' : ??, # vim_account + }, +] + +SERVICE_TYPE = 'ELINE' +SERVICE_CONNECTION_POINTS_1 = [ + {'service_endpoint_id': 'ep-1', + 'service_endpoint_encapsulation_type': 'dot1q', + 'service_endpoint_encapsulation_info': {'vlan': 1234}}, + {'service_endpoint_id': 'ep-2', + 'service_endpoint_encapsulation_type': 'dot1q', + 'service_endpoint_encapsulation_info': {'vlan': 1234}}, +] + +SERVICE_CONNECTION_POINTS_2 = [ + {'service_endpoint_id': 'ep-3', + 'service_endpoint_encapsulation_type': 'dot1q', + 'service_endpoint_encapsulation_info': {'vlan': 1234}}, +] class MockOSM: def __init__(self, wim_url): wim = {'wim_url': wim_url} wim_account = {'user': WIM_USERNAME, 'password': WIM_PASSWORD} - config = {'service_endpoint_mapping': WIM_MAPPING} + config = {'mapping_not_needed': False, 'service_endpoint_mapping': WIM_MAPPING} self.wim = WimconnectorIETFL2VPN(wim, wim_account, config=config) self.service_uuid = None + self.conn_info = None def create_connectivity_service(self): - connection_points = [] self.wim.check_credentials() - LOGGER.info('[create_connectivity_service] connection_points={:s}'.format(str(connection_points))) - result = self.wim.create_connectivity_service(SERVICE_TYPE, connection_points) + LOGGER.info('[create_connectivity_service] connection_points={:s}'.format(str(SERVICE_CONNECTION_POINTS_1))) + result = self.wim.create_connectivity_service(SERVICE_TYPE, SERVICE_CONNECTION_POINTS_1) LOGGER.info('[create_connectivity_service] result={:s}'.format(str(result))) - self.service_uuid = None + self.service_uuid, self.conn_info = result def get_connectivity_service_status(self): - connection_points = [] self.wim.check_credentials() - LOGGER.info('[get_connectivity_service] connection_points={:s}'.format(str(connection_points))) - result = self.wim.get_connectivity_service_status(self.service_uuid, conn_info=conn_info) + result = self.wim.get_connectivity_service_status(self.service_uuid, conn_info=self.conn_info) LOGGER.info('[get_connectivity_service] result={:s}'.format(str(result))) def edit_connectivity_service(self): - connection_points = [] self.wim.check_credentials() - LOGGER.info('[edit_connectivity_service] connection_points={:s}'.format(str(connection_points))) - result = self.wim.edit_connectivity_service(self.service_uuid, conn_info=conn_info, connection_points=connection_points) - LOGGER.info('[edit_connectivity_service] result={:s}'.format(str(result))) + LOGGER.info('[edit_connectivity_service] connection_points={:s}'.format(str(SERVICE_CONNECTION_POINTS_2))) + self.wim.edit_connectivity_service( + self.service_uuid, conn_info=self.conn_info, connection_points=SERVICE_CONNECTION_POINTS_2) def delete_connectivity_service(self): - connection_points = [] self.wim.check_credentials() - LOGGER.info('[delete_connectivity_service] connection_points={:s}'.format(str(connection_points))) - result = self.wim.delete_connectivity_service(self.service_uuid, conn_info=conn_info) - LOGGER.info('[delete_connectivity_service] result={:s}'.format(str(result))) + 
self.wim.delete_connectivity_service(self.service_uuid, conn_info=self.conn_info) diff --git a/src/compute/tests/mock_osm/WimconnectorIETFL2VPN.py b/src/compute/tests/mock_osm/WimconnectorIETFL2VPN.py index f47c56dc8e4cdfe49d54786ec3c64efa76da78f3..182115bad67a4fbe1eb04a83ed8d54be964568c8 100644 --- a/src/compute/tests/mock_osm/WimconnectorIETFL2VPN.py +++ b/src/compute/tests/mock_osm/WimconnectorIETFL2VPN.py @@ -54,7 +54,7 @@ class WimconnectorIETFL2VPN(SdnConnectorBase): m["service_endpoint_id"]: m for m in self.service_endpoint_mapping } self.user = wim_account.get("user") - self.passwd = wim_account.get("passwordd") + self.passwd = wim_account.get("password") # replace "passwordd" -> "password" if self.user and self.passwd is not None: self.auth = (self.user, self.passwd) @@ -177,7 +177,7 @@ class WimconnectorIETFL2VPN(SdnConnectorBase): uuid_l2vpn = str(uuid.uuid4()) vpn_service = {} vpn_service["vpn-id"] = uuid_l2vpn - vpn_service["vpn-scv-type"] = "vpws" + vpn_service["vpn-svc-type"] = "vpws" # Rename "vpn-scv-type" -> "vpn-svc-type" vpn_service["svc-topo"] = "any-to-any" vpn_service["customer-name"] = "osm" vpn_service_list = [] diff --git a/src/compute/tests/test_unitary.py b/src/compute/tests/test_unitary.py index 6935ac2a8b93b227718292ff9c93ae9f1625e0ff..001999f1b9607f03cd393f5582cc08a504c0e9d2 100644 --- a/src/compute/tests/test_unitary.py +++ b/src/compute/tests/test_unitary.py @@ -1,34 +1,54 @@ import logging, os, pytest, time from compute.Config import RESTAPI_SERVICE_PORT, RESTAPI_BASE_URL -from compute.service.rest_server.Server import Server -from compute.service.rest_server.resources.Compute import Compute -from service.Config import ( - GRPC_SERVICE_PORT as SERVICE_GRPC_SERVICE_PORT, GRPC_MAX_WORKERS as SERVICE_GRPC_MAX_WORKERS, - GRPC_GRACE_PERIOD as SERVICE_GRPC_GRACE_PERIOD) +from compute.service.rest_server.RestServer import RestServer +from context.proto.context_pb2_grpc import add_ContextServiceServicer_to_server +from service.proto.service_pb2_grpc import add_ServiceServiceServicer_to_server from .mock_osm.MockOSM import MockOSM -from .MockServiceService import MockServiceService - -compute_restapi_port = 10000 + RESTAPI_SERVICE_PORT # avoid privileged ports -service_grpc_port = 10000 + SERVICE_GRPC_SERVICE_PORT # avoid privileged ports - -os.environ['SERVICESERVICE_SERVICE_HOST'] = '127.0.0.1' -os.environ['SERVICESERVICE_SERVICE_PORT_GRPC'] = str(service_grpc_port) +from .MockService import MockService +from .MockServicerImpl_Context import MockServicerImpl_Context +from .MockServicerImpl_Service import MockServicerImpl_Service LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.DEBUG) +LOCALHOST = '127.0.0.1' +MOCKSERVER_GRPC_PORT = 10000 +COMPUTE_RESTAPI_PORT = 10000 + RESTAPI_SERVICE_PORT # avoid privileged ports + +class MockService_ContextService(MockService): + # Mock Server implementing Context and Service to simplify unitary tests of Compute + + def __init__(self, cls_name='MockService_Service'): + super().__init__(LOCALHOST, MOCKSERVER_GRPC_PORT, cls_name=cls_name) + + # pylint: disable=attribute-defined-outside-init + def install_servicers(self): + self.context_servicer = MockServicerImpl_Context() + add_ContextServiceServicer_to_server(self.context_servicer, self.server) + self.service_servicer = MockServicerImpl_Service() + add_ServiceServiceServicer_to_server(self.service_servicer, self.server) + +os.environ['CONTEXTSERVICE_SERVICE_HOST'] = LOCALHOST +os.environ['CONTEXTSERVICE_SERVICE_PORT_GRPC'] = str(MOCKSERVER_GRPC_PORT) 
+os.environ['SERVICESERVICE_SERVICE_HOST'] = LOCALHOST +os.environ['SERVICESERVICE_SERVICE_PORT_GRPC'] = str(MOCKSERVER_GRPC_PORT) + +# NBI Plugin IETF L2VPN requires environment variables CONTEXTSERVICE_SERVICE_HOST, CONTEXTSERVICE_SERVICE_PORT_GRPC, +# SERVICESERVICE_SERVICE_HOST, and SERVICESERVICE_SERVICE_PORT_GRPC to work properly. +# pylint: disable=wrong-import-position,ungrouped-imports +from compute.service.rest_server.nbi_plugins.ietf_l2vpn import register_ietf_l2vpn + @pytest.fixture(scope='session') -def service_service(): - _service = MockServiceService( - port=service_grpc_port, max_workers=SERVICE_GRPC_MAX_WORKERS, grace_period=SERVICE_GRPC_GRACE_PERIOD) +def mockservice(): + _service = MockService_ContextService() _service.start() yield _service _service.stop() @pytest.fixture(scope='session') -def compute_service_rest(service_service): - _rest_server = Server(port=compute_restapi_port, base_url=RESTAPI_BASE_URL) - _rest_server.add_resource(Compute, '/vpn-services', endpoint='l2vpn.vpn-services') +def compute_service_rest(mockservice): # pylint: disable=redefined-outer-name + _rest_server = RestServer(port=COMPUTE_RESTAPI_PORT, base_url=RESTAPI_BASE_URL) + register_ietf_l2vpn(_rest_server) _rest_server.start() time.sleep(1) # bring time for the server to start yield _rest_server @@ -36,8 +56,8 @@ def compute_service_rest(service_service): _rest_server.join() @pytest.fixture(scope='session') -def osm_wim(service_service, compute_service_rest): # pylint: disable=redefined-outer-name - wim_url = 'http://127.0.0.1:{:d}'.format(compute_restapi_port) +def osm_wim(compute_service_rest): # pylint: disable=redefined-outer-name + wim_url = 'http://{:s}:{:d}'.format(LOCALHOST, COMPUTE_RESTAPI_PORT) return MockOSM(wim_url) def test_compute_create_connectivity_service_rest_api(osm_wim : MockOSM): # pylint: disable=redefined-outer-name
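For reference, a minimal end-to-end sketch of driving the new IETF L2VPN NBI plugin over plain HTTP instead of going through MockOSM. It assumes the local test setup above (RestServer listening on 127.0.0.1 at COMPUTE_RESTAPI_PORT = 10000 + 8080 = 18080) and the admin/admin credentials hashed into compute.Config.RESTAPI_USERS; the payload values mirror the schema examples in this patch and are illustrative, not part of the change:

import requests

# Api(prefix=RESTAPI_BASE_URL) plus the URL_PREFIX registered by register_ietf_l2vpn
BASE_URL = 'http://127.0.0.1:18080/restconf/data/ietf-l2vpn-svc:l2vpn-svc'
AUTH = ('admin', 'admin')   # verified by HTTP_AUTH against the hash stored in RESTAPI_USERS

# 1) Create the VPN service (validated against SCHEMA_VPN_SERVICE); expect 201 Created.
vpn_id = 'c6270231-f1de-4687-b2ed-7b58f9105775'
create_request = {'ietf-l2vpn-svc:vpn-service': [{
    'vpn-id': vpn_id, 'vpn-svc-type': 'vpws', 'svc-topo': 'any-to-any', 'customer-name': 'osm'}]}
reply = requests.post(BASE_URL + '/vpn-services', json=create_request, auth=AUTH)
assert reply.status_code == 201

# 2) Attach a site network access (validated against SCHEMA_SITE_NETWORK_ACCESS); expect 204 No Content.
#    The bearer-reference follows the '<device_uuid>:<endpoint_uuid>:<router_id>' convention assumed
#    by process_site_network_access(); device/endpoint names are illustrative.
sna_request = {'ietf-l2vpn-svc:site-network-access': [{
    'network-access-id': '3fd942ee-2dc3-41d1-aeec-65aa85d117b2',
    'vpn-attachment': {'vpn-id': vpn_id, 'site-role': 'any-to-any-role'},
    'connection': {'encapsulation-type': 'dot1q-vlan-tagged',
                   'tagged-interface': {'dot1q-vlan-tagged': {'cvlan-id': 1234}}},
    'bearer': {'bearer-reference': 'dev-1:ep-1:10.0.0.1'},
}]}
reply = requests.post(BASE_URL + '/sites/site=1/site-network-accesses', json=sna_request, auth=AUTH)
assert reply.status_code == 204

# 3) Poll the service: 200 once it reaches SERVICESTATUS_ACTIVE, 504 while it is still being set up.
reply = requests.get(BASE_URL + '/vpn-services/vpn-service={:s}/'.format(vpn_id), auth=AUTH)

# 4) Tear the service down; expect 204 No Content.
reply = requests.delete(BASE_URL + '/vpn-services/vpn-service={:s}/'.format(vpn_id), auth=AUTH)
assert reply.status_code == 204

The status codes asserted here correspond to the HTTP_CREATED, HTTP_NOCONTENT, HTTP_OK and HTTP_GATEWAYTIMEOUT constants introduced in tools/HttpStatusCodes.py.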