diff --git a/ecoc22 b/ecoc22 new file mode 120000 index 0000000000000000000000000000000000000000..3c61895e5ac62d0b38ce058ba5ff042442542320 --- /dev/null +++ b/ecoc22 @@ -0,0 +1 @@ +src/tests/ecoc22/ \ No newline at end of file diff --git a/manifests/webuiservice.yaml b/manifests/webuiservice.yaml index 0dba6104d8e78e8365eb9a9dc6ff5e681e0bc57e..cac64a816075f1a0ad91a21c519463aa5cd8f973 100644 --- a/manifests/webuiservice.yaml +++ b/manifests/webuiservice.yaml @@ -35,7 +35,7 @@ spec: image: registry.gitlab.com/teraflow-h2020/controller/webui:latest imagePullPolicy: Always ports: - - containerPort: 8004 # TODO: define the real port + - containerPort: 8004 env: - name: LOG_LEVEL value: "DEBUG" diff --git a/my_deploy.sh b/my_deploy.sh index b523f6652996e0bbf040f1239cf06c36ca635ef3..d2093b213be8e419004f666d9543dc4e47d57729 100644 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -1,7 +1,6 @@ -# Set the URL of your local Docker registry where the images will be uploaded to. export TFS_REGISTRY_IMAGE="http://localhost:32000/tfs/" -# Set the list of components, separated by comas, you want to build images for, and deploy. +# Set the list of components, separated by spaces, you want to build images for, and deploy. # Supported components are: # context device automation policy service compute monitoring webui # interdomain slice pathcomp dlt @@ -11,12 +10,6 @@ export TFS_COMPONENTS="context device automation service compute monitoring webu # Set the tag you want to use for your images. export TFS_IMAGE_TAG="dev" - -# Set the name of the Kubernetes namespace to deploy to. 
export TFS_K8S_NAMESPACE="tfs" - -# Set additional manifest files to be applied after the deployment export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml" - -# Set the neew Grafana admin password export TFS_GRAFANA_PASSWORD="admin123+" diff --git a/scripts/dump_logs.sh b/scripts/dump_logs.sh new file mode 100755 index 0000000000000000000000000000000000000000..a6db945d245b832564353de71610bf720eb0acb8 --- /dev/null +++ b/scripts/dump_logs.sh @@ -0,0 +1,37 @@ +#!/bin/bash +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +######################################################################################################################## +# Define your deployment settings here +######################################################################################################################## + +# If not already set, set the name of the Kubernetes namespace to deploy to. 
+export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"tfs"} + +######################################################################################################################## +# Automated steps start here +######################################################################################################################## + +mkdir -p tmp/exec_logs/$TFS_K8S_NAMESPACE/ +rm tmp/exec_logs/$TFS_K8S_NAMESPACE/* + +PODS=$(kubectl get pods --namespace $TFS_K8S_NAMESPACE --no-headers --output=custom-columns=":metadata.name") +for POD in $PODS; do + CONTAINERS=$(kubectl get pods --namespace $TFS_K8S_NAMESPACE $POD -o jsonpath='{.spec.containers[*].name}') + for CONTAINER in $CONTAINERS; do + kubectl --namespace $TFS_K8S_NAMESPACE logs pod/${POD} --container ${CONTAINER} \ + > tmp/exec_logs/$TFS_K8S_NAMESPACE/$POD\_\_$CONTAINER.log + done +done diff --git a/scripts/run_tests_locally-service.sh b/scripts/run_tests_locally-service.sh index 8a2a8d0be1d1960c6197a67e471ae29abba501a7..8816b9faa24e55e486a54852632fdb8e00db1d04 100755 --- a/scripts/run_tests_locally-service.sh +++ b/scripts/run_tests_locally-service.sh @@ -21,4 +21,5 @@ RCFILE=$PROJECTDIR/coverage/.coveragerc # Run unitary tests and analyze coverage of code at same time coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ + service/tests/test_unitary_task_scheduler.py \ service/tests/test_unitary.py diff --git a/scripts/show_logs_slice.sh b/scripts/show_logs_slice.sh new file mode 100755 index 0000000000000000000000000000000000000000..c7bc0b69588307092b22ea3c600669359f04de99 --- /dev/null +++ b/scripts/show_logs_slice.sh @@ -0,0 +1,27 @@ +#!/bin/bash +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +######################################################################################################################## +# Define your deployment settings here +######################################################################################################################## + +# If not already set, set the name of the Kubernetes namespace to deploy to. +export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"tfs"} + +######################################################################################################################## +# Automated steps start here +######################################################################################################################## + +kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/sliceservice diff --git a/src/common/DeviceTypes.py b/src/common/DeviceTypes.py index 4d67ff6615227f0d9e7d82e2f69b39d72011a75c..c5ea4c54fef7b739a4ad33dd3759c3bdef124038 100644 --- a/src/common/DeviceTypes.py +++ b/src/common/DeviceTypes.py @@ -21,7 +21,7 @@ class DeviceTypeEnum(Enum): DATACENTER = 'datacenter' MICROVAWE_RADIO_SYSTEM = 'microwave-radio-system' OPTICAL_ROADM = 'optical-roadm' - OPTICAL_TRANDPONDER = 'optical-trandponder' + OPTICAL_TRANSPONDER = 'optical-transponder' OPEN_LINE_SYSTEM = 'open-line-system' PACKET_ROUTER = 'packet-router' PACKET_SWITCH = 'packet-switch' diff --git a/src/common/rpc_method_wrapper/ServiceExceptions.py b/src/common/rpc_method_wrapper/ServiceExceptions.py index f4f0a64cad79c96dc069bd37e8d2c2be5f011c53..e8d5c79acca19117fca53ec216166c01d3f0781d 100644 --- 
a/src/common/rpc_method_wrapper/ServiceExceptions.py +++ b/src/common/rpc_method_wrapper/ServiceExceptions.py @@ -56,3 +56,11 @@ class OperationFailedException(ServiceException): details = 'Operation({:s}) failed'.format(str(operation)) super().__init__(grpc.StatusCode.INTERNAL, details, extra_details=extra_details) + +class NotImplementedException(ServiceException): + def __init__( + self, operation : str, extra_details : Union[str, Iterable[str]] = None + ) -> None: + + details = 'Operation({:s}) not implemented'.format(str(operation)) + super().__init__(grpc.StatusCode.UNIMPLEMENTED, details, extra_details=extra_details) diff --git a/src/common/tools/object_factory/Service.py b/src/common/tools/object_factory/Service.py index 51f75e6dbe5e430330e697da772d65703f7568c7..62f3dcbda148f1c624265ae7d76b0c17f5d36959 100644 --- a/src/common/tools/object_factory/Service.py +++ b/src/common/tools/object_factory/Service.py @@ -44,10 +44,20 @@ def json_service( def json_service_l3nm_planned( service_uuid : str, endpoint_ids : List[Dict] = [], constraints : List[Dict] = [], - config_rules : List[Dict] = [] + config_rules : List[Dict] = [], context_uuid : str = DEFAULT_CONTEXT_UUID ): return json_service( - service_uuid, ServiceTypeEnum.SERVICETYPE_L3NM, context_id=json_context_id(DEFAULT_CONTEXT_UUID), + service_uuid, ServiceTypeEnum.SERVICETYPE_L3NM, context_id=json_context_id(context_uuid), + status=ServiceStatusEnum.SERVICESTATUS_PLANNED, endpoint_ids=endpoint_ids, constraints=constraints, + config_rules=config_rules) + +def json_service_tapi_planned( + service_uuid : str, endpoint_ids : List[Dict] = [], constraints : List[Dict] = [], + config_rules : List[Dict] = [], context_uuid : str = DEFAULT_CONTEXT_UUID + ): + + return json_service( + service_uuid, ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE, context_id=json_context_id(context_uuid), status=ServiceStatusEnum.SERVICESTATUS_PLANNED, endpoint_ids=endpoint_ids, constraints=constraints, config_rules=config_rules) 
diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/Constants.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/Constants.py index b6177cb9cdfca0c63404b77f28eb9bebf8a4a518..daa9f4fe3a170874fcbb58d875626e3009a95d3c 100644 --- a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/Constants.py +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/Constants.py @@ -17,38 +17,50 @@ DEFAULT_ADDRESS_FAMILIES = ['IPV4'] DEFAULT_BGP_AS = 65000 DEFAULT_BGP_ROUTE_TARGET = '{:d}:{:d}'.format(DEFAULT_BGP_AS, 333) +# TODO: improve definition of bearer mappings + # Bearer mappings: # device_uuid:endpoint_uuid => ( # device_uuid, endpoint_uuid, router_id, route_distinguisher, sub_if_index, address_ip, address_prefix) BEARER_MAPPINGS = { - 'R1-EMU:13/1/2': ('R1-EMU', '13/1/2', '10.10.10.1', '65000:100', 400, '3.3.2.1', 24), - 'R2-EMU:13/1/2': ('R2-EMU', '13/1/2', '12.12.12.1', '65000:120', 450, '3.4.2.1', 24), - 'R3-EMU:13/1/2': ('R3-EMU', '13/1/2', '20.20.20.1', '65000:200', 500, '3.3.1.1', 24), - 'R4-EMU:13/1/2': ('R4-EMU', '13/1/2', '22.22.22.1', '65000:220', 550, '3.4.1.1', 24), + # OFC'22 + #'R1-EMU:13/2/1': ('R1-EMU', '13/2/1', '10.10.10.1', '65000:100', 400, '3.3.2.1', 24), + #'R2-EMU:13/2/1': ('R2-EMU', '13/2/1', '12.12.12.1', '65000:120', 450, '3.4.2.1', 24), + #'R3-EMU:13/2/1': ('R3-EMU', '13/2/1', '20.20.20.1', '65000:200', 500, '3.3.1.1', 24), + #'R4-EMU:13/2/1': ('R4-EMU', '13/2/1', '22.22.22.1', '65000:220', 550, '3.4.1.1', 24), + + # OECC/PSC'22 - domain 1 + #'R1@D1:3/1': ('R1@D1', '3/1', '10.0.1.1', '65001:101', 100, '1.1.3.1', 24), + #'R1@D1:3/2': ('R1@D1', '3/2', '10.0.1.1', '65001:101', 100, '1.1.3.2', 24), + #'R1@D1:3/3': ('R1@D1', '3/3', '10.0.1.1', '65001:101', 100, '1.1.3.3', 24), + #'R2@D1:3/1': ('R2@D1', '3/1', '10.0.1.2', '65001:102', 100, '1.2.3.1', 24), + #'R2@D1:3/2': ('R2@D1', '3/2', '10.0.1.2', '65001:102', 100, '1.2.3.2', 24), + #'R2@D1:3/3': ('R2@D1', '3/3', '10.0.1.2', '65001:102', 100, '1.2.3.3', 24), + 
#'R3@D1:3/1': ('R3@D1', '3/1', '10.0.1.3', '65001:103', 100, '1.3.3.1', 24), + #'R3@D1:3/2': ('R3@D1', '3/2', '10.0.1.3', '65001:103', 100, '1.3.3.2', 24), + #'R3@D1:3/3': ('R3@D1', '3/3', '10.0.1.3', '65001:103', 100, '1.3.3.3', 24), + #'R4@D1:3/1': ('R4@D1', '3/1', '10.0.1.4', '65001:104', 100, '1.4.3.1', 24), + #'R4@D1:3/2': ('R4@D1', '3/2', '10.0.1.4', '65001:104', 100, '1.4.3.2', 24), + #'R4@D1:3/3': ('R4@D1', '3/3', '10.0.1.4', '65001:104', 100, '1.4.3.3', 24), - 'R1@D1:3/1': ('R1@D1', '3/1', '10.0.1.1', '65001:101', 100, '1.1.3.1', 24), - 'R1@D1:3/2': ('R1@D1', '3/2', '10.0.1.1', '65001:101', 100, '1.1.3.2', 24), - 'R1@D1:3/3': ('R1@D1', '3/3', '10.0.1.1', '65001:101', 100, '1.1.3.3', 24), - 'R2@D1:3/1': ('R2@D1', '3/1', '10.0.1.2', '65001:102', 100, '1.2.3.1', 24), - 'R2@D1:3/2': ('R2@D1', '3/2', '10.0.1.2', '65001:102', 100, '1.2.3.2', 24), - 'R2@D1:3/3': ('R2@D1', '3/3', '10.0.1.2', '65001:102', 100, '1.2.3.3', 24), - 'R3@D1:3/1': ('R3@D1', '3/1', '10.0.1.3', '65001:103', 100, '1.3.3.1', 24), - 'R3@D1:3/2': ('R3@D1', '3/2', '10.0.1.3', '65001:103', 100, '1.3.3.2', 24), - 'R3@D1:3/3': ('R3@D1', '3/3', '10.0.1.3', '65001:103', 100, '1.3.3.3', 24), - 'R4@D1:3/1': ('R4@D1', '3/1', '10.0.1.4', '65001:104', 100, '1.4.3.1', 24), - 'R4@D1:3/2': ('R4@D1', '3/2', '10.0.1.4', '65001:104', 100, '1.4.3.2', 24), - 'R4@D1:3/3': ('R4@D1', '3/3', '10.0.1.4', '65001:104', 100, '1.4.3.3', 24), + # OECC/PSC'22 - domain 2 + #'R1@D2:3/1': ('R1@D2', '3/1', '10.0.2.1', '65002:101', 100, '2.1.3.1', 24), + #'R1@D2:3/2': ('R1@D2', '3/2', '10.0.2.1', '65002:101', 100, '2.1.3.2', 24), + #'R1@D2:3/3': ('R1@D2', '3/3', '10.0.2.1', '65002:101', 100, '2.1.3.3', 24), + #'R2@D2:3/1': ('R2@D2', '3/1', '10.0.2.2', '65002:102', 100, '2.2.3.1', 24), + #'R2@D2:3/2': ('R2@D2', '3/2', '10.0.2.2', '65002:102', 100, '2.2.3.2', 24), + #'R2@D2:3/3': ('R2@D2', '3/3', '10.0.2.2', '65002:102', 100, '2.2.3.3', 24), + #'R3@D2:3/1': ('R3@D2', '3/1', '10.0.2.3', '65002:103', 100, '2.3.3.1', 24), + 
#'R3@D2:3/2': ('R3@D2', '3/2', '10.0.2.3', '65002:103', 100, '2.3.3.2', 24), + #'R3@D2:3/3': ('R3@D2', '3/3', '10.0.2.3', '65002:103', 100, '2.3.3.3', 24), + #'R4@D2:3/1': ('R4@D2', '3/1', '10.0.2.4', '65002:104', 100, '2.4.3.1', 24), + #'R4@D2:3/2': ('R4@D2', '3/2', '10.0.2.4', '65002:104', 100, '2.4.3.2', 24), + #'R4@D2:3/3': ('R4@D2', '3/3', '10.0.2.4', '65002:104', 100, '2.4.3.3', 24), - 'R1@D2:3/1': ('R1@D2', '3/1', '10.0.2.1', '65002:101', 100, '2.1.3.1', 24), - 'R1@D2:3/2': ('R1@D2', '3/2', '10.0.2.1', '65002:101', 100, '2.1.3.2', 24), - 'R1@D2:3/3': ('R1@D2', '3/3', '10.0.2.1', '65002:101', 100, '2.1.3.3', 24), - 'R2@D2:3/1': ('R2@D2', '3/1', '10.0.2.2', '65002:102', 100, '2.2.3.1', 24), - 'R2@D2:3/2': ('R2@D2', '3/2', '10.0.2.2', '65002:102', 100, '2.2.3.2', 24), - 'R2@D2:3/3': ('R2@D2', '3/3', '10.0.2.2', '65002:102', 100, '2.2.3.3', 24), - 'R3@D2:3/1': ('R3@D2', '3/1', '10.0.2.3', '65002:103', 100, '2.3.3.1', 24), - 'R3@D2:3/2': ('R3@D2', '3/2', '10.0.2.3', '65002:103', 100, '2.3.3.2', 24), - 'R3@D2:3/3': ('R3@D2', '3/3', '10.0.2.3', '65002:103', 100, '2.3.3.3', 24), - 'R4@D2:3/1': ('R4@D2', '3/1', '10.0.2.4', '65002:104', 100, '2.4.3.1', 24), - 'R4@D2:3/2': ('R4@D2', '3/2', '10.0.2.4', '65002:104', 100, '2.4.3.2', 24), - 'R4@D2:3/3': ('R4@D2', '3/3', '10.0.2.4', '65002:104', 100, '2.4.3.3', 24), -} \ No newline at end of file + # ECOC'22 + # bearer_ref => device_uuid, endpoint_uuid, sub_if_index, router_id, remote_router, circuit_id + 'DC1-GW:CS1-GW1': ('CS1-GW1', '10/1', 0, '5.5.1.1', '5.5.2.1', 111), + 'DC1-GW:CS1-GW2': ('CS1-GW2', '10/1', 0, '5.5.1.2', '5.5.2.2', 222), + 'DC2-GW:CS2-GW1': ('CS2-GW1', '10/1', 0, '5.5.2.1', '5.5.1.1', 111), + 'DC2-GW:CS2-GW2': ('CS2-GW2', '10/1', 0, '5.5.2.2', '5.5.1.2', 222), +} diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Service.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Service.py index 
c77d714a94fa8d2d4ee9cd2c3db06949665a489c..224ebf094243fccf56367c14a15831edcc975f07 100644 --- a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Service.py +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Service.py @@ -20,6 +20,7 @@ from common.Constants import DEFAULT_CONTEXT_UUID from common.proto.context_pb2 import ServiceId, ServiceStatusEnum, SliceStatusEnum from context.client.ContextClient import ContextClient from service.client.ServiceClient import ServiceClient +from slice.client.SliceClient import SliceClient from .tools.Authentication import HTTP_AUTH from .tools.ContextMethods import get_service, get_slice from .tools.HttpStatusCodes import HTTP_GATEWAYTIMEOUT, HTTP_NOCONTENT, HTTP_OK, HTTP_SERVERERROR @@ -32,31 +33,32 @@ class L2VPN_Service(Resource): LOGGER.debug('VPN_Id: {:s}'.format(str(vpn_id))) LOGGER.debug('Request: {:s}'.format(str(request))) - response = jsonify({}) try: context_client = ContextClient() - target = get_service(context_client, vpn_id) - if target is not None: - if target.service_id.service_uuid.uuid != vpn_id: # pylint: disable=no-member - raise Exception('Service retrieval failed. Wrong Service Id was returned') - service_ready_status = ServiceStatusEnum.SERVICESTATUS_ACTIVE - service_status = target.service_status.service_status # pylint: disable=no-member - response.status_code = HTTP_OK if service_status == service_ready_status else HTTP_GATEWAYTIMEOUT - return response - target = get_slice(context_client, vpn_id) if target is not None: if target.slice_id.slice_uuid.uuid != vpn_id: # pylint: disable=no-member raise Exception('Slice retrieval failed. 
Wrong Slice Id was returned') slice_ready_status = SliceStatusEnum.SLICESTATUS_ACTIVE slice_status = target.slice_status.slice_status # pylint: disable=no-member + response = jsonify({}) response.status_code = HTTP_OK if slice_status == slice_ready_status else HTTP_GATEWAYTIMEOUT return response + target = get_service(context_client, vpn_id) + if target is not None: + if target.service_id.service_uuid.uuid != vpn_id: # pylint: disable=no-member + raise Exception('Service retrieval failed. Wrong Service Id was returned') + service_ready_status = ServiceStatusEnum.SERVICESTATUS_ACTIVE + service_status = target.service_status.service_status # pylint: disable=no-member + response = jsonify({}) + response.status_code = HTTP_OK if service_status == service_ready_status else HTTP_GATEWAYTIMEOUT + return response + raise Exception('VPN({:s}) not found in database'.format(str(vpn_id))) except Exception as e: # pylint: disable=broad-except - LOGGER.exception('Something went wrong Retrieving VPN({:s})'.format(str(request))) + LOGGER.exception('Something went wrong Retrieving VPN({:s})'.format(str(vpn_id))) response = jsonify({'error': str(e)}) response.status_code = HTTP_SERVERERROR return response @@ -66,18 +68,35 @@ class L2VPN_Service(Resource): LOGGER.debug('VPN_Id: {:s}'.format(str(vpn_id))) LOGGER.debug('Request: {:s}'.format(str(request))) - # pylint: disable=no-member - service_id_request = ServiceId() - service_id_request.context_id.context_uuid.uuid = DEFAULT_CONTEXT_UUID - service_id_request.service_uuid.uuid = vpn_id - try: - service_client = ServiceClient() - service_client.DeleteService(service_id_request) + context_client = ContextClient() + + target = get_slice(context_client, vpn_id) + if target is not None: + if target.slice_id.slice_uuid.uuid != vpn_id: # pylint: disable=no-member + raise Exception('Slice retrieval failed. 
Wrong Slice Id was returned') + slice_client = SliceClient() + slice_client.DeleteSlice(target.slice_id) + response = jsonify({}) + response.status_code = HTTP_NOCONTENT + return response + + target = get_service(context_client, vpn_id) + if target is not None: + if target.service_id.service_uuid.uuid != vpn_id: # pylint: disable=no-member + raise Exception('Service retrieval failed. Wrong Service Id was returned') + service_client = ServiceClient() + service_client.DeleteService(target.service_id) + response = jsonify({}) + response.status_code = HTTP_NOCONTENT + return response + + LOGGER.warning('VPN({:s}) not found in database. Nothing done.'.format(str(vpn_id))) response = jsonify({}) response.status_code = HTTP_NOCONTENT + return response except Exception as e: # pylint: disable=broad-except - LOGGER.exception('Something went wrong Deleting Service {:s}'.format(str(request))) + LOGGER.exception('Something went wrong Deleting VPN({:s})'.format(str(vpn_id))) response = jsonify({'error': str(e)}) response.status_code = HTTP_SERVERERROR return response diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Services.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Services.py index 7b959b2895d0f0acd27058fcb5e9a571cf6553d2..50b1c2abbb2b49ed9b8cb84a3a0933df55d3bd8f 100644 --- a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Services.py +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Services.py @@ -44,7 +44,11 @@ class L2VPN_Services(Resource): vpn_services : List[Dict] = request_data['ietf-l2vpn-svc:vpn-service'] for vpn_service in vpn_services: try: - vpn_service_type = vpn_service['vpn-svc-type'] + # By now, assume requests from OSM always need transport slices + # TODO: think how to differentiate + #vpn_service_type = vpn_service['vpn-svc-type'] + vpn_service_type = 'vpls' + if vpn_service_type == 'vpws': # pylint: disable=no-member service_request = Service() diff --git 
a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_SiteNetworkAccesses.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_SiteNetworkAccesses.py index 8be63895b813d7411b76ddeb33902babbf4c9743..0dea176972ab06156dbcee875e7b857a0b4f8c95 100644 --- a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_SiteNetworkAccesses.py +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_SiteNetworkAccesses.py @@ -12,15 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ctypes import Union -import json, logging -from typing import Dict +import logging +from typing import Dict, Optional, Union from flask import request from flask.json import jsonify from flask.wrappers import Response from flask_restful import Resource from werkzeug.exceptions import UnsupportedMediaType -from common.proto.context_pb2 import ConfigActionEnum, Service, Slice +from common.proto.context_pb2 import Service, Slice +from common.tools.grpc.ConfigRules import update_config_rule_custom +from common.tools.grpc.Constraints import ( + update_constraint_custom, update_constraint_endpoint_location, update_constraint_endpoint_priority, + update_constraint_sla_availability) +from common.tools.grpc.EndPointIds import update_endpoint_ids from common.tools.grpc.Tools import grpc_message_to_json_string from context.client.ContextClient import ContextClient from service.client.ServiceClient import ServiceClient @@ -30,150 +34,102 @@ from .tools.Authentication import HTTP_AUTH from .tools.ContextMethods import get_service, get_slice from .tools.HttpStatusCodes import HTTP_NOCONTENT, HTTP_SERVERERROR from .tools.Validator import validate_message -from .Constants import BEARER_MAPPINGS, DEFAULT_ADDRESS_FAMILIES, DEFAULT_BGP_AS, DEFAULT_BGP_ROUTE_TARGET, DEFAULT_MTU +from .Constants import ( + BEARER_MAPPINGS, DEFAULT_ADDRESS_FAMILIES, DEFAULT_BGP_AS, DEFAULT_BGP_ROUTE_TARGET, DEFAULT_MTU) LOGGER = 
logging.getLogger(__name__) -def process_site_network_access(context_client : ContextClient, site_network_access : Dict) -> Service: +def process_site_network_access(context_client : ContextClient, site_id : str, site_network_access : Dict) -> Service: vpn_id = site_network_access['vpn-attachment']['vpn-id'] - cvlan_id = site_network_access['connection']['tagged-interface']['dot1q-vlan-tagged']['cvlan-id'] + encapsulation_type = site_network_access['connection']['encapsulation-type'] + cvlan_id = site_network_access['connection']['tagged-interface'][encapsulation_type]['cvlan-id'] + bearer_reference = site_network_access['bearer']['bearer-reference'] + access_priority : Optional[int] = site_network_access.get('availability', {}).get('access-priority') + single_active : bool = len(site_network_access.get('availability', {}).get('single-active', [])) > 0 + all_active : bool = len(site_network_access.get('availability', {}).get('all-active', [])) > 0 + + diversity_constraints = site_network_access.get('access-diversity', {}).get('constraints', {}).get('constraint', []) + raise_if_differs = True + diversity_constraints = { + constraint['constraint-type']:([ + target[0] + for target in constraint['target'].items() + if len(target[1]) == 1 + ][0], raise_if_differs) + for constraint in diversity_constraints + } + mapping = BEARER_MAPPINGS.get(bearer_reference) if mapping is None: msg = 'Specified Bearer({:s}) is not configured.' 
raise Exception(msg.format(str(bearer_reference))) - device_uuid,endpoint_uuid,router_id,route_distinguisher,sub_if_index,address_ip,address_prefix = mapping + #device_uuid,endpoint_uuid,router_id,route_dist,sub_if_index,address_ip,address_prefix = mapping + route_dist, address_ip, address_prefix = None, None, None + device_uuid, endpoint_uuid, sub_if_index, router_id, remote_router, circuit_id = mapping target : Union[Service, Slice, None] = None - if target is None: target = get_service(context_client, vpn_id) if target is None: target = get_slice (context_client, vpn_id) + if target is None: target = get_service(context_client, vpn_id) if target is None: raise Exception('VPN({:s}) not found in database'.format(str(vpn_id))) - # pylint: disable=no-member - endpoint_ids = target.service_endpoint_ids if isinstance(target, Service) else target.slice_endpoint_ids - - for endpoint_id in endpoint_ids: - if endpoint_id.device_id.device_uuid.uuid != device_uuid: continue - if endpoint_id.endpoint_uuid.uuid != endpoint_uuid: continue - break # found, do nothing + if isinstance(target, Service): + endpoint_ids = target.service_endpoint_ids # pylint: disable=no-member + config_rules = target.service_config.config_rules # pylint: disable=no-member + constraints = target.service_constraints # pylint: disable=no-member + elif isinstance(target, Slice): + endpoint_ids = target.slice_endpoint_ids # pylint: disable=no-member + config_rules = target.slice_config.config_rules # pylint: disable=no-member + constraints = target.slice_constraints # pylint: disable=no-member else: - # not found, add it - endpoint_id = endpoint_ids.add() - endpoint_id.device_id.device_uuid.uuid = device_uuid - endpoint_id.endpoint_uuid.uuid = endpoint_uuid - - if isinstance(target, Slice): return target - - for config_rule in target.service_config.config_rules: # pylint: disable=no-member - if config_rule.WhichOneof('config_rule') != 'custom': continue - if config_rule.custom.resource_key != 
'/settings': continue - json_settings = json.loads(config_rule.custom.resource_value) - - if 'mtu' not in json_settings: # missing, add it - json_settings['mtu'] = DEFAULT_MTU - elif json_settings['mtu'] != DEFAULT_MTU: # differs, raise exception - msg = 'Specified MTU({:s}) differs from Service MTU({:s})' - raise Exception(msg.format(str(json_settings['mtu']), str(DEFAULT_MTU))) - - if 'address_families' not in json_settings: # missing, add it - json_settings['address_families'] = DEFAULT_ADDRESS_FAMILIES - elif json_settings['address_families'] != DEFAULT_ADDRESS_FAMILIES: # differs, raise exception - msg = 'Specified AddressFamilies({:s}) differs from Service AddressFamilies({:s})' - raise Exception(msg.format(str(json_settings['address_families']), str(DEFAULT_ADDRESS_FAMILIES))) - - if 'bgp_as' not in json_settings: # missing, add it - json_settings['bgp_as'] = DEFAULT_BGP_AS - elif json_settings['bgp_as'] != DEFAULT_BGP_AS: # differs, raise exception - msg = 'Specified BgpAs({:s}) differs from Service BgpAs({:s})' - raise Exception(msg.format(str(json_settings['bgp_as']), str(DEFAULT_BGP_AS))) - - if 'bgp_route_target' not in json_settings: # missing, add it - json_settings['bgp_route_target'] = DEFAULT_BGP_ROUTE_TARGET - elif json_settings['bgp_route_target'] != DEFAULT_BGP_ROUTE_TARGET: # differs, raise exception - msg = 'Specified BgpRouteTarget({:s}) differs from Service BgpRouteTarget({:s})' - raise Exception(msg.format(str(json_settings['bgp_route_target']), str(DEFAULT_BGP_ROUTE_TARGET))) - - config_rule.custom.resource_value = json.dumps(json_settings, sort_keys=True) - break - else: - # not found, add it - config_rule = target.service_config.config_rules.add() # pylint: disable=no-member - config_rule.action = ConfigActionEnum.CONFIGACTION_SET - config_rule.custom.resource_key = '/settings' - config_rule.custom.resource_value = json.dumps({ - 'mtu' : DEFAULT_MTU, - 'address_families': DEFAULT_ADDRESS_FAMILIES, - 'bgp_as' : DEFAULT_BGP_AS, - 
'bgp_route_target': DEFAULT_BGP_ROUTE_TARGET, - }, sort_keys=True) + raise Exception('Target({:s}) not supported'.format(str(target.__class__.__name__))) + + endpoint_id = update_endpoint_ids(endpoint_ids, device_uuid, endpoint_uuid) + + service_settings_key = '/settings' + update_config_rule_custom(config_rules, service_settings_key, { + 'mtu' : (DEFAULT_MTU, True), + #'address_families': (DEFAULT_ADDRESS_FAMILIES, True), + #'bgp_as' : (DEFAULT_BGP_AS, True), + #'bgp_route_target': (DEFAULT_BGP_ROUTE_TARGET, True), + }) endpoint_settings_key = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid) - for config_rule in target.service_config.config_rules: # pylint: disable=no-member - if config_rule.WhichOneof('config_rule') != 'custom': continue - if config_rule.custom.resource_key != endpoint_settings_key: continue - json_settings = json.loads(config_rule.custom.resource_value) - - if 'router_id' not in json_settings: # missing, add it - json_settings['router_id'] = router_id - elif json_settings['router_id'] != router_id: # differs, raise exception - msg = 'Specified RouterId({:s}) differs from Service RouterId({:s})' - raise Exception(msg.format(str(json_settings['router_id']), str(router_id))) - - if 'route_distinguisher' not in json_settings: # missing, add it - json_settings['route_distinguisher'] = route_distinguisher - elif json_settings['route_distinguisher'] != route_distinguisher: # differs, raise exception - msg = 'Specified RouteDistinguisher({:s}) differs from Service RouteDistinguisher({:s})' - raise Exception(msg.format(str(json_settings['route_distinguisher']), str(route_distinguisher))) - - if 'sub_interface_index' not in json_settings: # missing, add it - json_settings['sub_interface_index'] = sub_if_index - elif json_settings['sub_interface_index'] != sub_if_index: # differs, raise exception - msg = 'Specified SubInterfaceIndex({:s}) differs from Service SubInterfaceIndex({:s})' - raise Exception(msg.format( - 
str(json_settings['sub_interface_index']), str(sub_if_index))) - - if 'vlan_id' not in json_settings: # missing, add it - json_settings['vlan_id'] = cvlan_id - elif json_settings['vlan_id'] != cvlan_id: # differs, raise exception - msg = 'Specified VLANId({:s}) differs from Service VLANId({:s})' - raise Exception(msg.format( - str(json_settings['vlan_id']), str(cvlan_id))) - - if 'address_ip' not in json_settings: # missing, add it - json_settings['address_ip'] = address_ip - elif json_settings['address_ip'] != address_ip: # differs, raise exception - msg = 'Specified AddressIP({:s}) differs from Service AddressIP({:s})' - raise Exception(msg.format( - str(json_settings['address_ip']), str(address_ip))) - - if 'address_prefix' not in json_settings: # missing, add it - json_settings['address_prefix'] = address_prefix - elif json_settings['address_prefix'] != address_prefix: # differs, raise exception - msg = 'Specified AddressPrefix({:s}) differs from Service AddressPrefix({:s})' - raise Exception(msg.format( - str(json_settings['address_prefix']), str(address_prefix))) - - config_rule.custom.resource_value = json.dumps(json_settings, sort_keys=True) - break - else: - # not found, add it - config_rule = target.service_config.config_rules.add() # pylint: disable=no-member - config_rule.action = ConfigActionEnum.CONFIGACTION_SET - config_rule.custom.resource_key = endpoint_settings_key - config_rule.custom.resource_value = json.dumps({ - 'router_id': router_id, - 'route_distinguisher': route_distinguisher, - 'sub_interface_index': sub_if_index, - 'vlan_id': cvlan_id, - 'address_ip': address_ip, - 'address_prefix': address_prefix, - }, sort_keys=True) + field_updates = {} + if router_id is not None: field_updates['router_id' ] = (router_id, True) + if route_dist is not None: field_updates['route_distinguisher'] = (route_dist, True) + if sub_if_index is not None: field_updates['sub_interface_index'] = (sub_if_index, True) + if cvlan_id is not None: 
field_updates['vlan_id' ] = (cvlan_id, True) + if address_ip is not None: field_updates['address_ip' ] = (address_ip, True) + if address_prefix is not None: field_updates['address_prefix' ] = (address_prefix, True) + if remote_router is not None: field_updates['remote_router' ] = (remote_router, True) + if circuit_id is not None: field_updates['circuit_id' ] = (circuit_id, True) + update_config_rule_custom(config_rules, endpoint_settings_key, field_updates) + + field_updates = {} + if len(diversity_constraints) > 0: + field_updates.update(diversity_constraints) + update_constraint_custom(constraints, 'diversity', field_updates) + + update_constraint_endpoint_location(constraints, endpoint_id, region=site_id) + if access_priority is not None: update_constraint_endpoint_priority(constraints, endpoint_id, access_priority) + if single_active or all_active: + # assume 1 disjoint path per endpoint/location included in service/slice + location_endpoints = {} + for constraint in constraints: + if constraint.WhichOneof('constraint') != 'endpoint_location': continue + str_endpoint_id = grpc_message_to_json_string(constraint.endpoint_location.endpoint_id) + str_location_id = grpc_message_to_json_string(constraint.endpoint_location.location) + location_endpoints.setdefault(str_location_id, set()).add(str_endpoint_id) + num_endpoints_per_location = {len(endpoints) for endpoints in location_endpoints.values()} + num_disjoint_paths = min(num_endpoints_per_location) + update_constraint_sla_availability(constraints, num_disjoint_paths, all_active) return target def process_list_site_network_access( - context_client : ContextClient, service_client : ServiceClient, slice_client : SliceClient, + context_client : ContextClient, service_client : ServiceClient, slice_client : SliceClient, site_id : str, request_data : Dict ) -> Response: @@ -182,7 +138,7 @@ def process_list_site_network_access( errors = [] for site_network_access in request_data['ietf-l2vpn-svc:site-network-access']: - 
sna_request = process_site_network_access(context_client, site_network_access) + sna_request = process_site_network_access(context_client, site_id, site_network_access) LOGGER.debug('sna_request = {:s}'.format(grpc_message_to_json_string(sna_request))) try: if isinstance(sna_request, Service): @@ -196,7 +152,7 @@ def process_list_site_network_access( else: raise NotImplementedError('Support for Class({:s}) not implemented'.format(str(type(sna_request)))) except Exception as e: # pylint: disable=broad-except - msg = 'Something went wrong Updating Service {:s}' + msg = 'Something went wrong Updating VPN {:s}' LOGGER.exception(msg.format(grpc_message_to_json_string(sna_request))) errors.append({'error': str(e)}) @@ -212,7 +168,7 @@ class L2VPN_SiteNetworkAccesses(Resource): context_client = ContextClient() service_client = ServiceClient() slice_client = SliceClient() - return process_list_site_network_access(context_client, service_client, slice_client, request.json) + return process_list_site_network_access(context_client, service_client, slice_client, site_id, request.json) @HTTP_AUTH.login_required def put(self, site_id : str): @@ -221,4 +177,4 @@ class L2VPN_SiteNetworkAccesses(Resource): context_client = ContextClient() service_client = ServiceClient() slice_client = SliceClient() - return process_list_site_network_access(context_client, service_client, slice_client, request.json) + return process_list_site_network_access(context_client, service_client, slice_client, site_id, request.json) diff --git a/src/compute/tests/mock_osm/WimconnectorIETFL2VPN.py b/src/compute/tests/mock_osm/WimconnectorIETFL2VPN.py index b9639e8046593c1dbf4017cff963ceb7c51d0532..ec9918ff0cda450f91ebb20c379c3dddd5ba9e8c 100644 --- a/src/compute/tests/mock_osm/WimconnectorIETFL2VPN.py +++ b/src/compute/tests/mock_osm/WimconnectorIETFL2VPN.py @@ -33,6 +33,7 @@ the Layer 2 service. 
import requests import uuid import logging +import copy #from osm_ro_plugin.sdnconn import SdnConnectorBase, SdnConnectorError from .sdnconn import SdnConnectorBase, SdnConnectorError @@ -222,8 +223,29 @@ class WimconnectorIETFL2VPN(SdnConnectorBase): http_code=response_service_creation.status_code, ) - """Second step, create the connections and vpn attachments""" + self.logger.info('connection_points = {:s}'.format(str(connection_points))) + + # Check if protected paths are requested + extended_connection_points = [] for connection_point in connection_points: + extended_connection_points.append(connection_point) + + connection_point_wan_info = self.search_mapp(connection_point) + service_mapping_info = connection_point_wan_info.get('service_mapping_info', {}) + redundant_service_endpoint_ids = service_mapping_info.get('redundant') + + if redundant_service_endpoint_ids is None: continue + if len(redundant_service_endpoint_ids) == 0: continue + + for redundant_service_endpoint_id in redundant_service_endpoint_ids: + redundant_connection_point = copy.deepcopy(connection_point) + redundant_connection_point['service_endpoint_id'] = redundant_service_endpoint_id + extended_connection_points.append(redundant_connection_point) + + self.logger.info('extended_connection_points = {:s}'.format(str(extended_connection_points))) + + """Second step, create the connections and vpn attachments""" + for connection_point in extended_connection_points: connection_point_wan_info = self.search_mapp(connection_point) site_network_access = {} connection = {} @@ -264,6 +286,23 @@ class WimconnectorIETFL2VPN(SdnConnectorBase): site_network_access["bearer"] = connection_point_wan_info[ "service_mapping_info" ]["bearer"] + + access_priority = connection_point_wan_info["service_mapping_info"].get("priority") + if access_priority is not None: + availability = {} + availability["access-priority"] = access_priority + availability["single-active"] = [None] + site_network_access["availability"] = 
availability + + constraint = {} + constraint['constraint-type'] = 'end-to-end-diverse' + constraint['target'] = {'all-other-accesses': [None]} + + access_diversity = {} + access_diversity['constraints'] = {'constraint': []} + access_diversity['constraints']['constraint'].append(constraint) + site_network_access["access-diversity"] = access_diversity + site_network_accesses = {} site_network_access_list = [] site_network_access_list.append(site_network_access) diff --git a/src/context/tests/context_report.xml b/src/context/tests/context_report.xml deleted file mode 100644 index 5ee1c17cd6f59c58d55a5eba38de7ea0366a757c..0000000000000000000000000000000000000000 --- a/src/context/tests/context_report.xml +++ /dev/null @@ -1,1539 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="25" skipped="0" tests="51" time="8.764" timestamp="2022-07-29T09:29:23.786468" hostname="613b7e973910"><testcase classname="context.tests.test_unitary" name="test_grpc_context[all_inmemory]" time="0.028" /><testcase classname="context.tests.test_unitary" name="test_grpc_topology[all_inmemory]" time="0.026" /><testcase classname="context.tests.test_unitary" name="test_grpc_device[all_inmemory]" time="0.139" /><testcase classname="context.tests.test_unitary" name="test_grpc_link[all_inmemory]" time="0.139" /><testcase classname="context.tests.test_unitary" name="test_grpc_service[all_inmemory]" time="0.152" /><testcase classname="context.tests.test_unitary" name="test_grpc_connection[all_inmemory]" time="0.274" /><testcase classname="context.tests.test_unitary" name="test_rest_populate_database[all_inmemory]" time="0.093" /><testcase classname="context.tests.test_unitary" name="test_rest_get_context_ids[all_inmemory]" time="1.033" /><testcase classname="context.tests.test_unitary" name="test_rest_get_contexts[all_inmemory]" time="0.009" /><testcase classname="context.tests.test_unitary" name="test_rest_get_context[all_inmemory]" time="0.009" 
/><testcase classname="context.tests.test_unitary" name="test_rest_get_topology_ids[all_inmemory]" time="0.006" /><testcase classname="context.tests.test_unitary" name="test_rest_get_topologies[all_inmemory]" time="0.013" /><testcase classname="context.tests.test_unitary" name="test_rest_get_topology[all_inmemory]" time="0.012" /><testcase classname="context.tests.test_unitary" name="test_rest_get_service_ids[all_inmemory]" time="0.007" /><testcase classname="context.tests.test_unitary" name="test_rest_get_services[all_inmemory]" time="0.039" /><testcase classname="context.tests.test_unitary" name="test_rest_get_service[all_inmemory]" time="0.017" /><testcase classname="context.tests.test_unitary" name="test_rest_get_device_ids[all_inmemory]" time="0.005" /><testcase classname="context.tests.test_unitary" name="test_rest_get_devices[all_inmemory]" time="0.070" /><testcase classname="context.tests.test_unitary" name="test_rest_get_device[all_inmemory]" time="0.027" /><testcase classname="context.tests.test_unitary" name="test_rest_get_link_ids[all_inmemory]" time="0.005" /><testcase classname="context.tests.test_unitary" name="test_rest_get_links[all_inmemory]" time="0.023" /><testcase classname="context.tests.test_unitary" name="test_rest_get_link[all_inmemory]" time="0.011" /><testcase classname="context.tests.test_unitary" name="test_rest_get_connection_ids[all_inmemory]" time="0.007" /><testcase classname="context.tests.test_unitary" name="test_rest_get_connections[all_inmemory]" time="0.032" /><testcase classname="context.tests.test_unitary" name="test_rest_get_connection[all_inmemory]" time="0.032" /><testcase classname="context.tests.test_unitary" name="test_grpc_context[all_redis]" time="0.477"><failure message="redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. 
Connection refused.">self = Connection<host=127.0.0.1,port=6379,db=0> - - def connect(self): - "Connects to the Redis server if not already connected" - if self._sock: - return - try: -> sock = self.retry.call_with_retry( - lambda: self._connect(), lambda error: self.disconnect(error) - ) - -/usr/local/lib/python3.9/site-packages/redis/connection.py:607: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = <redis.retry.Retry object at 0x7fb0186487f0> -do = <function Connection.connect.<locals>.<lambda> at 0x7fb01a79edc0> -fail = <function Connection.connect.<locals>.<lambda> at 0x7fb0186538b0> - - def call_with_retry(self, do, fail): - """ - Execute an operation that might fail and returns its result, or - raise the exception that was thrown depending on the `Backoff` object. - `do`: the operation to call. Expects no argument. - `fail`: the failure handler, expects the last error that was thrown - """ - self._backoff.reset() - failures = 0 - while True: - try: -> return do() - -/usr/local/lib/python3.9/site-packages/redis/retry.py:45: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -> lambda: self._connect(), lambda error: self.disconnect(error) - ) - -/usr/local/lib/python3.9/site-packages/redis/connection.py:608: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def _connect(self): - "Create a TCP socket connection" - # we want to mimic what socket.create_connection does to support - # ipv4/ipv6, but we want to set options prior to calling - # socket.connect() - err = None - for res in socket.getaddrinfo( - self.host, self.port, self.socket_type, socket.SOCK_STREAM - ): - family, socktype, proto, canonname, socket_address = res - sock = None - try: - sock = socket.socket(family, socktype, proto) - # TCP_NODELAY - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - # TCP_KEEPALIVE - if 
self.socket_keepalive: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - for k, v in self.socket_keepalive_options.items(): - sock.setsockopt(socket.IPPROTO_TCP, k, v) - - # set the socket_connect_timeout before we connect - sock.settimeout(self.socket_connect_timeout) - - # connect - sock.connect(socket_address) - - # set the socket_timeout now that we're connected - sock.settimeout(self.socket_timeout) - return sock - - except OSError as _: - err = _ - if sock is not None: - sock.close() - - if err is not None: -> raise err - -/usr/local/lib/python3.9/site-packages/redis/connection.py:673: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def _connect(self): - "Create a TCP socket connection" - # we want to mimic what socket.create_connection does to support - # ipv4/ipv6, but we want to set options prior to calling - # socket.connect() - err = None - for res in socket.getaddrinfo( - self.host, self.port, self.socket_type, socket.SOCK_STREAM - ): - family, socktype, proto, canonname, socket_address = res - sock = None - try: - sock = socket.socket(family, socktype, proto) - # TCP_NODELAY - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - # TCP_KEEPALIVE - if self.socket_keepalive: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - for k, v in self.socket_keepalive_options.items(): - sock.setsockopt(socket.IPPROTO_TCP, k, v) - - # set the socket_connect_timeout before we connect - sock.settimeout(self.socket_connect_timeout) - - # connect -> sock.connect(socket_address) -E ConnectionRefusedError: [Errno 111] Connection refused - -/usr/local/lib/python3.9/site-packages/redis/connection.py:661: ConnectionRefusedError - -During handling of the above exception, another exception occurred: - -context_client_grpc = <context.client.ContextClient.ContextClient object at 0x7fb018f15a30> -context_db_mb = (<common.orm.Database.Database object at 
0x7fb018f15910>, <common.message_broker.MessageBroker.MessageBroker object at 0x7fb018f15460>) - - def test_grpc_context( - context_client_grpc : ContextClient, # pylint: disable=redefined-outer-name - context_db_mb : Tuple[Database, MessageBroker]): # pylint: disable=redefined-outer-name - context_database = context_db_mb[0] - - # ----- Clean the database ----------------------------------------------------------------------------------------- -> context_database.clear_all() - -context/tests/test_unitary.py:128: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ -common/orm/Database.py:32: in clear_all - for key in self._backend.keys(): -common/orm/backend/redis/RedisBackend.py:48: in keys - return [k.decode('UTF-8') for k in self._client.keys()] -/usr/local/lib/python3.9/site-packages/redis/commands/core.py:1370: in keys - return self.execute_command("KEYS", pattern, **kwargs) -/usr/local/lib/python3.9/site-packages/redis/client.py:1173: in execute_command - conn = self.connection or pool.get_connection(command_name, **options) -/usr/local/lib/python3.9/site-packages/redis/connection.py:1370: in get_connection - connection.connect() -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def connect(self): - "Connects to the Redis server if not already connected" - if self._sock: - return - try: - sock = self.retry.call_with_retry( - lambda: self._connect(), lambda error: self.disconnect(error) - ) - except socket.timeout: - raise TimeoutError("Timeout connecting to server") - except OSError as e: -> raise ConnectionError(self._error_message(e)) -E redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused. 
- -/usr/local/lib/python3.9/site-packages/redis/connection.py:613: ConnectionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_grpc_topology[all_redis]" time="0.002"><failure message="redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.">self = Connection<host=127.0.0.1,port=6379,db=0> - - def connect(self): - "Connects to the Redis server if not already connected" - if self._sock: - return - try: -> sock = self.retry.call_with_retry( - lambda: self._connect(), lambda error: self.disconnect(error) - ) - -/usr/local/lib/python3.9/site-packages/redis/connection.py:607: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = <redis.retry.Retry object at 0x7fb0186487f0> -do = <function Connection.connect.<locals>.<lambda> at 0x7fb018563b80> -fail = <function Connection.connect.<locals>.<lambda> at 0x7fb018587550> - - def call_with_retry(self, do, fail): - """ - Execute an operation that might fail and returns its result, or - raise the exception that was thrown depending on the `Backoff` object. - `do`: the operation to call. Expects no argument. 
- `fail`: the failure handler, expects the last error that was thrown - """ - self._backoff.reset() - failures = 0 - while True: - try: -> return do() - -/usr/local/lib/python3.9/site-packages/redis/retry.py:45: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -> lambda: self._connect(), lambda error: self.disconnect(error) - ) - -/usr/local/lib/python3.9/site-packages/redis/connection.py:608: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def _connect(self): - "Create a TCP socket connection" - # we want to mimic what socket.create_connection does to support - # ipv4/ipv6, but we want to set options prior to calling - # socket.connect() - err = None - for res in socket.getaddrinfo( - self.host, self.port, self.socket_type, socket.SOCK_STREAM - ): - family, socktype, proto, canonname, socket_address = res - sock = None - try: - sock = socket.socket(family, socktype, proto) - # TCP_NODELAY - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - # TCP_KEEPALIVE - if self.socket_keepalive: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - for k, v in self.socket_keepalive_options.items(): - sock.setsockopt(socket.IPPROTO_TCP, k, v) - - # set the socket_connect_timeout before we connect - sock.settimeout(self.socket_connect_timeout) - - # connect - sock.connect(socket_address) - - # set the socket_timeout now that we're connected - sock.settimeout(self.socket_timeout) - return sock - - except OSError as _: - err = _ - if sock is not None: - sock.close() - - if err is not None: -> raise err - -/usr/local/lib/python3.9/site-packages/redis/connection.py:673: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def _connect(self): - "Create a TCP socket connection" - # we want to mimic what socket.create_connection does to support - # ipv4/ipv6, 
but we want to set options prior to calling - # socket.connect() - err = None - for res in socket.getaddrinfo( - self.host, self.port, self.socket_type, socket.SOCK_STREAM - ): - family, socktype, proto, canonname, socket_address = res - sock = None - try: - sock = socket.socket(family, socktype, proto) - # TCP_NODELAY - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - # TCP_KEEPALIVE - if self.socket_keepalive: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - for k, v in self.socket_keepalive_options.items(): - sock.setsockopt(socket.IPPROTO_TCP, k, v) - - # set the socket_connect_timeout before we connect - sock.settimeout(self.socket_connect_timeout) - - # connect -> sock.connect(socket_address) -E ConnectionRefusedError: [Errno 111] Connection refused - -/usr/local/lib/python3.9/site-packages/redis/connection.py:661: ConnectionRefusedError - -During handling of the above exception, another exception occurred: - -context_client_grpc = <context.client.ContextClient.ContextClient object at 0x7fb018f15a30> -context_db_mb = (<common.orm.Database.Database object at 0x7fb018f15910>, <common.message_broker.MessageBroker.MessageBroker object at 0x7fb018f15460>) - - def test_grpc_topology( - context_client_grpc : ContextClient, # pylint: disable=redefined-outer-name - context_db_mb : Tuple[Database, MessageBroker]): # pylint: disable=redefined-outer-name - context_database = context_db_mb[0] - - # ----- Clean the database ----------------------------------------------------------------------------------------- -> context_database.clear_all() - -context/tests/test_unitary.py:249: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ -common/orm/Database.py:32: in clear_all - for key in self._backend.keys(): -common/orm/backend/redis/RedisBackend.py:48: in keys - return [k.decode('UTF-8') for k in self._client.keys()] -/usr/local/lib/python3.9/site-packages/redis/commands/core.py:1370: in keys - return 
self.execute_command("KEYS", pattern, **kwargs) -/usr/local/lib/python3.9/site-packages/redis/client.py:1173: in execute_command - conn = self.connection or pool.get_connection(command_name, **options) -/usr/local/lib/python3.9/site-packages/redis/connection.py:1370: in get_connection - connection.connect() -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def connect(self): - "Connects to the Redis server if not already connected" - if self._sock: - return - try: - sock = self.retry.call_with_retry( - lambda: self._connect(), lambda error: self.disconnect(error) - ) - except socket.timeout: - raise TimeoutError("Timeout connecting to server") - except OSError as e: -> raise ConnectionError(self._error_message(e)) -E redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused. - -/usr/local/lib/python3.9/site-packages/redis/connection.py:613: ConnectionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_grpc_device[all_redis]" time="0.001"><failure message="redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. 
Connection refused.">self = Connection<host=127.0.0.1,port=6379,db=0> - - def connect(self): - "Connects to the Redis server if not already connected" - if self._sock: - return - try: -> sock = self.retry.call_with_retry( - lambda: self._connect(), lambda error: self.disconnect(error) - ) - -/usr/local/lib/python3.9/site-packages/redis/connection.py:607: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = <redis.retry.Retry object at 0x7fb0186487f0> -do = <function Connection.connect.<locals>.<lambda> at 0x7fb018683820> -fail = <function Connection.connect.<locals>.<lambda> at 0x7fb018653f70> - - def call_with_retry(self, do, fail): - """ - Execute an operation that might fail and returns its result, or - raise the exception that was thrown depending on the `Backoff` object. - `do`: the operation to call. Expects no argument. - `fail`: the failure handler, expects the last error that was thrown - """ - self._backoff.reset() - failures = 0 - while True: - try: -> return do() - -/usr/local/lib/python3.9/site-packages/redis/retry.py:45: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -> lambda: self._connect(), lambda error: self.disconnect(error) - ) - -/usr/local/lib/python3.9/site-packages/redis/connection.py:608: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def _connect(self): - "Create a TCP socket connection" - # we want to mimic what socket.create_connection does to support - # ipv4/ipv6, but we want to set options prior to calling - # socket.connect() - err = None - for res in socket.getaddrinfo( - self.host, self.port, self.socket_type, socket.SOCK_STREAM - ): - family, socktype, proto, canonname, socket_address = res - sock = None - try: - sock = socket.socket(family, socktype, proto) - # TCP_NODELAY - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - # TCP_KEEPALIVE - if 
self.socket_keepalive: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - for k, v in self.socket_keepalive_options.items(): - sock.setsockopt(socket.IPPROTO_TCP, k, v) - - # set the socket_connect_timeout before we connect - sock.settimeout(self.socket_connect_timeout) - - # connect - sock.connect(socket_address) - - # set the socket_timeout now that we're connected - sock.settimeout(self.socket_timeout) - return sock - - except OSError as _: - err = _ - if sock is not None: - sock.close() - - if err is not None: -> raise err - -/usr/local/lib/python3.9/site-packages/redis/connection.py:673: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def _connect(self): - "Create a TCP socket connection" - # we want to mimic what socket.create_connection does to support - # ipv4/ipv6, but we want to set options prior to calling - # socket.connect() - err = None - for res in socket.getaddrinfo( - self.host, self.port, self.socket_type, socket.SOCK_STREAM - ): - family, socktype, proto, canonname, socket_address = res - sock = None - try: - sock = socket.socket(family, socktype, proto) - # TCP_NODELAY - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - # TCP_KEEPALIVE - if self.socket_keepalive: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - for k, v in self.socket_keepalive_options.items(): - sock.setsockopt(socket.IPPROTO_TCP, k, v) - - # set the socket_connect_timeout before we connect - sock.settimeout(self.socket_connect_timeout) - - # connect -> sock.connect(socket_address) -E ConnectionRefusedError: [Errno 111] Connection refused - -/usr/local/lib/python3.9/site-packages/redis/connection.py:661: ConnectionRefusedError - -During handling of the above exception, another exception occurred: - -context_client_grpc = <context.client.ContextClient.ContextClient object at 0x7fb018f15a30> -context_db_mb = (<common.orm.Database.Database object at 
0x7fb018f15910>, <common.message_broker.MessageBroker.MessageBroker object at 0x7fb018f15460>) - - def test_grpc_device( - context_client_grpc : ContextClient, # pylint: disable=redefined-outer-name - context_db_mb : Tuple[Database, MessageBroker]): # pylint: disable=redefined-outer-name - context_database = context_db_mb[0] - - # ----- Clean the database ----------------------------------------------------------------------------------------- -> context_database.clear_all() - -context/tests/test_unitary.py:381: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ -common/orm/Database.py:32: in clear_all - for key in self._backend.keys(): -common/orm/backend/redis/RedisBackend.py:48: in keys - return [k.decode('UTF-8') for k in self._client.keys()] -/usr/local/lib/python3.9/site-packages/redis/commands/core.py:1370: in keys - return self.execute_command("KEYS", pattern, **kwargs) -/usr/local/lib/python3.9/site-packages/redis/client.py:1173: in execute_command - conn = self.connection or pool.get_connection(command_name, **options) -/usr/local/lib/python3.9/site-packages/redis/connection.py:1370: in get_connection - connection.connect() -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def connect(self): - "Connects to the Redis server if not already connected" - if self._sock: - return - try: - sock = self.retry.call_with_retry( - lambda: self._connect(), lambda error: self.disconnect(error) - ) - except socket.timeout: - raise TimeoutError("Timeout connecting to server") - except OSError as e: -> raise ConnectionError(self._error_message(e)) -E redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused. 
- -/usr/local/lib/python3.9/site-packages/redis/connection.py:613: ConnectionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_grpc_link[all_redis]" time="0.001"><failure message="redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.">self = Connection<host=127.0.0.1,port=6379,db=0> - - def connect(self): - "Connects to the Redis server if not already connected" - if self._sock: - return - try: -> sock = self.retry.call_with_retry( - lambda: self._connect(), lambda error: self.disconnect(error) - ) - -/usr/local/lib/python3.9/site-packages/redis/connection.py:607: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = <redis.retry.Retry object at 0x7fb0186487f0> -do = <function Connection.connect.<locals>.<lambda> at 0x7fb0186c0550> -fail = <function Connection.connect.<locals>.<lambda> at 0x7fb0186c0670> - - def call_with_retry(self, do, fail): - """ - Execute an operation that might fail and returns its result, or - raise the exception that was thrown depending on the `Backoff` object. - `do`: the operation to call. Expects no argument. 
- `fail`: the failure handler, expects the last error that was thrown - """ - self._backoff.reset() - failures = 0 - while True: - try: -> return do() - -/usr/local/lib/python3.9/site-packages/redis/retry.py:45: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -> lambda: self._connect(), lambda error: self.disconnect(error) - ) - -/usr/local/lib/python3.9/site-packages/redis/connection.py:608: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def _connect(self): - "Create a TCP socket connection" - # we want to mimic what socket.create_connection does to support - # ipv4/ipv6, but we want to set options prior to calling - # socket.connect() - err = None - for res in socket.getaddrinfo( - self.host, self.port, self.socket_type, socket.SOCK_STREAM - ): - family, socktype, proto, canonname, socket_address = res - sock = None - try: - sock = socket.socket(family, socktype, proto) - # TCP_NODELAY - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - # TCP_KEEPALIVE - if self.socket_keepalive: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - for k, v in self.socket_keepalive_options.items(): - sock.setsockopt(socket.IPPROTO_TCP, k, v) - - # set the socket_connect_timeout before we connect - sock.settimeout(self.socket_connect_timeout) - - # connect - sock.connect(socket_address) - - # set the socket_timeout now that we're connected - sock.settimeout(self.socket_timeout) - return sock - - except OSError as _: - err = _ - if sock is not None: - sock.close() - - if err is not None: -> raise err - -/usr/local/lib/python3.9/site-packages/redis/connection.py:673: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def _connect(self): - "Create a TCP socket connection" - # we want to mimic what socket.create_connection does to support - # ipv4/ipv6, 
but we want to set options prior to calling - # socket.connect() - err = None - for res in socket.getaddrinfo( - self.host, self.port, self.socket_type, socket.SOCK_STREAM - ): - family, socktype, proto, canonname, socket_address = res - sock = None - try: - sock = socket.socket(family, socktype, proto) - # TCP_NODELAY - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - # TCP_KEEPALIVE - if self.socket_keepalive: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - for k, v in self.socket_keepalive_options.items(): - sock.setsockopt(socket.IPPROTO_TCP, k, v) - - # set the socket_connect_timeout before we connect - sock.settimeout(self.socket_connect_timeout) - - # connect -> sock.connect(socket_address) -E ConnectionRefusedError: [Errno 111] Connection refused - -/usr/local/lib/python3.9/site-packages/redis/connection.py:661: ConnectionRefusedError - -During handling of the above exception, another exception occurred: - -context_client_grpc = <context.client.ContextClient.ContextClient object at 0x7fb018f15a30> -context_db_mb = (<common.orm.Database.Database object at 0x7fb018f15910>, <common.message_broker.MessageBroker.MessageBroker object at 0x7fb018f15460>) - - def test_grpc_link( - context_client_grpc : ContextClient, # pylint: disable=redefined-outer-name - context_db_mb : Tuple[Database, MessageBroker]): # pylint: disable=redefined-outer-name - context_database = context_db_mb[0] - - # ----- Clean the database ----------------------------------------------------------------------------------------- -> context_database.clear_all() - -context/tests/test_unitary.py:556: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ -common/orm/Database.py:32: in clear_all - for key in self._backend.keys(): -common/orm/backend/redis/RedisBackend.py:48: in keys - return [k.decode('UTF-8') for k in self._client.keys()] -/usr/local/lib/python3.9/site-packages/redis/commands/core.py:1370: in keys - return 
self.execute_command("KEYS", pattern, **kwargs) -/usr/local/lib/python3.9/site-packages/redis/client.py:1173: in execute_command - conn = self.connection or pool.get_connection(command_name, **options) -/usr/local/lib/python3.9/site-packages/redis/connection.py:1370: in get_connection - connection.connect() -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def connect(self): - "Connects to the Redis server if not already connected" - if self._sock: - return - try: - sock = self.retry.call_with_retry( - lambda: self._connect(), lambda error: self.disconnect(error) - ) - except socket.timeout: - raise TimeoutError("Timeout connecting to server") - except OSError as e: -> raise ConnectionError(self._error_message(e)) -E redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused. - -/usr/local/lib/python3.9/site-packages/redis/connection.py:613: ConnectionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_grpc_service[all_redis]" time="0.001"><failure message="redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. 
Connection refused.">self = Connection<host=127.0.0.1,port=6379,db=0> - - def connect(self): - "Connects to the Redis server if not already connected" - if self._sock: - return - try: -> sock = self.retry.call_with_retry( - lambda: self._connect(), lambda error: self.disconnect(error) - ) - -/usr/local/lib/python3.9/site-packages/redis/connection.py:607: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = <redis.retry.Retry object at 0x7fb0186487f0> -do = <function Connection.connect.<locals>.<lambda> at 0x7fb018f2f700> -fail = <function Connection.connect.<locals>.<lambda> at 0x7fb0186188b0> - - def call_with_retry(self, do, fail): - """ - Execute an operation that might fail and returns its result, or - raise the exception that was thrown depending on the `Backoff` object. - `do`: the operation to call. Expects no argument. - `fail`: the failure handler, expects the last error that was thrown - """ - self._backoff.reset() - failures = 0 - while True: - try: -> return do() - -/usr/local/lib/python3.9/site-packages/redis/retry.py:45: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -> lambda: self._connect(), lambda error: self.disconnect(error) - ) - -/usr/local/lib/python3.9/site-packages/redis/connection.py:608: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def _connect(self): - "Create a TCP socket connection" - # we want to mimic what socket.create_connection does to support - # ipv4/ipv6, but we want to set options prior to calling - # socket.connect() - err = None - for res in socket.getaddrinfo( - self.host, self.port, self.socket_type, socket.SOCK_STREAM - ): - family, socktype, proto, canonname, socket_address = res - sock = None - try: - sock = socket.socket(family, socktype, proto) - # TCP_NODELAY - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - # TCP_KEEPALIVE - if 
self.socket_keepalive: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - for k, v in self.socket_keepalive_options.items(): - sock.setsockopt(socket.IPPROTO_TCP, k, v) - - # set the socket_connect_timeout before we connect - sock.settimeout(self.socket_connect_timeout) - - # connect - sock.connect(socket_address) - - # set the socket_timeout now that we're connected - sock.settimeout(self.socket_timeout) - return sock - - except OSError as _: - err = _ - if sock is not None: - sock.close() - - if err is not None: -> raise err - -/usr/local/lib/python3.9/site-packages/redis/connection.py:673: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def _connect(self): - "Create a TCP socket connection" - # we want to mimic what socket.create_connection does to support - # ipv4/ipv6, but we want to set options prior to calling - # socket.connect() - err = None - for res in socket.getaddrinfo( - self.host, self.port, self.socket_type, socket.SOCK_STREAM - ): - family, socktype, proto, canonname, socket_address = res - sock = None - try: - sock = socket.socket(family, socktype, proto) - # TCP_NODELAY - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - # TCP_KEEPALIVE - if self.socket_keepalive: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - for k, v in self.socket_keepalive_options.items(): - sock.setsockopt(socket.IPPROTO_TCP, k, v) - - # set the socket_connect_timeout before we connect - sock.settimeout(self.socket_connect_timeout) - - # connect -> sock.connect(socket_address) -E ConnectionRefusedError: [Errno 111] Connection refused - -/usr/local/lib/python3.9/site-packages/redis/connection.py:661: ConnectionRefusedError - -During handling of the above exception, another exception occurred: - -context_client_grpc = <context.client.ContextClient.ContextClient object at 0x7fb018f15a30> -context_db_mb = (<common.orm.Database.Database object at 
0x7fb018f15910>, <common.message_broker.MessageBroker.MessageBroker object at 0x7fb018f15460>) - - def test_grpc_service( - context_client_grpc : ContextClient, # pylint: disable=redefined-outer-name - context_db_mb : Tuple[Database, MessageBroker]): # pylint: disable=redefined-outer-name - context_database = context_db_mb[0] - - # ----- Clean the database ----------------------------------------------------------------------------------------- -> context_database.clear_all() - -context/tests/test_unitary.py:739: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ -common/orm/Database.py:32: in clear_all - for key in self._backend.keys(): -common/orm/backend/redis/RedisBackend.py:48: in keys - return [k.decode('UTF-8') for k in self._client.keys()] -/usr/local/lib/python3.9/site-packages/redis/commands/core.py:1370: in keys - return self.execute_command("KEYS", pattern, **kwargs) -/usr/local/lib/python3.9/site-packages/redis/client.py:1173: in execute_command - conn = self.connection or pool.get_connection(command_name, **options) -/usr/local/lib/python3.9/site-packages/redis/connection.py:1370: in get_connection - connection.connect() -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def connect(self): - "Connects to the Redis server if not already connected" - if self._sock: - return - try: - sock = self.retry.call_with_retry( - lambda: self._connect(), lambda error: self.disconnect(error) - ) - except socket.timeout: - raise TimeoutError("Timeout connecting to server") - except OSError as e: -> raise ConnectionError(self._error_message(e)) -E redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused. 
- -/usr/local/lib/python3.9/site-packages/redis/connection.py:613: ConnectionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_grpc_connection[all_redis]" time="0.001"><failure message="redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.">self = Connection<host=127.0.0.1,port=6379,db=0> - - def connect(self): - "Connects to the Redis server if not already connected" - if self._sock: - return - try: -> sock = self.retry.call_with_retry( - lambda: self._connect(), lambda error: self.disconnect(error) - ) - -/usr/local/lib/python3.9/site-packages/redis/connection.py:607: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = <redis.retry.Retry object at 0x7fb0186487f0> -do = <function Connection.connect.<locals>.<lambda> at 0x7fb0186a43a0> -fail = <function Connection.connect.<locals>.<lambda> at 0x7fb0186a4310> - - def call_with_retry(self, do, fail): - """ - Execute an operation that might fail and returns its result, or - raise the exception that was thrown depending on the `Backoff` object. - `do`: the operation to call. Expects no argument. 
- `fail`: the failure handler, expects the last error that was thrown - """ - self._backoff.reset() - failures = 0 - while True: - try: -> return do() - -/usr/local/lib/python3.9/site-packages/redis/retry.py:45: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -> lambda: self._connect(), lambda error: self.disconnect(error) - ) - -/usr/local/lib/python3.9/site-packages/redis/connection.py:608: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def _connect(self): - "Create a TCP socket connection" - # we want to mimic what socket.create_connection does to support - # ipv4/ipv6, but we want to set options prior to calling - # socket.connect() - err = None - for res in socket.getaddrinfo( - self.host, self.port, self.socket_type, socket.SOCK_STREAM - ): - family, socktype, proto, canonname, socket_address = res - sock = None - try: - sock = socket.socket(family, socktype, proto) - # TCP_NODELAY - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - # TCP_KEEPALIVE - if self.socket_keepalive: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - for k, v in self.socket_keepalive_options.items(): - sock.setsockopt(socket.IPPROTO_TCP, k, v) - - # set the socket_connect_timeout before we connect - sock.settimeout(self.socket_connect_timeout) - - # connect - sock.connect(socket_address) - - # set the socket_timeout now that we're connected - sock.settimeout(self.socket_timeout) - return sock - - except OSError as _: - err = _ - if sock is not None: - sock.close() - - if err is not None: -> raise err - -/usr/local/lib/python3.9/site-packages/redis/connection.py:673: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def _connect(self): - "Create a TCP socket connection" - # we want to mimic what socket.create_connection does to support - # ipv4/ipv6, 
but we want to set options prior to calling - # socket.connect() - err = None - for res in socket.getaddrinfo( - self.host, self.port, self.socket_type, socket.SOCK_STREAM - ): - family, socktype, proto, canonname, socket_address = res - sock = None - try: - sock = socket.socket(family, socktype, proto) - # TCP_NODELAY - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - # TCP_KEEPALIVE - if self.socket_keepalive: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - for k, v in self.socket_keepalive_options.items(): - sock.setsockopt(socket.IPPROTO_TCP, k, v) - - # set the socket_connect_timeout before we connect - sock.settimeout(self.socket_connect_timeout) - - # connect -> sock.connect(socket_address) -E ConnectionRefusedError: [Errno 111] Connection refused - -/usr/local/lib/python3.9/site-packages/redis/connection.py:661: ConnectionRefusedError - -During handling of the above exception, another exception occurred: - -context_client_grpc = <context.client.ContextClient.ContextClient object at 0x7fb018f15a30> -context_db_mb = (<common.orm.Database.Database object at 0x7fb018f15910>, <common.message_broker.MessageBroker.MessageBroker object at 0x7fb018f15460>) - - def test_grpc_connection( - context_client_grpc : ContextClient, # pylint: disable=redefined-outer-name - context_db_mb : Tuple[Database, MessageBroker]): # pylint: disable=redefined-outer-name - context_database = context_db_mb[0] - - # ----- Clean the database ----------------------------------------------------------------------------------------- -> context_database.clear_all() - -context/tests/test_unitary.py:926: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ -common/orm/Database.py:32: in clear_all - for key in self._backend.keys(): -common/orm/backend/redis/RedisBackend.py:48: in keys - return [k.decode('UTF-8') for k in self._client.keys()] -/usr/local/lib/python3.9/site-packages/redis/commands/core.py:1370: in keys - return 
self.execute_command("KEYS", pattern, **kwargs) -/usr/local/lib/python3.9/site-packages/redis/client.py:1173: in execute_command - conn = self.connection or pool.get_connection(command_name, **options) -/usr/local/lib/python3.9/site-packages/redis/connection.py:1370: in get_connection - connection.connect() -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def connect(self): - "Connects to the Redis server if not already connected" - if self._sock: - return - try: - sock = self.retry.call_with_retry( - lambda: self._connect(), lambda error: self.disconnect(error) - ) - except socket.timeout: - raise TimeoutError("Timeout connecting to server") - except OSError as e: -> raise ConnectionError(self._error_message(e)) -E redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused. - -/usr/local/lib/python3.9/site-packages/redis/connection.py:613: ConnectionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_populate_database[all_redis]" time="0.001"><failure message="redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. 
Connection refused.">self = Connection<host=127.0.0.1,port=6379,db=0> - - def connect(self): - "Connects to the Redis server if not already connected" - if self._sock: - return - try: -> sock = self.retry.call_with_retry( - lambda: self._connect(), lambda error: self.disconnect(error) - ) - -/usr/local/lib/python3.9/site-packages/redis/connection.py:607: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = <redis.retry.Retry object at 0x7fb0186487f0> -do = <function Connection.connect.<locals>.<lambda> at 0x7fb0186d49d0> -fail = <function Connection.connect.<locals>.<lambda> at 0x7fb0186d4280> - - def call_with_retry(self, do, fail): - """ - Execute an operation that might fail and returns its result, or - raise the exception that was thrown depending on the `Backoff` object. - `do`: the operation to call. Expects no argument. - `fail`: the failure handler, expects the last error that was thrown - """ - self._backoff.reset() - failures = 0 - while True: - try: -> return do() - -/usr/local/lib/python3.9/site-packages/redis/retry.py:45: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -> lambda: self._connect(), lambda error: self.disconnect(error) - ) - -/usr/local/lib/python3.9/site-packages/redis/connection.py:608: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def _connect(self): - "Create a TCP socket connection" - # we want to mimic what socket.create_connection does to support - # ipv4/ipv6, but we want to set options prior to calling - # socket.connect() - err = None - for res in socket.getaddrinfo( - self.host, self.port, self.socket_type, socket.SOCK_STREAM - ): - family, socktype, proto, canonname, socket_address = res - sock = None - try: - sock = socket.socket(family, socktype, proto) - # TCP_NODELAY - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - # TCP_KEEPALIVE - if 
self.socket_keepalive: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - for k, v in self.socket_keepalive_options.items(): - sock.setsockopt(socket.IPPROTO_TCP, k, v) - - # set the socket_connect_timeout before we connect - sock.settimeout(self.socket_connect_timeout) - - # connect - sock.connect(socket_address) - - # set the socket_timeout now that we're connected - sock.settimeout(self.socket_timeout) - return sock - - except OSError as _: - err = _ - if sock is not None: - sock.close() - - if err is not None: -> raise err - -/usr/local/lib/python3.9/site-packages/redis/connection.py:673: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def _connect(self): - "Create a TCP socket connection" - # we want to mimic what socket.create_connection does to support - # ipv4/ipv6, but we want to set options prior to calling - # socket.connect() - err = None - for res in socket.getaddrinfo( - self.host, self.port, self.socket_type, socket.SOCK_STREAM - ): - family, socktype, proto, canonname, socket_address = res - sock = None - try: - sock = socket.socket(family, socktype, proto) - # TCP_NODELAY - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - # TCP_KEEPALIVE - if self.socket_keepalive: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - for k, v in self.socket_keepalive_options.items(): - sock.setsockopt(socket.IPPROTO_TCP, k, v) - - # set the socket_connect_timeout before we connect - sock.settimeout(self.socket_connect_timeout) - - # connect -> sock.connect(socket_address) -E ConnectionRefusedError: [Errno 111] Connection refused - -/usr/local/lib/python3.9/site-packages/redis/connection.py:661: ConnectionRefusedError - -During handling of the above exception, another exception occurred: - -context_db_mb = (<common.orm.Database.Database object at 0x7fb018f15910>, <common.message_broker.MessageBroker.MessageBroker object at 0x7fb018f15460>) 
-context_service_grpc = <context.service.grpc_server.ContextService.ContextService object at 0x7fb018f158e0> - - def test_rest_populate_database( - context_db_mb : Tuple[Database, MessageBroker], # pylint: disable=redefined-outer-name - context_service_grpc : ContextService # pylint: disable=redefined-outer-name - ): - database = context_db_mb[0] -> database.clear_all() - -context/tests/test_unitary.py:1179: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ -common/orm/Database.py:32: in clear_all - for key in self._backend.keys(): -common/orm/backend/redis/RedisBackend.py:48: in keys - return [k.decode('UTF-8') for k in self._client.keys()] -/usr/local/lib/python3.9/site-packages/redis/commands/core.py:1370: in keys - return self.execute_command("KEYS", pattern, **kwargs) -/usr/local/lib/python3.9/site-packages/redis/client.py:1173: in execute_command - conn = self.connection or pool.get_connection(command_name, **options) -/usr/local/lib/python3.9/site-packages/redis/connection.py:1370: in get_connection - connection.connect() -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -self = Connection<host=127.0.0.1,port=6379,db=0> - - def connect(self): - "Connects to the Redis server if not already connected" - if self._sock: - return - try: - sock = self.retry.call_with_retry( - lambda: self._connect(), lambda error: self.disconnect(error) - ) - except socket.timeout: - raise TimeoutError("Timeout connecting to server") - except OSError as e: -> raise ConnectionError(self._error_message(e)) -E redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused. 
- -/usr/local/lib/python3.9/site-packages/redis/connection.py:613: ConnectionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_context_ids[all_redis]" time="1.033"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_context_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name -> reply = do_rest_request('/context_ids') - -context/tests/test_unitary.py:1183: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/context_ids' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_contexts[all_redis]" time="0.006"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_contexts(context_service_rest : RestServer): # pylint: disable=redefined-outer-name -> reply = do_rest_request('/contexts') - -context/tests/test_unitary.py:1187: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/contexts' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 
'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_context[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_context(context_service_rest : RestServer): # pylint: disable=redefined-outer-name - context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID) -> reply = do_rest_request('/context/{:s}'.format(context_uuid)) - -context/tests/test_unitary.py:1192: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/context/admin' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_topology_ids[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 
140392926267136)> - - def test_rest_get_topology_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name - context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID) -> reply = do_rest_request('/context/{:s}/topology_ids'.format(context_uuid)) - -context/tests/test_unitary.py:1197: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/context/admin/topology_ids' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_topologies[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_topologies(context_service_rest : RestServer): # pylint: disable=redefined-outer-name - context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID) -> reply = do_rest_request('/context/{:s}/topologies'.format(context_uuid)) - -context/tests/test_unitary.py:1202: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/context/admin/topologies' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) 
- LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_topology[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_topology(context_service_rest : RestServer): # pylint: disable=redefined-outer-name - context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID) - topology_uuid = urllib.parse.quote(DEFAULT_TOPOLOGY_UUID) -> reply = do_rest_request('/context/{:s}/topology/{:s}'.format(context_uuid, topology_uuid)) - -context/tests/test_unitary.py:1208: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/context/admin/topology/admin' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_service_ids[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_service_ids(context_service_rest : 
RestServer): # pylint: disable=redefined-outer-name - context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID) -> reply = do_rest_request('/context/{:s}/service_ids'.format(context_uuid)) - -context/tests/test_unitary.py:1213: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/context/admin/service_ids' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_services[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_services(context_service_rest : RestServer): # pylint: disable=redefined-outer-name - context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID) -> reply = do_rest_request('/context/{:s}/services'.format(context_uuid)) - -context/tests/test_unitary.py:1218: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/context/admin/services' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code 
== 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_service[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_service(context_service_rest : RestServer): # pylint: disable=redefined-outer-name - context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID) - service_uuid = urllib.parse.quote(SERVICE_R1_R2_UUID, safe='') -> reply = do_rest_request('/context/{:s}/service/{:s}'.format(context_uuid, service_uuid)) - -context/tests/test_unitary.py:1224: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/context/admin/service/SVC%3AR1%2FEP100-R2%2FEP100' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_device_ids[all_redis]" time="0.006"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_device_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name -> reply = 
do_rest_request('/device_ids') - -context/tests/test_unitary.py:1228: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/device_ids' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_devices[all_redis]" time="0.006"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_devices(context_service_rest : RestServer): # pylint: disable=redefined-outer-name -> reply = do_rest_request('/devices') - -context/tests/test_unitary.py:1232: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/devices' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" 
name="test_rest_get_device[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_device(context_service_rest : RestServer): # pylint: disable=redefined-outer-name - device_uuid = urllib.parse.quote(DEVICE_R1_UUID, safe='') -> reply = do_rest_request('/device/{:s}'.format(device_uuid)) - -context/tests/test_unitary.py:1237: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/device/R1' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_link_ids[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_link_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name -> reply = do_rest_request('/link_ids') - -context/tests/test_unitary.py:1241: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/link_ids' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET 
{:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_links[all_redis]" time="0.006"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_links(context_service_rest : RestServer): # pylint: disable=redefined-outer-name -> reply = do_rest_request('/links') - -context/tests/test_unitary.py:1245: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/links' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_link[all_redis]" time="0.008"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_link(context_service_rest : RestServer): # pylint: disable=redefined-outer-name - link_uuid = urllib.parse.quote(LINK_R1_R2_UUID, safe='') -> reply = 
do_rest_request('/link/{:s}'.format(link_uuid)) - -context/tests/test_unitary.py:1250: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/link/R1%2FEP2-R2%2FEP1' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_connection_ids[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_connection_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name - context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID) - service_uuid = urllib.parse.quote(SERVICE_R1_R3_UUID, safe='') -> reply = do_rest_request('/context/{:s}/service/{:s}/connection_ids'.format(context_uuid, service_uuid)) - -context/tests/test_unitary.py:1256: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/context/admin/service/SVC%3AR1%2FEP100-R3%2FEP100/connection_ids' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert 
reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_connections[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_connections(context_service_rest : RestServer): # pylint: disable=redefined-outer-name - context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID) - service_uuid = urllib.parse.quote(SERVICE_R1_R3_UUID, safe='') -> reply = do_rest_request('/context/{:s}/service/{:s}/connections'.format(context_uuid, service_uuid)) - -context/tests/test_unitary.py:1262: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/context/admin/service/SVC%3AR1%2FEP100-R3%2FEP100/connections' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_connection[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500 assert 500 == 200 +500 -200">context_service_rest = <RestServer(Thread-71, started daemon 140392926267136)> - - def test_rest_get_connection(context_service_rest : RestServer): # pylint: 
disable=redefined-outer-name - connection_uuid = urllib.parse.quote(CONNECTION_R1_R3_UUID, safe='') -> reply = do_rest_request('/connection/{:s}'.format(connection_uuid)) - -context/tests/test_unitary.py:1267: -_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -url = '/connection/CON%3AR1%2FEP100-R3%2FEP100' - - def do_rest_request(url : str): - base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT) - request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url) - LOGGER.warning('Request: GET {:s}'.format(str(request_url))) - reply = requests.get(request_url) - LOGGER.warning('Reply: {:s}'.format(str(reply.text))) -> assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code) -E AssertionError: Reply failed with code 500 -E assert 500 == 200 -E +500 -E -200 - -context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_tools_fast_string_hasher" time="0.423" /></testsuite></testsuites> \ No newline at end of file diff --git a/src/device/service/__main__.py b/src/device/service/__main__.py index 1f0adfa8f1dd8b3e307ed202967b1d5195171f11..5c9b41531e7bc579cbe5cc563f20b193f6bc5a90 100644 --- a/src/device/service/__main__.py +++ b/src/device/service/__main__.py @@ -34,7 +34,7 @@ def main(): global LOGGER # pylint: disable=global-statement log_level = get_log_level() - logging.basicConfig(level=log_level) + logging.basicConfig(level=log_level, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s") logging.getLogger('apscheduler.executors.default').setLevel(logging.WARNING) logging.getLogger('apscheduler.scheduler').setLevel(logging.WARNING) logging.getLogger('monitoring-client').setLevel(logging.WARNING) diff --git a/src/device/service/drivers/__init__.py b/src/device/service/drivers/__init__.py index 1187b3de36cbf4d16fed1b3e18c0cb1fa21cafa9..821a70418be7d323c5b1279c30a62fd9691e9e3f 100644 --- 
a/src/device/service/drivers/__init__.py +++ b/src/device/service/drivers/__init__.py @@ -60,7 +60,7 @@ if LOAD_ALL_DEVICE_DRIVERS: (TransportApiDriver, [ { # Real OLS, specifying TAPI Driver => use TransportApiDriver - FilterFieldEnum.DEVICE_TYPE: DeviceTypeEnum.OPTICAL_LINE_SYSTEM, + FilterFieldEnum.DEVICE_TYPE: DeviceTypeEnum.OPEN_LINE_SYSTEM, FilterFieldEnum.DRIVER : ORM_DeviceDriverEnum.TRANSPORT_API, } ])) diff --git a/src/device/service/drivers/openconfig/templates/EndPoints.py b/src/device/service/drivers/openconfig/templates/EndPoints.py index c11b1669d5b4cf3ca47986817ded28f75ae8358f..718a02d193531924bef863f5ccd2cbb999388dbd 100644 --- a/src/device/service/drivers/openconfig/templates/EndPoints.py +++ b/src/device/service/drivers/openconfig/templates/EndPoints.py @@ -20,7 +20,7 @@ from .Tools import add_value_from_collection, add_value_from_tag LOGGER = logging.getLogger(__name__) -XPATH_PORTS = "//ocp:components/ocp:component/ocp:state[ocp:type='PORT']/.." +XPATH_PORTS = "//ocp:components/ocp:component" XPATH_IFACE_COUNTER = "//oci:interfaces/oci:interface[oci:name='{:s}']/state/counters/{:s}" def parse(xml_data : ET.Element) -> List[Tuple[str, Dict[str, Any]]]: @@ -28,6 +28,13 @@ def parse(xml_data : ET.Element) -> List[Tuple[str, Dict[str, Any]]]: for xml_component in xml_data.xpath(XPATH_PORTS, namespaces=NAMESPACES): #LOGGER.info('xml_component = {:s}'.format(str(ET.tostring(xml_component)))) + component_type = xml_component.find('ocp:state/ocp:type', namespaces=NAMESPACES) + if component_type is None or component_type.text is None: continue + component_type = component_type.text + if component_type not in {'PORT', 'oc-platform-types:PORT'}: continue + + LOGGER.info('PORT xml_component = {:s}'.format(str(ET.tostring(xml_component)))) + endpoint = {} component_name = xml_component.find('ocp:name', namespaces=NAMESPACES) diff --git a/src/device/service/drivers/openconfig/templates/interface/subinterface/edit_config.xml 
b/src/device/service/drivers/openconfig/templates/interface/subinterface/edit_config.xml index fdfd771f603fd2f054f34e6d0ebeddfc9b4343a1..1bdb8efbff495f04ee90dadaffaa7412332531b7 100644 --- a/src/device/service/drivers/openconfig/templates/interface/subinterface/edit_config.xml +++ b/src/device/service/drivers/openconfig/templates/interface/subinterface/edit_config.xml @@ -1,51 +1,48 @@ <interfaces xmlns="http://openconfig.net/yang/interfaces" xmlns:oc-ip="http://openconfig.net/yang/interfaces/ip" > - <interface> - <name>{{name}}</name> - <config> - <name>{{name}}</name> - <type xmlns:ianaift="urn:ietf:params:xml:ns:yang:iana-if-type">ianaift:{{type}}</type> - {% if mtu is defined %}<mtu>{{mtu}}</mtu>{% endif%} - <enabled>true</enabled> - </config> - <subinterfaces> - <subinterface> - <index>{{index}}</index> + <interface> + <name>{{name}}</name> <config> - <index>{{index}}</index> - <description>{{description}}</description> - {% if vendor=="ADVA" and vlan_id is not defined %} - <untagged-allowed xmlns="http://www.advaoptical.com/cim/adva-dnos-oc-interfaces">true</untagged-allowed> - {% endif%} + <name>{{name}}</name> + <type xmlns:ianaift="urn:ietf:params:xml:ns:yang:iana-if-type">ianaift:{{type}}</type> + {% if mtu is defined %}<mtu>{{mtu}}</mtu>{% endif%} + <enabled>true</enabled> </config> - {% if vlan_id is defined %} - <vlan xmlns="http://openconfig.net/yang/vlan"> - <match> - <single-tagged> - <config> - <vlan-id>{{vlan_id}}</vlan-id> - </config> - </single-tagged> - </match> - </vlan> - {% endif%} - {% if address_ip is defined %} - <oc-ip:ipv4> - <oc-ip:addresses> - <oc-ip:address> - <oc-ip:ip>{{address_ip}}</oc-ip:ip> - <oc-ip:config> - <oc-ip:ip>{{address_ip}}</oc-ip:ip> - <oc-ip:prefix-length>{{address_prefix}}</oc-ip:prefix-length> - </oc-ip:config> - </oc-ip:address> - </oc-ip:addresses> - </oc-ip:ipv4> - {% endif%} - </subinterface> - </subinterfaces> - </interface> + <subinterfaces> + <subinterface> + <index>{{index}}</index> + <config> + 
<index>{{index}}</index> + <description>{{description}}</description> + {% if vendor=="ADVA" and vlan_id is not defined %} + <untagged-allowed xmlns="http://www.advaoptical.com/cim/adva-dnos-oc-interfaces">true</untagged-allowed> + {% endif%} + </config> + {% if vlan_id is defined %} + <vlan xmlns="http://openconfig.net/yang/vlan"> + <match> + <single-tagged> + <config> + <vlan-id>{{vlan_id}}</vlan-id> + </config> + </single-tagged> + </match> + </vlan> + {% endif %} + {% if address_ip is defined %} + <oc-ip:ipv4> + <oc-ip:addresses> + <oc-ip:address> + <oc-ip:ip>{{address_ip}}</oc-ip:ip> + <oc-ip:config> + <oc-ip:ip>{{address_ip}}</oc-ip:ip> + <oc-ip:prefix-length>{{address_prefix}}</oc-ip:prefix-length> + </oc-ip:config> + </oc-ip:address> + </oc-ip:addresses> + </oc-ip:ipv4> + {% endif %} + </subinterface> + </subinterfaces> + </interface> </interfaces> - - - diff --git a/src/device/service/drivers/openconfig/templates/network_instance/connection_point/edit_config.xml b/src/device/service/drivers/openconfig/templates/network_instance/connection_point/edit_config.xml index d34f462dbba7331a6eb553ff8124dad25851c69f..60272e5fba4dd87c9bc48ef596197c2508b75e59 100644 --- a/src/device/service/drivers/openconfig/templates/network_instance/connection_point/edit_config.xml +++ b/src/device/service/drivers/openconfig/templates/network_instance/connection_point/edit_config.xml @@ -26,4 +26,4 @@ </connection-point> </connection-points> </network-instance> -</network-instances> \ No newline at end of file +</network-instances> diff --git a/src/device/tests/device_report.xml b/src/device/tests/device_report.xml deleted file mode 100644 index c05ea0ba79d2b1b6fb5434a76c2e6af022eb2e2c..0000000000000000000000000000000000000000 --- a/src/device/tests/device_report.xml +++ /dev/null @@ -1 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="0" time="0.017" timestamp="2022-07-29T09:28:47.168633" 
hostname="86d45e18bd70" /></testsuites> \ No newline at end of file diff --git a/src/monitoring/requirements.in b/src/monitoring/requirements.in index e0176e0266ad6239dabb3aeedc273ddc0b638ded..50f283a1940ed99d16276857d2cab22220921879 100644 --- a/src/monitoring/requirements.in +++ b/src/monitoring/requirements.in @@ -5,11 +5,11 @@ fastcache==1.1.0 #opencensus[stackdriver] #google-cloud-profiler #numpy -Jinja2==3.0.3 -ncclient==0.6.13 -p4runtime==1.3.0 -paramiko==2.9.2 -# influx-line-protocol==0.1.4 +#Jinja2==3.0.3 +#ncclient==0.6.13 +#p4runtime==1.3.0 +#paramiko==2.9.2 +influx-line-protocol==0.1.4 python-dateutil==2.8.2 python-json-logger==2.0.2 pytz==2021.3 diff --git a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py index 70b50dae53ba22eb6c8df018fb5663cce0bc125e..76b49bc8bd4a5ded840ccad13f0941d05070d344 100644 --- a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py +++ b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py @@ -194,9 +194,13 @@ class KDisjointPathAlgorithm(_Algorithm): grpc_services[service_key] = self.add_service_to_reply(reply, context_uuid, service_uuid) for num_path,service_path_ero in enumerate(paths): + self.logger.warning('num_path={:d}'.format(num_path)) + self.logger.warning('service_path_ero={:s}'.format(str(service_path_ero))) if service_path_ero is None: continue path_hops = eropath_to_hops(service_path_ero, self.endpoint_to_link_dict) + self.logger.warning('path_hops={:s}'.format(str(path_hops))) connections = convert_explicit_path_hops_to_connections(path_hops, self.device_dict, service_uuid) + self.logger.warning('connections={:s}'.format(str(connections))) for connection in connections: connection_uuid,device_layer,path_hops,_ = connection @@ -221,8 +225,8 @@ class KDisjointPathAlgorithm(_Algorithm): grpc_connection = self.add_connection_to_reply(reply, connection_uuid, grpc_service, path_hops) 
grpc_connections[connection_uuid] = grpc_connection - for service_uuid in dependencies: - sub_service_key = (context_uuid, service_uuid) + for sub_service_uuid in dependencies: + sub_service_key = (context_uuid, sub_service_uuid) grpc_sub_service = grpc_services.get(sub_service_key) if grpc_sub_service is None: raise Exception('Service({:s}) not found'.format(str(sub_service_key))) diff --git a/src/pathcomp/frontend/service/algorithms/_Algorithm.py b/src/pathcomp/frontend/service/algorithms/_Algorithm.py index bb96ff354ef32cb0a269d2b678fdb9552d86939d..b798813a83d984d6d1d75450529e9c826e220624 100644 --- a/src/pathcomp/frontend/service/algorithms/_Algorithm.py +++ b/src/pathcomp/frontend/service/algorithms/_Algorithm.py @@ -12,11 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -import json, logging, requests, uuid +import json, logging, requests from typing import Dict, List, Optional, Tuple -from common.proto.context_pb2 import Connection, Device, DeviceList, EndPointId, Link, LinkList, Service, ServiceStatusEnum, ServiceTypeEnum +from common.proto.context_pb2 import ( + ConfigRule, Connection, Device, DeviceList, EndPointId, Link, LinkList, Service, ServiceStatusEnum, + ServiceTypeEnum) from common.proto.pathcomp_pb2 import PathCompReply, PathCompRequest -from common.tools.grpc.Tools import grpc_message_to_json_string +from common.tools.object_factory.ConfigRule import json_config_rule_set from pathcomp.frontend.Config import BACKEND_URL from pathcomp.frontend.service.algorithms.tools.ConstantsMappings import DEVICE_LAYER_TO_SERVICE_TYPE, DeviceLayerEnum from .tools.EroPathToHops import eropath_to_hops @@ -156,6 +158,17 @@ class _Algorithm: raise Exception(MSG.format(str(device_layer))) service.service_type = service_type + if service_type == ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE: + json_tapi_settings = { + 'capacity_value' : 50.0, + 'capacity_unit' : 'GHz', + 'layer_proto_name': 
'PHOTONIC_MEDIA', + 'layer_proto_qual': 'tapi-photonic-media:PHOTONIC_LAYER_QUALIFIER_NMC', + 'direction' : 'UNIDIRECTIONAL', + } + config_rule = ConfigRule(**json_config_rule_set('/settings', json_tapi_settings)) + service.service_config.config_rules.append(config_rule) + service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED if path_hops is not None and len(path_hops) > 0: diff --git a/src/pathcomp/frontend/service/algorithms/tools/ConstantsMappings.py b/src/pathcomp/frontend/service/algorithms/tools/ConstantsMappings.py index 5e4f5408398cca012dca52fb19bf11a2b84a5721..2ff97b96c2a33e77745239b5f944cecb19639b1d 100644 --- a/src/pathcomp/frontend/service/algorithms/tools/ConstantsMappings.py +++ b/src/pathcomp/frontend/service/algorithms/tools/ConstantsMappings.py @@ -92,7 +92,7 @@ DEVICE_TYPE_TO_LAYER = { DeviceTypeEnum.OPEN_LINE_SYSTEM.value : DeviceLayerEnum.OPTICAL_CONTROLLER, DeviceTypeEnum.OPTICAL_ROADM.value : DeviceLayerEnum.OPTICAL_DEVICE, - DeviceTypeEnum.OPTICAL_TRANDPONDER.value : DeviceLayerEnum.OPTICAL_DEVICE, + DeviceTypeEnum.OPTICAL_TRANSPONDER.value : DeviceLayerEnum.OPTICAL_DEVICE, } DEVICE_LAYER_TO_SERVICE_TYPE = { diff --git a/src/service/Dockerfile b/src/service/Dockerfile index c53a897821b759a8005118ba81a3a0f5c0b73c66..e469898e590b8797e8d3305e1c583caed41bfc80 100644 --- a/src/service/Dockerfile +++ b/src/service/Dockerfile @@ -64,6 +64,7 @@ RUN python3 -m pip install -r requirements.txt WORKDIR /var/teraflow COPY src/context/. context/ COPY src/device/. device/ +COPY src/pathcomp/frontend/. pathcomp/frontend/ COPY src/service/. 
service/ # Start the service diff --git a/src/service/service/ServiceService.py b/src/service/service/ServiceService.py index b152376254b52f39c7351eca628165a4a05fac31..2f44fe01894230f84749115ce781178b7d955a36 100644 --- a/src/service/service/ServiceService.py +++ b/src/service/service/ServiceService.py @@ -14,9 +14,6 @@ from common.Constants import ServiceNameEnum from common.Settings import get_service_port_grpc -from common.orm.backend.BackendEnum import BackendEnum -from common.orm.Database import Database -from common.orm.Factory import get_database_backend from common.proto.service_pb2_grpc import add_ServiceServiceServicer_to_server from common.tools.service.GenericGrpcService import GenericGrpcService from .ServiceServiceServicerImpl import ServiceServiceServicerImpl @@ -26,8 +23,7 @@ class ServiceService(GenericGrpcService): def __init__(self, service_handler_factory : ServiceHandlerFactory, cls_name: str = __name__) -> None: port = get_service_port_grpc(ServiceNameEnum.SERVICE) super().__init__(port, cls_name=cls_name) - database = Database(get_database_backend(backend=BackendEnum.INMEMORY)) - self.service_servicer = ServiceServiceServicerImpl(database, service_handler_factory) + self.service_servicer = ServiceServiceServicerImpl(service_handler_factory) def install_servicers(self): add_ServiceServiceServicer_to_server(self.service_servicer, self.server) diff --git a/src/service/service/ServiceServiceServicerImpl.py b/src/service/service/ServiceServiceServicerImpl.py index 6355cafbef0fc65338269df064a0f56e115b746e..bc71168f621afc9f0a9ed93d51844542beed813c 100644 --- a/src/service/service/ServiceServiceServicerImpl.py +++ b/src/service/service/ServiceServiceServicerImpl.py @@ -12,25 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Dict, List import grpc, json, logging -from common.orm.Database import Database -from common.orm.HighLevel import get_object -from common.orm.backend.Tools import key_to_str -from common.proto.context_pb2 import Empty, Service, ServiceId +from typing import Optional +from common.proto.context_pb2 import Empty, Service, ServiceId, ServiceStatusEnum +from common.proto.pathcomp_pb2 import PathCompRequest from common.proto.service_pb2_grpc import ServiceServiceServicer from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method -from common.rpc_method_wrapper.ServiceExceptions import InvalidArgumentException, NotFoundException +from common.rpc_method_wrapper.ServiceExceptions import AlreadyExistsException, InvalidArgumentException from common.tools.grpc.Tools import grpc_message_to_json, grpc_message_to_json_string from context.client.ContextClient import ContextClient -from device.client.DeviceClient import DeviceClient -from service.service.database.DeviceModel import DeviceModel -from .database.DatabaseServiceTools import ( - sync_service_from_context, sync_service_to_context, update_service_in_local_database) -from .database.ServiceModel import ServiceModel -from .path_computation_element.PathComputationElement import PathComputationElement, dump_connectivity +from pathcomp.frontend.client.PathCompClient import PathCompClient +from service.service.tools.ContextGetters import get_service from .service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory -from .Tools import delete_service, sync_devices_from_context, update_service +from .task_scheduler.TaskScheduler import TasksScheduler LOGGER = logging.getLogger(__name__) @@ -39,11 +33,8 @@ METHOD_NAMES = ['CreateService', 'UpdateService', 'DeleteService'] METRICS = create_metrics(SERVICE_NAME, METHOD_NAMES) class ServiceServiceServicerImpl(ServiceServiceServicer): - def __init__(self, database : Database, service_handler_factory : ServiceHandlerFactory) 
-> None: + def __init__(self, service_handler_factory : ServiceHandlerFactory) -> None: LOGGER.debug('Creating Servicer...') - self.context_client = ContextClient() - self.device_client = DeviceClient() - self.database = database self.service_handler_factory = service_handler_factory LOGGER.debug('Servicer Created') @@ -84,96 +75,81 @@ class ServiceServiceServicerImpl(ServiceServiceServicer): extra_details='RPC method CreateService does not accept Config Rules. '\ 'Config Rules should be configured after creating the service.') - sync_service_from_context(service_context_uuid, service_uuid, self.context_client, self.database) - db_service,_ = update_service_in_local_database(self.database, request) + # check that service does not exist + context_client = ContextClient() + current_service = get_service(context_client, request.service_id) + if current_service is not None: + context_uuid = request.service_id.context_id.context_uuid.uuid + service_uuid = request.service_id.service_uuid.uuid + raise AlreadyExistsException( + 'Service', service_uuid, extra_details='context_uuid={:s}'.format(str(context_uuid))) - LOGGER.info('[CreateService] db_service = {:s}'.format(str(db_service.dump( - include_endpoint_ids=True, include_constraints=True, include_config_rules=True)))) - - sync_service_to_context(db_service, self.context_client) - return ServiceId(**db_service.dump_id()) + # just create the service in the Context database to lock the service_id + # update will perform changes on the resources + service_id = context_client.SetService(request) + return service_id @safe_and_metered_rpc_method(METRICS, LOGGER) def UpdateService(self, request : Service, context : grpc.ServicerContext) -> ServiceId: LOGGER.info('[UpdateService] begin ; request = {:s}'.format(grpc_message_to_json_string(request))) - service_id = request.service_id - service_uuid = service_id.service_uuid.uuid - service_context_uuid = service_id.context_id.context_uuid.uuid - - pce = PathComputationElement() - 
pce.load_topology(self.context_client) - pce.load_connectivity(self.context_client, service_id) - #pce.dump_topology_to_file('../data/topo.dot') - #pce.dump_connectivity_to_file('../data/conn-before.txt') - connectivity = pce.route_service(request) - #pce.dump_connectivity_to_file('../data/conn-after.txt') - - LOGGER.info('[UpdateService] connectivity = {:s}'.format(str(dump_connectivity(connectivity)))) - - if connectivity is None: - # just update local database and context - str_service_key = key_to_str([service_context_uuid, service_uuid]) - db_service = get_object(self.database, ServiceModel, str_service_key, raise_if_not_found=False) - LOGGER.info('[UpdateService] before db_service = {:s}'.format(str(db_service.dump( - include_endpoint_ids=True, include_constraints=True, include_config_rules=True)))) - db_devices : Dict[str, DeviceModel] = sync_devices_from_context( - self.context_client, self.database, db_service, request.service_endpoint_ids) - LOGGER.info('[UpdateService] db_devices[{:d}] = {:s}'.format( - len(db_devices), str({ - device_uuid:db_device.dump(include_config_rules=True, include_drivers=True, include_endpoints=True) - for device_uuid,db_device in db_devices.items() - }))) - sync_service_from_context(service_context_uuid, service_uuid, self.context_client, self.database) - db_service,_ = update_service_in_local_database(self.database, request) - LOGGER.info('[UpdateService] after db_service = {:s}'.format(str(db_service.dump( - include_endpoint_ids=True, include_constraints=True, include_config_rules=True)))) - sync_service_to_context(db_service, self.context_client) - else: - for sub_service, sub_connections in connectivity.get('requirements', []): - for sub_connection in sub_connections: - update_service( - self.database, self.context_client, self.device_client, self.service_handler_factory, - sub_service, sub_connection) - - for connection in connectivity.get('connections'): - db_service = update_service( - self.database, self.context_client, 
self.device_client, self.service_handler_factory, - request, connection) - - str_service_key = key_to_str([service_context_uuid, service_uuid]) - db_service = get_object(self.database, ServiceModel, str_service_key, raise_if_not_found=False) - if db_service is None: raise NotFoundException('Service', str_service_key) - - LOGGER.info('[UpdateService] db_service = {:s}'.format(str(db_service.dump( - include_endpoint_ids=True, include_constraints=True, include_config_rules=True)))) - - return ServiceId(**db_service.dump_id()) + # Set service status to "SERVICESTATUS_PLANNED" to ensure rest of components are aware the service is + # being modified. + context_client = ContextClient() + _service : Optional[Service] = get_service(context_client, request.service_id) + service = Service() + service.CopyFrom(request if _service is None else _service) + service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED + context_client.SetService(service) + + num_disjoint_paths = None + for constraint in request.service_constraints: + if constraint.WhichOneof('constraint') == 'sla_availability': + num_disjoint_paths = constraint.sla_availability.num_disjoint_paths + break + + tasks_scheduler = TasksScheduler(self.service_handler_factory) + if len(request.service_endpoint_ids) >= (2 if num_disjoint_paths is None else 4): + pathcomp_request = PathCompRequest() + pathcomp_request.services.append(request) + + if num_disjoint_paths is None: + pathcomp_request.shortest_path.Clear() + else: + pathcomp_request.k_disjoint_path.num_disjoint = num_disjoint_paths + + pathcomp = PathCompClient() + LOGGER.info('pathcomp_request={:s}'.format(grpc_message_to_json_string(pathcomp_request))) + pathcomp_reply = pathcomp.Compute(pathcomp_request) + LOGGER.info('pathcomp_reply={:s}'.format(grpc_message_to_json_string(pathcomp_reply))) + + # Feed TaskScheduler with this path computation reply. 
TaskScheduler identifies inter-dependencies among + # the services and connections retrieved and produces a schedule of tasks (an ordered list of tasks to be + # executed) to implement the requested create/update operation. + tasks_scheduler.compose_from_pathcompreply(pathcomp_reply, is_delete=False) + + tasks_scheduler.execute_all() + return request.service_id @safe_and_metered_rpc_method(METRICS, LOGGER) def DeleteService(self, request : ServiceId, context : grpc.ServicerContext) -> Empty: LOGGER.info('[DeleteService] begin ; request = {:s}'.format(grpc_message_to_json_string(request))) - pce = PathComputationElement() - pce.load_topology(self.context_client) - pce.load_connectivity(self.context_client, request) - #pce.dump_topology_to_file('../data/topo.dot') - #pce.dump_connectivity_to_file('../data/conn-before.txt') - connectivity = pce.get_connectivity_from_service_id(request) - if connectivity is None: return Empty() - #pce.dump_connectivity_to_file('../data/conn-after.txt') - - LOGGER.info('[DeleteService] connectivity = {:s}'.format(str(dump_connectivity(connectivity)))) - - for connection in connectivity.get('connections'): - delete_service( - self.database, self.context_client, self.device_client, self.service_handler_factory, - request, connection) - - for sub_service, sub_connections in connectivity.get('requirements', []): - for sub_connection in sub_connections: - delete_service( - self.database, self.context_client, self.device_client, self.service_handler_factory, - sub_service.service_id, sub_connection) - + context_client = ContextClient() + + # Set service status to "SERVICESTATUS_PENDING_REMOVAL" to ensure rest of components are aware the service is + # being modified. 
+ _service : Optional[Service] = get_service(context_client, request) + if _service is None: raise Exception('Service({:s}) not found'.format(grpc_message_to_json_string(request))) + service = Service() + service.CopyFrom(_service) + service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PENDING_REMOVAL + context_client.SetService(service) + + # Feed TaskScheduler with this service and the sub-services and sub-connections related to this service. + # TaskScheduler identifies inter-dependencies among them and produces a schedule of tasks (an ordered list of + # tasks to be executed) to implement the requested delete operation. + tasks_scheduler = TasksScheduler(self.service_handler_factory) + tasks_scheduler.compose_from_service(service, is_delete=True) + tasks_scheduler.execute_all() return Empty() diff --git a/src/service/service/Tools.py b/src/service/service/Tools.py deleted file mode 100644 index 4386793c52a979cd0b3d86701a3476314857f3ac..0000000000000000000000000000000000000000 --- a/src/service/service/Tools.py +++ /dev/null @@ -1,342 +0,0 @@ -# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import logging -from typing import Any, Dict, List, Optional, Tuple -from common.orm.Database import Database -from common.orm.HighLevel import get_object, get_related_objects -from common.orm.backend.Tools import key_to_str -from common.proto.context_pb2 import ( - ConfigRule, Connection, Constraint, EndPointId, Service, ServiceId, ServiceStatusEnum) -from common.rpc_method_wrapper.ServiceExceptions import ( - InvalidArgumentException, NotFoundException, OperationFailedException) -from context.client.ContextClient import ContextClient -from device.client.DeviceClient import DeviceClient -from .database.ConfigModel import ( - ConfigModel, ConfigRuleModel, ORM_ConfigActionEnum, get_config_rules, grpc_config_rules_to_raw) -from .database.ConstraintModel import ConstraintModel, ConstraintsModel, get_constraints, grpc_constraints_to_raw -from .database.DatabaseDeviceTools import sync_device_from_context -from .database.DatabaseServiceTools import ( - delete_service_from_context, sync_service_from_context, sync_service_to_context, update_service_in_local_database) -from .database.DeviceModel import DeviceModel, DriverModel -from .database.EndPointModel import EndPointModel, grpc_endpointids_to_raw -from .database.RelationModels import ServiceEndPointModel -from .database.ServiceModel import ServiceModel -from .service_handler_api._ServiceHandler import _ServiceHandler -from .service_handler_api.FilterFields import FilterFieldEnum -from .service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory -from .service_handler_api.Tools import ( - check_errors_deleteconfig, check_errors_deleteconstraint, check_errors_deleteendpoint, check_errors_setconfig, - check_errors_setconstraint, check_errors_setendpoint) - -LOGGER = logging.getLogger(__name__) - -def sync_devices_from_context( - context_client : ContextClient, database : Database, db_service : Optional[ServiceModel], - service_endpoint_ids : List[EndPointId] - ) -> Dict[str, DeviceModel]: - - 
required_device_uuids = set() - if db_service is not None: - db_endpoints = get_related_objects(db_service, ServiceEndPointModel, 'endpoint_fk') - for db_endpoint in db_endpoints: - db_device = get_object(database, DeviceModel, db_endpoint.device_fk, raise_if_not_found=False) - required_device_uuids.add(db_device.device_uuid) - - for endpoint_id in service_endpoint_ids: - required_device_uuids.add(endpoint_id.device_id.device_uuid.uuid) - - db_devices = {} - devices_not_found = set() - for device_uuid in required_device_uuids: - sync_device_from_context(device_uuid, context_client, database) - db_device = get_object(database, DeviceModel, device_uuid, raise_if_not_found=False) - if db_device is None: - devices_not_found.add(device_uuid) - else: - db_devices[device_uuid] = db_device - - if len(devices_not_found) > 0: - extra_details = ['Devices({:s}) cannot be retrieved from Context'.format(str(devices_not_found))] - raise NotFoundException('Device', '...', extra_details=extra_details) - - return db_devices - -def classify_config_rules( - db_service : ServiceModel, service_config_rules : List[ConfigRule], - resources_to_set: List[Tuple[str, Any]], resources_to_delete : List[Tuple[str, Any]]): - - context_config_rules = get_config_rules(db_service.database, db_service.pk, 'running') - context_config_rules = {config_rule[1]: config_rule[2] for config_rule in context_config_rules} - #LOGGER.info('[classify_config_rules] context_config_rules = {:s}'.format(str(context_config_rules))) - - request_config_rules = grpc_config_rules_to_raw(service_config_rules) - #LOGGER.info('[classify_config_rules] request_config_rules = {:s}'.format(str(request_config_rules))) - - for config_rule in request_config_rules: - action, key, value = config_rule - if action == ORM_ConfigActionEnum.SET: - if (key not in context_config_rules) or (context_config_rules[key] != value): - resources_to_set.append((key, value)) - elif action == ORM_ConfigActionEnum.DELETE: - if key in 
context_config_rules: - resources_to_delete.append((key, value)) - else: - raise InvalidArgumentException('config_rule.action', str(action), extra_details=str(request_config_rules)) - - #LOGGER.info('[classify_config_rules] resources_to_set = {:s}'.format(str(resources_to_set))) - #LOGGER.info('[classify_config_rules] resources_to_delete = {:s}'.format(str(resources_to_delete))) - -def classify_constraints( - db_service : ServiceModel, service_constraints : List[Constraint], - constraints_to_set: List[Tuple[str, str]], constraints_to_delete : List[Tuple[str, str]]): - - context_constraints = get_constraints(db_service.database, db_service.pk, 'running') - context_constraints = {constraint[0]: constraint[1] for constraint in context_constraints} - #LOGGER.info('[classify_constraints] context_constraints = {:s}'.format(str(context_constraints))) - - request_constraints = grpc_constraints_to_raw(service_constraints) - #LOGGER.info('[classify_constraints] request_constraints = {:s}'.format(str(request_constraints))) - - for constraint in request_constraints: - constraint_type, constraint_value = constraint - if constraint_type in context_constraints: - if context_constraints[constraint_type] != constraint_value: - constraints_to_set.append(constraint) - else: - constraints_to_set.append(constraint) - context_constraints.pop(constraint_type, None) - - for constraint in context_constraints: - constraints_to_delete.append(constraint) - - #LOGGER.info('[classify_constraints] constraints_to_set = {:s}'.format(str(constraints_to_set))) - #LOGGER.info('[classify_constraints] constraints_to_delete = {:s}'.format(str(constraints_to_delete))) - -def get_service_endpointids(db_service : ServiceModel) -> List[Tuple[str, str, Optional[str]]]: - db_endpoints : List[EndPointModel] = get_related_objects(db_service, ServiceEndPointModel, 'endpoint_fk') - endpoint_ids = [db_endpoint.dump_id() for db_endpoint in db_endpoints] - return [ - (endpoint_id['device_id']['device_uuid']['uuid'], 
endpoint_id['endpoint_uuid']['uuid'], - endpoint_id.get('topology_id', {}).get('topology_uuid', {}).get('uuid', None)) - for endpoint_id in endpoint_ids - ] - -def classify_endpointids( - db_service : ServiceModel, service_endpoint_ids : List[EndPointId], - endpointids_to_set: List[Tuple[str, str, Optional[str]]], - endpointids_to_delete : List[Tuple[str, str, Optional[str]]]): - - context_endpoint_ids = get_service_endpointids(db_service) - #LOGGER.info('[classify_endpointids] context_endpoint_ids = {:s}'.format(str(context_endpoint_ids))) - context_endpoint_ids = set(context_endpoint_ids) - #LOGGER.info('[classify_endpointids] context_endpoint_ids = {:s}'.format(str(context_endpoint_ids))) - - request_endpoint_ids = grpc_endpointids_to_raw(service_endpoint_ids) - #LOGGER.info('[classify_endpointids] request_endpoint_ids = {:s}'.format(str(request_endpoint_ids))) - - if len(service_endpoint_ids) != 2: return - for endpoint_id in request_endpoint_ids: - #if endpoint_id not in context_endpoint_ids: - # endpointids_to_set.append(endpoint_id) - #context_endpoint_ids.discard(endpoint_id) - endpointids_to_set.append(endpoint_id) - - #for endpoint_id in context_endpoint_ids: - # endpointids_to_delete.append(endpoint_id) - - #LOGGER.info('[classify_endpointids] endpointids_to_set = {:s}'.format(str(endpointids_to_set))) - #LOGGER.info('[classify_endpointids] endpointids_to_delete = {:s}'.format(str(endpointids_to_delete))) - -def get_service_handler_class( - service_handler_factory : ServiceHandlerFactory, db_service : ServiceModel, db_devices : Dict[str, DeviceModel] - ) -> Optional[_ServiceHandler]: - - str_service_key = db_service.pk - database = db_service.database - - # Assume all devices involved in the service must support at least one driver in common - device_drivers = None - for _,db_device in db_devices.items(): - db_driver_pks = db_device.references(DriverModel) - db_driver_names = [DriverModel(database, pk).driver.value for pk,_ in db_driver_pks] - if 
device_drivers is None: - device_drivers = set(db_driver_names) - else: - device_drivers.intersection_update(db_driver_names) - - filter_fields = { - FilterFieldEnum.SERVICE_TYPE.value : db_service.service_type.value, # must be supported - FilterFieldEnum.DEVICE_DRIVER.value : device_drivers, # at least one must be supported - } - - msg = 'Selecting service handler for service({:s}) with filter_fields({:s})...' - LOGGER.info(msg.format(str(str_service_key), str(filter_fields))) - service_handler_class = service_handler_factory.get_service_handler_class(**filter_fields) - msg = 'ServiceHandler({:s}) selected for service({:s}) with filter_fields({:s})...' - LOGGER.info(msg.format(str(service_handler_class.__name__), str(str_service_key), str(filter_fields))) - return service_handler_class - -def update_service( - database : Database, context_client : ContextClient, device_client : DeviceClient, - service_handler_factory : ServiceHandlerFactory, service : Service, connection : Connection - ) -> ServiceModel: - - service_id = service.service_id - service_uuid = service_id.service_uuid.uuid - service_context_uuid = service_id.context_id.context_uuid.uuid - str_service_key = key_to_str([service_context_uuid, service_uuid]) - - # Sync before updating service to ensure we have devices, endpoints, constraints, and config rules to be - # set/deleted before actuallymodifying them in the local in-memory database. 
- - sync_service_from_context(service_context_uuid, service_uuid, context_client, database) - db_service = get_object(database, ServiceModel, str_service_key, raise_if_not_found=False) - db_devices = sync_devices_from_context(context_client, database, db_service, service.service_endpoint_ids) - - if db_service is None: db_service,_ = update_service_in_local_database(database, service) - LOGGER.info('[update_service] db_service = {:s}'.format(str(db_service.dump( - include_endpoint_ids=True, include_constraints=True, include_config_rules=True)))) - - resources_to_set : List[Tuple[str, Any]] = [] # resource_key, resource_value - resources_to_delete : List[Tuple[str, Any]] = [] # resource_key, resource_value - classify_config_rules(db_service, service.service_config.config_rules, resources_to_set, resources_to_delete) - - constraints_to_set : List[Tuple[str, str]] = [] # constraint_type, constraint_value - constraints_to_delete : List[Tuple[str, str]] = [] # constraint_type, constraint_value - classify_constraints(db_service, service.service_constraints, constraints_to_set, constraints_to_delete) - - endpointids_to_set : List[Tuple[str, str, Optional[str]]] = [] # device_uuid, endpoint_uuid, topology_uuid - endpointids_to_delete : List[Tuple[str, str, Optional[str]]] = [] # device_uuid, endpoint_uuid, topology_uuid - classify_endpointids(db_service, service.service_endpoint_ids, endpointids_to_set, endpointids_to_delete) - - service_handler_class = get_service_handler_class(service_handler_factory, db_service, db_devices) - service_handler_settings = {} - service_handler : _ServiceHandler = service_handler_class( - db_service, database, context_client, device_client, **service_handler_settings) - - errors = [] - - if len(errors) == 0: - results_deleteendpoint = service_handler.DeleteEndpoint(endpointids_to_delete) - errors.extend(check_errors_deleteendpoint(endpointids_to_delete, results_deleteendpoint)) - - if len(errors) == 0: - results_deleteconstraint = 
service_handler.DeleteConstraint(constraints_to_delete) - errors.extend(check_errors_deleteconstraint(constraints_to_delete, results_deleteconstraint)) - - if len(errors) == 0: - results_deleteconfig = service_handler.DeleteConfig(resources_to_delete) - errors.extend(check_errors_deleteconfig(resources_to_delete, results_deleteconfig)) - - if len(errors) == 0: - results_setconfig = service_handler.SetConfig(resources_to_set) - errors.extend(check_errors_setconfig(resources_to_set, results_setconfig)) - - if len(errors) == 0: - results_setconstraint = service_handler.SetConstraint(constraints_to_set) - errors.extend(check_errors_setconstraint(constraints_to_set, results_setconstraint)) - - if len(errors) == 0: - results_setendpoint = service_handler.SetEndpoint(endpointids_to_set) - errors.extend(check_errors_setendpoint(endpointids_to_set, results_setendpoint)) - - if len(errors) > 0: - raise OperationFailedException('UpdateService', extra_details=errors) - - LOGGER.info('[update_service] len(service.service_endpoint_ids) = {:d}'.format(len(service.service_endpoint_ids))) - if len(service.service_endpoint_ids) >= 2: - service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_ACTIVE - - db_service,_ = update_service_in_local_database(database, service) - LOGGER.info('[update_service] db_service = {:s}'.format(str(db_service.dump( - include_endpoint_ids=True, include_constraints=True, include_config_rules=True)))) - - sync_service_to_context(db_service, context_client) - context_client.SetConnection(connection) - return db_service - -def delete_service( - database : Database, context_client : ContextClient, device_client : DeviceClient, - service_handler_factory : ServiceHandlerFactory, service_id : ServiceId, connection : Connection - ) -> None: - - context_client.RemoveConnection(connection.connection_id) - - service_uuid = service_id.service_uuid.uuid - service_context_uuid = service_id.context_id.context_uuid.uuid - str_service_key = 
key_to_str([service_context_uuid, service_uuid]) - - # Sync before updating service to ensure we have devices, endpoints, constraints, and config rules to be - # set/deleted before actuallymodifying them in the local in-memory database. - - sync_service_from_context(service_context_uuid, service_uuid, context_client, database) - db_service : ServiceModel = get_object(database, ServiceModel, str_service_key, raise_if_not_found=False) - if db_service is None: return - LOGGER.info('[delete_service] db_service = {:s}'.format(str(db_service.dump( - include_endpoint_ids=True, include_constraints=True, include_config_rules=True)))) - - db_devices = sync_devices_from_context(context_client, database, db_service, []) - - resources_to_delete : List[Tuple[str, str]] = [ # resource_key, resource_value - (config_rule[1], config_rule[2]) - for config_rule in get_config_rules(db_service.database, db_service.pk, 'running') - ] - - constraints_to_delete : List[Tuple[str, str]] = [ # constraint_type, constraint_value - (constraint[0], constraint[1]) - for constraint in get_constraints(db_service.database, db_service.pk, 'running') - ] - - # device_uuid, endpoint_uuid, topology_uuid - endpointids_to_delete : List[Tuple[str, str, Optional[str]]] = list(set(get_service_endpointids(db_service))) - - service_handler_class = get_service_handler_class(service_handler_factory, db_service, db_devices) - service_handler_settings = {} - service_handler : _ServiceHandler = service_handler_class( - db_service, database, context_client, device_client, **service_handler_settings) - - errors = [] - - if len(errors) == 0: - results_deleteendpoint = service_handler.DeleteEndpoint(endpointids_to_delete) - errors.extend(check_errors_deleteendpoint(endpointids_to_delete, results_deleteendpoint)) - - if len(errors) == 0: - results_deleteconstraint = service_handler.DeleteConstraint(constraints_to_delete) - errors.extend(check_errors_deleteconstraint(constraints_to_delete, results_deleteconstraint)) - - 
if len(errors) == 0: - results_deleteconfig = service_handler.DeleteConfig(resources_to_delete) - errors.extend(check_errors_deleteconfig(resources_to_delete, results_deleteconfig)) - - if len(errors) > 0: - raise OperationFailedException('DeleteService', extra_details=errors) - - delete_service_from_context(db_service, context_client) - - for db_service_endpoint_pk,_ in db_service.references(ServiceEndPointModel): - ServiceEndPointModel(database, db_service_endpoint_pk).delete() - - db_running_config = ConfigModel(database, db_service.service_config_fk) - for db_config_rule_pk,_ in db_running_config.references(ConfigRuleModel): - ConfigRuleModel(database, db_config_rule_pk).delete() - - db_running_constraints = ConstraintsModel(database, db_service.service_constraints_fk) - for db_constraint_pk,_ in db_running_constraints.references(ConstraintModel): - ConstraintModel(database, db_constraint_pk).delete() - - db_service.delete() - db_running_config.delete() - db_running_constraints.delete() diff --git a/src/service/service/__main__.py b/src/service/service/__main__.py index 1a67a309ff19bda2bf3174c80dfb908e99f72d14..04cf00b06bff809f837833964a9e093f18888ac2 100644 --- a/src/service/service/__main__.py +++ b/src/service/service/__main__.py @@ -33,14 +33,16 @@ def main(): global LOGGER # pylint: disable=global-statement log_level = get_log_level() - logging.basicConfig(level=log_level) + logging.basicConfig(level=log_level, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s") LOGGER = logging.getLogger(__name__) wait_for_environment_variables([ - get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_HOST ), - get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_PORT_GRPC), - get_env_var_name(ServiceNameEnum.DEVICE, ENVVAR_SUFIX_SERVICE_HOST ), - get_env_var_name(ServiceNameEnum.DEVICE, ENVVAR_SUFIX_SERVICE_PORT_GRPC), + get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_HOST ), + get_env_var_name(ServiceNameEnum.CONTEXT, 
ENVVAR_SUFIX_SERVICE_PORT_GRPC), + get_env_var_name(ServiceNameEnum.DEVICE, ENVVAR_SUFIX_SERVICE_HOST ), + get_env_var_name(ServiceNameEnum.DEVICE, ENVVAR_SUFIX_SERVICE_PORT_GRPC), + get_env_var_name(ServiceNameEnum.PATHCOMP, ENVVAR_SUFIX_SERVICE_HOST ), + get_env_var_name(ServiceNameEnum.PATHCOMP, ENVVAR_SUFIX_SERVICE_PORT_GRPC), ]) signal.signal(signal.SIGINT, signal_handler) diff --git a/src/service/service/service_handler_api/FilterFields.py b/src/service/service/service_handler_api/FilterFields.py index 9d8f9ad28f69ef606bcf2e06e3a6a17514f016b4..98113ba30fb095a29a2142e592b7759d2634eab9 100644 --- a/src/service/service/service_handler_api/FilterFields.py +++ b/src/service/service/service_handler_api/FilterFields.py @@ -13,15 +13,30 @@ # limitations under the License. from enum import Enum -from service.service.database.ServiceModel import ORM_ServiceTypeEnum -from service.service.database.DeviceModel import ORM_DeviceDriverEnum +from common.proto.context_pb2 import DeviceDriverEnum, ServiceTypeEnum class FilterFieldEnum(Enum): SERVICE_TYPE = 'service_type' DEVICE_DRIVER = 'device_driver' +SERVICE_TYPE_VALUES = { + ServiceTypeEnum.SERVICETYPE_UNKNOWN, + ServiceTypeEnum.SERVICETYPE_L3NM, + ServiceTypeEnum.SERVICETYPE_L2NM, + ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE, +} + +DEVICE_DRIVER_VALUES = { + DeviceDriverEnum.DEVICEDRIVER_UNDEFINED, + DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, + DeviceDriverEnum.DEVICEDRIVER_TRANSPORT_API, + DeviceDriverEnum.DEVICEDRIVER_P4, + DeviceDriverEnum.DEVICEDRIVER_IETF_NETWORK_TOPOLOGY, + DeviceDriverEnum.DEVICEDRIVER_ONF_TR_352, +} + # Map allowed filter fields to allowed values per Filter field. 
If no restriction (free text) None is specified FILTER_FIELD_ALLOWED_VALUES = { - FilterFieldEnum.SERVICE_TYPE.value : {i.value for i in ORM_ServiceTypeEnum}, - FilterFieldEnum.DEVICE_DRIVER.value : {i.value for i in ORM_DeviceDriverEnum}, + FilterFieldEnum.SERVICE_TYPE.value : SERVICE_TYPE_VALUES, + FilterFieldEnum.DEVICE_DRIVER.value : DEVICE_DRIVER_VALUES, } diff --git a/src/service/service/service_handler_api/ServiceHandlerFactory.py b/src/service/service/service_handler_api/ServiceHandlerFactory.py index 8b7223a95613a8b490862bb3dad091baf3b38388..09a56775d4f391d71fe5ac30f9be74430120e306 100644 --- a/src/service/service/service_handler_api/ServiceHandlerFactory.py +++ b/src/service/service/service_handler_api/ServiceHandlerFactory.py @@ -14,7 +14,9 @@ import logging, operator from enum import Enum -from typing import Any, Dict, Iterable, List, Set, Tuple +from typing import Any, Dict, Iterable, List, Optional, Set, Tuple +from common.proto.context_pb2 import Device, Service +from common.tools.grpc.Tools import grpc_message_to_json_string from service.service.service_handler_api._ServiceHandler import _ServiceHandler from .Exceptions import ( UnsatisfiedFilterException, UnsupportedServiceHandlerClassException, UnsupportedFilterFieldException, @@ -91,3 +93,40 @@ class ServiceHandlerFactory: candidate_service_handler_classes = sorted( candidate_service_handler_classes.items(), key=operator.itemgetter(1), reverse=True) return candidate_service_handler_classes[0][0] + +def get_device_supported_drivers(device : Device) -> Set[int]: + return {device_driver for device_driver in device.device_drivers} + +def get_common_device_drivers(drivers_per_device : List[Set[int]]) -> Set[int]: + common_device_drivers = None + for device_drivers in drivers_per_device: + if common_device_drivers is None: + common_device_drivers = set(device_drivers) + else: + common_device_drivers.intersection_update(device_drivers) + if common_device_drivers is None: common_device_drivers = set() + 
 return common_device_drivers + +def get_service_handler_class( + service_handler_factory : ServiceHandlerFactory, service : Service, connection_devices : Dict[str, Device] +) -> Optional[_ServiceHandler]: + + str_service_key = grpc_message_to_json_string(service.service_id) + + # Assume all devices involved in the service's connection must support at least one driver in common + common_device_drivers = get_common_device_drivers([ + get_device_supported_drivers(device) + for device in connection_devices.values() + ]) + + filter_fields = { + FilterFieldEnum.SERVICE_TYPE.value : service.service_type, # must be supported + FilterFieldEnum.DEVICE_DRIVER.value : common_device_drivers, # at least one must be supported + } + + MSG = 'Selecting service handler for service({:s}) with filter_fields({:s})...' + LOGGER.info(MSG.format(str(str_service_key), str(filter_fields))) + service_handler_class = service_handler_factory.get_service_handler_class(**filter_fields) + MSG = 'ServiceHandler({:s}) selected for service({:s}) with filter_fields({:s})...' + LOGGER.info(MSG.format(str(service_handler_class.__name__), str(str_service_key), str(filter_fields))) + return service_handler_class diff --git a/src/service/service/service_handler_api/_ServiceHandler.py b/src/service/service/service_handler_api/_ServiceHandler.py index c72efa5c39c71a0a1a77c5238aabd35de54df9f7..9cbe3f49e8594badf3b419b24154cb59a30a17bf 100644 --- a/src/service/service/service_handler_api/_ServiceHandler.py +++ b/src/service/service/service_handler_api/_ServiceHandler.py @@ -42,14 +42,18 @@ class _ServiceHandler: """ raise NotImplementedError() - def SetEndpoint(self, endpoints: List[Tuple[str, str, Optional[str]]]) \ - -> List[Union[bool, Exception]]: - """ Create/Update service endpoints. + def SetEndpoint( + self, endpoints : List[Tuple[str, str, Optional[str]]], + connection_uuid : Optional[str] = None + ) -> List[Union[bool, Exception]]: + """ Create/Update service endpoints from a list. 
 Parameters: endpoints: List[Tuple[str, str, Optional[str]]] List of tuples, each containing a device_uuid, endpoint_uuid and, optionally, the topology_uuid of the endpoint to be added. + connection_uuid : Optional[str] + If specified, is the UUID of the connection this endpoint is associated with. Returns: results: List[Union[bool, Exception]] List of results for endpoint changes requested. @@ -60,14 +64,18 @@ class _ServiceHandler: """ raise NotImplementedError() - def DeleteEndpoint(self, endpoints: List[Tuple[str, str, Optional[str]]]) \ - -> List[Union[bool, Exception]]: - """ Delete service endpoints. + def DeleteEndpoint( + self, endpoints : List[Tuple[str, str, Optional[str]]], + connection_uuid : Optional[str] = None + ) -> List[Union[bool, Exception]]: + """ Delete service endpoints from a list. Parameters: endpoints: List[Tuple[str, str, Optional[str]]] List of tuples, each containing a device_uuid, endpoint_uuid, and the topology_uuid of the endpoint to be removed. + connection_uuid : Optional[str] + If specified, is the UUID of the connection this endpoint is associated with. Returns: results: List[Union[bool, Exception]] List of results for endpoint deletions requested. diff --git a/src/service/service/service_handlers/__init__.py b/src/service/service/service_handlers/__init__.py index 6abe4048fb6771efc0a44f11aa40fc7739a87648..89e717722d152ce978dca10a768119d9e9adaf1e 100644 --- a/src/service/service/service_handlers/__init__.py +++ b/src/service/service/service_handlers/__init__.py @@ -12,28 +12,36 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ..service_handler_api.FilterFields import FilterFieldEnum, ORM_DeviceDriverEnum, ORM_ServiceTypeEnum +from common.proto.context_pb2 import DeviceDriverEnum, ServiceTypeEnum +from ..service_handler_api.FilterFields import FilterFieldEnum +from .l2nm_emulated.L2NMEmulatedServiceHandler import L2NMEmulatedServiceHandler from .l3nm_emulated.L3NMEmulatedServiceHandler import L3NMEmulatedServiceHandler from .l3nm_openconfig.L3NMOpenConfigServiceHandler import L3NMOpenConfigServiceHandler from .tapi_tapi.TapiServiceHandler import TapiServiceHandler SERVICE_HANDLERS = [ + (L2NMEmulatedServiceHandler, [ + { + FilterFieldEnum.SERVICE_TYPE : ServiceTypeEnum.SERVICETYPE_L2NM, + FilterFieldEnum.DEVICE_DRIVER : DeviceDriverEnum.DEVICEDRIVER_UNDEFINED, + } + ]), (L3NMEmulatedServiceHandler, [ { - FilterFieldEnum.SERVICE_TYPE : ORM_ServiceTypeEnum.L3NM, - FilterFieldEnum.DEVICE_DRIVER : ORM_DeviceDriverEnum.UNDEFINED, + FilterFieldEnum.SERVICE_TYPE : ServiceTypeEnum.SERVICETYPE_L3NM, + FilterFieldEnum.DEVICE_DRIVER : DeviceDriverEnum.DEVICEDRIVER_UNDEFINED, } ]), (L3NMOpenConfigServiceHandler, [ { - FilterFieldEnum.SERVICE_TYPE : ORM_ServiceTypeEnum.L3NM, - FilterFieldEnum.DEVICE_DRIVER : ORM_DeviceDriverEnum.OPENCONFIG, + FilterFieldEnum.SERVICE_TYPE : ServiceTypeEnum.SERVICETYPE_L3NM, + FilterFieldEnum.DEVICE_DRIVER : DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, } ]), (TapiServiceHandler, [ { - FilterFieldEnum.SERVICE_TYPE : ORM_ServiceTypeEnum.TAPI_CONNECTIVITY_SERVICE, - FilterFieldEnum.DEVICE_DRIVER : ORM_DeviceDriverEnum.TRANSPORT_API, + FilterFieldEnum.SERVICE_TYPE : ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE, + FilterFieldEnum.DEVICE_DRIVER : DeviceDriverEnum.DEVICEDRIVER_TRANSPORT_API, } ]), ] diff --git a/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py b/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py new file mode 100644 index 0000000000000000000000000000000000000000..d173f3f270ebbd18a6440f55f7e82cf66720c03a --- 
/dev/null +++ b/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py @@ -0,0 +1,132 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Dict, List +from common.tools.object_factory.ConfigRule import json_config_rule_delete, json_config_rule_set +from service.service.service_handler_api.AnyTreeTools import TreeNode + +def setup_config_rules( + service_uuid : str, connection_uuid : str, device_uuid : str, endpoint_uuid : str, + service_settings : TreeNode, endpoint_settings : TreeNode +) -> List[Dict]: + + json_settings : Dict = {} if service_settings is None else service_settings.value + json_endpoint_settings : Dict = {} if endpoint_settings is None else endpoint_settings.value + + mtu = json_settings.get('mtu', 1450 ) # 1512 + #address_families = json_settings.get('address_families', [] ) # ['IPV4'] + #bgp_as = json_settings.get('bgp_as', 0 ) # 65000 + #bgp_route_target = json_settings.get('bgp_route_target', '0:0') # 65000:333 + + router_id = json_endpoint_settings.get('router_id', '0.0.0.0') # '10.95.0.10' + #route_distinguisher = json_endpoint_settings.get('route_distinguisher', '0:0' ) # '60001:801' + sub_interface_index = json_endpoint_settings.get('sub_interface_index', 0 ) # 1 + vlan_id = json_endpoint_settings.get('vlan_id', 1 ) # 400 + #address_ip = json_endpoint_settings.get('address_ip', '0.0.0.0') # '2.2.2.1' + #address_prefix = 
json_endpoint_settings.get('address_prefix', 24 ) # 30 + remote_router = json_endpoint_settings.get('remote_router', '0.0.0.0') # '5.5.5.5' + circuit_id = json_endpoint_settings.get('circuit_id', '000' ) # '111' + + if_cirid_name = '{:s}.{:s}'.format(endpoint_uuid, str(circuit_id)) + network_instance_name = 'ELAN-AC:{:s}'.format(str(circuit_id)) + connection_point_id = 'VC-1' + + json_config_rules = [ + json_config_rule_set( + '/network_instance[default]', + {'name': 'default', 'type': 'DEFAULT_INSTANCE', 'router_id': router_id}), + + json_config_rule_set( + '/network_instance[default]/protocols[OSPF]', + {'name': 'default', 'identifier': 'OSPF', 'protocol_name': 'OSPF'}), + + json_config_rule_set( + '/network_instance[default]/protocols[STATIC]', + {'name': 'default', 'identifier': 'STATIC', 'protocol_name': 'STATIC'}), + + json_config_rule_set( + '/network_instance[{:s}]'.format(network_instance_name), + {'name': network_instance_name, 'type': 'L2VSI'}), + + json_config_rule_set( + '/interface[{:s}]/subinterface[0]'.format(if_cirid_name, sub_interface_index), + {'name': if_cirid_name, 'type': 'l2vlan', 'index': sub_interface_index, 'vlan_id': vlan_id}), + + json_config_rule_set( + '/network_instance[{:s}]/interface[{:s}]'.format(network_instance_name, if_cirid_name), + {'name': network_instance_name, 'id': if_cirid_name, 'interface': if_cirid_name, 'subinterface': 0}), + + json_config_rule_set( + '/network_instance[{:s}]/connection_point[{:s}]'.format(network_instance_name, connection_point_id), + {'name': network_instance_name, 'connection_point': connection_point_id, 'VC_ID': circuit_id, + 'remote_system': remote_router}), + ] + return json_config_rules + +def teardown_config_rules( + service_uuid : str, connection_uuid : str, device_uuid : str, endpoint_uuid : str, + service_settings : TreeNode, endpoint_settings : TreeNode +) -> List[Dict]: + + json_settings : Dict = {} if service_settings is None else service_settings.value + json_endpoint_settings : Dict = 
{} if endpoint_settings is None else endpoint_settings.value + + mtu = json_settings.get('mtu', 1450 ) # 1512 + #address_families = json_settings.get('address_families', [] ) # ['IPV4'] + #bgp_as = json_settings.get('bgp_as', 0 ) # 65000 + #bgp_route_target = json_settings.get('bgp_route_target', '0:0') # 65000:333 + + router_id = json_endpoint_settings.get('router_id', '0.0.0.0') # '10.95.0.10' + #route_distinguisher = json_endpoint_settings.get('route_distinguisher', '0:0' ) # '60001:801' + sub_interface_index = json_endpoint_settings.get('sub_interface_index', 0 ) # 1 + vlan_id = json_endpoint_settings.get('vlan_id', 1 ) # 400 + #address_ip = json_endpoint_settings.get('address_ip', '0.0.0.0') # '2.2.2.1' + #address_prefix = json_endpoint_settings.get('address_prefix', 24 ) # 30 + remote_router = json_endpoint_settings.get('remote_router', '0.0.0.0') # '5.5.5.5' + circuit_id = json_endpoint_settings.get('circuit_id', '000' ) # '111' + + if_cirid_name = '{:s}.{:s}'.format(endpoint_uuid, str(circuit_id)) + network_instance_name = 'ELAN-AC:{:s}'.format(str(circuit_id)) + connection_point_id = 'VC-1' + + json_config_rules = [ + json_config_rule_delete( + '/network_instance[{:s}]/connection_point[{:s}]'.format(network_instance_name, connection_point_id), + {'name': network_instance_name, 'connection_point': connection_point_id}), + + json_config_rule_delete( + '/network_instance[{:s}]/interface[{:s}]'.format(network_instance_name, if_cirid_name), + {'name': network_instance_name, 'id': if_cirid_name, 'interface': if_cirid_name, 'subinterface': 0}), + + json_config_rule_delete( + '/interface[{:s}]/subinterface[0]'.format(if_cirid_name, sub_interface_index), + {'name': if_cirid_name, 'index': sub_interface_index}), + + json_config_rule_delete( + '/network_instance[{:s}]'.format(network_instance_name), + {'name': network_instance_name}), + + json_config_rule_delete( + '/network_instance[default]/protocols[STATIC]', + {'name': 'default', 'identifier': 'STATIC', 
'protocol_name': 'STATIC'}), + + json_config_rule_delete( + '/network_instance[default]/protocols[OSPF]', + {'name': 'default', 'identifier': 'OSPF', 'protocol_name': 'OSPF'}), + + json_config_rule_delete( + '/network_instance[default]', + {'name': 'default', 'type': 'DEFAULT_INSTANCE', 'router_id': router_id}), + ] + return json_config_rules diff --git a/src/service/service/service_handlers/l2nm_emulated/L2NMEmulatedServiceHandler.py b/src/service/service/service_handlers/l2nm_emulated/L2NMEmulatedServiceHandler.py new file mode 100644 index 0000000000000000000000000000000000000000..5d1e0126e3b36b7b5c687fc25c96af46721da69b --- /dev/null +++ b/src/service/service/service_handlers/l2nm_emulated/L2NMEmulatedServiceHandler.py @@ -0,0 +1,161 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import anytree, json, logging +from typing import Any, List, Optional, Tuple, Union +from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, DeviceId, Service +from common.tools.object_factory.Device import json_device_id +from common.type_checkers.Checkers import chk_length, chk_type +from service.service.service_handler_api._ServiceHandler import _ServiceHandler +from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value +from service.service.task_scheduler.TaskExecutor import TaskExecutor +from .ConfigRules import setup_config_rules, teardown_config_rules + +LOGGER = logging.getLogger(__name__) + +class L2NMEmulatedServiceHandler(_ServiceHandler): + def __init__( # pylint: disable=super-init-not-called + self, service : Service, task_executor : TaskExecutor, **settings + ) -> None: + self.__service = service + self.__task_executor = task_executor # pylint: disable=unused-private-member + self.__resolver = anytree.Resolver(pathattr='name') + self.__config = TreeNode('.') + for config_rule in service.service_config.config_rules: + action = config_rule.action + if config_rule.WhichOneof('config_rule') != 'custom': continue + resource_key = config_rule.custom.resource_key + resource_value = config_rule.custom.resource_value + if action == ConfigActionEnum.CONFIGACTION_SET: + try: + resource_value = json.loads(resource_value) + except: # pylint: disable=bare-except + pass + set_subnode_value(self.__resolver, self.__config, resource_key, resource_value) + elif action == ConfigActionEnum.CONFIGACTION_DELETE: + delete_subnode(self.__resolver, self.__config, resource_key) + + def SetEndpoint( + self, endpoints : List[Tuple[str, str, Optional[str]]], connection_uuid : Optional[str] = None + ) -> List[Union[bool, Exception]]: + chk_type('endpoints', endpoints, list) + if len(endpoints) == 0: return [] + + service_uuid = self.__service.service_id.service_uuid.uuid + settings : TreeNode = 
get_subnode(self.__resolver, self.__config, '/settings', None) + + results = [] + for endpoint in endpoints: + try: + chk_type('endpoint', endpoint, (tuple, list)) + chk_length('endpoint', endpoint, min_length=2, max_length=3) + device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now + + endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid) + endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None) + + json_config_rules = setup_config_rules( + service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings) + + device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid))) + for json_config_rule in json_config_rules: + device.device_config.config_rules.append(ConfigRule(**json_config_rule)) + self.__task_executor.configure_device(device) + results.append(True) + except Exception as e: # pylint: disable=broad-except + LOGGER.exception('Unable to SetEndpoint({:s})'.format(str(endpoint))) + results.append(e) + + return results + + def DeleteEndpoint( + self, endpoints : List[Tuple[str, str, Optional[str]]], connection_uuid : Optional[str] = None + ) -> List[Union[bool, Exception]]: + chk_type('endpoints', endpoints, list) + if len(endpoints) == 0: return [] + + service_uuid = self.__service.service_id.service_uuid.uuid + settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None) + + results = [] + for endpoint in endpoints: + try: + chk_type('endpoint', endpoint, (tuple, list)) + chk_length('endpoint', endpoint, min_length=2, max_length=3) + device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now + + endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid) + endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None) + + json_config_rules = teardown_config_rules( + service_uuid, 
connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings) + + device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid))) + for json_config_rule in json_config_rules: + device.device_config.config_rules.append(ConfigRule(**json_config_rule)) + self.__task_executor.configure_device(device) + results.append(True) + except Exception as e: # pylint: disable=broad-except + LOGGER.exception('Unable to DeleteEndpoint({:s})'.format(str(endpoint))) + results.append(e) + + return results + + def SetConstraint(self, constraints : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]: + chk_type('constraints', constraints, list) + if len(constraints) == 0: return [] + + msg = '[SetConstraint] Method not implemented. Constraints({:s}) are being ignored.' + LOGGER.warning(msg.format(str(constraints))) + return [True for _ in range(len(constraints))] + + def DeleteConstraint(self, constraints : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]: + chk_type('constraints', constraints, list) + if len(constraints) == 0: return [] + + msg = '[DeleteConstraint] Method not implemented. Constraints({:s}) are being ignored.' 
+ LOGGER.warning(msg.format(str(constraints))) + return [True for _ in range(len(constraints))] + + def SetConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]: + chk_type('resources', resources, list) + if len(resources) == 0: return [] + + results = [] + for resource in resources: + try: + resource_key, resource_value = resource + resource_value = json.loads(resource_value) + set_subnode_value(self.__resolver, self.__config, resource_key, resource_value) + results.append(True) + except Exception as e: # pylint: disable=broad-except + LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource))) + results.append(e) + + return results + + def DeleteConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]: + chk_type('resources', resources, list) + if len(resources) == 0: return [] + + results = [] + for resource in resources: + try: + resource_key, _ = resource + delete_subnode(self.__resolver, self.__config, resource_key) + except Exception as e: # pylint: disable=broad-except + LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource))) + results.append(e) + + return results diff --git a/src/service/service/service_handlers/l2nm_emulated/__init__.py b/src/service/service/service_handlers/l2nm_emulated/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7 --- /dev/null +++ b/src/service/service/service_handlers/l2nm_emulated/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/service/service/service_handlers/tapi_tapi/TapiServiceHandler.py b/src/service/service/service_handlers/tapi_tapi/TapiServiceHandler.py index 1249af0ae7944f09bd12f2fab4e6e78523320c06..aeba6a26ab5d3fdc42925bcd9bda0a3c5790ece4 100644 --- a/src/service/service/service_handlers/tapi_tapi/TapiServiceHandler.py +++ b/src/service/service/service_handlers/tapi_tapi/TapiServiceHandler.py @@ -14,58 +14,54 @@ import anytree, json, logging from typing import Any, Dict, List, Optional, Tuple, Union -from common.orm.Database import Database from common.orm.HighLevel import get_object -from common.orm.backend.Tools import key_to_str -from common.proto.context_pb2 import Device +from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, Device, DeviceId, Service from common.tools.object_factory.ConfigRule import json_config_rule_delete, json_config_rule_set +from common.tools.object_factory.Device import json_device_id from common.type_checkers.Checkers import chk_type -from context.client.ContextClient import ContextClient -from device.client.DeviceClient import DeviceClient -from service.service.database.ConfigModel import ORM_ConfigActionEnum, get_config_rules -from service.service.database.ContextModel import ContextModel from service.service.database.DeviceModel import DeviceModel -from service.service.database.ServiceModel import ServiceModel from service.service.service_handler_api._ServiceHandler import _ServiceHandler from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value 
+from service.service.task_scheduler.TaskExecutor import TaskExecutor LOGGER = logging.getLogger(__name__) class TapiServiceHandler(_ServiceHandler): def __init__( # pylint: disable=super-init-not-called - self, db_service : ServiceModel, database : Database, context_client : ContextClient, - device_client : DeviceClient, **settings + self, service : Service, task_executor : TaskExecutor, **settings ) -> None: - self.__db_service = db_service - self.__database = database - self.__context_client = context_client # pylint: disable=unused-private-member - self.__device_client = device_client - - self.__db_context : ContextModel = get_object(self.__database, ContextModel, self.__db_service.context_fk) - str_service_key = key_to_str([self.__db_context.context_uuid, self.__db_service.service_uuid]) - db_config = get_config_rules(self.__database, str_service_key, 'running') + self.__service = service + self.__task_executor = task_executor # pylint: disable=unused-private-member self.__resolver = anytree.Resolver(pathattr='name') self.__config = TreeNode('.') - for action, resource_key, resource_value in db_config: - if action == ORM_ConfigActionEnum.SET: + for config_rule in service.service_config.config_rules: + action = config_rule.action + if config_rule.WhichOneof('config_rule') != 'custom': continue + resource_key = config_rule.custom.resource_key + resource_value = config_rule.custom.resource_value + if action == ConfigActionEnum.CONFIGACTION_SET: try: resource_value = json.loads(resource_value) except: # pylint: disable=bare-except pass set_subnode_value(self.__resolver, self.__config, resource_key, resource_value) - elif action == ORM_ConfigActionEnum.DELETE: + elif action == ConfigActionEnum.CONFIGACTION_DELETE: delete_subnode(self.__resolver, self.__config, resource_key) - def SetEndpoint(self, endpoints : List[Tuple[str, str, Optional[str]]]) -> List[Union[bool, Exception]]: + def SetEndpoint( + self, endpoints : List[Tuple[str, str, Optional[str]]], 
connection_uuid : Optional[str] = None + ) -> List[Union[bool, Exception]]: + LOGGER.info('[SetEndpoint] endpoints={:s}'.format(str(endpoints))) + LOGGER.info('[SetEndpoint] connection_uuid={:s}'.format(str(connection_uuid))) chk_type('endpoints', endpoints, list) if len(endpoints) != 2: return [] - service_uuid = self.__db_service.service_uuid - service_settings : TreeNode = get_subnode(self.__resolver, self.__config, 'settings', None) - if service_settings is None: raise Exception('Unable to settings for Service({:s})'.format(str(service_uuid))) + service_uuid = self.__service.service_id.service_uuid.uuid + settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None) + if settings is None: raise Exception('Unable to retrieve settings for Service({:s})'.format(str(service_uuid))) - json_settings : Dict = service_settings.value - capacity_value = json_settings.get('capacity_value', 1) + json_settings : Dict = settings.value + capacity_value = json_settings.get('capacity_value', 50.0) capacity_unit = json_settings.get('capacity_unit', 'GHz') layer_proto_name = json_settings.get('layer_proto_name', 'PHOTONIC_MEDIA') layer_proto_qual = json_settings.get('layer_proto_qual', 'tapi-photonic-media:PHOTONIC_LAYER_QUALIFIER_NMC') @@ -74,46 +70,44 @@ class TapiServiceHandler(_ServiceHandler): results = [] try: device_uuid = endpoints[0][0] - db_device : DeviceModel = get_object(self.__database, DeviceModel, device_uuid, raise_if_not_found=True) - json_device = db_device.dump(include_config_rules=False, include_drivers=True, include_endpoints=True) - json_device_config : Dict = json_device.setdefault('device_config', {}) - json_device_config_rules : List = json_device_config.setdefault('config_rules', []) - json_device_config_rules.extend([ - json_config_rule_set('/service[{:s}]'.format(service_uuid), { - 'uuid' : service_uuid, - 'input_sip' : endpoints[0][1], - 'output_sip' : endpoints[1][1], - 'capacity_unit' : capacity_unit, - 'capacity_value' : 
capacity_value, - 'layer_protocol_name' : layer_proto_name, - 'layer_protocol_qualifier': layer_proto_qual, - 'direction' : direction, - }), - ]) - self.__device_client.ConfigureDevice(Device(**json_device)) + device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid))) + json_config_rule = json_config_rule_set('/service[{:s}]'.format(service_uuid), { + 'uuid' : service_uuid, + 'input_sip' : endpoints[0][1], + 'output_sip' : endpoints[1][1], + 'capacity_unit' : capacity_unit, + 'capacity_value' : capacity_value, + 'layer_protocol_name' : layer_proto_name, + 'layer_protocol_qualifier': layer_proto_qual, + 'direction' : direction, + }) + del device.device_config.config_rules[:] + device.device_config.config_rules.append(ConfigRule(**json_config_rule)) + self.__task_executor.configure_device(device) results.append(True) except Exception as e: # pylint: disable=broad-except - LOGGER.exception('Unable to SetEndpoint for Service({:s})'.format(str(service_uuid))) + LOGGER.exception('Unable to configure Service({:s})'.format(str(service_uuid))) results.append(e) return results - def DeleteEndpoint(self, endpoints : List[Tuple[str, str, Optional[str]]]) -> List[Union[bool, Exception]]: + def DeleteEndpoint( + self, endpoints : List[Tuple[str, str, Optional[str]]], connection_uuid : Optional[str] = None + ) -> List[Union[bool, Exception]]: + LOGGER.info('[DeleteEndpoint] endpoints={:s}'.format(str(endpoints))) + LOGGER.info('[DeleteEndpoint] connection_uuid={:s}'.format(str(connection_uuid))) + chk_type('endpoints', endpoints, list) if len(endpoints) != 2: return [] - service_uuid = self.__db_service.service_uuid + service_uuid = self.__service.service_id.service_uuid.uuid results = [] try: device_uuid = endpoints[0][0] - db_device : DeviceModel = get_object(self.__database, DeviceModel, device_uuid, raise_if_not_found=True) - json_device = db_device.dump(include_config_rules=False, include_drivers=True, include_endpoints=True) - json_device_config : 
Dict = json_device.setdefault('device_config', {}) - json_device_config_rules : List = json_device_config.setdefault('config_rules', []) - json_device_config_rules.extend([ - json_config_rule_delete('/service[{:s}]'.format(service_uuid), {'uuid': service_uuid}) - ]) - self.__device_client.ConfigureDevice(Device(**json_device)) + device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid))) + json_config_rule = json_config_rule_delete('/service[{:s}]'.format(service_uuid), {'uuid': service_uuid}) + device.device_config.config_rules.append(ConfigRule(**json_config_rule)) + self.__task_executor.configure_device(device) results.append(True) except Exception as e: # pylint: disable=broad-except LOGGER.exception('Unable to DeleteEndpoint for Service({:s})'.format(str(service_uuid))) diff --git a/src/service/service/task_scheduler/ConnectionExpander.py b/src/service/service/task_scheduler/ConnectionExpander.py new file mode 100644 index 0000000000000000000000000000000000000000..39c91b1ba7129d6915ab578f2e85b670049def04 --- /dev/null +++ b/src/service/service/task_scheduler/ConnectionExpander.py @@ -0,0 +1,66 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Dict, List, Optional, Tuple +from common.tools.grpc.Tools import grpc_message_to_json_string +from common.proto.context_pb2 import Connection, Empty, EndPointId, Link +from context.client.ContextClient import ContextClient + +class ConnectionExpander: + def __init__(self) -> None: + self.context_client = ContextClient() + self.endpointkey_to_link : Dict[Tuple[str, str], Link] = dict() + self.refresh_links() + + def refresh_links(self) -> None: + links = self.context_client.ListLinks(Empty()) + for link in links.links: + for link_endpoint_id in link.link_endpoint_ids: + device_uuid = link_endpoint_id.device_id.device_uuid.uuid + endpoint_uuid = link_endpoint_id.endpoint_uuid.uuid + endpoint_key = (device_uuid, endpoint_uuid) + self.endpointkey_to_link[endpoint_key] = link + + def get_link_from_endpoint_id(self, endpoint_id : EndPointId, raise_if_not_found : bool = False) -> Optional[Link]: + device_uuid = endpoint_id.device_id.device_uuid.uuid + endpoint_uuid = endpoint_id.endpoint_uuid.uuid + endpoint_key = (device_uuid, endpoint_uuid) + link = self.endpointkey_to_link.get(endpoint_key) + if link is None and raise_if_not_found: + str_endpoint_id = grpc_message_to_json_string(endpoint_id) + raise Exception('Link for Endpoint({:s}) not found'.format(str_endpoint_id)) + return link + + def get_links(self, connection : Connection) -> List[Link]: + path_links = list() + last_link_uuid = None + for endpoint_id in connection.path_hops_endpoint_ids: + link = self.get_link_from_endpoint_id(endpoint_id, raise_if_not_found=True) + link_uuid = link.link_id.link_uuid.uuid + if last_link_uuid is None or last_link_uuid != link_uuid: + path_links.append(link) + last_link_uuid = link_uuid + return path_links + + def get_endpoints_traversed(self, connection : Connection) -> List[EndPointId]: + path_endpoint_ids = list() + last_link_uuid = None + for endpoint_id in connection.path_hops_endpoint_ids: + link = self.get_link_from_endpoint_id(endpoint_id, 
raise_if_not_found=True) + link_uuid = link.link_id.link_uuid.uuid + if last_link_uuid is None or last_link_uuid != link_uuid: + for link_endpoint_id in link.link_endpoint_ids: + path_endpoint_ids.append(link_endpoint_id) + last_link_uuid = link_uuid + return path_endpoint_ids diff --git a/src/service/service/task_scheduler/TaskExecutor.py b/src/service/service/task_scheduler/TaskExecutor.py new file mode 100644 index 0000000000000000000000000000000000000000..416e1698f2432e22ae5cfe8e437570fc7d3c8880 --- /dev/null +++ b/src/service/service/task_scheduler/TaskExecutor.py @@ -0,0 +1,142 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from enum import Enum +from typing import Any, Dict, Optional, Union +from common.proto.context_pb2 import Connection, ConnectionId, Device, DeviceId, Service, ServiceId +from common.rpc_method_wrapper.ServiceExceptions import NotFoundException +from context.client.ContextClient import ContextClient +from device.client.DeviceClient import DeviceClient +from service.service.service_handler_api._ServiceHandler import _ServiceHandler +from service.service.service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory, get_service_handler_class +from service.service.tools.ContextGetters import get_connection, get_device, get_service +from service.service.tools.ObjectKeys import get_connection_key, get_device_key, get_service_key + +CacheableObject = Union[Connection, Device, Service] + +class CacheableObjectType(Enum): + CONNECTION = 'connection' + DEVICE = 'device' + SERVICE = 'service' + +class TaskExecutor: + def __init__(self, service_handler_factory : ServiceHandlerFactory) -> None: + self._service_handler_factory = service_handler_factory + self._context_client = ContextClient() + self._device_client = DeviceClient() + self._grpc_objects_cache : Dict[str, CacheableObject] = dict() + + @property + def service_handler_factory(self) -> ServiceHandlerFactory: return self._service_handler_factory + + # ----- Common methods --------------------------------------------------------------------------------------------- + + def _load_grpc_object(self, object_type : CacheableObjectType, object_key : str) -> Optional[CacheableObject]: + object_key = '{:s}:{:s}'.format(object_type.value, object_key) + return self._grpc_objects_cache.get(object_key) + + def _store_grpc_object(self, object_type : CacheableObjectType, object_key : str, grpc_object) -> None: + object_key = '{:s}:{:s}'.format(object_type.value, object_key) + self._grpc_objects_cache[object_key] = grpc_object + + def _delete_grpc_object(self, object_type : CacheableObjectType, object_key : str) -> None: 
+ object_key = '{:s}:{:s}'.format(object_type.value, object_key) + self._grpc_objects_cache.pop(object_key, None) + + def _store_editable_grpc_object( + self, object_type : CacheableObjectType, object_key : str, grpc_class, grpc_ro_object + ) -> Any: + grpc_rw_object = grpc_class() + grpc_rw_object.CopyFrom(grpc_ro_object) + self._store_grpc_object(object_type, object_key, grpc_rw_object) + return grpc_rw_object + + # ----- Connection-related methods --------------------------------------------------------------------------------- + + def get_connection(self, connection_id : ConnectionId) -> Connection: + connection_key = get_connection_key(connection_id) + connection = self._load_grpc_object(CacheableObjectType.CONNECTION, connection_key) + if connection is None: + connection = get_connection(self._context_client, connection_id) + if connection is None: raise NotFoundException('Connection', connection_key) + connection : Connection = self._store_editable_grpc_object( + CacheableObjectType.CONNECTION, connection_key, Connection, connection) + return connection + + def set_connection(self, connection : Connection) -> None: + connection_key = get_connection_key(connection.connection_id) + self._context_client.SetConnection(connection) + self._store_grpc_object(CacheableObjectType.CONNECTION, connection_key, connection) + + def delete_connection(self, connection_id : ConnectionId) -> None: + connection_key = get_connection_key(connection_id) + self._context_client.RemoveConnection(connection_id) + self._delete_grpc_object(CacheableObjectType.CONNECTION, connection_key) + + # ----- Device-related methods ------------------------------------------------------------------------------------- + + def get_device(self, device_id : DeviceId) -> Device: + device_key = get_device_key(device_id) + device = self._load_grpc_object(CacheableObjectType.DEVICE, device_key) + if device is None: + device = get_device(self._context_client, device_id) + if device is None: raise 
NotFoundException('Device', device_key) + device : Device = self._store_editable_grpc_object( + CacheableObjectType.DEVICE, device_key, Device, device) + return device + + def configure_device(self, device : Device) -> None: + device_key = get_device_key(device.device_id) + self._device_client.ConfigureDevice(device) + self._store_grpc_object(CacheableObjectType.DEVICE, device_key, device) + + def get_devices_from_connection(self, connection : Connection) -> Dict[str, Device]: + devices = dict() + for endpoint_id in connection.path_hops_endpoint_ids: + device = self.get_device(endpoint_id.device_id) + device_uuid = endpoint_id.device_id.device_uuid.uuid + if device is None: raise Exception('Device({:s}) not found'.format(str(device_uuid))) + devices[device_uuid] = device + return devices + + # ----- Service-related methods ------------------------------------------------------------------------------------ + + def get_service(self, service_id : ServiceId) -> Service: + service_key = get_service_key(service_id) + service = self._load_grpc_object(CacheableObjectType.SERVICE, service_key) + if service is None: + service = get_service(self._context_client, service_id) + if service is None: raise NotFoundException('Service', service_key) + service : service = self._store_editable_grpc_object( + CacheableObjectType.SERVICE, service_key, Service, service) + return service + + def set_service(self, service : Service) -> None: + service_key = get_service_key(service.service_id) + self._context_client.SetService(service) + self._store_grpc_object(CacheableObjectType.SERVICE, service_key, service) + + def delete_service(self, service_id : ServiceId) -> None: + service_key = get_service_key(service_id) + self._context_client.RemoveService(service_id) + self._delete_grpc_object(CacheableObjectType.SERVICE, service_key) + + # ----- Service Handler Factory ------------------------------------------------------------------------------------ + + def get_service_handler( + self, 
connection : Connection, service : Service, **service_handler_settings + ) -> _ServiceHandler: + connection_devices = self.get_devices_from_connection(connection) + service_handler_class = get_service_handler_class(self._service_handler_factory, service, connection_devices) + return service_handler_class(service, self, **service_handler_settings) diff --git a/src/service/service/task_scheduler/TaskScheduler.py b/src/service/service/task_scheduler/TaskScheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..de7e9eb7a70e683051e9d2fd906252713dcdba54 --- /dev/null +++ b/src/service/service/task_scheduler/TaskScheduler.py @@ -0,0 +1,210 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import graphlib, logging, queue, time +from typing import Dict, Tuple +from common.proto.context_pb2 import Connection, ConnectionId, Service, ServiceId, ServiceStatusEnum +from common.proto.pathcomp_pb2 import PathCompReply +from common.tools.grpc.Tools import grpc_message_to_json_string +from context.client.ContextClient import ContextClient +from service.service.service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory +from service.service.tools.ObjectKeys import get_connection_key, get_service_key +from .tasks._Task import _Task +from .tasks.Task_ConnectionConfigure import Task_ConnectionConfigure +from .tasks.Task_ConnectionDeconfigure import Task_ConnectionDeconfigure +from .tasks.Task_ServiceDelete import Task_ServiceDelete +from .tasks.Task_ServiceSetStatus import Task_ServiceSetStatus +from .TaskExecutor import CacheableObjectType, TaskExecutor + +LOGGER = logging.getLogger(__name__) + +class TasksScheduler: + def __init__(self, service_handler_factory : ServiceHandlerFactory) -> None: + self._dag = graphlib.TopologicalSorter() + self._executor = TaskExecutor(service_handler_factory) + self._tasks : Dict[str, _Task] = dict() + self._context_client = ContextClient() + + # ----- Helper methods --------------------------------------------------------------------------------------------- + + def _add_task_if_not_exists(self, task : _Task) -> str: + task_key = task.key + if task_key not in self._tasks: + self._tasks[task_key] = task + return task_key + + def _add_connection_to_executor_cache(self, connection : Connection) -> None: + connection_key = get_connection_key(connection.connection_id) + self._executor._store_editable_grpc_object( + CacheableObjectType.CONNECTION, connection_key, Connection, connection) + + def _add_service_to_executor_cache(self, service : Service) -> None: + service_key = get_service_key(service.service_id) + self._executor._store_editable_grpc_object( + CacheableObjectType.SERVICE, service_key, Service, service) + + 
# ----- Task & DAG composition methods ----------------------------------------------------------------------------- + + def _service_create(self, service_id : ServiceId) -> Tuple[str, str]: + service_planned_key = self._add_task_if_not_exists(Task_ServiceSetStatus( + self._executor, service_id, ServiceStatusEnum.SERVICESTATUS_PLANNED)) + + service_active_key = self._add_task_if_not_exists(Task_ServiceSetStatus( + self._executor, service_id, ServiceStatusEnum.SERVICESTATUS_ACTIVE)) + + # activating a service requires the service is in planning state + self._dag.add(service_active_key, service_planned_key) + return service_planned_key, service_active_key + + def _service_remove(self, service_id : ServiceId) -> Tuple[str, str]: + service_removing_key = self._add_task_if_not_exists(Task_ServiceSetStatus( + self._executor, service_id, ServiceStatusEnum.SERVICESTATUS_PENDING_REMOVAL)) + + service_delete_key = self._add_task_if_not_exists(Task_ServiceDelete(self._executor, service_id)) + + # deleting a service requires the service is in removing state + self._dag.add(service_delete_key, service_removing_key) + return service_removing_key, service_delete_key + + def _connection_configure(self, connection_id : ConnectionId, service_id : ServiceId) -> str: + connection_configure_key = self._add_task_if_not_exists(Task_ConnectionConfigure( + self._executor, connection_id)) + + # the connection configuration depends on its connection's service being in planning state + service_planned_key = self._add_task_if_not_exists(Task_ServiceSetStatus( + self._executor, service_id, ServiceStatusEnum.SERVICESTATUS_PLANNED)) + self._dag.add(connection_configure_key, service_planned_key) + + # the connection's service depends on the connection configuration to transition to active state + service_active_key = self._add_task_if_not_exists(Task_ServiceSetStatus( + self._executor, service_id, ServiceStatusEnum.SERVICESTATUS_ACTIVE)) + self._dag.add(service_active_key, 
connection_configure_key) + + return connection_configure_key + + def _connection_deconfigure(self, connection_id : ConnectionId, service_id : ServiceId) -> str: + connection_deconfigure_key = self._add_task_if_not_exists(Task_ConnectionDeconfigure( + self._executor, connection_id)) + + # the connection deconfiguration depends on its connection's service being in removing state + service_pending_removal_key = self._add_task_if_not_exists(Task_ServiceSetStatus( + self._executor, service_id, ServiceStatusEnum.SERVICESTATUS_PENDING_REMOVAL)) + self._dag.add(connection_deconfigure_key, service_pending_removal_key) + + # the connection's service depends on the connection deconfiguration to transition to delete + service_delete_key = self._add_task_if_not_exists(Task_ServiceDelete( + self._executor, service_id)) + self._dag.add(service_delete_key, connection_deconfigure_key) + + return connection_deconfigure_key + + def compose_from_pathcompreply(self, pathcomp_reply : PathCompReply, is_delete : bool = False) -> None: + t0 = time.time() + include_service = self._service_remove if is_delete else self._service_create + include_connection = self._connection_deconfigure if is_delete else self._connection_configure + + for service in pathcomp_reply.services: + include_service(service.service_id) + self._add_service_to_executor_cache(service) + + for connection in pathcomp_reply.connections: + connection_key = include_connection(connection.connection_id, connection.service_id) + self._add_connection_to_executor_cache(connection) + self._executor.get_service(connection.service_id) + for sub_service_id in connection.sub_service_ids: + _,service_key_done = include_service(sub_service_id) + self._executor.get_service(sub_service_id) + self._dag.add(connection_key, service_key_done) + + t1 = time.time() + LOGGER.info('[compose_from_pathcompreply] elapsed_time: {:f} sec'.format(t1-t0)) + + def compose_from_service(self, service : Service, is_delete : bool = False) -> None: + t0 = 
time.time() + include_service = self._service_remove if is_delete else self._service_create + include_connection = self._connection_deconfigure if is_delete else self._connection_configure + + explored_items = set() + pending_items_to_explore = queue.Queue() + pending_items_to_explore.put(service) + + while not pending_items_to_explore.empty(): + try: + item = pending_items_to_explore.get(block=False) + except queue.Empty: + break + + if isinstance(item, Service): + str_item_key = grpc_message_to_json_string(item.service_id) + if str_item_key in explored_items: continue + + include_service(item.service_id) + self._add_service_to_executor_cache(item) + connections = self._context_client.ListConnections(item.service_id) + for connection in connections.connections: + self._add_connection_to_executor_cache(connection) + pending_items_to_explore.put(connection) + + explored_items.add(str_item_key) + + elif isinstance(item, ServiceId): + str_item_key = grpc_message_to_json_string(item) + if str_item_key in explored_items: continue + + include_service(item) + self._executor.get_service(item) + connections = self._context_client.ListConnections(item) + for connection in connections.connections: + self._add_connection_to_executor_cache(connection) + pending_items_to_explore.put(connection) + + explored_items.add(str_item_key) + + elif isinstance(item, Connection): + str_item_key = grpc_message_to_json_string(item.connection_id) + if str_item_key in explored_items: continue + + connection_key = include_connection(item.connection_id, item.service_id) + self._add_connection_to_executor_cache(connection) + + self._executor.get_service(item.service_id) + pending_items_to_explore.put(item.service_id) + + for sub_service_id in item.sub_service_ids: + _,service_key_done = include_service(sub_service_id) + self._executor.get_service(sub_service_id) + self._dag.add(service_key_done, connection_key) + pending_items_to_explore.put(sub_service_id) + + explored_items.add(str_item_key) + 
+ else: + MSG = 'Unsupported item {:s}({:s})' + raise Exception(MSG.format(type(item).__name__, grpc_message_to_json_string(item))) + + t1 = time.time() + LOGGER.info('[compose_from_service] elapsed_time: {:f} sec'.format(t1-t0)) + + def execute_all(self, dry_run : bool = False) -> None: + ordered_task_keys = list(self._dag.static_order()) + LOGGER.info('[execute_all] ordered_task_keys={:s}'.format(str(ordered_task_keys))) + + results = [] + for task_key in ordered_task_keys: + task = self._tasks.get(task_key) + succeeded = True if dry_run else task.execute() + results.append(succeeded) + + LOGGER.info('[execute_all] results={:s}'.format(str(results))) + return zip(ordered_task_keys, results) diff --git a/src/service/service/task_scheduler/__init__.py b/src/service/service/task_scheduler/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..70bfa5118f47eb93d5cdd0832ee7928030369286 --- /dev/null +++ b/src/service/service/task_scheduler/__init__.py @@ -0,0 +1,51 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# TaskScheduler is initialized with a PathComputation Reply or a Service, and it collects/identifies the sub-services, +# sub-connections, and operations associated to them. It discovers and takes care of the inter-dependencies among them, +# and produces an ordered list of tasks to be executed to implement the desired create/delete operation on the service. 
+# E.g., a service cannot be deleted if connections supporting that service still exist. If these connections are +# supported by sub-services, the connection needs to be torn down before destroying the services. +# +# Internally, it composes a Directed Acyclic Graph (DAG) of dependencies between tasks. Each task performs a specific +# operation on a connection or service. The DAG composition is based on information extracted from a PathComp reply +# and/or interrogating the Context component. +# +# Example: +# A B C +# *---L3---*---L3---* +# *--L0--* *--L0--* +# - L3 service between A and C, depends on L3 connections A-B and B-C. +# - Each L3 connection is supported by an L0 service and its corresponding L0 connection. +# +# Dependency structure: +# service L3:A-C +# connection L3:A-B +# service L0:A-B +# connection L0:A-B +# connection L3:B-C +# service L0:B-C +# connection L0:B-C +# +# Resolution: +# - service.set(L3:A-C, state=PLANNING) +# - service.set(L0:A-B, state=PLANNING) +# - connection.configure(L0:A-B) +# - service.set(L0:A-B, state=ACTIVE) +# - connection.configure(L3:A-B) +# - service.set(L0:B-C, state=PLANNING) +# - connection.configure(L0:B-C) +# - service.set(L0:B-C, state=ACTIVE) +# - connection.configure(L3:B-C) +# - service.set(L3:A-C, state=ACTIVE) diff --git a/src/service/service/task_scheduler/tasks/Task_ConnectionConfigure.py b/src/service/service/task_scheduler/tasks/Task_ConnectionConfigure.py new file mode 100644 index 0000000000000000000000000000000000000000..beb7e5a0426b7705dbf780d8305a587a3d4fec14 --- /dev/null +++ b/src/service/service/task_scheduler/tasks/Task_ConnectionConfigure.py @@ -0,0 +1,59 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from common.proto.context_pb2 import ConnectionId +from common.rpc_method_wrapper.ServiceExceptions import OperationFailedException +from common.tools.grpc.Tools import grpc_message_to_json_string +from service.service.service_handler_api.Tools import check_errors_setendpoint +from service.service.task_scheduler.TaskExecutor import TaskExecutor +from service.service.tools.EndpointIdFormatters import endpointids_to_raw +from service.service.tools.ObjectKeys import get_connection_key +from ._Task import _Task + +KEY_TEMPLATE = 'connection({connection_id:s}):configure' + +class Task_ConnectionConfigure(_Task): + def __init__(self, task_executor : TaskExecutor, connection_id : ConnectionId) -> None: + super().__init__(task_executor) + self._connection_id = connection_id + + @property + def connection_id(self) -> ConnectionId: return self._connection_id + + @staticmethod + def build_key(connection_id : ConnectionId) -> str: + str_connection_id = get_connection_key(connection_id) + return KEY_TEMPLATE.format(connection_id=str_connection_id) + + @property + def key(self) -> str: return self.build_key(self._connection_id) + + def execute(self) -> None: + connection = self._task_executor.get_connection(self._connection_id) + service = self._task_executor.get_service(connection.service_id) + + service_handler_settings = {} + service_handler = self._task_executor.get_service_handler(connection, service, **service_handler_settings) + + endpointids_to_set = endpointids_to_raw(connection.path_hops_endpoint_ids) + connection_uuid = 
connection.connection_id.connection_uuid.uuid + results_setendpoint = service_handler.SetEndpoint(endpointids_to_set, connection_uuid=connection_uuid) + errors = check_errors_setendpoint(endpointids_to_set, results_setendpoint) + if len(errors) > 0: + MSG = 'SetEndpoint for Connection({:s}) from Service({:s})' + str_connection = grpc_message_to_json_string(connection) + str_service = grpc_message_to_json_string(service) + raise OperationFailedException(MSG.format(str_connection, str_service), extra_details=errors) + + self._task_executor.set_connection(connection) diff --git a/src/service/service/task_scheduler/tasks/Task_ConnectionDeconfigure.py b/src/service/service/task_scheduler/tasks/Task_ConnectionDeconfigure.py new file mode 100644 index 0000000000000000000000000000000000000000..c04d950a8993166c3bbfab3c083d4f2898dcd3e8 --- /dev/null +++ b/src/service/service/task_scheduler/tasks/Task_ConnectionDeconfigure.py @@ -0,0 +1,59 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from common.proto.context_pb2 import ConnectionId +from common.rpc_method_wrapper.ServiceExceptions import OperationFailedException +from common.tools.grpc.Tools import grpc_message_to_json_string +from service.service.service_handler_api.Tools import check_errors_deleteendpoint +from service.service.task_scheduler.TaskExecutor import TaskExecutor +from service.service.tools.EndpointIdFormatters import endpointids_to_raw +from service.service.tools.ObjectKeys import get_connection_key +from ._Task import _Task + +KEY_TEMPLATE = 'connection({connection_id:s}):deconfigure' + +class Task_ConnectionDeconfigure(_Task): + def __init__(self, task_executor : TaskExecutor, connection_id : ConnectionId) -> None: + super().__init__(task_executor) + self._connection_id = connection_id + + @property + def connection_id(self) -> ConnectionId: return self._connection_id + + @staticmethod + def build_key(connection_id : ConnectionId) -> str: + str_connection_id = get_connection_key(connection_id) + return KEY_TEMPLATE.format(connection_id=str_connection_id) + + @property + def key(self) -> str: return self.build_key(self._connection_id) + + def execute(self) -> None: + connection = self._task_executor.get_connection(self._connection_id) + service = self._task_executor.get_service(connection.service_id) + + service_handler_settings = {} + service_handler = self._task_executor.get_service_handler(connection, service, **service_handler_settings) + + endpointids_to_delete = endpointids_to_raw(connection.path_hops_endpoint_ids) + connection_uuid = connection.connection_id.connection_uuid.uuid + results_deleteendpoint = service_handler.DeleteEndpoint(endpointids_to_delete, connection_uuid=connection_uuid) + errors = check_errors_deleteendpoint(endpointids_to_delete, results_deleteendpoint) + if len(errors) > 0: + MSG = 'DeleteEndpoint for Connection({:s}) from Service({:s})' + str_connection = grpc_message_to_json_string(connection) + str_service = 
grpc_message_to_json_string(service) + raise OperationFailedException(MSG.format(str_connection, str_service), extra_details=errors) + + self._task_executor.delete_connection(self._connection_id) diff --git a/src/service/service/task_scheduler/tasks/Task_ServiceDelete.py b/src/service/service/task_scheduler/tasks/Task_ServiceDelete.py new file mode 100644 index 0000000000000000000000000000000000000000..15da1ffedbb3235e6697dcd6c4b0c0429cad0450 --- /dev/null +++ b/src/service/service/task_scheduler/tasks/Task_ServiceDelete.py @@ -0,0 +1,39 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from common.proto.context_pb2 import ServiceId +from service.service.task_scheduler.TaskExecutor import TaskExecutor +from service.service.tools.ObjectKeys import get_service_key +from ._Task import _Task + +KEY_TEMPLATE = 'service({service_id:s}):delete' + +class Task_ServiceDelete(_Task): + def __init__(self, task_executor : TaskExecutor, service_id : ServiceId) -> None: + super().__init__(task_executor) + self._service_id = service_id + + @property + def service_id(self) -> ServiceId: return self._service_id + + @staticmethod + def build_key(service_id : ServiceId) -> str: + str_service_id = get_service_key(service_id) + return KEY_TEMPLATE.format(service_id=str_service_id) + + @property + def key(self) -> str: return self.build_key(self._service_id) + + def execute(self) -> None: + self._task_executor.delete_service(self._service_id) diff --git a/src/service/service/task_scheduler/tasks/Task_ServiceSetStatus.py b/src/service/service/task_scheduler/tasks/Task_ServiceSetStatus.py new file mode 100644 index 0000000000000000000000000000000000000000..163954f1b786916ad8c5fde5e8a04def84af259b --- /dev/null +++ b/src/service/service/task_scheduler/tasks/Task_ServiceSetStatus.py @@ -0,0 +1,46 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from common.proto.context_pb2 import ServiceId, ServiceStatusEnum +from service.service.task_scheduler.TaskExecutor import TaskExecutor +from service.service.tools.ObjectKeys import get_service_key +from ._Task import _Task + +KEY_TEMPLATE = 'service({service_id:s}):set_status({new_status:s})' + +class Task_ServiceSetStatus(_Task): + def __init__(self, task_executor : TaskExecutor, service_id : ServiceId, new_status : ServiceStatusEnum) -> None: + super().__init__(task_executor) + self._service_id = service_id + self._new_status = new_status + + @property + def service_id(self) -> ServiceId: return self._service_id + + @property + def new_status(self) -> ServiceStatusEnum: return self._new_status + + @staticmethod + def build_key(service_id : ServiceId, new_status : ServiceStatusEnum) -> str: + str_service_id = get_service_key(service_id) + str_new_status = ServiceStatusEnum.Name(new_status) + return KEY_TEMPLATE.format(service_id=str_service_id, new_status=str_new_status) + + @property + def key(self) -> str: return self.build_key(self._service_id, self._new_status) + + def execute(self) -> None: + service = self._task_executor.get_service(self._service_id) + service.service_status.service_status = self._new_status + self._task_executor.set_service(service) diff --git a/src/service/service/task_scheduler/tasks/_Task.py b/src/service/service/task_scheduler/tasks/_Task.py new file mode 100644 index 0000000000000000000000000000000000000000..c36f92973bfa3847c86d2d745792062ec828492f --- /dev/null +++ b/src/service/service/task_scheduler/tasks/_Task.py @@ -0,0 +1,30 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from service.service.task_scheduler.TaskExecutor import TaskExecutor + +class _Task: + def __init__(self, task_executor : TaskExecutor) -> None: + self._task_executor = task_executor + + @staticmethod + def build_key() -> str: + raise NotImplementedError('Task:build_key() not implemented') + + @property + def key(self) -> str: + raise NotImplementedError('Task:key() not implemented') + + def execute(self) -> bool: + raise NotImplementedError('Task:execute() not implemented') diff --git a/src/service/service/task_scheduler/tasks/__init__.py b/src/service/service/task_scheduler/tasks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7 --- /dev/null +++ b/src/service/service/task_scheduler/tasks/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ diff --git a/src/service/service/tools/ContextGetters.py b/src/service/service/tools/ContextGetters.py new file mode 100644 index 0000000000000000000000000000000000000000..79ccf956b26e914bfbe6bdedd005d9f98e216d38 --- /dev/null +++ b/src/service/service/tools/ContextGetters.py @@ -0,0 +1,42 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import grpc +from typing import Optional +from common.proto.context_pb2 import Connection, ConnectionId, Device, DeviceId, Service, ServiceId +from context.client.ContextClient import ContextClient + +def get_connection(context_client : ContextClient, connection_id : ConnectionId) -> Optional[Connection]: + try: + connection : Connection = context_client.GetConnection(connection_id) + return connection + except grpc.RpcError as e: + if e.code() != grpc.StatusCode.NOT_FOUND: raise # pylint: disable=no-member + return None + +def get_device(context_client : ContextClient, device_id : DeviceId) -> Optional[Device]: + try: + device : Device = context_client.GetDevice(device_id) + return device + except grpc.RpcError as e: + if e.code() != grpc.StatusCode.NOT_FOUND: raise # pylint: disable=no-member + return None + +def get_service(context_client : ContextClient, service_id : ServiceId) -> Optional[Service]: + try: + service : Service = context_client.GetService(service_id) + return service + except grpc.RpcError as e: + if e.code() != grpc.StatusCode.NOT_FOUND: 
raise # pylint: disable=no-member + return None diff --git a/src/service/service/tools/EndpointIdFormatters.py b/src/service/service/tools/EndpointIdFormatters.py new file mode 100644 index 0000000000000000000000000000000000000000..2435df42cfa10d336553945e7e70171838f69237 --- /dev/null +++ b/src/service/service/tools/EndpointIdFormatters.py @@ -0,0 +1,27 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import List, Optional, Tuple +from common.proto.context_pb2 import EndPointId + +def endpointids_to_raw(traversed_endpoint_ids : List[EndPointId]) -> List[Tuple[str, str, Optional[str]]]: + raw_endpoint_ids : List[Tuple[str, str, Optional[str]]] = [] + for endpoint_id in traversed_endpoint_ids: + device_uuid = endpoint_id.device_id.device_uuid.uuid + endpoint_uuid = endpoint_id.endpoint_uuid.uuid + topology_uuid = endpoint_id.topology_id.topology_uuid.uuid + if len(topology_uuid) == 0: topology_uuid = None + endpoint_id_tuple = device_uuid, endpoint_uuid, topology_uuid + raw_endpoint_ids.append(endpoint_id_tuple) + return raw_endpoint_ids diff --git a/src/service/service/tools/ObjectKeys.py b/src/service/service/tools/ObjectKeys.py new file mode 100644 index 0000000000000000000000000000000000000000..e58d8bd3e9e5c992a3b9be9c3275f3b40c7ba5e9 --- /dev/null +++ b/src/service/service/tools/ObjectKeys.py @@ -0,0 +1,26 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from common.proto.context_pb2 import ConnectionId, DeviceId, ServiceId + +def get_connection_key(connection_id : ConnectionId) -> str: + return connection_id.connection_uuid.uuid + +def get_device_key(device_id : DeviceId) -> str: + return device_id.device_uuid.uuid + +def get_service_key(service_id : ServiceId) -> str: + context_uuid = service_id.context_id.context_uuid.uuid + service_uuid = service_id.service_uuid.uuid + return '{:s}/{:s}'.format(context_uuid, service_uuid) diff --git a/src/service/service/tools/__init__.py b/src/service/service/tools/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7 --- /dev/null +++ b/src/service/service/tools/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ diff --git a/src/service/tests/test_unitary_task_scheduler.py b/src/service/tests/test_unitary_task_scheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..020386d764ddc508d8fe6806ab1de6887620e33f --- /dev/null +++ b/src/service/tests/test_unitary_task_scheduler.py @@ -0,0 +1,96 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +#from common.proto.context_pb2 import Connection, Service +from common.proto.pathcomp_pb2 import PathCompReply +from common.tools.grpc.Tools import grpc_message_to_json_string +from service.service.service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory +from service.service.task_scheduler.TaskScheduler import TasksScheduler +from .PrepareTestScenario import context_client # pylint: disable=unused-import + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.DEBUG) + +def test_task_scheduler(): + # test: add services and connections that depend on each other + # then, check if they are properly resolved. 
+ # - service MAIN, depends on connection PKT-1, TAPI, and PKT-2 + # - connection PKT-1, depends on nothing + # - connection TAPI, depends on service TAPI-1 and TAPI-2 + # - connection PKT-2, depends on nothing + # - service TAPI-1, depends on connection TAPI-1 + # - service TAPI-2, depends on connection TAPI-2 + + pathcomp_reply = PathCompReply() + + service_main = pathcomp_reply.services.add() + service_main.service_id.context_id.context_uuid.uuid = 'admin' + service_main.service_id.service_uuid.uuid = 'MAIN' + + service_tapi1 = pathcomp_reply.services.add() + service_tapi1.service_id.context_id.context_uuid.uuid = 'admin' + service_tapi1.service_id.service_uuid.uuid = 'TAPI-1' + + service_tapi2 = pathcomp_reply.services.add() + service_tapi2.service_id.context_id.context_uuid.uuid = 'admin' + service_tapi2.service_id.service_uuid.uuid = 'TAPI-2' + + connection_pkt1 = pathcomp_reply.connections.add() + connection_pkt1.connection_id.connection_uuid.uuid = 'PKT-1' + connection_pkt1.service_id.CopyFrom(service_main.service_id) + + connection_tapi = pathcomp_reply.connections.add() + connection_tapi.connection_id.connection_uuid.uuid = 'TAPI' + connection_tapi.service_id.CopyFrom(service_main.service_id) + + connection_pkt2 = pathcomp_reply.connections.add() + connection_pkt2.connection_id.connection_uuid.uuid = 'PKT-2' + connection_pkt2.service_id.CopyFrom(service_main.service_id) + + connection_tapi1 = pathcomp_reply.connections.add() + connection_tapi1.connection_id.connection_uuid.uuid = 'TAPI-1' + connection_tapi1.service_id.CopyFrom(service_tapi1.service_id) + connection_tapi.sub_service_ids.append(service_tapi1.service_id) + + connection_tapi2 = pathcomp_reply.connections.add() + connection_tapi2.connection_id.connection_uuid.uuid = 'TAPI-2' + connection_tapi2.service_id.CopyFrom(service_tapi2.service_id) + connection_tapi.sub_service_ids.append(service_tapi2.service_id) + + LOGGER.info('pathcomp_reply={:s}'.format(grpc_message_to_json_string(pathcomp_reply))) 
+ + service_handler_factory = ServiceHandlerFactory([]) + task_scheduler = TasksScheduler(service_handler_factory) + task_scheduler.compose_from_pathcompreply(pathcomp_reply) + tasks_and_results = list(task_scheduler.execute_all(dry_run=True)) + + LOGGER.info('tasks_and_results={:s}'.format(str(tasks_and_results))) + + CORRECT_ORDERED_TASK_KEYS = [ + 'service(admin/MAIN):set_status(SERVICESTATUS_PLANNED)', + 'service(admin/TAPI-1):set_status(SERVICESTATUS_PLANNED)', + 'service(admin/TAPI-2):set_status(SERVICESTATUS_PLANNED)', + 'connection(PKT-1):configure', + 'connection(PKT-2):configure', + 'connection(TAPI-1):configure', + 'connection(TAPI-2):configure', + 'service(admin/TAPI-1):set_status(SERVICESTATUS_ACTIVE)', + 'service(admin/TAPI-2):set_status(SERVICESTATUS_ACTIVE)', + 'connection(TAPI):configure', + 'service(admin/MAIN):set_status(SERVICESTATUS_ACTIVE)' + ] + + for (task_key,_),correct_key in zip(tasks_and_results, CORRECT_ORDERED_TASK_KEYS): + assert task_key == correct_key diff --git a/src/tests/ecoc22/.gitignore b/src/tests/ecoc22/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..0a3f4400d5c88b1af32c7667d69d2fdc12d5424e --- /dev/null +++ b/src/tests/ecoc22/.gitignore @@ -0,0 +1,2 @@ +# Add here your files containing confidential testbed details such as IP addresses, ports, usernames, passwords, etc. +descriptors_real.json diff --git a/src/tests/ecoc22/__init__.py b/src/tests/ecoc22/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7 --- /dev/null +++ b/src/tests/ecoc22/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/tests/ecoc22/deploy_specs.sh b/src/tests/ecoc22/deploy_specs.sh new file mode 100644 index 0000000000000000000000000000000000000000..5e7a7cc3c26f710f87366fd1b194cdc970301e8e --- /dev/null +++ b/src/tests/ecoc22/deploy_specs.sh @@ -0,0 +1,17 @@ +# Set the URL of your local Docker registry where the images will be uploaded to. +export TFS_REGISTRY_IMAGE="http://localhost:32000/tfs/" + +# Set the list of components, separated by spaces, you want to build images for, and deploy. +export TFS_COMPONENTS="context device service automation pathcomp slice compute webui" + +# Set the tag you want to use for your images. +export TFS_IMAGE_TAG="dev" + +# Set the name of the Kubernetes namespace to deploy to. 
+export TFS_K8S_NAMESPACE="tfs" + +# Set additional manifest files to be applied after the deployment +export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml" + +# Set the neew Grafana admin password +export TFS_GRAFANA_PASSWORD="admin123+" diff --git a/src/tests/ecoc22/descriptors_emulated-BigNet.json b/src/tests/ecoc22/descriptors_emulated-BigNet.json new file mode 100644 index 0000000000000000000000000000000000000000..cd038269425755258cea9b0908478d66702ad9cc --- /dev/null +++ b/src/tests/ecoc22/descriptors_emulated-BigNet.json @@ -0,0 +1,1299 @@ +{ + "contexts": [ + { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "service_ids": [], + "topology_ids": [] + } + ], + "devices": [ + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "CE1" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "CE2" + } + }, + "device_operational_status": 1, + "device_type": 
"emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "CE3" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "CE4" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + 
"uuid": "PE1" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "PE2" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "PE3" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + 
"resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "PE4" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/3\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "BB1" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}, {\"sample_types\": 
[], \"type\": \"copper\", \"uuid\": \"2/3\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "BB2" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/3\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "BB6" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/3\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/4\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/5\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/6\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "BB7" + } + }, + "device_operational_status": 1, + "device_type": 
"emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/3\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "BB3" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/3\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "BB5" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + 
"resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/3\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "BB4" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + } + ], + "links": [ + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CE1" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "PE1" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CE1/1/1==CE1/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CE2" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "PE2" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CE2/1/1==CE2/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CE3" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "PE3" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CE3/1/1==CE3/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CE4" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "PE4" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CE4/1/1==CE4/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + 
"device_uuid": { + "uuid": "PE1" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB1" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "PE1/2/1==PE1/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "PE1" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB2" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "PE1/2/2==PE1/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "PE2" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB1" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "PE2/2/1==PE2/1/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "PE2" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB2" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "PE2/2/2==PE2/1/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "PE3" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB5" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "PE3/2/2==PE3/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "PE3" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB4" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "PE3/2/1==PE3/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + 
"uuid": "PE4" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB5" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "PE4/2/2==PE4/1/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "PE4" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB4" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "PE4/2/1==PE4/1/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "BB1" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB2" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "BB1/2/1==BB1/2/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "BB2" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB3" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "BB2/2/1==BB2/2/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "BB3" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB4" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "BB3/2/1==BB3/2/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "BB4" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB5" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "BB4/2/1==BB4/2/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "BB5" + } + }, + 
"endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB6" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "BB5/2/1==BB5/2/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "BB6" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB1" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "BB6/2/1==BB6/2/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "BB1" + } + }, + "endpoint_uuid": { + "uuid": "2/3" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB7" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "BB1/2/3==BB1/2/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "BB2" + } + }, + "endpoint_uuid": { + "uuid": "2/3" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB7" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "BB2/2/3==BB2/2/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "BB3" + } + }, + "endpoint_uuid": { + "uuid": "2/3" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB7" + } + }, + "endpoint_uuid": { + "uuid": "2/3" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "BB3/2/3==BB3/2/3" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "BB4" + } + }, + "endpoint_uuid": { + "uuid": "2/3" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB7" + } + }, + "endpoint_uuid": { + "uuid": "2/4" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "BB4/2/3==BB4/2/4" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "BB5" + } + }, + "endpoint_uuid": { + 
"uuid": "2/3" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB7" + } + }, + "endpoint_uuid": { + "uuid": "2/5" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "BB5/2/3==BB5/2/5" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "BB6" + } + }, + "endpoint_uuid": { + "uuid": "2/3" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "BB7" + } + }, + "endpoint_uuid": { + "uuid": "2/6" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "BB6/2/3==BB6/2/6" + } + } + } + ], + "topologies": [ + { + "device_ids": [], + "link_ids": [], + "topology_id": { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "topology_uuid": { + "uuid": "admin" + } + } + } + ] +} \ No newline at end of file diff --git a/src/tests/ecoc22/descriptors_emulated-DC_CSGW_TN.json b/src/tests/ecoc22/descriptors_emulated-DC_CSGW_TN.json new file mode 100644 index 0000000000000000000000000000000000000000..5f40edac2feef134c02a74b08fcad21d917aae07 --- /dev/null +++ b/src/tests/ecoc22/descriptors_emulated-DC_CSGW_TN.json @@ -0,0 +1,1005 @@ +{ + "contexts": [ + { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "service_ids": [], + "topology_ids": [] + } + ], + "devices": [ + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"int\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "DC1-GW" + } + }, + "device_operational_status": 1, + 
"device_type": "emu-datacenter" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"int\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "DC2-GW" + } + }, + "device_operational_status": 1, + "device_type": "emu-datacenter" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "CS1-GW1" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": 
\"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "CS1-GW2" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "CS2-GW1" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "CS2-GW2" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + 
"action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/3\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "TN-R1" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/3\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "TN-R2" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + 
"resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/3\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "TN-R3" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/3\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "TN-R4" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + } + ], + "links": [ + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "DC1-GW" + } + }, + "endpoint_uuid": { + "uuid": "eth1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "CS1-GW1" + } + }, + "endpoint_uuid": { + "uuid": "10/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "DC1-GW/eth1==CS1-GW1/10/1" + } + } + }, + { + "link_endpoint_ids": [ + { + 
"device_id": { + "device_uuid": { + "uuid": "DC1-GW" + } + }, + "endpoint_uuid": { + "uuid": "eth2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "CS1-GW2" + } + }, + "endpoint_uuid": { + "uuid": "10/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "DC1-GW/eth2==CS1-GW2/10/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "DC2-GW" + } + }, + "endpoint_uuid": { + "uuid": "eth1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "CS2-GW1" + } + }, + "endpoint_uuid": { + "uuid": "10/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "DC2-GW/eth1==CS2-GW1/10/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "DC2-GW" + } + }, + "endpoint_uuid": { + "uuid": "eth2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "CS2-GW2" + } + }, + "endpoint_uuid": { + "uuid": "10/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "DC2-GW/eth2==CS2-GW2/10/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS1-GW1" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R1" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS1-GW1/1/1==TN-R1/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS1-GW2" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R2" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS1-GW2/1/1==TN-R2/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS1-GW1" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R2" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": 
"CS1-GW1/1/2==TN-R2/1/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS1-GW2" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R1" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS1-GW2/1/2==TN-R1/1/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS2-GW1" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R3" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS2-GW1/1/1==TN-R3/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS2-GW2" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R4" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS2-GW2/1/1==TN-R4/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS2-GW1" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R4" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS2-GW1/1/2==TN-R4/1/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS2-GW2" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R3" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS2-GW2/1/2==TN-R3/1/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "TN-R1" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R2" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + } + 
], + "link_id": { + "link_uuid": { + "uuid": "TN-R1/2/1==TN-R2/2/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "TN-R2" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R3" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "TN-R2/2/1==TN-R3/2/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "TN-R3" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R4" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "TN-R3/2/1==TN-R4/2/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "TN-R4" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R1" + } + }, + "endpoint_uuid": { + "uuid": "2/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "TN-R4/2/1==TN-R1/2/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "TN-R1" + } + }, + "endpoint_uuid": { + "uuid": "2/3" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R3" + } + }, + "endpoint_uuid": { + "uuid": "2/3" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "TN-R1/2/3==TN-R3/2/3" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "TN-R2" + } + }, + "endpoint_uuid": { + "uuid": "2/3" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R4" + } + }, + "endpoint_uuid": { + "uuid": "2/3" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "TN-R2/2/3==TN-R4/2/3" + } + } + } + ], + "topologies": [ + { + "device_ids": [], + "link_ids": [], + "topology_id": { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "topology_uuid": { + "uuid": "admin" + } + } + }, + { + "device_ids": [], + 
"link_ids": [], + "topology_id": { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "topology_uuid": { + "uuid": "DC1" + } + } + }, + { + "device_ids": [], + "link_ids": [], + "topology_id": { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "topology_uuid": { + "uuid": "DC2" + } + } + }, + { + "device_ids": [], + "link_ids": [], + "topology_id": { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "topology_uuid": { + "uuid": "CS1" + } + } + }, + { + "device_ids": [], + "link_ids": [], + "topology_id": { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "topology_uuid": { + "uuid": "CS2" + } + } + }, + { + "device_ids": [], + "link_ids": [], + "topology_id": { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "topology_uuid": { + "uuid": "TN" + } + } + } + ] +} \ No newline at end of file diff --git a/src/tests/ecoc22/descriptors_emulated-DC_CSGW_TN_OLS.json b/src/tests/ecoc22/descriptors_emulated-DC_CSGW_TN_OLS.json new file mode 100644 index 0000000000000000000000000000000000000000..8d8e6fde3d3c183688fdc6ec7c3e6498c0d6791a --- /dev/null +++ b/src/tests/ecoc22/descriptors_emulated-DC_CSGW_TN_OLS.json @@ -0,0 +1,985 @@ +{ + "contexts": [ + { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "service_ids": [], + "topology_ids": [] + } + ], + "devices": [ + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"int\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + 
"device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "DC1-GW" + } + }, + "device_operational_status": 1, + "device_type": "emu-datacenter" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"int\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "DC2-GW" + } + }, + "device_operational_status": 1, + "device_type": "emu-datacenter" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "CS1-GW1" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + 
"custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "CS1-GW2" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "CS2-GW1" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "CS2-GW2" + } + }, + 
"device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "TN-R1" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "TN-R2" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": 
[{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "TN-R3" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"2/1\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "TN-R4" + } + }, + "device_operational_status": 1, + "device_type": "emu-packet-router" + }, + { + "device_config": { + "config_rules": [ + { + "action": 1, + "custom": { + "resource_key": "_connect/address", + "resource_value": "127.0.0.1" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/port", + "resource_value": "0" + } + }, + { + "action": 1, + "custom": { + "resource_key": "_connect/settings", + "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"a3adcbbcc03f\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"9329780033f5\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"e8a127ea3ed1\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"ef1c58823a49\"}]}" + } + } + ] + }, + "device_drivers": [ + 0 + ], + "device_endpoints": [], + "device_id": { + "device_uuid": { + "uuid": "TN-OLS" + } + }, + 
"device_operational_status": 1, + "device_type": "emu-open-line-system" + } + ], + "links": [ + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "DC1-GW" + } + }, + "endpoint_uuid": { + "uuid": "eth1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "CS1-GW1" + } + }, + "endpoint_uuid": { + "uuid": "10/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "DC1-GW/eth1==CS1-GW1/10/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "DC1-GW" + } + }, + "endpoint_uuid": { + "uuid": "eth2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "CS1-GW2" + } + }, + "endpoint_uuid": { + "uuid": "10/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "DC1-GW/eth2==CS1-GW2/10/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "DC2-GW" + } + }, + "endpoint_uuid": { + "uuid": "eth1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "CS2-GW1" + } + }, + "endpoint_uuid": { + "uuid": "10/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "DC2-GW/eth1==CS2-GW1/10/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "DC2-GW" + } + }, + "endpoint_uuid": { + "uuid": "eth2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "CS2-GW2" + } + }, + "endpoint_uuid": { + "uuid": "10/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "DC2-GW/eth2==CS2-GW2/10/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS1-GW1" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R1" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS1-GW1/1/1==TN-R1/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS1-GW2" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + }, + { + "device_id": { + 
"device_uuid": { + "uuid": "TN-R2" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS1-GW2/1/1==TN-R2/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS1-GW1" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R2" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS1-GW1/1/2==TN-R2/1/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS1-GW2" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R1" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS1-GW2/1/2==TN-R1/1/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS2-GW1" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R3" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS2-GW1/1/1==TN-R3/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS2-GW2" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R4" + } + }, + "endpoint_uuid": { + "uuid": "1/1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS2-GW2/1/1==TN-R4/1/1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS2-GW1" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R4" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS2-GW1/1/2==TN-R4/1/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "CS2-GW2" + } + }, + "endpoint_uuid": { + 
"uuid": "1/2" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-R3" + } + }, + "endpoint_uuid": { + "uuid": "1/2" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "CS2-GW2/1/2==TN-R3/1/2" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "TN-R1" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-OLS" + } + }, + "endpoint_uuid": { + "uuid": "a3adcbbcc03f" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "TN-R1/2/1==TN-OLS/a3adcbbcc03f" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "TN-R2" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-OLS" + } + }, + "endpoint_uuid": { + "uuid": "9329780033f5" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "TN-R2/2/1==TN-OLS/9329780033f5" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "TN-R3" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-OLS" + } + }, + "endpoint_uuid": { + "uuid": "e8a127ea3ed1" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "TN-R3/2/1==TN-OLS/e8a127ea3ed1" + } + } + }, + { + "link_endpoint_ids": [ + { + "device_id": { + "device_uuid": { + "uuid": "TN-R4" + } + }, + "endpoint_uuid": { + "uuid": "2/1" + } + }, + { + "device_id": { + "device_uuid": { + "uuid": "TN-OLS" + } + }, + "endpoint_uuid": { + "uuid": "ef1c58823a49" + } + } + ], + "link_id": { + "link_uuid": { + "uuid": "TN-R4/2/1==TN-OLS/ef1c58823a49" + } + } + } + ], + "topologies": [ + { + "device_ids": [], + "link_ids": [], + "topology_id": { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "topology_uuid": { + "uuid": "admin" + } + } + }, + { + "device_ids": [], + "link_ids": [], + "topology_id": { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "topology_uuid": { + 
"uuid": "DC1" + } + } + }, + { + "device_ids": [], + "link_ids": [], + "topology_id": { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "topology_uuid": { + "uuid": "DC2" + } + } + }, + { + "device_ids": [], + "link_ids": [], + "topology_id": { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "topology_uuid": { + "uuid": "CS1" + } + } + }, + { + "device_ids": [], + "link_ids": [], + "topology_id": { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "topology_uuid": { + "uuid": "CS2" + } + } + }, + { + "device_ids": [], + "link_ids": [], + "topology_id": { + "context_id": { + "context_uuid": { + "uuid": "admin" + } + }, + "topology_uuid": { + "uuid": "TN" + } + } + } + ] +} \ No newline at end of file diff --git a/src/tests/ecoc22/redeploy.sh b/src/tests/ecoc22/redeploy.sh new file mode 100755 index 0000000000000000000000000000000000000000..3f3986debb9aec57e7bc7f67b549b960679a987f --- /dev/null +++ b/src/tests/ecoc22/redeploy.sh @@ -0,0 +1,4 @@ +#!/bin/bash +source ecoc22/deploy_specs.sh +./deploy.sh +source tfs_runtime_env_vars.sh diff --git a/src/tests/ecoc22/run_test_01_bootstrap.sh b/src/tests/ecoc22/run_test_01_bootstrap.sh new file mode 100755 index 0000000000000000000000000000000000000000..819991d78a499c6d6e4a10e96f6439ee5b56ed8d --- /dev/null +++ b/src/tests/ecoc22/run_test_01_bootstrap.sh @@ -0,0 +1,17 @@ +#!/bin/bash +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +source tfs_runtime_env_vars.sh +pytest --verbose src/tests/ecoc22/tests/test_functional_bootstrap.py diff --git a/src/tests/ecoc22/run_test_02_create_service.sh b/src/tests/ecoc22/run_test_02_create_service.sh new file mode 100755 index 0000000000000000000000000000000000000000..5a54d39d496e203ee669efda636067dcc1aa27a9 --- /dev/null +++ b/src/tests/ecoc22/run_test_02_create_service.sh @@ -0,0 +1,17 @@ +#!/bin/bash +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +source tfs_runtime_env_vars.sh +pytest --verbose src/tests/ecoc22/tests/test_functional_create_service.py diff --git a/src/tests/ecoc22/run_test_03_delete_service.sh b/src/tests/ecoc22/run_test_03_delete_service.sh new file mode 100755 index 0000000000000000000000000000000000000000..900e09b658c1a73664dd28dc60ef6a50a9e68570 --- /dev/null +++ b/src/tests/ecoc22/run_test_03_delete_service.sh @@ -0,0 +1,17 @@ +#!/bin/bash +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +source tfs_runtime_env_vars.sh +pytest --verbose src/tests/ecoc22/tests/test_functional_delete_service.py diff --git a/src/tests/ecoc22/run_test_04_cleanup.sh b/src/tests/ecoc22/run_test_04_cleanup.sh new file mode 100755 index 0000000000000000000000000000000000000000..4e0622e6b22d470d842d99bb4202e23e88b72982 --- /dev/null +++ b/src/tests/ecoc22/run_test_04_cleanup.sh @@ -0,0 +1,17 @@ +#!/bin/bash +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +source tfs_runtime_env_vars.sh +pytest --verbose src/tests/ecoc22/tests/test_functional_cleanup.py diff --git a/src/tests/ecoc22/run_tests_and_coverage.sh b/src/tests/ecoc22/run_tests_and_coverage.sh new file mode 100755 index 0000000000000000000000000000000000000000..835867896020f2b94e0797bdf60c85af2228eda2 --- /dev/null +++ b/src/tests/ecoc22/run_tests_and_coverage.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +PROJECTDIR=`pwd` + +cd $PROJECTDIR/src +RCFILE=$PROJECTDIR/coverage/.coveragerc +COVERAGEFILE=$PROJECTDIR/coverage/.coverage + +# Configure the correct folder on the .coveragerc file +cat $PROJECTDIR/coverage/.coveragerc.template | sed s+~/teraflow/controller+$PROJECTDIR+g > $RCFILE + +# Destroy old coverage file +rm -f $COVERAGEFILE + +# Force a flush of Context database +kubectl --namespace $TFS_K8S_NAMESPACE exec -it deployment/contextservice --container redis -- redis-cli FLUSHALL + +# Run functional tests and analyze code coverage at the same time +coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ + tests/ecoc22/tests/test_functional_bootstrap.py + +coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ + tests/ecoc22/tests/test_functional_create_service.py + +coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ + tests/ecoc22/tests/test_functional_delete_service.py + +coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ + tests/ecoc22/tests/test_functional_cleanup.py diff --git a/src/tests/ecoc22/tests/.gitignore b/src/tests/ecoc22/tests/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..6b97d6fe3ad32f39097745229ab7f547f26ecb12 --- /dev/null +++ b/src/tests/ecoc22/tests/.gitignore @@ -0,0 +1 @@ +# Add here your files containing confidential testbed details such as IP addresses, ports, usernames, passwords, etc. diff --git a/src/tests/ecoc22/tests/BuildDescriptors.py b/src/tests/ecoc22/tests/BuildDescriptors.py new file mode 100644 index 0000000000000000000000000000000000000000..b0075c0639c70092ed60bafd06c9f62b581faa33 --- /dev/null +++ b/src/tests/ecoc22/tests/BuildDescriptors.py @@ -0,0 +1,71 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Execution: +# $ cd src +# $ python -m tests.ecoc22.tests.BuildDescriptors dc-csgw-tn +# $ python -m tests.ecoc22.tests.BuildDescriptors dc-csgw-tn-ols +# $ python -m tests.ecoc22.tests.BuildDescriptors bignet + +import copy, json, os, sys +from enum import Enum +from typing import Dict, Tuple + +class Scenario(Enum): + BIGNET = 'bignet' + DC_CSGW_TN = 'dc-csgw-tn' + DC_CSGW_TN_OLS = 'dc-csgw-tn-ols' + +scenario = None if len(sys.argv) < 2 else sys.argv[1].lower() + +if scenario == Scenario.BIGNET.value: + from .Objects_BigNet import CONTEXTS, DEVICES, LINKS, TOPOLOGIES + FILENAME = 'tests/ecoc22/descriptors_emulated-BigNet.json' +elif scenario == Scenario.DC_CSGW_TN.value: + os.environ['ADD_CONNECT_RULES_TO_DEVICES'] = 'TRUE' + from .Objects_DC_CSGW_TN import CONTEXTS, DEVICES, LINKS, TOPOLOGIES + FILENAME = 'tests/ecoc22/descriptors_emulated-DC_CSGW_TN.json' +elif scenario == Scenario.DC_CSGW_TN_OLS.value: + os.environ['ADD_CONNECT_RULES_TO_DEVICES'] = 'TRUE' + from .Objects_DC_CSGW_TN_OLS import CONTEXTS, DEVICES, LINKS, TOPOLOGIES + FILENAME = 'tests/ecoc22/descriptors_emulated-DC_CSGW_TN_OLS.json' +else: + scenarios = str([s.value for s in Scenario]) + raise Exception('Unsupported Scenario({:s}), choices are: {:s}'.format(scenario, scenarios)) + +def main(): + with open(FILENAME, 'w', encoding='UTF-8') as f: + devices = [] + for item in DEVICES: + if isinstance(item, Dict): + device = item + elif isinstance(item, Tuple) and len(item) == 2: + device,connect_rules = item + else: + raise Exception('Wrongly formatted item: {:s}'.format(str(item))) + 
device = copy.deepcopy(device) + if len(item) == 2: + device['device_config']['config_rules'].extend(connect_rules) + devices.append(device) + + f.write(json.dumps({ + 'contexts': CONTEXTS, + 'topologies': TOPOLOGIES, + 'devices': devices, + 'links': LINKS + }, sort_keys=True, indent=4)) + return 0 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/src/tests/ecoc22/tests/Credentials.py b/src/tests/ecoc22/tests/Credentials.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/tests/ecoc22/tests/Fixtures.py b/src/tests/ecoc22/tests/Fixtures.py new file mode 100644 index 0000000000000000000000000000000000000000..70b41bdcb159552daa3dcf0c041a3713e2d1c821 --- /dev/null +++ b/src/tests/ecoc22/tests/Fixtures.py @@ -0,0 +1,26 @@ +import pytest +from common.Settings import get_setting +from compute.tests.mock_osm.MockOSM import MockOSM +from context.client.ContextClient import ContextClient +from device.client.DeviceClient import DeviceClient +#from .Objects_BigNet import WIM_MAPPING, WIM_PASSWORD, WIM_USERNAME +from .Objects_DC_CSGW_TN import WIM_MAPPING, WIM_PASSWORD, WIM_USERNAME +#from .Objects_DC_CSGW_TN_OLS import WIM_MAPPING, WIM_PASSWORD, WIM_USERNAME + +@pytest.fixture(scope='session') +def context_client(): + _client = ContextClient() + yield _client + _client.close() + +@pytest.fixture(scope='session') +def device_client(): + _client = DeviceClient() + yield _client + _client.close() + +@pytest.fixture(scope='session') +def osm_wim(): + wim_url = 'http://{:s}:{:s}'.format( + get_setting('COMPUTESERVICE_SERVICE_HOST'), str(get_setting('COMPUTESERVICE_SERVICE_PORT_HTTP'))) + return MockOSM(wim_url, WIM_MAPPING, WIM_USERNAME, WIM_PASSWORD) diff --git a/src/tests/ecoc22/tests/LoadDescriptors.py b/src/tests/ecoc22/tests/LoadDescriptors.py new file mode 100644 index 0000000000000000000000000000000000000000..bd7e48366795d47624f1b8e295cbe6fa105bf8c7 --- /dev/null +++ 
b/src/tests/ecoc22/tests/LoadDescriptors.py @@ -0,0 +1,38 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json, logging, sys +from common.Settings import get_setting +from context.client.ContextClient import ContextClient +from common.proto.context_pb2 import Context, Device, Link, Topology +from device.client.DeviceClient import DeviceClient + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.DEBUG) + +def main(): + context_client = ContextClient() + device_client = DeviceClient() + + with open('tests/ecoc22/descriptors.json', 'r', encoding='UTF-8') as f: + descriptors = json.loads(f.read()) + + for context in descriptors['contexts' ]: context_client.SetContext (Context (**context )) + for topology in descriptors['topologies']: context_client.SetTopology(Topology(**topology)) + for device in descriptors['devices' ]: device_client .AddDevice (Device (**device )) + for link in descriptors['links' ]: context_client.SetLink (Link (**link )) + return 0 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/src/tests/ecoc22/tests/Objects_BigNet.py b/src/tests/ecoc22/tests/Objects_BigNet.py new file mode 100644 index 0000000000000000000000000000000000000000..592376ff9dbaebbf4d8d02b04189e5d4f24584e3 --- /dev/null +++ b/src/tests/ecoc22/tests/Objects_BigNet.py @@ -0,0 +1,302 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID +from common.tools.object_factory.Context import json_context, json_context_id +from common.tools.object_factory.Device import ( + json_device_emulated_connect_rules, json_device_emulated_datacenter_disabled, + json_device_emulated_packet_router_disabled, json_device_id) +from common.tools.object_factory.Topology import json_topology, json_topology_id +from .Tools import compose_bearer, compose_service_endpoint_id, json_endpoint_ids, link + +# ----- Context -------------------------------------------------------------------------------------------------------- +CONTEXT_ID = json_context_id(DEFAULT_CONTEXT_UUID) +CONTEXT = json_context(DEFAULT_CONTEXT_UUID) + + +# ----- Topology ------------------------------------------------------------------------------------------------------- +TOPOLOGY_ID = json_topology_id(DEFAULT_TOPOLOGY_UUID, context_id=CONTEXT_ID) +TOPOLOGY = json_topology(DEFAULT_TOPOLOGY_UUID, context_id=CONTEXT_ID) + + +# ----- Customer Equipment (CE) Devices -------------------------------------------------------------------------------- +DEVICE_CE1_UUID = 'CE1' +DEVICE_CE1_ENDPOINT_DEFS = [('1/1', 'copper', [])] +DEVICE_CE1_ID = json_device_id(DEVICE_CE1_UUID) +DEVICE_CE1_ENDPOINT_IDS = json_endpoint_ids(DEVICE_CE1_ID, DEVICE_CE1_ENDPOINT_DEFS) +DEVICE_CE1 = json_device_emulated_packet_router_disabled(DEVICE_CE1_UUID) +ENDPOINT_ID_CE1_1_1 = 
DEVICE_CE1_ENDPOINT_IDS[0] +DEVICE_CE1_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_CE1_ENDPOINT_DEFS) + +DEVICE_CE2_UUID = 'CE2' +DEVICE_CE2_ENDPOINT_DEFS = [('1/1', 'copper', [])] +DEVICE_CE2_ID = json_device_id(DEVICE_CE2_UUID) +DEVICE_CE2_ENDPOINT_IDS = json_endpoint_ids(DEVICE_CE2_ID, DEVICE_CE2_ENDPOINT_DEFS) +DEVICE_CE2 = json_device_emulated_packet_router_disabled(DEVICE_CE2_UUID) +ENDPOINT_ID_CE2_1_1 = DEVICE_CE2_ENDPOINT_IDS[0] +DEVICE_CE2_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_CE2_ENDPOINT_DEFS) + +DEVICE_CE3_UUID = 'CE3' +DEVICE_CE3_ENDPOINT_DEFS = [('1/1', 'copper', [])] +DEVICE_CE3_ID = json_device_id(DEVICE_CE3_UUID) +DEVICE_CE3_ENDPOINT_IDS = json_endpoint_ids(DEVICE_CE3_ID, DEVICE_CE3_ENDPOINT_DEFS) +DEVICE_CE3 = json_device_emulated_packet_router_disabled(DEVICE_CE3_UUID) +ENDPOINT_ID_CE3_1_1 = DEVICE_CE3_ENDPOINT_IDS[0] +DEVICE_CE3_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_CE3_ENDPOINT_DEFS) + +DEVICE_CE4_UUID = 'CE4' +DEVICE_CE4_ENDPOINT_DEFS = [('1/1', 'copper', [])] +DEVICE_CE4_ID = json_device_id(DEVICE_CE4_UUID) +DEVICE_CE4_ENDPOINT_IDS = json_endpoint_ids(DEVICE_CE4_ID, DEVICE_CE4_ENDPOINT_DEFS) +DEVICE_CE4 = json_device_emulated_packet_router_disabled(DEVICE_CE4_UUID) +ENDPOINT_ID_CE4_1_1 = DEVICE_CE4_ENDPOINT_IDS[0] +DEVICE_CE4_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_CE4_ENDPOINT_DEFS) + +# ----- Provider Equipment (PE) Devices -------------------------------------------------------------------------------- +DEVICE_PE1_UUID = 'PE1' +DEVICE_PE1_ENDPOINT_DEFS = [('1/1', 'copper', []), + ('2/1', 'copper', []), ('2/2', 'copper', [])] +DEVICE_PE1_ID = json_device_id(DEVICE_PE1_UUID) +DEVICE_PE1_ENDPOINT_IDS = json_endpoint_ids(DEVICE_PE1_ID, DEVICE_PE1_ENDPOINT_DEFS) +DEVICE_PE1 = json_device_emulated_packet_router_disabled(DEVICE_PE1_UUID) +ENDPOINT_ID_PE1_1_1 = DEVICE_PE1_ENDPOINT_IDS[0] +ENDPOINT_ID_PE1_2_1 = DEVICE_PE1_ENDPOINT_IDS[1] +ENDPOINT_ID_PE1_2_2 = 
DEVICE_PE1_ENDPOINT_IDS[2] +DEVICE_PE1_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_PE1_ENDPOINT_DEFS) + +DEVICE_PE2_UUID = 'PE2' +DEVICE_PE2_ENDPOINT_DEFS = [('1/1', 'copper', []), + ('2/1', 'copper', []), ('2/2', 'copper', [])] +DEVICE_PE2_ID = json_device_id(DEVICE_PE2_UUID) +DEVICE_PE2_ENDPOINT_IDS = json_endpoint_ids(DEVICE_PE2_ID, DEVICE_PE2_ENDPOINT_DEFS) +DEVICE_PE2 = json_device_emulated_packet_router_disabled(DEVICE_PE2_UUID) +ENDPOINT_ID_PE2_1_1 = DEVICE_PE2_ENDPOINT_IDS[0] +ENDPOINT_ID_PE2_2_1 = DEVICE_PE2_ENDPOINT_IDS[1] +ENDPOINT_ID_PE2_2_2 = DEVICE_PE2_ENDPOINT_IDS[2] +DEVICE_PE2_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_PE2_ENDPOINT_DEFS) + +DEVICE_PE3_UUID = 'PE3' +DEVICE_PE3_ENDPOINT_DEFS = [('1/1', 'copper', []), + ('2/1', 'copper', []), ('2/2', 'copper', [])] +DEVICE_PE3_ID = json_device_id(DEVICE_PE3_UUID) +DEVICE_PE3_ENDPOINT_IDS = json_endpoint_ids(DEVICE_PE3_ID, DEVICE_PE3_ENDPOINT_DEFS) +DEVICE_PE3 = json_device_emulated_packet_router_disabled(DEVICE_PE3_UUID) +ENDPOINT_ID_PE3_1_1 = DEVICE_PE3_ENDPOINT_IDS[0] +ENDPOINT_ID_PE3_2_1 = DEVICE_PE3_ENDPOINT_IDS[1] +ENDPOINT_ID_PE3_2_2 = DEVICE_PE3_ENDPOINT_IDS[2] +DEVICE_PE3_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_PE3_ENDPOINT_DEFS) + +DEVICE_PE4_UUID = 'PE4' +DEVICE_PE4_ENDPOINT_DEFS = [('1/1', 'copper', []), + ('2/1', 'copper', []), ('2/2', 'copper', [])] +DEVICE_PE4_ID = json_device_id(DEVICE_PE4_UUID) +DEVICE_PE4_ENDPOINT_IDS = json_endpoint_ids(DEVICE_PE4_ID, DEVICE_PE4_ENDPOINT_DEFS) +DEVICE_PE4 = json_device_emulated_packet_router_disabled(DEVICE_PE4_UUID) +ENDPOINT_ID_PE4_1_1 = DEVICE_PE4_ENDPOINT_IDS[0] +ENDPOINT_ID_PE4_2_1 = DEVICE_PE4_ENDPOINT_IDS[1] +ENDPOINT_ID_PE4_2_2 = DEVICE_PE4_ENDPOINT_IDS[2] +DEVICE_PE4_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_PE4_ENDPOINT_DEFS) + +# ----- BackBone (BB) Devices ------------------------------------------------------------------------------------------ +DEVICE_BB1_UUID = 'BB1' 
+DEVICE_BB1_ENDPOINT_DEFS = [('1/1', 'copper', []), ('1/2', 'copper', []), + ('2/1', 'copper', []), ('2/2', 'copper', []), ('2/3', 'copper', [])] +DEVICE_BB1_ID = json_device_id(DEVICE_BB1_UUID) +DEVICE_BB1_ENDPOINT_IDS = json_endpoint_ids(DEVICE_BB1_ID, DEVICE_BB1_ENDPOINT_DEFS) +DEVICE_BB1 = json_device_emulated_packet_router_disabled(DEVICE_BB1_UUID) +ENDPOINT_ID_BB1_1_1 = DEVICE_BB1_ENDPOINT_IDS[0] +ENDPOINT_ID_BB1_1_2 = DEVICE_BB1_ENDPOINT_IDS[1] +ENDPOINT_ID_BB1_2_1 = DEVICE_BB1_ENDPOINT_IDS[2] +ENDPOINT_ID_BB1_2_2 = DEVICE_BB1_ENDPOINT_IDS[3] +ENDPOINT_ID_BB1_2_3 = DEVICE_BB1_ENDPOINT_IDS[4] +DEVICE_BB1_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_BB1_ENDPOINT_DEFS) + +DEVICE_BB2_UUID = 'BB2' +DEVICE_BB2_ENDPOINT_DEFS = [('1/1', 'copper', []), ('1/2', 'copper', []), + ('2/1', 'copper', []), ('2/2', 'copper', []), ('2/3', 'copper', [])] +DEVICE_BB2_ID = json_device_id(DEVICE_BB2_UUID) +DEVICE_BB2_ENDPOINT_IDS = json_endpoint_ids(DEVICE_BB2_ID, DEVICE_BB2_ENDPOINT_DEFS) +DEVICE_BB2 = json_device_emulated_packet_router_disabled(DEVICE_BB2_UUID) +ENDPOINT_ID_BB2_1_1 = DEVICE_BB2_ENDPOINT_IDS[0] +ENDPOINT_ID_BB2_1_2 = DEVICE_BB2_ENDPOINT_IDS[1] +ENDPOINT_ID_BB2_2_1 = DEVICE_BB2_ENDPOINT_IDS[2] +ENDPOINT_ID_BB2_2_2 = DEVICE_BB2_ENDPOINT_IDS[3] +ENDPOINT_ID_BB2_2_3 = DEVICE_BB2_ENDPOINT_IDS[4] +DEVICE_BB2_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_BB2_ENDPOINT_DEFS) + +DEVICE_BB3_UUID = 'BB3' +DEVICE_BB3_ENDPOINT_DEFS = [('2/1', 'copper', []), ('2/2', 'copper', []), ('2/3', 'copper', [])] +DEVICE_BB3_ID = json_device_id(DEVICE_BB3_UUID) +DEVICE_BB3_ENDPOINT_IDS = json_endpoint_ids(DEVICE_BB3_ID, DEVICE_BB3_ENDPOINT_DEFS) +DEVICE_BB3 = json_device_emulated_packet_router_disabled(DEVICE_BB3_UUID) +ENDPOINT_ID_BB3_2_1 = DEVICE_BB3_ENDPOINT_IDS[0] +ENDPOINT_ID_BB3_2_2 = DEVICE_BB3_ENDPOINT_IDS[1] +ENDPOINT_ID_BB3_2_3 = DEVICE_BB3_ENDPOINT_IDS[2] +DEVICE_BB3_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_BB3_ENDPOINT_DEFS) + 
+DEVICE_BB4_UUID = 'BB4' +DEVICE_BB4_ENDPOINT_DEFS = [('1/1', 'copper', []), ('1/2', 'copper', []), + ('2/1', 'copper', []), ('2/2', 'copper', []), ('2/3', 'copper', [])] +DEVICE_BB4_ID = json_device_id(DEVICE_BB4_UUID) +DEVICE_BB4_ENDPOINT_IDS = json_endpoint_ids(DEVICE_BB4_ID, DEVICE_BB4_ENDPOINT_DEFS) +DEVICE_BB4 = json_device_emulated_packet_router_disabled(DEVICE_BB4_UUID) +ENDPOINT_ID_BB4_1_1 = DEVICE_BB4_ENDPOINT_IDS[0] +ENDPOINT_ID_BB4_1_2 = DEVICE_BB4_ENDPOINT_IDS[1] +ENDPOINT_ID_BB4_2_1 = DEVICE_BB4_ENDPOINT_IDS[2] +ENDPOINT_ID_BB4_2_2 = DEVICE_BB4_ENDPOINT_IDS[3] +ENDPOINT_ID_BB4_2_3 = DEVICE_BB4_ENDPOINT_IDS[4] +DEVICE_BB4_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_BB4_ENDPOINT_DEFS) + +DEVICE_BB5_UUID = 'BB5' +DEVICE_BB5_ENDPOINT_DEFS = [('1/1', 'copper', []), ('1/2', 'copper', []), + ('2/1', 'copper', []), ('2/2', 'copper', []), ('2/3', 'copper', [])] +DEVICE_BB5_ID = json_device_id(DEVICE_BB5_UUID) +DEVICE_BB5_ENDPOINT_IDS = json_endpoint_ids(DEVICE_BB5_ID, DEVICE_BB5_ENDPOINT_DEFS) +DEVICE_BB5 = json_device_emulated_packet_router_disabled(DEVICE_BB5_UUID) +ENDPOINT_ID_BB5_1_1 = DEVICE_BB5_ENDPOINT_IDS[0] +ENDPOINT_ID_BB5_1_2 = DEVICE_BB5_ENDPOINT_IDS[1] +ENDPOINT_ID_BB5_2_1 = DEVICE_BB5_ENDPOINT_IDS[2] +ENDPOINT_ID_BB5_2_2 = DEVICE_BB5_ENDPOINT_IDS[3] +ENDPOINT_ID_BB5_2_3 = DEVICE_BB5_ENDPOINT_IDS[4] +DEVICE_BB5_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_BB5_ENDPOINT_DEFS) + +DEVICE_BB6_UUID = 'BB6' +DEVICE_BB6_ENDPOINT_DEFS = [('2/1', 'copper', []), ('2/2', 'copper', []), ('2/3', 'copper', [])] +DEVICE_BB6_ID = json_device_id(DEVICE_BB6_UUID) +DEVICE_BB6_ENDPOINT_IDS = json_endpoint_ids(DEVICE_BB6_ID, DEVICE_BB6_ENDPOINT_DEFS) +DEVICE_BB6 = json_device_emulated_packet_router_disabled(DEVICE_BB6_UUID) +ENDPOINT_ID_BB6_2_1 = DEVICE_BB6_ENDPOINT_IDS[0] +ENDPOINT_ID_BB6_2_2 = DEVICE_BB6_ENDPOINT_IDS[1] +ENDPOINT_ID_BB6_2_3 = DEVICE_BB6_ENDPOINT_IDS[2] +DEVICE_BB6_CONNECT_RULES = 
json_device_emulated_connect_rules(DEVICE_BB6_ENDPOINT_DEFS) + +DEVICE_BB7_UUID = 'BB7' +DEVICE_BB7_ENDPOINT_DEFS = [('2/1', 'copper', []), ('2/2', 'copper', []), ('2/3', 'copper', []), ('2/4', 'copper', []), + ('2/5', 'copper', []), ('2/6', 'copper', [])] +DEVICE_BB7_ID = json_device_id(DEVICE_BB7_UUID) +DEVICE_BB7_ENDPOINT_IDS = json_endpoint_ids(DEVICE_BB7_ID, DEVICE_BB7_ENDPOINT_DEFS) +DEVICE_BB7 = json_device_emulated_packet_router_disabled(DEVICE_BB7_UUID) +ENDPOINT_ID_BB7_2_1 = DEVICE_BB7_ENDPOINT_IDS[0] +ENDPOINT_ID_BB7_2_2 = DEVICE_BB7_ENDPOINT_IDS[1] +ENDPOINT_ID_BB7_2_3 = DEVICE_BB7_ENDPOINT_IDS[2] +ENDPOINT_ID_BB7_2_4 = DEVICE_BB7_ENDPOINT_IDS[3] +ENDPOINT_ID_BB7_2_5 = DEVICE_BB7_ENDPOINT_IDS[4] +ENDPOINT_ID_BB7_2_6 = DEVICE_BB7_ENDPOINT_IDS[5] +DEVICE_BB7_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_BB7_ENDPOINT_DEFS) + + +# ----- Links ---------------------------------------------------------------------------------------------------------- +LINK_CE1_PE1_UUID, LINK_CE1_PE1_ID, LINK_CE1_PE1 = link(ENDPOINT_ID_CE1_1_1, ENDPOINT_ID_PE1_1_1) +LINK_CE2_PE2_UUID, LINK_CE2_PE2_ID, LINK_CE2_PE2 = link(ENDPOINT_ID_CE2_1_1, ENDPOINT_ID_PE2_1_1) +LINK_CE3_PE3_UUID, LINK_CE3_PE3_ID, LINK_CE3_PE3 = link(ENDPOINT_ID_CE3_1_1, ENDPOINT_ID_PE3_1_1) +LINK_CE4_PE4_UUID, LINK_CE4_PE4_ID, LINK_CE4_PE4 = link(ENDPOINT_ID_CE4_1_1, ENDPOINT_ID_PE4_1_1) + +LINK_PE1_BB1_UUID, LINK_PE1_BB1_ID, LINK_PE1_BB1 = link(ENDPOINT_ID_PE1_2_1, ENDPOINT_ID_BB1_1_1) +LINK_PE1_BB2_UUID, LINK_PE1_BB2_ID, LINK_PE1_BB2 = link(ENDPOINT_ID_PE1_2_2, ENDPOINT_ID_BB2_1_1) +LINK_PE2_BB1_UUID, LINK_PE2_BB1_ID, LINK_PE2_BB1 = link(ENDPOINT_ID_PE2_2_1, ENDPOINT_ID_BB1_1_2) +LINK_PE2_BB2_UUID, LINK_PE2_BB2_ID, LINK_PE2_BB2 = link(ENDPOINT_ID_PE2_2_2, ENDPOINT_ID_BB2_1_2) + +LINK_PE3_BB4_UUID, LINK_PE3_BB4_ID, LINK_PE3_BB4 = link(ENDPOINT_ID_PE3_2_1, ENDPOINT_ID_BB4_1_1) +LINK_PE3_BB5_UUID, LINK_PE3_BB5_ID, LINK_PE3_BB5 = link(ENDPOINT_ID_PE3_2_2, ENDPOINT_ID_BB5_1_1) +LINK_PE4_BB4_UUID, 
LINK_PE4_BB4_ID, LINK_PE4_BB4 = link(ENDPOINT_ID_PE4_2_1, ENDPOINT_ID_BB4_1_2) +LINK_PE4_BB5_UUID, LINK_PE4_BB5_ID, LINK_PE4_BB5 = link(ENDPOINT_ID_PE4_2_2, ENDPOINT_ID_BB5_1_2) + +LINK_BB1_BB2_UUID, LINK_BB1_BB2_ID, LINK_BB1_BB2 = link(ENDPOINT_ID_BB1_2_1, ENDPOINT_ID_BB2_2_2) +LINK_BB2_BB3_UUID, LINK_BB2_BB3_ID, LINK_BB2_BB3 = link(ENDPOINT_ID_BB2_2_1, ENDPOINT_ID_BB3_2_2) +LINK_BB3_BB4_UUID, LINK_BB3_BB4_ID, LINK_BB3_BB4 = link(ENDPOINT_ID_BB3_2_1, ENDPOINT_ID_BB4_2_2) +LINK_BB4_BB5_UUID, LINK_BB4_BB5_ID, LINK_BB4_BB5 = link(ENDPOINT_ID_BB4_2_1, ENDPOINT_ID_BB5_2_2) +LINK_BB5_BB6_UUID, LINK_BB5_BB6_ID, LINK_BB5_BB6 = link(ENDPOINT_ID_BB5_2_1, ENDPOINT_ID_BB6_2_2) +LINK_BB6_BB1_UUID, LINK_BB6_BB1_ID, LINK_BB6_BB1 = link(ENDPOINT_ID_BB6_2_1, ENDPOINT_ID_BB1_2_2) + +LINK_BB1_BB7_UUID, LINK_BB1_BB7_ID, LINK_BB1_BB7 = link(ENDPOINT_ID_BB1_2_3, ENDPOINT_ID_BB7_2_1) +LINK_BB2_BB7_UUID, LINK_BB2_BB7_ID, LINK_BB2_BB7 = link(ENDPOINT_ID_BB2_2_3, ENDPOINT_ID_BB7_2_2) +LINK_BB3_BB7_UUID, LINK_BB3_BB7_ID, LINK_BB3_BB7 = link(ENDPOINT_ID_BB3_2_3, ENDPOINT_ID_BB7_2_3) +LINK_BB4_BB7_UUID, LINK_BB4_BB7_ID, LINK_BB4_BB7 = link(ENDPOINT_ID_BB4_2_3, ENDPOINT_ID_BB7_2_4) +LINK_BB5_BB7_UUID, LINK_BB5_BB7_ID, LINK_BB5_BB7 = link(ENDPOINT_ID_BB5_2_3, ENDPOINT_ID_BB7_2_5) +LINK_BB6_BB7_UUID, LINK_BB6_BB7_ID, LINK_BB6_BB7 = link(ENDPOINT_ID_BB6_2_3, ENDPOINT_ID_BB7_2_6) + + +# ----- WIM Service Settings ------------------------------------------------------------------------------------------- +WIM_USERNAME = 'admin' +WIM_PASSWORD = 'admin' + +def mapping(site_id, ce_endpoint_id, pe_device_id, priority=None, redundant=[]): + ce_device_uuid = ce_endpoint_id['device_id']['device_uuid']['uuid'] + ce_endpoint_uuid = ce_endpoint_id['endpoint_uuid']['uuid'] + pe_device_uuid = pe_device_id['device_uuid']['uuid'] + service_endpoint_id = '{:s}-{:s}-{:s}'.format(site_id, ce_device_uuid, ce_endpoint_uuid) + bearer = '{:s}-{:s}'.format(ce_device_uuid, pe_device_uuid) + _mapping = { + 
'service_endpoint_id': service_endpoint_id, + 'datacenter_id': site_id, 'device_id': ce_device_uuid, 'device_interface_id': ce_endpoint_uuid, + 'service_mapping_info': { + 'site-id': site_id, + 'bearer': {'bearer-reference': bearer}, + } + } + if priority is not None: _mapping['service_mapping_info']['priority'] = priority + if len(redundant) > 0: _mapping['service_mapping_info']['redundant'] = redundant + return service_endpoint_id, _mapping + +WIM_SEP_DC1_PRI, WIM_MAP_DC1_PRI = mapping('DC1', ENDPOINT_ID_CE1_1_1, DEVICE_PE1_ID, priority=10, redundant=['DC1-CE2-1/1']) +WIM_SEP_DC1_SEC, WIM_MAP_DC1_SEC = mapping('DC1', ENDPOINT_ID_CE2_1_1, DEVICE_PE2_ID, priority=20) +WIM_SEP_DC2_PRI, WIM_MAP_DC2_PRI = mapping('DC2', ENDPOINT_ID_CE3_1_1, DEVICE_PE3_ID, priority=10, redundant=['DC2-CE4-1/1']) +WIM_SEP_DC2_SEC, WIM_MAP_DC2_SEC = mapping('DC2', ENDPOINT_ID_CE4_1_1, DEVICE_PE4_ID, priority=20) + +WIM_MAPPING = [WIM_MAP_DC1_PRI, WIM_MAP_DC1_SEC, WIM_MAP_DC2_PRI, WIM_MAP_DC2_SEC] + +WIM_SRV_VLAN_ID = 300 +WIM_SERVICE_TYPE = 'ELAN' +WIM_SERVICE_CONNECTION_POINTS = [ + {'service_endpoint_id': WIM_SEP_DC1_PRI, + 'service_endpoint_encapsulation_type': 'dot1q', + 'service_endpoint_encapsulation_info': {'vlan': WIM_SRV_VLAN_ID}}, + {'service_endpoint_id': WIM_SEP_DC2_PRI, + 'service_endpoint_encapsulation_type': 'dot1q', + 'service_endpoint_encapsulation_info': {'vlan': WIM_SRV_VLAN_ID}}, +] + + +# ----- Object Collections --------------------------------------------------------------------------------------------- + +CONTEXTS = [CONTEXT] +TOPOLOGIES = [TOPOLOGY] + +DEVICES = [ + (DEVICE_CE1, DEVICE_CE1_CONNECT_RULES), + (DEVICE_CE2, DEVICE_CE2_CONNECT_RULES), + (DEVICE_CE3, DEVICE_CE3_CONNECT_RULES), + (DEVICE_CE4, DEVICE_CE4_CONNECT_RULES), + + (DEVICE_PE1, DEVICE_PE1_CONNECT_RULES), + (DEVICE_PE2, DEVICE_PE2_CONNECT_RULES), + (DEVICE_PE3, DEVICE_PE3_CONNECT_RULES), + (DEVICE_PE4, DEVICE_PE4_CONNECT_RULES), + + (DEVICE_BB1, DEVICE_BB1_CONNECT_RULES), + (DEVICE_BB2, 
DEVICE_BB2_CONNECT_RULES), + (DEVICE_BB6, DEVICE_BB6_CONNECT_RULES), + (DEVICE_BB7, DEVICE_BB7_CONNECT_RULES), + (DEVICE_BB3, DEVICE_BB3_CONNECT_RULES), + (DEVICE_BB5, DEVICE_BB5_CONNECT_RULES), + (DEVICE_BB4, DEVICE_BB4_CONNECT_RULES), +] + +LINKS = [ + LINK_CE1_PE1, LINK_CE2_PE2, LINK_CE3_PE3, LINK_CE4_PE4, + LINK_PE1_BB1, LINK_PE1_BB2, LINK_PE2_BB1, LINK_PE2_BB2, + LINK_PE3_BB5, LINK_PE3_BB4, LINK_PE4_BB5, LINK_PE4_BB4, + LINK_BB1_BB2, LINK_BB2_BB3, LINK_BB3_BB4, LINK_BB4_BB5, LINK_BB5_BB6, LINK_BB6_BB1, + LINK_BB1_BB7, LINK_BB2_BB7, LINK_BB3_BB7, LINK_BB4_BB7, LINK_BB5_BB7, LINK_BB6_BB7, +] diff --git a/src/tests/ecoc22/tests/Objects_DC_CSGW_TN.py b/src/tests/ecoc22/tests/Objects_DC_CSGW_TN.py new file mode 100644 index 0000000000000000000000000000000000000000..229e3d5fe3cee54fb7295ac0049507ec4e348a04 --- /dev/null +++ b/src/tests/ecoc22/tests/Objects_DC_CSGW_TN.py @@ -0,0 +1,227 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID +from common.tools.object_factory.Context import json_context, json_context_id +from common.tools.object_factory.Device import ( + json_device_emulated_connect_rules, json_device_emulated_datacenter_disabled, + json_device_emulated_packet_router_disabled, json_device_id) +from common.tools.object_factory.EndPoint import json_endpoints +from common.tools.object_factory.Link import get_link_uuid, json_link, json_link_id +from common.tools.object_factory.Service import get_service_uuid, json_service_l3nm_planned +from common.tools.object_factory.Topology import json_topology, json_topology_id + +# if true, Device component is present and will infeer the endpoints from connect-rules +# if false, Device component is not present and device objects must contain preconfigured endpoints +ADD_CONNECT_RULES_TO_DEVICES = os.environ.get('ADD_CONNECT_RULES_TO_DEVICES', 'True') +ADD_CONNECT_RULES_TO_DEVICES = ADD_CONNECT_RULES_TO_DEVICES.upper() in {'T', 'TRUE', '1', 'Y', 'YES'} + +def compose_router(device_uuid, endpoint_uuids, topology_id=None): + device_id = json_device_id(device_uuid) + r_endpoints = [(endpoint_uuid, 'copper', []) for endpoint_uuid in endpoint_uuids] + config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else [] + endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id) + j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints + device = json_device_emulated_packet_router_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints) + return device_id, endpoints, device + +def compose_datacenter(device_uuid, endpoint_uuids, topology_id=None): + device_id = json_device_id(device_uuid) + r_endpoints = [(endpoint_uuid, 'copper', []) for endpoint_uuid in endpoint_uuids] + config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else [] + endpoints = 
json_endpoints(device_id, r_endpoints, topology_id=topology_id) + j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints + device = json_device_emulated_datacenter_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints) + return device_id, endpoints, device + +def compose_link(endpoint_a, endpoint_z): + link_uuid = get_link_uuid(endpoint_a['endpoint_id'], endpoint_z['endpoint_id']) + link_id = json_link_id(link_uuid) + link = json_link(link_uuid, [endpoint_a['endpoint_id'], endpoint_z['endpoint_id']]) + return link_id, link + +def compose_service(endpoint_a, endpoint_z, constraints=[]): + service_uuid = get_service_uuid(endpoint_a['endpoint_id'], endpoint_z['endpoint_id']) + endpoint_ids = [endpoint_a['endpoint_id'], endpoint_z['endpoint_id']] + service = json_service_l3nm_planned(service_uuid, endpoint_ids=endpoint_ids, constraints=constraints) + return service + +# ----- Context -------------------------------------------------------------------------------------------------------- +CONTEXT_ID = json_context_id(DEFAULT_CONTEXT_UUID) +CONTEXT = json_context(DEFAULT_CONTEXT_UUID) + +# ----- Domains -------------------------------------------------------------------------------------------------------- +# Overall network topology +TOPO_ADMIN_UUID = DEFAULT_TOPOLOGY_UUID +TOPO_ADMIN_ID = json_topology_id(TOPO_ADMIN_UUID, context_id=CONTEXT_ID) +TOPO_ADMIN = json_topology(TOPO_ADMIN_UUID, context_id=CONTEXT_ID) + +# DataCenter #1 Network +TOPO_DC1_UUID = 'DC1' +TOPO_DC1_ID = json_topology_id(TOPO_DC1_UUID, context_id=CONTEXT_ID) +TOPO_DC1 = json_topology(TOPO_DC1_UUID, context_id=CONTEXT_ID) + +# DataCenter #2 Network +TOPO_DC2_UUID = 'DC2' +TOPO_DC2_ID = json_topology_id(TOPO_DC2_UUID, context_id=CONTEXT_ID) +TOPO_DC2 = json_topology(TOPO_DC2_UUID, context_id=CONTEXT_ID) + +# CellSite #1 Network +TOPO_CS1_UUID = 'CS1' +TOPO_CS1_ID = json_topology_id(TOPO_CS1_UUID, context_id=CONTEXT_ID) +TOPO_CS1 = json_topology(TOPO_CS1_UUID, 
context_id=CONTEXT_ID) + +# CellSite #2 Network +TOPO_CS2_UUID = 'CS2' +TOPO_CS2_ID = json_topology_id(TOPO_CS2_UUID, context_id=CONTEXT_ID) +TOPO_CS2 = json_topology(TOPO_CS2_UUID, context_id=CONTEXT_ID) + +# Transport Network Network +TOPO_TN_UUID = 'TN' +TOPO_TN_ID = json_topology_id(TOPO_TN_UUID, context_id=CONTEXT_ID) +TOPO_TN = json_topology(TOPO_TN_UUID, context_id=CONTEXT_ID) + + +# ----- Devices -------------------------------------------------------------------------------------------------------- +# DataCenters +DEV_DC1GW_ID, DEV_DC1GW_EPS, DEV_DC1GW = compose_datacenter('DC1-GW', ['eth1', 'eth2', 'int']) +DEV_DC2GW_ID, DEV_DC2GW_EPS, DEV_DC2GW = compose_datacenter('DC2-GW', ['eth1', 'eth2', 'int']) + +# CellSites +DEV_CS1GW1_ID, DEV_CS1GW1_EPS, DEV_CS1GW1 = compose_router('CS1-GW1', ['10/1', '1/1', '1/2']) +DEV_CS1GW2_ID, DEV_CS1GW2_EPS, DEV_CS1GW2 = compose_router('CS1-GW2', ['10/1', '1/1', '1/2']) +DEV_CS2GW1_ID, DEV_CS2GW1_EPS, DEV_CS2GW1 = compose_router('CS2-GW1', ['10/1', '1/1', '1/2']) +DEV_CS2GW2_ID, DEV_CS2GW2_EPS, DEV_CS2GW2 = compose_router('CS2-GW2', ['10/1', '1/1', '1/2']) + +# Transport Network +DEV_TNR1_ID, DEV_TNR1_EPS, DEV_TNR1 = compose_router('TN-R1', ['1/1', '1/2', '2/1', '2/2', '2/3']) +DEV_TNR2_ID, DEV_TNR2_EPS, DEV_TNR2 = compose_router('TN-R2', ['1/1', '1/2', '2/1', '2/2', '2/3']) +DEV_TNR3_ID, DEV_TNR3_EPS, DEV_TNR3 = compose_router('TN-R3', ['1/1', '1/2', '2/1', '2/2', '2/3']) +DEV_TNR4_ID, DEV_TNR4_EPS, DEV_TNR4 = compose_router('TN-R4', ['1/1', '1/2', '2/1', '2/2', '2/3']) + + +# ----- Links ---------------------------------------------------------------------------------------------------------- +# InterDomain DC-CSGW +LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW1 = compose_link(DEV_DC1GW_EPS[0], DEV_CS1GW1_EPS[0]) +LINK_DC1GW_CS1GW2_ID, LINK_DC1GW_CS1GW2 = compose_link(DEV_DC1GW_EPS[1], DEV_CS1GW2_EPS[0]) +LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW1 = compose_link(DEV_DC2GW_EPS[0], DEV_CS2GW1_EPS[0]) +LINK_DC2GW_CS2GW2_ID, 
LINK_DC2GW_CS2GW2 = compose_link(DEV_DC2GW_EPS[1], DEV_CS2GW2_EPS[0]) + +# InterDomain CSGW-TN +LINK_CS1GW1_TNR1_ID, LINK_CS1GW1_TNR1 = compose_link(DEV_CS1GW1_EPS[1], DEV_TNR1_EPS[0]) +LINK_CS1GW2_TNR2_ID, LINK_CS1GW2_TNR2 = compose_link(DEV_CS1GW2_EPS[1], DEV_TNR2_EPS[0]) +LINK_CS1GW1_TNR2_ID, LINK_CS1GW1_TNR2 = compose_link(DEV_CS1GW1_EPS[2], DEV_TNR2_EPS[1]) +LINK_CS1GW2_TNR1_ID, LINK_CS1GW2_TNR1 = compose_link(DEV_CS1GW2_EPS[2], DEV_TNR1_EPS[1]) +LINK_CS2GW1_TNR3_ID, LINK_CS2GW1_TNR3 = compose_link(DEV_CS2GW1_EPS[1], DEV_TNR3_EPS[0]) +LINK_CS2GW2_TNR4_ID, LINK_CS2GW2_TNR4 = compose_link(DEV_CS2GW2_EPS[1], DEV_TNR4_EPS[0]) +LINK_CS2GW1_TNR4_ID, LINK_CS2GW1_TNR4 = compose_link(DEV_CS2GW1_EPS[2], DEV_TNR4_EPS[1]) +LINK_CS2GW2_TNR3_ID, LINK_CS2GW2_TNR3 = compose_link(DEV_CS2GW2_EPS[2], DEV_TNR3_EPS[1]) + +# IntraDomain TN +LINK_TNR1_TNR2_ID, LINK_TNR1_TNR2 = compose_link(DEV_TNR1_EPS[2], DEV_TNR2_EPS[3]) +LINK_TNR2_TNR3_ID, LINK_TNR2_TNR3 = compose_link(DEV_TNR2_EPS[2], DEV_TNR3_EPS[3]) +LINK_TNR3_TNR4_ID, LINK_TNR3_TNR4 = compose_link(DEV_TNR3_EPS[2], DEV_TNR4_EPS[3]) +LINK_TNR4_TNR1_ID, LINK_TNR4_TNR1 = compose_link(DEV_TNR4_EPS[2], DEV_TNR1_EPS[3]) +LINK_TNR1_TNR3_ID, LINK_TNR1_TNR3 = compose_link(DEV_TNR1_EPS[4], DEV_TNR3_EPS[4]) +LINK_TNR2_TNR4_ID, LINK_TNR2_TNR4 = compose_link(DEV_TNR2_EPS[4], DEV_TNR4_EPS[4]) + + +# ----- WIM Service Settings ------------------------------------------------------------------------------------------- +WIM_USERNAME = 'admin' +WIM_PASSWORD = 'admin' + +def mapping(site_id, ce_endpoint_id, pe_device_id, priority=None, redundant=[]): + ce_endpoint_id = ce_endpoint_id['endpoint_id'] + ce_device_uuid = ce_endpoint_id['device_id']['device_uuid']['uuid'] + ce_endpoint_uuid = ce_endpoint_id['endpoint_uuid']['uuid'] + pe_device_uuid = pe_device_id['device_uuid']['uuid'] + service_endpoint_id = '{:s}:{:s}:{:s}'.format(site_id, ce_device_uuid, ce_endpoint_uuid) + bearer = '{:s}:{:s}'.format(ce_device_uuid, pe_device_uuid) + _mapping = { 
+ 'service_endpoint_id': service_endpoint_id, + 'datacenter_id': site_id, 'device_id': ce_device_uuid, 'device_interface_id': ce_endpoint_uuid, + 'service_mapping_info': { + 'site-id': site_id, + 'bearer': {'bearer-reference': bearer}, + } + } + if priority is not None: _mapping['service_mapping_info']['priority'] = priority + if len(redundant) > 0: _mapping['service_mapping_info']['redundant'] = redundant + return service_endpoint_id, _mapping + +WIM_SEP_DC1_PRI, WIM_MAP_DC1_PRI = mapping('DC1', DEV_DC1GW_EPS[0], DEV_CS1GW1_ID, priority=10, redundant=['DC1:DC1-GW:eth2']) +WIM_SEP_DC1_SEC, WIM_MAP_DC1_SEC = mapping('DC1', DEV_DC1GW_EPS[1], DEV_CS1GW2_ID, priority=20, redundant=['DC1:DC1-GW:eth1']) +WIM_SEP_DC2_PRI, WIM_MAP_DC2_PRI = mapping('DC2', DEV_DC2GW_EPS[0], DEV_CS2GW1_ID, priority=10, redundant=['DC2:DC2-GW:eth2']) +WIM_SEP_DC2_SEC, WIM_MAP_DC2_SEC = mapping('DC2', DEV_DC2GW_EPS[1], DEV_CS2GW2_ID, priority=20, redundant=['DC2:DC2-GW:eth1']) + +WIM_MAPPING = [WIM_MAP_DC1_PRI, WIM_MAP_DC1_SEC, WIM_MAP_DC2_PRI, WIM_MAP_DC2_SEC] + +WIM_SRV_VLAN_ID = 300 +WIM_SERVICE_TYPE = 'ELAN' +WIM_SERVICE_CONNECTION_POINTS = [ + {'service_endpoint_id': WIM_SEP_DC1_PRI, + 'service_endpoint_encapsulation_type': 'dot1q', + 'service_endpoint_encapsulation_info': {'vlan': WIM_SRV_VLAN_ID}}, + {'service_endpoint_id': WIM_SEP_DC2_PRI, + 'service_endpoint_encapsulation_type': 'dot1q', + 'service_endpoint_encapsulation_info': {'vlan': WIM_SRV_VLAN_ID}}, +] + + +# ----- Containers ----------------------------------------------------------------------------------------------------- +CONTEXTS = [ CONTEXT ] +TOPOLOGIES = [ TOPO_ADMIN, TOPO_DC1, TOPO_DC2, TOPO_CS1, TOPO_CS2, TOPO_TN ] +DEVICES = [ DEV_DC1GW, DEV_DC2GW, + DEV_CS1GW1, DEV_CS1GW2, DEV_CS2GW1, DEV_CS2GW2, + DEV_TNR1, DEV_TNR2, DEV_TNR3, DEV_TNR4, + ] +LINKS = [ LINK_DC1GW_CS1GW1, LINK_DC1GW_CS1GW2, LINK_DC2GW_CS2GW1, LINK_DC2GW_CS2GW2, + LINK_CS1GW1_TNR1, LINK_CS1GW2_TNR2, LINK_CS1GW1_TNR2, LINK_CS1GW2_TNR1, + 
LINK_CS2GW1_TNR3, LINK_CS2GW2_TNR4, LINK_CS2GW1_TNR4, LINK_CS2GW2_TNR3, + LINK_TNR1_TNR2, LINK_TNR2_TNR3, LINK_TNR3_TNR4, LINK_TNR4_TNR1, LINK_TNR1_TNR3, LINK_TNR2_TNR4, + ] + +OBJECTS_PER_TOPOLOGY = [ + (TOPO_ADMIN_ID, + [ DEV_DC1GW_ID, DEV_DC2GW_ID, + DEV_CS1GW1_ID, DEV_CS1GW2_ID, DEV_CS2GW1_ID, DEV_CS2GW2_ID, + DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, + ], + [ LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW2_ID, LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW2_ID, + LINK_CS1GW1_TNR1_ID, LINK_CS1GW2_TNR2_ID, LINK_CS1GW1_TNR2_ID, LINK_CS1GW2_TNR1_ID, + LINK_CS2GW1_TNR3_ID, LINK_CS2GW2_TNR4_ID, LINK_CS2GW1_TNR4_ID, LINK_CS2GW2_TNR3_ID, + LINK_TNR1_TNR2_ID, LINK_TNR2_TNR3_ID, LINK_TNR3_TNR4_ID, LINK_TNR4_TNR1_ID, LINK_TNR1_TNR3_ID, + LINK_TNR2_TNR4_ID, + ], + ), + (TOPO_DC1_ID, + [DEV_DC1GW_ID], + []), + (TOPO_DC2_ID, + [DEV_DC2GW_ID], + []), + (TOPO_CS1_ID, + [DEV_CS1GW1_ID, DEV_CS1GW2_ID], + []), + (TOPO_CS2_ID, + [DEV_CS2GW1_ID, DEV_CS2GW2_ID], + []), + (TOPO_TN_ID, + [ DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, + ], + [ LINK_TNR1_TNR2_ID, LINK_TNR2_TNR3_ID, LINK_TNR3_TNR4_ID, LINK_TNR4_TNR1_ID, LINK_TNR1_TNR3_ID, + LINK_TNR2_TNR4_ID, + ]), +] diff --git a/src/tests/ecoc22/tests/Objects_DC_CSGW_TN_OLS.py b/src/tests/ecoc22/tests/Objects_DC_CSGW_TN_OLS.py new file mode 100644 index 0000000000000000000000000000000000000000..9d67b1a41a16709e2e47bda9f9dc0e7e4bfdc1cf --- /dev/null +++ b/src/tests/ecoc22/tests/Objects_DC_CSGW_TN_OLS.py @@ -0,0 +1,237 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import os, uuid +from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID +from common.tools.object_factory.Context import json_context, json_context_id +from common.tools.object_factory.Device import ( + json_device_emulated_connect_rules, json_device_emulated_datacenter_disabled, + json_device_emulated_packet_router_disabled, json_device_emulated_tapi_disabled, json_device_id) +from common.tools.object_factory.EndPoint import json_endpoints +from common.tools.object_factory.Link import get_link_uuid, json_link, json_link_id +from common.tools.object_factory.Service import get_service_uuid, json_service_l3nm_planned +from common.tools.object_factory.Topology import json_topology, json_topology_id + +# if true, Device component is present and will infeer the endpoints from connect-rules +# if false, Device component is not present and device objects must contain preconfigured endpoints +ADD_CONNECT_RULES_TO_DEVICES = os.environ.get('ADD_CONNECT_RULES_TO_DEVICES', 'False') +ADD_CONNECT_RULES_TO_DEVICES = ADD_CONNECT_RULES_TO_DEVICES.upper() in {'T', 'TRUE', '1', 'Y', 'YES'} + +def compose_router(device_uuid, endpoint_uuids, topology_id=None): + device_id = json_device_id(device_uuid) + r_endpoints = [(endpoint_uuid, 'copper', []) for endpoint_uuid in endpoint_uuids] + config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else [] + endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id) + j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints + device = json_device_emulated_packet_router_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints) + return device_id, endpoints, device + +def compose_ols(device_uuid, endpoint_uuids, topology_id=None): + device_id = json_device_id(device_uuid) + r_endpoints = [(endpoint_uuid, 'optical', []) for endpoint_uuid in 
endpoint_uuids] + config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else [] + endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id) + j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints + device = json_device_emulated_tapi_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints) + return device_id, endpoints, device + +def compose_datacenter(device_uuid, endpoint_uuids, topology_id=None): + device_id = json_device_id(device_uuid) + r_endpoints = [(endpoint_uuid, 'copper', []) for endpoint_uuid in endpoint_uuids] + config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else [] + endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id) + j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints + device = json_device_emulated_datacenter_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints) + return device_id, endpoints, device + +def compose_link(endpoint_a, endpoint_z): + link_uuid = get_link_uuid(endpoint_a['endpoint_id'], endpoint_z['endpoint_id']) + link_id = json_link_id(link_uuid) + link = json_link(link_uuid, [endpoint_a['endpoint_id'], endpoint_z['endpoint_id']]) + return link_id, link + +def compose_service(endpoint_a, endpoint_z, constraints=[]): + service_uuid = get_service_uuid(endpoint_a['endpoint_id'], endpoint_z['endpoint_id']) + endpoint_ids = [endpoint_a['endpoint_id'], endpoint_z['endpoint_id']] + service = json_service_l3nm_planned(service_uuid, endpoint_ids=endpoint_ids, constraints=constraints) + return service + +# ----- Context -------------------------------------------------------------------------------------------------------- +CONTEXT_ID = json_context_id(DEFAULT_CONTEXT_UUID) +CONTEXT = json_context(DEFAULT_CONTEXT_UUID) + +# ----- Domains -------------------------------------------------------------------------------------------------------- +# Overall network 
topology +TOPO_ADMIN_UUID = DEFAULT_TOPOLOGY_UUID +TOPO_ADMIN_ID = json_topology_id(TOPO_ADMIN_UUID, context_id=CONTEXT_ID) +TOPO_ADMIN = json_topology(TOPO_ADMIN_UUID, context_id=CONTEXT_ID) + +# DataCenter #1 Network +TOPO_DC1_UUID = 'DC1' +TOPO_DC1_ID = json_topology_id(TOPO_DC1_UUID, context_id=CONTEXT_ID) +TOPO_DC1 = json_topology(TOPO_DC1_UUID, context_id=CONTEXT_ID) + +# DataCenter #2 Network +TOPO_DC2_UUID = 'DC2' +TOPO_DC2_ID = json_topology_id(TOPO_DC2_UUID, context_id=CONTEXT_ID) +TOPO_DC2 = json_topology(TOPO_DC2_UUID, context_id=CONTEXT_ID) + +# CellSite #1 Network +TOPO_CS1_UUID = 'CS1' +TOPO_CS1_ID = json_topology_id(TOPO_CS1_UUID, context_id=CONTEXT_ID) +TOPO_CS1 = json_topology(TOPO_CS1_UUID, context_id=CONTEXT_ID) + +# CellSite #2 Network +TOPO_CS2_UUID = 'CS2' +TOPO_CS2_ID = json_topology_id(TOPO_CS2_UUID, context_id=CONTEXT_ID) +TOPO_CS2 = json_topology(TOPO_CS2_UUID, context_id=CONTEXT_ID) + +# Transport Network Network +TOPO_TN_UUID = 'TN' +TOPO_TN_ID = json_topology_id(TOPO_TN_UUID, context_id=CONTEXT_ID) +TOPO_TN = json_topology(TOPO_TN_UUID, context_id=CONTEXT_ID) + + +# ----- Devices -------------------------------------------------------------------------------------------------------- +# DataCenters +DEV_DC1GW_ID, DEV_DC1GW_EPS, DEV_DC1GW = compose_datacenter('DC1-GW', ['eth1', 'eth2', 'int']) +DEV_DC2GW_ID, DEV_DC2GW_EPS, DEV_DC2GW = compose_datacenter('DC2-GW', ['eth1', 'eth2', 'int']) + +# CellSites +DEV_CS1GW1_ID, DEV_CS1GW1_EPS, DEV_CS1GW1 = compose_router('CS1-GW1', ['10/1', '1/1', '1/2']) +DEV_CS1GW2_ID, DEV_CS1GW2_EPS, DEV_CS1GW2 = compose_router('CS1-GW2', ['10/1', '1/1', '1/2']) +DEV_CS2GW1_ID, DEV_CS2GW1_EPS, DEV_CS2GW1 = compose_router('CS2-GW1', ['10/1', '1/1', '1/2']) +DEV_CS2GW2_ID, DEV_CS2GW2_EPS, DEV_CS2GW2 = compose_router('CS2-GW2', ['10/1', '1/1', '1/2']) + +# Transport Network +DEV_TNR1_ID, DEV_TNR1_EPS, DEV_TNR1 = compose_router('TN-R1', ['1/1', '1/2', '2/1']) +DEV_TNR2_ID, DEV_TNR2_EPS, DEV_TNR2 = 
compose_router('TN-R2', ['1/1', '1/2', '2/1']) +DEV_TNR3_ID, DEV_TNR3_EPS, DEV_TNR3 = compose_router('TN-R3', ['1/1', '1/2', '2/1']) +DEV_TNR4_ID, DEV_TNR4_EPS, DEV_TNR4 = compose_router('TN-R4', ['1/1', '1/2', '2/1']) +tols_ep_uuids = [str(uuid.uuid4()).split('-')[-1] for _ in range(4)] +DEV_TOLS_ID, DEV_TOLS_EPS, DEV_TOLS = compose_ols('TN-OLS', tols_ep_uuids) + + +# ----- Links ---------------------------------------------------------------------------------------------------------- +# InterDomain DC-CSGW +LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW1 = compose_link(DEV_DC1GW_EPS[0], DEV_CS1GW1_EPS[0]) +LINK_DC1GW_CS1GW2_ID, LINK_DC1GW_CS1GW2 = compose_link(DEV_DC1GW_EPS[1], DEV_CS1GW2_EPS[0]) +LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW1 = compose_link(DEV_DC2GW_EPS[0], DEV_CS2GW1_EPS[0]) +LINK_DC2GW_CS2GW2_ID, LINK_DC2GW_CS2GW2 = compose_link(DEV_DC2GW_EPS[1], DEV_CS2GW2_EPS[0]) + +# InterDomain CSGW-TN +LINK_CS1GW1_TNR1_ID, LINK_CS1GW1_TNR1 = compose_link(DEV_CS1GW1_EPS[1], DEV_TNR1_EPS[0]) +LINK_CS1GW2_TNR2_ID, LINK_CS1GW2_TNR2 = compose_link(DEV_CS1GW2_EPS[1], DEV_TNR2_EPS[0]) +LINK_CS1GW1_TNR2_ID, LINK_CS1GW1_TNR2 = compose_link(DEV_CS1GW1_EPS[2], DEV_TNR2_EPS[1]) +LINK_CS1GW2_TNR1_ID, LINK_CS1GW2_TNR1 = compose_link(DEV_CS1GW2_EPS[2], DEV_TNR1_EPS[1]) +LINK_CS2GW1_TNR3_ID, LINK_CS2GW1_TNR3 = compose_link(DEV_CS2GW1_EPS[1], DEV_TNR3_EPS[0]) +LINK_CS2GW2_TNR4_ID, LINK_CS2GW2_TNR4 = compose_link(DEV_CS2GW2_EPS[1], DEV_TNR4_EPS[0]) +LINK_CS2GW1_TNR4_ID, LINK_CS2GW1_TNR4 = compose_link(DEV_CS2GW1_EPS[2], DEV_TNR4_EPS[1]) +LINK_CS2GW2_TNR3_ID, LINK_CS2GW2_TNR3 = compose_link(DEV_CS2GW2_EPS[2], DEV_TNR3_EPS[1]) + +# IntraDomain TN +LINK_TNR1_TOLS_ID, LINK_TNR1_TOLS = compose_link(DEV_TNR1_EPS[2], DEV_TOLS_EPS[0]) +LINK_TNR2_TOLS_ID, LINK_TNR2_TOLS = compose_link(DEV_TNR2_EPS[2], DEV_TOLS_EPS[1]) +LINK_TNR3_TOLS_ID, LINK_TNR3_TOLS = compose_link(DEV_TNR3_EPS[2], DEV_TOLS_EPS[2]) +LINK_TNR4_TOLS_ID, LINK_TNR4_TOLS = compose_link(DEV_TNR4_EPS[2], DEV_TOLS_EPS[3]) + + +# ----- 
WIM Service Settings ------------------------------------------------------------------------------------------- +WIM_USERNAME = 'admin' +WIM_PASSWORD = 'admin' + +def mapping(site_id, ce_endpoint_id, pe_device_id, priority=None, redundant=[]): + ce_endpoint_id = ce_endpoint_id['endpoint_id'] + ce_device_uuid = ce_endpoint_id['device_id']['device_uuid']['uuid'] + ce_endpoint_uuid = ce_endpoint_id['endpoint_uuid']['uuid'] + pe_device_uuid = pe_device_id['device_uuid']['uuid'] + service_endpoint_id = '{:s}:{:s}:{:s}'.format(site_id, ce_device_uuid, ce_endpoint_uuid) + bearer = '{:s}:{:s}'.format(ce_device_uuid, pe_device_uuid) + _mapping = { + 'service_endpoint_id': service_endpoint_id, + 'datacenter_id': site_id, 'device_id': ce_device_uuid, 'device_interface_id': ce_endpoint_uuid, + 'service_mapping_info': { + 'site-id': site_id, + 'bearer': {'bearer-reference': bearer}, + } + } + if priority is not None: _mapping['service_mapping_info']['priority'] = priority + if len(redundant) > 0: _mapping['service_mapping_info']['redundant'] = redundant + return service_endpoint_id, _mapping + +WIM_SEP_DC1_PRI, WIM_MAP_DC1_PRI = mapping('DC1', DEV_DC1GW_EPS[0], DEV_CS1GW1_ID, priority=10, redundant=['DC1:DC1-GW:eth2']) +WIM_SEP_DC1_SEC, WIM_MAP_DC1_SEC = mapping('DC1', DEV_DC1GW_EPS[1], DEV_CS1GW2_ID, priority=20, redundant=['DC1:DC1-GW:eth1']) +WIM_SEP_DC2_PRI, WIM_MAP_DC2_PRI = mapping('DC2', DEV_DC2GW_EPS[0], DEV_CS2GW1_ID, priority=10, redundant=['DC2:DC2-GW:eth2']) +WIM_SEP_DC2_SEC, WIM_MAP_DC2_SEC = mapping('DC2', DEV_DC2GW_EPS[1], DEV_CS2GW2_ID, priority=20, redundant=['DC2:DC2-GW:eth1']) + +WIM_MAPPING = [WIM_MAP_DC1_PRI, WIM_MAP_DC1_SEC, WIM_MAP_DC2_PRI, WIM_MAP_DC2_SEC] + +WIM_SRV_VLAN_ID = 300 +WIM_SERVICE_TYPE = 'ELAN' +WIM_SERVICE_CONNECTION_POINTS = [ + {'service_endpoint_id': WIM_SEP_DC1_PRI, + 'service_endpoint_encapsulation_type': 'dot1q', + 'service_endpoint_encapsulation_info': {'vlan': WIM_SRV_VLAN_ID}}, + {'service_endpoint_id': WIM_SEP_DC2_PRI, + 
'service_endpoint_encapsulation_type': 'dot1q', + 'service_endpoint_encapsulation_info': {'vlan': WIM_SRV_VLAN_ID}}, +] + + +# ----- Containers ----------------------------------------------------------------------------------------------------- +CONTEXTS = [ CONTEXT ] +TOPOLOGIES = [ TOPO_ADMIN, TOPO_DC1, TOPO_DC2, TOPO_CS1, TOPO_CS2, TOPO_TN ] +DEVICES = [ DEV_DC1GW, DEV_DC2GW, + DEV_CS1GW1, DEV_CS1GW2, DEV_CS2GW1, DEV_CS2GW2, + DEV_TNR1, DEV_TNR2, DEV_TNR3, DEV_TNR4, + DEV_TOLS, + ] +LINKS = [ LINK_DC1GW_CS1GW1, LINK_DC1GW_CS1GW2, LINK_DC2GW_CS2GW1, LINK_DC2GW_CS2GW2, + LINK_CS1GW1_TNR1, LINK_CS1GW2_TNR2, LINK_CS1GW1_TNR2, LINK_CS1GW2_TNR1, + LINK_CS2GW1_TNR3, LINK_CS2GW2_TNR4, LINK_CS2GW1_TNR4, LINK_CS2GW2_TNR3, + LINK_TNR1_TOLS, LINK_TNR2_TOLS, LINK_TNR3_TOLS, LINK_TNR4_TOLS, + ] + +OBJECTS_PER_TOPOLOGY = [ + (TOPO_ADMIN_ID, + [ DEV_DC1GW_ID, DEV_DC2GW_ID, + DEV_CS1GW1_ID, DEV_CS1GW2_ID, DEV_CS2GW1_ID, DEV_CS2GW2_ID, + DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, + DEV_TOLS_ID, + ], + [ LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW2_ID, LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW2_ID, + LINK_CS1GW1_TNR1_ID, LINK_CS1GW2_TNR2_ID, LINK_CS1GW1_TNR2_ID, LINK_CS1GW2_TNR1_ID, + LINK_CS2GW1_TNR3_ID, LINK_CS2GW2_TNR4_ID, LINK_CS2GW1_TNR4_ID, LINK_CS2GW2_TNR3_ID, + LINK_TNR1_TOLS_ID, LINK_TNR2_TOLS_ID, LINK_TNR3_TOLS_ID, LINK_TNR4_TOLS_ID, + ], + ), + (TOPO_DC1_ID, + [DEV_DC1GW_ID], + []), + (TOPO_DC2_ID, + [DEV_DC2GW_ID], + []), + (TOPO_CS1_ID, + [DEV_CS1GW1_ID, DEV_CS1GW2_ID], + []), + (TOPO_CS2_ID, + [DEV_CS2GW1_ID, DEV_CS2GW2_ID], + []), + (TOPO_TN_ID, + [ DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, + DEV_TOLS_ID, + ], + [ LINK_TNR1_TOLS_ID, LINK_TNR2_TOLS_ID, LINK_TNR3_TOLS_ID, LINK_TNR4_TOLS_ID, + ]), +] diff --git a/src/tests/ecoc22/tests/Tools.py b/src/tests/ecoc22/tests/Tools.py new file mode 100644 index 0000000000000000000000000000000000000000..33205da9baeb6c9fe93a389e9744053aea664b16 --- /dev/null +++ b/src/tests/ecoc22/tests/Tools.py @@ -0,0 +1,36 @@ 
+from typing import Dict, List, Tuple +from common.tools.object_factory.EndPoint import json_endpoint, json_endpoint_id +from common.tools.object_factory.Link import json_link, json_link_id + +def json_endpoint_ids(device_id : Dict, endpoint_descriptors : List[Tuple[str, str, List[int]]]): + return [ + json_endpoint_id(device_id, ep_uuid, topology_id=None) + for ep_uuid, _, _ in endpoint_descriptors + ] + +def json_endpoints(device_id : Dict, endpoint_descriptors : List[Tuple[str, str, List[int]]]): + return [ + json_endpoint(device_id, ep_uuid, ep_type, topology_id=None, kpi_sample_types=ep_sample_types) + for ep_uuid, ep_type, ep_sample_types in endpoint_descriptors + ] + +def get_link_uuid(a_endpoint_id : Dict, z_endpoint_id : Dict) -> str: + return '{:s}/{:s}=={:s}/{:s}'.format( + a_endpoint_id['device_id']['device_uuid']['uuid'], a_endpoint_id['endpoint_uuid']['uuid'], + z_endpoint_id['device_id']['device_uuid']['uuid'], z_endpoint_id['endpoint_uuid']['uuid']) + +def link(a_endpoint_id, z_endpoint_id) -> Tuple[str, Dict, Dict]: + link_uuid = get_link_uuid(a_endpoint_id, z_endpoint_id) + link_id = json_link_id(link_uuid) + link_data = json_link(link_uuid, [a_endpoint_id, z_endpoint_id]) + return link_uuid, link_id, link_data + +def compose_service_endpoint_id(endpoint_id): + device_uuid = endpoint_id['device_id']['device_uuid']['uuid'] + endpoint_uuid = endpoint_id['endpoint_uuid']['uuid'] + return ':'.join([device_uuid, endpoint_uuid]) + +def compose_bearer(endpoint_id): + device_uuid = endpoint_id['device_id']['device_uuid']['uuid'] + endpoint_uuid = endpoint_id['endpoint_uuid']['uuid'] + return ':'.join([device_uuid, endpoint_uuid]) diff --git a/src/tests/ecoc22/tests/__init__.py b/src/tests/ecoc22/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7 --- /dev/null +++ b/src/tests/ecoc22/tests/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021-2023 H2020 TeraFlow 
(https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/tests/ecoc22/tests/test_functional_bootstrap.py b/src/tests/ecoc22/tests/test_functional_bootstrap.py new file mode 100644 index 0000000000000000000000000000000000000000..210e53670d88030a69a3a14f36ec8e859cf681b8 --- /dev/null +++ b/src/tests/ecoc22/tests/test_functional_bootstrap.py @@ -0,0 +1,90 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +from common.proto.context_pb2 import Context, ContextId, Device, DeviceId, Empty, Link, LinkId, Topology, TopologyId +from context.client.ContextClient import ContextClient +from device.client.DeviceClient import DeviceClient +from .Fixtures import context_client, device_client +#from .Objects_BigNet import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES +from .Objects_DC_CSGW_TN import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, OBJECTS_PER_TOPOLOGY +#from .Objects_DC_CSGW_TN_OLS import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.DEBUG) + + +def test_scenario_empty(context_client : ContextClient): # pylint: disable=redefined-outer-name + # ----- List entities - Ensure database is empty ------------------------------------------------------------------- + response = context_client.ListContexts(Empty()) + assert len(response.contexts) == 0 + + response = context_client.ListDevices(Empty()) + assert len(response.devices) == 0 + + response = context_client.ListLinks(Empty()) + assert len(response.links) == 0 + + +def test_prepare_environment( + context_client : ContextClient, # pylint: disable=redefined-outer-name + device_client : DeviceClient): # pylint: disable=redefined-outer-name + + for context in CONTEXTS : context_client.SetContext (Context (**context )) + for topology in TOPOLOGIES: context_client.SetTopology(Topology(**topology)) + + for device in DEVICES : device_client .AddDevice (Device (**device )) + for topology_id, device_ids, _ in OBJECTS_PER_TOPOLOGY: + topology = Topology() + topology.CopyFrom(context_client.GetTopology(TopologyId(**topology_id))) + + device_ids_in_topology = {device_id.device_uuid.uuid for device_id in topology.device_ids} + func_device_id_not_added = lambda device_id: device_id['device_uuid']['uuid'] not in device_ids_in_topology + func_device_id_json_to_grpc = lambda device_id: DeviceId(**device_id) + device_ids_to_add = 
list(map(func_device_id_json_to_grpc, filter(func_device_id_not_added, device_ids))) + topology.device_ids.extend(device_ids_to_add) + + context_client.SetTopology(topology) + + for link in LINKS : context_client.SetLink (Link (**link )) + for topology_id, _, link_ids in OBJECTS_PER_TOPOLOGY: + topology = Topology() + topology.CopyFrom(context_client.GetTopology(TopologyId(**topology_id))) + + link_ids_in_topology = {link_id.link_uuid.uuid for link_id in topology.link_ids} + func_link_id_not_added = lambda link_id: link_id['link_uuid']['uuid'] not in link_ids_in_topology + func_link_id_json_to_grpc = lambda link_id: LinkId(**link_id) + link_ids_to_add = list(map(func_link_id_json_to_grpc, filter(func_link_id_not_added, link_ids))) + topology.link_ids.extend(link_ids_to_add) + + context_client.SetTopology(topology) + + +def test_scenario_ready(context_client : ContextClient): # pylint: disable=redefined-outer-name + # ----- List entities - Ensure scenario is ready ------------------------------------------------------------------- + response = context_client.ListContexts(Empty()) + assert len(response.contexts) == len(CONTEXTS) + + response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) + assert len(response.topologies) == len(TOPOLOGIES) + + response = context_client.ListDevices(Empty()) + assert len(response.devices) == len(DEVICES) + + response = context_client.ListLinks(Empty()) + assert len(response.links) == len(LINKS) + + response = context_client.ListServices(ContextId(**CONTEXT_ID)) + assert len(response.services) == 0 diff --git a/src/tests/ecoc22/tests/test_functional_cleanup.py b/src/tests/ecoc22/tests/test_functional_cleanup.py new file mode 100644 index 0000000000000000000000000000000000000000..8a8439555b58f681fc24bebb9bc9858a9c61458c --- /dev/null +++ b/src/tests/ecoc22/tests/test_functional_cleanup.py @@ -0,0 +1,66 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +from common.proto.context_pb2 import ContextId, DeviceId, Empty, LinkId, TopologyId +from common.tools.object_factory.Context import json_context_id +from context.client.ContextClient import ContextClient +from device.client.DeviceClient import DeviceClient +from .Fixtures import context_client, device_client +#from .Objects_BigNet import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES +from .Objects_DC_CSGW_TN import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES +#from .Objects_DC_CSGW_TN_OLS import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.DEBUG) + + +def test_services_removed(context_client : ContextClient): # pylint: disable=redefined-outer-name + # ----- List entities - Ensure service is removed ------------------------------------------------------------------ + response = context_client.ListContexts(Empty()) + assert len(response.contexts) == len(CONTEXTS) + + response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) + assert len(response.topologies) == len(TOPOLOGIES) + + response = context_client.ListDevices(Empty()) + assert len(response.devices) == len(DEVICES) + + response = context_client.ListLinks(Empty()) + assert len(response.links) == len(LINKS) + + response = context_client.ListServices(ContextId(**CONTEXT_ID)) + assert len(response.services) == 0 + + +def test_scenario_cleanup( + context_client : ContextClient, device_client : DeviceClient): # pylint: 
disable=redefined-outer-name + + for link in LINKS : context_client.RemoveLink (LinkId (**link ['link_id' ])) + for device in DEVICES : device_client .DeleteDevice (DeviceId (**device ['device_id' ])) + for topology in TOPOLOGIES: context_client.RemoveTopology(TopologyId(**topology['topology_id'])) + for context in CONTEXTS : context_client.RemoveContext (ContextId (**context ['context_id' ])) + + +def test_scenario_empty_again(context_client : ContextClient): # pylint: disable=redefined-outer-name + # ----- List entities - Ensure database is empty again ------------------------------------------------------------- + response = context_client.ListContexts(Empty()) + assert len(response.contexts) == 0 + + response = context_client.ListDevices(Empty()) + assert len(response.devices) == 0 + + response = context_client.ListLinks(Empty()) + assert len(response.links) == 0 diff --git a/src/tests/ecoc22/tests/test_functional_create_service.py b/src/tests/ecoc22/tests/test_functional_create_service.py new file mode 100644 index 0000000000000000000000000000000000000000..3e830bdbe1cb5adaf1559314a07b826ac73c32d1 --- /dev/null +++ b/src/tests/ecoc22/tests/test_functional_create_service.py @@ -0,0 +1,78 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +from common.proto.context_pb2 import ContextId, Empty +from common.tools.grpc.Tools import grpc_message_to_json_string +from compute.tests.mock_osm.MockOSM import MockOSM +from context.client.ContextClient import ContextClient +from .Fixtures import context_client, osm_wim +#from .Objects_BigNet import ( +# CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) +from .Objects_DC_CSGW_TN import ( + CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) +#from .Objects_DC_CSGW_TN_OLS import ( +# CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.DEBUG) + + +def test_scenario_is_correct(context_client : ContextClient): # pylint: disable=redefined-outer-name + # ----- List entities - Ensure links are created ------------------------------------------------------------------- + response = context_client.ListContexts(Empty()) + assert len(response.contexts) == len(CONTEXTS) + + response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) + assert len(response.topologies) == len(TOPOLOGIES) + + response = context_client.ListDevices(Empty()) + assert len(response.devices) == len(DEVICES) + + response = context_client.ListLinks(Empty()) + assert len(response.links) == len(LINKS) + + response = context_client.ListServices(ContextId(**CONTEXT_ID)) + assert len(response.services) == 0 + + +def test_service_creation(context_client : ContextClient, osm_wim : MockOSM): # pylint: disable=redefined-outer-name + # ----- Create Service --------------------------------------------------------------------------------------------- + service_uuid = osm_wim.create_connectivity_service(WIM_SERVICE_TYPE, WIM_SERVICE_CONNECTION_POINTS) + osm_wim.get_connectivity_service_status(service_uuid) + + +def test_scenario_service_created(context_client : ContextClient): # pylint: 
disable=redefined-outer-name + # ----- List entities - Ensure service is created ------------------------------------------------------------------ + response = context_client.ListContexts(Empty()) + assert len(response.contexts) == len(CONTEXTS) + + response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) + assert len(response.topologies) == len(TOPOLOGIES) + + response = context_client.ListDevices(Empty()) + assert len(response.devices) == len(DEVICES) + + response = context_client.ListLinks(Empty()) + assert len(response.links) == len(LINKS) + + response = context_client.ListServices(ContextId(**CONTEXT_ID)) + LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response))) + assert len(response.services) == 1 # L2NM + for service in response.services: + service_id = service.service_id + response = context_client.ListConnections(service_id) + LOGGER.info(' ServiceId[{:s}] => Connections[{:d}] = {:s}'.format( + grpc_message_to_json_string(service_id), len(response.connections), grpc_message_to_json_string(response))) + assert len(response.connections) == 2 # 2 connections per service (primary + backup) diff --git a/src/tests/ecoc22/tests/test_functional_delete_service.py b/src/tests/ecoc22/tests/test_functional_delete_service.py new file mode 100644 index 0000000000000000000000000000000000000000..4bc322a0b0ac125896063eac0a066e8363d7c070 --- /dev/null +++ b/src/tests/ecoc22/tests/test_functional_delete_service.py @@ -0,0 +1,98 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging, pytest +from common.DeviceTypes import DeviceTypeEnum +from common.Settings import get_setting +from common.proto.context_pb2 import ContextId, Empty, ServiceTypeEnum +from common.tests.EventTools import EVENT_REMOVE, EVENT_UPDATE, check_events +from common.tools.object_factory.Connection import json_connection_id +from common.tools.object_factory.Device import json_device_id +from common.tools.object_factory.Service import json_service_id +from common.tools.grpc.Tools import grpc_message_to_json_string +from compute.tests.mock_osm.MockOSM import MockOSM +from context.client.ContextClient import ContextClient +from .Fixtures import context_client, osm_wim +#from .Objects_BigNet import ( +# CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) +from .Objects_DC_CSGW_TN import ( + CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) +#from .Objects_DC_CSGW_TN_OLS import ( +# CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.DEBUG) + +DEVTYPE_EMU_PR = DeviceTypeEnum.EMULATED_PACKET_ROUTER.value +DEVTYPE_EMU_OLS = DeviceTypeEnum.EMULATED_OPEN_LINE_SYSTEM.value + +def test_scenario_is_correct(context_client : ContextClient): # pylint: disable=redefined-outer-name + # ----- List entities - Ensure service is created ------------------------------------------------------------------ + response = context_client.ListContexts(Empty()) + assert 
len(response.contexts) == len(CONTEXTS) + + response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) + assert len(response.topologies) == len(TOPOLOGIES) + + response = context_client.ListDevices(Empty()) + assert len(response.devices) == len(DEVICES) + + response = context_client.ListLinks(Empty()) + assert len(response.links) == len(LINKS) + + response = context_client.ListServices(ContextId(**CONTEXT_ID)) + LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response))) + assert len(response.services) == 1 # L2NM + for service in response.services: + service_id = service.service_id + response = context_client.ListConnections(service_id) + LOGGER.info(' ServiceId[{:s}] => Connections[{:d}] = {:s}'.format( + grpc_message_to_json_string(service_id), len(response.connections), grpc_message_to_json_string(response))) + assert len(response.connections) == 2 # 2 connections per service + + +def test_service_removal(context_client : ContextClient, osm_wim : MockOSM): # pylint: disable=redefined-outer-name + # ----- Delete Service --------------------------------------------------------------------------------------------- + response = context_client.ListServices(ContextId(**CONTEXT_ID)) + LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response))) + service_uuids = set() + for service in response.services: + if service.service_type != ServiceTypeEnum.SERVICETYPE_L2NM: continue + service_uuid = service.service_id.service_uuid.uuid + if service_uuid.endswith(':optical'): continue + service_uuids.add(service_uuid) + osm_wim.conn_info[service_uuid] = {} + + assert len(service_uuids) == 1 # assume a single service has been created + service_uuid = set(service_uuids).pop() + + osm_wim.delete_connectivity_service(service_uuid) + + +def test_services_removed(context_client : ContextClient): # pylint: disable=redefined-outer-name + # ----- List entities - Ensure service is removed 
------------------------------------------------------------------ + response = context_client.ListContexts(Empty()) + assert len(response.contexts) == len(CONTEXTS) + + response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) + assert len(response.topologies) == len(TOPOLOGIES) + + response = context_client.ListDevices(Empty()) + assert len(response.devices) == len(DEVICES) + + response = context_client.ListLinks(Empty()) + assert len(response.links) == len(LINKS) + + response = context_client.ListServices(ContextId(**CONTEXT_ID)) + assert len(response.services) == 0 diff --git a/src/tests/oeccpsc22/tests/Tools.py b/src/tests/oeccpsc22/tests/Tools.py index a782b6bb3e541e4331f5f95164e69def5640f556..d26c8ae11468f05dc48cb55dc202b9f0efc1d3b6 100644 --- a/src/tests/oeccpsc22/tests/Tools.py +++ b/src/tests/oeccpsc22/tests/Tools.py @@ -12,7 +12,7 @@ def json_endpoint_ids(device_id : Dict, endpoint_descriptors : List[Tuple[str, s def get_link_uuid(a_endpoint_id : Dict, z_endpoint_id : Dict) -> str: return '{:s}/{:s}=={:s}/{:s}'.format( a_endpoint_id['device_id']['device_uuid']['uuid'], a_endpoint_id['endpoint_uuid']['uuid'], - a_endpoint_id['device_id']['device_uuid']['uuid'], z_endpoint_id['endpoint_uuid']['uuid']) + z_endpoint_id['device_id']['device_uuid']['uuid'], z_endpoint_id['endpoint_uuid']['uuid']) def compose_service_endpoint_id(endpoint_id): device_uuid = endpoint_id['device_id']['device_uuid']['uuid'] diff --git a/src/tests/ofc22/deploy_specs.sh b/src/tests/ofc22/deploy_specs.sh new file mode 100644 index 0000000000000000000000000000000000000000..b486474e2afad7305409bf410c7b8885b0afe2a8 --- /dev/null +++ b/src/tests/ofc22/deploy_specs.sh @@ -0,0 +1,17 @@ +# Set the URL of your local Docker registry where the images will be uploaded to. +export TFS_REGISTRY_IMAGE="http://localhost:32000/tfs/" + +# Set the list of components, separated by spaces, you want to build images for, and deploy. 
+export TFS_COMPONENTS="context device automation service compute monitoring webui" + +# Set the tag you want to use for your images. +export TFS_IMAGE_TAG="dev" + +# Set the name of the Kubernetes namespace to deploy to. +export TFS_K8S_NAMESPACE="tfs" + +# Set additional manifest files to be applied after the deployment +export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml" + +# Set the new Grafana admin password +export TFS_GRAFANA_PASSWORD="admin123+" diff --git a/src/tests/ofc22/descriptors_emulated.json b/src/tests/ofc22/descriptors_emulated.json index 0f6f75e16c641439035f833efa05ee60529070f7..83f9c39e2ac7154b088ccdd0a1519ea32c1aee1d 100644 --- a/src/tests/ofc22/descriptors_emulated.json +++ b/src/tests/ofc22/descriptors_emulated.json @@ -18,9 +18,9 @@ "device_id": {"device_uuid": {"uuid": "R1-EMU"}}, "device_type": "emu-packet-router", "device_config": {"config_rules": [ - {"action": 1, "resource_key": "_connect/address", "resource_value": "127.0.0.1"}, - {"action": 1, "resource_key": "_connect/port", "resource_value": "0"}, - {"action": 1, "resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"} + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}} ]}, "device_operational_status": 1, "device_drivers": [0], @@ -30,9 +30,9 @@ "device_id": {"device_uuid": {"uuid": "R2-EMU"}}, "device_type": "emu-packet-router", "device_config": {"config_rules": [ - {"action": 1, "resource_key": "_connect/address", 
"resource_value": "127.0.0.1"}, - {"action": 1, "resource_key": "_connect/port", "resource_value": "0"}, - {"action": 1, "resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"} + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}} ]}, "device_operational_status": 1, "device_drivers": [0], @@ -42,9 +42,9 @@ "device_id": {"device_uuid": {"uuid": "R3-EMU"}}, "device_type": "emu-packet-router", "device_config": {"config_rules": [ - {"action": 1, "resource_key": "_connect/address", "resource_value": "127.0.0.1"}, - {"action": 1, "resource_key": "_connect/port", "resource_value": "0"}, - {"action": 1, "resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"} + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}} ]}, "device_operational_status": 1, "device_drivers": [0], @@ -54,9 +54,9 @@ "device_id": {"device_uuid": {"uuid": "R4-EMU"}}, "device_type": "emu-packet-router", 
"device_config": {"config_rules": [ - {"action": 1, "resource_key": "_connect/address", "resource_value": "127.0.0.1"}, - {"action": 1, "resource_key": "_connect/port", "resource_value": "0"}, - {"action": 1, "resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"} + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}} ]}, "device_operational_status": 1, "device_drivers": [0], @@ -66,9 +66,9 @@ "device_id": {"device_uuid": {"uuid": "O1-OLS"}}, "device_type": "emu-open-line-system", "device_config": {"config_rules": [ - {"action": 1, "resource_key": "_connect/address", "resource_value": "127.0.0.1"}, - {"action": 1, "resource_key": "_connect/port", "resource_value": "0"}, - {"action": 1, "resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"aade6001-f00b-5e2f-a357-6a0a9d3de870\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"eb287d83-f05e-53ec-ab5a-adf6bd2b5418\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"0ef74f99-1acc-57bd-ab9d-4b958b06c513\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"50296d99-58cc-5ce7-82f5-fc8ee4eec2ec\"}]}"} + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": 
"{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"aade6001-f00b-5e2f-a357-6a0a9d3de870\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"eb287d83-f05e-53ec-ab5a-adf6bd2b5418\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"0ef74f99-1acc-57bd-ab9d-4b958b06c513\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"50296d99-58cc-5ce7-82f5-fc8ee4eec2ec\"}]}"}} ]}, "device_operational_status": 1, "device_drivers": [0], diff --git a/src/webui/Dockerfile b/src/webui/Dockerfile index 7760416be32b893ed5f2408b70e874fb89721e17..a17d2bd9aea9c6948262dcf17776f75c0be351b8 100644 --- a/src/webui/Dockerfile +++ b/src/webui/Dockerfile @@ -79,6 +79,8 @@ COPY --chown=webui:webui src/device/__init__.py device/__init__.py COPY --chown=webui:webui src/device/client/. device/client/ COPY --chown=webui:webui src/service/__init__.py service/__init__.py COPY --chown=webui:webui src/service/client/. service/client/ +COPY --chown=webui:webui src/slice/__init__.py slice/__init__.py +COPY --chown=webui:webui src/slice/client/. slice/client/ COPY --chown=webui:webui src/webui/. 
webui/ # Start the service diff --git a/src/webui/service/__init__.py b/src/webui/service/__init__.py index 9187d90e76acd256bcac752ce7e7be025889e133..75e1036420d0bc88a790fb7b65f4f4900abaaadd 100644 --- a/src/webui/service/__init__.py +++ b/src/webui/service/__init__.py @@ -72,11 +72,15 @@ def create_app(use_config=None, web_app_root=None): from webui.service.service.routes import service app.register_blueprint(service) + from webui.service.slice.routes import slice + app.register_blueprint(slice) + from webui.service.device.routes import device app.register_blueprint(device) from webui.service.link.routes import link app.register_blueprint(link) + app.jinja_env.filters['from_json'] = from_json diff --git a/src/webui/service/link/routes.py b/src/webui/service/link/routes.py index 04c4b1de59283832b17c92c91727fa716a2c0fea..51e903d9ec28c5aaac20cd49e2f97dd7044e12bf 100644 --- a/src/webui/service/link/routes.py +++ b/src/webui/service/link/routes.py @@ -12,10 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from flask import render_template, Blueprint, flash, session, redirect, url_for -from common.proto.context_pb2 import Empty, LinkList + +from flask import current_app, render_template, Blueprint, flash, session, redirect, url_for +from common.proto.context_pb2 import Empty, Link, LinkEvent, LinkId, LinkIdList, LinkList, DeviceId from context.client.ContextClient import ContextClient + link = Blueprint('link', __name__, url_prefix='/link') context_client = ContextClient() @@ -32,4 +34,13 @@ def home(): return render_template( "link/home.html", links=response.links, - ) \ No newline at end of file + ) + +@link.route('detail/<path:link_uuid>', methods=('GET', 'POST')) +def detail(link_uuid: str): + request = LinkId() + request.link_uuid.uuid = link_uuid + context_client.connect() + response = context_client.GetLink(request) + context_client.close() + return render_template('link/detail.html',link=response) diff --git a/src/webui/service/main/routes.py b/src/webui/service/main/routes.py index 85d3aeeb7c6f23ab4123412173cdfda4d27b23a4..e9545ade40949a1ad772b35b669e02a1fa39d64d 100644 --- a/src/webui/service/main/routes.py +++ b/src/webui/service/main/routes.py @@ -12,53 +12,137 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import json, logging +import copy, json, logging from flask import jsonify, redirect, render_template, Blueprint, flash, session, url_for, request -from common.proto.context_pb2 import Context, Device, Empty, Link, Topology, ContextIdList +from common.proto.context_pb2 import Connection, Context, Device, Empty, Link, Service, Slice, Topology, ContextIdList +from common.tools.grpc.Tools import grpc_message_to_json_string from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient +from service.client.ServiceClient import ServiceClient +from slice.client.SliceClient import SliceClient from webui.service.main.forms import ContextForm, DescriptorForm main = Blueprint('main', __name__) context_client = ContextClient() device_client = DeviceClient() +service_client = ServiceClient() +slice_client = SliceClient() logger = logging.getLogger(__name__) -def process_descriptor(item_name_singluar, item_name_plural, grpc_method, grpc_class, items): +ENTITY_TO_TEXT = { + # name => singular, plural + 'context' : ('Context', 'Contexts' ), + 'topology' : ('Topology', 'Topologies' ), + 'device' : ('Device', 'Devices' ), + 'link' : ('Link', 'Links' ), + 'service' : ('Service', 'Services' ), + 'slice' : ('Slice', 'Slices' ), + 'connection': ('Connection', 'Connections'), +} + +ACTION_TO_TEXT = { + # action => infinitive, past + 'add' : ('Add', 'Added'), + 'update' : ('Update', 'Updated'), +} + +def process_descriptor(entity_name, action_name, grpc_method, grpc_class, entities): + entity_name_singluar,entity_name_plural = ENTITY_TO_TEXT[entity_name] + action_infinitive, action_past = ACTION_TO_TEXT[action_name] num_ok, num_err = 0, 0 - for item in items: + for entity in entities: try: - grpc_method(grpc_class(**item)) + grpc_method(grpc_class(**entity)) num_ok += 1 except Exception as e: # pylint: disable=broad-except - flash(f'Unable to add {item_name_singluar} {str(item)}: {str(e)}', 'error') + flash(f'Unable to {action_infinitive} 
{entity_name_singluar} {str(entity)}: {str(e)}', 'error') num_err += 1 - if num_ok : flash(f'{str(num_ok)} {item_name_plural} added', 'success') - if num_err: flash(f'{str(num_err)} {item_name_plural} failed', 'danger') + if num_ok : flash(f'{str(num_ok)} {entity_name_plural} {action_past}', 'success') + if num_err: flash(f'{str(num_err)} {entity_name_plural} failed', 'danger') def process_descriptors(descriptors): - logger.warning(str(descriptors.data)) - logger.warning(str(descriptors.name)) try: - logger.warning(str(request.files)) descriptors_file = request.files[descriptors.name] - logger.warning(str(descriptors_file)) descriptors_data = descriptors_file.read() - logger.warning(str(descriptors_data)) descriptors = json.loads(descriptors_data) - logger.warning(str(descriptors)) except Exception as e: # pylint: disable=broad-except flash(f'Unable to load descriptor file: {str(e)}', 'danger') return + dummy_mode = descriptors.get('dummy_mode' , False) + contexts = descriptors.get('contexts' , []) + topologies = descriptors.get('topologies' , []) + devices = descriptors.get('devices' , []) + links = descriptors.get('links' , []) + services = descriptors.get('services' , []) + slices = descriptors.get('slices' , []) + connections = descriptors.get('connections', []) + + if dummy_mode: + # Dummy Mode: used to pre-load databases (WebUI debugging purposes) with no smart or automated tasks. 
+ context_client.connect() + + contexts_add = copy.deepcopy(contexts) + for context in contexts_add: + context['topology_ids'] = [] + context['service_ids'] = [] + + topologies_add = copy.deepcopy(topologies) + for topology in topologies_add: + topology['device_ids'] = [] + topology['link_ids'] = [] + + process_descriptor('context', 'add', context_client.SetContext, Context, contexts_add ) + process_descriptor('topology', 'add', context_client.SetTopology, Topology, topologies_add) + process_descriptor('device', 'add', context_client.SetDevice, Device, devices ) + process_descriptor('link', 'add', context_client.SetLink, Link, links ) + process_descriptor('service', 'add', context_client.SetService, Service, services ) + process_descriptor('context', 'update', context_client.SetContext, Context, contexts ) + process_descriptor('topology', 'update', context_client.SetTopology, Topology, topologies ) + process_descriptor('slice', 'add', context_client.SetSlice, Slice, slices ) + process_descriptor('connection', 'add', context_client.SetConnection, Connection, connections ) + context_client.close() + return + + # Normal mode: follows the automated workflows in the different components + + # in normal mode, connections should not be set + assert len(connections) == 0 + + services_add = [] + for service in services: + service_copy = copy.deepcopy(service) + service_copy['service_endpoint_ids'] = [] + service_copy['service_constraints'] = [] + service_copy['service_config'] = {'config_rules': []} + services_add.append(service_copy) + + slices_add = [] + for slice in slices: + slice_copy = copy.deepcopy(slice) + slice_copy['slice_endpoint_ids'] = [] + slice_copy['slice_constraints'] = [] + slice_copy['slice_config'] = {'config_rules': []} + slices_add.append(slice_copy) + context_client.connect() device_client.connect() - process_descriptor('Context', 'Contexts', context_client.SetContext, Context, descriptors['contexts' ]) - process_descriptor('Topology', 'Topologies', 
context_client.SetTopology, Topology, descriptors['topologies']) - process_descriptor('Device', 'Devices', device_client .AddDevice, Device, descriptors['devices' ]) - process_descriptor('Link', 'Links', context_client.SetLink, Link, descriptors['links' ]) + service_client.connect() + slice_client.connect() + + process_descriptor('context', 'add', context_client.SetContext, Context, contexts ) + process_descriptor('topology', 'add', context_client.SetTopology, Topology, topologies ) + process_descriptor('device', 'add', device_client .AddDevice, Device, devices ) + process_descriptor('link', 'add', context_client.SetLink, Link, links ) + process_descriptor('service', 'add', service_client.CreateService, Service, services_add) + process_descriptor('service', 'update', service_client.UpdateService, Service, services ) + process_descriptor('slice', 'add', slice_client.CreateSlice, Slice, slices_add ) + process_descriptor('slice', 'update', slice_client.UpdateSlice, Slice, slices ) + + slice_client.close() + service_client.close() device_client.close() context_client.close() @@ -69,14 +153,18 @@ def home(): response: ContextIdList = context_client.ListContextIds(Empty()) context_form: ContextForm = ContextForm() context_form.context.choices.append(('', 'Select...')) + for context in response.context_ids: context_form.context.choices.append((context.context_uuid.uuid, context.context_uuid)) + if context_form.validate_on_submit(): session['context_uuid'] = context_form.context.data flash(f'The context was successfully set to `{context_form.context.data}`.', 'success') return redirect(url_for("main.home")) + if 'context_uuid' in session: context_form.context.data = session['context_uuid'] + descriptor_form: DescriptorForm = DescriptorForm() try: if descriptor_form.validate_on_submit(): @@ -88,6 +176,7 @@ def home(): finally: context_client.close() device_client.close() + return render_template('main/home.html', context_form=context_form, descriptor_form=descriptor_form) 
@main.route('/topology', methods=['GET']) @@ -102,11 +191,17 @@ def topology(): } for device in response.devices] response = context_client.ListLinks(Empty()) - links = [{ - 'id': link.link_id.link_uuid.uuid, - 'source': link.link_endpoint_ids[0].device_id.device_uuid.uuid, - 'target': link.link_endpoint_ids[1].device_id.device_uuid.uuid, - } for link in response.links] + links = [] + for link in response.links: + if len(link.link_endpoint_ids) != 2: + str_link = grpc_message_to_json_string(link) + logger.warning('Unexpected link with len(endpoints) != 2: {:s}'.format(str_link)) + continue + links.append({ + 'id': link.link_id.link_uuid.uuid, + 'source': link.link_endpoint_ids[0].device_id.device_uuid.uuid, + 'target': link.link_endpoint_ids[1].device_id.device_uuid.uuid, + }) return jsonify({'devices': devices, 'links': links}) except: diff --git a/src/webui/service/service/routes.py b/src/webui/service/service/routes.py index 81031490ef840ff63262444a5487932a4e72c111..bc05daee3e4ff8795c26bed9e0707b9a3ab2be7c 100644 --- a/src/webui/service/service/routes.py +++ b/src/webui/service/service/routes.py @@ -14,7 +14,7 @@ import grpc from flask import current_app, redirect, render_template, Blueprint, flash, session, url_for -from common.proto.context_pb2 import ContextId, Service, ServiceId, ServiceList, ServiceTypeEnum, ServiceStatusEnum +from common.proto.context_pb2 import ContextId, Service, ServiceId, ServiceList, ServiceTypeEnum, ServiceStatusEnum, Connection from context.client.ContextClient import ContextClient from service.client.ServiceClient import ServiceClient @@ -73,12 +73,14 @@ def detail(service_uuid: str): try: context_client.connect() response: Service = context_client.GetService(request) + connections: Connection = context_client.ListConnections(request) context_client.close() except Exception as e: flash('The system encountered an error and cannot show the details of this service.', 'warning') current_app.logger.exception(e) return 
redirect(url_for('service.home')) - return render_template('service/detail.html', service=response) + return render_template('service/detail.html', service=response, connections=connections,ste=ServiceTypeEnum, + sse=ServiceStatusEnum) @service.get('<path:service_uuid>/delete') @@ -100,4 +102,4 @@ def delete(service_uuid: str): except Exception as e: flash('Problem deleting service "{:s}": {:s}'.format(service_uuid, str(e.details())), 'danger') current_app.logger.exception(e) - return redirect(url_for('service.home')) + return redirect(url_for('service.home')) \ No newline at end of file diff --git a/src/webui/service/slice/__init__.py b/src/webui/service/slice/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7 --- /dev/null +++ b/src/webui/service/slice/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/webui/service/slice/routes.py b/src/webui/service/slice/routes.py new file mode 100644 index 0000000000000000000000000000000000000000..c5287501362db88edaf334426ca6e6d0e3331ef2 --- /dev/null +++ b/src/webui/service/slice/routes.py @@ -0,0 +1,103 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import grpc +from flask import current_app, redirect, render_template, Blueprint, flash, session, url_for +from common.proto.context_pb2 import ContextId, Slice, SliceId, SliceList, Connection, SliceStatusEnum +from context.client.ContextClient import ContextClient +#from slice.client.SliceClient import SliceClient + + + +slice = Blueprint('slice', __name__, url_prefix='/slice') + +context_client = ContextClient() +#slice_client = SliceClient() + +@slice.get('/') +def home(): + # flash('This is an info message', 'info') + # flash('This is a danger message', 'danger') + + context_uuid = session.get('context_uuid', '-') + if context_uuid == "-": + flash("Please select a context!", "warning") + return redirect(url_for("main.home")) + request = ContextId() + request.context_uuid.uuid = context_uuid + context_client.connect() + try: + slice_list = context_client.ListSlices(request) + # print(slice_list) + slices = slice_list.slices + context_not_found = False + except grpc.RpcError as e: + if e.code() != grpc.StatusCode.NOT_FOUND: raise + if e.details() != 'Context({:s}) not found'.format(context_uuid): raise + slices = [] + context_not_found = True + context_client.close() + return render_template('slice/home.html',slices=slices, context_not_found=context_not_found, sse=SliceStatusEnum) + +# +#@slice.route('add', methods=['GET', 'POST']) +#def add(): +# flash('Add slice route called', 'danger') +# raise NotImplementedError() +# return render_template('slice/home.html') +# +# +@slice.get('<path:slice_uuid>/detail') +def detail(slice_uuid: str): + 
context_uuid = session.get('context_uuid', '-') + if context_uuid == "-": + flash("Please select a context!", "warning") + return redirect(url_for("main.home")) + + request: SliceId = SliceId() + request.slice_uuid.uuid = slice_uuid + request.context_id.context_uuid.uuid = context_uuid + req = ContextId() + req.context_uuid.uuid = context_uuid + try: + context_client.connect() + response: Slice = context_client.GetSlice(request) + services = context_client.ListServices(req) + context_client.close() + except Exception as e: + flash('The system encountered an error and cannot show the details of this slice.', 'warning') + current_app.logger.exception(e) + return redirect(url_for('slice.home')) + return render_template('slice/detail.html', slice=response, sse=SliceStatusEnum, services=services) +# +#@slice.get('<path:slice_uuid>/delete') +#def delete(slice_uuid: str): +# context_uuid = session.get('context_uuid', '-') +# if context_uuid == "-": +# flash("Please select a context!", "warning") +# return redirect(url_for("main.home")) +# +# try: +# request = SliceId() +# request.slice_uuid.uuid = slice_uuid +# request.context_id.context_uuid.uuid = context_uuid +# slice_client.connect() +# response = slice_client.DeleteSlice(request) +# slice_client.close() +# +# flash('Slice "{:s}" deleted successfully!'.format(slice_uuid), 'success') +# except Exception as e: +# flash('Problem deleting slice "{:s}": {:s}'.format(slice_uuid, str(e.details())), 'danger') +# current_app.logger.exception(e) +# return redirect(url_for('slice.home')) \ No newline at end of file diff --git a/src/webui/service/static/TeraFlow SDN Logo ScreenColour with Slogan.png b/src/webui/service/static/TeraFlow SDN Logo ScreenColour with Slogan.png new file mode 100644 index 0000000000000000000000000000000000000000..218cc713c0a2704f96371fdd2916ef16b44cf667 Binary files /dev/null and b/src/webui/service/static/TeraFlow SDN Logo ScreenColour with Slogan.png differ diff --git 
a/src/webui/service/static/topology_icons/Acknowledgements.txt b/src/webui/service/static/topology_icons/Acknowledgements.txt index 1ddf1e1d03d8dbc8695e1a7850f5c911cae7e28e..5daab200f87c29f18706e9e07023a45047739df7 100644 --- a/src/webui/service/static/topology_icons/Acknowledgements.txt +++ b/src/webui/service/static/topology_icons/Acknowledgements.txt @@ -10,3 +10,6 @@ https://symbols.getvecta.com/stencil_241/224_router.be30fb87e7.png => emu-packet https://symbols.getvecta.com/stencil_240/269_virtual-layer-switch.ed10fdede6.png => open-line-system.png https://symbols.getvecta.com/stencil_241/281_virtual-layer-switch.29420aff2f.png => emu-open-line-system.png + +https://symbols.getvecta.com/stencil_240/102_ibm-tower.2cc133f3d0.png => datacenter.png +https://symbols.getvecta.com/stencil_241/133_ibm-tower.995c44696c.png => emu-datacenter.png diff --git a/src/webui/service/static/topology_icons/datacenter.png b/src/webui/service/static/topology_icons/datacenter.png new file mode 100644 index 0000000000000000000000000000000000000000..33818cf87e0f47fb6fd45b45c46f368f62ab78d2 Binary files /dev/null and b/src/webui/service/static/topology_icons/datacenter.png differ diff --git a/src/webui/service/static/topology_icons/emu-datacenter.png b/src/webui/service/static/topology_icons/emu-datacenter.png new file mode 100644 index 0000000000000000000000000000000000000000..ed2cc7376b481815edb48fb6faaa025289cfc3ca Binary files /dev/null and b/src/webui/service/static/topology_icons/emu-datacenter.png differ diff --git a/src/webui/service/templates/base.html b/src/webui/service/templates/base.html index d314acb3d5cbe607e82474be7e66302f3d620d6a..9804e4afd1c1b7c889c2f3e0d627471ee13b5c68 100644 --- a/src/webui/service/templates/base.html +++ b/src/webui/service/templates/base.html @@ -1,160 +1,167 @@ -<!doctype html> -<!-- - Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file 
except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. ---> - -<html lang="en"> - <head> - <!-- Required meta tags --> - <meta charset="utf-8"> - <meta name="viewport" content="width=device-width, initial-scale=1"> - - <link rel="shortcut icon" href="https://teraflow-h2020.eu/sites/teraflow/files/public/favicon.png" type="image/png" /> - - <!-- Bootstrap CSS --> - <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.2/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-uWxY/CJNBR+1zjPWmfnSnVxwRheevXITnMqoEIeG1LJrdI0GlVs/9cVSyPYXdcSF" crossorigin="anonymous"> - <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.5.0/font/bootstrap-icons.css"> - - <title>TeraFlow OFC 2022 Demo</title> - </head> - <body> - <div id="teraflow-branding" style="width: 260px; margin: 7px;"> - <a href="{{ url_for('main.home') }}" title="Home" rel="home" id="main-logo" class="site-logo site-logo-pages"> - <svg id="Capa_1" data-name="Capa 1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 436.3 132.1"><defs><style>.cls-1{fill:#36a9e1;}.cls-2{fill:#1d71b8;}.cls-3{fill:none;stroke-width:2.52px;}.cls-10,.cls-3,.cls-4,.cls-5,.cls-7,.cls-8,.cls-9{stroke:#0f77b6;}.cls-3,.cls-4,.cls-8{stroke-miterlimit:10;}.cls-10,.cls-4,.cls-5,.cls-7,.cls-8,.cls-9{fill:#fff;}.cls-4{stroke-width:0.73px;}.cls-5,.cls-7{stroke-miterlimit:10;}.cls-5{stroke-width:0.75px;}.cls-6{fill:#0f77b6;}.cls-7{stroke-width:0.72px;}.cls-8{stroke-width:0.7px;}.cls-9{stroke-miterlimit:10;stroke-width:0.69px;}.cls-10{stroke-miterlimit:10;stroke-width:0.7px;}</style></defs><path class="cls-1" 
d="M96,57V51.3h44.1V57H121v52.3h-5.9V57Z"></path><path class="cls-1" d="M168.9,95.1l4.7,2.4a26,26,0,0,1-5.3,7.3,22.27,22.27,0,0,1-6.7,4.2,22.64,22.64,0,0,1-8.5,1.4c-7,0-12.5-2.3-16.4-6.9a23.53,23.53,0,0,1-5.9-15.6,23,23,0,0,1,5-14.5c4.2-5.4,9.9-8.1,17-8.1,7.3,0,13.2,2.8,17.5,8.3,3.1,3.9,4.7,8.8,4.7,14.7H136.4a17.48,17.48,0,0,0,4.8,12.3,15.26,15.26,0,0,0,11.4,4.8,20,20,0,0,0,6.4-1.1,19.3,19.3,0,0,0,5.3-3A33.07,33.07,0,0,0,168.9,95.1Zm0-11.6a18.66,18.66,0,0,0-3.2-7.1,15.25,15.25,0,0,0-5.6-4.3,16.87,16.87,0,0,0-7.3-1.6,16.06,16.06,0,0,0-10.9,4.1,18.15,18.15,0,0,0-5,8.9Z"></path><path class="cls-1" d="M182,66.4h5.6v6.3a20,20,0,0,1,5.3-5.5,10.67,10.67,0,0,1,5.8-1.8,9.87,9.87,0,0,1,4.9,1.5l-2.9,4.7a7.52,7.52,0,0,0-2.9-.7,8.09,8.09,0,0,0-5.3,2.3,14.64,14.64,0,0,0-3.9,7c-.7,2.4-1,7.4-1,14.8v14.5H182Z"></path><path class="cls-1" d="M246.2,66.4v42.9h-5.4V102a23.11,23.11,0,0,1-7.8,6.3,21.23,21.23,0,0,1-9.4,2.1,21,21,0,0,1-15.6-6.6,23.07,23.07,0,0,1,.1-32,21.23,21.23,0,0,1,15.7-6.6,20,20,0,0,1,17.1,8.9V66.2h5.3Zm-22.1,4.2a16.67,16.67,0,0,0-8.5,2.3,15.93,15.93,0,0,0-6.2,6.4,17.68,17.68,0,0,0-2.3,8.7,18.26,18.26,0,0,0,2.3,8.7,15.93,15.93,0,0,0,6.2,6.4,16.58,16.58,0,0,0,8.4,2.3,17.59,17.59,0,0,0,8.6-2.3,15.42,15.42,0,0,0,6.2-6.2,17.17,17.17,0,0,0,2.2-8.8,16.73,16.73,0,0,0-4.9-12.4A15.8,15.8,0,0,0,224.1,70.6Z"></path><path class="cls-2" d="M259.5,51.3h29.1V57H265.3V75.2h23.3v5.7H265.3v28.5h-5.8V51.3Z"></path><path class="cls-2" d="M296.9,49.9h5.5v59.5h-5.5Z"></path><path class="cls-2" d="M330.5,65.3a21.1,21.1,0,0,1,16.4,7.2A22.55,22.55,0,0,1,352.8,88a22.24,22.24,0,0,1-6.3,15.7c-4.2,4.5-9.5,6.7-16.1,6.7s-12-2.2-16.1-6.7A22.24,22.24,0,0,1,308,88a22.73,22.73,0,0,1,5.9-15.5A21.81,21.81,0,0,1,330.5,65.3Zm0,5.4a15.83,15.83,0,0,0-11.8,5.1,17,17,0,0,0-4.9,12.3,17.68,17.68,0,0,0,2.3,8.7,15.19,15.19,0,0,0,6.1,6.2,16.48,16.48,0,0,0,8.4,2.2A16,16,0,0,0,339,103a15.82,15.82,0,0,0,6.1-6.2,17.68,17.68,0,0,0,2.3-8.7,17.07,17.07,0,0,0-5-12.3A16.2,16.2,0,0,0,330.5,70.7Z"></path><path class="cls-2" 
d="M351.2,66.4h5.7L370,97.6l13.7-31.1h1l13.8,31.1,13.4-31.1h5.7L399,109.3h-1L384.3,78.6l-13.7,30.7h-1Z"></path><polyline class="cls-3" points="51 105 51 41.2 27 41.2"></polyline><polyline class="cls-3" points="38.1 33.8 56.4 33.8 56.4 93"></polyline><polyline class="cls-3" points="79.9 33.8 61.5 33.8 61.5 79.2"></polyline><polyline class="cls-3" points="90.7 41.2 66.7 41.2 66.7 105"></polyline><line class="cls-3" x1="83.1" y1="62.6" x2="66.7" y2="62.6"></line><circle class="cls-4" cx="27" cy="41.2" r="5.3"></circle><path class="cls-1" d="M23.3,41.2a3.8,3.8,0,1,0,3.8-3.8A3.8,3.8,0,0,0,23.3,41.2Z"></path><circle class="cls-5" cx="51" cy="105" r="5.4"></circle><path class="cls-1" d="M47.3,105a3.8,3.8,0,1,0,3.8-3.8A3.8,3.8,0,0,0,47.3,105Z"></path><circle class="cls-6" cx="56.36" cy="93.02" r="3.4"></circle><circle class="cls-6" cx="61.5" cy="79.2" r="2.8"></circle><circle class="cls-7" cx="66.7" cy="105.01" r="5.3"></circle><path class="cls-1" d="M63,105a3.8,3.8,0,1,0,3.8-3.8A3.8,3.8,0,0,0,63,105Z"></path><circle class="cls-8" cx="90.7" cy="41.2" r="5.1"></circle><path class="cls-1" d="M87,41.2a3.8,3.8,0,1,0,3.8-3.8A3.8,3.8,0,0,0,87,41.2Z"></path><circle class="cls-8" cx="84.7" cy="62.6" r="5.1"></circle><path class="cls-1" d="M81,62.6a3.8,3.8,0,1,0,3.8-3.8A3.8,3.8,0,0,0,81,62.6Z"></path><line class="cls-3" x1="34.8" y1="62.6" x2="51.1" y2="62.6"></line><circle class="cls-8" cx="33.1" cy="62.6" r="5.1"></circle><path class="cls-1" d="M36.9,62.6a3.8,3.8,0,1,1-3.8-3.8A3.8,3.8,0,0,1,36.9,62.6Z"></path><line class="cls-3" x1="23.7" y1="26.7" x2="94.1" y2="26.7"></line><circle class="cls-9" cx="94.09" cy="26.67" r="5"></circle><path class="cls-1" d="M90.3,26.7a3.8,3.8,0,1,0,3.8-3.8A3.8,3.8,0,0,0,90.3,26.7Z"></path><circle class="cls-6" cx="78" cy="33.8" r="3.8"></circle><circle class="cls-6" cx="40" cy="33.8" r="3.8"></circle><circle class="cls-10" cx="23.71" cy="26.71" r="5.1"></circle><path class="cls-1" 
d="M20,26.7a3.8,3.8,0,1,0,3.8-3.8A3.8,3.8,0,0,0,20,26.7Z"></path></svg> - </a> - </div> - - <nav class="navbar navbar-expand-lg navbar-dark bg-primary" style="margin-bottom: 10px;"> - <div class="container-fluid"> - <a class="navbar-brand" href="{{ url_for('main.home') }}"> - <img src="https://teraflow-h2020.eu/sites/teraflow/files/public/favicon.png" alt="" width="30" height="24" class="d-inline-block align-text-top"/> - TeraFlow + <!doctype html> + <!-- + Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ --> + + <html lang="en"> + <head> + <!-- Required meta tags --> + <meta charset="utf-8"> + <meta name="viewport" content="width=device-width, initial-scale=1"> + + <link rel="shortcut icon" href="https://tfs.etsi.org/images/logos/tfs_logo_small.png" type="image/png" /> + + <!-- Bootstrap CSS --> + <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.2/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-uWxY/CJNBR+1zjPWmfnSnVxwRheevXITnMqoEIeG1LJrdI0GlVs/9cVSyPYXdcSF" crossorigin="anonymous"> + <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.5.0/font/bootstrap-icons.css"> + + <title>ETSI TeraFlowSDN Controller</title> + </head> + <body> + <div id="teraflow-branding"> + <a href="{{ url_for('main.home') }}" title="Home" rel="home" id="main-logo" class="site-logo site-logo-pages"> + <img src="{{ url_for('static', filename='TeraFlow SDN Logo ScreenColour with Slogan.png') }}" width="400" type="image/png"> </a> - <button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarColor02" aria-controls="navbarColor02" aria-expanded="false" aria-label="Toggle navigation"> - <span class="navbar-toggler-icon"></span> - </button> - <div class="collapse navbar-collapse" id="navbarColor02"> - <ul class="navbar-nav me-auto mb-2 mb-lg-0"> - <li class="nav-item"> - {% if request.path == '/' %} - <a class="nav-link active" aria-current="page" href="{{ url_for('main.home') }}">Home</a> - {% else %} - <a class="nav-link" href="{{ url_for('main.home') }}">Home</a> - {% endif %} - </li> - <li class="nav-item"> - {% if '/device/' in request.path %} - <a class="nav-link active" aria-current="page" href="{{ url_for('device.home') }}">Device</a> - {% else %} - <a class="nav-link" href="{{ url_for('device.home') }}">Device</a> - {% endif %} - </li> - <li class="nav-item"> - {% if '/link/' in request.path %} - <a class="nav-link active" aria-current="page" href="{{ url_for('link.home') }}">Link</a> - {% else %} - <a 
class="nav-link" href="{{ url_for('link.home') }}">Link</a> - {% endif %} - </li> - <li class="nav-item"> - {% if '/service/' in request.path %} - <a class="nav-link active" aria-current="page" href="{{ url_for('service.home') }}">Service</a> - {% else %} - <a class="nav-link" href="{{ url_for('service.home') }}">Service</a> - {% endif %} - </li> - - <li class="nav-item"> - <a class="nav-link" href="/grafana" id="grafana_link" target="grafana">Grafana</a> - </li> - - <li class="nav-item"> - <a class="nav-link" href="{{ url_for('main.debug') }}">Debug</a> - </li> - - <!-- <li class="nav-item"> - <a class="nav-link" href="#">Context</a> - </li> - - <li class="nav-item"> - <a class="nav-link" href="#">Monitoring</a> - </li> --> - <li class="nav-item"> - <a class="nav-link" href="{{ url_for('main.about') }}">About</a> - </li> - </ul> - <span class="navbar-text" style="color: #fff;"> - Current context: <b>{{ get_working_context() }}</b> - </span> - </div> </div> - </nav> - - <main class="container"> - <div class="row"> - <div class="col-md-12"> - {% with messages = get_flashed_messages(with_categories=true) %} - {% if messages %} - {% for category, message in messages %} - <div class="alert alert-{{ category }} alert-dismissible fade show" role="alert"> - {{ message }} - <button type="button" class="btn-close" data-bs-dismiss="alert" aria-label="Close"></button> - </div> - - {% endfor %} - {% endif %} - {% endwith %} - </div> - </div> - <div class="row"> - <div class="col-xxl-12"> - {% block content %}{% endblock %} + + <nav class="navbar navbar-expand-lg navbar-dark bg-primary" style="margin-bottom: 10px;"> + <div class="container-fluid"> + <a class="navbar-brand" href="{{ url_for('main.home') }}"> + <img src="https://teraflow-h2020.eu/sites/teraflow/files/public/favicon.png" alt="" width="30" height="24" class="d-inline-block align-text-top"/> + TeraFlow + </a> + <button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarColor02" 
aria-controls="navbarColor02" aria-expanded="false" aria-label="Toggle navigation"> + <span class="navbar-toggler-icon"></span> + </button> + <div class="collapse navbar-collapse" id="navbarColor02"> + <ul class="navbar-nav me-auto mb-2 mb-lg-0"> + <li class="nav-item"> + {% if request.path == '/' %} + <a class="nav-link active" aria-current="page" href="{{ url_for('main.home') }}">Home</a> + {% else %} + <a class="nav-link" href="{{ url_for('main.home') }}">Home</a> + {% endif %} + </li> + <li class="nav-item"> + {% if '/device/' in request.path %} + <a class="nav-link active" aria-current="page" href="{{ url_for('device.home') }}">Device</a> + {% else %} + <a class="nav-link" href="{{ url_for('device.home') }}">Device</a> + {% endif %} + </li> + <li class="nav-item"> + {% if '/link/' in request.path %} + <a class="nav-link active" aria-current="page" href="{{ url_for('link.home') }}">Link</a> + {% else %} + <a class="nav-link" href="{{ url_for('link.home') }}">Link</a> + {% endif %} + </li> + <li class="nav-item"> + {% if '/service/' in request.path %} + <a class="nav-link active" aria-current="page" href="{{ url_for('service.home') }}">Service</a> + {% else %} + <a class="nav-link" href="{{ url_for('service.home') }}">Service</a> + {% endif %} + </li> + <li class="nav-item"> + {% if '/slice/' in request.path %} + <a class="nav-link active" aria-current="page" href="{{ url_for('slice.home') }}">Slice</a> + {% else %} + <a class="nav-link" href="{{ url_for('slice.home') }}">Slice</a> + {% endif %} + </li> + <!--<li class="nav-item"> + <a class="nav-link" href="/grafana" id="grafana_link" target="grafana">Grafana</a> + </li>--> + + <li class="nav-item"> + <a class="nav-link" href="{{ url_for('main.debug') }}">Debug</a> + </li> + + <!-- <li class="nav-item"> + <a class="nav-link" href="#">Context</a> + </li> + + <li class="nav-item"> + <a class="nav-link" href="#">Monitoring</a> + </li> --> + <li class="nav-item"> + <a class="nav-link" href="{{ url_for('main.about') 
}}">About</a> + </li> + </ul> + <span class="navbar-text" style="color: #fff;"> + Current context: <b>{{ get_working_context() }}</b> + </span> + </div> </div> - </div> - </main> - - <footer class="footer" style="background-color: darkgrey; margin-top: 30px; padding-top: 20px;"> - <div class="container"> + </nav> + + <main class="container"> <div class="row"> <div class="col-md-12"> - <p class="text-center" style="color: white;">© 2021-2023</p> + {% with messages = get_flashed_messages(with_categories=true) %} + {% if messages %} + {% for category, message in messages %} + <div class="alert alert-{{ category }} alert-dismissible fade show" role="alert"> + {{ message }} + <button type="button" class="btn-close" data-bs-dismiss="alert" aria-label="Close"></button> + </div> + + {% endfor %} + {% endif %} + {% endwith %} </div> </div> <div class="row"> - <div class="col-md-6"> - <p>This project has received funding from the European Union's Horizon 2020 research and innovation programme under grant agreement No 101015857.</p> + <div class="col-xxl-12"> + {% block content %}{% endblock %} + </div> + </div> + </main> + + <footer class="footer" style="background-color: darkgrey; margin-top: 30px; padding-top: 20px;"> + <div class="container"> + <div class="row"> + <div class="col-md-12"> + <p class="text-center" style="color: white;">© 2021-2023</p> + </div> </div> - <div class="col-md-6"> - <img src="https://teraflow-h2020.eu/sites/teraflow/files/public/content-images/media/2021/logo%205G-ppp%20eu.png" width="310" alt="5g ppp EU logo" loading="lazy" typeof="foaf:Image"> + <div class="row"> + <div class="col-md-6"> + <p>This project has received funding from the European Union's Horizon 2020 research and innovation programme under grant agreement No 101015857.</p> + </div> + <div class="col-md-6"> + <img src="https://teraflow-h2020.eu/sites/teraflow/files/public/content-images/media/2021/logo%205G-ppp%20eu.png" width="310" alt="5g ppp EU logo" loading="lazy" 
typeof="foaf:Image"> + </div> </div> </div> - </div> - </footer> - - <!-- Optional JavaScript; choose one of the two! --> - - <!-- Option 1: Bootstrap Bundle with Popper --> - <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.2/dist/js/bootstrap.bundle.min.js" integrity="sha384-kQtW33rZJAHjgefvhyyzcGF3C5TFyBQBA13V1RKPf4uH+bwyzQxZ6CmMZHmNBEfJ" crossorigin="anonymous"></script> - <!-- <script src="{{ url_for('static', filename='site.js') }}"/> --> - <!-- <script> - document.getElementById("grafana_link").href = window.location.protocol + "//" + window.location.hostname + ":30300" - </script> --> - <!-- Option 2: Separate Popper and Bootstrap JS --> - <!-- - <script src="https://cdn.jsdelivr.net/npm/@popperjs/core@2.10.2/dist/umd/popper.min.js" integrity="sha384-7+zCNj/IqJ95wo16oMtfsKbZ9ccEh31eOz1HGyDuCQ6wgnyJNSYdrPa03rtR1zdB" crossorigin="anonymous"></script> - <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.2/dist/js/bootstrap.min.js" integrity="sha384-PsUw7Xwds7x08Ew3exXhqzbhuEYmA2xnwc8BuD6SEr+UmEHlX8/MCltYEodzWA4u" crossorigin="anonymous"></script> - --> - </body> -</html> \ No newline at end of file + </footer> + + <!-- Optional JavaScript; choose one of the two! 
--> + + <!-- Option 1: Bootstrap Bundle with Popper --> + <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.2/dist/js/bootstrap.bundle.min.js" integrity="sha384-kQtW33rZJAHjgefvhyyzcGF3C5TFyBQBA13V1RKPf4uH+bwyzQxZ6CmMZHmNBEfJ" crossorigin="anonymous"></script> + <!-- <script src="{{ url_for('static', filename='site.js') }}"/> --> + <!-- <script> + document.getElementById("grafana_link").href = window.location.protocol + "//" + window.location.hostname + ":30300" + </script> --> + <!-- Option 2: Separate Popper and Bootstrap JS --> + <!-- + <script src="https://cdn.jsdelivr.net/npm/@popperjs/core@2.10.2/dist/umd/popper.min.js" integrity="sha384-7+zCNj/IqJ95wo16oMtfsKbZ9ccEh31eOz1HGyDuCQ6wgnyJNSYdrPa03rtR1zdB" crossorigin="anonymous"></script> + <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.2/dist/js/bootstrap.min.js" integrity="sha384-PsUw7Xwds7x08Ew3exXhqzbhuEYmA2xnwc8BuD6SEr+UmEHlX8/MCltYEodzWA4u" crossorigin="anonymous"></script> + --> + </body> + </html> \ No newline at end of file diff --git a/src/webui/service/templates/device/detail.html b/src/webui/service/templates/device/detail.html index b4cf6b715250d3e96b5026c3e19758a2be9a9607..f2cdc581553bbd8d45f237fd99d2b746ab0ad61b 100644 --- a/src/webui/service/templates/device/detail.html +++ b/src/webui/service/templates/device/detail.html @@ -1,111 +1,129 @@ <!-- - Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) + Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. + --> + + {% extends 'base.html' %} + + {% block content %} + <h1>Device {{ device.device_id.device_uuid.uuid }}</h1> + + <div class="row mb-3"> + <div class="col-sm-3"> + <button type="button" class="btn btn-success" onclick="window.location.href='{{ url_for('device.home') }}'"> + <i class="bi bi-box-arrow-in-left"></i> + Back to device list + </button> + </div> + <div class="col-sm-3"> + <a id="update" class="btn btn-secondary" href="#"> + <i class="bi bi-pencil-square"></i> + Update + </a> + </div> + <div class="col-sm-3"> + <!-- <button type="button" class="btn btn-danger"><i class="bi bi-x-square"></i>Delete device</button> --> + <button type="button" class="btn btn-danger" data-bs-toggle="modal" data-bs-target="#deleteModal"> + <i class="bi bi-x-square"></i>Delete device + </button> + </div> + </div> - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
---> - -{% extends 'base.html' %} - -{% block content %} - <h1>Device {{ device.device_id.device_uuid.uuid }}</h1> - - <div class="row mb-3"> - <div class="col-sm-3"> - <button type="button" class="btn btn-success" onclick="window.location.href='{{ url_for('device.home') }}'"> - <i class="bi bi-box-arrow-in-left"></i> - Back to device list - </button> + <br> + <div class="row mb-3"> + <div class="col-sm-4"> + <b>UUID: </b>{{ device.device_id.device_uuid.uuid }}<br><br> + <b>Type: </b>{{ device.device_type }}<br><br> + <b>Drivers: </b> + <ul> + {% for driver in device.device_drivers %} + <li>{{ dde.Name(driver).replace('DEVICEDRIVER_', '').replace('UNDEFINED', 'EMULATED') }}</li> + {% endfor %} + </ul> + </div> + <div class="col-sm-8"> + <table class="table table-striped table-hover"> + <thead> + <tr> + <th scope="col">Endpoints</th> + <th scope="col">Type</th> + </tr> + </thead> + <tbody> + {% for endpoint in device.device_endpoints %} + <tr> + <td> + {{ endpoint.endpoint_id.endpoint_uuid.uuid }} + </td> + <td> + {{ endpoint.endpoint_type }} + </td> + </tr> + {% endfor %} + </tbody> + </table> + </div> + </div> </div> - <div class="col-sm-3"> - <a id="update" class="btn btn-secondary" href="#"> - <i class="bi bi-pencil-square"></i> - Update - </a> - </div> - <div class="col-sm-3"> - <!-- <button type="button" class="btn btn-danger"><i class="bi bi-x-square"></i>Delete device</button> --> - <button type="button" class="btn btn-danger" data-bs-toggle="modal" data-bs-target="#deleteModal"> - <i class="bi bi-x-square"></i>Delete device - </button> - </div> - </div> - <div class="row mb-3"> - <div class="col-sm-1"><b>UUID:</b></div> - <div class="col-sm-5"> - {{ device.device_id.device_uuid.uuid }} - </div> - <div class="col-sm-1"><b>Type:</b></div> - <div class="col-sm-5"> - {{ device.device_type }} - </div> - </div> - <div class="row mb-3"> - <div class="col-sm-1"><b>Drivers:</b></div> - <div class="col-sm-11"> - <ul> - {% for driver in device.device_drivers %} - 
<li>{{ dde.Name(driver).replace('DEVICEDRIVER_', '').replace('UNDEFINED', 'EMULATED') }}</li> - {% endfor %} - </ul> - </div> - </div> - <div class="row mb-3"> - <b>Endpoints:</b> - <div class="col-sm-10"> - <ul> - {% for endpoint in device.device_endpoints %} - <li>{{ endpoint.endpoint_id.endpoint_uuid.uuid }}: {{ endpoint.endpoint_type }}</li> - {% endfor %} - </ul> - </div> - </div> - <div class="row mb-3"> <b>Configurations:</b> - <div class="col-sm-10"> - <ul> - {% for config in device.device_config.config_rules %} + <table class="table table-striped table-hover"> + <thead> + <tr> + <th scope="col">Key</th> + <th scope="col">Value</th> + </tr> + </thead> + <tbody> + {% for config in device.device_config.config_rules %} {% if config.WhichOneof('config_rule') == 'custom' %} - <li>{{ config.custom.resource_key }}: - <ul> - {% for key, value in (config.custom.resource_value | from_json).items() %} - <li><b>{{ key }}:</b> {{ value }}</li> - {% endfor %} - </ul> - </li> + <tr> + <td> + {{ config.custom.resource_key }} + </td> + <td> + <ul> + {% for key, value in (config.custom.resource_value | from_json).items() %} + <li><b>{{ key }}:</b> {{ value }}</li> + {% endfor %} + </ul> + </td> + </tr> {% endif %} - {% endfor %} - </ul> - </div> - </div> + {% endfor %} + </tbody> + </table> - <!-- Modal --> -<div class="modal fade" id="deleteModal" data-bs-backdrop="static" data-bs-keyboard="false" tabindex="-1" aria-labelledby="staticBackdropLabel" aria-hidden="true"> - <div class="modal-dialog"> - <div class="modal-content"> - <div class="modal-header"> - <h5 class="modal-title" id="staticBackdropLabel">Delete device?</h5> - <button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button> - </div> - <div class="modal-body"> - Are you sure you want to delete the device "{{ device.device_id.device_uuid.uuid }}"? 
- </div> - <div class="modal-footer"> - <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">No</button> - <a type="button" class="btn btn-danger" href="{{ url_for('device.delete', device_uuid=device.device_id.device_uuid.uuid) }}"><i class="bi bi-exclamation-diamond"></i>Yes</a> - </div> - </div> - </div> - </div> -{% endblock %} \ No newline at end of file + <!-- Modal --> + <div class="modal fade" id="deleteModal" data-bs-backdrop="static" data-bs-keyboard="false" tabindex="-1" aria-labelledby="staticBackdropLabel" aria-hidden="true"> + <div class="modal-dialog"> + <div class="modal-content"> + <div class="modal-header"> + <h5 class="modal-title" id="staticBackdropLabel">Delete device?</h5> + <button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button> + </div> + <div class="modal-body"> + Are you sure you want to delete the device "{{ device.device_id.device_uuid.uuid }}"? + </div> + <div class="modal-footer"> + <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">No</button> + <a type="button" class="btn btn-danger" href="{{ url_for('device.delete', device_uuid=device.device_id.device_uuid.uuid) }}"><i class="bi bi-exclamation-diamond"></i>Yes</a> + </div> + </div> + </div> + </div> + + {% endblock %} + \ No newline at end of file diff --git a/src/webui/service/templates/js/topology.js b/src/webui/service/templates/js/topology.js index 05216fb98808d5b574d613344c63a7e19cb2c472..69de0445dac24bf2f7f16ec21da4a6d35133e9da 100644 --- a/src/webui/service/templates/js/topology.js +++ b/src/webui/service/templates/js/topology.js @@ -15,6 +15,15 @@ // Based on: // https://www.d3-graph-gallery.com/graph/network_basic.html // https://bl.ocks.org/steveharoz/8c3e2524079a8c440df60c1ab72b5d03 +// https://www.d3indepth.com/zoom-and-pan/ + +// Pan & Zoom does not work; to be reviewed +//<button onclick="zoomIn()">Zoom in</button> +//<button onclick="zoomOut()">Zoom out</button> +//<button 
onclick="resetZoom()">Reset zoom</button> +//<button onclick="panLeft()">Pan left</button> +//<button onclick="panRight()">Pan right</button> +//<button onclick="center()">Center</button> // set the dimensions and margins of the graph const margin = {top: 5, right: 5, bottom: 5, left: 5}; @@ -22,16 +31,24 @@ const margin = {top: 5, right: 5, bottom: 5, left: 5}; const icon_width = 40; const icon_height = 40; -width = 800 - margin.left - margin.right; -height = 500 - margin.top - margin.bottom; +width = 1000 - margin.left - margin.right; +height = 600 - margin.top - margin.bottom; + +//function handleZoom(e) { +// console.dir(e); +// d3.select('svg g').attr('transform', e.transform); +//} +//let zoom = d3.zoom().scaleExtent([0.01, 10]).translateExtent([[0, 0], [width, height]]).on('zoom', handleZoom); // append the svg object to the body of the page const svg = d3.select('#topology') .append('svg') .attr('width', width + margin.left + margin.right) .attr('height', height + margin.top + margin.bottom) + //.call(zoom) .append('g') - .attr('transform', `translate(${margin.left}, ${margin.top})`); + .attr('transform', `translate(${margin.left}, ${margin.top})`) + ; // svg objects var link, node; @@ -148,3 +165,25 @@ d3.select(window).on("resize", function(){ height = +svg.node().getBoundingClientRect().height; simulation.alpha(1).restart(); }); + +///******************** UI ACTIONS *******************/ +// +//function resetZoom() { +// d3.select('svg').transition().call(zoom.scaleTo, 1.0); +//} +//function zoomIn() { +// d3.select('svg').transition().call(zoom.scaleBy, 2.0); +//} +//function zoomOut() { +// d3.select('svg').transition().call(zoom.scaleBy, 0.5); +//} +// +//function center() { +// d3.select('svg').transition().call(zoom.translateTo, 0.5 * width, 0.5 * height); +//} +//function panLeft() { +// d3.select('svg').transition().call(zoom.translateBy, -50, 0); +//} +//function panRight() { +// d3.select('svg').transition().call(zoom.translateBy, 50, 0); +//} 
diff --git a/src/webui/service/templates/link/detail.html b/src/webui/service/templates/link/detail.html new file mode 100644 index 0000000000000000000000000000000000000000..7df9ddce6bdddd511f3b50313cafa1374990b99e --- /dev/null +++ b/src/webui/service/templates/link/detail.html @@ -0,0 +1,65 @@ +<!-- + Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + --> + {% extends 'base.html' %} + + {% block content %} + <h1>Link {{ link.link_id.link_uuid.uuid }}</h1> + <div class="row mb-3"> + <div class="col-sm-3"> + <button type="button" class="btn btn-success" onclick="window.location.href='{{ url_for('link.home') }}'"> + <i class="bi bi-box-arrow-in-left"></i> + Back to link list + </button> + </div> + </div> + + <br> + <div class="row mb-3"> + <div class="col-sm-4"> + <b>UUID: </b>{{ link.link_id.link_uuid.uuid }}<br><br> + </div> + <div class="col-sm-8"> + <table class="table table-striped table-hover"> + <thead> + <tr> + <th scope="col">Endpoints</th> + <th scope="col">Device</th> + </tr> + </thead> + <tbody> + {% for end_point in link.link_endpoint_ids %} + <tr> + <td> + {{ end_point.endpoint_uuid.uuid }} + </td> + <td> + <a href="{{ url_for('device.detail', device_uuid=end_point.device_id.device_uuid.uuid) }}"> + {{ end_point.device_id.device_uuid.uuid }} + <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16"> + <path d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 
5.5 8 5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z"/> + <path d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z"/> + </svg> + </a> + </td> + </tr> + {% endfor %} + </tbody> + </table> + </div> + </div> + + {% endblock %} + \ No newline at end of file diff --git a/src/webui/service/templates/link/home.html b/src/webui/service/templates/link/home.html index d0c122f6aafd0de8e2937be056d1c2e787c91710..77d00d34185ac45ada0ed6d8e9915c0b2f3ad9c0 100644 --- a/src/webui/service/templates/link/home.html +++ b/src/webui/service/templates/link/home.html @@ -1,96 +1,96 @@ <!-- - Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. ---> - -{% extends 'base.html' %} - -{% block content %} - <h1>Links</h1> - - <div class="row"> - <div class="col"> - <!-- <a href="#" class="btn btn-primary" style="margin-bottom: 10px;"> - <i class="bi bi-plus"></i> - Add New Link - </a> --> - </div> - <div class="col"> - {{ links | length }} links found</i> - </div> - <!-- <div class="col"> - <form> - <div class="input-group"> - <input type="text" aria-label="Search" placeholder="Search..." 
class="form-control"/> - <button type="submit" class="btn btn-primary">Search</button> - </div> - </form> - </div> --> - </div> - - <table class="table table-striped table-hover"> - <thead> - <tr> - <th scope="col">#</th> - <th scope="col">Endpoints</th> - <th scope="col"></th> - </tr> - </thead> - <tbody> - {% if links %} - {% for link in links %} - <tr> - <td> - <!-- <a href="#"> --> - {{ link.link_id.link_uuid.uuid }} - <!-- </a> --> - </td> - - <td> - <ul> - {% for end_point in link.link_endpoint_ids %} - <li> - {{ end_point.endpoint_uuid.uuid }} / - Device: - <a href="{{ url_for('device.detail', device_uuid=end_point.device_id.device_uuid.uuid) }}"> - {{ end_point.device_id.device_uuid.uuid }} - <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16"> - <path d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 5.5 8 5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z"/> - <path d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z"/> - </svg> - </a> - </li> - {% endfor %} - </ul> - </td> - - <td> - <!-- <a href="#"> - <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16"> - <path d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 5.5 8 5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z"/> - <path d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z"/> - </svg> - </a> --> - </td> - </tr> - {% endfor %} - {% else %} - <tr> - <td 
colspan="7">No links found</td> - </tr> - {% endif %} - </tbody> - </table> - -{% endblock %} \ No newline at end of file + Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + --> + + {% extends 'base.html' %} + + {% block content %} + <h1>Links</h1> + + <div class="row"> + <div class="col"> + <!-- <a href="#" class="btn btn-primary" style="margin-bottom: 10px;"> + <i class="bi bi-plus"></i> + Add New Link + </a> --> + </div> + <div class="col"> + {{ links | length }} links found</i> + </div> + <!-- <div class="col"> + <form> + <div class="input-group"> + <input type="text" aria-label="Search" placeholder="Search..." 
class="form-control"/> + <button type="submit" class="btn btn-primary">Search</button> + </div> + </form> + </div> --> + </div> + + <table class="table table-striped table-hover"> + <thead> + <tr> + <th scope="col">#</th> + <th scope="col">Endpoints</th> + <th scope="col"></th> + </tr> + </thead> + <tbody> + {% if links %} + {% for link in links %} + <tr> + <td> + <!-- <a href="#"> --> + {{ link.link_id.link_uuid.uuid }} + <!-- </a> --> + </td> + + <td> + <ul> + {% for end_point in link.link_endpoint_ids %} + <li> + {{ end_point.endpoint_uuid.uuid }} / + Device: + <a href="{{ url_for('device.detail', device_uuid=end_point.device_id.device_uuid.uuid) }}"> + {{ end_point.device_id.device_uuid.uuid }} + <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16"> + <path d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 5.5 8 5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z"/> + <path d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z"/> + </svg> + </a> + </li> + {% endfor %} + </ul> + </td> + + <td> + <a href="{{ url_for('link.detail', link_uuid=link.link_id.link_uuid.uuid) }}"> + <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16"> + <path d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 5.5 8 5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z"/> + <path d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z"/> + </svg> + </a> 
+ </td> + </tr> + {% endfor %} + {% else %} + <tr> + <td colspan="7">No links found</td> + </tr> + {% endif %} + </tbody> + </table> + + {% endblock %} \ No newline at end of file diff --git a/src/webui/service/templates/main/about.html b/src/webui/service/templates/main/about.html index 4ba3a5845b0e8e70b029d4ec459733468899698b..80d61891ce95ff096308ed903da294bbf23c5070 100644 --- a/src/webui/service/templates/main/about.html +++ b/src/webui/service/templates/main/about.html @@ -16,10 +16,10 @@ {% extends 'base.html' %} {% block content %} - <h1>TeraFlow OS</h1> + <h1>ETSI TeraFlowSDN Controller</h1> - <p>For more information, visit the <a href="https://teraflow-h2020.eu/" target="_newtf">TeraFlow H2020 webpage</a>.</p> + <p>For more information, visit the <a href="https://tfs.etsi.org/" target="_newtf">ETSI Open Source Group for TeraFlowSDN</a>.</p> - <img alt="Consortium" class="img-fluid" src="{{ url_for('static', filename='partners.png') }}"/> + <!--<img alt="Consortium" class="img-fluid" src="{{ url_for('static', filename='partners.png') }}"/>--> {% endblock %} \ No newline at end of file diff --git a/src/webui/service/templates/main/home.html b/src/webui/service/templates/main/home.html index 3cc9fbcffce6cfbb6ebb40dec9d3359f59df5a15..db390939ff926b5bbfbfc6507b0f4e79695f3693 100644 --- a/src/webui/service/templates/main/home.html +++ b/src/webui/service/templates/main/home.html @@ -17,7 +17,7 @@ {% extends 'base.html' %} {% block content %} - <h1>TeraFlow OS SDN Controller</h1> + <h2>ETSI TeraFlowSDN Controller</h2> {% for field, message in context_form.errors.items() %} <div class="alert alert-dismissible fade show" role="alert"> diff --git a/src/webui/service/templates/service/detail.html b/src/webui/service/templates/service/detail.html index 1e58b9eaad3155524808f60b49840edab7f17739..f3f2506465df81ff3761960c04ae6f6c093250fe 100644 --- a/src/webui/service/templates/service/detail.html +++ b/src/webui/service/templates/service/detail.html @@ -17,85 +17,212 @@ 
{% extends 'base.html' %} {% block content %} - <h1>Service {{ service.service_id.service_uuid.uuid }}</h1> +<h1>Service {{ service.service_id.service_uuid.uuid }}</h1> - <div class="row mb-3"> - <div class="col-sm-3"> - <button type="button" class="btn btn-success" onclick="window.location.href='{{ url_for('service.home') }}'"> - <i class="bi bi-box-arrow-in-left"></i> - Back to service list - </button> - </div> - <div class="col-sm-3"> - <a id="update" class="btn btn-secondary" href="#"> - <i class="bi bi-pencil-square"></i> - Update - </a> - </div> - <div class="col-sm-3"> - <!-- <button type="button" class="btn btn-danger"><i class="bi bi-x-square"></i>Delete service</button> --> - <button type="button" class="btn btn-danger" data-bs-toggle="modal" data-bs-target="#deleteModal"> - <i class="bi bi-x-square"></i>Delete service - </button> - </div> +<div class="row mb-3"> + <div class="col-sm-3"> + <button type="button" class="btn btn-success" onclick="window.location.href='{{ url_for('service.home') }}'"> + <i class="bi bi-box-arrow-in-left"></i> + Back to service list + </button> </div> + <!-- + <div class="col-sm-3"> + <a id="update" class="btn btn-secondary" href="#"> + <i class="bi bi-pencil-square"></i> + Update + </a> + </div> + <div class="col-sm-3">--> + <!-- <button type="button" class="btn btn-danger"><i class="bi bi-x-square"></i>Delete service</button> --> + <!--<button type="button" class="btn btn-danger" data-bs-toggle="modal" data-bs-target="#deleteModal"> + <i class="bi bi-x-square"></i>Delete service + </button> + </div> + --> +</div> - <div class="row mb-3"> - <div class="col-sm-1"><b>UUID:</b></div> - <div class="col-sm-5"> - {{ service.service_id.service_uuid.uuid }} - </div> - <div class="col-sm-1"><b>Type:</b></div> - <div class="col-sm-5"> - {{ service.service_type }} - </div> +<div class="row mb-3"> + <div class="col-sm-4"> + <b>UUID: </b> {{ service.service_id.service_uuid.uuid }}<br><br> + <b>Type: </b> {{ 
ste.Name(service.service_type).replace('SERVICETYPE_', '') }}<br><br> + <b>Status: </b> {{ sse.Name(service.service_status.service_status).replace('SERVICESTATUS_', '') }}<br><br> </div> - <div class="row mb-3"> - <b>Endpoints:</b> - <div class="col-sm-10"> - <ul> - {% for endpoint in service.service_endpoint_ids %} - <li>{{ endpoint.endpoint_uuid.uuid }}: {{ endpoint.endpoint_type }}</li> - {% endfor %} - </ul> - </div> + <div class="col-sm-8"> + <table class="table table-striped table-hover"> + <thead> + <tr> + <th scope="col">Endpoints</th> + <th scope="col">Device</th> + </tr> + </thead> + <tbody> + {% for endpoint in service.service_endpoint_ids %} + <tr> + <td> + {{ endpoint.endpoint_uuid.uuid }} + </td> + <td> + <a href="{{ url_for('device.detail', device_uuid=endpoint.device_id.device_uuid.uuid) }}"> + {{ endpoint.device_id.device_uuid.uuid }} + <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" + class="bi bi-eye" viewBox="0 0 16 16"> + <path + d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 5.5 8 5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z" /> + <path + d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z" /> + </svg> + </a> + </td> + </tr> + {% endfor %} + </tbody> + </table> </div> - <div class="row mb-3"> - <b>Configurations:</b> - <div class="col-sm-10"> - <ul> - {% for config in service.service_config.config_rules %} - {% if config.WhichOneof('config_rule') == 'custom' %} - <li>{{ config.custom.resource_key }}: - <ul> - {% for key, value in (config.custom.resource_value | from_json).items() %} - <li><b>{{ key }}:</b> {{ value }}</li> - {% endfor %} - </ul> - </li> +</div> +<b>Constraints:</b> +<table class="table table-striped table-hover"> + <thead> + 
<tr> + <th scope="col">Kind</th> + <th scope="col">Type</th> + <th scope="col">Value</th> + </tr> + </thead> + <tbody> + {% for constraint in service.service_constraints %} + {% if constraint.WhichOneof('constraint')=='custom' %} + <tr> + <td>Custom</td> + <td>{{ constraint.custom.constraint_type }}</td> + <td>{{ constraint.custom.constraint_value }}</td> + </tr> + {% elif constraint.WhichOneof('constraint')=='endpoint_location' %} + <tr> + <td>Endpoint Location</td> + <td> + {{ constraint.endpoint_location.endpoint_id.device_id.device_uuid.uuid }} / {{ + constraint.endpoint_location.endpoint_id.endpoint_uuid.uuid }} + </td> + <td> + {% if constraint.endpoint_location.location.WhichOneof('location')=='region' %} + Region: {{ constraint.endpoint_location.location.region }} + {% elif constraint.endpoint_location.location.WhichOneof('location')=='gps_position' %} + Position (lat/long): + {{ constraint.endpoint_location.location.gps_position.latitude }} / + {{ constraint.endpoint_location.location.gps_position.longitude }} {% endif %} - {% endfor %} - </ul> - </div> - </div> - - <!-- Modal --> -<div class="modal fade" id="deleteModal" data-bs-backdrop="static" data-bs-keyboard="false" tabindex="-1" aria-labelledby="staticBackdropLabel" aria-hidden="true"> + </td> + </tr> + {% elif constraint.WhichOneof('constraint')=='endpoint_priority' %} + <tr> + <td>Endpoint Priority</td> + <td> + {{ constraint.endpoint_priority.endpoint_id.device_id.device_uuid.uuid }} / {{ + constraint.endpoint_priority.endpoint_id.endpoint_uuid.uuid }} + </td> + <td>{{ constraint.endpoint_priority.priority }}</td> + </tr> + {% elif constraint.WhichOneof('constraint')=='sla_availability' %} + <tr> + <td>SLA Availability</td> + <td>-</td> + <td> + {{ constraint.sla_availability.num_disjoint_paths }} disjoint paths; + {% if constraint.sla_availability.all_active %}all{% else %}single{% endif %}active + </td> + </tr> + {% else %} + <tr> + <td>-</td> + <td>-</td> + <td>{{ constraint }}</td> + </tr> + 
{% endif %} + {% endfor %} + </tbody> +</table> +<b>Configurations:</b> +<table class="table table-striped table-hover"> + <thead> + <tr> + <th scope="col">Key</th> + <th scope="col">Value</th> + </tr> + </thead> + <tbody> + {% for config in service.service_config.config_rules %} + {% if config.WhichOneof('config_rule') == 'custom' %} + <tr> + <td> + {{ config.custom.resource_key }} + </td> + <td> + <ul> + {% for key, value in (config.custom.resource_value | from_json).items() %} + <li><b>{{ key }}:</b> {{ value }}</li> + {% endfor %} + </ul> + </td> + </tr> + {% endif %} + {% endfor %} + </tbody> +</table> +<!-- Modal --> +<div class="modal fade" id="deleteModal" data-bs-backdrop="static" data-bs-keyboard="false" tabindex="-1" + aria-labelledby="staticBackdropLabel" aria-hidden="true"> <div class="modal-dialog"> - <div class="modal-content"> - <div class="modal-header"> - <h5 class="modal-title" id="staticBackdropLabel">Delete service?</h5> - <button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button> - </div> - <div class="modal-body"> - Are you sure you want to delete the service "{{ service.service_id.service_uuid.uuid }}"? - </div> - <div class="modal-footer"> - <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">No</button> - <a type="button" class="btn btn-danger" href="{{ url_for('service.delete', service_uuid=service.service_id.service_uuid.uuid) }}"><i class="bi bi-exclamation-diamond"></i>Yes</a> + <div class="modal-content"> + <div class="modal-header"> + <h5 class="modal-title" id="staticBackdropLabel">Delete service?</h5> + <button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button> + </div> + <div class="modal-body"> + Are you sure you want to delete the service "{{ service.service_id.service_uuid.uuid }}"? 
+ </div> + <div class="modal-footer"> + <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">No</button> + <a type="button" class="btn btn-danger" + href="{{ url_for('service.delete', service_uuid=service.service_id.service_uuid.uuid) }}"><i + class="bi bi-exclamation-diamond"></i>Yes</a> + </div> </div> - </div> </div> - </div> +</div> + + +<table class="table table-striped table-hover"> + <thead> + <tr> + <th scope="col">Connection Id</th> + <th scope="col">Sub-service</th> + <th scope="col">Path</th> + </tr> + </thead> + <tbody> + {% for connection in connections.connections %} + <tr> + <td> + {{ connection.connection_id.connection_uuid.uuid }} + </td> + <td> + {{ connection.sub_service_ids|map(attribute='service_uuid')|map(attribute='uuid')|join(', ') }} + </td> + + {% for i in range(connection.path_hops_endpoint_ids|length) %} + <td> + {{ connection.path_hops_endpoint_ids[i].device_id.device_uuid.uuid }} / {{ + connection.path_hops_endpoint_ids[i].endpoint_uuid.uuid }} + </td> + {% endfor %} + </tr> + {% endfor %} + </tbody> +</table> + + + + {% endblock %} \ No newline at end of file diff --git a/src/webui/service/templates/service/home.html b/src/webui/service/templates/service/home.html index 0e152006c149df35d477ecfb81bb4fcc0b562d9a..c0a01839bb519074526a4ed34669ebfdd3d8b8e4 100644 --- a/src/webui/service/templates/service/home.html +++ b/src/webui/service/templates/service/home.html @@ -46,7 +46,6 @@ <th scope="col">#</th> <th scope="col">Type</th> <th scope="col">End points</th> - <th scope="col">Constraints</th> <th scope="col">Status</th> <th scope="col"></th> </tr> @@ -70,14 +69,7 @@ {% endfor %} </ul> </td> - <td> - <ul> - {% for constraint in service.service_constraints %} - <li>{{ constraint.constraint_type }}: {{ constraint.constraint_value }}</li> - {% endfor %} - </ul> - </td> - <td>{{ sse.Name(service.service_status.service_status).replace('SERVICESTATUS_', '') }}</td> + <td>{{ 
sse.Name(service.service_status.service_status).replace('SERVICESTATUS_', '') }} </td> <td> <a href="{{ url_for('service.detail', service_uuid=service.service_id.service_uuid.uuid) }}"> <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16"> diff --git a/src/webui/service/templates/slice/detail.html b/src/webui/service/templates/slice/detail.html new file mode 100644 index 0000000000000000000000000000000000000000..936b0f08fb1b7def156e11f16bf552b8d60018be --- /dev/null +++ b/src/webui/service/templates/slice/detail.html @@ -0,0 +1,221 @@ +<!-- + Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+--> + +{% extends 'base.html' %} + +{% block content %} +<h1>Slice {{ slice.slice_id.slice_uuid.uuid }} </h1> + +<div class="row mb-3"> + <div class="col-sm-3"> + <button type="button" class="btn btn-success" onclick="window.location.href='{{ url_for('slice.home') }}'"> + <i class="bi bi-box-arrow-in-left"></i> + Back to slice list + </button> + </div> + <!-- + <div class="col-sm-3"> + <a id="update" class="btn btn-secondary" href="#"> + <i class="bi bi-pencil-square"></i> + Update + </a> + </div> + <div class="col-sm-3">--> + <!-- <button type="button" class="btn btn-danger"><i class="bi bi-x-square"></i>Delete slice</button> --> + <!--<button type="button" class="btn btn-danger" data-bs-toggle="modal" data-bs-target="#deleteModal"> + <i class="bi bi-x-square"></i>Delete slice + </button> + </div> + --> +</div> + +<div class="row mb-3"> + <div class="col-sm-4"> + <b>UUID: </b> {{ slice.slice_id.slice_uuid.uuid }}<br><br> + <b>Status: </b> {{ sse.Name(slice.slice_status.slice_status).replace('SLICESTATUS_', '') }}<br><br> + </div> + <div class="col-sm-8"> + <table class="table table-striped table-hover"> + <thead> + <tr> + <th scope="col">Endpoints</th> + <th scope="col">Device</th> + </tr> + </thead> + <tbody> + {% for endpoint in slice.slice_endpoint_ids %} + <tr> + <td> + {{ endpoint.endpoint_uuid.uuid }} + </td> + <td> + <a href="{{ url_for('device.detail', device_uuid=endpoint.device_id.device_uuid.uuid) }}"> + {{ endpoint.device_id.device_uuid.uuid }} + <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" + class="bi bi-eye" viewBox="0 0 16 16"> + <path + d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 5.5 8 5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z" /> + <path + d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 
0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z" /> + </svg> + </a> + </td> + </tr> + {% endfor %} + </tbody> + </table> + </div> +</div> +<b>Constraints:</b> +<table class="table table-striped table-hover"> + <thead> + <tr> + <th scope="col">Kind</th> + <th scope="col">Type</th> + <th scope="col">Value</th> + </tr> + </thead> + <tbody> + {% for constraint in slice.slice_constraints %} + {% if constraint.WhichOneof('constraint')=='custom' %} + <tr> + <td>Custom</td> + <td>{{ constraint.custom.constraint_type }}</td> + <td>{{ constraint.custom.constraint_value }}</td> + </tr> + {% elif constraint.WhichOneof('constraint')=='endpoint_location' %} + <tr> + <td>Endpoint Location</td> + <td> + {{ constraint.endpoint_location.endpoint_id.device_id.device_uuid.uuid }} / {{ + constraint.endpoint_location.endpoint_id.endpoint_uuid.uuid }} + </td> + <td> + {% if constraint.endpoint_location.location.WhichOneof('location')=='region' %} + Region: {{ constraint.endpoint_location.location.region }} + {% elif constraint.endpoint_location.location.WhichOneof('location')=='gps_position' %} + Position (lat/long): + {{ constraint.endpoint_location.location.gps_position.latitude }} / + {{ constraint.endpoint_location.location.gps_position.longitude }} + {% endif %} + </td> + </tr> + {% elif constraint.WhichOneof('constraint')=='endpoint_priority' %} + <tr> + <td>Endpoint Priority</td> + <td> + {{ constraint.endpoint_priority.endpoint_id.device_id.device_uuid.uuid }} / {{ + constraint.endpoint_priority.endpoint_id.endpoint_uuid.uuid }} + </td> + <td>{{ constraint.endpoint_priority.priority }}</td> + </tr> + {% elif constraint.WhichOneof('constraint')=='sla_availability' %} + <tr> + <td>SLA Availability</td> + <td>-</td> + <td> + {{ constraint.sla_availability.num_disjoint_paths }} disjoint paths; + {% if constraint.sla_availability.all_active %}all{% else %}single{% endif %}active + </td> + </tr> + {% else %} + <tr> + <td>-</td> + <td>-</td> + <td>{{ constraint }}</td> + </tr> + {% 
endif %} + {% endfor %} + </tbody> +</table> +<b>Configurations:</b> +<table class="table table-striped table-hover"> + <thead> + <tr> + <th scope="col">Key</th> + <th scope="col">Value</th> + </tr> + </thead> + <tbody> + {% for config in slice.slice_config.config_rules %} + {% if config.WhichOneof('config_rule') == 'custom' %} + <tr> + <td> + {{ config.custom.resource_key }} + </td> + <td> + <ul> + {% for key, value in (config.custom.resource_value | from_json).items() %} + <li><b>{{ key }}:</b> {{ value }}</li> + {% endfor %} + </ul> + </td> + </tr> + {% endif %} + {% endfor %} + </tbody> +</table> +<div class="row mb-2"> + <div class="col-sm-6"> + <table class="table table-striped table-hover"> + <thead> + <tr> + <th scope="col">Service Id</th> + </tr> + </thead> + <tbody> + {% for service_id in slice.slice_service_ids %} + <tr> + <td> + <a href="{{ url_for('service.detail', service_uuid=service_id.service_uuid.uuid) }}"> + {{ service_id.service_uuid.uuid }} + <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16"> + <path d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 5.5 8 5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z"/> + <path d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z"/> + </svg> + </a> + </td> + </tr> + {% endfor %} + </tbody> + </table> + </div> + <div class="col-sm-6"> + <table class="table table-striped table-hover"> + <thead> + <tr> + <th scope="col">Sub-slices</th> + </tr> + </thead> + <tbody> + {% for subslice_id in slice.slice_subslice_ids %} + <tr> + <td> + <a href="{{ url_for('slice.detail', slice_uuid=subslice_id.slice_uuid.uuid) }}"> + {{ subslice_id.slice_uuid.uuid }} + <svg 
xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16"> + <path d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 5.5 8 5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z"/> + <path d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z"/> + </svg> + </a> + </td> + </tr> + {% endfor %} + </tbody> + </table> + </div> +</div> +{% endblock %} \ No newline at end of file diff --git a/src/webui/service/templates/slice/home.html b/src/webui/service/templates/slice/home.html new file mode 100644 index 0000000000000000000000000000000000000000..7096407b3e79ccd1b4fe4ec3f143cbbe3fba2876 --- /dev/null +++ b/src/webui/service/templates/slice/home.html @@ -0,0 +1,77 @@ +<!-- + Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+--> + +{% extends 'base.html' %} + +{% block content %} + <h1>Slice</h1> + + <div class="row"> + + <div class="col"> + {{ slices | length }} slices found in context <i>{{ session['context_uuid'] }}</i> + </div> + + </div> + + + <table class="table table-striped table-hover"> + <thead> + <tr> + <th scope="col">#</th> + <th scope="col">End points</th> + <th scope="col">Status</th> + <th scope="col"></th> + + </tr> + </thead> + <tbody> + {% if slices %} + {% for slice in slices %} + <tr> + <td> + {{ slice.slice_id.slice_uuid.uuid }} + </td> + <td> + {% for i in range(slice.slice_endpoint_ids|length) %} + <ul> + <li> {{ slice.slice_endpoint_ids[i].device_id.device_uuid.uuid }} / {{ slice.slice_endpoint_ids[i].endpoint_uuid.uuid }} </li> + </ul> + {% endfor %} + </td> + <td> + {{ sse.Name(slice.slice_status.slice_status).replace('SLICESTATUS_', '') }} + </td> + <td> + <a href="{{ url_for('slice.detail', slice_uuid=slice.slice_id.slice_uuid.uuid) }}"> + <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16"> + <path d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 5.5 8 5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z"/> + <path d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z"/> + </svg> + </a> + </td> + </tr> + {% endfor %} + {% else %} + <tr> + <td colspan="7">No slices found</td> + </tr> + + {% endif %} + </tbody> + </table> + +{% endblock %} \ No newline at end of file diff --git a/src/webui/tests/test_unitary.py b/src/webui/tests/test_unitary.py index 945a60186e04cc1bd3ee7678b340e9321646df97..11cc77a460a94707c6226cdfc4ca747563e95f45 100644 --- a/src/webui/tests/test_unitary.py +++ b/src/webui/tests/test_unitary.py @@ -68,6 +68,7 @@ class 
TestWebUI(ClientTestCase): with self.app.app_context(): url_for('main.home') url_for('service.home') + url_for('slice.home') url_for('device.home') url_for('link.home') #url_for('main.debug') diff --git a/tutorial/2-0-run-experiments.md b/tutorial/2-0-run-experiments.md index f87d00e98a66449f5fa6d267c527565b145722b2..82f6a56bf0481a4edeaf71251510f74c51138096 100644 --- a/tutorial/2-0-run-experiments.md +++ b/tutorial/2-0-run-experiments.md @@ -8,5 +8,5 @@ commands you might need, configuring the network topology, and executing differe - [2.1. Configure the Python environment](./2-1-python-environment.md) - [2.2. OFC'22 Demo - Bootstrap devices, Monitor device Endpoints, Manage L3VPN Services](./2-2-ofc22.md) - [2.3. OECC/PSC'22 Demo (WORK IN PROGRESS)](./2-3-oeccpsc22.md) -- [2.4. ECOC'22 Demo (PENDING)](./2-4-ecoc22.md) +- [2.4. ECOC'22 Demo - Disjoint DC-2-DC L3VPN Service (WORK IN PROGRESS)](./2-4-ecoc22.md) - [2.5. NFV-SDN'22 Demo (PENDING)](./2-5-nfvsdn22.md) diff --git a/tutorial/2-2-ofc22.md b/tutorial/2-2-ofc22.md index 651b7777d56dda7c125ba79697f3df78e4fba7ce..bd308ae30e756e140cf36184c089099a00181365 100644 --- a/tutorial/2-2-ofc22.md +++ b/tutorial/2-2-ofc22.md @@ -30,8 +30,8 @@ __Important__: The device drivers operating with real devices, e.g., OpenConfigD ## 2.2.3. Deployment and Dependencies To run this functional test, it is assumed you have deployed a MicroK8s-based Kubernetes environment and a TeraFlowSDN -controller instance as described in the [Tutorial: Deployment Guide](./1-0-deployment.md), and you configured -the Python environment as described in +controller instance as described in the [Tutorial: Deployment Guide](./1-0-deployment.md), and you configured the Python +environment as described in [Tutorial: Run Experiments Guide > 2.1. Configure Python Environment](./2-1-python-environment.md). Remember to source the scenario settings appropriately, e.g., `cd ~/tfs-ctrl && source my_deploy.sh` in each terminal you open. 
diff --git a/tutorial/2-4-ecoc22.md b/tutorial/2-4-ecoc22.md index f752bda840a3eb2fbde6c907e4ce139de3f8ce82..6fc9333b58fe7c6da51be5eefe9167853508456a 100644 --- a/tutorial/2-4-ecoc22.md +++ b/tutorial/2-4-ecoc22.md @@ -1 +1,120 @@ -# 2.4. ECOC'22 Demo (PENDING) +# 2.4. ECOC'22 Demo - Disjoint DC-2-DC L3VPN Service (WORK IN PROGRESS) + +This functional test reproduces the experimental assessment of "Experimental Demonstration of Transport Network Slicing +with SLA Using the TeraFlowSDN Controller" presented at [ECOC'22](https://www.ecoc2022.org/). + +## 2.4.1. Functional test folder + +This functional test can be found in folder `./src/tests/ecoc22/`. A convenience alias `./ecoc22/` pointing to that +folder has been defined. + +## 2.4.2. Execute with real devices + +This functional test has only been tested with emulated devices; however, if you have access to real devices, you can +modify the files `./ecoc22/tests/Objects.py` and `./ecoc22/tests/Credentials.py` to point to your devices, and map to +your network topology. +Otherwise, you can modify the `./ecoc22/tests/descriptors_emulated.json` that is designed to be uploaded through the +WebUI instead of using the command line scripts. + +__Important__: The device drivers operating with real devices, e.g., OpenConfigDriver, P4Driver, and TransportApiDriver, + have to be considered as experimental. The configuration and monitoring capabilities they support are + limited or partially implemented/tested. Use them with care. + + +## 2.4.3. Deployment and Dependencies + +To run this functional test, it is assumed you have deployed a MicroK8s-based Kubernetes environment and a TeraFlowSDN +controller instance as described in the [Tutorial: Deployment Guide](./1-0-deployment.md), and you configured the Python +environment as described in +[Tutorial: Run Experiments Guide > 2.1. Configure Python Environment](./2-1-python-environment.md). 
+Remember to source the scenario settings appropriately, e.g., `cd ~/tfs-ctrl && source my_deploy.sh` in each terminal +you open. +Next, remember to source the environment variables created by the deployment, e.g., +`cd ~/tfs-ctrl && source tfs_runtime_env_vars.sh`. +Then, re-build the protocol buffers code from the proto files: +`./proto/generate_code_python.sh` + + + +## 2.4.4. Access to the WebUI and Dashboard + +When the deployment completes, you can connect to the TeraFlowSDN WebUI and Dashboards as described in +[Tutorial: Deployment Guide > 1.4. Access TeraFlowSDN WebUI and Grafana Dashboards](./1-4-access-webui.md) + +Notes: +- the default credentials for the Grafana Dashboard are user/pass: `admin`/`admin123+`. +- this functional test does not involve the Monitoring component, so no monitoring data is plotted in Grafana. + + +## 2.4.5. Test execution + +To execute this functional test, four main steps need to be carried out: +1. Device bootstrapping +2. L3VPN Service creation +3. L3VPN Service removal +4. Cleanup + +As the execution of each test progresses, a report will be generated indicating PASSED / FAILED / SKIPPED. If there +is some error during the execution, you should see a detailed report on the error. See the troubleshooting section if +needed. + +You can check the logs of the different components using the appropriate `scripts/show_logs_[component].sh` scripts +after you execute each step. + + +### 2.4.5.1. Device bootstrapping + +This step configures some basic entities (Context and Topology), the devices, and the links in the topology. The +expected results are: +- The devices to be added into the Topology. +- The devices to be pre-configured and initialized as ENABLED by the Automation component. +- The monitoring for the device ports (named as endpoints in TeraFlowSDN) to be activated and data collection to + automatically start. +- The links to be added to the topology. 
+ +To run this step, you can do it from the WebUI by uploading the file `./ecoc22/tests/descriptors_emulated.json` that +contains the descriptors of the contexts, topologies, devices, and links, or by executing the +`./ecoc22/run_test_01_bootstrap.sh` script. + +When the bootstrapping finishes, check in the Grafana L3-Monitoring Dashboard and you should see the monitoring data +being plotted and updated every 5 seconds (by default). Given that there is no service configured, you should see a +0-valued flat plot. + +In the WebUI, select the "admin" Context. Then, in the "Devices" tab you should see that 5 different emulated devices +have been created and activated: 4 packet routers, and 1 optical line system controller. Besides, in the "Services" tab +you should see that there is no service created. Note here that the emulated devices produce synthetic +randomly-generated data and do not care about the services configured. + + +### 2.4.5.2. L3VPN Service creation + +This step configures a new service emulating the request an OSM WIM would make by means of a Mock OSM instance. + +To run this step, execute the `./ecoc22/run_test_02_create_service.sh` script. + +When the script finishes, check the WebUI "Services" tab. You should see that two services have been created, one for +the optical layer and another for the packet layer. Besides, you can check the "Devices" tab to see the configuration +rules that have been configured in each device. In the Grafana Dashboard, given that there is now a service configured, +you should see the plots with the monitored data for the device. By default, device R1-EMU is selected. + + +### 2.4.5.3. L3VPN Service removal + +This step deconfigures the previously created services emulating the request an OSM WIM would make by means of a Mock +OSM instance. + +To run this step, execute the `./ecoc22/run_test_03_delete_service.sh` script, or delete the L3NM service from the WebUI. 
+ +When the script finishes, check the WebUI "Services" tab. You should see that the two services have been removed. +Besides, in the "Devices" tab you can see that the appropriate configuration rules have been deconfigured. In the +Grafana Dashboard, given that there is no service configured, you should see a 0-valued flat plot again. + + +### 2.4.5.4. Cleanup + +This last step performs a cleanup of the scenario removing all the TeraFlowSDN entities for completeness. + +To run this step, execute the `./ecoc22/run_test_04_cleanup.sh` script. + +When the script finishes, check the WebUI "Devices" tab; you should see that the devices have been removed. Besides, in +the "Services" tab you can see that the "admin" Context has no services given that the context itself has been removed.